Bug 716647 - Part 4: Recompile on-stack baseline scripts when toggling debug mode. (r=jandem)
author: Shu-yu Guo <shu@rfrn.org>
Thu, 24 Apr 2014 01:59:37 -0700
changeset 199508 a19a7c0a4b04602d66c3ecf3c956a6cee8ba6559
parent 199507 061ebab47be320047966424d449c90de296ea930
child 199509 a1354a3e748efbbdf62fa2b93b95eb80a4857324
push id: 486
push user: asasaki@mozilla.com
push date: Mon, 14 Jul 2014 18:39:42 +0000
treeherder: mozilla-release@d33428174ff1 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jandem
bugs: 716647
milestone: 31.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 716647 - Part 4: Recompile on-stack baseline scripts when toggling debug mode. (r=jandem)
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineCompiler.h
js/src/jit/BaselineDebugModeOSR.cpp
js/src/jit/BaselineDebugModeOSR.h
js/src/jit/BaselineFrame.h
js/src/jit/BaselineFrameInfo.h
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/Ion.cpp
js/src/jit/Ion.h
js/src/jit/IonFrames.cpp
js/src/jit/IonFrames.h
js/src/jit/IonSpewer.cpp
js/src/jit/IonSpewer.h
js/src/jit/JitCompartment.h
js/src/jit/arm/BaselineCompiler-arm.cpp
js/src/jit/arm/BaselineCompiler-arm.h
js/src/jit/arm/BaselineHelpers-arm.h
js/src/jit/shared/BaselineCompiler-shared.cpp
js/src/jit/shared/BaselineCompiler-shared.h
js/src/jit/shared/BaselineCompiler-x86-shared.cpp
js/src/jit/shared/BaselineCompiler-x86-shared.h
js/src/jit/x64/BaselineCompiler-x64.cpp
js/src/jit/x64/BaselineCompiler-x64.h
js/src/jit/x64/BaselineHelpers-x64.h
js/src/jit/x86/BaselineCompiler-x86.cpp
js/src/jit/x86/BaselineCompiler-x86.h
js/src/jit/x86/BaselineHelpers-x86.h
js/src/moz.build
js/src/vm/Stack.h
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -6,16 +6,17 @@
 
 #include "jsprf.h"
 #include "jit/arm/Simulator-arm.h"
 #include "jit/BaselineIC.h"
 #include "jit/BaselineJIT.h"
 #include "jit/CompileInfo.h"
 #include "jit/IonSpewer.h"
 #include "jit/Recover.h"
+#include "jit/RematerializedFrame.h"
 #include "vm/ArgumentsObject.h"
 #include "vm/TraceLogging.h"
 
 #include "jsscriptinlines.h"
 
 #include "jit/IonFrames-inl.h"
 
 using namespace js;
@@ -379,22 +380,22 @@ IsInlinableFallback(ICFallbackStub *icEn
     return icEntry->isCall_Fallback() || icEntry->isGetProp_Fallback() ||
            icEntry->isSetProp_Fallback();
 }
 
 static inline void*
 GetStubReturnAddress(JSContext *cx, jsbytecode *pc)
 {
     if (IsGetPropPC(pc))
-        return cx->compartment()->jitCompartment()->baselineGetPropReturnAddr();
+        return cx->compartment()->jitCompartment()->baselineGetPropReturnFromIonAddr();
     if (IsSetPropPC(pc))
-        return cx->compartment()->jitCompartment()->baselineSetPropReturnAddr();
+        return cx->compartment()->jitCompartment()->baselineSetPropReturnFromIonAddr();
     // This should be a call op of some kind, now.
     JS_ASSERT(IsCallPC(pc));
-    return cx->compartment()->jitCompartment()->baselineCallReturnAddr();
+    return cx->compartment()->jitCompartment()->baselineCallReturnFromIonAddr();
 }
 
 static inline jsbytecode *
 GetNextNonLoopEntryPc(jsbytecode *pc)
 {
     JSOp op = JSOp(*pc);
     if (op == JSOP_GOTO)
         return pc + GET_JUMP_OFFSET(pc);
@@ -1530,16 +1531,44 @@ HandleBaselineInfoBailout(JSContext *cx,
             outerScript->filename(), outerScript->lineno());
 
     JS_ASSERT(!outerScript->ionScript()->invalidated());
 
     IonSpew(IonSpew_BaselineBailouts, "Invalidating due to invalid baseline info");
     return Invalidate(cx, outerScript);
 }
 
+static void
+CopyFromRematerializedFrame(JSContext *cx, JitActivation *act, uint8_t *fp, size_t inlineDepth,
+                            BaselineFrame *frame)
+{
+    RematerializedFrame *rematFrame = act->lookupRematerializedFrame(fp, inlineDepth);
+
+    // We might not have rematerialized a frame if the user never requested a
+    // Debugger.Frame for it.
+    if (!rematFrame)
+        return;
+
+    MOZ_ASSERT(rematFrame->script() == frame->script());
+    MOZ_ASSERT(rematFrame->numActualArgs() == frame->numActualArgs());
+
+    frame->setScopeChain(rematFrame->scopeChain());
+    frame->thisValue() = rematFrame->thisValue();
+
+    for (unsigned i = 0; i < frame->numActualArgs(); i++)
+        frame->argv()[i] = rematFrame->argv()[i];
+
+    for (size_t i = 0; i < frame->script()->nfixed(); i++)
+        *frame->valueSlot(i) = rematFrame->locals()[i];
+
+    IonSpew(IonSpew_BaselineBailouts,
+            "  Copied from rematerialized frame at (%p,%u)",
+            fp, inlineDepth);
+}
+
 uint32_t
 jit::FinishBailoutToBaseline(BaselineBailoutInfo *bailoutInfo)
 {
     // The caller pushes R0 and R1 on the stack without rooting them.
     // Since GC here is very unlikely just suppress it.
     JSContext *cx = GetJSContextFromJitCode();
     js::gc::AutoSuppressGC suppressGC(cx);
 
@@ -1569,16 +1598,17 @@ jit::FinishBailoutToBaseline(BaselineBai
 
     // Create arguments objects for bailed out frames, to maintain the invariant
     // that script->needsArgsObj() implies frame->hasArgsObj().
     RootedScript innerScript(cx, nullptr);
     RootedScript outerScript(cx, nullptr);
 
     JS_ASSERT(cx->currentlyRunningInJit());
     JitFrameIterator iter(cx);
+    uint8_t *outerFp = nullptr;
 
     uint32_t frameno = 0;
     while (frameno < numFrames) {
         JS_ASSERT(!iter.isIonJS());
 
         if (iter.isBaselineJS()) {
             BaselineFrame *frame = iter.baselineFrame();
             MOZ_ASSERT(frame->script()->hasBaselineScript());
@@ -1602,27 +1632,52 @@ jit::FinishBailoutToBaseline(BaselineBai
                 // to the slot.
                 RootedScript script(cx, frame->script());
                 SetFrameArgumentsObject(cx, frame, script, argsObj);
             }
 
             if (frameno == 0)
                 innerScript = frame->script();
 
-            if (frameno == numFrames - 1)
+            if (frameno == numFrames - 1) {
                 outerScript = frame->script();
+                outerFp = iter.fp();
+            }
 
             frameno++;
         }
 
         ++iter;
     }
 
-    JS_ASSERT(innerScript);
-    JS_ASSERT(outerScript);
+    MOZ_ASSERT(innerScript);
+    MOZ_ASSERT(outerScript);
+    MOZ_ASSERT(outerFp);
+
+    // If we rematerialized Ion frames due to debug mode toggling, copy their
+    // values into the baseline frame. We need to do this even when debug mode
+    // is off, as we should respect the mutations made while debug mode was
+    // on.
+    JitActivation *act = cx->mainThread().activation()->asJit();
+    if (act->hasRematerializedFrame(outerFp)) {
+        JitFrameIterator iter(cx);
+        size_t inlineDepth = numFrames;
+        while (inlineDepth > 0) {
+            if (iter.isBaselineJS()) {
+                inlineDepth--;
+                CopyFromRematerializedFrame(cx, act, outerFp, inlineDepth, iter.baselineFrame());
+            }
+            ++iter;
+        }
+
+        // After copying from all the rematerialized frames, remove them from
+        // the table to keep the table up to date.
+        act->removeRematerializedFrame(outerFp);
+    }
+
     IonSpew(IonSpew_BaselineBailouts,
             "  Restored outerScript=(%s:%u,%u) innerScript=(%s:%u,%u) (bailoutKind=%u)",
             outerScript->filename(), outerScript->lineno(), outerScript->getUseCount(),
             innerScript->filename(), innerScript->lineno(), innerScript->getUseCount(),
             (unsigned) bailoutKind);
 
     switch (bailoutKind) {
       case Bailout_Normal:
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -20,17 +20,17 @@
 
 #include "jsscriptinlines.h"
 
 #include "vm/Interpreter-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompiler::BaselineCompiler(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompiler::BaselineCompiler(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : BaselineCompilerSpecific(cx, alloc, script),
     modifiesArguments_(false)
 {
 }
 
 bool
 BaselineCompiler::init()
 {
@@ -65,17 +65,17 @@ BaselineCompiler::addPCMappingEntry(bool
 
     return pcMappingEntries_.append(entry);
 }
 
 MethodStatus
 BaselineCompiler::compile()
 {
     IonSpew(IonSpew_BaselineScripts, "Baseline compiling script %s:%d (%p)",
-            script->filename(), script->lineno(), script.get());
+            script->filename(), script->lineno(), script);
 
     IonSpew(IonSpew_Codegen, "# Emitting baseline code for script %s:%d",
             script->filename(), script->lineno());
 
     TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());
     AutoTraceLog logScript(logger, TraceLogCreateTextId(logger, script));
     AutoTraceLog logCompile(logger, TraceLogger::BaselineCompilation);
 
@@ -116,17 +116,18 @@ BaselineCompiler::compile()
     JitCode *code = linker.newCode<CanGC>(cx, JSC::BASELINE_CODE);
     if (!code)
         return Method_Error;
 
     JSObject *templateScope = nullptr;
     if (script->functionNonDelazifying()) {
         RootedFunction fun(cx, script->functionNonDelazifying());
         if (fun->isHeavyweight()) {
-            templateScope = CallObject::createTemplateObject(cx, script, gc::TenuredHeap);
+            RootedScript scriptRoot(cx, script);
+            templateScope = CallObject::createTemplateObject(cx, scriptRoot, gc::TenuredHeap);
             if (!templateScope)
                 return Method_Error;
 
             if (fun->isNamedLambda()) {
                 RootedObject declEnvObject(cx, DeclEnvObject::createTemplateObject(cx, fun, gc::TenuredHeap));
                 if (!declEnvObject)
                     return Method_Error;
                 templateScope->as<ScopeObject>().setEnclosingScope(declEnvObject);
@@ -169,23 +170,27 @@ BaselineCompiler::compile()
 
         previousOffset = entry.nativeOffset;
     }
 
     if (pcEntries.oom())
         return Method_Error;
 
     prologueOffset_.fixup(&masm);
+    epilogueOffset_.fixup(&masm);
     spsPushToggleOffset_.fixup(&masm);
+    postDebugPrologueOffset_.fixup(&masm);
 
     // Note: There is an extra entry in the bytecode type map for the search hint, see below.
     size_t bytecodeTypeMapEntries = script->nTypeSets() + 1;
 
     BaselineScript *baselineScript = BaselineScript::New(cx, prologueOffset_.offset(),
+                                                         epilogueOffset_.offset(),
                                                          spsPushToggleOffset_.offset(),
+                                                         postDebugPrologueOffset_.offset(),
                                                          icEntries_.length(),
                                                          pcMappingIndexEntries.length(),
                                                          pcEntries.length(),
                                                          bytecodeTypeMapEntries);
     if (!baselineScript)
         return Method_Error;
 
     baselineScript->setMethod(code);
@@ -384,16 +389,20 @@ BaselineCompiler::emitPrologue()
         return false;
 
     return true;
 }
 
 bool
 BaselineCompiler::emitEpilogue()
 {
+    // Record the offset of the epilogue, so we can do early return from
+    // Debugger handlers during on-stack recompile.
+    epilogueOffset_ = masm.currentOffset();
+
     masm.bind(&return_);
 
 #ifdef JS_TRACE_LOGGING
     TraceLogger *logger = TraceLoggerForMainThread(cx->runtime());
     Register loggerReg = RegisterSet::Volatile().takeGeneral();
     masm.Push(loggerReg);
     masm.movePtr(ImmPtr(logger), loggerReg);
     masm.tracelogStop(loggerReg, TraceLogger::Baseline);
@@ -442,19 +451,19 @@ BaselineCompiler::emitOutOfLinePostBarri
     masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, PostWriteBarrier));
 
     masm.ret();
     return true;
 }
 #endif // JSGC_GENERATIONAL
 
 bool
-BaselineCompiler::emitIC(ICStub *stub, bool isForOp)
+BaselineCompiler::emitIC(ICStub *stub, ICEntry::Kind kind)
 {
-    ICEntry *entry = allocateICEntry(stub, isForOp);
+    ICEntry *entry = allocateICEntry(stub, kind);
     if (!entry)
         return false;
 
     CodeOffsetLabel patchOffset;
     EmitCallIC(&patchOffset, masm);
     entry->setReturnOffset(masm.currentOffset());
     if (!addICLoadLabel(patchOffset))
         return false;
@@ -522,37 +531,42 @@ BaselineCompiler::emitStackCheck(bool ea
 }
 
 typedef bool (*DebugPrologueFn)(JSContext *, BaselineFrame *, jsbytecode *, bool *);
 static const VMFunction DebugPrologueInfo = FunctionInfo<DebugPrologueFn>(jit::DebugPrologue);
 
 bool
 BaselineCompiler::emitDebugPrologue()
 {
-    if (!debugMode_)
-        return true;
-
-    // Load pointer to BaselineFrame in R0.
-    masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
-
-    prepareVMCall();
-    pushArg(ImmPtr(pc));
-    pushArg(R0.scratchReg());
-    if (!callVM(DebugPrologueInfo))
-        return false;
-
-    // If the stub returns |true|, we have to return the value stored in the
-    // frame's return value slot.
-    Label done;
-    masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &done);
-    {
-        masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
-        masm.jump(&return_);
+    if (debugMode_) {
+        // Load pointer to BaselineFrame in R0.
+        masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
+
+        prepareVMCall();
+        pushArg(ImmPtr(pc));
+        pushArg(R0.scratchReg());
+        if (!callVM(DebugPrologueInfo))
+            return false;
+
+        // Fix up the fake ICEntry appended by callVM for on-stack recompilation.
+        icEntries_.back().setForDebugPrologue();
+
+        // If the stub returns |true|, we have to return the value stored in the
+        // frame's return value slot.
+        Label done;
+        masm.branchTest32(Assembler::Zero, ReturnReg, ReturnReg, &done);
+        {
+            masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
+            masm.jump(&return_);
+        }
+        masm.bind(&done);
     }
-    masm.bind(&done);
+
+    postDebugPrologueOffset_ = masm.currentOffset();
+
     return true;
 }
 
 typedef bool (*StrictEvalPrologueFn)(JSContext *, BaselineFrame *);
 static const VMFunction StrictEvalPrologueInfo =
     FunctionInfo<StrictEvalPrologueFn>(jit::StrictEvalPrologue);
 
 typedef bool (*HeavyweightFunPrologueFn)(JSContext *, BaselineFrame *);
@@ -716,17 +730,17 @@ BaselineCompiler::emitDebugTrap()
 
 #ifdef DEBUG
     // Patchable call offset has to match the pc mapping offset.
     PCMappingEntry &entry = pcMappingEntries_.back();
     JS_ASSERT((&offset)->offset() == entry.nativeOffset);
 #endif
 
     // Add an IC entry for the return offset -> pc mapping.
-    ICEntry icEntry(script->pcToOffset(pc), false);
+    ICEntry icEntry(script->pcToOffset(pc), ICEntry::Kind_DebugTrap);
     icEntry.setReturnOffset(masm.currentOffset());
     if (!icEntries_.append(icEntry))
         return false;
 
     return true;
 }
 
 bool
@@ -2736,16 +2750,19 @@ BaselineCompiler::emit_JSOP_EXCEPTION()
 
 typedef bool (*OnDebuggerStatementFn)(JSContext *, BaselineFrame *, jsbytecode *pc, bool *);
 static const VMFunction OnDebuggerStatementInfo =
     FunctionInfo<OnDebuggerStatementFn>(jit::OnDebuggerStatement);
 
 bool
 BaselineCompiler::emit_JSOP_DEBUGGER()
 {
+    if (!debugMode_)
+        return true;
+
     prepareVMCall();
     pushArg(ImmPtr(pc));
 
     masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
     pushArg(R0.scratchReg());
 
     if (!callVM(OnDebuggerStatementInfo))
         return false;
@@ -2778,16 +2795,19 @@ BaselineCompiler::emitReturn()
 
         prepareVMCall();
         pushArg(Imm32(1));
         pushArg(ImmPtr(pc));
         pushArg(R0.scratchReg());
         if (!callVM(DebugEpilogueInfo))
             return false;
 
+        // Fix up the fake ICEntry appended by callVM for on-stack recompilation.
+        icEntries_.back().setForDebugEpilogue();
+
         masm.loadValue(frame.addressOfReturnValue(), JSReturnOperand);
     }
 
     // Only emit the jump if this JSOP_RETRVAL is not the last instruction.
     // Not needed for last instruction, because last instruction flows
     // into return label.
     if (pc + GetBytecodeLength(pc) < script->codeEnd())
         masm.jump(&return_);
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -168,50 +168,58 @@ class BaselineCompiler : public Baseline
     NonAssertingLabel           return_;
 #ifdef JSGC_GENERATIONAL
     NonAssertingLabel           postBarrierSlot_;
 #endif
 
     // Native code offset right before the scope chain is initialized.
     CodeOffsetLabel prologueOffset_;
 
+    // Native code offset right before the frame is popped and the method
+    // returned from.
+    CodeOffsetLabel epilogueOffset_;
+
+    // Native code offset right after debug prologue and epilogue, or
+    // equivalent positions when debug mode is off.
+    CodeOffsetLabel postDebugPrologueOffset_;
+
     // Whether any on stack arguments are modified.
     bool modifiesArguments_;
 
     Label *labelOf(jsbytecode *pc) {
         return &labels_[script->pcToOffset(pc)];
     }
 
     // If a script has more |nslots| than this, then emit code to do an
     // early stack check.
     static const unsigned EARLY_STACK_CHECK_SLOT_COUNT = 128;
     bool needsEarlyStackCheck() const {
         return script->nslots() > EARLY_STACK_CHECK_SLOT_COUNT;
     }
 
   public:
-    BaselineCompiler(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompiler(JSContext *cx, TempAllocator &alloc, JSScript *script);
     bool init();
 
     MethodStatus compile();
 
   private:
     MethodStatus emitBody();
 
     bool emitPrologue();
     bool emitEpilogue();
 #ifdef JSGC_GENERATIONAL
     bool emitOutOfLinePostBarrierSlot();
 #endif
-    bool emitIC(ICStub *stub, bool isForOp);
+    bool emitIC(ICStub *stub, ICEntry::Kind kind);
     bool emitOpIC(ICStub *stub) {
-        return emitIC(stub, true);
+        return emitIC(stub, ICEntry::Kind_Op);
     }
     bool emitNonOpIC(ICStub *stub) {
-        return emitIC(stub, false);
+        return emitIC(stub, ICEntry::Kind_NonOp);
     }
 
     bool emitStackCheck(bool earlyCheck=false);
     bool emitInterruptCheck();
     bool emitUseCountIncrement(bool allowOsr=true);
     bool emitArgumentTypeChecks();
     bool emitDebugPrologue();
     bool emitDebugTrap();
new file mode 100644
--- /dev/null
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -0,0 +1,709 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "jit/BaselineDebugModeOSR.h"
+
+#include "mozilla/DebugOnly.h"
+
+#include "jit/IonLinker.h"
+
+#include "jit/IonFrames-inl.h"
+#include "vm/Stack-inl.h"
+
+using namespace mozilla;
+using namespace js;
+using namespace js::jit;
+
+struct DebugModeOSREntry
+{
+    JSScript *script;
+    BaselineScript *oldBaselineScript;
+    BaselineDebugModeOSRInfo *recompInfo;
+    uint32_t pcOffset;
+    ICEntry::Kind frameKind;
+
+    // Used for sanity asserts in debug builds.
+    DebugOnly<ICStub *> stub;
+
+    DebugModeOSREntry(JSScript *script)
+      : script(script),
+        oldBaselineScript(script->baselineScript()),
+        recompInfo(nullptr),
+        pcOffset(uint32_t(-1)),
+        frameKind(ICEntry::Kind_NonOp),
+        stub(nullptr)
+    { }
+
+    DebugModeOSREntry(JSScript *script, const ICEntry &icEntry)
+      : script(script),
+        oldBaselineScript(script->baselineScript()),
+        recompInfo(nullptr),
+        pcOffset(icEntry.pcOffset()),
+        frameKind(icEntry.kind()),
+        stub(nullptr)
+    {
+#ifdef DEBUG
+        MOZ_ASSERT(pcOffset == icEntry.pcOffset());
+        MOZ_ASSERT(frameKind == icEntry.kind());
+
+        // Assert that if we have a NonOp ICEntry, that there are no unsynced
+        // slots, since such a recompile could have only been triggered from
+        // either an interrupt check or a debug trap handler.
+        //
+        // If triggered from an interrupt check, the stack should be fully
+        // synced.
+        //
+        // If triggered from a debug trap handler, we must be recompiling for
+        // toggling debug mode on->off, in which case the old baseline script
+        // should have fully synced stack at every bytecode.
+        if (frameKind == ICEntry::Kind_NonOp) {
+            PCMappingSlotInfo slotInfo;
+            jsbytecode *pc = script->offsetToPC(pcOffset);
+            oldBaselineScript->nativeCodeForPC(script, pc, &slotInfo);
+            MOZ_ASSERT(slotInfo.numUnsynced() == 0);
+        }
+#endif
+    }
+
+    DebugModeOSREntry(DebugModeOSREntry &&other)
+      : script(other.script),
+        oldBaselineScript(other.oldBaselineScript),
+        recompInfo(other.recompInfo ? other.takeRecompInfo() : nullptr),
+        pcOffset(other.pcOffset),
+        frameKind(other.frameKind),
+        stub(other.stub)
+    { }
+
+    ~DebugModeOSREntry() {
+        // Note that this is nulled out when the recompInfo is taken by the
+        // frame. The frame then has the responsibility of freeing the
+        // recompInfo.
+        js_delete(recompInfo);
+    }
+
+    bool needsRecompileInfo() const {
+        return (frameKind == ICEntry::Kind_CallVM ||
+                frameKind == ICEntry::Kind_DebugTrap ||
+                frameKind == ICEntry::Kind_DebugPrologue ||
+                frameKind == ICEntry::Kind_DebugEpilogue);
+    }
+
+    BaselineDebugModeOSRInfo *takeRecompInfo() {
+        MOZ_ASSERT(recompInfo);
+        BaselineDebugModeOSRInfo *tmp = recompInfo;
+        recompInfo = nullptr;
+        return tmp;
+    }
+
+    bool allocateRecompileInfo(JSContext *cx) {
+        MOZ_ASSERT(needsRecompileInfo());
+
+        // If we are returning to a frame which needs a continuation fixer,
+        // allocate the recompile info up front so that the patching function
+        // is infallible.
+        jsbytecode *pc = script->offsetToPC(pcOffset);
+
+        // XXX: Work around compiler error disallowing using bitfields
+        // with the template magic of new_.
+        ICEntry::Kind kind = frameKind;
+        recompInfo = cx->new_<BaselineDebugModeOSRInfo>(pc, kind);
+        return !!recompInfo;
+    }
+};
+
+typedef js::Vector<DebugModeOSREntry> DebugModeOSREntryVector;
+
+static bool
+CollectOnStackScripts(JSContext *cx, const JitActivationIterator &activation,
+                      DebugModeOSREntryVector &entries)
+{
+    DebugOnly<ICStub *> prevFrameStubPtr = nullptr;
+    bool needsRecompileHandler = false;
+    for (JitFrameIterator iter(activation); !iter.done(); ++iter) {
+        switch (iter.type()) {
+          case JitFrame_BaselineJS: {
+            JSScript *script = iter.script();
+            uint8_t *retAddr = iter.returnAddressToFp();
+            ICEntry &entry = script->baselineScript()->icEntryFromReturnAddress(retAddr);
+
+            if (!entries.append(DebugModeOSREntry(script, entry)))
+                return false;
+
+            if (entries.back().needsRecompileInfo()) {
+                if (!entries.back().allocateRecompileInfo(cx))
+                    return false;
+
+                needsRecompileHandler |= true;
+            }
+
+            entries.back().stub = prevFrameStubPtr;
+            prevFrameStubPtr = nullptr;
+            break;
+          }
+
+          case JitFrame_BaselineStub:
+            prevFrameStubPtr =
+                reinterpret_cast<IonBaselineStubFrameLayout *>(iter.fp())->maybeStubPtr();
+            break;
+
+          case JitFrame_IonJS: {
+            JSScript *script = iter.script();
+            if (!entries.append(DebugModeOSREntry(script)))
+                return false;
+            for (InlineFrameIterator inlineIter(cx, &iter); inlineIter.more(); ++inlineIter) {
+                if (!entries.append(DebugModeOSREntry(inlineIter.script())))
+                    return false;
+            }
+            break;
+          }
+
+          default:;
+        }
+    }
+
+    // Initialize the on-stack recompile handler, which may fail, so that
+    // patching the stack is infallible.
+    if (needsRecompileHandler) {
+        JitRuntime *rt = cx->runtime()->jitRuntime();
+        if (!rt->getBaselineDebugModeOSRHandlerAddress(cx, true))
+            return false;
+    }
+
+    return true;
+}
+
+static inline uint8_t *
+GetStubReturnFromStubAddress(JSContext *cx, jsbytecode *pc)
+{
+    JitCompartment *comp = cx->compartment()->jitCompartment();
+    void *addr;
+    if (IsGetPropPC(pc)) {
+        addr = comp->baselineGetPropReturnFromStubAddr();
+    } else if (IsSetPropPC(pc)) {
+        addr = comp->baselineSetPropReturnFromStubAddr();
+    } else {
+        JS_ASSERT(IsCallPC(pc));
+        addr = comp->baselineCallReturnFromStubAddr();
+    }
+    return reinterpret_cast<uint8_t *>(addr);
+}
+
+static const char *
+ICEntryKindToString(ICEntry::Kind kind)
+{
+    switch (kind) {
+      case ICEntry::Kind_Op:
+        return "IC";
+      case ICEntry::Kind_NonOp:
+        return "non-op IC";
+      case ICEntry::Kind_CallVM:
+        return "callVM";
+      case ICEntry::Kind_DebugTrap:
+        return "debug trap";
+      case ICEntry::Kind_DebugPrologue:
+        return "debug prologue";
+      case ICEntry::Kind_DebugEpilogue:
+        return "debug epilogue";
+      default:
+        MOZ_ASSUME_UNREACHABLE("bad ICEntry kind");
+    }
+}
+
+static void
+SpewPatchBaselineFrame(uint8_t *oldReturnAddress, uint8_t *newReturnAddress,
+                       JSScript *script, ICEntry::Kind frameKind, jsbytecode *pc)
+{
+    IonSpew(IonSpew_BaselineDebugModeOSR,
+            "Patch return %#016llx -> %#016llx to BaselineJS (%s:%d) from %s at %s",
+            uintptr_t(oldReturnAddress), uintptr_t(newReturnAddress),
+            script->filename(), script->lineno(),
+            ICEntryKindToString(frameKind), js_CodeName[(JSOp)*pc]);
+}
+
+static void
+SpewPatchStubFrame(uint8_t *oldReturnAddress, uint8_t *newReturnAddress,
+                   ICStub *oldStub, ICStub *newStub)
+{
+    IonSpew(IonSpew_BaselineDebugModeOSR,
+            "Patch return %#016llx -> %#016llx",
+            uintptr_t(oldReturnAddress), uintptr_t(newReturnAddress));
+    IonSpew(IonSpew_BaselineDebugModeOSR,
+            "Patch   stub %#016llx -> %#016llx to BaselineStub",
+            uintptr_t(oldStub), uintptr_t(newStub));
+}
+
+static void
+PatchBaselineFramesForDebugMode(JSContext *cx, const JitActivationIterator &activation,
+                                DebugModeOSREntryVector &entries, size_t *start)
+{
+    //
+    // Recompile Patching Overview
+    //
+    // When toggling debug mode with live baseline scripts on the stack, we
+    // could have entered the VM via the following ways from the baseline
+    // script.
+    //
+    // Off to On:
+    //  A. From a "can call" stub.
+    //  B. From a VM call (interrupt handler, debugger statement handler).
+    //
+    // On to Off:
+    //  - All the ways above.
+    //  C. From the debug trap handler.
+    //  D. From the debug prologue.
+    //  E. From the debug epilogue.
+    //
+    // In general, we patch the return address from the VM call to return to a
+    // "continuation fixer" to fix up machine state (registers and stack
+    // state). Specifics on what need to be done are documented below.
+    //
+
+    IonCommonFrameLayout *prev = nullptr;
+    size_t entryIndex = *start;
+    DebugOnly<bool> expectedDebugMode = cx->compartment()->debugMode();
+
+    for (JitFrameIterator iter(activation); !iter.done(); ++iter) {
+        switch (iter.type()) {
+          case JitFrame_BaselineJS: {
+            JSScript *script = entries[entryIndex].script;
+            uint32_t pcOffset = entries[entryIndex].pcOffset;
+            jsbytecode *pc = script->offsetToPC(pcOffset);
+
+            MOZ_ASSERT(script == iter.script());
+            MOZ_ASSERT(pcOffset < script->length());
+            MOZ_ASSERT(script->baselineScript()->debugMode() == expectedDebugMode);
+
+            BaselineScript *bl = script->baselineScript();
+            ICEntry::Kind kind = entries[entryIndex].frameKind;
+
+            if (kind == ICEntry::Kind_Op) {
+                // Case A above.
+                //
+                // Patching this case needs to patch both the stub frame and
+                // the baseline frame. The stub frame is patched below. For
+                // the baseline frame here, we resume right after the IC
+                // returns.
+                //
+                // Since we're using the IC-specific k-fixer, we can resume
+                // directly to the IC resume address.
+                uint8_t *retAddr = bl->returnAddressForIC(bl->icEntryFromPCOffset(pcOffset));
+                SpewPatchBaselineFrame(prev->returnAddress(), retAddr, script, kind, pc);
+                prev->setReturnAddress(retAddr);
+                entryIndex++;
+                break;
+            }
+
+            bool popFrameReg;
+
+            // The RecompileInfo must already be allocated so that this
+            // function may be infallible.
+            BaselineDebugModeOSRInfo *recompInfo = entries[entryIndex].takeRecompInfo();
+
+            switch (kind) {
+              case ICEntry::Kind_CallVM:
+                // Case B above.
+                //
+                // Patching returns from an interrupt handler or the debugger
+                // statement handler is similar in that we can resume at the
+                // next op.
+                pc += GetBytecodeLength(pc);
+                recompInfo->resumeAddr = bl->nativeCodeForPC(script, pc, &recompInfo->slotInfo);
+                popFrameReg = true;
+                break;
+
+              case ICEntry::Kind_DebugTrap:
+                // Case C above.
+                //
+                // Debug traps are emitted before each op, so we resume at the
+                // same op. Calling debug trap handlers is done via a toggled
+                // call to a thunk (DebugTrapHandler) that takes care tearing
+                // down its own stub frame so we don't need to worry about
+                // popping the frame reg.
+                recompInfo->resumeAddr = bl->nativeCodeForPC(script, pc, &recompInfo->slotInfo);
+                popFrameReg = false;
+                break;
+
+              case ICEntry::Kind_DebugPrologue:
+                // Case D above.
+                //
+                // We patch a jump directly to the right place in the prologue
+                // after popping the frame reg and checking for forced return.
+                recompInfo->resumeAddr = bl->postDebugPrologueAddr();
+                popFrameReg = true;
+                break;
+
+              default:
+                // Case E above.
+                //
+                // We patch a jump directly to the epilogue after popping the
+                // frame reg and checking for forced return.
+                MOZ_ASSERT(kind == ICEntry::Kind_DebugEpilogue);
+                recompInfo->resumeAddr = bl->epilogueEntryAddr();
+                popFrameReg = true;
+                break;
+            }
+
+            SpewPatchBaselineFrame(prev->returnAddress(), recompInfo->resumeAddr,
+                                   script, kind, recompInfo->pc);
+
+            // The recompile handler must already be created so that this
+            // function may be infallible.
+            JitRuntime *rt = cx->runtime()->jitRuntime();
+            void *handlerAddr = rt->getBaselineDebugModeOSRHandlerAddress(cx, popFrameReg);
+            MOZ_ASSERT(handlerAddr);
+
+            prev->setReturnAddress(reinterpret_cast<uint8_t *>(handlerAddr));
+            iter.baselineFrame()->setDebugModeOSRInfo(recompInfo);
+
+            entryIndex++;
+            break;
+          }
+
+          case JitFrame_BaselineStub: {
+            JSScript *script = entries[entryIndex].script;
+            IonBaselineStubFrameLayout *layout =
+                reinterpret_cast<IonBaselineStubFrameLayout *>(iter.fp());
+            MOZ_ASSERT(script->baselineScript()->debugMode() == expectedDebugMode);
+            MOZ_ASSERT(layout->maybeStubPtr() == entries[entryIndex].stub);
+
+            // Patch baseline stub frames for case A above.
+            //
+            // We need to patch the stub frame return address to go to the
+            // k-fixer that is at the end of fallback stubs of all such
+            // can-call ICs. These k-fixers share code with bailout-from-Ion
+            // fixers, but in this case we are returning from VM and not
+            // Ion. See e.g., JitCompartment::baselineCallReturnFromStubAddr()
+            //
+            // Subtlety here: the debug trap handler of case C above pushes a
+            // stub frame with a null stub pointer. This handler will exist
+            // across recompiling the script, so we don't patch anything for
+            // such stub frames. We will return to that handler, which takes
+            // care of cleaning up the stub frame.
+            //
+            // Note that for stub pointers that are already on the C stack
+            // (i.e. fallback calls), we need to check for recompilation using
+            // DebugModeOSRVolatileStub.
+            if (layout->maybeStubPtr()) {
+                MOZ_ASSERT(layout->maybeStubPtr() == entries[entryIndex].stub);
+                uint32_t pcOffset = entries[entryIndex].pcOffset;
+                uint8_t *retAddr = GetStubReturnFromStubAddress(cx, script->offsetToPC(pcOffset));
+
+                // Get the fallback stub for the IC in the recompiled
+                // script. The fallback stub is guaranteed to exist.
+                ICEntry &entry = script->baselineScript()->icEntryFromPCOffset(pcOffset);
+                ICStub *newStub = entry.fallbackStub();
+                SpewPatchStubFrame(prev->returnAddress(), retAddr, layout->maybeStubPtr(), newStub);
+                prev->setReturnAddress(retAddr);
+                layout->setStubPtr(newStub);
+            }
+
+            break;
+          }
+
+          case JitFrame_IonJS:
+            // Nothing to patch.
+            entryIndex++;
+            for (InlineFrameIterator inlineIter(cx, &iter); inlineIter.more(); ++inlineIter)
+                entryIndex++;
+            break;
+
+          default:;
+        }
+
+        prev = iter.current();
+    }
+
+    *start = entryIndex;
+}
+
+// Recompile |script|'s baseline code so that its debug-mode instrumentation
+// matches the compartment's current debug mode. Returns true if the script
+// already matches or was successfully recompiled; returns false on OOM, in
+// which case the old BaselineScript is reinstated.
+static bool
+RecompileBaselineScriptForDebugMode(JSContext *cx, JSScript *script)
+{
+    BaselineScript *oldBaselineScript = script->baselineScript();
+
+    // If a script is on the stack multiple times, it may have already
+    // been recompiled.
+    bool expectedDebugMode = cx->compartment()->debugMode();
+    if (oldBaselineScript->debugMode() == expectedDebugMode)
+        return true;
+
+    IonSpew(IonSpew_BaselineDebugModeOSR, "Recompiling (%s:%d) for debug mode %s",
+            script->filename(), script->lineno(), expectedDebugMode ? "ON" : "OFF");
+
+    // Throw away Ion code so execution falls back to the (about to be
+    // recompiled) baseline code. Uses are not reset: this is a debug-mode
+    // toggle, not a performance decision.
+    if (script->hasIonScript())
+        Invalidate(cx, script, /* resetUses = */ false);
+
+    // Detach (but do not destroy) the old baseline script so BaselineCompile
+    // below produces a fresh compilation.
+    script->setBaselineScript(cx, nullptr);
+
+    MethodStatus status = BaselineCompile(cx, script);
+    if (status != Method_Compiled) {
+        // We will only fail to recompile for debug mode due to OOM. Restore
+        // the old baseline script in case something doesn't properly
+        // propagate OOM.
+        MOZ_ASSERT(status == Method_Error);
+        script->setBaselineScript(cx, oldBaselineScript);
+        return false;
+    }
+
+    // Don't destroy the old baseline script yet, since if we fail any of the
+    // recompiles we need to rollback all the old baseline scripts.
+    MOZ_ASSERT(script->baselineScript()->debugMode() == expectedDebugMode);
+    return true;
+}
+
+// Roll every entry's script back to its pre-recompilation BaselineScript,
+// destroying any freshly compiled replacement. Called when one recompile in
+// the batch fails, so no on-stack return addresses need patching.
+static void
+UndoRecompileBaselineScriptsForDebugMode(JSContext *cx,
+                                         const DebugModeOSREntryVector &entries)
+{
+    // In case of failure, roll back the entire set of active scripts so that
+    // we don't have to patch return addresses on the stack.
+    for (size_t i = 0; i < entries.length(); i++) {
+        JSScript *script = entries[i].script;
+        BaselineScript *baselineScript = script->baselineScript();
+        if (baselineScript != entries[i].oldBaselineScript) {
+            // This script was recompiled: reinstate the old code and free
+            // the new compilation.
+            script->setBaselineScript(cx, entries[i].oldBaselineScript);
+            BaselineScript::Destroy(cx->runtime()->defaultFreeOp(), baselineScript);
+        }
+    }
+}
+
+// Entry point for debug-mode on-stack recompilation: recompile every
+// baseline script live on the stack in |comp| so its instrumentation matches
+// the compartment's debug mode, then patch the live frames to resume in the
+// recompiled code. Fallible only up to the point where the old baseline
+// scripts are destroyed; the frame patching afterwards must not fail.
+bool
+jit::RecompileOnStackBaselineScriptsForDebugMode(JSContext *cx, JSCompartment *comp)
+{
+    AutoCompartment ac(cx, comp);
+
+    // First recompile the active scripts on the stack and patch the live
+    // frames.
+    Vector<DebugModeOSREntry> entries(cx);
+
+    for (JitActivationIterator iter(cx->runtime()); !iter.done(); ++iter) {
+        if (iter.activation()->compartment() == comp) {
+            if (!CollectOnStackScripts(cx, iter, entries))
+                return false;
+        }
+    }
+
+#ifdef JSGC_GENERATIONAL
+    // Scripts can entrain nursery things. See note in js::ReleaseAllJITCode.
+    if (!entries.empty())
+        MinorGC(cx->runtime(), JS::gcreason::EVICT_NURSERY);
+#endif
+
+    // Try to recompile all the scripts. If we encounter an error, we need to
+    // roll back as if none of the compilations happened, so that we don't
+    // crash.
+    for (size_t i = 0; i < entries.length(); i++) {
+        JSScript *script = entries[i].script;
+        if (!RecompileBaselineScriptForDebugMode(cx, script)) {
+            UndoRecompileBaselineScriptsForDebugMode(cx, entries);
+            return false;
+        }
+    }
+
+    // If all recompiles succeeded, destroy the old baseline scripts and patch
+    // the live frames.
+    //
+    // After this point the function must be infallible.
+
+    for (size_t i = 0; i < entries.length(); i++)
+        BaselineScript::Destroy(cx->runtime()->defaultFreeOp(), entries[i].oldBaselineScript);
+
+    // |processed| tracks how many entries have been consumed across all
+    // activations; every collected entry must be accounted for.
+    size_t processed = 0;
+    for (JitActivationIterator iter(cx->runtime()); !iter.done(); ++iter) {
+        if (iter.activation()->compartment() == comp)
+            PatchBaselineFramesForDebugMode(cx, iter, entries, &processed);
+    }
+    MOZ_ASSERT(processed == entries.length());
+
+    return true;
+}
+
+// Copy the stack value at |vp[stackAdjust]| into the register-value slot
+// named by |loc| (R0, R1, or nowhere for SlotIgnore), then bump stackAdjust
+// to account for the consumed slot. stackAdjust is later scaled to bytes by
+// SyncBaselineDebugModeOSRInfo.
+void
+BaselineDebugModeOSRInfo::popValueInto(PCMappingSlotInfo::SlotLocation loc, Value *vp)
+{
+    switch (loc) {
+      case PCMappingSlotInfo::SlotInR0:
+        valueR0 = vp[stackAdjust];
+        break;
+      case PCMappingSlotInfo::SlotInR1:
+        valueR1 = vp[stackAdjust];
+        break;
+      case PCMappingSlotInfo::SlotIgnore:
+        break;
+      default:
+        MOZ_ASSUME_UNREACHABLE("Bad slot location");
+    }
+
+    stackAdjust++;
+}
+
+// Decide whether the interrupted operation requested a forced (early)
+// return. |rv| is the boolean that the in-progress VM call left in
+// ReturnReg; its meaning depends on which kind of IC entry was interrupted.
+static inline bool
+HasForcedReturn(BaselineDebugModeOSRInfo *info, bool rv)
+{
+    ICEntry::Kind kind = info->frameKind;
+
+    // The debug epilogue always checks its resumption value, so we don't need
+    // to check rv.
+    if (kind == ICEntry::Kind_DebugEpilogue)
+        return true;
+
+    // |rv| is the value in ReturnReg. If true, in the case of the prologue,
+    // debug trap, and debugger statement handler, it means a forced return.
+    if (kind == ICEntry::Kind_DebugPrologue ||
+        (kind == ICEntry::Kind_CallVM && JSOp(*info->pc) == JSOP_DEBUGGER))
+    {
+        return rv;
+    }
+
+    // N.B. The debug trap handler handles its own forced return, so no
+    // need to deal with it here.
+    return false;
+}
+
+// ABI-called from the DebugModeOSRHandler thunk to finish filling in the
+// frame's BaselineDebugModeOSRInfo: reroute forced returns to the epilogue,
+// recover any unsynced R0/R1 values from the stack starting at |vp|, and
+// compute (in bytes) how much stack the thunk must discard.
+static void
+SyncBaselineDebugModeOSRInfo(BaselineFrame *frame, Value *vp, bool rv)
+{
+    BaselineDebugModeOSRInfo *info = frame->debugModeOSRInfo();
+    MOZ_ASSERT(info);
+    MOZ_ASSERT(frame->script()->baselineScript()->containsCodeAddress(info->resumeAddr));
+
+    if (HasForcedReturn(info, rv)) {
+        // Load the frame's rval and overwrite the resume address to go to the
+        // epilogue.
+        MOZ_ASSERT(R0 == JSReturnOperand);
+        info->valueR0 = frame->returnValue();
+        info->resumeAddr = frame->script()->baselineScript()->epilogueEntryAddr();
+        return;
+    }
+
+    // Read stack values and make sure R0 and R1 have the right values.
+    // At most the top two stack slots may be unsynced.
+    unsigned numUnsynced = info->slotInfo.numUnsynced();
+    MOZ_ASSERT(numUnsynced <= 2);
+    if (numUnsynced > 0)
+        info->popValueInto(info->slotInfo.topSlotLocation(), vp);
+    if (numUnsynced > 1)
+        info->popValueInto(info->slotInfo.nextSlotLocation(), vp);
+
+    // Scale stackAdjust.
+    info->stackAdjust *= sizeof(Value);
+}
+
+// ABI-called from the DebugModeOSRHandler thunk once the OSR info's contents
+// have been copied out; frees the heap-allocated info and clears its flag.
+static void
+FinishBaselineDebugModeOSR(BaselineFrame *frame)
+{
+    frame->deleteDebugModeOSRInfo();
+}
+
+// Free the BaselineDebugModeOSRInfo stashed in this frame's scratch slot (if
+// any; js_delete of nullptr is a no-op) and clear the corresponding flag.
+void
+BaselineFrame::deleteDebugModeOSRInfo()
+{
+    js_delete(getDebugModeOSRInfo());
+    flags_ &= ~HAS_DEBUG_MODE_OSR_INFO;
+}
+
+// Lazily generate the runtime-wide DebugModeOSRHandler trampoline. Returns
+// the cached code on subsequent calls, or nullptr if generation failed
+// (OOM). Also records the secondary entry point that skips the initial
+// frame-reg pop.
+JitCode *
+JitRuntime::getBaselineDebugModeOSRHandler(JSContext *cx)
+{
+    if (!baselineDebugModeOSRHandler_) {
+        // The handler is runtime-wide, so generate it in the atoms
+        // compartment under the exclusive-access lock.
+        AutoLockForExclusiveAccess lock(cx);
+        AutoCompartment ac(cx, cx->runtime()->atomsCompartment());
+        uint32_t offset;
+        if (JitCode *code = generateBaselineDebugModeOSRHandler(cx, &offset)) {
+            baselineDebugModeOSRHandler_ = code;
+            baselineDebugModeOSRHandlerNoFrameRegPopAddr_ = code->raw() + offset;
+        }
+    }
+
+    return baselineDebugModeOSRHandler_;
+}
+
+// Return the handler entry point to patch into a frame's return address:
+// the main entry (which pops the frame reg first) when |popFrameReg| is
+// true, otherwise the entry just past that pop. Returns nullptr on OOM.
+void *
+JitRuntime::getBaselineDebugModeOSRHandlerAddress(JSContext *cx, bool popFrameReg)
+{
+    if (!getBaselineDebugModeOSRHandler(cx))
+        return nullptr;
+    return (popFrameReg
+            ? baselineDebugModeOSRHandler_->raw()
+            : baselineDebugModeOSRHandlerNoFrameRegPopAddr_);
+}
+
+// Emit the trampoline that patched baseline frames return into after a
+// debug-mode recompile. On entry ReturnReg holds the interrupted VM call's
+// result and the frame's scratch slot holds a BaselineDebugModeOSRInfo. The
+// trampoline syncs R0/R1 and the stack via SyncBaselineDebugModeOSRInfo,
+// frees the info via FinishBaselineDebugModeOSR, then jumps to the resume
+// address in the recompiled code. |noFrameRegPopOffsetOut| receives the
+// offset of a second entry point that skips the initial frame-reg pop.
+JitCode *
+JitRuntime::generateBaselineDebugModeOSRHandler(JSContext *cx, uint32_t *noFrameRegPopOffsetOut)
+{
+    MacroAssembler masm(cx);
+
+    GeneralRegisterSet regs(GeneralRegisterSet::All());
+    regs.take(BaselineFrameReg);
+    regs.take(ReturnReg);
+    Register temp = regs.takeAny();
+    Register syncedStackStart = regs.takeAny();
+
+    // Pop the frame reg.
+    masm.pop(BaselineFrameReg);
+
+    // Not all patched baseline frames are returning from a situation where
+    // the frame reg is already fixed up.
+    CodeOffsetLabel noFrameRegPopOffset = masm.currentOffset();
+
+    // Record the stack pointer for syncing.
+    masm.movePtr(StackPointer, syncedStackStart);
+    masm.push(BaselineFrameReg);
+
+    // Call a stub to fully initialize the info.
+    masm.setupUnalignedABICall(3, temp);
+    masm.loadBaselineFramePtr(BaselineFrameReg, temp);
+    masm.passABIArg(temp);
+    masm.passABIArg(syncedStackStart);
+    masm.passABIArg(ReturnReg);
+    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, SyncBaselineDebugModeOSRInfo));
+
+    // Discard stack values depending on how many were unsynced, as we always
+    // have a fully synced stack in the recompile handler. See assert in
+    // DebugModeOSREntry constructor.
+    masm.pop(BaselineFrameReg);
+    masm.loadPtr(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfScratchValue()), temp);
+    masm.addPtr(Address(temp, offsetof(BaselineDebugModeOSRInfo, stackAdjust)), StackPointer);
+
+    // Save real return address on the stack temporarily.
+    //
+    // Push the R0/R1 values, the frame reg, and the resume address so they
+    // survive the FinishBaselineDebugModeOSR call (which frees |temp|'s
+    // pointee).
+    masm.pushValue(Address(temp, offsetof(BaselineDebugModeOSRInfo, valueR0)));
+    masm.pushValue(Address(temp, offsetof(BaselineDebugModeOSRInfo, valueR1)));
+    masm.push(BaselineFrameReg);
+    masm.push(Address(temp, offsetof(BaselineDebugModeOSRInfo, resumeAddr)));
+
+    // Call a stub to free the allocated info.
+    masm.setupUnalignedABICall(1, temp);
+    masm.loadBaselineFramePtr(BaselineFrameReg, temp);
+    masm.passABIArg(temp);
+    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, FinishBaselineDebugModeOSR));
+
+    // Restore saved values.
+    GeneralRegisterSet jumpRegs(GeneralRegisterSet::All());
+    jumpRegs.take(R0);
+    jumpRegs.take(R1);
+    jumpRegs.take(BaselineFrameReg);
+    Register target = jumpRegs.takeAny();
+
+    // Pops mirror the pushes above, in reverse order.
+    masm.pop(target);
+    masm.pop(BaselineFrameReg);
+    masm.popValue(R1);
+    masm.popValue(R0);
+
+    masm.jump(target);
+
+    Linker linker(masm);
+    JitCode *code = linker.newCode<NoGC>(cx, JSC::OTHER_CODE);
+    if (!code)
+        return nullptr;
+
+    // Resolve the secondary entry point's offset now that code is linked.
+    noFrameRegPopOffset.fixup(&masm);
+    *noFrameRegPopOffsetOut = noFrameRegPopOffset.offset();
+
+#ifdef JS_ION_PERF
+    writePerfSpewerJitCodeProfile(code, "BaselineDebugModeOSRHandler");
+#endif
+
+    return code;
+}
new file mode 100644
--- /dev/null
+++ b/js/src/jit/BaselineDebugModeOSR.h
@@ -0,0 +1,106 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sts=4 et sw=4 tw=99:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef jit_BaselineDebugModeOSR_h
+#define jit_BaselineDebugModeOSR_h
+
+#ifdef JS_ION
+
+#include "jit/BaselineFrame.h"
+#include "jit/BaselineIC.h"
+#include "jit/BaselineJIT.h"
+
+namespace js {
+namespace jit {
+
+// Note that this file and the corresponding .cpp implement debug mode
+// on-stack recompilation. This is to be distinguished from ordinary
+// Baseline->Ion OSR, which is used to jump into compiled loops.
+
+//
+// A volatile location due to recompilation of an on-stack baseline script
+// (e.g., for debug mode toggling).
+//
+// It is usually used in fallback stubs which may trigger on-stack
+// recompilation by calling out into the VM. Example use:
+//
+//     DebugModeOSRVolatileStub<FallbackStubT *> stub(frame, stub_)
+//
+//     // Call out to the VM
+//     // Other effectful operations like TypeScript::Monitor
+//
+//     if (stub.invalid())
+//         return true;
+//
+//     // First use of stub after VM call.
+//
+template <typename T>
+class DebugModeOSRVolatileStub
+{
+    // The stub pointer captured at construction; may become stale if debug
+    // mode toggling recompiles the script.
+    T stub_;
+    // Frame whose script's IC chain is used to validate stub_.
+    BaselineFrame *frame_;
+    // Bytecode offset of the IC entry, used to re-locate the fallback stub
+    // in the (possibly recompiled) BaselineScript.
+    uint32_t pcOffset_;
+
+  public:
+    DebugModeOSRVolatileStub(BaselineFrame *frame, ICFallbackStub *stub)
+      : stub_(static_cast<T>(stub)),
+        frame_(frame),
+        pcOffset_(stub->icEntry()->pcOffset())
+    { }
+
+    // True if the script was recompiled since construction, i.e., the cached
+    // stub pointer no longer matches the current fallback stub.
+    bool invalid() const {
+        ICEntry &entry = frame_->script()->baselineScript()->icEntryFromPCOffset(pcOffset_);
+        return stub_ != entry.fallbackStub();
+    }
+
+    // All accessors assert validity: callers must check invalid() after any
+    // operation that may have toggled debug mode.
+    operator const T&() const { MOZ_ASSERT(!invalid()); return stub_; }
+    T operator->() const { MOZ_ASSERT(!invalid()); return stub_; }
+    T *address() { MOZ_ASSERT(!invalid()); return &stub_; }
+    const T *address() const { MOZ_ASSERT(!invalid()); return &stub_; }
+    T &get() { MOZ_ASSERT(!invalid()); return stub_; }
+    const T &get() const { MOZ_ASSERT(!invalid()); return stub_; }
+
+    bool operator!=(const T &other) const { MOZ_ASSERT(!invalid()); return stub_ != other; }
+    bool operator==(const T &other) const { MOZ_ASSERT(!invalid()); return stub_ == other; }
+};
+
+//
+// Auxiliary info to help the DebugModeOSRHandler fix up state.
+//
+struct BaselineDebugModeOSRInfo
+{
+    // Native address in the recompiled BaselineScript to resume at.
+    uint8_t *resumeAddr;
+    // Bytecode pc the frame was interrupted at.
+    jsbytecode *pc;
+    // Describes which of the top stack slots are unsynced (live in R0/R1).
+    PCMappingSlotInfo slotInfo;
+    // The kind of IC entry that was interrupted; determines resume behavior.
+    ICEntry::Kind frameKind;
+
+    // Filled in by SyncBaselineDebugModeOSRInfo.
+    uintptr_t stackAdjust;
+    Value valueR0;
+    Value valueR1;
+
+    BaselineDebugModeOSRInfo(jsbytecode *pc, ICEntry::Kind kind)
+      : resumeAddr(nullptr),
+        pc(pc),
+        slotInfo(0),
+        frameKind(kind),
+        stackAdjust(0),
+        valueR0(UndefinedValue()),
+        valueR1(UndefinedValue())
+    { }
+
+    void popValueInto(PCMappingSlotInfo::SlotLocation loc, Value *vp);
+};
+
+bool
+RecompileOnStackBaselineScriptsForDebugMode(JSContext *cx, JSCompartment *comp);
+
+} // namespace jit
+} // namespace js
+
+#endif // JS_ION
+
+#endif // jit_BaselineDebugModeOSR_h
--- a/js/src/jit/BaselineFrame.h
+++ b/js/src/jit/BaselineFrame.h
@@ -10,16 +10,18 @@
 #ifdef JS_ION
 
 #include "jit/IonFrames.h"
 #include "vm/Stack.h"
 
 namespace js {
 namespace jit {
 
+struct BaselineDebugModeOSRInfo;
+
 // The stack looks like this, fp is the frame pointer:
 //
 // fp+y   arguments
 // fp+x   IonJSFrameLayout (frame header)
 // fp  => saved frame pointer
 // fp-x   BaselineFrame
 //        locals
 //        stack values
@@ -52,17 +54,21 @@ class BaselineFrame
 
         // Frame has hookData_ set.
         HAS_HOOK_DATA    = 1 << 7,
 
         // Frame has profiler entry pushed.
         HAS_PUSHED_SPS_FRAME = 1 << 8,
 
         // Frame has over-recursed on an early check.
-        OVER_RECURSED    = 1 << 9
+        OVER_RECURSED    = 1 << 9,
+
+        // Frame has a BaselineDebugModeOSRInfo stashed in the scratch value
+        // slot. See PatchBaselineFramesForDebugMode.
+        HAS_DEBUG_MODE_OSR_INFO = 1 << 10
     };
 
   protected: // Silence Clang warning about unused private fields.
     // We need to split the Value into 2 fields of 32 bits, otherwise the C++
     // compiler may add some padding between the fields.
     uint32_t loScratchValue_;
     uint32_t hiScratchValue_;
     uint32_t loReturnValue_;        // If HAS_RVAL, the frame's return value.
@@ -291,16 +297,34 @@ class BaselineFrame
     bool overRecursed() const {
         return flags_ & OVER_RECURSED;
     }
 
     void setOverRecursed() {
         flags_ |= OVER_RECURSED;
     }
 
+    BaselineDebugModeOSRInfo *debugModeOSRInfo() {
+        MOZ_ASSERT(flags_ & HAS_DEBUG_MODE_OSR_INFO);
+        return *reinterpret_cast<BaselineDebugModeOSRInfo **>(&loScratchValue_);
+    }
+
+    BaselineDebugModeOSRInfo *getDebugModeOSRInfo() {
+        if (flags_ & HAS_DEBUG_MODE_OSR_INFO)
+            return debugModeOSRInfo();
+        return nullptr;
+    }
+
+    void setDebugModeOSRInfo(BaselineDebugModeOSRInfo *info) {
+        flags_ |= HAS_DEBUG_MODE_OSR_INFO;
+        *reinterpret_cast<BaselineDebugModeOSRInfo **>(&loScratchValue_) = info;
+    }
+
+    void deleteDebugModeOSRInfo();
+
     void trace(JSTracer *trc, JitFrameIterator &frame);
 
     bool isFunctionFrame() const {
         return CalleeTokenIsFunction(calleeToken());
     }
     bool isGlobalFrame() const {
         return !CalleeTokenIsFunction(calleeToken());
     }
--- a/js/src/jit/BaselineFrameInfo.h
+++ b/js/src/jit/BaselineFrameInfo.h
@@ -157,25 +157,25 @@ class StackValue
         knownType_ = JSVAL_TYPE_UNKNOWN;
     }
 };
 
 enum StackAdjustment { AdjustStack, DontAdjustStack };
 
 class FrameInfo
 {
-    RootedScript script;
+    JSScript *script;
     MacroAssembler &masm;
 
     FixedList<StackValue> stack;
     size_t spIndex;
 
   public:
-    FrameInfo(JSContext *cx, HandleScript script, MacroAssembler &masm)
-      : script(cx, script),
+    FrameInfo(JSScript *script, MacroAssembler &masm)
+      : script(script),
         masm(masm),
         stack(),
         spIndex(0)
     { }
 
     bool init(TempAllocator &alloc);
 
     uint32_t nlocals() const {
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -8,16 +8,17 @@
 
 #include "mozilla/DebugOnly.h"
 #include "mozilla/TemplateLib.h"
 
 #include "jslibmath.h"
 #include "jstypes.h"
 
 #include "builtin/Eval.h"
+#include "jit/BaselineDebugModeOSR.h"
 #include "jit/BaselineHelpers.h"
 #include "jit/BaselineJIT.h"
 #include "jit/IonLinker.h"
 #include "jit/IonSpewer.h"
 #include "jit/Lowering.h"
 #ifdef JS_ION_PERF
 # include "jit/PerfSpewer.h"
 #endif
@@ -665,16 +666,30 @@ ICStubCompiler::enterStubFrame(MacroAsse
 void
 ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
 {
     JS_ASSERT(entersStubFrame_);
     EmitLeaveStubFrame(masm, calledIntoIon);
 }
 
 void
+ICStubCompiler::leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon)
+{
+    JS_ASSERT(entersStubFrame_);
+    EmitLeaveStubFrameHead(masm, calledIntoIon);
+}
+
+void
+ICStubCompiler::leaveStubFrameCommonTail(MacroAssembler &masm)
+{
+    JS_ASSERT(entersStubFrame_);
+    EmitLeaveStubFrameCommonTail(masm);
+}
+
+void
 ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
 {
     // This should only be called from the following stubs.
     JS_ASSERT(kind == ICStub::Call_Scripted                             ||
               kind == ICStub::Call_AnyScripted                          ||
               kind == ICStub::Call_Native                               ||
               kind == ICStub::Call_ScriptedApplyArray                   ||
               kind == ICStub::Call_ScriptedApplyArguments               ||
@@ -1752,19 +1767,22 @@ ICNewObject_Fallback::Compiler::generate
     return tailCallVM(DoNewObjectInfo, masm);
 }
 
 //
 // Compare_Fallback
 //
 
 static bool
-DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub, HandleValue lhs,
+DoCompareFallback(JSContext *cx, BaselineFrame *frame, ICCompare_Fallback *stub_, HandleValue lhs,
                   HandleValue rhs, MutableHandleValue ret)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICCompare_Fallback *> stub(frame, stub_);
+
     jsbytecode *pc = stub->icEntry()->pc(frame->script());
     JSOp op = JSOp(*pc);
 
     FallbackICSpew(cx, stub, "Compare(%s)", js_CodeName[op]);
 
     // Case operations in a CONDSWITCH are performing strict equality.
     if (op == JSOP_CASE)
         op = JSOP_STRICTEQ;
@@ -1811,16 +1829,20 @@ DoCompareFallback(JSContext *cx, Baselin
         break;
       default:
         JS_ASSERT(!"Unhandled baseline compare op");
         return false;
     }
 
     ret.setBoolean(out);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     // Check to see if a new stub should be generated.
     if (stub->numOptimizedStubs() >= ICCompare_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
         // But for now we just bail.
         return true;
     }
 
     JSScript *script = frame->script();
@@ -2484,19 +2506,22 @@ ICToNumber_Fallback::Compiler::generateS
 // BinaryArith_Fallback
 //
 
 // Disable PGO (see bug 851490).
 #if defined(_MSC_VER)
 # pragma optimize("g", off)
 #endif
 static bool
-DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub,
+DoBinaryArithFallback(JSContext *cx, BaselineFrame *frame, ICBinaryArith_Fallback *stub_,
                       HandleValue lhs, HandleValue rhs, MutableHandleValue ret)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICBinaryArith_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "BinaryArith(%s,%d,%d)", js_CodeName[op],
             int(lhs.isDouble() ? JSVAL_TYPE_DOUBLE : lhs.extractNonDoubleType()),
             int(rhs.isDouble() ? JSVAL_TYPE_DOUBLE : rhs.extractNonDoubleType()));
 
     // Don't pass lhs/rhs directly, we need the original values when
@@ -2566,16 +2591,20 @@ DoBinaryArithFallback(JSContext *cx, Bas
         if (!UrshOperation(cx, lhs, rhs, ret))
             return false;
         break;
       }
       default:
         MOZ_ASSUME_UNREACHABLE("Unhandled baseline arith op");
     }
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (ret.isDouble())
         stub->setSawDoubleResult();
 
     // Check to see if a new stub should be generated.
     if (stub->numOptimizedStubs() >= ICBinaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
         stub->noteUnoptimizableOperands();
         return true;
     }
@@ -3043,19 +3072,22 @@ ICBinaryArith_DoubleWithInt32::Compiler:
 // UnaryArith_Fallback
 //
 
 // Disable PGO (see bug 851490).
 #if defined(_MSC_VER)
 # pragma optimize("g", off)
 #endif
 static bool
-DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub,
+DoUnaryArithFallback(JSContext *cx, BaselineFrame *frame, ICUnaryArith_Fallback *stub_,
                      HandleValue val, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICUnaryArith_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "UnaryArith(%s)", js_CodeName[op]);
 
     switch (op) {
       case JSOP_BITNOT: {
         int32_t result;
@@ -3067,16 +3099,20 @@ DoUnaryArithFallback(JSContext *cx, Base
       case JSOP_NEG:
         if (!NegOperation(cx, script, pc, val, res))
             return false;
         break;
       default:
         MOZ_ASSUME_UNREACHABLE("Unexpected op");
     }
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (res.isDouble())
         stub->setSawDoubleResult();
 
     if (stub->numOptimizedStubs() >= ICUnaryArith_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard/replace stubs.
         return true;
     }
 
@@ -3972,19 +4008,23 @@ TryAttachGetElemStub(JSContext *cx, JSSc
     // determine that an object has no properties on such indexes.
     if (rhs.isNumber() && rhs.toNumber() < 0)
         stub->noteNegativeIndex();
 
     return true;
 }
 
 static bool
-DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub, HandleValue lhs,
+DoGetElemFallback(JSContext *cx, BaselineFrame *frame, ICGetElem_Fallback *stub_, HandleValue lhs,
                   HandleValue rhs, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICGetElem_Fallback *> stub(frame, stub_);
+
+    RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(frame->script());
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetElem(%s)", js_CodeName[op]);
 
     JS_ASSERT(op == JSOP_GETELEM || op == JSOP_CALLELEM);
 
     // Don't pass lhs directly, we need it when generating stubs.
     RootedValue lhsCopy(cx, lhs);
@@ -3999,16 +4039,20 @@ DoGetElemFallback(JSContext *cx, Baselin
     }
 
     if (!isOptimizedArgs) {
         if (!GetElementOperation(cx, op, &lhsCopy, rhs, res))
             return false;
         types::TypeScript::Monitor(cx, frame->script(), pc, res);
     }
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame->script(), res))
         return false;
 
     if (stub->numOptimizedStubs() >= ICGetElem_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
         // But for now we just bail.
         return true;
@@ -4940,19 +4984,22 @@ CanOptimizeDenseSetElem(JSContext *cx, H
         return false;
 
     *isAddingCaseOut = true;
 
     return true;
 }
 
 static bool
-DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub, Value *stack,
+DoSetElemFallback(JSContext *cx, BaselineFrame *frame, ICSetElem_Fallback *stub_, Value *stack,
                   HandleValue objv, HandleValue index, HandleValue rhs)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICSetElem_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "SetElem(%s)", js_CodeName[JSOp(*pc)]);
 
     JS_ASSERT(op == JSOP_SETELEM ||
               op == JSOP_INITELEM ||
               op == JSOP_INITELEM_ARRAY);
@@ -4982,16 +5029,20 @@ DoSetElemFallback(JSContext *cx, Baselin
         if (!SetObjectElement(cx, obj, index, rhs, script->strict(), script, pc))
             return false;
     }
 
     // Overwrite the object on the stack (pushed for the decompiler) with the rhs.
     JS_ASSERT(stack[2] == objv);
     stack[2] = rhs;
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (stub->numOptimizedStubs() >= ICSetElem_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with inert megamorphic stub.
         // But for now we just bail.
         return true;
     }
 
     // Try to generate new stubs.
     if (obj->isNative() &&
@@ -5752,19 +5803,22 @@ TryAttachScopeNameStub(JSContext *cx, Ha
     if (!newStub)
         return false;
 
     stub->addNewStub(newStub);
     return true;
 }
 
 static bool
-DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub,
+DoGetNameFallback(JSContext *cx, BaselineFrame *frame, ICGetName_Fallback *stub_,
                   HandleObject scopeChain, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICGetName_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetName(%s)", js_CodeName[JSOp(*pc)]);
 
     JS_ASSERT(op == JSOP_NAME || op == JSOP_GETGNAME);
 
     RootedPropertyName name(cx, script->getName(pc));
@@ -5774,16 +5828,20 @@ DoGetNameFallback(JSContext *cx, Baselin
             return false;
     } else {
         if (!GetScopeName(cx, scopeChain, name, res))
             return false;
     }
 
     types::TypeScript::Monitor(cx, script, pc, res);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, script, res))
         return false;
 
     // Attach new stub.
     if (stub->numOptimizedStubs() >= ICGetName_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with generic stub.
         return true;
@@ -5929,35 +5987,42 @@ ICBindName_Fallback::Compiler::generateS
     return tailCallVM(DoBindNameFallbackInfo, masm);
 }
 
 //
 // GetIntrinsic_Fallback
 //
 
 static bool
-DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub,
+DoGetIntrinsicFallback(JSContext *cx, BaselineFrame *frame, ICGetIntrinsic_Fallback *stub_,
                        MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICGetIntrinsic_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     mozilla::DebugOnly<JSOp> op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetIntrinsic(%s)", js_CodeName[JSOp(*pc)]);
 
     JS_ASSERT(op == JSOP_GETINTRINSIC);
 
     if (!GetIntrinsicOperation(cx, pc, res))
         return false;
 
     // An intrinsic operation will always produce the same result, so only
     // needs to be monitored once. Attach a stub to load the resulting constant
     // directly.
 
     types::TypeScript::Monitor(cx, script, pc, res);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     IonSpew(IonSpew_BaselineIC, "  Generating GetIntrinsic optimized stub");
     ICGetIntrinsic_Constant::Compiler compiler(cx, res);
     ICStub *newStub = compiler.getStub(compiler.getStubSpace(script));
     if (!newStub)
         return false;
 
     stub->addNewStub(newStub);
     return true;
@@ -6309,19 +6374,22 @@ TryAttachPrimitiveGetPropStub(JSContext 
         return false;
 
     stub->addNewStub(newStub);
     *attached = true;
     return true;
 }
 
 static bool
-DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub,
+DoGetPropFallback(JSContext *cx, BaselineFrame *frame, ICGetProp_Fallback *stub_,
                   MutableHandleValue val, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICGetProp_Fallback *> stub(frame, stub_);
+
     jsbytecode *pc = stub->icEntry()->pc(frame->script());
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "GetProp(%s)", js_CodeName[op]);
 
     JS_ASSERT(op == JSOP_GETPROP || op == JSOP_CALLPROP || op == JSOP_LENGTH || op == JSOP_GETXPROP);
 
     RootedPropertyName name(cx, frame->script()->getName(pc));
 
@@ -6357,16 +6425,20 @@ DoGetPropFallback(JSContext *cx, Baselin
     if (op == JSOP_CALLPROP && MOZ_UNLIKELY(res.isUndefined()) && val.isObject()) {
         if (!OnUnknownMethod(cx, obj, IdToValue(id), res))
             return false;
     }
 #endif
 
     types::TypeScript::Monitor(cx, frame->script(), pc, res);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, frame->script(), res))
         return false;
 
     if (stub->numOptimizedStubs() >= ICGetProp_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with generic getprop stub.
         return true;
     }
@@ -6418,44 +6490,61 @@ ICGetProp_Fallback::Compiler::generateSt
     // Push arguments.
     masm.pushValue(R0);
     masm.push(BaselineStubReg);
     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
 
     if (!tailCallVM(DoGetPropFallbackInfo, masm))
         return false;
 
-    // What follows is bailout-only code for inlined scripted getters
-    // The return address pointed to by the baseline stack points here.
-    returnOffset_ = masm.currentOffset();
-
+    // What follows is bailout for inlined scripted getters or for on-stack
+    // debug mode recompile. The return address pointed to by the baseline
+    // stack points here.
+    //
     // Even though the fallback frame doesn't enter a stub frame, the CallScripted
     // frame that we are emulating does. Again, we lie.
 #ifdef DEBUG
     entersStubFrame_ = true;
 #endif
 
-    leaveStubFrame(masm, true);
+    Label leaveStubCommon;
+
+    returnFromStubOffset_ = masm.currentOffset();
+    leaveStubFrameHead(masm, false);
+    masm.jump(&leaveStubCommon);
+
+    returnFromIonOffset_ = masm.currentOffset();
+    leaveStubFrameHead(masm, true);
+
+    masm.bind(&leaveStubCommon);
+    leaveStubFrameCommonTail(masm);
 
     // When we get here, BaselineStubReg contains the ICGetProp_Fallback stub,
     // which we can't use to enter the TypeMonitor IC, because it's a MonitoredFallbackStub
     // instead of a MonitoredStub. So, we cheat.
     masm.loadPtr(Address(BaselineStubReg, ICMonitoredFallbackStub::offsetOfFallbackMonitorStub()),
                  BaselineStubReg);
     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
 
     return true;
 }
 
 bool
 ICGetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
 {
-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineGetPropReturnAddr(code->raw() + offset.offset());
+    JitCompartment *comp = cx->compartment()->jitCompartment();
+
+    CodeOffsetLabel fromIon(returnFromIonOffset_);
+    fromIon.fixup(&masm);
+    comp->initBaselineGetPropReturnFromIonAddr(code->raw() + fromIon.offset());
+
+    CodeOffsetLabel fromVM(returnFromStubOffset_);
+    fromVM.fixup(&masm);
+    comp->initBaselineGetPropReturnFromStubAddr(code->raw() + fromVM.offset());
+
     return true;
 }
 
 bool
 ICGetProp_ArrayLength::Compiler::generateStubCode(MacroAssembler &masm)
 {
     Label failure;
     masm.branchTestObject(Assembler::NotEqual, R0, &failure);
@@ -7243,19 +7332,22 @@ TryAttachSetPropStub(JSContext *cx, Hand
         *attached = true;
         return true;
     }
 
     return true;
 }
 
 static bool
-DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub, HandleValue lhs,
-                  HandleValue rhs, MutableHandleValue res)
-{
+DoSetPropFallback(JSContext *cx, BaselineFrame *frame, ICSetProp_Fallback *stub_,
+                  HandleValue lhs, HandleValue rhs, MutableHandleValue res)
+{
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICSetProp_Fallback *> stub(frame, stub_);
+
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "SetProp(%s)", js_CodeName[op]);
 
     JS_ASSERT(op == JSOP_SETPROP ||
               op == JSOP_SETNAME ||
               op == JSOP_SETGNAME ||
@@ -7294,16 +7386,20 @@ DoSetPropFallback(JSContext *cx, Baselin
             if (!js::SetProperty<false>(cx, obj, id, rhs))
                 return false;
         }
     }
 
     // Leave the RHS on the stack.
     res.set(rhs);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (stub->numOptimizedStubs() >= ICSetProp_Fallback::MAX_OPTIMIZED_STUBS) {
         // TODO: Discard all stubs in this IC and replace with generic setprop stub.
         return true;
     }
 
     bool attached = false;
     if (!TryAttachSetPropStub(cx, script, pc, stub, obj, oldShape, oldSlots, name, id, rhs,
          &attached))
@@ -7339,41 +7435,58 @@ ICSetProp_Fallback::Compiler::generateSt
     masm.pushValue(R1);
     masm.pushValue(R0);
     masm.push(BaselineStubReg);
     masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
 
     if (!tailCallVM(DoSetPropFallbackInfo, masm))
         return false;
 
-    // What follows is bailout-only code for inlined scripted getters
-    // The return address pointed to by the baseline stack points here.
-    returnOffset_ = masm.currentOffset();
-
+    // What follows is bailout for inlined scripted setters or for on-stack
+    // debug mode recompile. The return address pointed to by the baseline
+    // stack points here.
+    //
     // Even though the fallback frame doesn't enter a stub frame, the CallScripted
     // frame that we are emulating does. Again, we lie.
 #ifdef DEBUG
     entersStubFrame_ = true;
 #endif
 
-    leaveStubFrame(masm, true);
+    Label leaveStubCommon;
+
+    returnFromStubOffset_ = masm.currentOffset();
+    leaveStubFrameHead(masm, false);
+    masm.jump(&leaveStubCommon);
+
+    returnFromIonOffset_ = masm.currentOffset();
+    leaveStubFrameHead(masm, true);
+
+    masm.bind(&leaveStubCommon);
+    leaveStubFrameCommonTail(masm);
 
     // Retrieve the stashed initial argument from the caller's frame before returning
     EmitUnstowICValues(masm, 1);
     EmitReturnFromIC(masm);
 
     return true;
 }
 
 bool
 ICSetProp_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
 {
-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineSetPropReturnAddr(code->raw() + offset.offset());
+    JitCompartment *comp = cx->compartment()->jitCompartment();
+
+    CodeOffsetLabel fromIon(returnFromIonOffset_);
+    fromIon.fixup(&masm);
+    comp->initBaselineSetPropReturnFromIonAddr(code->raw() + fromIon.offset());
+
+    CodeOffsetLabel fromVM(returnFromStubOffset_);
+    fromVM.fixup(&masm);
+    comp->initBaselineSetPropReturnFromStubAddr(code->raw() + fromVM.offset());
+
     return true;
 }
 
 bool
 ICSetProp_Native::Compiler::generateStubCode(MacroAssembler &masm)
 {
     Label failure;
 
@@ -8079,19 +8192,22 @@ MaybeCloneFunctionAtCallsite(JSContext *
     if (!fun)
         return false;
 
     callee.setObject(*fun);
     return true;
 }
 
 static bool
-DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub, uint32_t argc,
+DoCallFallback(JSContext *cx, BaselineFrame *frame, ICCall_Fallback *stub_, uint32_t argc,
                Value *vp, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICCall_Fallback *> stub(frame, stub_);
+
     // Ensure vp array is rooted - we may GC in here.
     AutoArrayRooter vpRoot(cx, argc + 2, vp);
 
     RootedScript script(cx, frame->script());
     jsbytecode *pc = stub->icEntry()->pc(script);
     JSOp op = JSOp(*pc);
     FallbackICSpew(cx, stub, "Call(%s)", js_CodeName[op]);
 
@@ -8133,16 +8249,20 @@ DoCallFallback(JSContext *cx, BaselineFr
     } else {
         JS_ASSERT(op == JSOP_CALL || op == JSOP_FUNCALL || op == JSOP_FUNAPPLY || op == JSOP_EVAL);
         if (!Invoke(cx, thisv, callee, argc, args, res))
             return false;
     }
 
     types::TypeScript::Monitor(cx, script, pc, res);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     // Attach a new TypeMonitor stub for this value.
     ICTypeMonitor_Fallback *typeMonFbStub = stub->fallbackMonitorStub();
     if (!typeMonFbStub->addMonitorStubForValue(cx, script, res))
         return false;
     // Add a type monitor stub for the resulting value.
     if (!stub->addMonitorStubForValue(cx, script, res))
         return false;
 
@@ -8377,27 +8497,45 @@ ICCall_Fallback::Compiler::generateStubC
     masm.pushBaselineFramePtr(R0.scratchReg(), R0.scratchReg());
 
     if (!callVM(DoCallFallbackInfo, masm))
         return false;
 
     leaveStubFrame(masm);
     EmitReturnFromIC(masm);
 
-    // The following asmcode is only used when an Ion inlined frame bails out into
-    // baseline jitcode.  The return address pushed onto the reconstructed baseline stack
-    // points here.
-    returnOffset_ = masm.currentOffset();
+    // The following asmcode is only used either when an Ion inlined frame
+    // bails out into baseline jitcode or when we need to do on-stack script
+    // replacement for a debug mode recompile.
+    Label leaveStubCommon;
+    returnFromStubOffset_ = masm.currentOffset();
 
     // Load passed-in ThisV into R1 just in case it's needed.  Need to do this before
     // we leave the stub frame since that info will be lost.
     // Current stack:  [...., ThisV, ActualArgc, CalleeToken, Descriptor ]
     masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
 
-    leaveStubFrame(masm, true);
+    // Emit the coming-from-VM specific part of the stub-leaving code.
+    leaveStubFrameHead(masm, /* calledIntoIon = */ false);
+
+    // Jump to the common leave stub tail.
+    masm.jump(&leaveStubCommon);
+
+    // For Ion bailouts, the return address pushed onto the reconstructed
+    // baseline stack points here.
+    returnFromIonOffset_ = masm.currentOffset();
+
+    masm.loadValue(Address(BaselineStackReg, 3 * sizeof(size_t)), R1);
+
+    // Emit the coming-from-Ion specific part of the stub-leaving code.
+    leaveStubFrameHead(masm, /* calledIntoIon = */ true);
+
+    // Emit the common stub-leaving tail.
+    masm.bind(&leaveStubCommon);
+    leaveStubFrameCommonTail(masm);
 
     // R1 and R0 are taken.
     regs = availableGeneralRegs(2);
     Register scratch = regs.takeAny();
 
     // If this is a |constructing| call, if the callee returns a non-object, we replace it with
     // the |this| object passed in.
     JS_ASSERT(JSReturnOperand == R0);
@@ -8422,19 +8560,26 @@ ICCall_Fallback::Compiler::generateStubC
     EmitEnterTypeMonitorIC(masm, ICTypeMonitor_Fallback::offsetOfFirstMonitorStub());
 
     return true;
 }
 
 bool
 ICCall_Fallback::Compiler::postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code)
 {
-    CodeOffsetLabel offset(returnOffset_);
-    offset.fixup(&masm);
-    cx->compartment()->jitCompartment()->initBaselineCallReturnAddr(code->raw() + offset.offset());
+    JitCompartment *comp = cx->compartment()->jitCompartment();
+
+    CodeOffsetLabel fromIon(returnFromIonOffset_);
+    fromIon.fixup(&masm);
+    comp->initBaselineCallReturnFromIonAddr(code->raw() + fromIon.offset());
+
+    CodeOffsetLabel fromVM(returnFromStubOffset_);
+    fromVM.fixup(&masm);
+    comp->initBaselineCallReturnFromStubAddr(code->raw() + fromVM.offset());
+
     return true;
 }
 
 typedef bool (*CreateThisFn)(JSContext *cx, HandleObject callee, MutableHandleValue rval);
 static const VMFunction CreateThisInfoBaseline = FunctionInfo<CreateThisFn>(CreateThis);
 
 bool
 ICCallScriptedCompiler::generateStubCode(MacroAssembler &masm)
@@ -9197,17 +9342,17 @@ ICTableSwitch::Compiler::getStub(ICStubS
             table[i] = defaultpc;
         pc += JUMP_OFFSET_LEN;
     }
 
     return ICTableSwitch::New(space, code, table, low, length, defaultpc);
 }
 
 void
-ICTableSwitch::fixupJumpTable(HandleScript script, BaselineScript *baseline)
+ICTableSwitch::fixupJumpTable(JSScript *script, BaselineScript *baseline)
 {
     defaultTarget_ = baseline->nativeCodeForPC(script, (jsbytecode *) defaultTarget_);
 
     for (int32_t i = 0; i < length_; i++)
         table_[i] = baseline->nativeCodeForPC(script, (jsbytecode *) table_[i]);
 }
 
 //
@@ -9246,26 +9391,33 @@ ICIteratorNew_Fallback::Compiler::genera
     return tailCallVM(DoIteratorNewFallbackInfo, masm);
 }
 
 //
 // IteratorMore_Fallback
 //
 
 static bool
-DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub,
+DoIteratorMoreFallback(JSContext *cx, BaselineFrame *frame, ICIteratorMore_Fallback *stub_,
                        HandleValue iterValue, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICIteratorMore_Fallback *> stub(frame, stub_);
+
     FallbackICSpew(cx, stub, "IteratorMore");
 
     bool cond;
     if (!IteratorMore(cx, &iterValue.toObject(), &cond, res))
         return false;
     res.setBoolean(cond);
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (iterValue.toObject().is<PropertyIteratorObject>() &&
         !stub->hasStub(ICStub::IteratorMore_Native))
     {
         ICIteratorMore_Native::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
         if (!newStub)
             return false;
         stub->addNewStub(newStub);
@@ -9327,25 +9479,32 @@ ICIteratorMore_Native::Compiler::generat
     return true;
 }
 
 //
 // IteratorNext_Fallback
 //
 
 static bool
-DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub,
+DoIteratorNextFallback(JSContext *cx, BaselineFrame *frame, ICIteratorNext_Fallback *stub_,
                        HandleValue iterValue, MutableHandleValue res)
 {
+    // This fallback stub may trigger debug mode toggling.
+    DebugModeOSRVolatileStub<ICIteratorNext_Fallback *> stub(frame, stub_);
+
     FallbackICSpew(cx, stub, "IteratorNext");
 
     RootedObject iteratorObject(cx, &iterValue.toObject());
     if (!IteratorNext(cx, iteratorObject, res))
         return false;
 
+    // Check if debug mode toggling made the stub invalid.
+    if (stub.invalid())
+        return true;
+
     if (!res.isString() && !stub->hasNonStringResult())
         stub->setHasNonStringResult();
 
     if (iteratorObject->is<PropertyIteratorObject>() &&
         !stub->hasStub(ICStub::IteratorNext_Native))
     {
         ICIteratorNext_Native::Compiler compiler(cx);
         ICStub *newStub = compiler.getStub(compiler.getStubSpace(frame->script()));
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -4,16 +4,18 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jit_BaselineIC_h
 #define jit_BaselineIC_h
 
 #ifdef JS_ION
 
+#include "mozilla/Assertions.h"
+
 #include "jscntxt.h"
 #include "jscompartment.h"
 #include "jsgc.h"
 #include "jsopcode.h"
 
 #include "jit/BaselineJIT.h"
 #include "jit/BaselineRegisters.h"
 
@@ -201,25 +203,59 @@ class ICEntry
     // A pointer to the baseline IC stub for this instruction.
     ICStub *firstStub_;
 
     // Offset from the start of the JIT code where the IC
     // load and call instructions are.
     uint32_t returnOffset_;
 
     // The PC of this IC's bytecode op within the JSScript.
-    uint32_t pcOffset_ : 31;
-
-    // Whether this IC is for a bytecode op.
-    uint32_t isForOp_ : 1;
+    uint32_t pcOffset_ : 29;
 
   public:
-    ICEntry(uint32_t pcOffset, bool isForOp)
-      : firstStub_(nullptr), returnOffset_(), pcOffset_(pcOffset), isForOp_(isForOp)
-    {}
+    enum Kind {
+        // A for-op IC entry.
+        Kind_Op = 0,
+
+        // A non-op IC entry.
+        Kind_NonOp,
+
+        // A fake IC entry for returning from a callVM.
+        Kind_CallVM,
+
+        // A fake IC entry for returning from DebugTrapHandler.
+        Kind_DebugTrap,
+
+        // A fake IC entry for returning from a callVM to
+        // Debug{Prologue,Epilogue}.
+        Kind_DebugPrologue,
+        Kind_DebugEpilogue
+    };
+
+  private:
+    // What this IC is for.
+    Kind kind_ : 3;
+
+    // Set the kind and assert that it's sane.
+    void setKind(Kind kind) {
+        kind_ = kind;
+        MOZ_ASSERT(this->kind() == kind);
+    }
+
+  public:
+    ICEntry(uint32_t pcOffset, Kind kind)
+      : firstStub_(nullptr), returnOffset_(), pcOffset_(pcOffset)
+    {
+        // The pc offset must fit in 29 bits, since 3 of the original 32
+        // bits are used by the Kind enum.
+        MOZ_ASSERT(pcOffset_ == pcOffset);
+        JS_STATIC_ASSERT(BaselineScript::MAX_JSSCRIPT_LENGTH < 0x1fffffffu);
+        MOZ_ASSERT(pcOffset <= BaselineScript::MAX_JSSCRIPT_LENGTH);
+        setKind(kind);
+    }
 
     CodeOffsetLabel returnOffset() const {
         return CodeOffsetLabel(returnOffset_);
     }
 
     void setReturnOffset(CodeOffsetLabel offset) {
         JS_ASSERT(offset.offset() <= (size_t) UINT32_MAX);
         returnOffset_ = (uint32_t) offset.offset();
@@ -235,18 +271,31 @@ class ICEntry
     uint32_t pcOffset() const {
         return pcOffset_;
     }
 
     jsbytecode *pc(JSScript *script) const {
         return script->offsetToPC(pcOffset_);
     }
 
+    Kind kind() const {
+        // MSVC compiles enums as signed.
+        return (Kind)(kind_ & 0x7);
+    }
     bool isForOp() const {
-        return isForOp_;
+        return kind() == Kind_Op;
+    }
+
+    void setForDebugPrologue() {
+        MOZ_ASSERT(kind() == Kind_CallVM);
+        setKind(Kind_DebugPrologue);
+    }
+    void setForDebugEpilogue() {
+        MOZ_ASSERT(kind() == Kind_CallVM);
+        setKind(Kind_DebugEpilogue);
     }
 
     bool hasStub() const {
         return firstStub_ != nullptr;
     }
     ICStub *firstStub() const {
         JS_ASSERT(hasStub());
         return firstStub_;
@@ -1038,16 +1087,18 @@ class ICStubCompiler
     // checked is already in R0.
     bool callTypeUpdateIC(MacroAssembler &masm, uint32_t objectOffset);
 
     // A stub frame is used when a stub wants to call into the VM without
     // performing a tail call. This is required for the return address
     // to pc mapping to work.
     void enterStubFrame(MacroAssembler &masm, Register scratch);
     void leaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false);
+    void leaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon = false);
+    void leaveStubFrameCommonTail(MacroAssembler &masm);
 
     // Some stubs need to emit SPS profiler updates.  This emits the guarding
     // jitcode for those stubs.  If profiling is not enabled, jumps to the
     // given label.
     void guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip);
 
     // Higher-level helper to emit an update to the profiler pseudo-stack.
     void emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
@@ -4036,17 +4087,18 @@ class ICGetProp_Fallback : public ICMoni
         extra_ |= (1u << ACCESSED_GETTER_BIT);
     }
     bool hasAccessedGetter() const {
         return extra_ & (1u << ACCESSED_GETTER_BIT);
     }
 
     class Compiler : public ICStubCompiler {
       protected:
-        uint32_t returnOffset_;
+        uint32_t returnFromIonOffset_;
+        uint32_t returnFromStubOffset_;
         bool generateStubCode(MacroAssembler &masm);
         bool postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code);
 
       public:
         Compiler(JSContext *cx)
           : ICStubCompiler(cx, ICStub::GetProp_Fallback)
         { }
 
@@ -4929,17 +4981,18 @@ class ICSetProp_Fallback : public ICFall
         extra_ |= (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
     bool hadUnoptimizableAccess() const {
         return extra_ & (1u << UNOPTIMIZABLE_ACCESS_BIT);
     }
 
     class Compiler : public ICStubCompiler {
       protected:
-        uint32_t returnOffset_;
+        uint32_t returnFromIonOffset_;
+        uint32_t returnFromStubOffset_;
         bool generateStubCode(MacroAssembler &masm);
         bool postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code);
 
       public:
         Compiler(JSContext *cx)
           : ICStubCompiler(cx, ICStub::SetProp_Fallback)
         { }
 
@@ -5353,17 +5406,18 @@ class ICCall_Fallback : public ICMonitor
         // Return hasStub(Call_AnyNative) after Call_AnyNative stub is added.
         return false;
     }
 
     // Compiler for this stub kind.
     class Compiler : public ICCallStubCompiler {
       protected:
         bool isConstructing_;
-        uint32_t returnOffset_;
+        uint32_t returnFromIonOffset_;
+        uint32_t returnFromStubOffset_;
         bool generateStubCode(MacroAssembler &masm);
         bool postGenerateStubCode(MacroAssembler &masm, Handle<JitCode *> code);
 
       public:
         Compiler(JSContext *cx, bool isConstructing)
           : ICCallStubCompiler(cx, ICStub::Call_Fallback),
             isConstructing_(isConstructing)
         { }
@@ -5747,17 +5801,17 @@ class ICTableSwitch : public ICStub
   public:
     static inline ICTableSwitch *New(ICStubSpace *space, JitCode *code, void **table,
                                      int32_t min, int32_t length, void *defaultTarget) {
         if (!code)
             return nullptr;
         return space->allocate<ICTableSwitch>(code, table, min, length, defaultTarget);
     }
 
-    void fixupJumpTable(HandleScript script, BaselineScript *baseline);
+    void fixupJumpTable(JSScript *script, BaselineScript *baseline);
 
     class Compiler : public ICStubCompiler {
         bool generateStubCode(MacroAssembler &masm);
 
         jsbytecode *pc_;
 
       public:
         Compiler(JSContext *cx, jsbytecode *pc)
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -35,25 +35,28 @@ PCMappingSlotInfo::ToSlotLocation(const 
             return SlotInR0;
         JS_ASSERT(stackVal->reg() == R1);
         return SlotInR1;
     }
     JS_ASSERT(stackVal->kind() != StackValue::Stack);
     return SlotIgnore;
 }
 
-BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t spsPushToggleOffset)
+BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
+                               uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset)
   : method_(nullptr),
     templateScope_(nullptr),
     fallbackStubSpace_(),
     prologueOffset_(prologueOffset),
+    epilogueOffset_(epilogueOffset),
 #ifdef DEBUG
     spsOn_(false),
 #endif
     spsPushToggleOffset_(spsPushToggleOffset),
+    postDebugPrologueOffset_(postDebugPrologueOffset),
     flags_(0)
 { }
 
 static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096;
 static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;
 
 static bool
 CheckFrame(InterpreterFrame *fp)
@@ -209,17 +212,17 @@ jit::EnterBaselineAtBranch(JSContext *cx
     if (status != IonExec_Ok)
         return status;
 
     fp->setReturnValue(data.result);
     return IonExec_Ok;
 }
 
 MethodStatus
-jit::BaselineCompile(JSContext *cx, HandleScript script)
+jit::BaselineCompile(JSContext *cx, JSScript *script)
 {
     JS_ASSERT(!script->hasBaselineScript());
     JS_ASSERT(script->canBaselineCompile());
     JS_ASSERT(IsBaselineEnabled(cx));
     LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
 
     script->ensureNonLazyCanonicalFunction(cx);
 
@@ -354,19 +357,19 @@ jit::CanEnterBaselineMethod(JSContext *c
         return Method_CantCompile;
     }
 
     RootedScript script(cx, state.script());
     return CanEnterBaselineJIT(cx, script, /* osr = */false);
 };
 
 BaselineScript *
-BaselineScript::New(JSContext *cx, uint32_t prologueOffset,
-                    uint32_t spsPushToggleOffset, size_t icEntries,
-                    size_t pcMappingIndexEntries, size_t pcMappingSize,
+BaselineScript::New(JSContext *cx, uint32_t prologueOffset, uint32_t epilogueOffset,
+                    uint32_t postDebugPrologueOffset, uint32_t spsPushToggleOffset,
+                    size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
                     size_t bytecodeTypeMapEntries)
 {
     static const unsigned DataAlignment = sizeof(uintptr_t);
 
     size_t paddedBaselineScriptSize = AlignBytes(sizeof(BaselineScript), DataAlignment);
 
     size_t icEntriesSize = icEntries * sizeof(ICEntry);
     size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
@@ -383,17 +386,18 @@ BaselineScript::New(JSContext *cx, uint3
         paddedPCMappingSize +
         paddedBytecodeTypesMapSize;
 
     uint8_t *buffer = (uint8_t *)cx->malloc_(allocBytes);
     if (!buffer)
         return nullptr;
 
     BaselineScript *script = reinterpret_cast<BaselineScript *>(buffer);
-    new (script) BaselineScript(prologueOffset, spsPushToggleOffset);
+    new (script) BaselineScript(prologueOffset, epilogueOffset,
+                                spsPushToggleOffset, postDebugPrologueOffset);
 
     size_t offsetCursor = paddedBaselineScriptSize;
 
     script->icEntriesOffset_ = offsetCursor;
     script->icEntries_ = icEntries;
     offsetCursor += paddedICEntriesSize;
 
     script->pcMappingIndexOffset_ = offsetCursor;
@@ -591,17 +595,17 @@ BaselineScript::icEntryFromReturnAddress
 {
     JS_ASSERT(returnAddr > method_->raw());
     JS_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
     CodeOffsetLabel offset(returnAddr - method_->raw());
     return icEntryFromReturnOffset(offset);
 }
 
 void
-BaselineScript::copyICEntries(HandleScript script, const ICEntry *entries, MacroAssembler &masm)
+BaselineScript::copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm)
 {
     // Fix up the return offset in the IC entries and copy them in.
     // Also write out the IC entry ptrs in any fallback stubs that were added.
     for (uint32_t i = 0; i < numICEntries(); i++) {
         ICEntry &realEntry = icEntry(i);
         realEntry = entries[i];
         realEntry.fixupReturnOffset(masm);
 
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -114,22 +114,34 @@ struct BaselineScript
     HeapPtrObject templateScope_;
 
     // Allocated space for fallback stubs.
     FallbackICStubSpace fallbackStubSpace_;
 
     // Native code offset right before the scope chain is initialized.
     uint32_t prologueOffset_;
 
+    // Native code offset right before the frame is popped and the method
+    // returned from.
+    uint32_t epilogueOffset_;
+
     // The offsets for the toggledJump instructions for SPS update ICs.
 #ifdef DEBUG
     mozilla::DebugOnly<bool> spsOn_;
 #endif
     uint32_t spsPushToggleOffset_;
 
+    // Native code offset right after the debug prologue VM call returns, or
+    // would have returned. This offset is recorded even when debug mode is
+    // off to aid on-stack debug mode recompilation.
+    //
+    // We don't need one for the debug epilogue because that always happens
+    // right before the epilogue, so we just use the epilogue offset.
+    uint32_t postDebugPrologueOffset_;
+
   public:
     enum Flag {
         // Flag set by JSScript::argumentsOptimizationFailed. Similar to
         // JSScript::needsArgsObj_, but can be read from JIT code.
         NEEDS_ARGS_OBJ = 1 << 0,
 
         // Flag set when discarding JIT code, to indicate this script is
         // on the stack and should not be discarded.
@@ -160,19 +172,21 @@ struct BaselineScript
     uint32_t pcMappingSize_;
 
     // List mapping indexes of bytecode type sets to the offset of the opcode
     // they correspond to, for use by TypeScript::BytecodeTypes.
     uint32_t bytecodeTypeMapOffset_;
 
   public:
     // Do not call directly, use BaselineScript::New. This is public for cx->new_.
-    BaselineScript(uint32_t prologueOffset, uint32_t spsPushToggleOffset);
+    BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
+                   uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset);
 
     static BaselineScript *New(JSContext *cx, uint32_t prologueOffset,
+                               uint32_t epilogueOffset, uint32_t postDebugPrologueOffset,
                                uint32_t spsPushToggleOffset, size_t icEntries,
                                size_t pcMappingIndexEntries, size_t pcMappingSize,
                                size_t bytecodeTypeMapEntries);
     static void Trace(JSTracer *trc, BaselineScript *script);
     static void Destroy(FreeOp *fop, BaselineScript *script);
 
     void purgeOptimizedStubs(Zone *zone);
 
@@ -219,16 +233,30 @@ struct BaselineScript
 
     uint32_t prologueOffset() const {
         return prologueOffset_;
     }
     uint8_t *prologueEntryAddr() const {
         return method_->raw() + prologueOffset_;
     }
 
+    uint32_t epilogueOffset() const {
+        return epilogueOffset_;
+    }
+    uint8_t *epilogueEntryAddr() const {
+        return method_->raw() + epilogueOffset_;
+    }
+
+    uint32_t postDebugPrologueOffset() const {
+        return postDebugPrologueOffset_;
+    }
+    uint8_t *postDebugPrologueAddr() const {
+        return method_->raw() + postDebugPrologueOffset_;
+    }
+
     ICEntry *icEntryList() {
         return (ICEntry *)(reinterpret_cast<uint8_t *>(this) + icEntriesOffset_);
     }
     PCMappingIndexEntry *pcMappingIndexEntryList() {
         return (PCMappingIndexEntry *)(reinterpret_cast<uint8_t *>(this) + pcMappingIndexOffset_);
     }
     uint8_t *pcMappingData() {
         return reinterpret_cast<uint8_t *>(this) + pcMappingOffset_;
@@ -252,30 +280,35 @@ struct BaselineScript
         JS_ASSERT(!templateScope_);
         templateScope_ = templateScope;
     }
 
     void toggleBarriers(bool enabled) {
         method()->togglePreBarriers(enabled);
     }
 
+    bool containsCodeAddress(uint8_t *addr) const {
+        return method()->raw() <= addr && addr <= method()->raw() + method()->instructionsSize();
+    }
+
     ICEntry &icEntry(size_t index);
     ICEntry *maybeICEntryFromReturnOffset(CodeOffsetLabel returnOffset);
     ICEntry &icEntryFromReturnOffset(CodeOffsetLabel returnOffset);
     ICEntry &icEntryFromPCOffset(uint32_t pcOffset);
+    ICEntry &icEntryForDebugModeRecompileFromPCOffset(uint32_t pcOffset);
     ICEntry &icEntryFromPCOffset(uint32_t pcOffset, ICEntry *prevLookedUpEntry);
     ICEntry *maybeICEntryFromReturnAddress(uint8_t *returnAddr);
     ICEntry &icEntryFromReturnAddress(uint8_t *returnAddr);
     uint8_t *returnAddressForIC(const ICEntry &ent);
 
     size_t numICEntries() const {
         return icEntries_;
     }
 
-    void copyICEntries(HandleScript script, const ICEntry *entries, MacroAssembler &masm);
+    void copyICEntries(JSScript *script, const ICEntry *entries, MacroAssembler &masm);
     void adoptFallbackStubs(FallbackICStubSpace *stubSpace);
 
     PCMappingIndexEntry &pcMappingIndexEntry(size_t index);
     CompactBufferReader pcMappingReader(size_t indexEntry);
 
     size_t numPCMappingIndexEntries() const {
         return pcMappingIndexEntries_;
     }
@@ -382,16 +415,16 @@ BailoutIonToBaseline(JSContext *cx, JitA
                      const ExceptionBailoutInfo *exceptionInfo = nullptr);
 
 // Mark baseline scripts on the stack as active, so that they are not discarded
 // during GC.
 void
 MarkActiveBaselineScripts(Zone *zone);
 
 MethodStatus
-BaselineCompile(JSContext *cx, HandleScript script);
+BaselineCompile(JSContext *cx, JSScript *script);
 
 } // namespace jit
 } // namespace js
 
 #endif // JS_ION
 
 #endif /* jit_BaselineJIT_h */
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -12,16 +12,17 @@
 #include "jscompartment.h"
 #include "jsprf.h"
 #include "jsworkers.h"
 
 #include "gc/Marking.h"
 #include "jit/AliasAnalysis.h"
 #include "jit/AsmJSModule.h"
 #include "jit/BacktrackingAllocator.h"
+#include "jit/BaselineDebugModeOSR.h"
 #include "jit/BaselineFrame.h"
 #include "jit/BaselineInspector.h"
 #include "jit/BaselineJIT.h"
 #include "jit/CodeGenerator.h"
 #include "jit/EdgeCaseAnalysis.h"
 #include "jit/EffectiveAddressAnalysis.h"
 #include "jit/IonAnalysis.h"
 #include "jit/IonBuilder.h"
@@ -153,16 +154,17 @@ JitRuntime::JitRuntime()
     enterJIT_(nullptr),
     bailoutHandler_(nullptr),
     argumentsRectifier_(nullptr),
     argumentsRectifierReturnAddr_(nullptr),
     parallelArgumentsRectifier_(nullptr),
     invalidator_(nullptr),
     debugTrapHandler_(nullptr),
     forkJoinGetSliceStub_(nullptr),
+    baselineDebugModeOSRHandler_(nullptr),
     functionWrappers_(nullptr),
     osrTempData_(nullptr),
     flusher_(nullptr),
     ionCodeProtected_(false)
 {
 }
 
 JitRuntime::~JitRuntime()
@@ -451,19 +453,22 @@ jit::RequestInterruptForIonCode(JSRuntim
 
       default:
         MOZ_ASSUME_UNREACHABLE("Bad interrupt mode");
     }
 }
 
 JitCompartment::JitCompartment()
   : stubCodes_(nullptr),
-    baselineCallReturnAddr_(nullptr),
-    baselineGetPropReturnAddr_(nullptr),
-    baselineSetPropReturnAddr_(nullptr),
+    baselineCallReturnFromIonAddr_(nullptr),
+    baselineGetPropReturnFromIonAddr_(nullptr),
+    baselineSetPropReturnFromIonAddr_(nullptr),
+    baselineCallReturnFromStubAddr_(nullptr),
+    baselineGetPropReturnFromStubAddr_(nullptr),
+    baselineSetPropReturnFromStubAddr_(nullptr),
     stringConcatStub_(nullptr),
     parallelStringConcatStub_(nullptr),
     activeParallelEntryScripts_(nullptr)
 {
 }
 
 JitCompartment::~JitCompartment()
 {
@@ -616,23 +621,29 @@ JitCompartment::mark(JSTracer *trc, JSCo
 }
 
 void
 JitCompartment::sweep(FreeOp *fop)
 {
     stubCodes_->sweep(fop);
 
     // If the sweep removed the ICCall_Fallback stub, nullptr the baselineCallReturnAddr_ field.
-    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback)))
-        baselineCallReturnAddr_ = nullptr;
+    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::Call_Fallback))) {
+        baselineCallReturnFromIonAddr_ = nullptr;
+        baselineCallReturnFromStubAddr_ = nullptr;
+    }
     // Similarly for the ICGetProp_Fallback stub.
-    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback)))
-        baselineGetPropReturnAddr_ = nullptr;
-    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback)))
-        baselineSetPropReturnAddr_ = nullptr;
+    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::GetProp_Fallback))) {
+        baselineGetPropReturnFromIonAddr_ = nullptr;
+        baselineGetPropReturnFromStubAddr_ = nullptr;
+    }
+    if (!stubCodes_->lookup(static_cast<uint32_t>(ICStub::SetProp_Fallback))) {
+        baselineSetPropReturnFromIonAddr_ = nullptr;
+        baselineSetPropReturnFromStubAddr_ = nullptr;
+    }
 
     if (stringConcatStub_ && !IsJitCodeMarked(stringConcatStub_.unsafeGet()))
         stringConcatStub_ = nullptr;
 
     if (parallelStringConcatStub_ && !IsJitCodeMarked(parallelStringConcatStub_.unsafeGet()))
         parallelStringConcatStub_ = nullptr;
 
     if (activeParallelEntryScripts_) {
@@ -2003,17 +2014,17 @@ CheckScriptSize(JSContext *cx, JSScript*
             return Method_CantCompile;
         }
     }
 
     return Method_Compiled;
 }
 
 bool
-CanIonCompileScript(JSContext *cx, HandleScript script, bool osr)
+CanIonCompileScript(JSContext *cx, JSScript *script, bool osr)
 {
     if (!script->canIonCompile() || !CheckScript(cx, script, osr))
         return false;
 
     return CheckScriptSize(cx, script) == Method_Compiled;
 }
 
 static OptimizationLevel
--- a/js/src/jit/Ion.h
+++ b/js/src/jit/Ion.h
@@ -77,17 +77,17 @@ class IonContext
 bool InitializeIon();
 
 // Get and set the current Ion context.
 IonContext *GetIonContext();
 IonContext *MaybeGetIonContext();
 
 void SetIonContext(IonContext *ctx);
 
-bool CanIonCompileScript(JSContext *cx, HandleScript script, bool osr);
+bool CanIonCompileScript(JSContext *cx, JSScript *script, bool osr);
 
 MethodStatus CanEnterAtBranch(JSContext *cx, JSScript *script,
                               BaselineFrame *frame, jsbytecode *pc, bool isConstructing);
 MethodStatus CanEnter(JSContext *cx, RunState &state);
 MethodStatus CompileFunctionForBaseline(JSContext *cx, HandleScript script, BaselineFrame *frame,
                                         bool isConstructing);
 MethodStatus CanEnterUsingFastInvoke(JSContext *cx, HandleScript script, uint32_t numActualArgs);
 
--- a/js/src/jit/IonFrames.cpp
+++ b/js/src/jit/IonFrames.cpp
@@ -6,16 +6,17 @@
 
 #include "jit/IonFrames-inl.h"
 
 #include "jsfun.h"
 #include "jsobj.h"
 #include "jsscript.h"
 
 #include "gc/Marking.h"
+#include "jit/BaselineDebugModeOSR.h"
 #include "jit/BaselineFrame.h"
 #include "jit/BaselineIC.h"
 #include "jit/BaselineJIT.h"
 #include "jit/Ion.h"
 #include "jit/IonMacroAssembler.h"
 #include "jit/IonSpewer.h"
 #include "jit/JitCompartment.h"
 #include "jit/ParallelFunctions.h"
@@ -220,20 +221,28 @@ JitFrameIterator::script() const
 void
 JitFrameIterator::baselineScriptAndPc(JSScript **scriptRes, jsbytecode **pcRes) const
 {
     JS_ASSERT(isBaselineJS());
     JSScript *script = this->script();
     if (scriptRes)
         *scriptRes = script;
     uint8_t *retAddr = returnAddressToFp();
+
+    // If we are in the middle of the debug mode OSR handler, get the real
+    // return address as stashed in the BaselineDebugModeOSRInfo.
+    if (BaselineDebugModeOSRInfo *info = baselineFrame()->getDebugModeOSRInfo())
+        retAddr = info->resumeAddr;
+
     if (pcRes) {
-        // If the return address is into the prologue entry address, then assume start
-        // of script.
-        if (retAddr == script->baselineScript()->prologueEntryAddr()) {
+        // If the return address is into the prologue entry address or just
+        // after the debug prologue, then assume start of script.
+        if (retAddr == script->baselineScript()->prologueEntryAddr() ||
+            retAddr == script->baselineScript()->postDebugPrologueAddr())
+        {
             *pcRes = script->code();
             return;
         }
 
         // The return address _may_ be a return from a callVM or IC chain call done for
         // some op.
         ICEntry *icEntry = script->baselineScript()->maybeICEntryFromReturnAddress(retAddr);
         if (icEntry) {
@@ -563,16 +572,23 @@ HandleExceptionBaseline(JSContext *cx, c
 
           default:
             MOZ_ASSUME_UNREACHABLE("Invalid try note");
         }
     }
 
 }
 
+struct AutoDeleteDebugModeOSRInfo
+{
+    BaselineFrame *frame;
+    AutoDeleteDebugModeOSRInfo(BaselineFrame *frame) : frame(frame) { MOZ_ASSERT(frame); }
+    ~AutoDeleteDebugModeOSRInfo() { frame->deleteDebugModeOSRInfo(); }
+};
+
 void
 HandleException(ResumeFromException *rfe)
 {
     JSContext *cx = GetJSContextFromJitCode();
 
     rfe->kind = ResumeFromException::RESUME_ENTRY_FRAME;
 
     IonSpew(IonSpew_Invalidate, "handling exception");
@@ -633,16 +649,26 @@ HandleException(ResumeFromException *rfe
             if (invalidated)
                 ionScript->decref(cx->runtime()->defaultFreeOp());
 
         } else if (iter.isBaselineJS()) {
             // It's invalid to call DebugEpilogue twice for the same frame.
             bool calledDebugEpilogue = false;
 
             HandleExceptionBaseline(cx, iter, rfe, &calledDebugEpilogue);
+
+            // If we are propagating an exception through a frame with
+            // on-stack recompile info, we should free the allocated
+            // BaselineDebugModeOSRInfo struct before we leave this block,
+            // as we will not be returning to the recompile handler.
+            //
+            // We cannot delete it immediately because of the call to
+            // iter.baselineScriptAndPc below.
+            AutoDeleteDebugModeOSRInfo deleteDebugModeOSRInfo(iter.baselineFrame());
+
             if (rfe->kind != ResumeFromException::RESUME_ENTRY_FRAME)
                 return;
 
             // Unwind profiler pseudo-stack
             JSScript *script = iter.script();
             probes::ExitScript(cx, script, script->functionNonDelazifying(),
                                iter.baselineFrame()->hasPushedSPSFrame());
             // After this point, any pushed SPS frame would have been popped if it needed
--- a/js/src/jit/IonFrames.h
+++ b/js/src/jit/IonFrames.h
@@ -797,16 +797,20 @@ class IonBaselineStubFrameLayout : publi
     static inline int reverseOffsetOfSavedFramePtr() {
         return -int(2 * sizeof(void *));
     }
 
     inline ICStub *maybeStubPtr() {
         uint8_t *fp = reinterpret_cast<uint8_t *>(this);
         return *reinterpret_cast<ICStub **>(fp + reverseOffsetOfStubPtr());
     }
+    inline void setStubPtr(ICStub *stub) {
+        uint8_t *fp = reinterpret_cast<uint8_t *>(this);
+        *reinterpret_cast<ICStub **>(fp + reverseOffsetOfStubPtr()) = stub;
+    }
 };
 
 // An invalidation bailout stack is at the stack pointer for the callee frame.
 class InvalidationBailoutStack
 {
     mozilla::Array<double, FloatRegisters::Total> fpregs_;
     mozilla::Array<uintptr_t, Registers::Total> regs_;
     IonScript   *ionScript_;
--- a/js/src/jit/IonSpewer.cpp
+++ b/js/src/jit/IonSpewer.cpp
@@ -256,16 +256,17 @@ jit::CheckLogging()
             "\n"
             "  bl-aborts  Baseline compiler abort messages\n"
             "  bl-scripts Baseline script-compilation\n"
             "  bl-op      Baseline compiler detailed op-specific messages\n"
             "  bl-ic      Baseline inline-cache messages\n"
             "  bl-ic-fb   Baseline IC fallback stub messages\n"
             "  bl-osr     Baseline IC OSR messages\n"
             "  bl-bails   Baseline bailouts\n"
+            "  bl-dbg-osr Baseline debug mode on stack recompile messages\n"
             "  bl-all     All baseline spew\n"
             "\n"
         );
         exit(0);
         /*NOTREACHED*/
     }
     if (ContainsFlag(env, "aborts"))
         EnableChannel(IonSpew_Abort);
@@ -317,24 +318,27 @@ jit::CheckLogging()
     if (ContainsFlag(env, "bl-ic"))
         EnableChannel(IonSpew_BaselineIC);
     if (ContainsFlag(env, "bl-ic-fb"))
         EnableChannel(IonSpew_BaselineICFallback);
     if (ContainsFlag(env, "bl-osr"))
         EnableChannel(IonSpew_BaselineOSR);
     if (ContainsFlag(env, "bl-bails"))
         EnableChannel(IonSpew_BaselineBailouts);
+    if (ContainsFlag(env, "bl-dbg-osr"))
+        EnableChannel(IonSpew_BaselineDebugModeOSR);
     if (ContainsFlag(env, "bl-all")) {
         EnableChannel(IonSpew_BaselineAbort);
         EnableChannel(IonSpew_BaselineScripts);
         EnableChannel(IonSpew_BaselineOp);
         EnableChannel(IonSpew_BaselineIC);
         EnableChannel(IonSpew_BaselineICFallback);
         EnableChannel(IonSpew_BaselineOSR);
         EnableChannel(IonSpew_BaselineBailouts);
+        EnableChannel(IonSpew_BaselineDebugModeOSR);
     }
 
     IonSpewFile = stderr;
 }
 
 void
 jit::IonSpewStartVA(IonSpewChannel channel, const char *fmt, va_list ap)
 {
--- a/js/src/jit/IonSpewer.h
+++ b/js/src/jit/IonSpewer.h
@@ -69,17 +69,19 @@ namespace jit {
     _(BaselineOp)                           \
     /* Inline caches. */                    \
     _(BaselineIC)                           \
     /* Inline cache fallbacks. */           \
     _(BaselineICFallback)                   \
     /* OSR from Baseline => Ion. */         \
     _(BaselineOSR)                          \
     /* Bailouts. */                         \
-    _(BaselineBailouts)
+    _(BaselineBailouts)                     \
+    /* Debug Mode On Stack Recompile. */    \
+    _(BaselineDebugModeOSR)
 
 
 enum IonSpewChannel {
 #define IONSPEW_CHANNEL(name) IonSpew_##name,
     IONSPEW_CHANNEL_LIST(IONSPEW_CHANNEL)
 #undef IONSPEW_CHANNEL
     IonSpew_Terminator
 };
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -187,16 +187,20 @@ class JitRuntime
     JitCode *shapePreBarrier_;
 
     // Thunk used by the debugger for breakpoint and step mode.
     JitCode *debugTrapHandler_;
 
     // Stub used to inline the ForkJoinGetSlice intrinsic.
     JitCode *forkJoinGetSliceStub_;
 
+    // Thunk used to fix up on-stack recompile of baseline scripts.
+    JitCode *baselineDebugModeOSRHandler_;
+    void *baselineDebugModeOSRHandlerNoFrameRegPopAddr_;
+
     // Map VMFunction addresses to the JitCode of the wrapper.
     typedef WeakCache<const VMFunction *, JitCode *> VMWrapperMap;
     VMWrapperMap *functionWrappers_;
 
     // Buffer for OSR from baseline to Ion. To avoid holding on to this for
     // too long, it's also freed in JitCompartment::mark and in EnterBaseline
     // (after returning from JIT code).
     uint8_t *osrTempData_;
@@ -218,16 +222,17 @@ class JitRuntime
     JitCode *generateEnterJIT(JSContext *cx, EnterJitType type);
     JitCode *generateArgumentsRectifier(JSContext *cx, ExecutionMode mode, void **returnAddrOut);
     JitCode *generateBailoutTable(JSContext *cx, uint32_t frameClass);
     JitCode *generateBailoutHandler(JSContext *cx);
     JitCode *generateInvalidator(JSContext *cx);
     JitCode *generatePreBarrier(JSContext *cx, MIRType type);
     JitCode *generateDebugTrapHandler(JSContext *cx);
     JitCode *generateForkJoinGetSliceStub(JSContext *cx);
+    JitCode *generateBaselineDebugModeOSRHandler(JSContext *cx, uint32_t *noFrameRegPopOffsetOut);
     JitCode *generateVMWrapper(JSContext *cx, const VMFunction &f);
 
     JSC::ExecutableAllocator *createIonAlloc(JSContext *cx);
 
   public:
     JitRuntime();
     ~JitRuntime();
     bool initialize(JSContext *cx);
@@ -278,16 +283,18 @@ class JitRuntime
     void ensureIonCodeProtected(JSRuntime *rt);
     void ensureIonCodeAccessible(JSRuntime *rt);
     void patchIonBackedges(JSRuntime *rt, BackedgeTarget target);
 
     bool handleAccessViolation(JSRuntime *rt, void *faultingAddress);
 
     JitCode *getVMWrapper(const VMFunction &f) const;
     JitCode *debugTrapHandler(JSContext *cx);
+    JitCode *getBaselineDebugModeOSRHandler(JSContext *cx);
+    void *getBaselineDebugModeOSRHandlerAddress(JSContext *cx, bool popFrameReg);
 
     JitCode *getGenericBailoutHandler() const {
         return bailoutHandler_;
     }
 
     JitCode *getExceptionTail() const {
         return exceptionTail_;
     }
@@ -352,19 +359,26 @@ class JitCompartment
     friend class JitActivation;
 
     // Map ICStub keys to ICStub shared code objects.
     typedef WeakValueCache<uint32_t, ReadBarriered<JitCode> > ICStubCodeMap;
     ICStubCodeMap *stubCodes_;
 
     // Keep track of offset into various baseline stubs' code at return
     // point from called script.
-    void *baselineCallReturnAddr_;
-    void *baselineGetPropReturnAddr_;
-    void *baselineSetPropReturnAddr_;
+    void *baselineCallReturnFromIonAddr_;
+    void *baselineGetPropReturnFromIonAddr_;
+    void *baselineSetPropReturnFromIonAddr_;
+
+    // Same as above, but is used for return from a baseline stub. This is
+    // used for recompiles of on-stack baseline scripts (e.g., for debug
+    // mode).
+    void *baselineCallReturnFromStubAddr_;
+    void *baselineGetPropReturnFromStubAddr_;
+    void *baselineSetPropReturnFromStubAddr_;
 
     // Stub to concatenate two strings inline. Note that it can't be
     // stored in JitRuntime because masm.newGCString bakes in zone-specific
     // pointers. This has to be a weak pointer to avoid keeping the whole
     // compartment alive.
     ReadBarriered<JitCode> stringConcatStub_;
     ReadBarriered<JitCode> parallelStringConcatStub_;
 
@@ -386,39 +400,64 @@ class JitCompartment
     bool putStubCode(uint32_t key, Handle<JitCode *> stubCode) {
         // Make sure to do a lookupForAdd(key) and then insert into that slot, because
         // that way if stubCode gets moved due to a GC caused by lookupForAdd, then
         // we still write the correct pointer.
         JS_ASSERT(!stubCodes_->has(key));
         ICStubCodeMap::AddPtr p = stubCodes_->lookupForAdd(key);
         return stubCodes_->add(p, key, stubCode.get());
     }
-    void initBaselineCallReturnAddr(void *addr) {
-        JS_ASSERT(baselineCallReturnAddr_ == nullptr);
-        baselineCallReturnAddr_ = addr;
+    void initBaselineCallReturnFromIonAddr(void *addr) {
+        JS_ASSERT(baselineCallReturnFromIonAddr_ == nullptr);
+        baselineCallReturnFromIonAddr_ = addr;
+    }
+    void *baselineCallReturnFromIonAddr() {
+        JS_ASSERT(baselineCallReturnFromIonAddr_ != nullptr);
+        return baselineCallReturnFromIonAddr_;
+    }
+    void initBaselineGetPropReturnFromIonAddr(void *addr) {
+        JS_ASSERT(baselineGetPropReturnFromIonAddr_ == nullptr);
+        baselineGetPropReturnFromIonAddr_ = addr;
     }
-    void *baselineCallReturnAddr() {
-        JS_ASSERT(baselineCallReturnAddr_ != nullptr);
-        return baselineCallReturnAddr_;
+    void *baselineGetPropReturnFromIonAddr() {
+        JS_ASSERT(baselineGetPropReturnFromIonAddr_ != nullptr);
+        return baselineGetPropReturnFromIonAddr_;
     }
-    void initBaselineGetPropReturnAddr(void *addr) {
-        JS_ASSERT(baselineGetPropReturnAddr_ == nullptr);
-        baselineGetPropReturnAddr_ = addr;
+    void initBaselineSetPropReturnFromIonAddr(void *addr) {
+        JS_ASSERT(baselineSetPropReturnFromIonAddr_ == nullptr);
+        baselineSetPropReturnFromIonAddr_ = addr;
+    }
+    void *baselineSetPropReturnFromIonAddr() {
+        JS_ASSERT(baselineSetPropReturnFromIonAddr_ != nullptr);
+        return baselineSetPropReturnFromIonAddr_;
     }
-    void *baselineGetPropReturnAddr() {
-        JS_ASSERT(baselineGetPropReturnAddr_ != nullptr);
-        return baselineGetPropReturnAddr_;
+
+    void initBaselineCallReturnFromStubAddr(void *addr) {
+        JS_ASSERT(baselineCallReturnFromStubAddr_ == nullptr);
+        baselineCallReturnFromStubAddr_ = addr;
+    }
+    void *baselineCallReturnFromStubAddr() {
+        JS_ASSERT(baselineCallReturnFromStubAddr_ != nullptr);
+        return baselineCallReturnFromStubAddr_;
     }
-    void initBaselineSetPropReturnAddr(void *addr) {
-        JS_ASSERT(baselineSetPropReturnAddr_ == nullptr);
-        baselineSetPropReturnAddr_ = addr;
+    void initBaselineGetPropReturnFromStubAddr(void *addr) {
+        JS_ASSERT(baselineGetPropReturnFromStubAddr_ == nullptr);
+        baselineGetPropReturnFromStubAddr_ = addr;
+    }
+    void *baselineGetPropReturnFromStubAddr() {
+        JS_ASSERT(baselineGetPropReturnFromStubAddr_ != nullptr);
+        return baselineGetPropReturnFromStubAddr_;
     }
-    void *baselineSetPropReturnAddr() {
-        JS_ASSERT(baselineSetPropReturnAddr_ != nullptr);
-        return baselineSetPropReturnAddr_;
+    void initBaselineSetPropReturnFromStubAddr(void *addr) {
+        JS_ASSERT(baselineSetPropReturnFromStubAddr_ == nullptr);
+        baselineSetPropReturnFromStubAddr_ = addr;
+    }
+    void *baselineSetPropReturnFromStubAddr() {
+        JS_ASSERT(baselineSetPropReturnFromStubAddr_ != nullptr);
+        return baselineSetPropReturnFromStubAddr_;
     }
 
     bool notifyOfActiveParallelEntryScript(JSContext *cx, HandleScript script);
 
     void toggleBaselineStubBarriers(bool enabled);
 
     JSC::ExecutableAllocator *createIonAlloc();
 
--- a/js/src/jit/arm/BaselineCompiler-arm.cpp
+++ b/js/src/jit/arm/BaselineCompiler-arm.cpp
@@ -4,12 +4,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/arm/BaselineCompiler-arm.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompilerARM::BaselineCompilerARM(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompilerARM::BaselineCompilerARM(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : BaselineCompilerShared(cx, alloc, script)
 {
 }
--- a/js/src/jit/arm/BaselineCompiler-arm.h
+++ b/js/src/jit/arm/BaselineCompiler-arm.h
@@ -10,17 +10,17 @@
 #include "jit/shared/BaselineCompiler-shared.h"
 
 namespace js {
 namespace jit {
 
 class BaselineCompilerARM : public BaselineCompilerShared
 {
   protected:
-    BaselineCompilerARM(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompilerARM(JSContext *cx, TempAllocator &alloc, JSScript *script);
 };
 
 typedef BaselineCompilerARM BaselineCompilerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_arm_BaselineCompiler_arm_h */
--- a/js/src/jit/arm/BaselineHelpers-arm.h
+++ b/js/src/jit/arm/BaselineHelpers-arm.h
@@ -155,41 +155,52 @@ EmitEnterStubFrame(MacroAssembler &masm,
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
 
     // We pushed 4 words, so the stack is still aligned to 8 bytes.
     masm.checkStackAlignment();
 }
 
 inline void
-EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+EmitLeaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon = false)
 {
     // Ion frames do not save and restore the frame pointer. If we called
     // into Ion, we have to restore the stack pointer from the frame descriptor.
     // If we performed a VM call, the descriptor has been popped already so
     // in that case we use the frame pointer.
     if (calledIntoIon) {
         masm.pop(ScratchRegister);
         masm.ma_lsr(Imm32(FRAMESIZE_SHIFT), ScratchRegister, ScratchRegister);
         masm.ma_add(ScratchRegister, BaselineStackReg);
     } else {
         masm.mov(BaselineFrameReg, BaselineStackReg);
     }
+}
 
+inline void
+EmitLeaveStubFrameCommonTail(MacroAssembler &masm)
+{
     masm.pop(BaselineFrameReg);
     masm.pop(BaselineStubReg);
 
     // Load the return address.
     masm.pop(BaselineTailCallReg);
 
     // Discard the frame descriptor.
     masm.pop(ScratchRegister);
 }
 
 inline void
+EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+{
+    EmitLeaveStubFrameHead(masm, calledIntoIon);
+    EmitLeaveStubFrameCommonTail(masm);
+}
+
+inline void
 EmitStowICValues(MacroAssembler &masm, int values)
 {
     JS_ASSERT(values >= 0 && values <= 2);
     switch(values) {
       case 1:
         // Stow R0
         masm.pushValue(R0);
         break;
--- a/js/src/jit/shared/BaselineCompiler-shared.cpp
+++ b/js/src/jit/shared/BaselineCompiler-shared.cpp
@@ -7,26 +7,26 @@
 #include "jit/shared/BaselineCompiler-shared.h"
 
 #include "jit/BaselineIC.h"
 #include "jit/VMFunctions.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompilerShared::BaselineCompilerShared(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompilerShared::BaselineCompilerShared(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : cx(cx),
-    script(cx, script),
+    script(script),
     pc(script->code()),
     ionCompileable_(jit::IsIonEnabled(cx) && CanIonCompileScript(cx, script, false)),
     ionOSRCompileable_(jit::IsIonEnabled(cx) && CanIonCompileScript(cx, script, true)),
     debugMode_(cx->compartment()->debugMode()),
     alloc_(alloc),
     analysis_(alloc, script),
-    frame(cx, script, masm),
+    frame(script, masm),
     stubSpace_(),
     icEntries_(),
     pcMappingEntries_(),
     icLoadLabels_(),
     pushedBeforeCall_(0),
     inCall_(false),
     spsPushToggleOffset_()
 { }
@@ -91,13 +91,13 @@ BaselineCompilerShared::callVM(const VMF
 
     // Perform the call.
     masm.call(code);
     uint32_t callOffset = masm.currentOffset();
     masm.pop(BaselineFrameReg);
 
     // Add a fake ICEntry (without stubs), so that the return offset to
     // pc mapping works.
-    ICEntry entry(script->pcToOffset(pc), false);
+    ICEntry entry(script->pcToOffset(pc), ICEntry::Kind_CallVM);
     entry.setReturnOffset(callOffset);
 
     return icEntries_.append(entry);
 }
--- a/js/src/jit/shared/BaselineCompiler-shared.h
+++ b/js/src/jit/shared/BaselineCompiler-shared.h
@@ -14,17 +14,17 @@
 
 namespace js {
 namespace jit {
 
 class BaselineCompilerShared
 {
   protected:
     JSContext *cx;
-    RootedScript script;
+    JSScript *script;
     jsbytecode *pc;
     MacroAssembler masm;
     bool ionCompileable_;
     bool ionOSRCompileable_;
     bool debugMode_;
 
     TempAllocator &alloc_;
     BytecodeAnalysis analysis_;
@@ -64,24 +64,24 @@ class BaselineCompilerShared
     };
     js::Vector<ICLoadLabel, 16, SystemAllocPolicy> icLoadLabels_;
 
     uint32_t pushedBeforeCall_;
     mozilla::DebugOnly<bool> inCall_;
 
     CodeOffsetLabel spsPushToggleOffset_;
 
-    BaselineCompilerShared(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompilerShared(JSContext *cx, TempAllocator &alloc, JSScript *script);
 
-    ICEntry *allocateICEntry(ICStub *stub, bool isForOp) {
+    ICEntry *allocateICEntry(ICStub *stub, ICEntry::Kind kind) {
         if (!stub)
             return nullptr;
 
         // Create the entry and add it to the vector.
-        if (!icEntries_.append(ICEntry(script->pcToOffset(pc), isForOp)))
+        if (!icEntries_.append(ICEntry(script->pcToOffset(pc), kind)))
             return nullptr;
         ICEntry &vecEntry = icEntries_.back();
 
         // Set the first stub for the IC entry to the fallback stub
         vecEntry.setFirstStub(stub);
 
         // Return pointer to the IC entry
         return &vecEntry;
--- a/js/src/jit/shared/BaselineCompiler-x86-shared.cpp
+++ b/js/src/jit/shared/BaselineCompiler-x86-shared.cpp
@@ -4,12 +4,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/shared/BaselineCompiler-x86-shared.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompilerX86Shared::BaselineCompilerX86Shared(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompilerX86Shared::BaselineCompilerX86Shared(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : BaselineCompilerShared(cx, alloc, script)
 {
 }
--- a/js/src/jit/shared/BaselineCompiler-x86-shared.h
+++ b/js/src/jit/shared/BaselineCompiler-x86-shared.h
@@ -10,15 +10,15 @@
 #include "jit/shared/BaselineCompiler-shared.h"
 
 namespace js {
 namespace jit {
 
 class BaselineCompilerX86Shared : public BaselineCompilerShared
 {
   protected:
-    BaselineCompilerX86Shared(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompilerX86Shared(JSContext *cx, TempAllocator &alloc, JSScript *script);
 };
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_shared_BaselineCompiler_x86_shared_h */
--- a/js/src/jit/x64/BaselineCompiler-x64.cpp
+++ b/js/src/jit/x64/BaselineCompiler-x64.cpp
@@ -4,12 +4,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/x64/BaselineCompiler-x64.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompilerX64::BaselineCompilerX64(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompilerX64::BaselineCompilerX64(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : BaselineCompilerX86Shared(cx, alloc, script)
 {
 }
--- a/js/src/jit/x64/BaselineCompiler-x64.h
+++ b/js/src/jit/x64/BaselineCompiler-x64.h
@@ -10,17 +10,17 @@
 #include "jit/shared/BaselineCompiler-x86-shared.h"
 
 namespace js {
 namespace jit {
 
 class BaselineCompilerX64 : public BaselineCompilerX86Shared
 {
   protected:
-    BaselineCompilerX64(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompilerX64(JSContext *cx, TempAllocator &alloc, JSScript *script);
 };
 
 typedef BaselineCompilerX64 BaselineCompilerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x64_BaselineCompiler_x64_h */
--- a/js/src/jit/x64/BaselineHelpers-x64.h
+++ b/js/src/jit/x64/BaselineHelpers-x64.h
@@ -136,42 +136,53 @@ EmitEnterStubFrame(MacroAssembler &masm,
 
     // Save old frame pointer, stack pointer and stub reg.
     masm.push(BaselineStubReg);
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
 }
 
 inline void
-EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+EmitLeaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon = false)
 {
     // Ion frames do not save and restore the frame pointer. If we called
     // into Ion, we have to restore the stack pointer from the frame descriptor.
     // If we performed a VM call, the descriptor has been popped already so
     // in that case we use the frame pointer.
     if (calledIntoIon) {
         masm.pop(ScratchReg);
         masm.shrq(Imm32(FRAMESIZE_SHIFT), ScratchReg);
         masm.addq(ScratchReg, BaselineStackReg);
     } else {
         masm.mov(BaselineFrameReg, BaselineStackReg);
     }
+}
 
+inline void
+EmitLeaveStubFrameCommonTail(MacroAssembler &masm)
+{
     masm.pop(BaselineFrameReg);
     masm.pop(BaselineStubReg);
 
     // Pop return address.
     masm.pop(BaselineTailCallReg);
 
     // Overwrite frame descriptor with return address, so that the stack matches
     // the state before entering the stub frame.
     masm.storePtr(BaselineTailCallReg, Address(BaselineStackReg, 0));
 }
 
 inline void
+EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+{
+    EmitLeaveStubFrameHead(masm, calledIntoIon);
+    EmitLeaveStubFrameCommonTail(masm);
+}
+
+inline void
 EmitStowICValues(MacroAssembler &masm, int values)
 {
     JS_ASSERT(values >= 0 && values <= 2);
     switch(values) {
       case 1:
         // Stow R0
         masm.pop(BaselineTailCallReg);
         masm.pushValue(R0);
--- a/js/src/jit/x86/BaselineCompiler-x86.cpp
+++ b/js/src/jit/x86/BaselineCompiler-x86.cpp
@@ -4,12 +4,12 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/x86/BaselineCompiler-x86.h"
 
 using namespace js;
 using namespace js::jit;
 
-BaselineCompilerX86::BaselineCompilerX86(JSContext *cx, TempAllocator &alloc, HandleScript script)
+BaselineCompilerX86::BaselineCompilerX86(JSContext *cx, TempAllocator &alloc, JSScript *script)
   : BaselineCompilerX86Shared(cx, alloc, script)
 {
 }
--- a/js/src/jit/x86/BaselineCompiler-x86.h
+++ b/js/src/jit/x86/BaselineCompiler-x86.h
@@ -10,17 +10,17 @@
 #include "jit/shared/BaselineCompiler-x86-shared.h"
 
 namespace js {
 namespace jit {
 
 class BaselineCompilerX86 : public BaselineCompilerX86Shared
 {
   protected:
-    BaselineCompilerX86(JSContext *cx, TempAllocator &alloc, HandleScript script);
+    BaselineCompilerX86(JSContext *cx, TempAllocator &alloc, JSScript *script);
 };
 
 typedef BaselineCompilerX86 BaselineCompilerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x86_BaselineCompiler_x86_h */
--- a/js/src/jit/x86/BaselineHelpers-x86.h
+++ b/js/src/jit/x86/BaselineHelpers-x86.h
@@ -141,43 +141,54 @@ EmitEnterStubFrame(MacroAssembler &masm,
 
     // Save old frame pointer, stack pointer and stub reg.
     masm.push(BaselineStubReg);
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
 }
 
 inline void
-EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+EmitLeaveStubFrameHead(MacroAssembler &masm, bool calledIntoIon = false)
 {
     // Ion frames do not save and restore the frame pointer. If we called
     // into Ion, we have to restore the stack pointer from the frame descriptor.
     // If we performed a VM call, the descriptor has been popped already so
     // in that case we use the frame pointer.
     if (calledIntoIon) {
         Register scratch = BaselineTailCallReg;
         masm.pop(scratch);
         masm.shrl(Imm32(FRAMESIZE_SHIFT), scratch);
         masm.addl(scratch, BaselineStackReg);
     } else {
         masm.mov(BaselineFrameReg, BaselineStackReg);
     }
+}
 
+inline void
+EmitLeaveStubFrameCommonTail(MacroAssembler &masm)
+{
     masm.pop(BaselineFrameReg);
     masm.pop(BaselineStubReg);
 
     // Pop return address.
     masm.pop(BaselineTailCallReg);
 
     // Overwrite frame descriptor with return address, so that the stack matches
     // the state before entering the stub frame.
     masm.storePtr(BaselineTailCallReg, Address(BaselineStackReg, 0));
 }
 
 inline void
+EmitLeaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false)
+{
+    EmitLeaveStubFrameHead(masm, calledIntoIon);
+    EmitLeaveStubFrameCommonTail(masm);
+}
+
+inline void
 EmitStowICValues(MacroAssembler &masm, int values)
 {
     JS_ASSERT(values >= 0 && values <= 2);
     switch(values) {
       case 1:
         // Stow R0
         masm.pop(BaselineTailCallReg);
         masm.pushValue(R0);
--- a/js/src/moz.build
+++ b/js/src/moz.build
@@ -241,16 +241,17 @@ if CONFIG['ENABLE_ION']:
         'jit/AsmJS.cpp',
         'jit/AsmJSLink.cpp',
         'jit/AsmJSModule.cpp',
         'jit/AsmJSSignalHandlers.cpp',
         'jit/BacktrackingAllocator.cpp',
         'jit/Bailouts.cpp',
         'jit/BaselineBailouts.cpp',
         'jit/BaselineCompiler.cpp',
+        'jit/BaselineDebugModeOSR.cpp',
         'jit/BaselineFrame.cpp',
         'jit/BaselineFrameInfo.cpp',
         'jit/BaselineIC.cpp',
         'jit/BaselineInspector.cpp',
         'jit/BaselineJIT.cpp',
         'jit/BitSet.cpp',
         'jit/BytecodeAnalysis.cpp',
         'jit/C1Spewer.cpp',
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1394,19 +1394,24 @@ class JitActivation : public Activation
     // frame if one doesn't already exist. A frame can only be rematerialized
     // if an IonFrameIterator pointing to the nearest uninlined frame can be
     // provided, as values need to be read out of snapshots.
     //
     // The inlineDepth must be within bounds of the frame pointed to by iter.
     RematerializedFrame *getRematerializedFrame(JSContext *cx, JitFrameIterator &iter,
                                                 size_t inlineDepth = 0);
 
-    // Look up a rematerialized frame by the fp.
+    // Look up a rematerialized frame by the fp. If inlineDepth is out of
+    // bounds of what has been rematerialized, nullptr is returned.
     RematerializedFrame *lookupRematerializedFrame(uint8_t *top, size_t inlineDepth = 0);
 
+    bool hasRematerializedFrame(uint8_t *top, size_t inlineDepth = 0) {
+        return !!lookupRematerializedFrame(top, inlineDepth);
+    }
+
     // Remove a previous rematerialization by fp.
     void removeRematerializedFrame(uint8_t *top);
 
     void markRematerializedFrames(JSTracer *trc);
 #endif
 };
 
 // A filtering of the ActivationIterator to only stop at JitActivations.