Backed out 8 changesets (bug 1057082) for ASAN failures on a CLOSED TREE
author: Wes Kocher <wkocher@mozilla.com>
date:   Wed, 14 Jan 2015 15:26:50 -0800
changeset 223876 6ec139c64468872dc5b8ab44747e2b0c85870d5d
parent 223875 4acf60209a948d5b01b666c4a12247fe966c6ee6
child 223877 75174df7140a8327472f9275bf6cbb137481e612
push id:   54057
push user: kwierso@gmail.com
push date: Wed, 14 Jan 2015 23:27:14 +0000
treeherder: mozilla-inbound@6ec139c64468
bugs:      1057082
milestone: 38.0a1
backs out: 0c1fabbfc58334488f82494c67e36d6e9a0dc48a
99213cacd671f96d6f0200e14f757ed15fc7f593
76d5287679469d1d4dab10a36960175d37b7f186
b1ad3dabec8f59cc7351e2f472e93bb1ff7b0b0c
f927b9899077326bd19ab79a1ea5a2cf732349f8
635e101ed2be856b2c74fb93fb6453373e2fba6f
a7d9f158838d963f08ab1284b74fe619fe299045
b1cb68eadf6c379f274ed95b2b8a23f63af00e3b
Backed out 8 changesets (bug 1057082) for ASAN failures on a CLOSED TREE

Backed out changeset 0c1fabbfc583 (bug 1057082)
Backed out changeset 99213cacd671 (bug 1057082)
Backed out changeset 76d528767946 (bug 1057082)
Backed out changeset b1ad3dabec8f (bug 1057082)
Backed out changeset f927b9899077 (bug 1057082)
Backed out changeset 635e101ed2be (bug 1057082)
Backed out changeset a7d9f158838d (bug 1057082)
Backed out changeset b1cb68eadf6c (bug 1057082)
js/public/ProfilingFrameIterator.h
js/public/ProfilingStack.h
js/src/asmjs/AsmJSFrameIterator.cpp
js/src/asmjs/AsmJSValidate.cpp
js/src/jit-test/tests/asm.js/testProfiling.js
js/src/jit/Bailouts.cpp
js/src/jit/Bailouts.h
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineCompiler.h
js/src/jit/BaselineFrame.cpp
js/src/jit/BaselineFrame.h
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineIC.h
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/CodeGenerator.cpp
js/src/jit/CodeGenerator.h
js/src/jit/CompileWrappers.cpp
js/src/jit/CompileWrappers.h
js/src/jit/Ion.cpp
js/src/jit/IonBuilder.cpp
js/src/jit/IonCaches.cpp
js/src/jit/IonCode.h
js/src/jit/IonInstrumentation.h
js/src/jit/JitCompartment.h
js/src/jit/JitFrameIterator-inl.h
js/src/jit/JitFrameIterator.h
js/src/jit/JitFrames.cpp
js/src/jit/JitFrames.h
js/src/jit/JitcodeMap.cpp
js/src/jit/JitcodeMap.h
js/src/jit/LIR-Common.h
js/src/jit/LOpcodes.h
js/src/jit/Lowering.cpp
js/src/jit/Lowering.h
js/src/jit/MIR.h
js/src/jit/MIRGenerator.h
js/src/jit/MOpcodes.h
js/src/jit/MacroAssembler.cpp
js/src/jit/MacroAssembler.h
js/src/jit/VMFunctions.cpp
js/src/jit/VMFunctions.h
js/src/jit/arm/CodeGenerator-arm.cpp
js/src/jit/arm/MacroAssembler-arm.cpp
js/src/jit/arm/MacroAssembler-arm.h
js/src/jit/arm/Trampoline-arm.cpp
js/src/jit/mips/CodeGenerator-mips.cpp
js/src/jit/mips/MacroAssembler-mips.cpp
js/src/jit/mips/MacroAssembler-mips.h
js/src/jit/mips/Trampoline-mips.cpp
js/src/jit/shared/Assembler-shared.h
js/src/jit/shared/BaselineCompiler-shared.cpp
js/src/jit/shared/BaselineCompiler-shared.h
js/src/jit/shared/CodeGenerator-shared.cpp
js/src/jit/shared/CodeGenerator-shared.h
js/src/jit/shared/CodeGenerator-x86-shared.cpp
js/src/jit/x64/MacroAssembler-x64.cpp
js/src/jit/x64/MacroAssembler-x64.h
js/src/jit/x64/Trampoline-x64.cpp
js/src/jit/x86/MacroAssembler-x86.cpp
js/src/jit/x86/MacroAssembler-x86.h
js/src/jit/x86/Trampoline-x86.cpp
js/src/shell/js.cpp
js/src/vm/Interpreter.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/SPSProfiler.cpp
js/src/vm/SPSProfiler.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
toolkit/devtools/server/tests/unit/test_profiler_data.js
tools/profiler/TableTicker.cpp
tools/profiler/tests/test_enterjit_osr.js
--- a/js/public/ProfilingFrameIterator.h
+++ b/js/public/ProfilingFrameIterator.h
@@ -14,63 +14,39 @@
 #include "js/Utility.h"
 
 class JSAtom;
 struct JSRuntime;
 
 namespace js {
     class Activation;
     class AsmJSProfilingFrameIterator;
-    namespace jit {
-        class JitActivation;
-        class JitProfilingFrameIterator;
-    }
 }
 
 namespace JS {
 
 // This iterator can be used to walk the stack of a thread suspended at an
 // arbitrary pc. To provide accurate results, profiling must have been enabled
 // (via EnableRuntimeProfilingStack) before executing the callstack being
 // unwound.
 class JS_PUBLIC_API(ProfilingFrameIterator)
 {
-    JSRuntime *rt_;
     js::Activation *activation_;
 
-    // When moving past a JitActivation, we need to save the prevJitTop
-    // from it to use as the exit-frame pointer when the next caller jit
-    // activation (if any) comes around.
-    void *savedPrevJitTop_;
-
     static const unsigned StorageSpace = 6 * sizeof(void*);
     mozilla::AlignedStorage<StorageSpace> storage_;
     js::AsmJSProfilingFrameIterator &asmJSIter() {
         MOZ_ASSERT(!done());
-        MOZ_ASSERT(isAsmJS());
         return *reinterpret_cast<js::AsmJSProfilingFrameIterator*>(storage_.addr());
     }
     const js::AsmJSProfilingFrameIterator &asmJSIter() const {
         MOZ_ASSERT(!done());
-        MOZ_ASSERT(isAsmJS());
         return *reinterpret_cast<const js::AsmJSProfilingFrameIterator*>(storage_.addr());
     }
 
-    js::jit::JitProfilingFrameIterator &jitIter() {
-        MOZ_ASSERT(!done());
-        MOZ_ASSERT(isJit());
-        return *reinterpret_cast<js::jit::JitProfilingFrameIterator*>(storage_.addr());
-    }
-
-    const js::jit::JitProfilingFrameIterator &jitIter() const {
-        MOZ_ASSERT(!done());
-        MOZ_ASSERT(isJit());
-        return *reinterpret_cast<const js::jit::JitProfilingFrameIterator*>(storage_.addr());
-    }
-
     void settle();
 
   public:
     struct RegisterState
     {
         RegisterState() : pc(nullptr), sp(nullptr), lr(nullptr) {}
         void *pc;
         void *sp;
@@ -84,38 +60,22 @@ class JS_PUBLIC_API(ProfilingFrameIterat
 
     // Assuming the stack grows down (we do), the return value:
     //  - always points into the stack
     //  - is weakly monotonically increasing (may be equal for successive frames)
     //  - will compare greater than newer native and pseudo-stack frame addresses
     //    and less than older native and pseudo-stack frame addresses
     void *stackAddress() const;
 
-    enum FrameKind
-    {
-      Frame_Baseline,
-      Frame_Ion,
-      Frame_AsmJS
-    };
-
-    struct Frame
-    {
-        FrameKind kind;
-        void *stackAddress;
-        void *returnAddress;
-        void *activation;
-        const char *label;
-    };
-    uint32_t extractStack(Frame *frames, uint32_t offset, uint32_t end) const;
+    // Return a label suitable for regexp-matching as performed by
+    // browser/devtools/profiler/cleopatra/js/parserWorker.js
+    const char *label() const;
 
   private:
     void iteratorConstruct(const RegisterState &state);
     void iteratorConstruct();
     void iteratorDestroy();
     bool iteratorDone();
-
-    bool isAsmJS() const;
-    bool isJit() const;
 };
 
 } // namespace JS
 
 #endif  /* js_ProfilingFrameIterator_h */
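
With the JitProfilingFrameIterator pieces backed out, the public iterator once again reports only asm.js frames through label(). For orientation, here is a minimal sketch (not part of the patch) of how a sampler might drive the restored API; it assumes the public constructor takes the runtime plus a captured RegisterState, as the profiler in tools/profiler/TableTicker.cpp does, and RecordSample is a hypothetical consumer:

    #include "js/ProfilingFrameIterator.h"

    void RecordSample(const char *label, void *stackAddress);  // hypothetical sink

    void SampleSuspendedThread(JSRuntime *rt,
                               const JS::ProfilingFrameIterator::RegisterState &state)
    {
        // Walk from the newest frame outward; stackAddress() is weakly
        // monotonically increasing, so samples can be merged with native frames.
        for (JS::ProfilingFrameIterator iter(rt, state); !iter.done(); ++iter)
            RecordSample(iter.label(), iter.stackAddress());
    }
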
--- a/js/public/ProfilingStack.h
+++ b/js/public/ProfilingStack.h
@@ -55,39 +55,34 @@ class ProfileEntry
         // a JS frame is assumed by default. You're not allowed to publicly
         // change the frame type. Instead, call `setJsFrame` or `setCppFrame`.
         IS_CPP_ENTRY = 0x01,
 
         // Indicate that copying the frame label is not necessary when taking a
         // sample of the pseudostack.
         FRAME_LABEL_COPY = 0x02,
 
-        // This ProfileEntry is a dummy entry indicating the start of a run
-        // of JS pseudostack entries.
-        BEGIN_PSEUDO_JS = 0x04,
-
-        // This flag is used to indicate that an interpreter JS entry has OSR-ed
-        // into baseline.
-        OSR = 0x08,
+        // This ProfileEntry was pushed immediately before calling into asm.js.
+        ASMJS = 0x04,
 
         // Mask for removing all flags except the category information.
-        CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~BEGIN_PSEUDO_JS & ~OSR
+        CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~ASMJS
     };
 
     // Keep these in sync with browser/devtools/profiler/utils/global.js
     MOZ_BEGIN_NESTED_ENUM_CLASS(Category, uint32_t)
-        OTHER    = 0x10,
-        CSS      = 0x20,
-        JS       = 0x40,
-        GC       = 0x80,
-        CC       = 0x100,
-        NETWORK  = 0x200,
-        GRAPHICS = 0x400,
-        STORAGE  = 0x800,
-        EVENTS   = 0x1000,
+        OTHER    = 0x08,
+        CSS      = 0x10,
+        JS       = 0x20,
+        GC       = 0x40,
+        CC       = 0x80,
+        NETWORK  = 0x100,
+        GRAPHICS = 0x200,
+        STORAGE  = 0x400,
+        EVENTS   = 0x800,
 
         FIRST    = OTHER,
         LAST     = EVENTS
     MOZ_END_NESTED_ENUM_CLASS(Category)
 
     // All of these methods are marked with the 'volatile' keyword because SPS's
     // representation of the stack is stored such that all ProfileEntry
     // instances are volatile. These methods would not be available unless they
@@ -126,28 +121,16 @@ class ProfileEntry
 
     uint32_t flags() const volatile {
         return flags_;
     }
     uint32_t category() const volatile {
         return flags_ & CATEGORY_MASK;
     }
 
-    void setOSR() volatile {
-        MOZ_ASSERT(isJs());
-        setFlag(OSR);
-    }
-    void unsetOSR() volatile {
-        MOZ_ASSERT(isJs());
-        unsetFlag(OSR);
-    }
-    bool isOSR() const volatile {
-        return hasFlag(OSR);
-    }
-
     void *stackAddress() const volatile {
         MOZ_ASSERT(!isJs());
         return spOrScript;
     }
     JSScript *script() const volatile {
         MOZ_ASSERT(isJs());
         return (JSScript *)spOrScript;
     }
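
The restored flag and category values share a single flags_ word, so CATEGORY_MASK simply clears the three low flag bits. A small illustrative sketch using constants copied from the enums above (not code from the patch):

    #include <cstdint>

    constexpr uint32_t IS_CPP_ENTRY     = 0x01;
    constexpr uint32_t FRAME_LABEL_COPY = 0x02;
    constexpr uint32_t ASMJS            = 0x04;
    constexpr uint32_t CATEGORY_MASK    = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~ASMJS;
    constexpr uint32_t CATEGORY_GC      = 0x40;   // ProfileEntry::Category::GC

    // Pack a C++ frame tagged with the GC category, then recover the category
    // the same way ProfileEntry::category() does.
    constexpr uint32_t flags = IS_CPP_ENTRY | CATEGORY_GC;             // 0x41
    static_assert((flags & CATEGORY_MASK) == CATEGORY_GC, "category() yields GC");
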
--- a/js/src/asmjs/AsmJSFrameIterator.cpp
+++ b/js/src/asmjs/AsmJSFrameIterator.cpp
@@ -409,26 +409,16 @@ js::GenerateAsmJSExitEpilogue(MacroAssem
 AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation &activation)
   : module_(&activation.module()),
     callerFP_(nullptr),
     callerPC_(nullptr),
     stackAddress_(nullptr),
     exitReason_(AsmJSExit::None),
     codeRange_(nullptr)
 {
-    // If profiling hasn't been enabled for this module, then CallerFPFromFP
-    // will be trash, so ignore the entire activation. In practice, this only
-    // happens if profiling is enabled while module->active() (in this case,
-    // profiling will be enabled when the module becomes inactive and gets
-    // called again).
-    if (!module_->profilingEnabled()) {
-        MOZ_ASSERT(done());
-        return;
-    }
-
     initFromFP(activation);
 }
 
 static inline void
 AssertMatchesCallSite(const AsmJSModule &module, const AsmJSModule::CodeRange *calleeCodeRange,
                       void *callerPC, void *callerFP, void *fp)
 {
 #ifdef DEBUG
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -8504,59 +8504,32 @@ GenerateFFIIonExit(ModuleCompiler &m, co
 
         // The following is inlined:
         //   JSContext *cx = activation->cx();
         //   Activation *act = cx->mainThread().activation();
         //   act.active_ = true;
         //   act.prevJitTop_ = cx->mainThread().jitTop;
         //   act.prevJitJSContext_ = cx->mainThread().jitJSContext;
         //   cx->mainThread().jitJSContext = cx;
-        //   act.prevJitActivation_ = cx->mainThread().jitActivation;
-        //   cx->mainThread().jitActivation = act;
-        //   act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
-        //   cx->mainThread().profilingActivation_ = act;
         // On the ARM store8() uses the secondScratchReg (lr) as a temp.
         size_t offsetOfActivation = offsetof(JSRuntime, mainThread) +
                                     PerThreadData::offsetOfActivation();
         size_t offsetOfJitTop = offsetof(JSRuntime, mainThread) + offsetof(PerThreadData, jitTop);
         size_t offsetOfJitJSContext = offsetof(JSRuntime, mainThread) +
                                       offsetof(PerThreadData, jitJSContext);
-        size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
-                                       offsetof(PerThreadData, jitActivation);
-        size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
-                                             PerThreadData::offsetOfProfilingActivation();
         masm.loadAsmJSActivation(reg0);
         masm.loadPtr(Address(reg0, AsmJSActivation::offsetOfContext()), reg3);
         masm.loadPtr(Address(reg3, JSContext::offsetOfRuntime()), reg0);
         masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
-
-        //   act.active_ = true;
         masm.store8(Imm32(1), Address(reg1, JitActivation::offsetOfActiveUint8()));
-
-        //   act.prevJitTop_ = cx->mainThread().jitTop;
         masm.loadPtr(Address(reg0, offsetOfJitTop), reg2);
         masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitTop()));
-
-        //   act.prevJitJSContext_ = cx->mainThread().jitJSContext;
         masm.loadPtr(Address(reg0, offsetOfJitJSContext), reg2);
         masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitJSContext()));
-        //   cx->mainThread().jitJSContext = cx;
         masm.storePtr(reg3, Address(reg0, offsetOfJitJSContext));
-
-        //   act.prevJitActivation_ = cx->mainThread().jitActivation;
-        masm.loadPtr(Address(reg0, offsetOfJitActivation), reg2);
-        masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitActivation()));
-        //   cx->mainThread().jitActivation = act;
-        masm.storePtr(reg1, Address(reg0, offsetOfJitActivation));
-
-        //   act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
-        masm.loadPtr(Address(reg0, offsetOfProfilingActivation), reg2);
-        masm.storePtr(reg2, Address(reg1, Activation::offsetOfPrevProfiling()));
-        //   cx->mainThread().profilingActivation_ = act;
-        masm.storePtr(reg1, Address(reg0, offsetOfProfilingActivation));
     }
 
     // 2. Call
     AssertStackAlignment(masm, AsmJSStackAlignment);
     masm.callJitFromAsmJS(callee);
     AssertStackAlignment(masm, AsmJSStackAlignment);
 
     {
@@ -8566,53 +8539,32 @@ GenerateFFIIonExit(ModuleCompiler &m, co
         // JSReturnReg_Type, so there are five live registers.
         MOZ_ASSERT(JSReturnReg_Data == AsmJSIonExitRegReturnData);
         MOZ_ASSERT(JSReturnReg_Type == AsmJSIonExitRegReturnType);
         Register reg0 = AsmJSIonExitRegD0;
         Register reg1 = AsmJSIonExitRegD1;
         Register reg2 = AsmJSIonExitRegD2;
 
         // The following is inlined:
-        //   rt->mainThread.profilingActivation = prevProfilingActivation_;
         //   rt->mainThread.activation()->active_ = false;
         //   rt->mainThread.jitTop = prevJitTop_;
         //   rt->mainThread.jitJSContext = prevJitJSContext_;
-        //   rt->mainThread.jitActivation = prevJitActivation_;
         // On the ARM store8() uses the secondScratchReg (lr) as a temp.
         size_t offsetOfActivation = offsetof(JSRuntime, mainThread) +
                                     PerThreadData::offsetOfActivation();
         size_t offsetOfJitTop = offsetof(JSRuntime, mainThread) + offsetof(PerThreadData, jitTop);
         size_t offsetOfJitJSContext = offsetof(JSRuntime, mainThread) +
                                       offsetof(PerThreadData, jitJSContext);
-        size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
-                                       offsetof(PerThreadData, jitActivation);
-        size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
-                                             PerThreadData::offsetOfProfilingActivation();
-
         masm.movePtr(AsmJSImmPtr(AsmJSImm_Runtime), reg0);
         masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
-
-        //   rt->mainThread.jitTop = prevJitTop_;
+        masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
         masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitTop()), reg2);
         masm.storePtr(reg2, Address(reg0, offsetOfJitTop));
-
-        //   rt->mainThread.profilingActivation = rt->mainThread.activation()->prevProfiling_;
-        masm.loadPtr(Address(reg1, Activation::offsetOfPrevProfiling()), reg2);
-        masm.storePtr(reg2, Address(reg0, offsetOfProfilingActivation));
-
-        //   rt->mainThread.activation()->active_ = false;
-        masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
-
-        //   rt->mainThread.jitJSContext = prevJitJSContext_;
         masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitJSContext()), reg2);
         masm.storePtr(reg2, Address(reg0, offsetOfJitJSContext));
-
-        //   rt->mainThread.jitActivation = prevJitActivation_;
-        masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitActivation()), reg2);
-        masm.storePtr(reg2, Address(reg0, offsetOfJitActivation));
     }
 
     MOZ_ASSERT(masm.framePushed() == framePushed);
 
     // Reload the global register since Ion code can clobber any register.
 #if defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_MIPS)
     JS_STATIC_ASSERT(MaybeSavedGlobalReg > 0);
     masm.loadPtr(Address(StackPointer, savedGlobalOffset), GlobalReg);
--- a/js/src/jit-test/tests/asm.js/testProfiling.js
+++ b/js/src/jit-test/tests/asm.js/testProfiling.js
@@ -1,181 +1,143 @@
 load(libdir + "asm.js");
 load(libdir + "asserts.js");
 
 // Single-step profiling currently only works in the ARM simulator
 if (!getBuildConfiguration()["arm-simulator"])
     quit();
 
-function checkSubSequence(got, expect)
+function assertEqualStacks(got, expect)
 {
-    var got_i = 0;
-    EXP: for (var exp_i = 0; exp_i < expect.length; exp_i++) {
-        var item = expect[exp_i];
-        // Scan for next match in got.
-        while (got_i < got.length) {
-            if (got[got_i++] == expect[exp_i])
-                continue EXP;
-        }
-        print("MISMATCH: " + got.join(",") + "\n" +
-              "    VS    " + expect.join(","));
-        return false;
-    }
-    return true;
-}
-
-function assertStackContainsSeq(got, expect)
-{
-    var normalized = [];
+    // Strip off the " (script/library info)"
+    got = String(got).replace(/ \([^\)]*\)/g, "");
 
-    for (var i = 0; i < got.length; i++) {
-        if (got[i].length == 0)
-            continue;
-        var parts = got[i].split(',');
-        for (var j = 0; j < parts.length; j++) {
-            var frame = parts[j];
-            frame = frame.replace(/ \([^\)]*\)/g, "");
-            frame = frame.replace(/(fast|slow) FFI trampoline/g, "<");
-            frame = frame.replace(/entry trampoline/g, ">");
-            frame = frame.replace(/(\/[^\/,<]+)*\/testProfiling.js/g, "");
-            frame = frame.replace(/testBuiltinD2D/g, "");
-            frame = frame.replace(/testBuiltinF2F/g, "");
-            frame = frame.replace(/testBuiltinDD2D/g, "");
-            frame = frame.replace(/assertThrowsInstanceOf/g, "");
-            frame = frame.replace(/^ffi[12]?/g, "");
-            normalized.push(frame);
-        }
-    }
+    // Shorten FFI/entry trampolines
+    got = got.replace(/(fast|slow) FFI trampoline/g, "<").replace(/entry trampoline/g, ">");
 
-    var gotNorm = normalized.join(',').replace(/,+/g, ",");
-    gotNorm = gotNorm.replace(/^,/, "").replace(/,$/, "");
-
-    assertEq(checkSubSequence(gotNorm.split(','), expect.split(',')), true);
+    assertEq(got, expect);
 }
 
 // Test profiling enablement while asm.js is running.
 var stacks;
 var ffi = function(enable) {
     if (enable == +1)
         enableSPSProfiling();
     if (enable == -1)
         disableSPSProfiling();
     enableSingleStepProfiling();
     stacks = disableSingleStepProfiling();
 }
 var f = asmLink(asmCompile('global','ffis',USE_ASM + "var ffi=ffis.ffi; function g(i) { i=i|0; ffi(i|0) } function f(i) { i=i|0; g(i|0) } return f"), null, {ffi});
 f(0);
-assertStackContainsSeq(stacks, "", true);
+assertEqualStacks(stacks, "");
 f(+1);
-assertStackContainsSeq(stacks, "", true);
+assertEqualStacks(stacks, "");
 f(0);
-assertStackContainsSeq(stacks, "<,g,f,>", true);
+assertEqualStacks(stacks, "<gf>");
 f(-1);
-assertStackContainsSeq(stacks, "<,g,f,>", true);
+assertEqualStacks(stacks, "<gf>");
 f(0);
-assertStackContainsSeq(stacks, "", true);
+assertEqualStacks(stacks, "");
 
 // Enable profiling for the rest of the tests.
 enableSPSProfiling();
 
 var f = asmLink(asmCompile(USE_ASM + "function f() { return 42 } return f"));
 enableSingleStepProfiling();
 assertEq(f(), 42);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,>");
+assertEqualStacks(stacks, ",>,f>,>,");
 
 var f = asmLink(asmCompile(USE_ASM + "function g(i) { i=i|0; return (i+1)|0 } function f() { return g(42)|0 } return f"));
 enableSingleStepProfiling();
 assertEq(f(), 43);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,g,f,>,f,>,>");
+assertEqualStacks(stacks, ",>,f>,gf>,f>,>,");
 
 var f = asmLink(asmCompile(USE_ASM + "function g1() { return 1 } function g2() { return 2 } function f(i) { i=i|0; return TBL[i&1]()|0 } var TBL=[g1,g2]; return f"));
 enableSingleStepProfiling();
 assertEq(f(0), 1);
 assertEq(f(1), 2);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,g1,f,>,f,>,>,>,f,>,g2,f,>,f,>,>");
+assertEqualStacks(stacks, ",>,f>,g1f>,f>,>,,>,f>,g2f>,f>,>,");
 
 function testBuiltinD2D(name) {
     var f = asmLink(asmCompile('g', USE_ASM + "var fun=g.Math." + name + "; function f(d) { d=+d; return +fun(d) } return f"), this);
     enableSingleStepProfiling();
     assertEq(f(.1), eval("Math." + name + "(.1)"));
     var stacks = disableSingleStepProfiling();
-    assertStackContainsSeq(stacks, ">,f,>,Math." + name + ",f,>,f,>,>");
+    assertEqualStacks(stacks, ",>,f>,Math." + name + "f>,f>,>,");
 }
 for (name of ['sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'ceil', 'floor', 'exp', 'log'])
     testBuiltinD2D(name);
 function testBuiltinF2F(name) {
     var f = asmLink(asmCompile('g', USE_ASM + "var tof=g.Math.fround; var fun=g.Math." + name + "; function f(d) { d=tof(d); return tof(fun(d)) } return f"), this);
     enableSingleStepProfiling();
     assertEq(f(.1), eval("Math.fround(Math." + name + "(Math.fround(.1)))"));
     var stacks = disableSingleStepProfiling();
-    assertStackContainsSeq(stacks, ">,f,>,Math." + name + ",f,>,f,>,>");
+    assertEqualStacks(stacks, ",>,f>,Math." + name + "f>,f>,>,");
 }
 for (name of ['ceil', 'floor'])
     testBuiltinF2F(name);
 function testBuiltinDD2D(name) {
     var f = asmLink(asmCompile('g', USE_ASM + "var fun=g.Math." + name + "; function f(d, e) { d=+d; e=+e; return +fun(d,e) } return f"), this);
     enableSingleStepProfiling();
     assertEq(f(.1, .2), eval("Math." + name + "(.1, .2)"));
     var stacks = disableSingleStepProfiling();
-    assertStackContainsSeq(stacks, ">,f,>,Math." + name + ",f,>,f,>,>");
+    assertEqualStacks(stacks, ",>,f>,Math." + name + "f>,f>,>,");
 }
 for (name of ['atan2', 'pow'])
     testBuiltinDD2D(name);
 
 // FFI tests:
 setJitCompilerOption("ion.warmup.trigger", 10);
 setJitCompilerOption("baseline.warmup.trigger", 0);
 setJitCompilerOption("offthread-compilation.enable", 0);
 
 var ffi1 = function() { return 10 }
 var ffi2 = function() { return 73 }
 var f = asmLink(asmCompile('g','ffis', USE_ASM + "var ffi1=ffis.ffi1, ffi2=ffis.ffi2; function f() { return ((ffi1()|0) + (ffi2()|0))|0 } return f"), null, {ffi1,ffi2});
 // Interpreter FFI exit
 enableSingleStepProfiling();
 assertEq(f(), 83);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,<,f,>,f,>,<,f,>,f,>,>");
-
+assertEqualStacks(stacks, ",>,f>,<f>,f>,<f>,f>,>,");
+// Ion FFI exit
 for (var i = 0; i < 20; i++)
     assertEq(f(), 83);
 enableSingleStepProfiling();
 assertEq(f(), 83);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,<,f,>,f,>,<,f,>,f,>,>");
+assertEqualStacks(stacks, ",>,f>,<f>,f>,<f>,f>,>,");
 
 var ffi1 = function() { return 15 }
 var ffi2 = function() { return f2() + 17 }
 var {f1,f2} = asmLink(asmCompile('g','ffis', USE_ASM + "var ffi1=ffis.ffi1, ffi2=ffis.ffi2; function f2() { return ffi1()|0 } function f1() { return ffi2()|0 } return {f1:f1, f2:f2}"), null, {ffi1, ffi2});
 // Interpreter FFI exit
 enableSingleStepProfiling();
 assertEq(f1(), 32);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f1,>,<,f1,>,>,<,f1,>,f2,>,<,f1,>,<,f2,>,<,f1,>,f2,>,<,f1,>,>,<,f1,>,<,f1,>,f1,>,>");
-
-
+assertEqualStacks(stacks, ",>,f1>,<f1>,><f1>,f2><f1>,<f2><f1>,f2><f1>,><f1>,<f1>,f1>,>,");
 // Ion FFI exit
 for (var i = 0; i < 20; i++)
     assertEq(f1(), 32);
 enableSingleStepProfiling();
 assertEq(f1(), 32);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f1,>,<,f1,>,>,<,f1,>,f2,>,<,f1,>,<,f2,>,<,f1,>,f2,>,<,f1,>,>,<,f1,>,<,f1,>,f1,>,>");
-
+assertEqualStacks(stacks, ",>,f1>,<f1>,><f1>,f2><f1>,<f2><f1>,f2><f1>,><f1>,<f1>,f1>,>,");
 
 // Detachment exit
 var buf = new ArrayBuffer(BUF_CHANGE_MIN);
 var ffi = function() { neuter(buf, 'change-data') }
 var f = asmLink(asmCompile('g','ffis','buf', USE_ASM + 'var ffi = ffis.ffi; var i32 = new g.Int32Array(buf); function f() { ffi() } return f'), this, {ffi:ffi}, buf);
 enableSingleStepProfiling();
 assertThrowsInstanceOf(f, InternalError);
 var stacks = disableSingleStepProfiling();
-assertStackContainsSeq(stacks, ">,f,>,<,f,>,inline stub,f,>,<,f,>,inline stub,f,>");
+assertEqualStacks(stacks, ",>,f>,<f>,inline stubf>,<f>,inline stubf>,");
 
 // This takes forever to run.
 // Stack-overflow exit test
 //var limit = -1;
 //var maxct = 0;
 //function ffi(ct) { if (ct == limit) { enableSingleStepProfiling(); print("enabled"); } maxct = ct; }
 //var f = asmLink(asmCompile('g', 'ffis',USE_ASM + "var ffi=ffis.ffi; var ct=0; function rec(){ ct=(ct+1)|0; ffi(ct|0); rec() } function f() { ct=0; rec() } return f"), null, {ffi});
 //// First find the stack limit:
--- a/js/src/jit/Bailouts.cpp
+++ b/js/src/jit/Bailouts.cpp
@@ -35,72 +35,65 @@ jit::Bailout(BailoutStack *sp, BaselineB
                IsInRange(FAKE_JIT_TOP_FOR_BAILOUT + sizeof(CommonFrameLayout), 0, 0x1000),
                "Fake jitTop pointer should be within the first page.");
     cx->mainThread().jitTop = FAKE_JIT_TOP_FOR_BAILOUT;
 
     JitActivationIterator jitActivations(cx->runtime());
     BailoutFrameInfo bailoutData(jitActivations, sp);
     JitFrameIterator iter(jitActivations);
     MOZ_ASSERT(!iter.ionScript()->invalidated());
-    CommonFrameLayout *currentFramePtr = iter.current();
 
     TraceLoggerThread *logger = TraceLoggerForMainThread(cx->runtime());
     TraceLogTimestamp(logger, TraceLogger_Bailout);
 
     JitSpew(JitSpew_IonBailouts, "Took bailout! Snapshot offset: %d", iter.snapshotOffset());
 
     MOZ_ASSERT(IsBaselineEnabled(cx));
 
     *bailoutInfo = nullptr;
+    bool poppedLastSPSFrame = false;
     uint32_t retval = BailoutIonToBaseline(cx, bailoutData.activation(), iter, false, bailoutInfo,
-                                           /* excInfo = */ nullptr);
+                                           /* excInfo = */ nullptr, &poppedLastSPSFrame);
     MOZ_ASSERT(retval == BAILOUT_RETURN_OK ||
                retval == BAILOUT_RETURN_FATAL_ERROR ||
                retval == BAILOUT_RETURN_OVERRECURSED);
     MOZ_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != nullptr);
 
     if (retval != BAILOUT_RETURN_OK) {
+        // If the bailout failed, then bailout trampoline will pop the
+        // current frame and jump straight to exception handling code when
+        // this function returns.  Any SPS entry pushed for this frame will
+        // be silently forgotten.
+        //
+        // We call ExitScript here to ensure that if the ionScript had SPS
+        // instrumentation, then the SPS entry for it is popped.
+        //
+        // However, if the bailout was during argument check, then a
+        // pseudostack frame would not have been pushed in the first
+        // place, so don't pop anything in that case.
+        bool popSPSFrame = iter.ionScript()->hasSPSInstrumentation() &&
+                           (SnapshotIterator(iter).bailoutKind() != Bailout_ArgumentCheck) &&
+                           !poppedLastSPSFrame;
         JSScript *script = iter.script();
-        probes::ExitScript(cx, script, script->functionNonDelazifying(),
-                           /* popSPSFrame = */ false);
+        probes::ExitScript(cx, script, script->functionNonDelazifying(), popSPSFrame);
 
         EnsureExitFrame(iter.jsFrame());
     }
 
     // This condition was wrong when we entered this bailout function, but it
     // might be true now. A GC might have reclaimed all the Jit code and
     // invalidated all frames which are currently on the stack. As we are
     // already in a bailout, we could not switch to an invalidation
     // bailout. When the code of an IonScript which is on the stack is
     // invalidated (see InvalidateActivation), we remove references to it and
     // increment the reference counter for each activation that appear on the
     // stack. As the bailed frame is one of them, we have to decrement it now.
     if (iter.ionScript()->invalidated())
         iter.ionScript()->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
 
-    // NB: Commentary on how |lastProfilingFrame| is set from bailouts.
-    //
-    // Once we return to jitcode, any following frames might get clobbered,
-    // but the current frame will not (as it will be clobbered "in-place"
-    // with a baseline frame that will share the same frame prefix).
-    // However, there may be multiple baseline frames unpacked from this
-    // single Ion frame, which means we will need to once again reset
-    // |lastProfilingFrame| to point to the correct unpacked last frame
-    // in |FinishBailoutToBaseline|.
-    //
-    // In the case of error, the jitcode will jump immediately to an
-    // exception handler, which will unwind the frames and properly set
-    // the |lastProfilingFrame| to point to the frame being resumed into
-    // (see |AutoResetLastProfilerFrameOnReturnFromException|).
-    //
-    // In both cases, we want to temporarily set the |lastProfilingFrame|
-    // to the current frame being bailed out, and then fix it up later.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-        cx->mainThread().jitActivation->setLastProfilingFrame(currentFramePtr);
-
     return retval;
 }
 
 uint32_t
 jit::InvalidationBailout(InvalidationBailoutStack *sp, size_t *frameSizeOut,
                          BaselineBailoutInfo **bailoutInfo)
 {
     sp->checkInvariants();
@@ -108,31 +101,31 @@ jit::InvalidationBailout(InvalidationBai
     JSContext *cx = GetJSContextFromJitCode();
 
     // We don't have an exit frame.
     cx->mainThread().jitTop = FAKE_JIT_TOP_FOR_BAILOUT;
 
     JitActivationIterator jitActivations(cx->runtime());
     BailoutFrameInfo bailoutData(jitActivations, sp);
     JitFrameIterator iter(jitActivations);
-    CommonFrameLayout *currentFramePtr = iter.current();
 
     TraceLoggerThread *logger = TraceLoggerForMainThread(cx->runtime());
     TraceLogTimestamp(logger, TraceLogger_Invalidation);
 
     JitSpew(JitSpew_IonBailouts, "Took invalidation bailout! Snapshot offset: %d", iter.snapshotOffset());
 
     // Note: the frame size must be computed before we return from this function.
     *frameSizeOut = iter.frameSize();
 
     MOZ_ASSERT(IsBaselineEnabled(cx));
 
     *bailoutInfo = nullptr;
+    bool poppedLastSPSFrame = false;
     uint32_t retval = BailoutIonToBaseline(cx, bailoutData.activation(), iter, true, bailoutInfo,
-                                           /* excInfo = */ nullptr);
+                                           /* excInfo = */ nullptr, &poppedLastSPSFrame);
     MOZ_ASSERT(retval == BAILOUT_RETURN_OK ||
                retval == BAILOUT_RETURN_FATAL_ERROR ||
                retval == BAILOUT_RETURN_OVERRECURSED);
     MOZ_ASSERT_IF(retval == BAILOUT_RETURN_OK, *bailoutInfo != nullptr);
 
     if (retval != BAILOUT_RETURN_OK) {
         // If the bailout failed, then bailout trampoline will pop the
         // current frame and jump straight to exception handling code when
@@ -140,19 +133,21 @@ jit::InvalidationBailout(InvalidationBai
         // be silently forgotten.
         //
         // We call ExitScript here to ensure that if the ionScript had SPS
         // instrumentation, then the SPS entry for it is popped.
         //
         // However, if the bailout was during argument check, then a
         // pseudostack frame would not have been pushed in the first
         // place, so don't pop anything in that case.
+        bool popSPSFrame = iter.ionScript()->hasSPSInstrumentation() &&
+                           (SnapshotIterator(iter).bailoutKind() != Bailout_ArgumentCheck) &&
+                           !poppedLastSPSFrame;
         JSScript *script = iter.script();
-        probes::ExitScript(cx, script, script->functionNonDelazifying(),
-                           /* popSPSFrame = */ false);
+        probes::ExitScript(cx, script, script->functionNonDelazifying(), popSPSFrame);
 
         JitFrameLayout *frame = iter.jsFrame();
         JitSpew(JitSpew_IonInvalidate, "Bailout failed (%s): converting to exit frame",
                 (retval == BAILOUT_RETURN_FATAL_ERROR) ? "Fatal Error" : "Over Recursion");
         JitSpew(JitSpew_IonInvalidate, "   orig calleeToken %p", (void *) frame->calleeToken());
         JitSpew(JitSpew_IonInvalidate, "   orig frameSize %u", unsigned(frame->prevFrameLocalSize()));
         JitSpew(JitSpew_IonInvalidate, "   orig ra %p", (void *) frame->returnAddress());
 
@@ -161,20 +156,16 @@ jit::InvalidationBailout(InvalidationBai
 
         JitSpew(JitSpew_IonInvalidate, "   new  calleeToken %p", (void *) frame->calleeToken());
         JitSpew(JitSpew_IonInvalidate, "   new  frameSize %u", unsigned(frame->prevFrameLocalSize()));
         JitSpew(JitSpew_IonInvalidate, "   new  ra %p", (void *) frame->returnAddress());
     }
 
     iter.ionScript()->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
 
-    // Make the frame being bailed out the top profiled frame.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-        cx->mainThread().jitActivation->setLastProfilingFrame(currentFramePtr);
-
     return retval;
 }
 
 BailoutFrameInfo::BailoutFrameInfo(const JitActivationIterator &activations,
                                    const JitFrameIterator &frame)
   : machine_(frame.machineState())
 {
     framePointer_ = (uint8_t *) frame.fp();
@@ -185,34 +176,33 @@ BailoutFrameInfo::BailoutFrameInfo(const
     const OsiIndex *osiIndex = frame.osiIndex();
     snapshotOffset_ = osiIndex->snapshotOffset();
 }
 
 uint32_t
 jit::ExceptionHandlerBailout(JSContext *cx, const InlineFrameIterator &frame,
                              ResumeFromException *rfe,
                              const ExceptionBailoutInfo &excInfo,
-                             bool *overrecursed)
+                             bool *overrecursed, bool *poppedLastSPSFrameOut)
 {
     // We can be propagating debug mode exceptions without there being an
     // actual exception pending. For instance, when we return false from an
     // operation callback like a timeout handler.
     MOZ_ASSERT_IF(!excInfo.propagatingIonExceptionForDebugMode(), cx->isExceptionPending());
 
     cx->mainThread().jitTop = FAKE_JIT_TOP_FOR_BAILOUT;
     gc::AutoSuppressGC suppress(cx);
 
     JitActivationIterator jitActivations(cx->runtime());
     BailoutFrameInfo bailoutData(jitActivations, frame.frame());
     JitFrameIterator iter(jitActivations);
-    CommonFrameLayout *currentFramePtr = iter.current();
 
     BaselineBailoutInfo *bailoutInfo = nullptr;
     uint32_t retval = BailoutIonToBaseline(cx, bailoutData.activation(), iter, true,
-                                           &bailoutInfo, &excInfo);
+                                           &bailoutInfo, &excInfo, poppedLastSPSFrameOut);
 
     if (retval == BAILOUT_RETURN_OK) {
         MOZ_ASSERT(bailoutInfo);
 
         // Overwrite the kind so HandleException after the bailout returns
         // false, jumping directly to the exception tail.
         if (excInfo.propagatingIonExceptionForDebugMode())
             bailoutInfo->bailoutKind = Bailout_IonExceptionDebugMode;
@@ -231,20 +221,16 @@ jit::ExceptionHandlerBailout(JSContext *
             cx->clearPendingException();
 
         if (retval == BAILOUT_RETURN_OVERRECURSED)
             *overrecursed = true;
         else
             MOZ_ASSERT(retval == BAILOUT_RETURN_FATAL_ERROR);
     }
 
-    // Make the frame being bailed out the top profiled frame.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-        cx->mainThread().jitActivation->setLastProfilingFrame(currentFramePtr);
-
     return retval;
 }
 
 // Initialize the decl env Object, call object, and any arguments obj of the current frame.
 bool
 jit::EnsureHasScopeObjects(JSContext *cx, AbstractFramePtr fp)
 {
     if (fp.isFunctionFrame() &&
--- a/js/src/jit/Bailouts.h
+++ b/js/src/jit/Bailouts.h
@@ -204,17 +204,17 @@ class ExceptionBailoutInfo
     }
 };
 
 // Called from the exception handler to enter a catch or finally block.
 // Returns a BAILOUT_* error code.
 uint32_t ExceptionHandlerBailout(JSContext *cx, const InlineFrameIterator &frame,
                                  ResumeFromException *rfe,
                                  const ExceptionBailoutInfo &excInfo,
-                                 bool *overrecursed);
+                                 bool *overrecursed, bool *poppedLastSPSFrameOut);
 
 uint32_t FinishBailoutToBaseline(BaselineBailoutInfo *bailoutInfo);
 
 bool CheckFrequentBailouts(JSContext *cx, JSScript *script);
 
 } // namespace jit
 } // namespace js
 
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -546,23 +546,26 @@ HasLiveIteratorAtStackDepth(JSScript *sc
 //                      |  ReturnAddr   | <-- return into ArgumentsRectifier after call
 //                      +===============+
 //
 static bool
 InitFromBailout(JSContext *cx, HandleScript caller, jsbytecode *callerPC,
                 HandleFunction fun, HandleScript script, IonScript *ionScript,
                 SnapshotIterator &iter, bool invalidate, BaselineStackBuilder &builder,
                 AutoValueVector &startFrameFormals, MutableHandleFunction nextCallee,
-                jsbytecode **callPC, const ExceptionBailoutInfo *excInfo)
+                jsbytecode **callPC, const ExceptionBailoutInfo *excInfo,
+                bool *poppedLastSPSFrameOut)
 {
     // The Baseline frames we will reconstruct on the heap are not rooted, so GC
     // must be suppressed here.
     MOZ_ASSERT(cx->mainThread().suppressGC);
 
     MOZ_ASSERT(script->hasBaselineScript());
+    MOZ_ASSERT(poppedLastSPSFrameOut);
+    MOZ_ASSERT(!*poppedLastSPSFrameOut);
 
     // Are we catching an exception?
     bool catchingException = excInfo && excInfo->catchingException();
 
     // If we are catching an exception, we are bailing out to a catch or
     // finally block and this is the frame where we will resume. Usually the
     // expression stack should be empty in this case but there can be
     // iterators on the stack.
@@ -618,16 +621,26 @@ InitFromBailout(JSContext *cx, HandleScr
     // Initialize BaselineFrame::frameSize
     uint32_t frameSize = BaselineFrame::Size() + BaselineFrame::FramePointerOffset +
                          (sizeof(Value) * (script->nfixed() + exprStackSlots));
     JitSpew(JitSpew_BaselineBailouts, "      FrameSize=%d", (int) frameSize);
     blFrame->setFrameSize(frameSize);
 
     uint32_t flags = 0;
 
+    // If SPS Profiler is enabled, mark the frame as having pushed an SPS entry.
+    // This may be wrong for the last frame of ArgumentCheck bailout, but
+    // that will be fixed later.
+    if (ionScript->hasSPSInstrumentation()) {
+        if (callerPC == nullptr) {
+            JitSpew(JitSpew_BaselineBailouts, "      Setting SPS flag on top frame!");
+            flags |= BaselineFrame::HAS_PUSHED_SPS_FRAME;
+        }
+    }
+
     // If we are bailing to a script whose execution is observed, mark the
     // baseline frame as a debuggee frame. This is to cover the case where we
     // don't rematerialize the Ion frame via the Debugger.
     if (script->isDebuggee())
         flags |= BaselineFrame::DEBUGGEE;
 
     // Initialize BaselineFrame's scopeChain and argsObj
     JSObject *scopeChain = nullptr;
@@ -1080,24 +1093,63 @@ InitFromBailout(JSContext *cx, HandleScr
             if (scopeChain == nullptr) {
                 // Global and eval scripts expect the scope chain in R1, so only
                 // resume into the prologue for function scripts.
                 MOZ_ASSERT(fun);
                 MOZ_ASSERT(numUnsynced == 0);
                 opReturnAddr = baselineScript->prologueEntryAddr();
                 JitSpew(JitSpew_BaselineBailouts, "      Resuming into prologue.");
 
+                // If bailing into prologue, HAS_PUSHED_SPS_FRAME should not be set on frame.
+                blFrame->unsetPushedSPSFrame();
+
+                if (cx->runtime()->spsProfiler.enabled()) {
+                    // 1. If resuming into inline code, then the top SPS entry will be
+                    // for the outermost caller, and will have an uninitialized PC.
+                    // This will be fixed up later in BailoutIonToBaseline.
+                    //
+                    // 2. If resuming into top-level code prologue, with ArgumentCheck,
+                    // no SPS entry will have been pushed.  Can be left alone.
+                    //
+                    // 3. If resuming into top-level code prologue, without ArgumentCheck,
+                    // an SPS entry will have been pushed, and needs to be popped.
+                    //
+                    // 4. If resuming into top-level code main body, an SPS entry will
+                    // have been pushed, and can be left alone.
+                    //
+                    // Only need to handle case 3 here.
+                    if (!caller && bailoutKind != Bailout_ArgumentCheck) {
+                        JitSpew(JitSpew_BaselineBailouts,
+                                "      Popping SPS entry for outermost frame");
+                        cx->runtime()->spsProfiler.exit(script, fun);
+
+                        // Notify caller that the last SPS frame was popped, so not
+                        // to do it again.
+                        if (poppedLastSPSFrameOut)
+                            *poppedLastSPSFrameOut = true;
+                    }
+                }
             } else {
                 opReturnAddr = nativeCodeForPC;
             }
             builder.setResumeAddr(opReturnAddr);
             JitSpew(JitSpew_BaselineBailouts, "      Set resumeAddr=%p", opReturnAddr);
         }
 
         if (cx->runtime()->spsProfiler.enabled()) {
+            if (blFrame->hasPushedSPSFrame()) {
+                // Set PC index to 0 for the innermost frame to match what the
+                // interpreter and Baseline do: they update the SPS pc for
+                // JSOP_CALL ops but set it to 0 when running other ops. Ion code
+                // can set the pc to NullPCIndex and this will confuse SPS when
+                // Baseline calls into the VM at non-CALL ops and re-enters JS.
+                JitSpew(JitSpew_BaselineBailouts, "      Setting PCidx for last frame to 0");
+                cx->runtime()->spsProfiler.updatePC(script, script->code());
+            }
+
             // Register bailout with profiler.
             const char *filename = script->filename();
             if (filename == nullptr)
                 filename = "<unknown>";
             unsigned len = strlen(filename) + 200;
             char *buf = js_pod_malloc<char>(len);
             if (buf == nullptr)
                 return false;
@@ -1324,21 +1376,24 @@ InitFromBailout(JSContext *cx, HandleScr
         return false;
 
     return true;
 }
 
 uint32_t
 jit::BailoutIonToBaseline(JSContext *cx, JitActivation *activation, JitFrameIterator &iter,
                           bool invalidate, BaselineBailoutInfo **bailoutInfo,
-                          const ExceptionBailoutInfo *excInfo)
+                          const ExceptionBailoutInfo *excInfo, bool *poppedLastSPSFrameOut)
 {
     MOZ_ASSERT(bailoutInfo != nullptr);
     MOZ_ASSERT(*bailoutInfo == nullptr);
 
+    MOZ_ASSERT(poppedLastSPSFrameOut);
+    MOZ_ASSERT(!*poppedLastSPSFrameOut);
+
     TraceLoggerThread *logger = TraceLoggerForMainThread(cx->runtime());
     TraceLogStopEvent(logger, TraceLogger_IonMonkey);
     TraceLogStartEvent(logger, TraceLogger_Baseline);
 
     // The caller of the top frame must be one of the following:
     //      IonJS - Ion calling into Ion.
     //      BaselineStub - Baseline calling into Ion.
     //      Entry - Interpreter or other calling into Ion.
@@ -1432,16 +1487,19 @@ jit::BailoutIonToBaseline(JSContext *cx,
     size_t frameNo = 0;
 
     // Reconstruct baseline frames using the builder.
     RootedScript caller(cx);
     jsbytecode *callerPC = nullptr;
     RootedFunction fun(cx, callee);
     AutoValueVector startFrameFormals(cx);
 
+    RootedScript topCaller(cx);
+    jsbytecode *topCallerPC = nullptr;
+
     gc::AutoSuppressGC suppress(cx);
 
     while (true) {
         // Skip recover instructions as they are already recovered by |initInstructionResults|.
         snapIter.settleOnFrame();
 
         if (frameNo > 0) {
             // TraceLogger doesn't create entries for inlined frames. But we
@@ -1461,17 +1519,18 @@ jit::BailoutIonToBaseline(JSContext *cx,
         // We also need to pass excInfo if we're bailing out in place for
         // debug mode.
         bool passExcInfo = handleException || propagatingExceptionForDebugMode;
 
         jsbytecode *callPC = nullptr;
         RootedFunction nextCallee(cx, nullptr);
         if (!InitFromBailout(cx, caller, callerPC, fun, scr, iter.ionScript(),
                              snapIter, invalidate, builder, startFrameFormals,
-                             &nextCallee, &callPC, passExcInfo ? excInfo : nullptr))
+                             &nextCallee, &callPC, passExcInfo ? excInfo : nullptr,
+                             poppedLastSPSFrameOut))
         {
             return BAILOUT_RETURN_FATAL_ERROR;
         }
 
         if (!snapIter.moreFrames()) {
             MOZ_ASSERT(!callPC);
             break;
         }
@@ -1481,22 +1540,34 @@ jit::BailoutIonToBaseline(JSContext *cx,
 
         MOZ_ASSERT(nextCallee);
         MOZ_ASSERT(callPC);
         caller = scr;
         callerPC = callPC;
         fun = nextCallee;
         scr = fun->existingScriptForInlinedFunction();
 
+        // Save top caller info for adjusting SPS frames later.
+        if (!topCaller) {
+            MOZ_ASSERT(frameNo == 0);
+            topCaller = caller;
+            topCallerPC = callerPC;
+        }
+
         frameNo++;
 
         snapIter.nextInstruction();
     }
     JitSpew(JitSpew_BaselineBailouts, "  Done restoring frames");
 
+    // If there were multiple inline frames unpacked, then the current top SPS frame
+    // is for the outermost caller, and has an uninitialized PC.  Initialize it now.
+    if (frameNo > 0)
+        cx->runtime()->spsProfiler.updatePC(topCaller, topCallerPC);
+
     BailoutKind bailoutKind = snapIter.bailoutKind();
 
     if (!startFrameFormals.empty()) {
         // Set the first frame's formals, see the comment in InitFromBailout.
         Value *argv = builder.startFrame()->argv() + 1; // +1 to skip |this|.
         mozilla::PodCopy(argv, startFrameFormals.begin(), startFrameFormals.length());
     }
 
@@ -1645,22 +1716,16 @@ jit::FinishBailoutToBaseline(BaselineBai
     // that script->needsArgsObj() implies frame->hasArgsObj().
     RootedScript innerScript(cx, nullptr);
     RootedScript outerScript(cx, nullptr);
 
     MOZ_ASSERT(cx->currentlyRunningInJit());
     JitFrameIterator iter(cx);
     uint8_t *outerFp = nullptr;
 
-    // Iter currently points at the exit frame.  Get the previous frame
-    // (which must be a baseline frame), and set it as the last profiling
-    // frame.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-        cx->mainThread().jitActivation->setLastProfilingFrame(iter.prevFp());
-
     uint32_t frameno = 0;
     while (frameno < numFrames) {
         MOZ_ASSERT(!iter.isIonJS());
 
         if (iter.isBaselineJS()) {
             BaselineFrame *frame = iter.baselineFrame();
             MOZ_ASSERT(frame->script()->hasBaselineScript());
 
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -170,32 +170,30 @@ BaselineCompiler::compile()
         previousOffset = entry.nativeOffset;
     }
 
     if (pcEntries.oom())
         return Method_Error;
 
     prologueOffset_.fixup(&masm);
     epilogueOffset_.fixup(&masm);
-    profilerEnterFrameToggleOffset_.fixup(&masm);
-    profilerExitFrameToggleOffset_.fixup(&masm);
+    spsPushToggleOffset_.fixup(&masm);
 #ifdef JS_TRACE_LOGGING
     traceLoggerEnterToggleOffset_.fixup(&masm);
     traceLoggerExitToggleOffset_.fixup(&masm);
 #endif
     postDebugPrologueOffset_.fixup(&masm);
 
     // Note: There is an extra entry in the bytecode type map for the search hint, see below.
     size_t bytecodeTypeMapEntries = script->nTypeSets() + 1;
 
     mozilla::UniquePtr<BaselineScript, JS::DeletePolicy<BaselineScript> > baselineScript(
         BaselineScript::New(script, prologueOffset_.offset(),
                             epilogueOffset_.offset(),
-                            profilerEnterFrameToggleOffset_.offset(),
-                            profilerExitFrameToggleOffset_.offset(),
+                            spsPushToggleOffset_.offset(),
                             traceLoggerEnterToggleOffset_.offset(),
                             traceLoggerExitToggleOffset_.offset(),
                             postDebugPrologueOffset_.offset(),
                             icEntries_.length(),
                             pcMappingIndexEntries.length(),
                             pcEntries.length(),
                             bytecodeTypeMapEntries,
                             yieldOffsets_.length()));
@@ -239,16 +237,20 @@ BaselineCompiler::compile()
 
     if (modifiesArguments_)
         baselineScript->setModifiesArguments();
 
     // All barriers are emitted off-by-default, toggle them on if needed.
     if (cx->zone()->needsIncrementalBarrier())
         baselineScript->toggleBarriers(true);
 
+    // All SPS instrumentation is emitted toggled off.  Toggle them on if needed.
+    if (cx->runtime()->spsProfiler.enabled())
+        baselineScript->toggleSPS(true);
+
 #ifdef JS_TRACE_LOGGING
     // Initialize the tracelogger instrumentation.
     baselineScript->initTraceLogger(cx->runtime(), script);
 #endif
 
     uint32_t *bytecodeMap = baselineScript->bytecodeTypeMap();
     types::FillBytecodeTypeMap(script, bytecodeMap);
 
@@ -256,39 +258,26 @@ BaselineCompiler::compile()
     // searches for the sought entry when queries are in linear order.
     bytecodeMap[script->nTypeSets()] = 0;
 
     baselineScript->copyYieldEntries(script, yieldOffsets_);
 
     if (compileDebugInstrumentation_)
         baselineScript->setHasDebugInstrumentation();
 
-    // If profiler instrumentation is enabled, toggle instrumentation on.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-        baselineScript->toggleProfilerInstrumentation(true);
-
-    // Always register a native => bytecode mapping entry, since profiler can be
-    // turned on with baseline jitcode on stack, and baseline jitcode cannot be invalidated.
-    {
+    // Register a native => bytecode mapping entry for this script if needed.
+    if (cx->runtime()->jitRuntime()->isNativeToBytecodeMapEnabled(cx->runtime())) {
         JitSpew(JitSpew_Profiling, "Added JitcodeGlobalEntry for baseline script %s:%d (%p)",
                     script->filename(), script->lineno(), baselineScript.get());
-
-        // Generate profiling string.
-        char *str = JitcodeGlobalEntry::createScriptString(cx, script);
-        if (!str)
-            return Method_Error;
-
         JitcodeGlobalEntry::BaselineEntry entry;
-        entry.init(code->raw(), code->rawEnd(), script, str);
+        entry.init(code->raw(), code->raw() + code->instructionsSize(), script);
 
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry, cx->runtime())) {
-            entry.destroy();
+        if (!globalTable->addEntry(entry))
             return Method_Error;
-        }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     }
 
     script->setBaselineScript(cx, baselineScript.release());
 
     return Method_Compiled;
@@ -330,18 +319,16 @@ BaselineCompiler::emitInitializeLocals(s
 bool
 BaselineCompiler::emitPrologue()
 {
 #ifdef JS_USE_LINK_REGISTER
     // Push link register from generateEnterJIT()'s BLR.
     masm.pushReturnAddress();
     masm.checkStackAlignment();
 #endif
-    emitProfilerEnterFrame();
-
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
 
     masm.subPtr(Imm32(BaselineFrame::Size()), BaselineStackReg);
 
     // Initialize BaselineFrame. For eval scripts, the scope chain
     // is passed in R1, so we have to be careful not to clobber
     // it.
@@ -420,16 +407,19 @@ BaselineCompiler::emitPrologue()
         return false;
 
     if (!emitWarmUpCounterIncrement())
         return false;
 
     if (!emitArgumentTypeChecks())
         return false;
 
+    if (!emitSPSPush())
+        return false;
+
     return true;
 }
 
 bool
 BaselineCompiler::emitEpilogue()
 {
     // Record the offset of the epilogue, so we can do early return from
     // Debugger handlers during on-stack recompile.
@@ -437,21 +427,22 @@ BaselineCompiler::emitEpilogue()
 
     masm.bind(&return_);
 
 #ifdef JS_TRACE_LOGGING
     if (!emitTraceLoggerExit())
         return false;
 #endif
 
+    // Pop SPS frame if necessary
+    emitSPSPop();
+
     masm.mov(BaselineFrameReg, BaselineStackReg);
     masm.pop(BaselineFrameReg);
 
-    emitProfilerExitFrame();
-
     masm.ret();
     return true;
 }
 
 // On input:
 //  R2.scratchReg() contains object being written to.
 //  Called with the baseline stack synced, except for R0 which is preserved.
 //  All other registers are usable as scratch.
@@ -831,44 +822,43 @@ BaselineCompiler::emitTraceLoggerExit()
     masm.Pop(loggerReg);
 
     masm.bind(&noTraceLogger);
 
     return true;
 }
 #endif
 
-void
-BaselineCompiler::emitProfilerEnterFrame()
+bool
+BaselineCompiler::emitSPSPush()
 {
-    // Store stack position to lastProfilingFrame variable, guarded by a toggled jump.
-    // Starts off initially disabled.
-    Label noInstrument;
-    CodeOffsetLabel toggleOffset = masm.toggledJump(&noInstrument);
-    masm.profilerEnterFrame(BaselineStackReg, R0.scratchReg());
-    masm.bind(&noInstrument);
+    // Enter the IC, guarded by a toggled jump (initially disabled).
+    Label noPush;
+    CodeOffsetLabel toggleOffset = masm.toggledJump(&noPush);
+    MOZ_ASSERT(frame.numUnsyncedSlots() == 0);
+    ICProfiler_Fallback::Compiler compiler(cx);
+    if (!emitNonOpIC(compiler.getStub(&stubSpace_)))
+        return false;
+    masm.bind(&noPush);
 
     // Store the start offset in the appropriate location.
-    MOZ_ASSERT(profilerEnterFrameToggleOffset_.offset() == 0);
-    profilerEnterFrameToggleOffset_ = toggleOffset;
+    MOZ_ASSERT(spsPushToggleOffset_.offset() == 0);
+    spsPushToggleOffset_ = toggleOffset;
+    return true;
 }
 
 void
-BaselineCompiler::emitProfilerExitFrame()
+BaselineCompiler::emitSPSPop()
 {
-    // Store previous frame to lastProfilingFrame variable, guarded by a toggled jump.
-    // Starts off initially disabled.
-    Label noInstrument;
-    CodeOffsetLabel toggleOffset = masm.toggledJump(&noInstrument);
-    masm.profilerExitFrame();
-    masm.bind(&noInstrument);
-
-    // Store the start offset in the appropriate location.
-    MOZ_ASSERT(profilerExitFrameToggleOffset_.offset() == 0);
-    profilerExitFrameToggleOffset_ = toggleOffset;
+    // If profiler entry was pushed on this frame, pop it.
+    Label noPop;
+    masm.branchTest32(Assembler::Zero, frame.addressOfFlags(),
+                      Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), &noPop);
+    masm.spsPopFrameSafe(&cx->runtime()->spsProfiler, R1.scratchReg());
+    masm.bind(&noPop);
 }
 
 MethodStatus
 BaselineCompiler::emitBody()
 {
     MOZ_ASSERT(pc == script->code());
 
     bool lastOpUnreachable = false;
@@ -3635,29 +3625,16 @@ BaselineCompiler::emit_JSOP_RESUME()
     ICEntry icEntry(script->pcToOffset(pc), ICEntry::Kind_Op);
     icEntry.setReturnOffset(CodeOffsetLabel(masm.currentOffset()));
     if (!icEntries_.append(icEntry))
         return false;
 
     masm.jump(&returnTarget);
     masm.bind(&genStart);
 
-    // If profiler instrumentation is on, update lastProfilingFrame on
-    // current JitActivation
-    {
-        Register scratchReg = scratch2;
-        Label skip;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skip);
-        masm.loadPtr(AbsoluteAddress(cx->mainThread().addressOfProfilingActivation()), scratchReg);
-        masm.storePtr(BaselineStackReg,
-                      Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()));
-        masm.bind(&skip);
-    }
-
     // Construct BaselineFrame.
     masm.push(BaselineFrameReg);
     masm.mov(BaselineStackReg, BaselineFrameReg);
     masm.subPtr(Imm32(BaselineFrame::Size()), BaselineStackReg);
     masm.checkStackAlignment();
 
     // Store flags and scope chain.
     masm.store32(Imm32(BaselineFrame::HAS_CALL_OBJ), frame.addressOfFlags());
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -251,19 +251,18 @@ class BaselineCompiler : public Baseline
     bool emitStackCheck(bool earlyCheck=false);
     bool emitInterruptCheck();
     bool emitWarmUpCounterIncrement(bool allowOsr=true);
     bool emitArgumentTypeChecks();
     bool emitDebugPrologue();
     bool emitDebugTrap();
     bool emitTraceLoggerEnter();
     bool emitTraceLoggerExit();
-
-    void emitProfilerEnterFrame();
-    void emitProfilerExitFrame();
+    bool emitSPSPush();
+    void emitSPSPop();
 
     bool initScopeChain();
 
     void storeValue(const StackValue *source, const Address &dest,
                     const ValueOperand &scratch);
 
 #define EMIT_OP(op) bool emit_##op();
     OPCODE_LIST(EMIT_OP)
--- a/js/src/jit/BaselineFrame.cpp
+++ b/js/src/jit/BaselineFrame.cpp
@@ -167,28 +167,39 @@ BaselineFrame::initForOsr(InterpreterFra
     if (fp->script()->needsArgsObj() && fp->hasArgsObj()) {
         flags_ |= BaselineFrame::HAS_ARGS_OBJ;
         argsObj_ = &fp->argsObj();
     }
 
     if (fp->hasReturnValue())
         setReturnValue(fp->returnValue());
 
+    // If the interpreter pushed an SPS frame when it entered the function, the
+    // interpreter will pop it after the OSR trampoline returns.  In order for
+    // the Baseline frame to have its SPS flag set, it must have its own SPS
+    // frame, which the Baseline code will pop on return.  Note that the
+    // profiler may have been enabled or disabled after the function was entered
+    // but before OSR.
+    JSContext *cx = GetJSContextFromJitCode();
+    SPSProfiler *p = &(cx->runtime()->spsProfiler);
+    if (p->enabled()) {
+        p->enter(fp->script(), fp->maybeFun());
+        flags_ |= BaselineFrame::HAS_PUSHED_SPS_FRAME;
+    }
+
     frameSize_ = BaselineFrame::FramePointerOffset +
         BaselineFrame::Size() +
         numStackValues * sizeof(Value);
 
     MOZ_ASSERT(numValueSlots() == numStackValues);
 
     for (uint32_t i = 0; i < numStackValues; i++)
         *valueSlot(i) = fp->slots()[i];
 
     if (fp->isDebuggee()) {
-        JSContext *cx = GetJSContextFromJitCode();
-
         // For debuggee frames, update any Debugger.Frame objects for the
         // InterpreterFrame to point to the BaselineFrame.
 
         // The caller pushed a fake return address. ScriptFrameIter, used by the
         // debugger, wants a valid return address, but it's okay to just pick one.
         // In debug mode there's always at least 1 ICEntry (since there are always
         // debug prologue/epilogue calls).
         JitFrameIterator iter(cx);
--- a/js/src/jit/BaselineFrame.h
+++ b/js/src/jit/BaselineFrame.h
@@ -51,16 +51,19 @@ class BaselineFrame
         //
         // See comment above 'debugMode' in jscompartment.h for explanation of
         // invariants of debuggee compartments, scripts, and frames.
         DEBUGGEE         = 1 << 6,
 
         // Eval frame, see the "eval frames" comment.
         EVAL             = 1 << 7,
 
+        // Frame has profiler entry pushed.
+        HAS_PUSHED_SPS_FRAME = 1 << 8,
+
         // Frame has over-recursed on an early check.
         OVER_RECURSED    = 1 << 9,
 
         // Frame has a BaselineRecompileInfo stashed in the scratch value
         // slot. See PatchBaselineFramesForDebugMode.
         HAS_DEBUG_MODE_OSR_INFO = 1 << 10,
 
         // This flag is intended for use whenever the frame is settled on a
@@ -300,16 +303,28 @@ class BaselineFrame
         flags_ &= ~DEBUGGER_HANDLING_EXCEPTION;
     }
 
     JSScript *evalScript() const {
         MOZ_ASSERT(isEvalFrame());
         return evalScript_;
     }
 
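+    // HAS_PUSHED_SPS_FRAME tracks whether this frame pushed an SPS profiler entry
+    // that must be popped when the frame is left.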
+    bool hasPushedSPSFrame() const {
+        return flags_ & HAS_PUSHED_SPS_FRAME;
+    }
+
+    void setPushedSPSFrame() {
+        flags_ |= HAS_PUSHED_SPS_FRAME;
+    }
+
+    void unsetPushedSPSFrame() {
+        flags_ &= ~HAS_PUSHED_SPS_FRAME;
+    }
+
     bool overRecursed() const {
         return flags_ & OVER_RECURSED;
     }
 
     void setOverRecursed() {
         flags_ |= OVER_RECURSED;
     }
 
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -292,16 +292,21 @@ ICStub::trace(JSTracer *trc)
         MarkObject(trc, &updateStub->object(), "baseline-update-singleobject");
         break;
       }
       case ICStub::TypeUpdate_TypeObject: {
         ICTypeUpdate_TypeObject *updateStub = toTypeUpdate_TypeObject();
         MarkTypeObject(trc, &updateStub->type(), "baseline-update-typeobject");
         break;
       }
+      case ICStub::Profiler_PushFunction: {
+        ICProfiler_PushFunction *pushFunStub = toProfiler_PushFunction();
+        MarkScript(trc, &pushFunStub->script(), "baseline-profilerpushfunction-stub-script");
+        break;
+      }
       case ICStub::GetName_Global: {
         ICGetName_Global *globalStub = toGetName_Global();
         MarkShape(trc, &globalStub->shape(), "baseline-global-stub-shape");
         break;
       }
       case ICStub::GetName_Scope0:
         static_cast<ICGetName_Scope<0>*>(this)->traceScopes(trc);
         break;
@@ -713,16 +718,76 @@ ICStubCompiler::enterStubFrame(MacroAsse
 
 void
 ICStubCompiler::leaveStubFrame(MacroAssembler &masm, bool calledIntoIon)
 {
     MOZ_ASSERT(entersStubFrame_);
     EmitLeaveStubFrame(masm, calledIntoIon);
 }
 
+void
+ICStubCompiler::guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip)
+{
+    // This should only be called from the following stubs.
+    MOZ_ASSERT(kind == ICStub::Call_Scripted                             ||
+               kind == ICStub::Call_AnyScripted                          ||
+               kind == ICStub::Call_Native                               ||
+               kind == ICStub::Call_ClassHook                            ||
+               kind == ICStub::Call_ScriptedApplyArray                   ||
+               kind == ICStub::Call_ScriptedApplyArguments               ||
+               kind == ICStub::Call_ScriptedFunCall                      ||
+               kind == ICStub::GetProp_CallScripted                      ||
+               kind == ICStub::GetProp_CallNative                        ||
+               kind == ICStub::GetProp_CallNativePrototype               ||
+               kind == ICStub::GetProp_CallDOMProxyNative                ||
+               kind == ICStub::GetElem_NativePrototypeCallNative         ||
+               kind == ICStub::GetElem_NativePrototypeCallScripted       ||
+               kind == ICStub::GetProp_CallDOMProxyWithGenerationNative  ||
+               kind == ICStub::GetProp_DOMProxyShadowed                  ||
+               kind == ICStub::SetProp_CallScripted                      ||
+               kind == ICStub::SetProp_CallNative);
+
+    // Guard on the bit in the frame that indicates whether an SPS frame was pushed in
+    // the first place.  This code is expected to be called from within a stub that has
+    // already entered a stub frame.
+    MOZ_ASSERT(entersStubFrame_);
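+    // Recover the BaselineFrame pointer saved on stub frame entry so the frame's
+    // HAS_PUSHED_SPS_FRAME flag can be tested.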
+    masm.loadPtr(Address(BaselineFrameReg, 0), scratch);
+    masm.branchTest32(Assembler::Zero,
+                      Address(scratch, BaselineFrame::reverseOffsetOfFlags()),
+                      Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME),
+                      skip);
+
+    // Check if profiling is enabled
+    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
+    masm.branch32(Assembler::Equal, AbsoluteAddress(enabledAddr), Imm32(0), skip);
+}
+
+void
+ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
+                                    uint32_t stubPcOffset)
+{
+    Label skipProfilerUpdate;
+
+    // Check if profiling is enabled.
+    guardProfilingEnabled(masm, scratch, &skipProfilerUpdate);
+
+    // Update profiling entry before leaving function.
+    masm.load32(Address(BaselineStubReg, stubPcOffset), pcIdx);
+    masm.spsUpdatePCIdx(&cx->runtime()->spsProfiler, pcIdx, scratch);
+
+    masm.bind(&skipProfilerUpdate);
+}
+
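+// Overload that pulls the two scratch registers out of |regs| for the update above.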
+void
+ICStubCompiler::emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs,
+                                    uint32_t stubPcOffset)
+{
+    emitProfilingUpdate(masm, regs.takeAny(), regs.takeAny(), stubPcOffset);
+}
+
 inline bool
 ICStubCompiler::emitPostWriteBarrierSlot(MacroAssembler &masm, Register obj, ValueOperand val,
                                          Register scratch, GeneralRegisterSet saveRegs)
 {
     Label skipBarrier;
     masm.branchPtrInNurseryRange(Assembler::Equal, obj, scratch, &skipBarrier);
     masm.branchValueIsNurseryObject(Assembler::NotEqual, val, scratch, &skipBarrier);
 
@@ -793,19 +858,26 @@ EnsureCanEnterIon(JSContext *cx, ICWarmU
                     bailoutExpected ? "yes" : "no");
             script->resetWarmUpCounter();
         }
         return true;
     }
 
     if (isLoopEntry) {
         IonScript *ion = script->ionScript();
-        MOZ_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasProfilingInstrumentation());
+        MOZ_ASSERT(cx->runtime()->spsProfiler.enabled() == ion->hasSPSInstrumentation());
         MOZ_ASSERT(ion->osrPc() == pc);
 
+        // If the baseline frame's SPS handling doesn't match up with the Ion code's SPS
+        // handling, don't OSR.
+        if (frame->hasPushedSPSFrame() != ion->hasSPSInstrumentation()) {
+            JitSpew(JitSpew_BaselineOSR, "  OSR crosses SPS handling boundaries, skipping!");
+            return true;
+        }
+
         JitSpew(JitSpew_BaselineOSR, "  OSR possible!");
         *jitcodePtr = ion->method()->raw() + ion->osrEntryOffset();
     }
 
     return true;
 }
 
 //
@@ -1001,42 +1073,120 @@ ICWarmUpCounter_Fallback::Compiler::gene
 
     // Restore the stack pointer to point to the saved frame pointer.
     masm.movePtr(BaselineFrameReg, BaselineStackReg);
 
     // Discard saved frame pointer, so that the return address is on top of
     // the stack.
     masm.pop(scratchReg);
 
-#ifdef DEBUG
-    // If profiler instrumentation is on, ensure that lastProfilingFrame is
-    // the frame currently being OSR-ed
-    {
-        Label checkOk;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &checkOk);
-        masm.loadPtr(AbsoluteAddress((void*)&cx->mainThread().jitActivation), scratchReg);
-        masm.loadPtr(Address(scratchReg, JitActivation::offsetOfLastProfilingFrame()), scratchReg);
-        masm.branchPtr(Assembler::Equal, scratchReg, BaselineStackReg, &checkOk);
-        masm.assumeUnreachable("Baseline OSR lastProfilingFrame mismatch.");
-        masm.bind(&checkOk);
-    }
-#endif
-
     // Jump into Ion.
     masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, jitcode)), scratchReg);
     masm.loadPtr(Address(osrDataReg, offsetof(IonOsrTempData, baselineFrame)), OsrFrameReg);
     masm.jump(scratchReg);
 
     // No jitcode available, do nothing.
     masm.bind(&noCompiledCode);
     EmitReturnFromIC(masm);
     return true;
 }
 
+//
+// ICProfiler_Fallback
+//
+
+static bool
+DoProfilerFallback(JSContext *cx, BaselineFrame *frame, ICProfiler_Fallback *stub)
+{
+    RootedScript script(cx, frame->script());
+    RootedFunction func(cx, frame->maybeFun());
+    mozilla::DebugOnly<ICEntry *> icEntry = stub->icEntry();
+
+    FallbackICSpew(cx, stub, "Profiler");
+
+    SPSProfiler *profiler = &cx->runtime()->spsProfiler;
+
+    // Manually enter SPS this time.
+    MOZ_ASSERT(profiler->enabled());
+    if (!cx->runtime()->spsProfiler.enter(script, func))
+        return false;
+    frame->setPushedSPSFrame();
+
+    // Unlink any existing PushFunction stub (which may hold a stale 'const char *' to
+    // the profile string).
+    MOZ_ASSERT_IF(icEntry->firstStub() != stub,
+                  icEntry->firstStub()->isProfiler_PushFunction() &&
+                  icEntry->firstStub()->next() == stub);
+    stub->unlinkStubsWithKind(cx, ICStub::Profiler_PushFunction);
+    MOZ_ASSERT(icEntry->firstStub() == stub);
+
+    // Generate the string to use to identify this stack frame.
+    const char *string = profiler->profileString(script, func);
+    if (string == nullptr)
+        return false;
+
+    JitSpew(JitSpew_BaselineIC, "  Generating Profiler_PushFunction stub for %s:%d",
+            script->filename(), script->lineno());
+
+    // Create a new optimized stub.
+    ICProfiler_PushFunction::Compiler compiler(cx, string, script);
+    ICStub *optStub = compiler.getStub(compiler.getStubSpace(script));
+    if (!optStub)
+        return false;
+    stub->addNewStub(optStub);
+
+    return true;
+}
+
+typedef bool (*DoProfilerFallbackFn)(JSContext *, BaselineFrame *frame, ICProfiler_Fallback *);
+static const VMFunction DoProfilerFallbackInfo =
+    FunctionInfo<DoProfilerFallbackFn>(DoProfilerFallback, TailCall);
+
+bool
+ICProfiler_Fallback::Compiler::generateStubCode(MacroAssembler &masm)
+{
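+    // Tail-call DoProfilerFallback, passing the stub and the baseline frame pointer.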
+    EmitRestoreTailCallReg(masm);
+
+    masm.push(BaselineStubReg);         // Push stub.
+    masm.pushBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); // Push frame.
+
+    return tailCallVM(DoProfilerFallbackInfo, masm);
+}
+
+bool
+ICProfiler_PushFunction::Compiler::generateStubCode(MacroAssembler &masm)
+{
+    Register scratch = R0.scratchReg();
+    Register scratch2 = R1.scratchReg();
+
+    // Profiling should be enabled if we ever reach here.
+#ifdef DEBUG
+    Label spsEnabled;
+    uint32_t *enabledAddr = cx->runtime()->spsProfiler.addressOfEnabled();
+    masm.branch32(Assembler::NotEqual, AbsoluteAddress(enabledAddr), Imm32(0), &spsEnabled);
+    masm.assumeUnreachable("Profiling should have been enabled.");
+    masm.bind(&spsEnabled);
+#endif
+
+    // Push SPS entry.
+    masm.spsPushFrame(&cx->runtime()->spsProfiler,
+                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfStr()),
+                      Address(BaselineStubReg, ICProfiler_PushFunction::offsetOfScript()),
+                      scratch,
+                      scratch2);
+
+    // Mark frame as having profiler entry pushed.
+    Address flagsOffset(BaselineFrameReg, BaselineFrame::reverseOffsetOfFlags());
+    masm.or32(Imm32(BaselineFrame::HAS_PUSHED_SPS_FRAME), flagsOffset);
+
+    EmitReturnFromIC(masm);
+
+    return true;
+}
 
 //
 // TypeMonitor_Fallback
 //
 
 bool
 ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext *cx, JSScript *script, HandleValue val)
 {
@@ -4059,16 +4209,19 @@ ICGetElemNativeCompiler::emitCallNative(
     masm.push(objReg);
 
     // Push native callee.
     masm.loadPtr(Address(BaselineStubReg, ICGetElemNativeGetterStub::offsetOfGetter()), objReg);
     masm.push(objReg);
 
     regs.add(objReg);
 
+    // Profiler hook.
+    emitProfilingUpdate(masm, regs, ICGetElemNativeGetterStub::offsetOfPCOffset());
+
     // Call helper.
     if (!callVM(DoCallNativeGetterInfo, masm))
         return false;
 
     leaveStubFrame(masm);
 
     return true;
 }
@@ -4124,16 +4277,26 @@ ICGetElemNativeCompiler::emitCallScripte
             cx->runtime()->jitRuntime()->getArgumentsRectifier();
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
         masm.mov(ImmWord(0), ArgumentsRectifierReg);
     }
 
     masm.bind(&noUnderflow);
+
+    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
+    // be clobbered.
+    {
+        GeneralRegisterSet availRegs = availableGeneralRegs(0);
+        availRegs.take(ArgumentsRectifierReg);
+        availRegs.take(code);
+        emitProfilingUpdate(masm, availRegs, ICGetElemNativeGetterStub::offsetOfPCOffset());
+    }
+
     masm.callJit(code);
 
     leaveStubFrame(masm, true);
 
     return true;
 }
 
 bool
@@ -7280,16 +7443,26 @@ ICGetProp_CallScripted::Compiler::genera
             cx->runtime()->jitRuntime()->getArgumentsRectifier();
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
         masm.mov(ImmWord(0), ArgumentsRectifierReg);
     }
 
     masm.bind(&noUnderflow);
+
+    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
+    // be clobbered.
+    {
+        GeneralRegisterSet availRegs = availableGeneralRegs(0);
+        availRegs.take(ArgumentsRectifierReg);
+        availRegs.take(code);
+        emitProfilingUpdate(masm, availRegs, ICGetProp_CallScripted::offsetOfPCOffset());
+    }
+
     masm.callJit(code);
 
     leaveStubFrame(masm, true);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     // Leave stub frame and go to next stub.
@@ -7341,16 +7514,19 @@ ICGetProp_CallNative::Compiler::generate
 
     masm.loadPtr(Address(BaselineStubReg, ICGetProp_CallNative::offsetOfGetter()), scratch);
     masm.Push(scratch);
 
     regs.add(scratch);
     if (!inputDefinitelyObject_)
         regs.add(R0);
 
+    // If needed, update SPS Profiler frame entry.
+    emitProfilingUpdate(masm, regs, ICGetProp_CallNative::offsetOfPCOffset());
+
     if (!callVM(DoCallNativeGetterInfo, masm))
         return false;
     leaveStubFrame(masm);
 
     EmitEnterTypeMonitorIC(masm);
 
     masm.bind(&failure);
     EmitStubGuardFailure(masm);
@@ -7410,16 +7586,19 @@ ICGetProp_CallNativePrototype::Compiler:
     masm.push(objReg);
     masm.push(callee);
 
     if (!inputDefinitelyObject_)
         regs.add(R0);
     else
         regs.add(objReg);
 
+    // If needed, update SPS Profiler frame entry.
+    emitProfilingUpdate(masm, regs, ICGetProp_CallNativePrototype::offsetOfPCOffset());
+
     if (!callVM(DoCallNativeGetterInfo, masm))
         return false;
     leaveStubFrame(masm);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     // Failure case - jump to next stub
@@ -7480,16 +7659,19 @@ ICGetPropCallDOMProxyNativeCompiler::gen
 
     // Push args for vm call.
     masm.push(objReg);
     masm.push(callee);
 
     // Don't have to preserve R0 anymore.
     regs.add(R0);
 
+    // If needed, update SPS Profiler frame entry.
+    emitProfilingUpdate(masm, regs, ICGetProp_CallDOMProxyNative::offsetOfPCOffset());
+
     if (!callVM(DoCallNativeGetterInfo, masm))
         return false;
     leaveStubFrame(masm);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     // Failure case - jump to next stub
@@ -7612,16 +7794,19 @@ ICGetProp_DOMProxyShadowed::Compiler::ge
     // Push property name and proxy object.
     masm.loadPtr(Address(BaselineStubReg, ICGetProp_DOMProxyShadowed::offsetOfName()), scratch);
     masm.push(scratch);
     masm.push(objReg);
 
     // Don't have to preserve R0 anymore.
     regs.add(R0);
 
+    // If needed, update SPS Profiler frame entry.
+    emitProfilingUpdate(masm, regs, ICGetProp_DOMProxyShadowed::offsetOfPCOffset());
+
     if (!callVM(ProxyGetInfo, masm))
         return false;
     leaveStubFrame(masm);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     // Failure case - jump to next stub
@@ -8635,16 +8820,26 @@ ICSetProp_CallScripted::Compiler::genera
             cx->runtime()->jitRuntime()->getArgumentsRectifier();
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
         masm.mov(ImmWord(1), ArgumentsRectifierReg);
     }
 
     masm.bind(&noUnderflow);
+
+    // If needed, update SPS Profiler frame entry.  At this point, callee and scratch can
+    // be clobbered.
+    {
+        GeneralRegisterSet availRegs = availableGeneralRegs(0);
+        availRegs.take(ArgumentsRectifierReg);
+        availRegs.take(code);
+        emitProfilingUpdate(masm, availRegs, ICSetProp_CallScripted::offsetOfPCOffset());
+    }
+
     masm.callJit(code);
 
     leaveStubFrame(masm, true);
     // Do not care about return value from function. The original RHS should be returned
     // as the result of this operation.
     EmitUnstowICValues(masm, 2);
     masm.moveValue(R1, R0);
     EmitReturnFromIC(masm);
@@ -8720,16 +8915,19 @@ ICSetProp_CallNative::Compiler::generate
     masm.movePtr(BaselineStackReg, scratch);
     masm.pushValue(Address(scratch, STUB_FRAME_SIZE));
     masm.push(objReg);
     masm.push(callee);
 
     // Don't need to preserve R0 anymore.
     regs.add(R0);
 
+    // If needed, update SPS Profiler frame entry.
+    emitProfilingUpdate(masm, regs, ICSetProp_CallNative::offsetOfPCOffset());
+
     if (!callVM(DoCallNativeSetterInfo, masm))
         return false;
     leaveStubFrame(masm);
 
     // Do not care about return value from function. The original RHS should be returned
     // as the result of this operation.
     EmitUnstowICValues(masm, 2);
     masm.moveValue(R1, R0);
@@ -9268,16 +9466,20 @@ DoCallFallback(JSContext *cx, BaselineFr
     // Compute constructing and useNewType flags.
     bool constructing = (op == JSOP_NEW);
     bool newType = types::UseNewType(cx, script, pc);
 
     // Try attaching a call stub.
     if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing, false, newType))
         return false;
 
+    // Maybe update the PC in the profiler entry before leaving this script via the call.
+    if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame())
+        cx->runtime()->spsProfiler.updatePC(script, pc);
+
     if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
         return false;
 
     if (op == JSOP_NEW) {
         if (!InvokeConstructor(cx, callee, argc, args, res))
             return false;
     } else if ((op == JSOP_EVAL || op == JSOP_STRICTEVAL) &&
                frame->scopeChain()->global().valueIsEval(callee))
@@ -9340,16 +9542,20 @@ DoSpreadCallFallback(JSContext *cx, Base
 
     // Try attaching a call stub.
     if (op != JSOP_SPREADEVAL && op != JSOP_STRICTSPREADEVAL &&
         !TryAttachCallStub(cx, stub, script, pc, op, 1, vp, constructing, true, false))
     {
         return false;
     }
 
+    // Maybe update the PC in the profiler entry before leaving this script via the call.
+    if (cx->runtime()->spsProfiler.enabled() && frame->hasPushedSPSFrame())
+        cx->runtime()->spsProfiler.updatePC(script, pc);
+
     if (!MaybeCloneFunctionAtCallsite(cx, &callee, script, pc))
         return false;
 
     if (!SpreadCallOperation(cx, script, pc, thisv, callee, arr, res))
         return false;
 
     // Check if debug mode toggling made the stub invalid.
     if (stub.invalid())
@@ -9919,16 +10125,28 @@ ICCallScriptedCompiler::generateStubCode
             cx->runtime()->jitRuntime()->getArgumentsRectifier();
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
         masm.mov(argcReg, ArgumentsRectifierReg);
     }
 
     masm.bind(&noUnderflow);
+
+    // If needed, update SPS Profiler frame entry before and after call.
+    {
+        MOZ_ASSERT(kind == ICStub::Call_Scripted || kind == ICStub::Call_AnyScripted);
+        GeneralRegisterSet availRegs = availableGeneralRegs(0);
+        availRegs.take(ArgumentsRectifierReg);
+        availRegs.take(code);
+        emitProfilingUpdate(masm, availRegs, kind == ICStub::Call_Scripted ?
+                                                ICCall_Scripted::offsetOfPCOffset()
+                                              : ICCall_AnyScripted::offsetOfPCOffset());
+    }
+
     masm.callJit(code);
 
     // If this is a constructing call, and the callee returns a non-object, replace it with
     // the |this| object passed in.
     if (isConstructing_) {
         Label skipThisReplace;
         masm.branchTestObject(Assembler::Equal, JSReturnOperand, &skipThisReplace);
 
@@ -10179,16 +10397,20 @@ ICCall_Native::Compiler::generateStubCod
     masm.push(argcReg);
 
     Register scratch = regs.takeAny();
     EmitCreateStubFrameDescriptor(masm, scratch);
     masm.push(scratch);
     masm.push(BaselineTailCallReg);
     masm.enterFakeExitFrame(NativeExitFrameLayout::Token());
 
+    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
+    // and scratch can be clobbered.
+    emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_Native::offsetOfPCOffset());
+
     // Execute call.
     masm.setupUnalignedABICall(3, scratch);
     masm.loadJSContext(scratch);
     masm.passABIArg(scratch);
     masm.passABIArg(argcReg);
     masm.passABIArg(vpReg);
 
 #if defined(JS_ARM_SIMULATOR) || defined(JS_MIPS_SIMULATOR)
@@ -10274,16 +10496,20 @@ ICCall_ClassHook::Compiler::generateStub
     // Construct a native exit frame.
     masm.push(argcReg);
 
     EmitCreateStubFrameDescriptor(masm, scratch);
     masm.push(scratch);
     masm.push(BaselineTailCallReg);
     masm.enterFakeExitFrame(NativeExitFrameLayout::Token());
 
+    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
+    // and scratch can be clobbered.
+    emitProfilingUpdate(masm, BaselineTailCallReg, scratch, ICCall_ClassHook::offsetOfPCOffset());
+
     // Execute call.
     masm.setupUnalignedABICall(3, scratch);
     masm.loadJSContext(scratch);
     masm.passABIArg(scratch);
     masm.passABIArg(argcReg);
     masm.passABIArg(vpReg);
     masm.callWithABI(Address(BaselineStubReg, ICCall_ClassHook::offsetOfNative()));
 
@@ -10388,16 +10614,21 @@ ICCall_ScriptedApplyArray::Compiler::gen
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), target);
         masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
         masm.mov(argcReg, ArgumentsRectifierReg);
     }
     masm.bind(&noUnderflow);
     regs.add(argcReg);
 
+    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
+    // and scratch can be clobbered.
+    emitProfilingUpdate(masm, regs.getAny(), scratch,
+                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());
+
     // Do call
     masm.callJit(target);
     leaveStubFrame(masm, true);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     masm.bind(&failure);
@@ -10484,16 +10715,21 @@ ICCall_ScriptedApplyArguments::Compiler:
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), target);
         masm.loadPtr(Address(target, JitCode::offsetOfCode()), target);
         masm.mov(argcReg, ArgumentsRectifierReg);
     }
     masm.bind(&noUnderflow);
     regs.add(argcReg);
 
+    // If needed, update SPS Profiler frame entry.  At this point, BaselineTailCallReg
+    // and scratch can be clobbered.
+    emitProfilingUpdate(masm, regs.getAny(), scratch,
+                        ICCall_ScriptedApplyArguments::offsetOfPCOffset());
+
     // Do call
     masm.callJit(target);
     leaveStubFrame(masm, true);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     masm.bind(&failure);
@@ -10600,16 +10836,26 @@ ICCall_ScriptedFunCall::Compiler::genera
             cx->runtime()->jitRuntime()->getArgumentsRectifier();
 
         masm.movePtr(ImmGCPtr(argumentsRectifier), code);
         masm.loadPtr(Address(code, JitCode::offsetOfCode()), code);
         masm.mov(argcReg, ArgumentsRectifierReg);
     }
 
     masm.bind(&noUnderflow);
+
+    // If needed, update SPS Profiler frame entry.
+    {
+        // Need to avoid using ArgumentsRectifierReg and code register.
+        GeneralRegisterSet availRegs = availableGeneralRegs(0);
+        availRegs.take(ArgumentsRectifierReg);
+        availRegs.take(code);
+        emitProfilingUpdate(masm, availRegs, ICCall_ScriptedFunCall::offsetOfPCOffset());
+    }
+
     masm.callJit(code);
 
     leaveStubFrame(masm, true);
 
     // Enter type monitor IC to type-check result.
     EmitEnterTypeMonitorIC(masm);
 
     masm.bind(&failure);
@@ -11252,16 +11498,23 @@ ICRetSub_Resume::Compiler::generateStubC
     if (!tailCallVM(ThrowInfoBaseline, masm))
         return false;
 
     masm.bind(&fail);
     EmitStubGuardFailure(masm);
     return true;
 }
 
+ICProfiler_PushFunction::ICProfiler_PushFunction(JitCode *stubCode, const char *str,
+                                                 HandleScript script)
+  : ICStub(ICStub::Profiler_PushFunction, stubCode),
+    str_(str),
+    script_(script)
+{ }
+
 ICTypeMonitor_SingleObject::ICTypeMonitor_SingleObject(JitCode *stubCode, HandleObject obj)
   : ICStub(TypeMonitor_SingleObject, stubCode),
     obj_(obj)
 { }
 
 ICTypeMonitor_TypeObject::ICTypeMonitor_TypeObject(JitCode *stubCode, HandleTypeObject type)
   : ICStub(TypeMonitor_TypeObject, stubCode),
     type_(type)
--- a/js/src/jit/BaselineIC.h
+++ b/js/src/jit/BaselineIC.h
@@ -330,16 +330,19 @@ class ICEntry
         return &firstStub_;
     }
 };
 
 // List of baseline IC stub kinds.
 #define IC_STUB_KIND_LIST(_)    \
     _(WarmUpCounter_Fallback)   \
                                 \
+    _(Profiler_Fallback)        \
+    _(Profiler_PushFunction)    \
+                                \
     _(TypeMonitor_Fallback)     \
     _(TypeMonitor_SingleObject) \
     _(TypeMonitor_TypeObject)   \
     _(TypeMonitor_PrimitiveSet) \
                                 \
     _(TypeUpdate_Fallback)      \
     _(TypeUpdate_SingleObject)  \
     _(TypeUpdate_TypeObject)    \
@@ -1114,16 +1117,21 @@ class ICStubCompiler
     void enterStubFrame(MacroAssembler &masm, Register scratch);
     void leaveStubFrame(MacroAssembler &masm, bool calledIntoIon = false);
 
     // Some stubs need to emit SPS profiler updates.  This emits the guarding
     // jitcode for those stubs.  If profiling is not enabled, jumps to the
     // given label.
     void guardProfilingEnabled(MacroAssembler &masm, Register scratch, Label *skip);
 
+    // Higher-level helper to emit an update to the profiler pseudo-stack.
+    void emitProfilingUpdate(MacroAssembler &masm, Register pcIdx, Register scratch,
+                             uint32_t stubPcOffset);
+    void emitProfilingUpdate(MacroAssembler &masm, GeneralRegisterSet regs, uint32_t stubPcOffset);
+
     inline GeneralRegisterSet availableGeneralRegs(size_t numInputs) const {
         GeneralRegisterSet regs(GeneralRegisterSet::All());
         MOZ_ASSERT(!regs.has(BaselineStackReg));
 #if defined(JS_CODEGEN_ARM)
         MOZ_ASSERT(!regs.has(BaselineTailCallReg));
         regs.take(BaselineSecondScratchReg);
 #elif defined(JS_CODEGEN_MIPS)
         MOZ_ASSERT(!regs.has(BaselineTailCallReg));
@@ -1216,16 +1224,101 @@ class ICWarmUpCounter_Fallback : public 
         { }
 
         ICWarmUpCounter_Fallback *getStub(ICStubSpace *space) {
             return ICWarmUpCounter_Fallback::New(space, getStubCode());
         }
     };
 };
 
+// Profiler_Fallback
+
+class ICProfiler_Fallback : public ICFallbackStub
+{
+    friend class ICStubSpace;
+
+    explicit ICProfiler_Fallback(JitCode *stubCode)
+      : ICFallbackStub(ICStub::Profiler_Fallback, stubCode)
+    { }
+
+  public:
+    static inline ICProfiler_Fallback *New(ICStubSpace *space, JitCode *code) {
+        if (!code)
+            return nullptr;
+        return space->allocate<ICProfiler_Fallback>(code);
+    }
+
+    // Compiler for this stub kind.
+    class Compiler : public ICStubCompiler {
+      protected:
+        bool generateStubCode(MacroAssembler &masm);
+
+      public:
+        explicit Compiler(JSContext *cx)
+          : ICStubCompiler(cx, ICStub::Profiler_Fallback)
+        { }
+
+        ICProfiler_Fallback *getStub(ICStubSpace *space) {
+            return ICProfiler_Fallback::New(space, getStubCode());
+        }
+    };
+};
+
+// Profiler_PushFunction
+
+class ICProfiler_PushFunction : public ICStub
+{
+    friend class ICStubSpace;
+
+  protected:
+    const char *str_;
+    HeapPtrScript script_;
+
+    ICProfiler_PushFunction(JitCode *stubCode, const char *str, HandleScript script);
+
+  public:
+    static inline ICProfiler_PushFunction *New(ICStubSpace *space, JitCode *code,
+                                               const char *str, HandleScript script)
+    {
+        if (!code)
+            return nullptr;
+        return space->allocate<ICProfiler_PushFunction>(code, str, script);
+    }
+
+    HeapPtrScript &script() {
+        return script_;
+    }
+
+    static size_t offsetOfStr() {
+        return offsetof(ICProfiler_PushFunction, str_);
+    }
+    static size_t offsetOfScript() {
+        return offsetof(ICProfiler_PushFunction, script_);
+    }
+
+    // Compiler for this stub kind.
+    class Compiler : public ICStubCompiler {
+      protected:
+        const char *str_;
+        RootedScript script_;
+        bool generateStubCode(MacroAssembler &masm);
+
+      public:
+        Compiler(JSContext *cx, const char *str, HandleScript script)
+          : ICStubCompiler(cx, ICStub::Profiler_PushFunction),
+            str_(str),
+            script_(cx, script)
+        { }
+
+        ICProfiler_PushFunction *getStub(ICStubSpace *space) {
+            return ICProfiler_PushFunction::New(space, getStubCode(), str_, script_);
+        }
+    };
+};
+
 
 // TypeCheckPrimitiveSetStub
 //   Base class for IC stubs (TypeUpdate or TypeMonitor) that check that a given
 //   value's type falls within a set of primitive types.
 
 class TypeCheckPrimitiveSetStub : public ICStub
 {
     friend class ICStubSpace;
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -37,29 +37,29 @@ PCMappingSlotInfo::ToSlotLocation(const 
         MOZ_ASSERT(stackVal->reg() == R1);
         return SlotInR1;
     }
     MOZ_ASSERT(stackVal->kind() != StackValue::Stack);
     return SlotIgnore;
 }
 
 BaselineScript::BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
-                               uint32_t profilerEnterToggleOffset,
-                               uint32_t profilerExitToggleOffset,
-                               uint32_t traceLoggerEnterToggleOffset,
+                               uint32_t spsPushToggleOffset, uint32_t traceLoggerEnterToggleOffset,
                                uint32_t traceLoggerExitToggleOffset,
                                uint32_t postDebugPrologueOffset)
   : method_(nullptr),
     templateScope_(nullptr),
     fallbackStubSpace_(),
     dependentAsmJSModules_(nullptr),
     prologueOffset_(prologueOffset),
     epilogueOffset_(epilogueOffset),
-    profilerEnterToggleOffset_(profilerEnterToggleOffset),
-    profilerExitToggleOffset_(profilerExitToggleOffset),
+#ifdef DEBUG
+    spsOn_(false),
+#endif
+    spsPushToggleOffset_(spsPushToggleOffset),
 #ifdef JS_TRACE_LOGGING
 # ifdef DEBUG
     traceLoggerScriptsEnabled_(false),
     traceLoggerEngineEnabled_(false),
 # endif
     traceLoggerEnterToggleOffset_(traceLoggerEnterToggleOffset),
     traceLoggerExitToggleOffset_(traceLoggerExitToggleOffset),
     traceLoggerScriptEvent_(),
@@ -337,19 +337,18 @@ jit::CanEnterBaselineMethod(JSContext *c
     }
 
     RootedScript script(cx, state.script());
     return CanEnterBaselineJIT(cx, script, /* osrFrame = */ nullptr);
 };
 
 BaselineScript *
 BaselineScript::New(JSScript *jsscript, uint32_t prologueOffset, uint32_t epilogueOffset,
-                    uint32_t profilerEnterToggleOffset, uint32_t profilerExitToggleOffset,
-                    uint32_t traceLoggerEnterToggleOffset, uint32_t traceLoggerExitToggleOffset,
-                    uint32_t postDebugPrologueOffset,
+                    uint32_t spsPushToggleOffset, uint32_t traceLoggerEnterToggleOffset,
+                    uint32_t traceLoggerExitToggleOffset, uint32_t postDebugPrologueOffset,
                     size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
                     size_t bytecodeTypeMapEntries, size_t yieldEntries)
 {
     static const unsigned DataAlignment = sizeof(uintptr_t);
 
     size_t icEntriesSize = icEntries * sizeof(ICEntry);
     size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
     size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);
@@ -366,19 +365,18 @@ BaselineScript::New(JSScript *jsscript, 
                         paddedPCMappingSize +
                         paddedBytecodeTypesMapSize +
                         paddedYieldEntriesSize;
 
     BaselineScript *script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
     if (!script)
         return nullptr;
     new (script) BaselineScript(prologueOffset, epilogueOffset,
-                                profilerEnterToggleOffset, profilerExitToggleOffset,
-                                traceLoggerEnterToggleOffset, traceLoggerExitToggleOffset,
-                                postDebugPrologueOffset);
+                                spsPushToggleOffset, traceLoggerEnterToggleOffset,
+                                traceLoggerExitToggleOffset, postDebugPrologueOffset);
 
     size_t offsetCursor = sizeof(BaselineScript);
     MOZ_ASSERT(offsetCursor == AlignBytes(sizeof(BaselineScript), DataAlignment));
 
     script->icEntriesOffset_ = offsetCursor;
     script->icEntries_ = icEntries;
     offsetCursor += paddedICEntriesSize;
 
@@ -844,16 +842,35 @@ BaselineScript::toggleDebugTraps(JSScrip
                 Assembler::ToggleCall(label, enabled);
             }
 
             curPC += GetBytecodeLength(curPC);
         }
     }
 }
 
+void
+BaselineScript::toggleSPS(bool enable)
+{
+    MOZ_ASSERT(enable == !(bool)spsOn_);
+
+    JitSpew(JitSpew_BaselineIC, "  toggling SPS %s for BaselineScript %p",
+            enable ? "on" : "off", this);
+
+    // Toggle the jump
+    CodeLocationLabel pushToggleLocation(method_, CodeOffsetLabel(spsPushToggleOffset_));
+    if (enable)
+        Assembler::ToggleToCmp(pushToggleLocation);
+    else
+        Assembler::ToggleToJmp(pushToggleLocation);
+#ifdef DEBUG
+    spsOn_ = enable;
+#endif
+}
+
 #ifdef JS_TRACE_LOGGING
 void
 BaselineScript::initTraceLogger(JSRuntime *runtime, JSScript *script)
 {
 #ifdef DEBUG
     traceLoggerScriptsEnabled_ = TraceLogTextIdEnabled(TraceLogger_Scripts);
     traceLoggerEngineEnabled_ = TraceLogTextIdEnabled(TraceLogger_Engine);
 #endif
@@ -929,39 +946,16 @@ BaselineScript::toggleTraceLoggerEngine(
 
 #if DEBUG
     traceLoggerEngineEnabled_ = enable;
 #endif
 }
 #endif
 
 void
-BaselineScript::toggleProfilerInstrumentation(bool enable)
-{
-    if (enable == isProfilerInstrumentationOn())
-        return;
-
-    JitSpew(JitSpew_BaselineIC, "  toggling profiling %s for BaselineScript %p",
-            enable ? "on" : "off", this);
-
-    // Toggle the jump
-    CodeLocationLabel enterToggleLocation(method_, CodeOffsetLabel(profilerEnterToggleOffset_));
-    CodeLocationLabel exitToggleLocation(method_, CodeOffsetLabel(profilerExitToggleOffset_));
-    if (enable) {
-        Assembler::ToggleToCmp(enterToggleLocation);
-        Assembler::ToggleToCmp(exitToggleLocation);
-        flags_ |= uint32_t(PROFILER_INSTRUMENTATION_ON);
-    } else {
-        Assembler::ToggleToJmp(enterToggleLocation);
-        Assembler::ToggleToJmp(exitToggleLocation);
-        flags_ &= ~uint32_t(PROFILER_INSTRUMENTATION_ON);
-    }
-}
-
-void
 BaselineScript::purgeOptimizedStubs(Zone *zone)
 {
     JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
 
     for (size_t i = 0; i < numICEntries(); i++) {
         ICEntry &entry = icEntry(i);
         if (!entry.hasStub())
             continue;
@@ -1046,24 +1040,24 @@ void
 jit::AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data,
                            size_t *fallbackStubs)
 {
     if (script->hasBaselineScript())
         script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
 }
 
 void
-jit::ToggleBaselineProfiling(JSRuntime *runtime, bool enable)
+jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable)
 {
     for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
         for (gc::ZoneCellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
             if (!script->hasBaselineScript())
                 continue;
-            script->baselineScript()->toggleProfilerInstrumentation(enable);
+            script->baselineScript()->toggleSPS(enable);
         }
     }
 }
 
 #ifdef JS_TRACE_LOGGING
 void
 jit::ToggleBaselineTraceLoggerScripts(JSRuntime *runtime, bool enable)
 {
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -134,19 +134,21 @@ struct BaselineScript
 
     // Native code offset right before the scope chain is initialized.
     uint32_t prologueOffset_;
 
     // Native code offset right before the frame is popped and the method
     // returned from.
     uint32_t epilogueOffset_;
 
-    // The offsets for the toggledJump instructions for profiler instrumentation.
-    uint32_t profilerEnterToggleOffset_;
-    uint32_t profilerExitToggleOffset_;
+    // The offset of the toggledJump instruction for the SPS push IC.
+#ifdef DEBUG
+    mozilla::DebugOnly<bool> spsOn_;
+#endif
+    uint32_t spsPushToggleOffset_;
 
     // The offsets and event used for Tracelogger toggling.
 #ifdef JS_TRACE_LOGGING
 # ifdef DEBUG
     bool traceLoggerScriptsEnabled_;
     bool traceLoggerEngineEnabled_;
 # endif
     uint32_t traceLoggerEnterToggleOffset_;
@@ -178,20 +180,17 @@ struct BaselineScript
 
         // Flag set when compiled for use with Debugger. Handles various
         // Debugger hooks and compiles toggled calls for traps.
         HAS_DEBUG_INSTRUMENTATION = 1 << 3,
 
         // Flag set if this script has ever been Ion compiled, either directly
         // or inlined into another script. This is cleared when the script's
         // type information or caches are cleared.
-        ION_COMPILED_OR_INLINED = 1 << 4,
-
-        // Flag is set if this script has profiling instrumentation turned on.
-        PROFILER_INSTRUMENTATION_ON = 1 << 5
+        ION_COMPILED_OR_INLINED = 1 << 4
     };
 
   private:
     uint32_t flags_;
 
   private:
     void trace(JSTracer *trc);
 
@@ -210,30 +209,24 @@ struct BaselineScript
 
     // For generator scripts, we store the native code address for each yield
     // instruction.
     uint32_t yieldEntriesOffset_;
 
   public:
     // Do not call directly, use BaselineScript::New. This is public for cx->new_.
     BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
-                   uint32_t profilerEnterToggleOffset,
-                   uint32_t profilerExitToggleOffset,
-                   uint32_t traceLoggerEnterToggleOffset,
-                   uint32_t traceLoggerExitToggleOffset,
-                   uint32_t postDebugPrologueOffset);
+                   uint32_t spsPushToggleOffset, uint32_t traceLoggerEnterToggleOffset,
+                   uint32_t traceLoggerExitToggleOffset, uint32_t postDebugPrologueOffset);
 
     static BaselineScript *New(JSScript *jsscript, uint32_t prologueOffset,
                                uint32_t epilogueOffset, uint32_t postDebugPrologueOffset,
-                               uint32_t profilerEnterToggleOffset,
-                               uint32_t profilerExitToggleOffset,
-                               uint32_t traceLoggerEnterToggleOffset,
-                               uint32_t traceLoggerExitToggleOffset,
-                               size_t icEntries, size_t pcMappingIndexEntries,
-                               size_t pcMappingSize,
+                               uint32_t spsPushToggleOffset, uint32_t traceLoggerEnterToggleOffset,
+                               uint32_t traceLoggerExitToggleOffset, size_t icEntries,
+                               size_t pcMappingIndexEntries, size_t pcMappingSize,
                                size_t bytecodeTypeMapEntries, size_t yieldEntries);
 
     static void Trace(JSTracer *trc, BaselineScript *script);
     static void Destroy(FreeOp *fop, BaselineScript *script);
 
     void purgeOptimizedStubs(Zone *zone);
 
     static inline size_t offsetOfMethod() {
@@ -387,20 +380,17 @@ struct BaselineScript
     void unlinkDependentAsmJSModules(FreeOp *fop);
     void removeDependentAsmJSModule(DependentAsmJSModuleExit exit);
 
     // Toggle debug traps (used for breakpoints and step mode) in the script.
     // If |pc| is nullptr, toggle traps for all ops in the script. Else, only
     // toggle traps at |pc|.
     void toggleDebugTraps(JSScript *script, jsbytecode *pc);
 
-    void toggleProfilerInstrumentation(bool enable);
-    bool isProfilerInstrumentationOn() const {
-        return flags_ & PROFILER_INSTRUMENTATION_ON;
-    }
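+    // Toggle the SPS push IC emitted by BaselineCompiler::emitSPSPush on or off.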
+    void toggleSPS(bool enable);
 
 #ifdef JS_TRACE_LOGGING
     void initTraceLogger(JSRuntime *runtime, JSScript *script);
     void toggleTraceLoggerScripts(JSRuntime *runtime, JSScript *script, bool enable);
     void toggleTraceLoggerEngine(bool enable);
 
     static size_t offsetOfTraceLoggerScriptEvent() {
         return offsetof(BaselineScript, traceLoggerScriptEvent_);
@@ -452,17 +442,17 @@ EnterBaselineAtBranch(JSContext *cx, Int
 void
 FinishDiscardBaselineScript(FreeOp *fop, JSScript *script);
 
 void
 AddSizeOfBaselineData(JSScript *script, mozilla::MallocSizeOf mallocSizeOf, size_t *data,
                       size_t *fallbackStubs);
 
 void
-ToggleBaselineProfiling(JSRuntime *runtime, bool enable);
+ToggleBaselineSPS(JSRuntime *runtime, bool enable);
 
 void
 ToggleBaselineTraceLoggerScripts(JSRuntime *runtime, bool enable);
 void
 ToggleBaselineTraceLoggerEngine(JSRuntime *runtime, bool enable);
 
 struct BaselineBailoutInfo
 {
@@ -504,17 +494,18 @@ struct BaselineBailoutInfo
 
     // The bailout kind.
     BailoutKind bailoutKind;
 };
 
 uint32_t
 BailoutIonToBaseline(JSContext *cx, JitActivation *activation, JitFrameIterator &iter,
                      bool invalidate, BaselineBailoutInfo **bailoutInfo,
-                     const ExceptionBailoutInfo *exceptionInfo);
+                     const ExceptionBailoutInfo *exceptionInfo,
+                     bool *poppedLastSPSFrame);
 
 // Mark baseline scripts on the stack as active, so that they are not discarded
 // during GC.
 void
 MarkActiveBaselineScripts(Zone *zone);
 
 MethodStatus
 BaselineCompile(JSContext *cx, JSScript *script, bool forceDebugInstrumentation = false);
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -1977,31 +1977,25 @@ CodeGenerator::visitReturn(LReturn *lir)
     // Don't emit a jump to the return label if this is the last block.
     if (current->mir() != *gen->graph().poBegin())
         masm.jump(&returnLabel_);
 }
 
 void
 CodeGenerator::visitOsrEntry(LOsrEntry *lir)
 {
-    Register temp = ToRegister(lir->temp());
-
     // Remember the OSR entry offset into the code buffer.
     masm.flushBuffer();
     setOsrEntryOffset(masm.size());
 
 #ifdef JS_TRACE_LOGGING
     emitTracelogStopEvent(TraceLogger_Baseline);
     emitTracelogStartEvent(TraceLogger_IonMonkey);
 #endif
 
-    // If profiling, save the current frame pointer to a per-thread global field.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerEnterFrame(StackPointer, temp);
-
     // Allocate the full frame for this function
     // Note we have a new entry here. So we reset MacroAssembler::framePushed()
     // to 0, before reserving the stack.
     MOZ_ASSERT(masm.framePushed() == frameSize());
     masm.setFramePushed(0);
     masm.reserveStack(frameSize());
 }
 
@@ -3649,17 +3643,17 @@ CodeGenerator::emitObjectOrStringResultC
         break;
       case MIRType_Symbol:
         callee = JS_FUNC_TO_DATA_PTR(void *, AssertValidSymbolPtr);
         break;
       default:
         MOZ_CRASH();
     }
 
-    masm.callWithABI(callee);
+    masm.callWithABINoProfiling(callee);
     restoreVolatile();
 
     masm.bind(&done);
     masm.pop(temp);
 }
 
 void
 CodeGenerator::emitValueResultChecks(LInstruction *lir, MDefinition *mir)
@@ -3715,17 +3709,17 @@ CodeGenerator::emitValueResultChecks(LIn
 
     masm.pushValue(output);
     masm.movePtr(StackPointer, temp1);
 
     masm.setupUnalignedABICall(2, temp2);
     masm.loadJSContext(temp2);
     masm.passABIArg(temp2);
     masm.passABIArg(temp1);
-    masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, AssertValidValue));
+    masm.callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, AssertValidValue));
     masm.popValue(output);
     restoreVolatile();
 
     masm.bind(&done);
     masm.pop(temp2);
     masm.pop(temp1);
 }
 
@@ -6928,16 +6922,19 @@ CodeGenerator::visitRest(LRest *lir)
     emitRest(lir, temp2, numActuals, temp0, temp1, numFormals, templateObject, false, ToRegister(lir->output()));
 }
 
 bool
 CodeGenerator::generateAsmJS(AsmJSFunctionLabels *labels)
 {
     JitSpew(JitSpew_Codegen, "# Emitting asm.js code");
 
+    // AsmJS doesn't do SPS instrumentation.
+    sps_.disable();
+
     if (!omitOverRecursedCheck())
         labels->overflowThunk.emplace();
 
     GenerateAsmJSFunctionPrologue(masm, frameSize(), labels);
 
     if (!generateBody())
         return false;
 
@@ -7154,17 +7151,17 @@ CodeGenerator::link(JSContext *cx, types
     // read barriers which were skipped while compiling the script off thread.
     Linker linker(masm);
     AutoFlushICache afc("IonLink");
     JitCode *code = linker.newCodeForIonScript(cx);
     if (!code)
         return false;
 
     // Encode native to bytecode map if profiling is enabled.
-    if (isProfilerInstrumentationEnabled()) {
+    if (isNativeToBytecodeMapEnabled()) {
         // Generate native-to-bytecode main table.
         if (!generateCompactNativeToBytecodeMap(cx, code))
             return false;
 
         uint8_t *ionTableAddr = ((uint8_t *) nativeToBytecodeMap_) + nativeToBytecodeTableOffset_;
         JitcodeIonTable *ionTable = (JitcodeIonTable *) ionTableAddr;
 
         // Construct the IonEntry that will go into the global table.
@@ -7177,32 +7174,17 @@ CodeGenerator::link(JSContext *cx, types
             return false;
         }
 
         // nativeToBytecodeScriptList_ is no longer needed.
         js_free(nativeToBytecodeScriptList_);
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry, cx->runtime())) {
-            // Memory may have been allocated for the entry.
-            entry.destroy();
-            return false;
-        }
-
-        // Mark the jitcode as having a bytecode map.
-        code->setHasBytecodeMap();
-    } else {
-        // Add a dummy jitcodeGlobalTable entry.
-        JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
-
-        // Add entry to the global table.
-        JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry, cx->runtime())) {
+        if (!globalTable->addEntry(entry)) {
             // Memory may have been allocated for the entry.
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     }
@@ -7219,18 +7201,18 @@ CodeGenerator::link(JSContext *cx, types
         cx->runtime()->spsProfiler.markEvent(buf);
         js_free(buf);
     }
 
     ionScript->setMethod(code);
     ionScript->setSkipArgCheckEntryOffset(getSkipArgCheckEntryOffset());
 
     // If SPS is enabled, mark IonScript as having been instrumented with SPS
-    if (isProfilerInstrumentationEnabled())
-        ionScript->setHasProfilingInstrumentation();
+    if (sps_.enabled())
+        ionScript->setHasSPSInstrumentation();
 
     SetIonScript(cx, script, executionMode, ionScript);
 
     invalidateEpilogueData_.fixup(&masm);
     Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, invalidateEpilogueData_),
                                        ImmPtr(ionScript),
                                        ImmPtr((void*)-1));
 
@@ -8906,16 +8888,59 @@ CodeGenerator::visitSetDOMProperty(LSetD
 
     masm.branchIfFalseBool(ReturnReg, masm.exceptionLabel());
 
     masm.adjustStack(IonDOMExitFrameLayout::Size());
 
     MOZ_ASSERT(masm.framePushed() == initialStack);
 }
 
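+// VM functions used by visitProfilerStackOp below when SPS slow assertions are enabled.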
+typedef bool(*SPSFn)(JSContext *, HandleScript);
+static const VMFunction SPSEnterInfo = FunctionInfo<SPSFn>(SPSEnter);
+static const VMFunction SPSExitInfo = FunctionInfo<SPSFn>(SPSExit);
+
+void
+CodeGenerator::visitProfilerStackOp(LProfilerStackOp *lir)
+{
+    Register temp = ToRegister(lir->temp()->output());
+
+    switch (lir->type()) {
+        case MProfilerStackOp::Enter:
+            if (gen->options.spsSlowAssertionsEnabled()) {
+                saveLive(lir);
+                pushArg(ImmGCPtr(lir->script()));
+                callVM(SPSEnterInfo, lir);
+                restoreLive(lir);
+                sps_.pushManual(lir->script(), masm, temp, /* inlinedFunction = */ false);
+            } else {
+                masm.propagateOOM(sps_.push(lir->script(), masm, temp,
+                                            /* inlinedFunction = */ false));
+            }
+            return;
+
+        case MProfilerStackOp::Exit:
+            if (gen->options.spsSlowAssertionsEnabled()) {
+                saveLive(lir);
+                pushArg(ImmGCPtr(lir->script()));
+                // Once we've exited, then we shouldn't emit instrumentation for
+                // the corresponding reenter() because we no longer have a
+                // frame.
+                sps_.skipNextReenter();
+                callVM(SPSExitInfo, lir);
+                restoreLive(lir);
+            } else {
+                sps_.pop(masm, temp, /* inlinedFunction = */ false);
+            }
+            return;
+
+        default:
+            MOZ_CRASH("invalid LProfilerStackOp type");
+    }
+}
+
 class OutOfLineIsCallable : public OutOfLineCodeBase<CodeGenerator>
 {
     LIsCallable *ins_;
 
   public:
     explicit OutOfLineIsCallable(LIsCallable *ins)
       : ins_(ins)
     { }
--- a/js/src/jit/CodeGenerator.h
+++ b/js/src/jit/CodeGenerator.h
@@ -288,16 +288,17 @@ class CodeGenerator : public CodeGenerat
     void visitBitNotV(LBitNotV *lir);
     void visitBitOpV(LBitOpV *lir);
     void emitInstanceOf(LInstruction *ins, JSObject *prototypeObject);
     void visitIn(LIn *ins);
     void visitInArray(LInArray *ins);
     void visitInstanceOfO(LInstanceOfO *ins);
     void visitInstanceOfV(LInstanceOfV *ins);
     void visitCallInstanceOf(LCallInstanceOf *ins);
+    void visitProfilerStackOp(LProfilerStackOp *lir);
     void visitGetDOMProperty(LGetDOMProperty *lir);
     void visitGetDOMMemberV(LGetDOMMemberV *lir);
     void visitGetDOMMemberT(LGetDOMMemberT *lir);
     void visitSetDOMProperty(LSetDOMProperty *lir);
     void visitCallDOMNative(LCallDOMNative *lir);
     void visitCallGetIntrinsicValue(LCallGetIntrinsicValue *lir);
     void visitIsCallable(LIsCallable *lir);
     void visitOutOfLineIsCallable(OutOfLineIsCallable *ool);
--- a/js/src/jit/CompileWrappers.cpp
+++ b/js/src/jit/CompileWrappers.cpp
@@ -36,28 +36,16 @@ CompileRuntime::mainThread()
 
 const void *
 CompileRuntime::addressOfJitTop()
 {
     return &runtime()->mainThread.jitTop;
 }
 
 const void *
-CompileRuntime::addressOfJitActivation()
-{
-    return &runtime()->mainThread.jitActivation;
-}
-
-const void *
-CompileRuntime::addressOfProfilingActivation()
-{
-    return (const void *) &runtime()->mainThread.profilingActivation_;
-}
-
-const void *
 CompileRuntime::addressOfJitStackLimit()
 {
     return runtime()->mainThread.addressOfJitStackLimit();
 }
 
 const void *
 CompileRuntime::addressOfJSContext()
 {
--- a/js/src/jit/CompileWrappers.h
+++ b/js/src/jit/CompileWrappers.h
@@ -29,22 +29,16 @@ class CompileRuntime
 
     bool onMainThread();
 
     js::PerThreadData *mainThread();
 
     // &mainThread.jitTop
     const void *addressOfJitTop();
 
-    // &mainThread.jitActivation
-    const void *addressOfJitActivation();
-
-    // &mainThread.profilingActivation
-    const void *addressOfProfilingActivation();
-
     // rt->mainThread.jitStackLimit;
     const void *addressOfJitStackLimit();
 
     // &mainThread.jitJSContext
     const void *addressOfJSContext();
 
     // &mainThread.activation_
     const void *addressOfActivation();
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -148,17 +148,16 @@ jit::InitializeIon()
     return true;
 }
 
 JitRuntime::JitRuntime()
   : execAlloc_(nullptr),
     ionAlloc_(nullptr),
     exceptionTail_(nullptr),
     bailoutTail_(nullptr),
-    profilerExitFrameTail_(nullptr),
     enterJIT_(nullptr),
     bailoutHandler_(nullptr),
     argumentsRectifier_(nullptr),
     argumentsRectifierReturnAddr_(nullptr),
     invalidator_(nullptr),
     debugTrapHandler_(nullptr),
     baselineDebugModeOSRHandler_(nullptr),
     functionWrappers_(nullptr),
@@ -198,21 +197,16 @@ JitRuntime::initialize(JSContext *cx)
 
     if (!cx->compartment()->ensureJitCompartmentExists(cx))
         return false;
 
     functionWrappers_ = cx->new_<VMWrapperMap>(cx);
     if (!functionWrappers_ || !functionWrappers_->init())
         return false;
 
-    JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
-    profilerExitFrameTail_ = generateProfilerExitFrameTailStub(cx);
-    if (!profilerExitFrameTail_)
-        return false;
-
     JitSpew(JitSpew_Codegen, "# Emitting exception tail stub");
 
     void *handler = JS_FUNC_TO_DATA_PTR(void *, jit::HandleException);
 
     exceptionTail_ = generateExceptionTailStub(cx, handler);
     if (!exceptionTail_)
         return false;
 
@@ -656,28 +650,27 @@ JitCode::trace(JSTracer *trc)
         CompactBufferReader reader(start, start + dataRelocTableBytes_);
         MacroAssembler::TraceDataRelocations(trc, this, reader);
     }
 }
 
 void
 JitCode::finalize(FreeOp *fop)
 {
-    JSRuntime *rt = fop->runtime();
-
     // If this jitcode has a bytecode map, de-register it.
     if (hasBytecodeMap_) {
-        MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
-        rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
+        MOZ_ASSERT(fop->runtime()->jitRuntime()->hasJitcodeGlobalTable());
+        fop->runtime()->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw());
     }
 
     // Buffer can be freed at any time hereafter. Catch use-after-free bugs.
     // Don't do this if the Ion code is protected, as the signal handler will
     // deadlock trying to reacquire the interrupt lock.
-    memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
+    if (fop->runtime()->jitRuntime())
+        memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
     code_ = nullptr;
 
     // Code buffers are stored inside JSC pools.
     // Pools are refcounted. Releasing the pool may free it.
     if (pool_) {
         // Horrible hack: if we are using perf integration, we don't
         // want to reuse code addresses, so we just leak the memory instead.
         if (!PerfEnabled())
@@ -706,17 +699,17 @@ IonScript::IonScript()
   : method_(nullptr),
     deoptTable_(nullptr),
     osrPc_(nullptr),
     osrEntryOffset_(0),
     skipArgCheckEntryOffset_(0),
     invalidateEpilogueOffset_(0),
     invalidateEpilogueDataOffset_(0),
     numBailouts_(0),
-    hasProfilingInstrumentation_(false),
+    hasSPSInstrumentation_(false),
     recompiling_(false),
     runtimeData_(0),
     runtimeSize_(0),
     cacheIndex_(0),
     cacheEntries_(0),
     safepointIndexOffset_(0),
     safepointIndexEntries_(0),
     safepointsStart_(0),
--- a/js/src/jit/IonBuilder.cpp
+++ b/js/src/jit/IonBuilder.cpp
@@ -703,16 +703,18 @@ IonBuilder::build()
     if (info().hasArguments()) {
         MInstruction *argsObj = MConstant::New(alloc(), UndefinedValue());
         current->add(argsObj);
         current->initSlot(info().argsObjSlot(), argsObj);
     }
 
     // Emit the start instruction, so we can begin real instructions.
     current->add(MStart::New(alloc(), MStart::StartType_Default));
+    if (instrumentedProfiling())
+        current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Enter));
 
     // Guard against over-recursion. Do this before we start unboxing, since
     // this will create an OSI point that will read the incoming argument
     // values, which is nice to do before their last real use, to minimize
     // register/stack pressure.
     MCheckOverRecursed *check = MCheckOverRecursed::New(alloc());
     current->add(check);
     check->setResumePoint(MResumePoint::Copy(alloc(), current->entryResumePoint()));
@@ -4068,16 +4070,19 @@ IonBuilder::processReturn(JSOp op)
         def = current->getSlot(info().returnValueSlot());
         break;
 
       default:
         def = nullptr;
         MOZ_CRASH("unknown return op");
     }
 
+    if (instrumentedProfiling() && inliningDepth_ == 0) {
+        current->add(MProfilerStackOp::New(alloc(), script(), MProfilerStackOp::Exit));
+    }
     MReturn *ret = MReturn::New(alloc(), def);
     current->end(ret);
 
     if (!graph().addReturn(current))
         return ControlStatus_Error;
 
     // Make sure no one tries to use this block now.
     setCurrent(nullptr);
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -424,36 +424,23 @@ IonCache::linkAndAttachStub(JSContext *c
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "IonCache");
 #endif
 
     attachStub(masm, attacher, code);
 
     // Add entry to native => bytecode mapping for this stub if needed.
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
+    if (cx->runtime()->jitRuntime()->isNativeToBytecodeMapEnabled(cx->runtime())) {
         JitcodeGlobalEntry::IonCacheEntry entry;
-        entry.init(code->raw(), code->rawEnd(), rejoinAddress());
+        entry.init(code->raw(), code->raw() + code->instructionsSize(), rejoinAddress());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry, cx->runtime())) {
-            entry.destroy();
-            return false;
-        }
-
-        // Mark the jitcode as having a bytecode map.
-        code->setHasBytecodeMap();
-    } else {
-        JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
-
-        // Add entry to the global table.
-        JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry, cx->runtime())) {
+        if (!globalTable->addEntry(entry)) {
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     }
 
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -93,20 +93,16 @@ class JitCode : public gc::TenuredCell
 
   public:
     uint8_t *raw() const {
         return code_;
     }
     uint8_t *rawEnd() const {
         return code_ + insnSize_;
     }
-    bool containsNativePC(const void *addr) const {
-        const uint8_t *addr_u8 = (const uint8_t *) addr;
-        return raw() <= addr_u8 && addr_u8 < rawEnd();
-    }
     size_t instructionsSize() const {
         return insnSize_;
     }
     void trace(JSTracer *trc);
     void finalize(FreeOp *fop);
     void fixupAfterMovingGC() {}
     void setInvalidated() {
         invalidated_ = true;
@@ -192,18 +188,18 @@ struct IonScript
     // NOTE: technically a constant delta from
     // |invalidateEpilogueOffset_|, so we could hard-code this
     // per-platform if we want.
     uint32_t invalidateEpilogueDataOffset_;
 
     // Number of times this script bailed out without invalidation.
     uint32_t numBailouts_;
 
-    // Flag set if IonScript was compiled with profiling enabled.
-    bool hasProfilingInstrumentation_;
+    // Flag set if IonScript was compiled with SPS profiling enabled.
+    bool hasSPSInstrumentation_;
 
     // Flag for if this script is getting recompiled.
     uint32_t recompiling_;
 
     // Any kind of data needed by the runtime, these can be either cache
     // information or profiling info.
     uint32_t runtimeData_;
     uint32_t runtimeSize_;
@@ -411,24 +407,24 @@ struct IonScript
         numBailouts_++;
     }
     uint32_t numBailouts() const {
         return numBailouts_;
     }
     bool bailoutExpected() const {
         return numBailouts_ > 0;
     }
-    void setHasProfilingInstrumentation() {
-        hasProfilingInstrumentation_ = true;
+    void setHasSPSInstrumentation() {
+        hasSPSInstrumentation_ = true;
     }
-    void clearHasProfilingInstrumentation() {
-        hasProfilingInstrumentation_ = false;
+    void clearHasSPSInstrumentation() {
+        hasSPSInstrumentation_ = false;
     }
-    bool hasProfilingInstrumentation() const {
-        return hasProfilingInstrumentation_;
+    bool hasSPSInstrumentation() const {
+        return hasSPSInstrumentation_;
     }
     void setTraceLoggerEvent(TraceLoggerEvent &event) {
         traceLoggerScriptEvent_ = event;
     }
     const uint8_t *snapshots() const {
         return reinterpret_cast<const uint8_t *>(this) + snapshots_;
     }
     size_t snapshotsListSize() const {
--- a/js/src/jit/IonInstrumentation.h
+++ b/js/src/jit/IonInstrumentation.h
@@ -14,20 +14,31 @@ class SPSProfiler;
 namespace jit {
 
 class MacroAssembler;
 
 typedef SPSInstrumentation<MacroAssembler, Register> BaseInstrumentation;
 
 class IonInstrumentation : public BaseInstrumentation
 {
+    jsbytecode **trackedPc_;
+
   public:
     IonInstrumentation(SPSProfiler *profiler, jsbytecode **pc)
-      : BaseInstrumentation(profiler)
+      : BaseInstrumentation(profiler),
+        trackedPc_(pc)
     {
         MOZ_ASSERT(pc != nullptr);
     }
+
+    void leave(MacroAssembler &masm, Register reg, bool inlinedFunction = false) {
+        BaseInstrumentation::leave(*trackedPc_, masm, reg, inlinedFunction);
+    }
+
+    bool enterInlineFrame() {
+        return BaseInstrumentation::enterInlineFrame(*trackedPc_);
+    }
 };
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_IonInstrumentation_h */
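// Editorial note: an illustrative, standalone sketch (not part of the patch)
// of the indirection IonInstrumentation uses above -- it stores a pointer to
// the code generator's current-pc slot so that leave() reads the latest pc at
// emit time. Tracker and emitLeave are hypothetical names.
#include <cassert>
#include <cstdio>

class Tracker {
    const int *const *trackedPc_;   // points at the caller's "current pc" slot
  public:
    explicit Tracker(const int *const *pc) : trackedPc_(pc) { assert(pc); }
    void emitLeave() const {
        // Dereference at call time, so the most recently recorded pc is used.
        std::printf("leave at pc=%p\n", (void *) *trackedPc_);
    }
};

int main() {
    int bytecode[4] = {0, 1, 2, 3};
    const int *currentPc = &bytecode[0];
    Tracker tracker(&currentPc);
    tracker.emitLeave();            // reports &bytecode[0]
    currentPc = &bytecode[3];       // the "code generator" advances its pc
    tracker.emitLeave();            // reports &bytecode[3]; no re-plumbing needed
    return 0;
}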
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -153,19 +153,16 @@ class JitRuntime
     ExecutableAllocator *ionAlloc_;
 
     // Shared exception-handler tail.
     JitCode *exceptionTail_;
 
     // Shared post-bailout-handler tail.
     JitCode *bailoutTail_;
 
-    // Shared profiler exit frame tail.
-    JitCode *profilerExitFrameTail_;
-
     // Trampoline for entering JIT code. Contains OSR prologue.
     JitCode *enterJIT_;
 
     // Trampoline for entering baseline JIT code.
     JitCode *enterBaselineJIT_;
 
     // Vector mapping frame class sizes to bailout tables.
     Vector<JitCode*, 4, SystemAllocPolicy> bailoutTables_;
@@ -232,17 +229,16 @@ class JitRuntime
     // their callee.
     js::Value ionReturnOverride_;
 
     // Global table of jitcode native address => bytecode address mappings.
     JitcodeGlobalTable *jitcodeGlobalTable_;
 
   private:
     JitCode *generateLazyLinkStub(JSContext *cx);
-    JitCode *generateProfilerExitFrameTailStub(JSContext *cx);
     JitCode *generateExceptionTailStub(JSContext *cx, void *handler);
     JitCode *generateBailoutTailStub(JSContext *cx);
     JitCode *generateEnterJIT(JSContext *cx, EnterJitType type);
     JitCode *generateArgumentsRectifier(JSContext *cx, void **returnAddrOut);
     JitCode *generateBailoutTable(JSContext *cx, uint32_t frameClass);
     JitCode *generateBailoutHandler(JSContext *cx);
     JitCode *generateInvalidator(JSContext *cx);
     JitCode *generatePreBarrier(JSContext *cx, MIRType type);
@@ -322,20 +318,16 @@ class JitRuntime
     JitCode *getExceptionTail() const {
         return exceptionTail_;
     }
 
     JitCode *getBailoutTail() const {
         return bailoutTail_;
     }
 
-    JitCode *getProfilerExitFrameTail() const {
-        return profilerExitFrameTail_;
-    }
-
     JitCode *getBailoutTable(const FrameSizeClass &frameClass) const;
 
     JitCode *getArgumentsRectifier() const {
         return argumentsRectifier_;
     }
 
     void *getArgumentsRectifierReturnAddr() const {
         return argumentsRectifierReturnAddr_;
@@ -394,18 +386,22 @@ class JitRuntime
         return jitcodeGlobalTable_ != nullptr;
     }
 
     JitcodeGlobalTable *getJitcodeGlobalTable() {
         MOZ_ASSERT(hasJitcodeGlobalTable());
         return jitcodeGlobalTable_;
     }
 
-    bool isProfilerInstrumentationEnabled(JSRuntime *rt) {
+    bool isNativeToBytecodeMapEnabled(JSRuntime *rt) {
+#ifdef DEBUG
+        return true;
+#else // DEBUG
         return rt->spsProfiler.enabled();
+#endif // DEBUG
     }
 };
 
 class JitZone
 {
     // Allocated space for optimized baseline stubs.
     OptimizedICStubSpace optimizedStubSpace_;
 
--- a/js/src/jit/JitFrameIterator-inl.h
+++ b/js/src/jit/JitFrameIterator-inl.h
@@ -11,29 +11,16 @@
 
 #include "jit/Bailouts.h"
 #include "jit/BaselineFrame.h"
 #include "jit/JitFrames.h"
 
 namespace js {
 namespace jit {
 
-inline JitFrameLayout *
-JitProfilingFrameIterator::framePtr()
-{
-    MOZ_ASSERT(!done());
-    return (JitFrameLayout *) fp_;
-}
-
-inline JSScript *
-JitProfilingFrameIterator::frameScript()
-{
-    return ScriptFromCalleeToken(framePtr()->calleeToken());
-}
-
 inline BaselineFrame *
 JitFrameIterator::baselineFrame() const
 {
     MOZ_ASSERT(isBaselineJS());
     return (BaselineFrame *)(fp() - BaselineFrame::FramePointerOffset - BaselineFrame::Size());
 }
 
 template <typename T>
--- a/js/src/jit/JitFrameIterator.h
+++ b/js/src/jit/JitFrameIterator.h
@@ -9,18 +9,16 @@
 
 #include "jsfun.h"
 #include "jsscript.h"
 #include "jstypes.h"
 
 #include "jit/IonCode.h"
 #include "jit/Snapshots.h"
 
-#include "js/ProfilingFrameIterator.h"
-
 namespace js {
     class ActivationIterator;
 };
 
 namespace js {
 namespace jit {
 
 enum FrameType
@@ -252,43 +250,16 @@ class JitFrameIterator
 
 #ifdef DEBUG
     bool verifyReturnAddressUsingNativeToBytecodeMap();
 #else
     inline bool verifyReturnAddressUsingNativeToBytecodeMap() { return true; }
 #endif
 };
 
-class JitcodeGlobalTable;
-
-class JitProfilingFrameIterator
-{
-    uint8_t *fp_;
-    FrameType type_;
-    void *returnAddressToFp_;
-
-    inline JitFrameLayout *framePtr();
-    inline JSScript *frameScript();
-    bool tryInitWithPC(void *pc);
-    bool tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt);
-
-  public:
-    JitProfilingFrameIterator(JSRuntime *rt,
-                              const JS::ProfilingFrameIterator::RegisterState &state);
-    explicit JitProfilingFrameIterator(void *exitFrame);
-
-    void operator++();
-    bool done() const { return fp_ == nullptr; }
-
-    void *fp() const { MOZ_ASSERT(!done()); return fp_; }
-    void *stackAddress() const { return fp(); }
-    FrameType frameType() const { MOZ_ASSERT(!done()); return type_; }
-    void *returnAddressToFp() const { MOZ_ASSERT(!done()); return returnAddressToFp_; }
-};
-
 class RInstructionResults
 {
     // Vector of results of recover instructions.
     typedef mozilla::Vector<RelocatableValue, 1, SystemAllocPolicy> Values;
     mozilla::UniquePtr<Values, JS::DeletePolicy<Values> > results_;
 
     // The frame pointer is used as a key to check if the current frame already
     // bailed out.
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -24,17 +24,16 @@
 #include "jit/PcScriptCache.h"
 #include "jit/Recover.h"
 #include "jit/Safepoints.h"
 #include "jit/Snapshots.h"
 #include "jit/VMFunctions.h"
 #include "vm/ArgumentsObject.h"
 #include "vm/Debugger.h"
 #include "vm/Interpreter.h"
-#include "vm/SPSProfiler.h"
 #include "vm/TraceLogging.h"
 
 #include "jsinferinlines.h"
 #include "jsscriptinlines.h"
 #include "gc/Nursery-inl.h"
 #include "jit/JitFrameIterator-inl.h"
 #include "vm/Debugger-inl.h"
 #include "vm/Probes-inl.h"
@@ -374,17 +373,17 @@ CloseLiveIterator(JSContext *cx, const I
     if (cx->isExceptionPending())
         UnwindIteratorForException(cx, obj);
     else
         UnwindIteratorForUncatchableException(cx, obj);
 }
 
 static void
 HandleExceptionIon(JSContext *cx, const InlineFrameIterator &frame, ResumeFromException *rfe,
-                   bool *overrecursed)
+                   bool *overrecursed, bool *poppedLastSPSFrameOut)
 {
     RootedScript script(cx, frame.script());
     jsbytecode *pc = frame.pc();
 
     if (cx->compartment()->isDebuggee()) {
         // We need to bail when there is a catchable exception, and we are the
         // debuggee of a Debugger with a live onExceptionUnwind hook, or if a
         // Debugger has observed this frame (e.g., for onPop).
@@ -406,17 +405,18 @@ HandleExceptionIon(JSContext *cx, const 
             //      frame.
             //
             // An empty exception info denotes that we're propagating an Ion
             // exception due to debug mode, which BailoutIonToBaseline needs to
             // know. This is because we might not be able to fully reconstruct up
             // to the stack depth at the snapshot, as we could've thrown in the
             // middle of a call.
             ExceptionBailoutInfo propagateInfo;
-            uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, propagateInfo, overrecursed);
+            uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, propagateInfo, overrecursed,
+                                                      poppedLastSPSFrameOut);
             if (retval == BAILOUT_RETURN_OK)
                 return;
         }
     }
 
     if (!script->hasTrynotes())
         return;
 
@@ -448,17 +448,18 @@ HandleExceptionIon(JSContext *cx, const 
                 // Ion can compile try-catch, but bailing out to catch
                 // exceptions is slow. Reset the warm-up counter so that if we
                 // catch many exceptions we won't Ion-compile the script.
                 script->resetWarmUpCounter();
 
                 // Bailout at the start of the catch block.
                 jsbytecode *catchPC = script->main() + tn->start + tn->length;
                 ExceptionBailoutInfo excInfo(frame.frameNo(), catchPC, tn->stackDepth);
-                uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, excInfo, overrecursed);
+                uint32_t retval = ExceptionHandlerBailout(cx, frame, rfe, excInfo, overrecursed,
+                                                          poppedLastSPSFrameOut);
                 if (retval == BAILOUT_RETURN_OK)
                     return;
 
                 // Error on bailout clears pending exception.
                 MOZ_ASSERT(!cx->isExceptionPending());
             }
             break;
 
@@ -676,64 +677,22 @@ struct AutoDeleteDebugModeOSRInfo
 
 struct AutoClearBaselineOverridePc
 {
     BaselineFrame *frame;
     explicit AutoClearBaselineOverridePc(BaselineFrame *frame) : frame(frame) { MOZ_ASSERT(frame); }
     ~AutoClearBaselineOverridePc() { frame->clearOverridePc(); }
 };
 
-struct AutoResetLastProfilerFrameOnReturnFromException
-{
-    JSContext *cx;
-    ResumeFromException *rfe;
-
-    AutoResetLastProfilerFrameOnReturnFromException(JSContext *cx, ResumeFromException *rfe)
-      : cx(cx), rfe(rfe) {}
-
-    ~AutoResetLastProfilerFrameOnReturnFromException() {
-        if (!cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
-            return;
-
-        MOZ_ASSERT(cx->mainThread().jitActivation == cx->mainThread().profilingActivation());
-
-        void *lastProfilingFrame = getLastProfilingFrame();
-        cx->mainThread().jitActivation->setLastProfilingFrame(lastProfilingFrame);
-    }
-
-    void *getLastProfilingFrame() {
-        switch (rfe->kind) {
-          case ResumeFromException::RESUME_ENTRY_FRAME:
-            return nullptr;
-
-          // The following all return into baseline frames.
-          case ResumeFromException::RESUME_CATCH:
-          case ResumeFromException::RESUME_FINALLY:
-          case ResumeFromException::RESUME_FORCED_RETURN:
-            return rfe->framePointer + BaselineFrame::FramePointerOffset;
-
-          // When resuming into a bailed-out ion frame, use the bailout info to
-          // find the frame we are resuming into.
-          case ResumeFromException::RESUME_BAILOUT:
-            return rfe->bailoutInfo->incomingStack;
-        }
-
-        MOZ_CRASH("Invalid ResumeFromException type!");
-        return nullptr;
-    }
-};
-
 void
 HandleException(ResumeFromException *rfe)
 {
     JSContext *cx = GetJSContextFromJitCode();
     TraceLoggerThread *logger = TraceLoggerForMainThread(cx->runtime());
 
-    AutoResetLastProfilerFrameOnReturnFromException profFrameReset(cx, rfe);
-
     rfe->kind = ResumeFromException::RESUME_ENTRY_FRAME;
 
     JitSpew(JitSpew_IonInvalidate, "handling exception");
 
     // Clear any Ion return override that's been set.
     // This may happen if a callVM function causes an invalidation (setting the
     // override), and then fails, bypassing the bailout handlers that would
     // otherwise clear the return override.
@@ -756,33 +715,50 @@ HandleException(ResumeFromException *rfe
             // them.
             InlineFrameIterator frames(cx, &iter);
 
             // Invalidation state will be the same for all inlined scripts in the frame.
             IonScript *ionScript = nullptr;
             bool invalidated = iter.checkInvalidation(&ionScript);
 
             for (;;) {
-                HandleExceptionIon(cx, frames, rfe, &overrecursed);
+                bool poppedLastSPSFrame = false;
+                HandleExceptionIon(cx, frames, rfe, &overrecursed, &poppedLastSPSFrame);
 
                 if (rfe->kind == ResumeFromException::RESUME_BAILOUT) {
                     if (invalidated)
                         ionScript->decrementInvalidationCount(cx->runtime()->defaultFreeOp());
                     return;
                 }
 
                 MOZ_ASSERT(rfe->kind == ResumeFromException::RESUME_ENTRY_FRAME);
 
+                // Figure out whether an SPS frame was pushed for this frame.
+                // Even if the profiler is enabled, the frame being popped might
+                // have been entered before SPS was enabled, and thus not have
+                // a pushed SPS frame.
+                bool popSPSFrame = cx->runtime()->spsProfiler.enabled();
+                if (invalidated)
+                    popSPSFrame = ionScript->hasSPSInstrumentation();
+
+                // Don't pop an SPS frame for inlined frames, since they are not instrumented.
+                if (frames.more())
+                    popSPSFrame = false;
+
+                // Don't pop the last SPS frame if it's already been popped by
+                // bailing out.
+                if (poppedLastSPSFrame)
+                    popSPSFrame = false;
+
                 // When profiling, each frame popped needs a notification that
                 // the function has exited, so invoke the probe that a function
                 // is exiting.
 
                 JSScript *script = frames.script();
-                probes::ExitScript(cx, script, script->functionNonDelazifying(),
-                                   /* popSPSFrame = */ false);
+                probes::ExitScript(cx, script, script->functionNonDelazifying(), popSPSFrame);
                 if (!frames.more()) {
                     TraceLogStopEvent(logger, TraceLogger_IonMonkey);
                     TraceLogStopEvent(logger, TraceLogger_Scripts);
                     break;
                 }
                 ++frames;
             }
 
@@ -823,17 +799,21 @@ HandleException(ResumeFromException *rfe
                 return;
 
             TraceLogStopEvent(logger, TraceLogger_Baseline);
             TraceLogStopEvent(logger, TraceLogger_Scripts);
 
             // Unwind profiler pseudo-stack
             JSScript *script = iter.script();
             probes::ExitScript(cx, script, script->functionNonDelazifying(),
-                               /* popSPSFrame = */ false);
+                               iter.baselineFrame()->hasPushedSPSFrame());
+            // After this point, any pushed SPS frame would have been popped if it needed
+            // to be.  Unset the flag here so that if we call DebugEpilogue below,
+            // it doesn't try to pop the SPS frame again.
+            iter.baselineFrame()->unsetPushedSPSFrame();
 
             if (iter.baselineFrame()->isDebuggee() && !calledDebugEpilogue) {
                 // If we still need to call the DebugEpilogue, we must
                 // remember the pc we unwound the scope chain to, as it will
                 // be out of sync with the frame's actual pc.
                 if (unwoundScopeToPc)
                     iter.baselineFrame()->setOverridePc(unwoundScopeToPc);
 
@@ -2665,17 +2645,17 @@ JitFrameIterator::verifyReturnAddressUsi
 
     if (rt->isHeapMinorCollecting())
         return true;
 
     JitRuntime *jitrt = rt->jitRuntime();
 
     // Look up and print bytecode info for the native address.
     JitcodeGlobalEntry entry;
-    if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry, rt))
+    if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry))
         return true;
 
     JitSpew(JitSpew_Profiling, "Found nativeToBytecode entry for %p: %p - %p",
             returnAddressToFp_, entry.nativeStartAddr(), entry.nativeEndAddr());
 
     JitcodeGlobalEntry::BytecodeLocationVector location;
     uint32_t depth = UINT32_MAX;
     if (!entry.callStackAtAddr(rt, returnAddressToFp_, location, &depth))
@@ -2712,288 +2692,16 @@ JitFrameIterator::verifyReturnAddressUsi
                 ++inlineFrames;
         }
     }
 
     return true;
 }
 #endif // DEBUG
 
-JitProfilingFrameIterator::JitProfilingFrameIterator(
-        JSRuntime *rt, const JS::ProfilingFrameIterator::RegisterState &state)
-{
-    // If no profilingActivation is live, initialize directly to
-    // end-of-iteration state.
-    if (!rt->mainThread.profilingActivation()) {
-        type_ = JitFrame_Entry;
-        fp_ = nullptr;
-        returnAddressToFp_ = nullptr;
-        return;
-    }
-
-    MOZ_ASSERT(rt->mainThread.profilingActivation()->isJit());
-
-    JitActivation *act = rt->mainThread.profilingActivation()->asJit();
-
-    // If the top JitActivation has a null lastProfilingFrame, assume that
-    // it's a trivially empty activation, and initialize directly
-    // to end-of-iteration state.
-    if (!act->lastProfilingFrame()) {
-        type_ = JitFrame_Entry;
-        fp_ = nullptr;
-        returnAddressToFp_ = nullptr;
-        return;
-    }
-
-    // Get the fp from the current profilingActivation
-    fp_ = (uint8_t *) act->lastProfilingFrame();
-    void *lastCallSite = act->lastProfilingCallSite();
-
-    JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
-
-    // Profiler sampling must NOT be suppressed if we are here.
-    MOZ_ASSERT(rt->isProfilerSamplingEnabled());
-
-    // Since the frame is on stack, and is a jit frame, it MUST have Baseline jitcode.
-    MOZ_ASSERT(frameScript()->hasBaselineScript());
-
-    // Try initializing with sampler pc
-    if (tryInitWithPC(state.pc))
-        return;
-
-    // Try initializing with sampler pc using native=>bytecode table.
-    if (tryInitWithTable(table, state.pc, rt))
-        return;
-
-    // Try initializing with lastProfilingCallSite pc
-    if (lastCallSite) {
-        if (tryInitWithPC(lastCallSite))
-            return;
-
-        // Try initializing with lastProfilingCallSite pc using native=>bytecode table.
-        if (tryInitWithTable(table, lastCallSite, rt))
-            return;
-    }
-
-    // If nothing matches, for now just assume we are at the start of the last frame's
-    // baseline jit code.
-    type_ = JitFrame_BaselineJS;
-    returnAddressToFp_ = frameScript()->baselineScript()->method()->raw();
-    //++(*this);
-}
-
-template <typename FrameType, typename ReturnType=CommonFrameLayout*>
-inline ReturnType
-GetPreviousRawFrame(FrameType *frame)
-{
-    size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
-    return (ReturnType) (((uint8_t *) frame) + prevSize);
-}
-
-JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
-{
-    // Exit frame was en
-    ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
-    FrameType prevType = frame->prevType();
-
-    if (prevType == JitFrame_IonJS || prevType == JitFrame_BaselineJS ||
-        prevType == JitFrame_Unwound_IonJS)
-    {
-        returnAddressToFp_ = frame->returnAddress();
-        fp_ = GetPreviousRawFrame<ExitFrameLayout, uint8_t *>(frame);
-        type_ = JitFrame_IonJS;
-        return;
-    }
-
-    if (prevType == JitFrame_BaselineStub || prevType == JitFrame_Unwound_BaselineStub) {
-        BaselineStubFrameLayout *stubFrame =
-            GetPreviousRawFrame<ExitFrameLayout, BaselineStubFrameLayout *>(frame);
-        MOZ_ASSERT_IF(prevType == JitFrame_BaselineStub,
-                      stubFrame->prevType() == JitFrame_BaselineJS);
-        MOZ_ASSERT_IF(prevType == JitFrame_Unwound_BaselineStub,
-                      stubFrame->prevType() == JitFrame_BaselineJS ||
-                      stubFrame->prevType() == JitFrame_IonJS);
-        returnAddressToFp_ = stubFrame->returnAddress();
-        fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
-                + jit::BaselineFrame::FramePointerOffset;
-        type_ = JitFrame_BaselineJS;
-        return;
-    }
-
-    MOZ_CRASH("Invalid frame type prior to exit frame.");
-}
-
-bool
-JitProfilingFrameIterator::tryInitWithPC(void *pc)
-{
-    JSScript *callee = frameScript();
-
-    // Check for Ion first, since it's more likely for hot code.
-    if (callee->hasIonScript() && callee->ionScript()->method()->containsNativePC(pc)) {
-        type_ = JitFrame_IonJS;
-        returnAddressToFp_ = pc;
-        return true;
-    }
-
-    // Check for containment in Baseline jitcode second.
-    if (callee->baselineScript()->method()->containsNativePC(pc)) {
-        type_ = JitFrame_BaselineJS;
-        returnAddressToFp_ = pc;
-        return true;
-    }
-
-    return false;
-}
-
-bool
-JitProfilingFrameIterator::tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt)
-{
-    if (!pc)
-        return false;
-
-    JitcodeGlobalEntry entry;
-    if (!table->lookup(pc, &entry, rt))
-        return false;
-
-    JSScript *callee = frameScript();
-
-    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
-    if (entry.isIon()) {
-        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
-        if (entry.ionEntry().getScript(0) != callee)
-            return false;
-
-        type_ = JitFrame_IonJS;
-        returnAddressToFp_ = pc;
-        return true;
-    }
-
-    if (entry.isBaseline()) {
-        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
-        if (entry.baselineEntry().script() != callee)
-            return false;
-
-        type_ = JitFrame_BaselineJS;
-        returnAddressToFp_ = pc;
-        return true;
-    }
-
-    if (entry.isIonCache()) {
-        JitcodeGlobalEntry ionEntry;
-        table->lookupInfallible(entry.ionCacheEntry().rejoinAddr(), &ionEntry, rt);
-        MOZ_ASSERT(ionEntry.isIon());
-
-        if (ionEntry.ionEntry().getScript(0) != callee)
-            return false;
-
-        type_ = JitFrame_IonJS;
-        returnAddressToFp_ = entry.ionCacheEntry().rejoinAddr();
-        return true;
-    }
-
-    return false;
-}
-
-void
-JitProfilingFrameIterator::operator++()
-{
-    /*
-     * fp_ points to a Baseline or Ion frame.  The possible call-stacks
-     * patterns occurring between this frame and a previous Ion or Baseline
-     * frame are as follows:
-     *
-     * <Baseline-Or-Ion>
-     * ^
-     * |
-     * ^--- Ion
-     * |
-     * ^--- Baseline Stub <---- Baseline
-     * |
-     * ^--- Argument Rectifier
-     * |    ^
-     * |    |
-     * |    ^--- Ion
-     * |    |
-     * |    ^--- Baseline Stub <---- Baseline
-     * |
-     * ^--- Entry Frame (From C++)
-     *      Exit Frame (From previous JitActivation)
-     *      ^
-     *      |
-     *      ^--- Ion
-     *      |
-     *      ^--- Baseline
-     *      |
-     *      ^--- Baseline Stub <---- Baseline
-     */
-    JitFrameLayout *frame = framePtr();
-    FrameType prevType = frame->prevType();
-
-    if (prevType == JitFrame_IonJS) {
-        returnAddressToFp_ = frame->returnAddress();
-        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
-        type_ = JitFrame_IonJS;
-        return;
-    }
-
-    if (prevType == JitFrame_BaselineJS) {
-        returnAddressToFp_ = frame->returnAddress();
-        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
-        type_ = JitFrame_BaselineJS;
-        return;
-    }
-
-    if (prevType == JitFrame_BaselineStub) {
-        BaselineStubFrameLayout *stubFrame =
-            GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(frame);
-        MOZ_ASSERT(stubFrame->prevType() == JitFrame_BaselineJS);
-
-        returnAddressToFp_ = stubFrame->returnAddress();
-        fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
-                + jit::BaselineFrame::FramePointerOffset;
-        type_ = JitFrame_BaselineJS;
-        return;
-    }
-
-    if (prevType == JitFrame_Rectifier) {
-        RectifierFrameLayout *rectFrame =
-            GetPreviousRawFrame<JitFrameLayout, RectifierFrameLayout *>(frame);
-        FrameType rectPrevType = rectFrame->prevType();
-
-        if (rectPrevType == JitFrame_IonJS) {
-            returnAddressToFp_ = rectFrame->returnAddress();
-            fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(rectFrame);
-            type_ = JitFrame_IonJS;
-            return;
-        }
-
-        if (rectPrevType == JitFrame_BaselineStub) {
-            BaselineStubFrameLayout *stubFrame =
-                GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(rectFrame);
-            returnAddressToFp_ = stubFrame->returnAddress();
-            fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
-                    + jit::BaselineFrame::FramePointerOffset;
-            type_ = JitFrame_BaselineJS;
-            return;
-        }
-
-        MOZ_CRASH("Bad frame type prior to rectifier frame.");
-    }
-
-    if (prevType == JitFrame_Entry) {
-        // No previous frame, set to null to indicate that JitFrameIterator is done()
-        returnAddressToFp_ = nullptr;
-        fp_ = nullptr;
-        type_ = JitFrame_Entry;
-        return;
-    }
-
-    MOZ_CRASH("Bad frame type.");
-}
-
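// Editorial note: an illustrative, standalone sketch (not part of the patch)
// of the descriptor encoding the removed iterator above (and CommonFrameLayout
// below) relies on: the previous frame's type sits in the low bits of the
// descriptor word and its size sits above them. Names and bit widths here are
// hypothetical.
#include <cassert>
#include <cstdint>

enum FrameType : uintptr_t { Frame_Entry = 0, Frame_IonJS = 1, Frame_BaselineJS = 2 };
constexpr unsigned kTypeBits = 4;
constexpr uintptr_t kTypeMask = (uintptr_t(1) << kTypeBits) - 1;

// Pack the previous frame's size and type into a single descriptor word.
constexpr uintptr_t MakeDescriptor(uintptr_t prevSize, FrameType prevType) {
    return (prevSize << kTypeBits) | uintptr_t(prevType);
}
constexpr FrameType PrevType(uintptr_t d) { return FrameType(d & kTypeMask); }
constexpr uintptr_t PrevSize(uintptr_t d) { return d >> kTypeBits; }

int main() {
    uintptr_t d = MakeDescriptor(/* prevSize = */ 64, Frame_IonJS);
    assert(PrevType(d) == Frame_IonJS);
    assert(PrevSize(d) == 64);
    // A walker adds PrevSize(d) plus the fixed header size to the current
    // frame pointer to reach the previous frame, stopping at an entry frame.
    assert(PrevType(MakeDescriptor(0, Frame_Entry)) == Frame_Entry);
    return 0;
}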
 JitFrameLayout *
 InvalidationBailoutStack::fp() const
 {
     return (JitFrameLayout *) (sp() + ionScript_->frameSize());
 }
 
 void
 InvalidationBailoutStack::checkInvariants() const
--- a/js/src/jit/JitFrames.h
+++ b/js/src/jit/JitFrames.h
@@ -326,19 +326,16 @@ class CommonFrameLayout
     uintptr_t descriptor_;
 
     static const uintptr_t FrameTypeMask = (1 << FRAMETYPE_BITS) - 1;
 
   public:
     static size_t offsetOfDescriptor() {
         return offsetof(CommonFrameLayout, descriptor_);
     }
-    uintptr_t descriptor() const {
-        return descriptor_;
-    }
     static size_t offsetOfReturnAddress() {
         return offsetof(CommonFrameLayout, returnAddress_);
     }
     FrameType prevType() const {
         return FrameType(descriptor_ & FrameTypeMask);
     }
     void changePrevType(FrameType type) {
         descriptor_ &= ~FrameTypeMask;
@@ -821,21 +818,16 @@ class BaselineStubFrameLayout : public C
 
     static inline int reverseOffsetOfStubPtr() {
         return -int(sizeof(void *));
     }
     static inline int reverseOffsetOfSavedFramePtr() {
         return -int(2 * sizeof(void *));
     }
 
-    void *reverseSavedFramePtr() {
-        uint8_t *addr = ((uint8_t *) this) + reverseOffsetOfSavedFramePtr();
-        return *(void **)addr;
-    }
-
     inline ICStub *maybeStubPtr() {
         uint8_t *fp = reinterpret_cast<uint8_t *>(this);
         return *reinterpret_cast<ICStub **>(fp + reverseOffsetOfStubPtr());
     }
     inline void setStubPtr(ICStub *stub) {
         uint8_t *fp = reinterpret_cast<uint8_t *>(this);
         *reinterpret_cast<ICStub **>(fp + reverseOffsetOfStubPtr()) = stub;
     }
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -2,25 +2,20 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/JitcodeMap.h"
 
 #include "mozilla/DebugOnly.h"
-#include "mozilla/UniquePtr.h"
-#include "jsprf.h"
-
 #include "jit/BaselineJIT.h"
 #include "jit/JitSpewer.h"
 
 #include "js/Vector.h"
-#include "vm/SPSProfiler.h"
-#include "jsscriptinlines.h"
 
 namespace js {
 namespace jit {
 
 bool
 JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                               BytecodeLocationVector &results,
                                               uint32_t *depth) const
@@ -51,69 +46,33 @@ JitcodeGlobalEntry::IonEntry::callStackA
         jsbytecode *pc = script->offsetToPC(pcOffset);
         if (!results.append(BytecodeLocation(script, pc)))
             return false;
     }
 
     return true;
 }
 
-uint32_t
-JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
-                                              const char **results,
-                                              uint32_t maxResults) const
-{
-    MOZ_ASSERT(containsPointer(ptr));
-    MOZ_ASSERT(maxResults >= 1);
-    uint32_t ptrOffset = reinterpret_cast<uint8_t *>(ptr) -
-                         reinterpret_cast<uint8_t *>(nativeStartAddr());
-
-    uint32_t regionIdx = regionTable()->findRegionEntry(ptrOffset);
-    MOZ_ASSERT(regionIdx < regionTable()->numRegions());
-
-    JitcodeRegionEntry region = regionTable()->regionEntry(regionIdx);
-
-    JitcodeRegionEntry::ScriptPcIterator locationIter = region.scriptPcIterator();
-    MOZ_ASSERT(locationIter.hasMore());
-    uint32_t count = 0;
-    while (locationIter.hasMore()) {
-        uint32_t scriptIdx, pcOffset;
-
-        locationIter.readNext(&scriptIdx, &pcOffset);
-        MOZ_ASSERT(getStr(scriptIdx));
-
-        results[count++] = getStr(scriptIdx);
-        if (count >= maxResults)
-            break;
-    }
-
-    return count;
-}
-
 void
 JitcodeGlobalEntry::IonEntry::destroy()
 {
     // The region table is stored at the tail of the compacted data,
     // which means the start of the region table is a pointer to
     // the _middle_ of the memory space allocated for it.
     //
     // When freeing it, obtain the payload start pointer first.
     if (regionTable_)
         js_free((void*) (regionTable_->payloadStart()));
     regionTable_ = nullptr;
 
-    // Free the scriptList strs.
-    for (uint32_t i = 0; i < scriptList_->size; i++)  {
-        js_free(scriptList_->pairs[i].str);
-        scriptList_->pairs[i].str = nullptr;
-    }
-
-    // Free the script list
-    js_free(scriptList_);
-    scriptList_ = nullptr;
+    // Single tag is just pointer-to-jsscript, no memory to free.
+    ScriptListTag tag = scriptListTag();
+    if (tag > Single)
+        js_free(scriptListPointer());
+    scriptList_ = 0;
 }
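// Editorial note: an illustrative, standalone sketch (not part of the patch)
// of the layout destroy() above has to cope with: the region table pointer
// refers to the middle of its allocation, so freeing must go through the
// payload start. TailTable is a hypothetical name.
#include <cassert>
#include <cstdint>
#include <cstdlib>
#include <new>

struct TailTable {
    uint8_t *payloadStart_;   // start of the whole allocation
    explicit TailTable(uint8_t *payloadStart) : payloadStart_(payloadStart) {}
    uint8_t *payloadStart() const { return payloadStart_; }
};

int main() {
    // The compacted payload is written first; the table object lives at the tail.
    const size_t payloadBytes = 64;
    uint8_t *mem = static_cast<uint8_t *>(std::malloc(payloadBytes + sizeof(TailTable)));
    assert(mem);
    TailTable *table = new (mem + payloadBytes) TailTable(mem);
    // Freeing the table pointer itself would be wrong; free the payload start.
    assert(table->payloadStart() == mem);
    std::free(table->payloadStart());
    return 0;
}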
 
 bool
 JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                                    BytecodeLocationVector &results,
                                                    uint32_t *depth) const
 {
     MOZ_ASSERT(containsPointer(ptr));
@@ -124,61 +83,32 @@ JitcodeGlobalEntry::BaselineEntry::callS
     if (!results.append(BytecodeLocation(script_, pc)))
         return false;
 
     *depth = 1;
 
     return true;
 }
 
-uint32_t
-JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
-                                                   const char **results,
-                                                   uint32_t maxResults) const
-{
-    MOZ_ASSERT(containsPointer(ptr));
-    MOZ_ASSERT(script_->hasBaselineScript());
-    MOZ_ASSERT(maxResults >= 1);
-
-    results[0] = str();
-    return 1;
-}
-
 bool
 JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                                    BytecodeLocationVector &results,
                                                    uint32_t *depth) const
 {
     MOZ_ASSERT(containsPointer(ptr));
 
     // There must exist an entry for the rejoin addr if this entry exists.
     JitRuntime *jitrt = rt->jitRuntime();
     JitcodeGlobalEntry entry;
-    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
+    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry);
     MOZ_ASSERT(entry.isIon());
 
     return entry.callStackAtAddr(rt, rejoinAddr(), results, depth);
 }
 
-uint32_t
-JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
-                                                   const char **results,
-                                                   uint32_t maxResults) const
-{
-    MOZ_ASSERT(containsPointer(ptr));
-
-    // There must exist an entry for the rejoin addr if this entry exists.
-    JitRuntime *jitrt = rt->jitRuntime();
-    JitcodeGlobalEntry entry;
-    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
-    MOZ_ASSERT(entry.isIon());
-
-    return entry.callStackAtAddr(rt, rejoinAddr(), results, maxResults);
-}
-
 
 static int ComparePointers(const void *a, const void *b) {
     const uint8_t *a_ptr = reinterpret_cast<const uint8_t *>(a);
     const uint8_t *b_ptr = reinterpret_cast<const uint8_t *>(b);
     if (a_ptr < b_ptr)
         return -1;
     if (a_ptr > b_ptr)
         return 1;
@@ -209,145 +139,44 @@ JitcodeGlobalEntry::compare(const Jitcod
         // query ptr > entry
         return flip * 1;
     }
 
     // query ptr < entry
     return flip * -1;
 }
 
-/* static */ char *
-JitcodeGlobalEntry::createScriptString(JSContext *cx, JSScript *script, size_t *length)
-{
-    // If the script has a function, try calculating its name.
-    bool hasName = false;
-    size_t nameLength = 0;
-    mozilla::UniquePtr<char, JS::FreePolicy> nameStr = nullptr;
-    JSFunction *func = script->functionDelazifying();
-    if (func && func->displayAtom()) {
-        JSAtom *atom = func->displayAtom();
-
-        JS::AutoCheckCannotGC nogc;
-        nameStr = mozilla::UniquePtr<char, JS::FreePolicy>(
-            atom->hasLatin1Chars() ?
-                JS::CharsToNewUTF8CharsZ(cx, atom->latin1Range(nogc)).c_str()
-              : JS::CharsToNewUTF8CharsZ(cx, atom->twoByteRange(nogc)).c_str());
-        if (!nameStr)
-            return nullptr;
-
-        nameLength = strlen(nameStr.get());
-        hasName = true;
-    }
-
-    // Calculate filename length
-    const char *filenameStr = script->filename() ? script->filename() : "(null)";
-    size_t filenameLength = strlen(filenameStr);
-
-    // Calculate lineno length
-    bool hasLineno = false;
-    size_t linenoLength = 0;
-    char linenoStr[15];
-    if (hasName || (script->functionNonDelazifying() || script->isForEval())) {
-        linenoLength = JS_snprintf(linenoStr, 15, "%u", (unsigned) script->lineno());
-        hasLineno = true;
-    }
-
-    // Full profile string for scripts with functions is:
-    //      FuncName (FileName:Lineno)
-    // Full profile string for scripts without functions is:
-    //      FileName:Lineno
-    // Full profile string for scripts without functions and without linenos is:
-    //      FileName
-
-    // Calculate full string length.
-    size_t fullLength = 0;
-    if (hasName) {
-        MOZ_ASSERT(hasLineno);
-        fullLength = nameLength + 2 + filenameLength + 1 + linenoLength + 1;
-    } else if (hasLineno) {
-        fullLength = filenameLength + 1 + linenoLength;
-    } else {
-        fullLength = filenameLength;
-    }
-
-    // Allocate string.
-    char *str = cx->pod_malloc<char>(fullLength + 1);
-    if (!str)
-        return nullptr;
-
-    size_t cur = 0;
-
-    // Fill string with func name if needed.
-    if (hasName) {
-        memcpy(str + cur, nameStr.get(), nameLength);
-        cur += nameLength;
-        str[cur++] = ' ';
-        str[cur++] = '(';
-    }
-
-    // Fill string with filename chars.
-    memcpy(str + cur, filenameStr, filenameLength);
-    cur += filenameLength;
-
-    // Fill lineno chars.
-    if (hasLineno) {
-        str[cur++] = ':';
-        memcpy(str + cur, linenoStr, linenoLength);
-        cur += linenoLength;
-    }
-
-    // Terminal ')' if necessary.
-    if (hasName)
-        str[cur++] = ')';
-
-    MOZ_ASSERT(cur == fullLength);
-    str[cur] = 0;
-
-    if (length)
-        *length = fullLength;
-
-    return str;
-}
-
 bool
-JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
+JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result)
 {
     MOZ_ASSERT(result);
 
     // Construct a JitcodeGlobalEntry::Query to do the lookup
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
-
-    // Lookups on tree does mutation.  Suppress sampling when this is happening.
-    AutoSuppressProfilerSampling suppressSampling(rt);
     return tree_.contains(query, result);
 }
 
 void
-JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
+JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result)
 {
-    mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
+    mozilla::DebugOnly<bool> success = lookup(ptr, result);
     MOZ_ASSERT(success);
 }
 
 bool
-JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
+JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry)
 {
-    // Suppress profiler sampling while table is being mutated.
-    AutoSuppressProfilerSampling suppressSampling(rt);
-
-    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
+    // Only Ion, Baseline, and IonCache entries may be added for now.
+    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
     return tree_.insert(entry);
 }
 
 void
-JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
+JitcodeGlobalTable::removeEntry(void *startAddr)
 {
-    // Suppress profiler sampling while table is being mutated.
-    AutoSuppressProfilerSampling suppressSampling(rt);
-
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
     JitcodeGlobalEntry result;
     mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
     MOZ_ASSERT(success);
 
     // Destroy entry before removing it from tree.
     result.destroy();
     tree_.remove(query);
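// Editorial note: an illustrative, standalone sketch (not part of the patch)
// of the address-range lookup that lookup() and removeEntry() above perform:
// find the entry whose [nativeStartAddr, nativeEndAddr) range contains a given
// code address. This uses a sorted std::map rather than the splay tree;
// RangeTable is a hypothetical name.
#include <cassert>
#include <cstdint>
#include <map>

struct CodeRange { uintptr_t start, end; int payload; };

class RangeTable {
    std::map<uintptr_t, CodeRange> byStart_;   // keyed by range start
  public:
    void add(const CodeRange &r) { byStart_[r.start] = r; }
    // Find the range containing addr, if any; end addresses are exclusive.
    bool lookup(uintptr_t addr, CodeRange *out) const {
        auto it = byStart_.upper_bound(addr);  // first range starting after addr
        if (it == byStart_.begin())
            return false;
        --it;                                  // candidate starting at or before addr
        if (addr >= it->second.end)
            return false;
        *out = it->second;
        return true;
    }
};

int main() {
    RangeTable table;
    table.add(CodeRange{0x1000, 0x1100, 1});
    table.add(CodeRange{0x2000, 0x2400, 2});
    CodeRange found;
    assert(table.lookup(0x1080, &found) && found.payload == 1);
    assert(!table.lookup(0x1100, &found));     // end is exclusive
    assert(table.lookup(0x23ff, &found) && found.payload == 2);
    return 0;
}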
@@ -722,71 +551,44 @@ JitcodeRegionEntry::findPcOffset(uint32_
         if (queryNativeOffset <= curNativeOffset + nativeDelta)
             break;
         curNativeOffset += nativeDelta;
         curPcOffset += pcDelta;
     }
     return curPcOffset;
 }
 
-typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
-
-struct AutoFreeProfilingStrings {
-    ProfilingStringVector &profilingStrings_;
-    bool keep_;
-    explicit AutoFreeProfilingStrings(ProfilingStringVector &vec)
-        : profilingStrings_(vec),
-          keep_(false)
-    {}
-
-    void keepStrings() { keep_ = true; }
-
-    ~AutoFreeProfilingStrings() {
-        if (keep_)
-            return;
-        for (size_t i = 0; i < profilingStrings_.length(); i++)
-            js_free(profilingStrings_[i]);
-    }
-};
-
 bool
 JitcodeIonTable::makeIonEntry(JSContext *cx, JitCode *code,
                               uint32_t numScripts, JSScript **scripts,
                               JitcodeGlobalEntry::IonEntry &out)
 {
     typedef JitcodeGlobalEntry::IonEntry::SizedScriptList SizedScriptList;
 
     MOZ_ASSERT(numScripts > 0);
 
-    // Create profiling strings for script, within vector.
-    typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
-
-    ProfilingStringVector profilingStrings;
-    if (!profilingStrings.reserve(numScripts))
-        return false;
+    if (numScripts == 1) {
+        out.init(code->raw(), code->rawEnd(), scripts[0], this);
+        return true;
+    }
 
-    AutoFreeProfilingStrings autoFreeProfilingStrings(profilingStrings);
-    for (uint32_t i = 0; i < numScripts; i++) {
-        char *str = JitcodeGlobalEntry::createScriptString(cx, scripts[i]);
-        if (!str)
+    if (numScripts < uint32_t(JitcodeGlobalEntry::IonEntry::Multi)) {
+        JSScript **scriptsCopy = cx->pod_malloc<JSScript *>(numScripts);
+        if (!scriptsCopy)
             return false;
-        if (!profilingStrings.append(str))
-            return false;
+        memcpy(scriptsCopy, scripts, sizeof(JSScript *) * numScripts);
+        out.init(code->raw(), code->rawEnd(), numScripts, scriptsCopy, this);
+        return true;
     }
 
     // Create SizedScriptList
     void *mem = (void *)cx->pod_malloc<uint8_t>(SizedScriptList::AllocSizeFor(numScripts));
     if (!mem)
         return false;
-
-    // Keep allocated profiling strings on destruct.
-    autoFreeProfilingStrings.keepStrings();
-
-    SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
-                                                            &profilingStrings[0]);
+    SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts);
     out.init(code->raw(), code->rawEnd(), scriptList, this);
     return true;
 }
 
 uint32_t
 JitcodeIonTable::findRegionEntry(uint32_t nativeOffset) const
 {
     static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
--- a/js/src/jit/JitcodeMap.h
+++ b/js/src/jit/JitcodeMap.h
@@ -36,29 +36,27 @@ class JitcodeRegionEntry;
 class JitcodeGlobalEntry
 {
   public:
     enum Kind {
         INVALID = 0,
         Ion,
         Baseline,
         IonCache,
-        Dummy,
         Query,
         LIMIT
     };
     JS_STATIC_ASSERT(LIMIT <= 8);
 
     struct BytecodeLocation {
         JSScript *script;
         jsbytecode *pc;
         BytecodeLocation(JSScript *script, jsbytecode *pc) : script(script), pc(pc) {}
     };
     typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
-    typedef Vector<const char *, 0, SystemAllocPolicy> ProfileStringVector;
 
     struct BaseEntry
     {
         void *nativeStartAddr_;
         void *nativeEndAddr_;
         Kind kind_;
 
         void init() {
@@ -94,71 +92,130 @@ class JitcodeGlobalEntry
         }
         bool containsPointer(void *ptr) const {
             return startsBelowPointer(ptr) && endsAbovePointer(ptr);
         }
     };
 
     struct IonEntry : public BaseEntry
     {
+        uintptr_t scriptList_;
+
         // regionTable_ points to the start of the region table within the
         // packed map for compile represented by this entry.  Since the
         // region table occurs at the tail of the memory region, this pointer
         // points somewhere inside the region memory space, and not to the start
         // of the memory space.
         JitcodeIonTable *regionTable_;
 
-        struct ScriptNamePair {
-            JSScript *script;
-            char *str;
+        static const unsigned LowBits = 3;
+        static const uintptr_t LowMask = (uintptr_t(1) << LowBits) - 1;
+
+        enum ScriptListTag {
+            Single = 0,
+            Multi = 7
         };
 
         struct SizedScriptList {
             uint32_t size;
-            ScriptNamePair pairs[0];
-            SizedScriptList(uint32_t sz, JSScript **scrs, char **strs) : size(sz) {
-                for (uint32_t i = 0; i < size; i++) {
-                    pairs[i].script = scrs[i];
-                    pairs[i].str = strs[i];
-                }
+            JSScript *scripts[0];
+            SizedScriptList(uint32_t sz, JSScript **scr) : size(sz) {
+                for (uint32_t i = 0; i < size; i++)
+                    scripts[i] = scr[i];
             }
 
             static uint32_t AllocSizeFor(uint32_t nscripts) {
-                return sizeof(SizedScriptList) + (nscripts * sizeof(ScriptNamePair));
+                return sizeof(SizedScriptList) + (nscripts * sizeof(JSScript *));
             }
         };
 
-        SizedScriptList *scriptList_;
+        void init(void *nativeStartAddr, void *nativeEndAddr,
+                  JSScript *script, JitcodeIonTable *regionTable)
+        {
+            MOZ_ASSERT((uintptr_t(script) & LowMask) == 0);
+            MOZ_ASSERT(script);
+            MOZ_ASSERT(regionTable);
+            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
+            scriptList_ = uintptr_t(script);
+            regionTable_ = regionTable;
+        }
+
+        void init(void *nativeStartAddr, void *nativeEndAddr,
+                  unsigned numScripts, JSScript **scripts, JitcodeIonTable *regionTable)
+        {
+            MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
+            MOZ_ASSERT(numScripts >= 1);
+            MOZ_ASSERT(numScripts <= 6);
+            MOZ_ASSERT(scripts);
+            MOZ_ASSERT(regionTable);
+            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
+            scriptList_ = uintptr_t(scripts) | numScripts;
+            regionTable_ = regionTable;
+        }
 
         void init(void *nativeStartAddr, void *nativeEndAddr,
-                  SizedScriptList *scriptList, JitcodeIonTable *regionTable)
+                  SizedScriptList *scripts, JitcodeIonTable *regionTable)
         {
-            MOZ_ASSERT(scriptList);
+            MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
+            MOZ_ASSERT(scripts->size > 6);
+            MOZ_ASSERT(scripts);
             MOZ_ASSERT(regionTable);
+
             BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
+            scriptList_ = uintptr_t(scripts) | uintptr_t(Multi);
             regionTable_ = regionTable;
-            scriptList_ = scriptList;
         }
 
+        ScriptListTag scriptListTag() const {
+            return static_cast<ScriptListTag>(scriptList_ & LowMask);
+        }
+        void *scriptListPointer() const {
+            return reinterpret_cast<void *>(scriptList_ & ~LowMask);
+        }
+
+        JSScript *singleScript() const {
+            MOZ_ASSERT(scriptListTag() == Single);
+            return reinterpret_cast<JSScript *>(scriptListPointer());
+        }
+        JSScript **rawScriptArray() const {
+            MOZ_ASSERT(scriptListTag() < Multi);
+            return reinterpret_cast<JSScript **>(scriptListPointer());
+        }
         SizedScriptList *sizedScriptList() const {
-            return scriptList_;
+            MOZ_ASSERT(scriptListTag() == Multi);
+            return reinterpret_cast<SizedScriptList *>(scriptListPointer());
         }
 
         unsigned numScripts() const {
-            return scriptList_->size;
+            ScriptListTag tag = scriptListTag();
+            if (tag == Single)
+                return 1;
+
+            if (tag < Multi) {
+                MOZ_ASSERT(int(tag) >= 2);
+                return static_cast<unsigned>(tag);
+            }
+
+            return sizedScriptList()->size;
         }
 
         JSScript *getScript(unsigned idx) const {
             MOZ_ASSERT(idx < numScripts());
-            return sizedScriptList()->pairs[idx].script;
-        }
+
+            ScriptListTag tag = scriptListTag();
+
+            if (tag == Single)
+                return singleScript();
 
-        const char *getStr(unsigned idx) const {
-            MOZ_ASSERT(idx < numScripts());
-            return sizedScriptList()->pairs[idx].str;
+            if (tag < Multi) {
+                MOZ_ASSERT(int(tag) >= 2);
+                return rawScriptArray()[idx];
+            }
+
+            return sizedScriptList()->scripts[idx];
         }
 
         void destroy();
 
         JitcodeIonTable *regionTable() const {
             return regionTable_;
         }
 
@@ -168,49 +225,37 @@ class JitcodeGlobalEntry
                 if (getScript(i) == script)
                     return i;
             }
             return -1;
         }
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
-
-        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
-                                 uint32_t maxResults) const;
     };
 
     struct BaselineEntry : public BaseEntry
     {
         JSScript *script_;
-        const char *str_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
+        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script)
         {
             MOZ_ASSERT(script != nullptr);
             BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
             script_ = script;
-            str_ = str;
         }
 
         JSScript *script() const {
             return script_;
         }
 
-        const char *str() const {
-            return str_;
-        }
-
         void destroy() {}
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
-
-        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
-                                 uint32_t maxResults) const;
     };
 
     struct IonCacheEntry : public BaseEntry
     {
         void *rejoinAddr_;
 
         void init(void *nativeStartAddr, void *nativeEndAddr, void *rejoinAddr)
         {
@@ -222,43 +267,16 @@ class JitcodeGlobalEntry
         void *rejoinAddr() const {
             return rejoinAddr_;
         }
 
         void destroy() {}
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
-
-        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
-                                 uint32_t maxResults) const;
-    };
-
-    // Dummy entries are created for jitcode generated when profiling is not turned on,
-    // so that they have representation in the global table if they are on the
-    // stack when profiling is enabled.
-    struct DummyEntry : public BaseEntry
-    {
-        void init(void *nativeStartAddr, void *nativeEndAddr) {
-            BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
-        }
-
-        void destroy() {}
-
-        bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
-                             uint32_t *depth) const
-        {
-            return true;
-        }
-
-        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
-                                 uint32_t maxResults) const
-        {
-            return 0;
-        }
     };
 
     // QueryEntry is never stored in the table, just used for queries
     // where an instance of JitcodeGlobalEntry is required to do tree
     // lookups.
     struct QueryEntry : public BaseEntry
     {
         void init(void *addr) {
@@ -281,19 +299,16 @@ class JitcodeGlobalEntry
         IonEntry ion_;
 
         // Baseline jitcode.
         BaselineEntry baseline_;
 
         // IonCache stubs.
         IonCacheEntry ionCache_;
 
-        // Dummy entries.
-        DummyEntry dummy_;
-
         // When doing queries on the SplayTree for particular addresses,
         // the query addresses are represented using a QueryEntry.
         QueryEntry query_;
     };
 
   public:
     JitcodeGlobalEntry() {
         base_.init();
@@ -306,20 +321,16 @@ class JitcodeGlobalEntry
     explicit JitcodeGlobalEntry(const BaselineEntry &baseline) {
         baseline_ = baseline;
     }
 
     explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache) {
         ionCache_ = ionCache;
     }
 
-    explicit JitcodeGlobalEntry(const DummyEntry &dummy) {
-        dummy_ = dummy;
-    }
-
     explicit JitcodeGlobalEntry(const QueryEntry &query) {
         query_ = query;
     }
 
     static JitcodeGlobalEntry MakeQuery(void *ptr) {
         QueryEntry query;
         query.init(ptr);
         return JitcodeGlobalEntry(query);
@@ -331,19 +342,16 @@ class JitcodeGlobalEntry
             ionEntry().destroy();
             break;
           case Baseline:
             baselineEntry().destroy();
             break;
           case IonCache:
             ionCacheEntry().destroy();
             break;
-          case Dummy:
-            dummyEntry().destroy();
-            break;
           case Query:
             queryEntry().destroy();
             break;
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
     }
 
@@ -384,39 +392,32 @@ class JitcodeGlobalEntry
         return kind() == Ion;
     }
     bool isBaseline() const {
         return kind() == Baseline;
     }
     bool isIonCache() const {
         return kind() == IonCache;
     }
-    bool isDummy() const {
-        return kind() == Dummy;
-    }
     bool isQuery() const {
         return kind() == Query;
     }
 
     IonEntry &ionEntry() {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     BaselineEntry &baselineEntry() {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
     IonCacheEntry &ionCacheEntry() {
         MOZ_ASSERT(isIonCache());
         return ionCache_;
     }
-    DummyEntry &dummyEntry() {
-        MOZ_ASSERT(isDummy());
-        return dummy_;
-    }
     QueryEntry &queryEntry() {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
     const IonEntry &ionEntry() const {
         MOZ_ASSERT(isIon());
         return ion_;
@@ -424,20 +425,16 @@ class JitcodeGlobalEntry
     const BaselineEntry &baselineEntry() const {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
     const IonCacheEntry &ionCacheEntry() const {
         MOZ_ASSERT(isIonCache());
         return ionCache_;
     }
-    const DummyEntry &dummyEntry() const {
-        MOZ_ASSERT(isDummy());
-        return dummy_;
-    }
     const QueryEntry &queryEntry() const {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
     // Read the inline call stack at a given point in the native code and append into
     // the given vector.  Innermost (script,pc) pair will be appended first, and
     // outermost appended last.
@@ -448,51 +445,28 @@ class JitcodeGlobalEntry
     {
         switch (kind()) {
           case Ion:
             return ionEntry().callStackAtAddr(rt, ptr, results, depth);
           case Baseline:
             return baselineEntry().callStackAtAddr(rt, ptr, results, depth);
           case IonCache:
             return ionCacheEntry().callStackAtAddr(rt, ptr, results, depth);
-          case Dummy:
-            return dummyEntry().callStackAtAddr(rt, ptr, results, depth);
-          default:
-            MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
-        }
-        return false;
-    }
-
-    uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
-                             uint32_t maxResults) const
-    {
-        switch (kind()) {
-          case Ion:
-            return ionEntry().callStackAtAddr(rt, ptr, results, maxResults);
-          case Baseline:
-            return baselineEntry().callStackAtAddr(rt, ptr, results, maxResults);
-          case IonCache:
-            return ionCacheEntry().callStackAtAddr(rt, ptr, results, maxResults);
-          case Dummy:
-            return dummyEntry().callStackAtAddr(rt, ptr, results, maxResults);
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
         return false;
     }
 
     // Figure out the number of the (JSScript *, jsbytecode *) pairs that are active
     // at this location.
     uint32_t lookupInlineCallDepth(void *ptr);
 
     // Compare two global entries.
     static int compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalEntry &ent2);
-
-    // Compute a profiling string for a given script.
-    static char *createScriptString(JSContext *cx, JSScript *script, size_t *length=nullptr);
 };
 
 /*
  * Global table of JitcodeGlobalEntry values sorted by native address range.
  */
 class JitcodeGlobalTable
 {
   public:
@@ -513,36 +487,33 @@ class JitcodeGlobalTable
         tree_.disableCheckCoherency();
     }
     ~JitcodeGlobalTable() {}
 
     bool empty() const {
         return tree_.empty();
     }
 
-    bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
-    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
+    bool lookup(void *ptr, JitcodeGlobalEntry *result);
+    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result);
 
-    bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
-        return addEntry(JitcodeGlobalEntry(entry), rt);
+    bool addEntry(const JitcodeGlobalEntry::IonEntry &entry) {
+        return addEntry(JitcodeGlobalEntry(entry));
     }
-    bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry, JSRuntime *rt) {
-        return addEntry(JitcodeGlobalEntry(entry), rt);
+    bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry) {
+        return addEntry(JitcodeGlobalEntry(entry));
     }
-    bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry, JSRuntime *rt) {
-        return addEntry(JitcodeGlobalEntry(entry), rt);
-    }
-    bool addEntry(const JitcodeGlobalEntry::DummyEntry &entry, JSRuntime *rt) {
-        return addEntry(JitcodeGlobalEntry(entry), rt);
+    bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry) {
+        return addEntry(JitcodeGlobalEntry(entry));
     }
 
-    void removeEntry(void *startAddr, JSRuntime *rt);
+    void removeEntry(void *startAddr);
 
   private:
-    bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
+    bool addEntry(const JitcodeGlobalEntry &entry);
 };
 
 
 /*
  * Container class for main jitcode table.
  * The Region table's memory is structured as follows:
  *
  *      +------------------------------------------------+   |
@@ -839,18 +810,18 @@ class JitcodeIonTable
   public:
     explicit JitcodeIonTable(uint32_t numRegions)
       : numRegions_(numRegions)
     {
         for (uint32_t i = 0; i < numRegions; i++)
             regionOffsets_[i] = 0;
     }
 
-    bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts,
-                      JSScript **scripts, JitcodeGlobalEntry::IonEntry &out);
+    bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts, JSScript **scripts,
+                      JitcodeGlobalEntry::IonEntry &out);
 
     uint32_t numRegions() const {
         return numRegions_;
     }
 
     uint32_t regionOffset(uint32_t regionIndex) const {
         MOZ_ASSERT(regionIndex < numRegions());
         return regionOffsets_[regionIndex];
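
A quick illustration of the tagged-word encoding restored above: IonEntry::scriptList_ holds either a bare JSScript * (tag Single), a pointer to a small raw array with the script count (2..6) stored in the low bits, or a SizedScriptList pointer tagged Multi. The sketch below decodes such a word. The concrete enum values (Single == 0, Multi == 7, LowMask == 7) are assumptions consistent with the masking and asserts in this hunk; the real ScriptListTag declaration sits outside the quoted diff.

// Standalone sketch, not part of the patch. Tag values are assumed as noted above.
#include <cstdint>

enum ScriptListTag : uintptr_t { Single = 0, Multi = 7, LowMask = 7 };

static unsigned decodeNumScripts(uintptr_t scriptListWord, unsigned sizedListSize) {
    uintptr_t tag = scriptListWord & LowMask;
    if (tag == Single)
        return 1;                    // a lone JSScript * stored directly in the word
    if (tag < Multi)
        return unsigned(tag);        // 2..6 inlined scripts: count lives in the tag bits
    return sizedListSize;            // > 6 scripts: size comes from the SizedScriptList
}

// Example: an array of two scripts is stored as (uintptr_t(scripts) | 2),
// so decodeNumScripts(uintptr_t(scripts) | 2, 0) yields 2.
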
--- a/js/src/jit/LIR-Common.h
+++ b/js/src/jit/LIR-Common.h
@@ -3661,43 +3661,39 @@ class LFloat32x4ToInt32x4 : public LInst
 class LStart : public LInstructionHelper<0, 0, 0>
 {
   public:
     LIR_HEADER(Start)
 };
 
 // Passed the BaselineFrame address in the OsrFrameReg by SideCannon().
 // Forwards this object to the LOsrValues for Value materialization.
-class LOsrEntry : public LInstructionHelper<1, 0, 1>
+class LOsrEntry : public LInstructionHelper<1, 0, 0>
 {
   protected:
     Label label_;
     uint32_t frameDepth_;
 
   public:
     LIR_HEADER(OsrEntry)
 
-    LOsrEntry(const LDefinition &temp)
+    LOsrEntry()
       : frameDepth_(0)
-    {
-        setTemp(0, temp);
-    }
+    { }
 
     void setFrameDepth(uint32_t depth) {
         frameDepth_ = depth;
     }
     uint32_t getFrameDepth() {
         return frameDepth_;
     }
     Label *label() {
         return &label_;
     }
-    const LDefinition *temp() {
-        return getTemp(0);
-    }
+
 };
 
 // Materialize a Value stored in an interpreter frame for OSR.
 class LOsrValue : public LInstructionHelper<BOX_PIECES, 1, 0>
 {
   public:
     LIR_HEADER(OsrValue)
 
@@ -6160,16 +6156,38 @@ class LCallInstanceOf : public LCallInst
     const LAllocation *rhs() {
         return getOperand(RHS);
     }
 
     static const size_t LHS = 0;
     static const size_t RHS = BOX_PIECES;
 };
 
+class LProfilerStackOp : public LInstructionHelper<0, 0, 1>
+{
+  public:
+    LIR_HEADER(ProfilerStackOp)
+
+    explicit LProfilerStackOp(const LDefinition &temp) {
+        setTemp(0, temp);
+    }
+
+    const LDefinition *temp() {
+        return getTemp(0);
+    }
+
+    JSScript *script() {
+        return mir_->toProfilerStackOp()->script();
+    }
+
+    MProfilerStackOp::Type type() {
+        return mir_->toProfilerStackOp()->type();
+    }
+};
+
 class LIsCallable : public LInstructionHelper<1, 1, 0>
 {
   public:
     LIR_HEADER(IsCallable);
     explicit LIsCallable(const LAllocation &object) {
         setOperand(0, object);
     }
 
--- a/js/src/jit/LOpcodes.h
+++ b/js/src/jit/LOpcodes.h
@@ -300,16 +300,17 @@
     _(In)                           \
     _(InArray)                      \
     _(InstanceOfO)                  \
     _(InstanceOfV)                  \
     _(CallInstanceOf)               \
     _(InterruptCheck)               \
     _(AsmJSInterruptCheck)          \
     _(InterruptCheckImplicit)       \
+    _(ProfilerStackOp)              \
     _(GetDOMProperty)               \
     _(GetDOMMemberV)                \
     _(GetDOMMemberT)                \
     _(SetDOMProperty)               \
     _(CallDOMNative)                \
     _(IsCallable)                   \
     _(IsObject)                     \
     _(IsObjectAndBranch)            \
--- a/js/src/jit/Lowering.cpp
+++ b/js/src/jit/Lowering.cpp
@@ -1628,17 +1628,17 @@ void
 LIRGenerator::visitLimitedTruncate(MLimitedTruncate *nop)
 {
     redefine(nop, nop->input());
 }
 
 void
 LIRGenerator::visitOsrEntry(MOsrEntry *entry)
 {
-    LOsrEntry *lir = new(alloc()) LOsrEntry(temp());
+    LOsrEntry *lir = new(alloc()) LOsrEntry;
     defineFixed(lir, entry, LAllocation(AnyRegister(OsrFrameReg)));
 }
 
 void
 LIRGenerator::visitOsrValue(MOsrValue *value)
 {
     LOsrValue *lir = new(alloc()) LOsrValue(useRegister(value->entry()));
     defineBox(lir, value);
@@ -3488,16 +3488,28 @@ LIRGenerator::visitCallInstanceOf(MCallI
 
     LCallInstanceOf *lir = new(alloc()) LCallInstanceOf(useRegisterAtStart(rhs));
     useBoxAtStart(lir, LCallInstanceOf::LHS, lhs);
     defineReturn(lir, ins);
     assignSafepoint(lir, ins);
 }
 
 void
+LIRGenerator::visitProfilerStackOp(MProfilerStackOp *ins)
+{
+    LProfilerStackOp *lir = new(alloc()) LProfilerStackOp(temp());
+    add(lir, ins);
+
+    // If slow assertions are enabled, then this node will result in a callVM
+    // out to a C++ function for the assertions, so we will need a safepoint.
+    if (gen->options.spsSlowAssertionsEnabled())
+        assignSafepoint(lir, ins);
+}
+
+void
 LIRGenerator::visitIsCallable(MIsCallable *ins)
 {
     MOZ_ASSERT(ins->object()->type() == MIRType_Object);
     MOZ_ASSERT(ins->type() == MIRType_Boolean);
     define(new(alloc()) LIsCallable(useRegister(ins->object())), ins);
 }
 
 static bool
--- a/js/src/jit/Lowering.h
+++ b/js/src/jit/Lowering.h
@@ -243,16 +243,17 @@ class LIRGenerator : public LIRGenerator
     void visitSetFrameArgument(MSetFrameArgument *ins);
     void visitRunOncePrologue(MRunOncePrologue *ins);
     void visitRest(MRest *ins);
     void visitThrow(MThrow *ins);
     void visitIn(MIn *ins);
     void visitInArray(MInArray *ins);
     void visitInstanceOf(MInstanceOf *ins);
     void visitCallInstanceOf(MCallInstanceOf *ins);
+    void visitProfilerStackOp(MProfilerStackOp *ins);
     void visitIsCallable(MIsCallable *ins);
     void visitIsObject(MIsObject *ins);
     void visitHasClass(MHasClass *ins);
     void visitAsmJSLoadGlobalVar(MAsmJSLoadGlobalVar *ins);
     void visitAsmJSStoreGlobalVar(MAsmJSStoreGlobalVar *ins);
     void visitAsmJSLoadFFIFunc(MAsmJSLoadFFIFunc *ins);
     void visitAsmJSParameter(MAsmJSParameter *ins);
     void visitAsmJSReturn(MAsmJSReturn *ins);
--- a/js/src/jit/MIR.h
+++ b/js/src/jit/MIR.h
@@ -11433,16 +11433,58 @@ class MNewStringObject :
 
     static MNewStringObject *New(TempAllocator &alloc, MDefinition *input, JSObject *templateObj) {
         return new(alloc) MNewStringObject(input, templateObj);
     }
 
     StringObject *templateObj() const;
 };
 
+// Node that represents that a script has begun executing. This comes at the
+// start of the function and is called once per function (including inline
+// ones).
+class MProfilerStackOp : public MNullaryInstruction
+{
+  public:
+    enum Type {
+        Enter,        // a function has begun executing and it is not inline
+        Exit          // any function has exited and is not inline
+    };
+
+  private:
+    JSScript *script_;
+    Type type_;
+
+    MProfilerStackOp(JSScript *script, Type type)
+      : script_(script), type_(type)
+    {
+        MOZ_ASSERT(script);
+        setGuard();
+    }
+
+  public:
+    INSTRUCTION_HEADER(ProfilerStackOp)
+
+    static MProfilerStackOp *New(TempAllocator &alloc, JSScript *script, Type type) {
+        return new(alloc) MProfilerStackOp(script, type);
+    }
+
+    JSScript *script() {
+        return script_;
+    }
+
+    Type type() {
+        return type_;
+    }
+
+    AliasSet getAliasSet() const MOZ_OVERRIDE {
+        return AliasSet::None();
+    }
+};
+
 // This is an alias for MLoadFixedSlot.
 class MEnclosingScope : public MLoadFixedSlot
 {
     explicit MEnclosingScope(MDefinition *obj)
       : MLoadFixedSlot(obj, ScopeObject::enclosingScopeSlot())
     {
         setResultType(MIRType_Object);
     }
--- a/js/src/jit/MIRGenerator.h
+++ b/js/src/jit/MIRGenerator.h
@@ -78,18 +78,24 @@ class MIRGenerator
     bool instrumentedProfiling() {
         if (!instrumentedProfilingIsCached_) {
             instrumentedProfiling_ = GetJitContext()->runtime->spsProfiler().enabled();
             instrumentedProfilingIsCached_ = true;
         }
         return instrumentedProfiling_;
     }
 
-    bool isProfilerInstrumentationEnabled() {
-        return !compilingAsmJS() && instrumentedProfiling();
+    bool isNativeToBytecodeMapEnabled() {
+        if (compilingAsmJS())
+            return false;
+#ifdef DEBUG
+        return true;
+#else
+        return instrumentedProfiling();
+#endif
     }
 
     // Whether the main thread is trying to cancel this build.
     bool shouldCancel(const char *why) {
         maybePause();
         return cancelBuild_;
     }
     void cancel() {
--- a/js/src/jit/MOpcodes.h
+++ b/js/src/jit/MOpcodes.h
@@ -221,16 +221,17 @@ namespace jit {
     _(Floor)                                                                \
     _(Ceil)                                                                 \
     _(Round)                                                                \
     _(In)                                                                   \
     _(InstanceOf)                                                           \
     _(CallInstanceOf)                                                       \
     _(InterruptCheck)                                                       \
     _(AsmJSInterruptCheck)                                                  \
+    _(ProfilerStackOp)                                                      \
     _(GetDOMProperty)                                                       \
     _(GetDOMMember)                                                         \
     _(SetDOMProperty)                                                       \
     _(IsCallable)                                                           \
     _(IsObject)                                                             \
     _(HasClass)                                                             \
     _(AsmJSNeg)                                                             \
     _(AsmJSUnsignedToDouble)                                                \
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -1309,18 +1309,26 @@ MacroAssembler::loadBaselineFramePtr(Reg
     subPtr(Imm32(BaselineFrame::Size()), dest);
 }
 
 void
 MacroAssembler::handleFailure()
 {
     // Re-entry code is irrelevant because the exception will leave the
     // running function and never come back
+    if (sps_)
+        sps_->skipNextReenter();
+    leaveSPSFrame();
+
     JitCode *excTail = GetJitContext()->runtime->jitRuntime()->getExceptionTail();
     jump(excTail);
+
+    // Doesn't actually emit code, but balances the leave()
+    if (sps_)
+        sps_->reenter(*this, InvalidReg);
 }
 
 #ifdef DEBUG
 static void
 AssumeUnreachable_(const char *output) {
     MOZ_ReportAssertionFailure(output, __FILE__, __LINE__);
 }
 #endif
@@ -1332,17 +1340,17 @@ MacroAssembler::assumeUnreachable(const 
     if (!IsCompilingAsmJS()) {
         RegisterSet regs = RegisterSet::Volatile();
         PushRegsInMask(regs);
         Register temp = regs.takeGeneral();
 
         setupUnalignedABICall(1, temp);
         movePtr(ImmPtr(output), temp);
         passABIArg(temp);
-        callWithABI(JS_FUNC_TO_DATA_PTR(void *, AssumeUnreachable_));
+        callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, AssumeUnreachable_));
 
         PopRegsInMask(RegisterSet::Volatile());
     }
 #endif
 
     breakpoint();
 }
 
@@ -1360,17 +1368,17 @@ MacroAssembler::printf(const char *outpu
     RegisterSet regs = RegisterSet::Volatile();
     PushRegsInMask(regs);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(1, temp);
     movePtr(ImmPtr(output), temp);
     passABIArg(temp);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, Printf0_));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, Printf0_));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 static void
 Printf1_(const char *output, uintptr_t value) {
     char *line = JS_sprintf_append(nullptr, output, value);
     fprintf(stderr, "%s", line);
@@ -1386,17 +1394,17 @@ MacroAssembler::printf(const char *outpu
     regs.takeUnchecked(value);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     movePtr(ImmPtr(output), temp);
     passABIArg(temp);
     passABIArg(value);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, Printf1_));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, Printf1_));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 #ifdef JS_TRACE_LOGGING
 void
 MacroAssembler::tracelogStartId(Register logger, uint32_t textId, bool force)
 {
@@ -1409,17 +1417,17 @@ MacroAssembler::tracelogStartId(Register
     regs.takeUnchecked(logger);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     passABIArg(logger);
     move32(Imm32(textId), temp);
     passABIArg(temp);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, TraceLogStartEventPrivate));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogStartEventPrivate));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 void
 MacroAssembler::tracelogStartId(Register logger, Register textId)
 {
     PushRegsInMask(RegisterSet::Volatile());
@@ -1428,17 +1436,17 @@ MacroAssembler::tracelogStartId(Register
     regs.takeUnchecked(logger);
     regs.takeUnchecked(textId);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     passABIArg(logger);
     passABIArg(textId);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, TraceLogStartEventPrivate));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogStartEventPrivate));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 void
 MacroAssembler::tracelogStartEvent(Register logger, Register event)
 {
     void (&TraceLogFunc)(TraceLoggerThread *, const TraceLoggerEvent &) = TraceLogStartEvent;
@@ -1449,17 +1457,17 @@ MacroAssembler::tracelogStartEvent(Regis
     regs.takeUnchecked(logger);
     regs.takeUnchecked(event);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     passABIArg(logger);
     passABIArg(event);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogFunc));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 void
 MacroAssembler::tracelogStopId(Register logger, uint32_t textId, bool force)
 {
     if (!force && !TraceLogTextIdEnabled(textId))
@@ -1472,17 +1480,17 @@ MacroAssembler::tracelogStopId(Register 
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     passABIArg(logger);
     move32(Imm32(textId), temp);
     passABIArg(temp);
 
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, TraceLogStopEventPrivate));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogStopEventPrivate));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 
 void
 MacroAssembler::tracelogStopId(Register logger, Register textId)
 {
     PushRegsInMask(RegisterSet::Volatile());
@@ -1491,17 +1499,17 @@ MacroAssembler::tracelogStopId(Register 
 
     regs.takeUnchecked(textId);
 
     Register temp = regs.takeGeneral();
 
     setupUnalignedABICall(2, temp);
     passABIArg(logger);
     passABIArg(textId);
-    callWithABI(JS_FUNC_TO_DATA_PTR(void *, TraceLogStopEventPrivate));
+    callWithABINoProfiling(JS_FUNC_TO_DATA_PTR(void *, TraceLogStopEventPrivate));
 
     PopRegsInMask(RegisterSet::Volatile());
 }
 #endif
 
 void
 MacroAssembler::convertInt32ValueToDouble(const Address &address, Register scratch, Label *done)
 {
@@ -1913,40 +1921,16 @@ MacroAssembler::finish()
         bind(&failureLabel_);
         handleFailure();
     }
 
     MacroAssemblerSpecific::finish();
 }
 
 void
-MacroAssembler::link(JitCode *code)
-{
-    MOZ_ASSERT(!oom());
-    // If this code can transition to C++ code and witness a GC, then we need to store
-    // the JitCode onto the stack in order to GC it correctly.  exitCodePatch should
-    // be unset if the code never needed to push its JitCode*.
-    if (hasEnteredExitFrame()) {
-        exitCodePatch_.fixup(this);
-        PatchDataWithValueCheck(CodeLocationLabel(code, exitCodePatch_),
-                                ImmPtr(code),
-                                ImmPtr((void*)-1));
-    }
-
-    // Fix up the code pointers to be written for locations where profilerCallSite
-    // emitted moves of RIP to a register.
-    for (size_t i = 0; i < profilerCallSites_.length(); i++) {
-        CodeOffsetLabel offset = profilerCallSites_[i];
-        offset.fixup(this);
-        CodeLocationLabel location(code, offset);
-        PatchDataWithValueCheck(location, ImmPtr(location.raw()), ImmPtr((void*)-1));
-    }
-}
-
-void
 MacroAssembler::branchIfNotInterpretedConstructor(Register fun, Register scratch, Label *label)
 {
     // 16-bit loads are slow and unaligned 32-bit loads may be too so
     // perform an aligned 32-bit load and adjust the bitmask accordingly.
     MOZ_ASSERT(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0);
     MOZ_ASSERT(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2);
 
     // Emit code for the following test:
@@ -2010,32 +1994,59 @@ MacroAssembler::branchEqualTypeIfNeeded(
             branchTestObject(Equal, tag, label);
             break;
           default:
             MOZ_CRASH("Unsupported type");
         }
     }
 }
 
+
+// If a pseudostack frame has this as its label, its stack pointer
+// field points to the registers saved on entry to JIT code.  A native
+// stack unwinder could use that information to continue unwinding
+// past that point.
+const char MacroAssembler::enterJitLabel[] = "EnterJIT";
+
+// Creates an enterJIT pseudostack frame, as described above.  Pushes
+// a word to the stack to indicate whether this was done.  |framePtr| is
+// the pointer to the machine-dependent saved state.
 void
-MacroAssembler::profilerPreCallImpl()
+MacroAssembler::spsMarkJit(SPSProfiler *p, Register framePtr, Register temp)
 {
-    Register reg = CallTempReg0;
-    Register reg2 = CallTempReg1;
-    push(reg);
-    push(reg2);
-    profilerPreCallImpl(reg, reg2);
-    pop(reg2);
-    pop(reg);
+    Label spsNotEnabled;
+    uint32_t *enabledAddr = p->addressOfEnabled();
+    load32(AbsoluteAddress(enabledAddr), temp);
+    push(temp); // +4: Did we push an sps frame.
+    branchTest32(Assembler::Equal, temp, temp, &spsNotEnabled);
+
+    Label stackFull;
+    // We always need the "safe" versions, because these are used in trampolines
+    // and won't be regenerated when SPS state changes.
+    spsProfileEntryAddressSafe(p, 0, temp, &stackFull);
+
+    // Push a C++ frame with non-copy label
+    storePtr(ImmPtr(enterJitLabel), Address(temp, ProfileEntry::offsetOfLabel()));
+    storePtr(framePtr,              Address(temp, ProfileEntry::offsetOfSpOrScript()));
+    store32(Imm32(0),               Address(temp, ProfileEntry::offsetOfLineOrPc()));
+    store32(Imm32(ProfileEntry::IS_CPP_ENTRY), Address(temp, ProfileEntry::offsetOfFlags()));
+
+    /* Always increment the stack size, whether or not we actually pushed. */
+    bind(&stackFull);
+    loadPtr(AbsoluteAddress(p->addressOfSizePointer()), temp);
+    add32(Imm32(1), Address(temp, 0));
+
+    bind(&spsNotEnabled);
 }
 
+// Pops the word pushed by spsMarkJit and, if spsMarkJit pushed an SPS
+// frame, pops it.
 void
-MacroAssembler::profilerPreCallImpl(Register reg, Register reg2)
+MacroAssembler::spsUnmarkJit(SPSProfiler *p, Register temp)
 {
-    JitContext *icx = GetJitContext();
-    AbsoluteAddress profilingActivation(icx->runtime->addressOfProfilingActivation());
+    Label spsNotEnabled;
+    pop(temp); // -4: Was the profiler enabled.
+    branchTest32(Assembler::Equal, temp, temp, &spsNotEnabled);
 
-    CodeOffsetLabel label = movWithPatch(ImmWord(uintptr_t(-1)), reg);
-    loadPtr(profilingActivation, reg2);
-    storePtr(reg, Address(reg2, JitActivation::offsetOfLastProfilingCallSite()));
+    spsPopFrameSafe(p, temp);
 
-    appendProfilerCallSite(label);
+    bind(&spsNotEnabled);
 }
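
spsMarkJit and spsUnmarkJit above emit machine code, but the pseudostack protocol they implement is simple: record whether the profiler was enabled in a word kept on the machine stack, conditionally write an "EnterJIT" C++ entry, and always bump the size so the later unmark stays balanced. A rough C++ equivalent follows; the types and field names are simplified stand-ins for SPSProfiler and ProfileEntry, not the real API.

// Sketch only: mirrors the bookkeeping done by the emitted spsMarkJit/spsUnmarkJit code.
#include <cstdint>

struct FakeEntry { const char *label; void *spOrScript; int32_t lineOrPc; uint32_t flags; };

struct FakeProfiler {
    bool enabled;
    FakeEntry *stack;
    uint32_t size;
    uint32_t maxSize;
};

// Returns the flag generateEnterJIT keeps on the stack so the matching
// unmark knows whether a pop is needed.
static bool markJit(FakeProfiler &p, void *framePtr) {
    if (!p.enabled)
        return false;
    if (p.size < p.maxSize) {
        FakeEntry &e = p.stack[p.size];
        e.label = "EnterJIT";       // non-copied C++ label
        e.spOrScript = framePtr;    // saved-register block, usable by a native unwinder
        e.lineOrPc = 0;
        e.flags = 1;                // stands in for ProfileEntry::IS_CPP_ENTRY
    }
    p.size++;                       // incremented even when the stack is full
    return true;
}

static void unmarkJit(FakeProfiler &p, bool didMark) {
    if (didMark)
        p.size--;                   // balances the unconditional increment above
}
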
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -180,28 +180,37 @@ class MacroAssembler : public MacroAssem
             masm.branchPtr(cond(), reg(), ptr_, jump());
         }
     };
 
     mozilla::Maybe<AutoRooter> autoRooter_;
     mozilla::Maybe<JitContext> jitContext_;
     mozilla::Maybe<AutoJitContextAlloc> alloc_;
 
+    // SPS instrumentation, only used for Ion caches.
+    mozilla::Maybe<IonInstrumentation> spsInstrumentation_;
+    jsbytecode *spsPc_;
+
   private:
     // This field is used to manage profiling instrumentation output. If
     // provided and enabled, then instrumentation will be emitted around call
-    // sites.
-    bool emitProfilingInstrumentation_;
+    // sites. The IonInstrumentation instance is hosted inside of
+    // CodeGeneratorShared and is the manager of when instrumentation is
+    // actually emitted or not. If nullptr, then no instrumentation is emitted.
+    IonInstrumentation *sps_;
 
     // Labels for handling exceptions and failures.
     NonAssertingLabel failureLabel_;
 
   public:
+    // If instrumentation should be emitted, then the sps parameter should be
+    // provided, but otherwise it can be safely omitted to prevent all
+    // instrumentation from being emitted.
     MacroAssembler()
-      : emitProfilingInstrumentation_(false)
+      : sps_(nullptr)
     {
         JitContext *jcx = GetJitContext();
         JSContext *cx = jcx->cx;
         if (cx)
             constructRoot(cx);
 
         if (!jcx->temp) {
             MOZ_ASSERT(cx);
@@ -214,46 +223,52 @@ class MacroAssembler : public MacroAssem
         m_buffer.id = jcx->getNextAssemblerId();
 #endif
     }
 
     // This constructor should only be used when there is no JitContext active
     // (for example, Trampoline-$(ARCH).cpp and IonCaches.cpp).
     explicit MacroAssembler(JSContext *cx, IonScript *ion = nullptr,
                             JSScript *script = nullptr, jsbytecode *pc = nullptr)
-      : emitProfilingInstrumentation_(false)
+      : sps_(nullptr)
     {
         constructRoot(cx);
         jitContext_.emplace(cx, (js::jit::TempAllocator *)nullptr);
         alloc_.emplace(cx);
         moveResolver_.setAllocator(*jitContext_->temp);
 #ifdef JS_CODEGEN_ARM
         initWithAllocator();
         m_buffer.id = GetJitContext()->getNextAssemblerId();
 #endif
         if (ion) {
             setFramePushed(ion->frameSize());
-            if (pc && cx->runtime()->spsProfiler.enabled())
-                emitProfilingInstrumentation_ = true;
+            if (pc && cx->runtime()->spsProfiler.enabled()) {
+                // We have to update the SPS pc when this IC stub calls into
+                // the VM.
+                spsPc_ = pc;
+                spsInstrumentation_.emplace(&cx->runtime()->spsProfiler, &spsPc_);
+                sps_ = spsInstrumentation_.ptr();
+                sps_->setPushed(script);
+            }
         }
     }
 
     // asm.js compilation handles its own JitContext-pushing
     struct AsmJSToken {};
     explicit MacroAssembler(AsmJSToken)
-      : emitProfilingInstrumentation_(false)
+      : sps_(nullptr)
     {
 #ifdef JS_CODEGEN_ARM
         initWithAllocator();
         m_buffer.id = 0;
 #endif
     }
 
-    void enableProfilingInstrumentation() {
-        emitProfilingInstrumentation_ = true;
+    void setInstrumentation(IonInstrumentation *sps) {
+        sps_ = sps;
     }
 
     void resetForNewCodeGenerator(TempAllocator &alloc) {
         setFramePushed(0);
         moveResolver_.clearTempObjectPool();
         moveResolver_.setAllocator(alloc);
     }
 
@@ -844,66 +859,85 @@ class MacroAssembler : public MacroAssem
     void leaveExitFrame() {
         freeStack(ExitFooterFrame::Size());
     }
 
     bool hasEnteredExitFrame() const {
         return exitCodePatch_.offset() != 0;
     }
 
+    void link(JitCode *code) {
+        MOZ_ASSERT(!oom());
+        // If this code can transition to C++ code and witness a GC, then we need to store
+        // the JitCode onto the stack in order to GC it correctly.  exitCodePatch should
+        // be unset if the code never needed to push its JitCode*.
+        if (hasEnteredExitFrame()) {
+            exitCodePatch_.fixup(this);
+            PatchDataWithValueCheck(CodeLocationLabel(code, exitCodePatch_),
+                                    ImmPtr(code),
+                                    ImmPtr((void*)-1));
+        }
+
+    }
+
     // Generates code used to complete a bailout.
     void generateBailoutTail(Register scratch, Register bailoutInfo);
 
     // These functions exist as small wrappers around sites where execution can
     // leave the currently running stream of instructions. They exist so that
     // instrumentation may be put in place around them if necessary and the
     // instrumentation is enabled. For the functions that return a uint32_t,
     // they are returning the offset of the assembler just after the call has
     // been made so that a safepoint can be made at that location.
 
     template <typename T>
+    void callWithABINoProfiling(const T &fun, MoveOp::Type result = MoveOp::GENERAL) {
+        MacroAssemblerSpecific::callWithABI(fun, result);
+    }
+
+    template <typename T>
     void callWithABI(const T &fun, MoveOp::Type result = MoveOp::GENERAL) {
-        profilerPreCall();
-        MacroAssemblerSpecific::callWithABI(fun, result);
-        profilerPostReturn();
+        leaveSPSFrame();
+        callWithABINoProfiling(fun, result);
+        reenterSPSFrame();
     }
 
     // see above comment for what is returned
     uint32_t callJit(Register callee) {
-        profilerPreCall();
+        leaveSPSFrame();
         MacroAssemblerSpecific::callJit(callee);
         uint32_t ret = currentOffset();
-        profilerPostReturn();
+        reenterSPSFrame();
         return ret;
     }
 
     // see above comment for what is returned
     uint32_t callWithExitFrame(Label *target) {
-        profilerPreCall();
+        leaveSPSFrame();
         MacroAssemblerSpecific::callWithExitFrame(target);
         uint32_t ret = currentOffset();
-        profilerPostReturn();
+        reenterSPSFrame();
         return ret;
     }
 
     // see above comment for what is returned
     uint32_t callWithExitFrame(JitCode *target) {
-        profilerPreCall();
+        leaveSPSFrame();
         MacroAssemblerSpecific::callWithExitFrame(target);
         uint32_t ret = currentOffset();
-        profilerPostReturn();
+        reenterSPSFrame();
         return ret;
     }
 
     // see above comment for what is returned
     uint32_t callWithExitFrame(JitCode *target, Register dynStack) {
-        profilerPreCall();
+        leaveSPSFrame();
         MacroAssemblerSpecific::callWithExitFrame(target, dynStack);
         uint32_t ret = currentOffset();
-        profilerPostReturn();
+        reenterSPSFrame();
         return ret;
     }
 
     void branchTestObjectTruthy(bool truthy, Register objReg, Register scratch,
                                 Label *slowCheck, Label *checked)
     {
         // The branches to out-of-line code here implement a conservative version
         // of the JSObject::isWrapper test performed in EmulatesUndefined.  If none
@@ -929,29 +963,175 @@ class MacroAssembler : public MacroAssem
         loadObjClass(object, scratch);
         branchTestClassIsProxy(proxy, scratch, label);
     }
 
   private:
     // These two functions are helpers used around call sites throughout the
     // assembler. They are called from the above call wrappers to emit the
     // necessary instrumentation.
-    void profilerPreCall() {
-        if (!emitProfilingInstrumentation_)
+    void leaveSPSFrame() {
+        if (!sps_ || !sps_->enabled())
+            return;
+        // No registers are guaranteed to be available, so push/pop a register
+        // so we can use one
+        push(CallTempReg0);
+        sps_->leave(*this, CallTempReg0);
+        pop(CallTempReg0);
+    }
+
+    void reenterSPSFrame() {
+        if (!sps_ || !sps_->enabled())
             return;
-        profilerPreCallImpl();
+        // Attempt to use a now-free register within a given set, but if the
+        // architecture being built doesn't have an available register, resort
+        // to push/pop
+        GeneralRegisterSet regs(Registers::TempMask & ~Registers::JSCallMask &
+                                                      ~Registers::CallMask);
+        if (regs.empty()) {
+            push(CallTempReg0);
+            sps_->reenter(*this, CallTempReg0);
+            pop(CallTempReg0);
+        } else {
+            sps_->reenter(*this, regs.getAny());
+        }
     }
 
-    void profilerPostReturn() {
-        if (!emitProfilingInstrumentation_)
-            return;
-        profilerPostReturnImpl();
+    void spsProfileEntryAddress(SPSProfiler *p, int offset, Register temp,
+                                Label *full)
+    {
+        movePtr(ImmPtr(p->sizePointer()), temp);
+        load32(Address(temp, 0), temp);
+        if (offset != 0)
+            add32(Imm32(offset), temp);
+        branch32(Assembler::GreaterThanOrEqual, temp, Imm32(p->maxSize()), full);
+
+        JS_STATIC_ASSERT(sizeof(ProfileEntry) == (2 * sizeof(void *)) + 8);
+        if (sizeof(void *) == 4) {
+            lshiftPtr(Imm32(4), temp);
+        } else {
+            lshiftPtr(Imm32(3), temp);
+            mulBy3(temp, temp);
+        }
+
+        addPtr(ImmPtr(p->stack()), temp);
+    }
+
+    // The safe version of the above method refrains from assuming that the fields
+    // of the SPSProfiler class are going to stay the same across different runs of
+    // the jitcode.  Ion can use the more efficient unsafe version because Ion jitcode
+    // will not survive changes to the profiler settings.  Baseline jitcode, however,
+    // can span these changes, so any hardcoded field values will be incorrect afterwards.
+    // All the sps-related methods used by baseline call |spsProfileEntryAddressSafe|.
+    void spsProfileEntryAddressSafe(SPSProfiler *p, int offset, Register temp,
+                                    Label *full)
+    {
+        // Load size pointer
+        loadPtr(AbsoluteAddress(p->addressOfSizePointer()), temp);
+
+        // Load size
+        load32(Address(temp, 0), temp);
+        if (offset != 0)
+            add32(Imm32(offset), temp);
+
+        // Test against max size.
+        branch32(Assembler::LessThanOrEqual, AbsoluteAddress(p->addressOfMaxSize()), temp, full);
+
+        JS_STATIC_ASSERT(sizeof(ProfileEntry) == (2 * sizeof(void *)) + 8);
+        if (sizeof(void *) == 4) {
+            lshiftPtr(Imm32(4), temp);
+        } else {
+            lshiftPtr(Imm32(3), temp);
+            mulBy3(temp, temp);
+        }
+
+        push(temp);
+        loadPtr(AbsoluteAddress(p->addressOfStack()), temp);
+        addPtr(Address(StackPointer, 0), temp);
+        addPtr(Imm32(sizeof(size_t)), StackPointer);
     }
 
   public:
+    // These functions are needed by the IonInstrumentation interface defined in
+    // vm/SPSProfiler.h.  They will modify the pseudostack provided to SPS to
+    // perform the actual instrumentation.
+
+    void spsUpdatePCIdx(SPSProfiler *p, int32_t idx, Register temp) {
+        Label stackFull;
+        spsProfileEntryAddress(p, -1, temp, &stackFull);
+        store32(Imm32(idx), Address(temp, ProfileEntry::offsetOfLineOrPc()));
+        bind(&stackFull);
+    }
+
+    void spsUpdatePCIdx(SPSProfiler *p, Register idx, Register temp) {
+        Label stackFull;
+        spsProfileEntryAddressSafe(p, -1, temp, &stackFull);
+        store32(idx, Address(temp, ProfileEntry::offsetOfLineOrPc()));
+        bind(&stackFull);
+    }
+
+    // spsPushFrame variant for Ion-optimized scripts.
+    void spsPushFrame(SPSProfiler *p, const char *str, JSScript *s, Register temp) {
+        Label stackFull;
+        spsProfileEntryAddress(p, 0, temp, &stackFull);
+
+        // Push a JS frame with a copy label
+        storePtr(ImmPtr(str), Address(temp, ProfileEntry::offsetOfLabel()));
+        storePtr(ImmGCPtr(s), Address(temp, ProfileEntry::offsetOfSpOrScript()));
+        store32(Imm32(ProfileEntry::NullPCOffset), Address(temp, ProfileEntry::offsetOfLineOrPc()));
+        store32(Imm32(ProfileEntry::FRAME_LABEL_COPY), Address(temp, ProfileEntry::offsetOfFlags()));
+
+        /* Always increment the stack size, whether or not we actually pushed. */
+        bind(&stackFull);
+        movePtr(ImmPtr(p->sizePointer()), temp);
+        add32(Imm32(1), Address(temp, 0));
+    }
+
+    // spsPushFrame variant for Baseline-optimized scripts.
+    void spsPushFrame(SPSProfiler *p, const Address &str, const Address &script,
+                      Register temp, Register temp2)
+    {
+        Label stackFull;
+        spsProfileEntryAddressSafe(p, 0, temp, &stackFull);
+
+        // Push a JS frame with a copy label
+        loadPtr(str, temp2);
+        storePtr(temp2, Address(temp, ProfileEntry::offsetOfLabel()));
+
+        loadPtr(script, temp2);
+        storePtr(temp2, Address(temp, ProfileEntry::offsetOfSpOrScript()));
+
+        // Store 0 for PCIdx because that's what the interpreter does.
+        // (See probes::EnterScript, which calls spsProfiler.enter, which pushes an entry
+        //  with 0 pcIdx).
+        store32(Imm32(0), Address(temp, ProfileEntry::offsetOfLineOrPc()));
+        store32(Imm32(ProfileEntry::FRAME_LABEL_COPY), Address(temp, ProfileEntry::offsetOfFlags()));
+
+        /* Always increment the stack size, whether or not we actually pushed. */
+        bind(&stackFull);
+        movePtr(ImmPtr(p->addressOfSizePointer()), temp);
+        loadPtr(Address(temp, 0), temp);
+        add32(Imm32(1), Address(temp, 0));
+    }
+
+    void spsPopFrame(SPSProfiler *p, Register temp) {
+        movePtr(ImmPtr(p->sizePointer()), temp);
+        add32(Imm32(-1), Address(temp, 0));
+    }
+
+    // spsPopFrameSafe does not assume |profiler->sizePointer()| will stay constant.
+    void spsPopFrameSafe(SPSProfiler *p, Register temp) {
+        loadPtr(AbsoluteAddress(p->addressOfSizePointer()), temp);
+        add32(Imm32(-1), Address(temp, 0));
+    }
+
+    static const char enterJitLabel[];
+    void spsMarkJit(SPSProfiler *p, Register framePtr, Register temp);
+    void spsUnmarkJit(SPSProfiler *p, Register temp);
+
     void loadBaselineOrIonRaw(Register script, Register dest, Label *failure);
     void loadBaselineOrIonNoArgCheck(Register callee, Register dest, Label *failure);
 
     void loadBaselineFramePtr(Register framePtr, Register dest);
 
     void pushBaselineFramePtr(Register framePtr, Register scratch) {
         loadBaselineFramePtr(framePtr, scratch);
         push(scratch);
@@ -966,17 +1146,16 @@ class MacroAssembler : public MacroAssem
         return &failureLabel_;
     }
 
     Label *failureLabel() {
         return &failureLabel_;
     }
 
     void finish();
-    void link(JitCode *code);
 
     void assumeUnreachable(const char *output);
     void printf(const char *output);
     void printf(const char *output, Register value);
 
 #ifdef JS_TRACE_LOGGING
     void tracelogStartId(Register logger, uint32_t textId, bool force = false);
     void tracelogStartId(Register logger, Register textId);
@@ -1238,20 +1417,16 @@ class MacroAssembler : public MacroAssem
 #ifdef DEBUG
         Label ok;
         MOZ_ASSERT(IsPowerOfTwo(alignment));
         branchTestPtr(Assembler::Zero, StackPointer, Imm32(alignment - 1), &ok);
         breakpoint();
         bind(&ok);
 #endif
     }
-
-    void profilerPreCallImpl();
-    void profilerPreCallImpl(Register reg, Register reg2);
-    void profilerPostReturnImpl() {}
 };
 
 static inline Assembler::DoubleCondition
 JSOpToDoubleCondition(JSOp op)
 {
     switch (op) {
       case JSOP_EQ:
       case JSOP_STRICTEQ:
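
spsProfileEntryAddress and its "safe" variant convert the current pseudostack size into an entry address without a general multiply: the static assert pins sizeof(ProfileEntry) to 2 * sizeof(void *) + 8, i.e. 16 bytes on 32-bit targets (a single shift by 4) and 24 bytes on 64-bit targets (shift by 3, then multiply by 3). The same computation in scalar form, as a sketch:

// Scalar equivalent of the emitted address computation; entry sizes follow the
// JS_STATIC_ASSERT above.
#include <cstdint>

static uintptr_t profileEntryAddress(uintptr_t stackBase, uint32_t index) {
    uintptr_t offset;
    if (sizeof(void *) == 4)
        offset = uintptr_t(index) << 4;          // 16-byte ProfileEntry on 32-bit
    else
        offset = (uintptr_t(index) << 3) * 3;    // 24-byte ProfileEntry on 64-bit
    return stackBase + offset;
}
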
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -572,16 +572,29 @@ NewSingletonCallObject(JSContext *cx, Ha
 
 JSObject *
 NewStringObject(JSContext *cx, HandleString str)
 {
     return StringObject::create(cx, str);
 }
 
 bool
+SPSEnter(JSContext *cx, HandleScript script)
+{
+    return cx->runtime()->spsProfiler.enter(script, script->functionNonDelazifying());
+}
+
+bool
+SPSExit(JSContext *cx, HandleScript script)
+{
+    cx->runtime()->spsProfiler.exit(script, script->functionNonDelazifying());
+    return true;
+}
+
+bool
 OperatorIn(JSContext *cx, HandleValue key, HandleObject obj, bool *out)
 {
     RootedId id(cx);
     if (!ValueToId<CanGC>(cx, key, &id))
         return false;
 
     RootedObject obj2(cx);
     RootedShape prop(cx);
@@ -785,16 +798,25 @@ DebugEpilogue(JSContext *cx, BaselineFra
     if (frame->isNonEvalFunctionFrame()) {
         MOZ_ASSERT_IF(ok, frame->hasReturnValue());
         DebugScopes::onPopCall(frame, cx);
     } else if (frame->isStrictEvalFrame()) {
         MOZ_ASSERT_IF(frame->hasCallObj(), frame->scopeChain()->as<CallObject>().isForEval());
         DebugScopes::onPopStrictEvalScope(frame);
     }
 
+    // If the frame has a pushed SPS frame, make sure to pop it.
+    if (frame->hasPushedSPSFrame()) {
+        cx->runtime()->spsProfiler.exit(frame->script(), frame->maybeFun());
+        // Unset the pushedSPSFrame flag because DebugEpilogue may get called before
+        // probes::ExitScript in baseline during exception handling, and we don't
+        // want to double-pop SPS frames.
+        frame->unsetPushedSPSFrame();
+    }
+
     if (!ok) {
         // Pop this frame by updating jitTop, so that the exception handling
         // code will start at the previous frame.
 
         JitFrameLayout *prefix = frame->framePrefix();
         EnsureExitFrame(prefix);
         cx->mainThread().jitTop = (uint8_t *)prefix;
         return false;
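
The DebugEpilogue hunk above also guards against double-popping: the SPS frame is popped at most once, and the pushedSPSFrame flag is cleared so a later probes::ExitScript cannot pop it again. In sketch form, with simplified stand-ins for BaselineFrame and SPSProfiler:

// Sketch of the restored double-pop guard; types are illustrative only.
struct FakeFrame { bool pushedSPSFrame; };
struct FakeSPS { unsigned depth; };

static void popSPSFrameOnce(FakeSPS &sps, FakeFrame &frame) {
    if (frame.pushedSPSFrame) {
        sps.depth--;                    // corresponds to spsProfiler.exit(script, fun)
        frame.pushedSPSFrame = false;   // so a later ExitScript will not pop again
    }
}
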
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -700,16 +700,19 @@ bool SetProperty(JSContext *cx, HandleOb
 bool InterruptCheck(JSContext *cx);
 
 void *MallocWrapper(JSRuntime *rt, size_t nbytes);
 JSObject *NewCallObject(JSContext *cx, HandleShape shape, HandleTypeObject type,
                         uint32_t lexicalBegin);
 JSObject *NewSingletonCallObject(JSContext *cx, HandleShape shape, uint32_t lexicalBegin);
 JSObject *NewStringObject(JSContext *cx, HandleString str);
 
+bool SPSEnter(JSContext *cx, HandleScript script);
+bool SPSExit(JSContext *cx, HandleScript script);
+
 bool OperatorIn(JSContext *cx, HandleValue key, HandleObject obj, bool *out);
 bool OperatorInI(JSContext *cx, uint32_t index, HandleObject obj, bool *out);
 
 bool GetIntrinsicValue(JSContext *cx, HandlePropertyName name, MutableHandleValue rval);
 
 bool CreateThis(JSContext *cx, HandleObject callee, MutableHandleValue rval);
 
 void GetDynamicName(JSContext *cx, JSObject *scopeChain, JSString *str, Value *vp);
--- a/js/src/jit/arm/CodeGenerator-arm.cpp
+++ b/js/src/jit/arm/CodeGenerator-arm.cpp
@@ -42,21 +42,16 @@ CodeGeneratorARM::CodeGeneratorARM(MIRGe
 bool
 CodeGeneratorARM::generatePrologue()
 {
     MOZ_ASSERT(masm.framePushed() == 0);
     MOZ_ASSERT(!gen->compilingAsmJS());
 #ifdef JS_USE_LINK_REGISTER
     masm.pushReturnAddress();
 #endif
-
-    // If profiling, save the current frame pointer to a per-thread global field.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerEnterFrame(StackPointer, CallTempReg0);
-
     // Note that this automatically sets MacroAssembler::framePushed().
     masm.reserveStack(frameSize());
     masm.checkStackAlignment();
 
     emitTracelogIonStart();
 
     return true;
 }
@@ -66,22 +61,16 @@ CodeGeneratorARM::generateEpilogue()
 {
     MOZ_ASSERT(!gen->compilingAsmJS());
     masm.bind(&returnLabel_);
 
     emitTracelogIonStop();
 
     masm.freeStack(frameSize());
     MOZ_ASSERT(masm.framePushed() == 0);
-
-    // If profiling, reset the per-thread global lastJitFrame to point to
-    // the previous frame.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerExitFrame();
-
     masm.pop(pc);
     masm.flushBuffer();
     return true;
 }
 
 void
 CodeGeneratorARM::emitBranch(Assembler::Condition cond, MBasicBlock *mirTrue, MBasicBlock *mirFalse)
 {
--- a/js/src/jit/arm/MacroAssembler-arm.cpp
+++ b/js/src/jit/arm/MacroAssembler-arm.cpp
@@ -4289,28 +4289,16 @@ MacroAssemblerARMCompat::handleFailureWi
     // Only used in debug mode. Return BaselineFrame->returnValue() to the
     // caller.
     bind(&return_);
     ma_ldr(Operand(sp, offsetof(ResumeFromException, framePointer)), r11);
     ma_ldr(Operand(sp, offsetof(ResumeFromException, stackPointer)), sp);
     loadValue(Address(r11, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
     ma_mov(r11, sp);
     pop(r11);
-
-    // If profiling is enabled, then update the lastProfilingFrame to refer to caller
-    // frame before returning.
-    {
-        Label skipProfilingInstrumentation;
-        // Test if profiler enabled.
-        AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->spsProfiler().addressOfEnabled());
-        branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        profilerExitFrame();
-        bind(&skipProfilingInstrumentation);
-    }
-
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to the
     // bailout tail stub.
     bind(&bailout);
     ma_ldr(Operand(sp, offsetof(ResumeFromException, bailoutInfo)), r2);
     ma_mov(Imm32(BAILOUT_RETURN_OK), r0);
     ma_ldr(Operand(sp, offsetof(ResumeFromException, target)), r1);
@@ -5029,23 +5017,8 @@ js::jit::MacroAssemblerARMCompat::atomic
 template void
 js::jit::MacroAssemblerARMCompat::atomicFetchOp(int nbytes, bool signExtend, AtomicOp op,
                                                 const Register &value, const Address &mem,
                                                 Register temp, Register output);
 template void
 js::jit::MacroAssemblerARMCompat::atomicFetchOp(int nbytes, bool signExtend, AtomicOp op,
                                                 const Register &value, const BaseIndex &mem,
                                                 Register temp, Register output);
-
-void
-MacroAssemblerARMCompat::profilerEnterFrame(Register framePtr, Register scratch)
-{
-    AbsoluteAddress activation(GetJitContext()->runtime->addressOfProfilingActivation());
-    loadPtr(activation, scratch);
-    storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
-    storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
-}
-
-void
-MacroAssemblerARMCompat::profilerExitFrame()
-{
-    branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
-}
--- a/js/src/jit/arm/MacroAssembler-arm.h
+++ b/js/src/jit/arm/MacroAssembler-arm.h
@@ -1833,20 +1833,16 @@ class MacroAssemblerARMCompat : public M
         loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);
     }
     void loadAsmJSHeapRegisterFromGlobalData() {
         loadPtr(Address(GlobalReg, AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias), HeapReg);
     }
     void pushReturnAddress() {
         push(lr);
     }
-
-    // Instrumentation for entering and leaving the profiler.
-    void profilerEnterFrame(Register framePtr, Register scratch);
-    void profilerExitFrame();
 };
 
 typedef MacroAssemblerARMCompat MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_arm_MacroAssembler_arm_h */
--- a/js/src/jit/arm/Trampoline-arm.cpp
+++ b/js/src/jit/arm/Trampoline-arm.cpp
@@ -31,18 +31,18 @@ static const FloatRegisterSet NonVolatil
                      (1ULL << FloatRegisters::d15));
 
 static void
 GenerateReturn(MacroAssembler &masm, int returnCode, SPSProfiler *prof)
 {
     // Restore non-volatile floating point registers.
     masm.transferMultipleByRuns(NonVolatileFloatRegs, IsLoad, StackPointer, IA);
 
-    // Get rid of padding word.
-    masm.addPtr(Imm32(sizeof(void*)), sp);
+    // Unwind the sps mark.
+    masm.spsUnmarkJit(prof, r8);
 
     // Set up return value
     masm.ma_mov(Imm32(returnCode), r0);
 
     // Pop and return
     masm.startDataTransferM(IsLoad, sp, IA, WriteBack);
     masm.transferReg(r4);
     masm.transferReg(r5);
@@ -64,18 +64,17 @@ struct EnterJITStack
     double d9;
     double d10;
     double d11;
     double d12;
     double d13;
     double d14;
     double d15;
 
-    // Padding.
-    void *padding;
+    size_t hasSPSMark;
 
     // Non-volatile registers.
     void *r4;
     void *r5;
     void *r6;
     void *r7;
     void *r8;
     void *r9;
@@ -125,18 +124,19 @@ JitRuntime::generateEnterJIT(JSContext *
     masm.transferReg(r9); // [sp,20]
     masm.transferReg(r10); // [sp,24]
     masm.transferReg(r11); // [sp,28]
     // The abi does not expect r12 (ip) to be preserved
     masm.transferReg(lr);  // [sp,32]
     // The 5th argument is located at [sp, 36]
     masm.finishDataTransfer();
 
-    // Add padding word.
-    masm.subPtr(Imm32(sizeof(void*)), sp);
+    // Push the EnterJIT sps mark. "Frame pointer" = start of saved core regs.
+    masm.movePtr(sp, r8);
+    masm.spsMarkJit(&cx->runtime()->spsProfiler, r8, r9);
 
     // Push the float registers.
     masm.transferMultipleByRuns(NonVolatileFloatRegs, IsStore, sp, DB);
 
     // Save stack pointer into r8
     masm.movePtr(sp, r8);
 
     // Load calleeToken into r9.
@@ -278,29 +278,16 @@ JitRuntime::generateEnterJIT(JSContext *
 
         MOZ_ASSERT(jitcode != ReturnReg);
 
         Label error;
         masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), sp);
         masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
         masm.branchIfFalseBool(ReturnReg, &error);
 
-        // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
-        // if profiler instrumentation is enabled.
-        {
-            Label skipProfilingInstrumentation;
-            Register realFramePtr = numStackValues;
-            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
-                          &skipProfilingInstrumentation);
-            masm.ma_add(framePtr, Imm32(sizeof(void*)), realFramePtr);
-            masm.profilerEnterFrame(realFramePtr, scratch);
-            masm.bind(&skipProfilingInstrumentation);
-        }
-
         masm.jump(jitcode);
 
         // OOM: Load error value, discard return address and previous frame
         // pointer and return.
         masm.bind(&error);
         masm.mov(framePtr, sp);
         masm.addPtr(Imm32(2 * sizeof(uintptr_t)), sp);
         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
@@ -957,27 +944,16 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
     masm.mov(lr, pc);
 
     masm.bind(&forcedReturn);
     masm.loadValue(Address(r11, BaselineFrame::reverseOffsetOfReturnValue()),
                    JSReturnOperand);
     masm.mov(r11, sp);
     masm.pop(r11);
-
-    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
-    // is set to the correct caller frame.
-    {
-        Label skipProfilingInstrumentation;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        masm.profilerExitFrame();
-        masm.bind(&skipProfilingInstrumentation);
-    }
-
     masm.ret();
 
     Linker linker(masm);
     AutoFlushICache afc("DebugTrapHandler");
     JitCode *codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
@@ -1016,299 +992,8 @@ JitRuntime::generateBailoutTailStub(JSCo
     JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
 #endif
 
     return code;
 }
-
-JitCode *
-JitRuntime::generateProfilerExitFrameTailStub(JSContext *cx)
-{
-    MacroAssembler masm;
-
-    Register scratch1 = r5;
-    Register scratch2 = r6;
-    Register scratch3 = r7;
-    Register scratch4 = r8;
-
-    //
-    // The code generated below expects that the current stack pointer points
-    // to an Ion or Baseline frame, at the state it would be immediately
-    // before a ret().  Thus, after this stub's business is done, it executes
-    // a ret() and returns directly to the caller script, on behalf of the
-    // callee script that jumped to this code.
-    //
-    // Thus the expected stack is:
-    //
-    //                                   StackPointer ----+
-    //                                                    v
-    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
-    // MEM-HI                                       MEM-LOW
-    //
-    //
-    // The generated jitcode is responsible for overwriting the
-    // jitActivation->lastProfilingFrame field with a pointer to the previous
-    // Ion or Baseline jit-frame that was pushed before this one. It is also
-    // responsible for overwriting jitActivation->lastProfilingCallSite with
-    // the return address into that frame.  The frame could either be an
-    // immediate "caller" frame, or it could be a frame in a previous
-    // JitActivation (if the current frame was entered from C++, and the C++
-    // was entered by some caller jit-frame further down the stack).
-    //
-    // So this jitcode is responsible for "walking up" the jit stack, finding
-    // the previous Ion or Baseline JS frame, and storing its address and the
-    // return address into the appropriate fields on the current jitActivation.
-    //
-    // There are a fixed number of different path types that can lead to the
-    // current frame, which is either a baseline or ion frame:
-    //
-    // <Baseline-Or-Ion>
-    // ^
-    // |
-    // ^--- Ion
-    // |
-    // ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Argument Rectifier
-    // |    ^
-    // |    |
-    // |    ^--- Ion
-    // |    |
-    // |    ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Entry Frame (From C++)
-    //
-    Register actReg = scratch4;
-    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
-    masm.loadPtr(activationAddr, actReg);
-
-    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
-    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());
-
-#ifdef DEBUG
-    // Ensure that the frame we are exiting is the current lastProfilingFrame
-    {
-        masm.loadPtr(lastProfilingFrame, scratch1);
-        Label checkOk;
-        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
-        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
-        masm.assumeUnreachable(
-            "Mismatch between stored lastProfilingFrame and current stack pointer.");
-        masm.bind(&checkOk);
-    }
-#endif
-
-    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
-    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);
-
-    // Going into the conditionals, we will have:
-    //      FrameDescriptor.size in scratch1
-    //      FrameDescriptor.type in scratch2
-    masm.ma_and(Imm32((1 << FRAMESIZE_SHIFT) - 1), scratch1, scratch2);
-    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-
-    // Handling of each case is dependent on FrameDescriptor.type
-    Label handle_IonJS;
-    Label handle_BaselineStub;
-    Label handle_Rectifier;
-    Label handle_Entry;
-    Label end;
-
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);
-
-    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");
-
-    //
-    // JitFrame_IonJS
-    //
-    // Stack layout:
-    //                  ...
-    //                  Ion-Descriptor
-    //     Prev-FP ---> Ion-ReturnAddr
-    //                  ... previous frame data ... |- Descriptor.Size
-    //                  ... arguments ...           |
-    //                  ActualArgc          |
-    //                  CalleeToken         |- JitFrameLayout::Size()
-    //                  Descriptor          |
-    //        FP -----> ReturnAddr          |
-    //
-    masm.bind(&handle_IonJS);
-    {
-        // |scratch1| contains Descriptor.size
-
-        // returning directly to an IonJS frame.  Store return addr to frame
-        // in lastProfilingCallSite.
-        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        // Store return frame in lastProfilingFrame.
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.ma_add(StackPointer, scratch1, scratch2);
-        masm.ma_add(scratch2, Imm32(JitFrameLayout::Size()), scratch2);
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_BaselineStub
-    //
-    // Look past the stub and store the frame pointer to
-    // the baselineJS frame prior to it.
-    //
-    // Stack layout:
-    //              ...
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-PrevFramePointer
-    //      |       ... BL-FrameData ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
-    //              ... arguments ...           |
-    //              ActualArgc          |
-    //              CalleeToken         |- JitFrameLayout::Size()
-    //              Descriptor          |
-    //    FP -----> ReturnAddr          |
-    //
-    // We take advantage of the fact that the stub frame saves the frame
-    // pointer pointing to the baseline frame, so a bunch of calculation can
-    // be avoided.
-    //
-    masm.bind(&handle_BaselineStub);
-    {
-        masm.ma_add(StackPointer, scratch1, scratch3);
-        Address stubFrameReturnAddr(scratch3,
-                                    JitFrameLayout::Size() +
-                                    BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        Address stubFrameSavedFramePtr(scratch3,
-                                       JitFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2); // Skip past BL-PrevFramePtr
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-
-    //
-    // JitFrame_Rectifier
-    //
-    // The rectifier frame can be preceded by either an IonJS or a
-    // BaselineStub frame.
-    //
-    // Stack layout if caller of rectifier was Ion:
-    //
-    //              Ion-Descriptor
-    //              Ion-ReturnAddr
-    //              ... ion frame data ... |- Rect-Descriptor.Size
-    //              < COMMON LAYOUT >
-    //
-    // Stack layout if caller of rectifier was Baseline:
-    //
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-SavedFramePointer
-    //      |       ... baseline frame data ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
-    //              ... args to rectifier ...   |
-    //              < COMMON LAYOUT >
-    //
-    // Common stack layout:
-    //
-    //              ActualArgc          |
-    //              CalleeToken         |- IonRectifierFrameLayout::Size()
-    //              Rect-Descriptor     |
-    //              Rect-ReturnAddr     |
-    //              ... rectifier data & args ... |- Descriptor.Size
-    //              ActualArgc      |
-    //              CalleeToken     |- JitFrameLayout::Size()
-    //              Descriptor      |
-    //    FP -----> ReturnAddr      |
-    //
-    masm.bind(&handle_Rectifier);
-    {
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.ma_add(StackPointer, scratch1, scratch2);
-        masm.add32(Imm32(JitFrameLayout::Size()), scratch2);
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
-        masm.ma_lsr(Imm32(FRAMESIZE_SHIFT), scratch3, scratch1);
-        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
-
-        // Now |scratch1| contains Rect-Descriptor.Size
-        // and |scratch2| points to Rectifier frame
-        // and |scratch3| contains Rect-Descriptor.Type
-
-        // Check for either Ion or BaselineStub frame.
-        Label handle_Rectifier_BaselineStub;
-        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
-                      &handle_Rectifier_BaselineStub);
-
-        // Handle Rectifier <- IonJS
-        // scratch3 := RectFrame[ReturnAddr]
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
-        masm.ma_add(scratch2, scratch1, scratch3);
-        masm.add32(Imm32(RectifierFrameLayout::Size()), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-
-        // Handle Rectifier <- BaselineStub <- BaselineJS
-        masm.bind(&handle_Rectifier_BaselineStub);
-#ifdef DEBUG
-        {
-            Label checkOk;
-            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
-            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
-            masm.bind(&checkOk);
-        }
-#endif
-        masm.ma_add(scratch2, scratch1, scratch3);
-        Address stubFrameReturnAddr(scratch3, RectifierFrameLayout::Size() +
-                                              BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        Address stubFrameSavedFramePtr(scratch3,
-                                       RectifierFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2);
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_Entry
-    //
-    // If at an entry frame, store null into both fields.
-    //
-    masm.bind(&handle_Entry);
-    {
-        masm.movePtr(ImmPtr(nullptr), scratch1);
-        masm.storePtr(scratch1, lastProfilingCallSite);
-        masm.storePtr(scratch1, lastProfilingFrame);
-        masm.ret();
-    }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
-}
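
The stub deleted above repeatedly decodes a frame descriptor word into a size and a type via rshiftPtr(FRAMESIZE_SHIFT) and a low-bit mask. A standalone sketch of that packing scheme; the constant values below are placeholders (the real FRAMETYPE_BITS/FRAMESIZE_SHIFT live in JitFrames.h), only the layout is the point:

// Illustrative only: descriptor = (size << FRAMESIZE_SHIFT) | type.
#include <cstddef>
#include <cstdint>

constexpr uintptr_t kFrameTypeBits = 4;               // placeholder value
constexpr uintptr_t kFrameSizeShift = kFrameTypeBits; // size sits above the type bits

constexpr uintptr_t makeDescriptor(size_t frameSize, uintptr_t frameType)
{
    return (static_cast<uintptr_t>(frameSize) << kFrameSizeShift) | frameType;
}

constexpr size_t descriptorSize(uintptr_t descriptor)
{
    return descriptor >> kFrameSizeShift;             // masm.rshiftPtr(FRAMESIZE_SHIFT)
}

constexpr uintptr_t descriptorType(uintptr_t descriptor)
{
    return descriptor & ((uintptr_t(1) << kFrameTypeBits) - 1); // masm.and32(mask)
}

static_assert(descriptorSize(makeDescriptor(48, 2)) == 48, "round-trips size");
static_assert(descriptorType(makeDescriptor(48, 2)) == 2, "round-trips type");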
--- a/js/src/jit/mips/CodeGenerator-mips.cpp
+++ b/js/src/jit/mips/CodeGenerator-mips.cpp
@@ -40,20 +40,16 @@ CodeGeneratorMIPS::CodeGeneratorMIPS(MIR
 }
 
 bool
 CodeGeneratorMIPS::generatePrologue()
 {
     MOZ_ASSERT(masm.framePushed() == 0);
     MOZ_ASSERT(!gen->compilingAsmJS());
 
-    // If profiling, save the current frame pointer to a per-thread global field.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerEnterFrame(StackPointer, CallTempReg0);
-
     // Note that this automatically sets MacroAssembler::framePushed().
     masm.reserveStack(frameSize());
     masm.checkStackAlignment();
 
     emitTracelogIonStart();
 
     return true;
 }
@@ -63,22 +59,16 @@ CodeGeneratorMIPS::generateEpilogue()
 {
     MOZ_ASSERT(!gen->compilingAsmJS());
     masm.bind(&returnLabel_);
 
     emitTracelogIonStop();
 
     masm.freeStack(frameSize());
     MOZ_ASSERT(masm.framePushed() == 0);
-
-    // If profiling, reset the per-thread global lastJitFrame to point to
-    // the previous frame.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerExitFrame();
-
     masm.ret();
     return true;
 }
 
 void
 CodeGeneratorMIPS::branchToBlock(Assembler::FloatFormat fmt, FloatRegister lhs, FloatRegister rhs,
                                  MBasicBlock *mir, Assembler::DoubleCondition cond)
 {
--- a/js/src/jit/mips/MacroAssembler-mips.cpp
+++ b/js/src/jit/mips/MacroAssembler-mips.cpp
@@ -3599,28 +3599,16 @@ MacroAssemblerMIPSCompat::handleFailureW
     // caller.
     bind(&return_);
     ma_lw(BaselineFrameReg, Address(StackPointer, offsetof(ResumeFromException, framePointer)));
     ma_lw(StackPointer, Address(StackPointer, offsetof(ResumeFromException, stackPointer)));
     loadValue(Address(BaselineFrameReg, BaselineFrame::reverseOffsetOfReturnValue()),
               JSReturnOperand);
     ma_move(StackPointer, BaselineFrameReg);
     pop(BaselineFrameReg);
-
-    // If profiling is enabled, then update the lastProfilingFrame to refer to caller
-    // frame before returning.
-    {
-        Label skipProfilingInstrumentation;
-        // Test if profiler enabled.
-        AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->spsProfiler().addressOfEnabled());
-        branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        profilerExitFrame();
-        bind(&skipProfilingInstrumentation);
-    }
-
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
     ma_lw(a2, Address(sp, offsetof(ResumeFromException, bailoutInfo)));
     ma_li(ReturnReg, Imm32(BAILOUT_RETURN_OK));
     ma_lw(a1, Address(sp, offsetof(ResumeFromException, target)));
@@ -3676,23 +3664,8 @@ MacroAssemblerMIPSCompat::branchValueIsN
 
     Label done;
 
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryRange(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
-
-void
-MacroAssemblerMIPSCompat::profilerEnterFrame(Register framePtr, Register scratch)
-{
-    AbsoluteAddress activation(GetJitContext()->runtime->addressOfProfilingActivation());
-    loadPtr(activation, scratch);
-    storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
-    storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
-}
-
-void
-MacroAssemblerMIPSCompat::profilerExitFrame()
-{
-    branch(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
-}
--- a/js/src/jit/mips/MacroAssembler-mips.h
+++ b/js/src/jit/mips/MacroAssembler-mips.h
@@ -1465,20 +1465,16 @@ public:
 
     void loadAsmJSActivation(Register dest) {
         loadPtr(Address(GlobalReg, AsmJSActivationGlobalDataOffset - AsmJSGlobalRegBias), dest);
     }
     void loadAsmJSHeapRegisterFromGlobalData() {
         MOZ_ASSERT(Imm16::IsInSignedRange(AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias));
         loadPtr(Address(GlobalReg, AsmJSHeapGlobalDataOffset - AsmJSGlobalRegBias), HeapReg);
     }
-
-    // Instrumentation for entering and leaving the profiler.
-    void profilerEnterFrame(Register framePtr, Register scratch);
-    void profilerExitFrame();
 };
 
 typedef MacroAssemblerMIPSCompat MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_mips_MacroAssembler_mips_h */
--- a/js/src/jit/mips/Trampoline-mips.cpp
+++ b/js/src/jit/mips/Trampoline-mips.cpp
@@ -204,23 +204,20 @@ JitRuntime::generateEnterJIT(JSContext *
         Label notOsr;
         masm.ma_b(OsrFrameReg, OsrFrameReg, &notOsr, Assembler::Zero, ShortJump);
 
         Register scratch = regs.takeAny();
 
         Register numStackValues = regs.takeAny();
         masm.load32(slotNumStackValues, numStackValues);
 
-        // Push return address.
-        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
+        // Push return address, previous frame pointer.
+        masm.subPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
         masm.ma_li(scratch, returnLabel.dest());
-        masm.storePtr(scratch, Address(StackPointer, 0));
-
-        // Push previous frame pointer.
-        masm.subPtr(Imm32(sizeof(uintptr_t)), StackPointer);
+        masm.storePtr(scratch, Address(StackPointer, sizeof(uintptr_t)));
         masm.storePtr(BaselineFrameReg, Address(StackPointer, 0));
 
         // Reserve frame.
         Register framePtr = BaselineFrameReg;
         masm.subPtr(Imm32(BaselineFrame::Size()), StackPointer);
         masm.movePtr(StackPointer, framePtr);
 
         // Reserve space for locals and stack values.
@@ -259,29 +256,16 @@ JitRuntime::generateEnterJIT(JSContext *
         masm.loadPtr(Address(StackPointer, sizeof(uintptr_t)), framePtr);
         masm.freeStack(2 * sizeof(uintptr_t));
 
         Label error;
         masm.freeStack(ExitFrameLayout::SizeWithFooter());
         masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
         masm.branchIfFalseBool(ReturnReg, &error);
 
-        // If OSR-ing, then emit instrumentation for setting lastProfilingFrame
-        // if profiler instrumentation is enabled.
-        {
-            Label skipProfilingInstrumentation;
-            Register realFramePtr = numStackValues;
-            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
-                          &skipProfilingInstrumentation);
-            masm.ma_add(realFramePtr, StackPointer, Imm32(sizeof(void*)));
-            masm.profilerEnterFrame(realFramePtr, scratch);
-            masm.bind(&skipProfilingInstrumentation);
-        }
-
         masm.jump(jitcode);
 
         // OOM: load error value, discard return address and previous frame
         // pointer and return.
         masm.bind(&error);
         masm.movePtr(framePtr, StackPointer);
         masm.addPtr(Imm32(2 * sizeof(uintptr_t)), StackPointer);
         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
@@ -963,27 +947,16 @@ JitRuntime::generateDebugTrapHandler(JSC
     // ra was restored by EmitLeaveStubFrame
     masm.branch(ra);
 
     masm.bind(&forcedReturn);
     masm.loadValue(Address(s5, BaselineFrame::reverseOffsetOfReturnValue()),
                    JSReturnOperand);
     masm.movePtr(s5, StackPointer);
     masm.pop(s5);
-
-    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
-    // is set to the correct caller frame.
-    {
-        Label skipProfilingInstrumentation;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        masm.profilerExitFrame();
-        masm.bind(&skipProfilingInstrumentation);
-    }
-
     masm.ret();
 
     Linker linker(masm);
     AutoFlushICache afc("DebugTrapHandler");
     JitCode *codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
@@ -1024,298 +997,8 @@ JitRuntime::generateBailoutTailStub(JSCo
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
 #endif
 
     return code;
 }
 
-JitCode *
-JitRuntime::generateProfilerExitFrameTailStub(JSContext *cx)
-{
-    MacroAssembler masm;
-
-    Register scratch1 = t0;
-    Register scratch2 = t1;
-    Register scratch3 = t2;
-    Register scratch4 = t3;
-
-    //
-    // The code generated below expects that the current stack pointer points
-    // to an Ion or Baseline frame, at the state it would be immediately
-    // before a ret().  Thus, after this stub's business is done, it executes
-    // a ret() and returns directly to the caller script, on behalf of the
-    // callee script that jumped to this code.
-    //
-    // Thus the expected stack is:
-    //
-    //                                   StackPointer ----+
-    //                                                    v
-    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
-    // MEM-HI                                       MEM-LOW
-    //
-    //
-    // The generated jitcode is responsible for overwriting the
-    // jitActivation->lastProfilingFrame field with a pointer to the previous
-    // Ion or Baseline jit-frame that was pushed before this one. It is also
-    // responsible for overwriting jitActivation->lastProfilingCallSite with
-    // the return address into that frame.  The frame could either be an
-    // immediate "caller" frame, or it could be a frame in a previous
-    // JitActivation (if the current frame was entered from C++, and the C++
-    // was entered by some caller jit-frame further down the stack).
-    //
-    // So this jitcode is responsible for "walking up" the jit stack, finding
-    // the previous Ion or Baseline JS frame, and storing its address and the
-    // return address into the appropriate fields on the current jitActivation.
-    //
-    // There are a fixed number of different path types that can lead to the
-    // current frame, which is either a baseline or ion frame:
-    //
-    // <Baseline-Or-Ion>
-    // ^
-    // |
-    // ^--- Ion
-    // |
-    // ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Argument Rectifier
-    // |    ^
-    // |    |
-    // |    ^--- Ion
-    // |    |
-    // |    ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Entry Frame (From C++)
-    //
-    Register actReg = scratch4;
-    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
-    masm.loadPtr(activationAddr, actReg);
-
-    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
-    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());
-
-#ifdef DEBUG
-    // Ensure that the frame we are exiting is the current lastProfilingFrame
-    {
-        masm.loadPtr(lastProfilingFrame, scratch1);
-        Label checkOk;
-        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
-        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
-        masm.assumeUnreachable(
-            "Mismatch between stored lastProfilingFrame and current stack pointer.");
-        masm.bind(&checkOk);
-    }
-#endif
-
-    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
-    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);
-
-    // Going into the conditionals, we will have:
-    //      FrameDescriptor.size in scratch1
-    //      FrameDescriptor.type in scratch2
-    masm.ma_and(scratch2, scratch1, Imm32((1 << FRAMESIZE_SHIFT) - 1));
-    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-
-    // Handling of each case is dependent on FrameDescriptor.type
-    Label handle_IonJS;
-    Label handle_BaselineStub;
-    Label handle_Rectifier;
-    Label handle_Entry;
-    Label end;
-
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);
-
-    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");
-
-    //
-    // JitFrame_IonJS
-    //
-    // Stack layout:
-    //                  ...
-    //                  Ion-Descriptor
-    //     Prev-FP ---> Ion-ReturnAddr
-    //                  ... previous frame data ... |- Descriptor.Size
-    //                  ... arguments ...           |
-    //                  ActualArgc          |
-    //                  CalleeToken         |- JitFrameLayout::Size()
-    //                  Descriptor          |
-    //        FP -----> ReturnAddr          |
-    //
-    masm.bind(&handle_IonJS);
-    {
-        // |scratch1| contains Descriptor.size
-
-        // returning directly to an IonJS frame.  Store return addr to frame
-        // in lastProfilingCallSite.
-        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        // Store return frame in lastProfilingFrame.
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.ma_add(scratch2, StackPointer, scratch1);
-        masm.ma_add(scratch2, scratch2, Imm32(JitFrameLayout::Size()));
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_BaselineStub
-    //
-    // Look past the stub and store the frame pointer to
-    // the baselineJS frame prior to it.
-    //
-    // Stack layout:
-    //              ...
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-PrevFramePointer
-    //      |       ... BL-FrameData ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
-    //              ... arguments ...           |
-    //              ActualArgc          |
-    //              CalleeToken         |- JitFrameLayout::Size()
-    //              Descriptor          |
-    //    FP -----> ReturnAddr          |
-    //
-    // We take advantage of the fact that the stub frame saves the frame
-    // pointer pointing to the baseline frame, so a bunch of calculation can
-    // be avoided.
-    //
-    masm.bind(&handle_BaselineStub);
-    {
-        masm.ma_add(scratch3, StackPointer, scratch1);
-        Address stubFrameReturnAddr(scratch3,
-                                    JitFrameLayout::Size() +
-                                    BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        Address stubFrameSavedFramePtr(scratch3,
-                                       JitFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2); // Skip past BL-PrevFramePtr
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-
-    //
-    // JitFrame_Rectifier
-    //
-    // The rectifier frame can be preceded by either an IonJS or a
-    // BaselineStub frame.
-    //
-    // Stack layout if caller of rectifier was Ion:
-    //
-    //              Ion-Descriptor
-    //              Ion-ReturnAddr
-    //              ... ion frame data ... |- Rect-Descriptor.Size
-    //              < COMMON LAYOUT >
-    //
-    // Stack layout if caller of rectifier was Baseline:
-    //
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-SavedFramePointer
-    //      |       ... baseline frame data ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
-    //              ... args to rectifier ...   |
-    //              < COMMON LAYOUT >
-    //
-    // Common stack layout:
-    //
-    //              ActualArgc          |
-    //              CalleeToken         |- IonRectifierFrameLayout::Size()
-    //              Rect-Descriptor     |
-    //              Rect-ReturnAddr     |
-    //              ... rectifier data & args ... |- Descriptor.Size
-    //              ActualArgc      |
-    //              CalleeToken     |- JitFrameLayout::Size()
-    //              Descriptor      |
-    //    FP -----> ReturnAddr      |
-    //
-    masm.bind(&handle_Rectifier);
-    {
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.ma_add(scratch2, StackPointer, scratch1);
-        masm.add32(Imm32(JitFrameLayout::Size()), scratch2);
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
-        masm.ma_lsr(scratch1, scratch3, Imm32(FRAMESIZE_SHIFT));
-        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
-
-        // Now |scratch1| contains Rect-Descriptor.Size
-        // and |scratch2| points to Rectifier frame
-        // and |scratch3| contains Rect-Descriptor.Type
-
-        // Check for either Ion or BaselineStub frame.
-        Label handle_Rectifier_BaselineStub;
-        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
-                      &handle_Rectifier_BaselineStub);
-
-        // Handle Rectifier <- IonJS
-        // scratch3 := RectFrame[ReturnAddr]
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
-        masm.ma_add(scratch3, scratch2, scratch1);
-        masm.add32(Imm32(RectifierFrameLayout::Size()), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-
-        // Handle Rectifier <- BaselineStub <- BaselineJS
-        masm.bind(&handle_Rectifier_BaselineStub);
-#ifdef DEBUG
-        {
-            Label checkOk;
-            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
-            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
-            masm.bind(&checkOk);
-        }
-#endif
-        masm.ma_add(scratch3, scratch2, scratch1);
-        Address stubFrameReturnAddr(scratch3, RectifierFrameLayout::Size() +
-                                              BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        Address stubFrameSavedFramePtr(scratch3,
-                                       RectifierFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2);
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_Entry
-    //
-    // If at an entry frame, store null into both fields.
-    //
-    masm.bind(&handle_Entry);
-    {
-        masm.movePtr(ImmPtr(nullptr), scratch1);
-        masm.storePtr(scratch1, lastProfilingCallSite);
-        masm.storePtr(scratch1, lastProfilingFrame);
-        masm.ret();
-    }
-
-    Linker linker(masm);
-    AutoFlushICache afc("ProfilerExitFrameTailStub");
-    JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
-}
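
Both the ARM and MIPS copies of the deleted stub open with the same debug-only consistency check on lastProfilingFrame. In plain C++ terms (the assert-based helper below is illustrative, not the generated code):

// Illustrative only: the frame being exited must be the most recently
// published profiling frame, unless nothing has been published yet.
#include <cassert>

inline void checkLastProfilingFrameSketch(void *lastProfilingFrame, void *sp)
{
#ifdef DEBUG
    assert(lastProfilingFrame == nullptr || lastProfilingFrame == sp);
#else
    (void)lastProfilingFrame;
    (void)sp;
#endif
}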
--- a/js/src/jit/shared/Assembler-shared.h
+++ b/js/src/jit/shared/Assembler-shared.h
@@ -892,17 +892,16 @@ struct AsmJSAbsoluteLink
 class AssemblerShared
 {
     Vector<CallSite, 0, SystemAllocPolicy> callsites_;
     Vector<AsmJSHeapAccess, 0, SystemAllocPolicy> asmJSHeapAccesses_;
     Vector<AsmJSGlobalAccess, 0, SystemAllocPolicy> asmJSGlobalAccesses_;
     Vector<AsmJSAbsoluteLink, 0, SystemAllocPolicy> asmJSAbsoluteLinks_;
 
   protected:
-    Vector<CodeOffsetLabel, 0, SystemAllocPolicy> profilerCallSites_;
     bool enoughMemory_;
     bool embedsNurseryPointers_;
 
   public:
     AssemblerShared()
      : enoughMemory_(true),
        embedsNurseryPointers_(false)
     {}
@@ -914,20 +913,16 @@ class AssemblerShared
     void setOOM() {
         enoughMemory_ = false;
     }
 
     bool oom() const {
         return !enoughMemory_;
     }
 
-    void appendProfilerCallSite(CodeOffsetLabel label) {
-        enoughMemory_ &= profilerCallSites_.append(label);
-    }
-
     bool embedsNurseryPointers() const {
         return embedsNurseryPointers_;
     }
 
     ImmGCPtr noteMaybeNurseryPtr(ImmMaybeNurseryPtr ptr) {
         if (ptr.value && gc::IsInsideNursery(ptr.value)) {
             // FIXME: Ideally we'd assert this in all cases, but PJS needs to
             //        compile IC's from off-main-thread; it will not touch
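
The removed appendProfilerCallSite used the same sticky-OOM idiom as the rest of AssemblerShared: each fallible append folds its success bit into enoughMemory_, and oom() is checked once afterwards. A standalone sketch of that idiom, with invented types:

// Illustrative only: a stand-in for the enoughMemory_ &= append(...) pattern.
#include <vector>

class OOMTrackerSketch {
    std::vector<int> entries_;
    bool enoughMemory_ = true;

    bool fallibleAppend(int v) {
        // Stand-in for a fallible Vector::append that reports failure
        // instead of throwing; assumed behavior, not a real API.
        entries_.push_back(v);
        return true;
    }

  public:
    void append(int v) { enoughMemory_ &= fallibleAppend(v); }
    bool oom() const { return !enoughMemory_; }
};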
--- a/js/src/jit/shared/BaselineCompiler-shared.cpp
+++ b/js/src/jit/shared/BaselineCompiler-shared.cpp
@@ -26,18 +26,16 @@ BaselineCompilerShared::BaselineCompiler
     frame(script, masm),
     stubSpace_(),
     icEntries_(),
     pcMappingEntries_(),
     icLoadLabels_(),
     pushedBeforeCall_(0),
     inCall_(false),
     spsPushToggleOffset_(),
-    profilerEnterFrameToggleOffset_(),
-    profilerExitFrameToggleOffset_(),
     traceLoggerEnterToggleOffset_(),
     traceLoggerExitToggleOffset_(),
     traceLoggerScriptTextIdOffset_()
 { }
 
 bool
 BaselineCompilerShared::callVM(const VMFunction &fun, CallVMPhase phase)
 {
--- a/js/src/jit/shared/BaselineCompiler-shared.h
+++ b/js/src/jit/shared/BaselineCompiler-shared.h
@@ -63,18 +63,16 @@ class BaselineCompilerShared
         CodeOffsetLabel label;
     };
     js::Vector<ICLoadLabel, 16, SystemAllocPolicy> icLoadLabels_;
 
     uint32_t pushedBeforeCall_;
     mozilla::DebugOnly<bool> inCall_;
 
     CodeOffsetLabel spsPushToggleOffset_;
-    CodeOffsetLabel profilerEnterFrameToggleOffset_;
-    CodeOffsetLabel profilerExitFrameToggleOffset_;
     CodeOffsetLabel traceLoggerEnterToggleOffset_;
     CodeOffsetLabel traceLoggerExitToggleOffset_;
     CodeOffsetLabel traceLoggerScriptTextIdOffset_;
 
     BaselineCompilerShared(JSContext *cx, TempAllocator &alloc, JSScript *script);
 
     ICEntry *allocateICEntry(ICStub *stub, ICEntry::Kind kind) {
         if (!stub)
--- a/js/src/jit/shared/CodeGenerator-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-shared.cpp
@@ -54,26 +54,27 @@ CodeGeneratorShared::CodeGeneratorShared
     lastOsiPointOffset_(0),
     safepoints_(graph->totalSlotCount()),
     nativeToBytecodeMap_(nullptr),
     nativeToBytecodeMapSize_(0),
     nativeToBytecodeTableOffset_(0),
     nativeToBytecodeNumRegions_(0),
     nativeToBytecodeScriptList_(nullptr),
     nativeToBytecodeScriptListLength_(0),
+    sps_(&GetJitContext()->runtime->spsProfiler(), &lastNotInlinedPC_),
     osrEntryOffset_(0),
     skipArgCheckEntryOffset_(0),
 #ifdef CHECK_OSIPOINT_REGISTERS
     checkOsiPointRegisters(js_JitOptions.checkOsiPointRegisters),
 #endif
     frameDepth_(graph->paddedLocalSlotsSize() + graph->argumentsSize()),
     frameInitialAdjustment_(0)
 {
-    if (gen->isProfilerInstrumentationEnabled())
-        masm.enableProfilingInstrumentation();
+    if (!gen->compilingAsmJS())
+        masm.setInstrumentation(&sps_);
 
     if (gen->compilingAsmJS()) {
         // Since asm.js uses the system ABI which does not necessarily use a
         // regular array where all slots are sizeof(Value), it maintains the max
         // argument stack depth separately.
         MOZ_ASSERT(graph->argumentSlotCount() == 0);
         frameDepth_ += gen->maxAsmJSStackArgBytes();
 
@@ -101,36 +102,42 @@ CodeGeneratorShared::CodeGeneratorShared
     } else {
         frameClass_ = FrameSizeClass::FromDepth(frameDepth_);
     }
 }
 
 bool
 CodeGeneratorShared::generateOutOfLineCode()
 {
+    JSScript *topScript = sps_.getPushed();
     for (size_t i = 0; i < outOfLineCode_.length(); i++) {
         // Add native => bytecode mapping entries for OOL sites.
         // Not enabled on asm.js yet since asm doesn't contain bytecode mappings.
         if (!gen->compilingAsmJS()) {
             if (!addNativeToBytecodeEntry(outOfLineCode_[i]->bytecodeSite()))
                 return false;
         }
 
         if (!gen->alloc().ensureBallast())
             return false;
 
         JitSpew(JitSpew_Codegen, "# Emitting out of line code");
 
         masm.setFramePushed(outOfLineCode_[i]->framePushed());
         lastPC_ = outOfLineCode_[i]->pc();
+        if (!sps_.prepareForOOL())
+            return false;
+        sps_.setPushed(outOfLineCode_[i]->script());
         outOfLineCode_[i]->bind(&masm);
 
         oolIns = outOfLineCode_[i];
         outOfLineCode_[i]->generate(this);
+        sps_.finishOOL();
     }
+    sps_.setPushed(topScript);
     oolIns = nullptr;
 
     return true;
 }
 
 void
 CodeGeneratorShared::addOutOfLineCode(OutOfLineCode *code, const MInstruction *mir)
 {
@@ -146,17 +153,17 @@ CodeGeneratorShared::addOutOfLineCode(Ou
     MOZ_ASSERT_IF(!gen->compilingAsmJS(), code->script()->containsPC(code->pc()));
     masm.propagateOOM(outOfLineCode_.append(code));
 }
 
 bool
 CodeGeneratorShared::addNativeToBytecodeEntry(const BytecodeSite *site)
 {
     // Skip the table entirely if profiling is not enabled.
-    if (!isProfilerInstrumentationEnabled())
+    if (!isNativeToBytecodeMapEnabled())
         return true;
 
     MOZ_ASSERT(site);
     MOZ_ASSERT(site->tree());
     MOZ_ASSERT(site->pc());
 
     InlineScriptTree *tree = site->tree();
     jsbytecode *pc = site->pc();
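
The restored generateOutOfLineCode saves the script currently attributed to the instrumentation, repoints it at each out-of-line path while that path is emitted, and restores the original afterwards. A simplified C++ rendering of that save/set/restore shape, with invented types:

// Illustrative only: mirrors topScript = sps_.getPushed(); ... setPushed(ool);
// ... setPushed(topScript). Not the real IonInstrumentation interface.
#include <string>
#include <vector>

struct InstrumentationSketch {
    std::string pushed;                        // script currently attributed
    void setPushed(const std::string &s) { pushed = s; }
    const std::string &getPushed() const { return pushed; }
};

inline void generateOutOfLineSketch(InstrumentationSketch &sps,
                                    const std::vector<std::string> &oolScripts)
{
    const std::string topScript = sps.getPushed();   // remember the main script
    for (const std::string &script : oolScripts) {
        sps.setPushed(script);                       // attribute the OOL path
        // ... emit the out-of-line path here ...
    }
    sps.setPushed(topScript);                        // back to the main script
}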
--- a/js/src/jit/shared/CodeGenerator-shared.h
+++ b/js/src/jit/shared/CodeGenerator-shared.h
@@ -108,18 +108,23 @@ class CodeGeneratorShared : public LElem
     uint8_t *nativeToBytecodeMap_;
     uint32_t nativeToBytecodeMapSize_;
     uint32_t nativeToBytecodeTableOffset_;
     uint32_t nativeToBytecodeNumRegions_;
 
     JSScript **nativeToBytecodeScriptList_;
     uint32_t nativeToBytecodeScriptListLength_;
 
-    bool isProfilerInstrumentationEnabled() {
-        return gen->isProfilerInstrumentationEnabled();
+    // When profiling is enabled, this is the instrumentation manager which
+    // maintains state of what script is currently being generated (for inline
+    // scripts) and when instrumentation needs to be emitted or skipped.
+    IonInstrumentation sps_;
+
+    bool isNativeToBytecodeMapEnabled() {
+        return gen->isNativeToBytecodeMapEnabled();
     }
 
   protected:
     // The offset of the first instruction of the OSR entry block from the
     // beginning of the code buffer.
     size_t osrEntryOffset_;
 
     TempAllocator &alloc() const {
--- a/js/src/jit/shared/CodeGenerator-x86-shared.cpp
+++ b/js/src/jit/shared/CodeGenerator-x86-shared.cpp
@@ -39,20 +39,16 @@ CodeGeneratorX86Shared::CodeGeneratorX86
 }
 
 bool
 CodeGeneratorX86Shared::generatePrologue()
 {
     MOZ_ASSERT(masm.framePushed() == 0);
     MOZ_ASSERT(!gen->compilingAsmJS());
 
-    // If profiling, save the current frame pointer to a per-thread global field.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerEnterFrame(StackPointer, CallTempReg0);
-
     // Note that this automatically sets MacroAssembler::framePushed().
     masm.reserveStack(frameSize());
 
     emitTracelogIonStart();
 
     return true;
 }
 
@@ -64,21 +60,16 @@ CodeGeneratorX86Shared::generateEpilogue
     masm.bind(&returnLabel_);
 
     emitTracelogIonStop();
 
     // Pop the stack we allocated at the start of the function.
     masm.freeStack(frameSize());
     MOZ_ASSERT(masm.framePushed() == 0);
 
-    // If profiling, reset the per-thread global lastJitFrame to point to
-    // the previous frame.
-    if (isProfilerInstrumentationEnabled())
-        masm.profilerExitFrame();
-
     masm.ret();
     return true;
 }
 
 void
 OutOfLineBailout::accept(CodeGeneratorX86Shared *codegen)
 {
     codegen->visitOutOfLineBailout(this);
--- a/js/src/jit/x64/MacroAssembler-x64.cpp
+++ b/js/src/jit/x64/MacroAssembler-x64.cpp
@@ -436,27 +436,16 @@ MacroAssemblerX64::handleFailureWithHand
 
     // Only used in debug mode. Return BaselineFrame->returnValue() to the caller.
     bind(&return_);
     loadPtr(Address(rsp, offsetof(ResumeFromException, framePointer)), rbp);
     loadPtr(Address(rsp, offsetof(ResumeFromException, stackPointer)), rsp);
     loadValue(Address(rbp, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
     movq(rbp, rsp);
     pop(rbp);
-
-    // If profiling is enabled, then update the lastProfilingFrame to refer to caller
-    // frame before returning.
-    {
-        Label skipProfilingInstrumentation;
-        AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->spsProfiler().addressOfEnabled());
-        branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        profilerExitFrame();
-        bind(&skipProfilingInstrumentation);
-    }
-
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
     loadPtr(Address(esp, offsetof(ResumeFromException, bailoutInfo)), r9);
     mov(ImmWord(BAILOUT_RETURN_OK), rax);
     jmp(Operand(rsp, offsetof(ResumeFromException, target)));
@@ -525,23 +514,8 @@ MacroAssemblerX64::branchValueIsNurseryO
     const Nursery &nursery = GetJitContext()->runtime->gcNursery();
     Value start = ObjectValue(*reinterpret_cast<JSObject *>(nursery.start()));
 
     movePtr(ImmWord(-ptrdiff_t(start.asRawBits())), ScratchReg);
     addPtr(value.valueReg(), ScratchReg);
     branchPtr(cond == Assembler::Equal ? Assembler::Below : Assembler::AboveOrEqual,
               ScratchReg, Imm32(nursery.nurserySize()), label);
 }
-
-void
-MacroAssemblerX64::profilerEnterFrame(Register framePtr, Register scratch)
-{
-    AbsoluteAddress activation(GetJitContext()->runtime->addressOfProfilingActivation());
-    loadPtr(activation, scratch);
-    storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
-    storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
-}
-
-void
-MacroAssemblerX64::profilerExitFrame()
-{
-    jmp(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
-}
--- a/js/src/jit/x64/MacroAssembler-x64.h
+++ b/js/src/jit/x64/MacroAssembler-x64.h
@@ -1440,20 +1440,16 @@ class MacroAssemblerX64 : public MacroAs
     }
     void memIntToValue(Address Source, Address Dest) {
         load32(Source, ScratchReg);
         storeValue(JSVAL_TYPE_INT32, ScratchReg, Dest);
     }
 
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
-
-    // Instrumentation for entering and leaving the profiler.
-    void profilerEnterFrame(Register framePtr, Register scratch);
-    void profilerExitFrame();
 };
 
 typedef MacroAssemblerX64 MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x64_MacroAssembler_x64_h */
--- a/js/src/jit/x64/Trampoline-x64.cpp
+++ b/js/src/jit/x64/Trampoline-x64.cpp
@@ -76,16 +76,19 @@ JitRuntime::generateEnterJIT(JSContext *
     masm.vmovdqa(xmm10, Operand(rsp, 16 * 4));
     masm.vmovdqa(xmm11, Operand(rsp, 16 * 5));
     masm.vmovdqa(xmm12, Operand(rsp, 16 * 6));
     masm.vmovdqa(xmm13, Operand(rsp, 16 * 7));
     masm.vmovdqa(xmm14, Operand(rsp, 16 * 8));
     masm.vmovdqa(xmm15, Operand(rsp, 16 * 9));
 #endif
 
+    // Push the EnterJIT sps mark.
+    masm.spsMarkJit(&cx->runtime()->spsProfiler, rbp, rbx);
+
     // Save arguments passed in registers needed after function call.
     masm.push(result);
 
     // Remember stack depth without padding and arguments.
     masm.mov(rsp, r14);
 
     // Remember number of bytes occupied by argument vector
     masm.mov(reg_argc, r13);
@@ -157,21 +160,19 @@ JitRuntime::generateEnterJIT(JSContext *
         Register scratch = regs.takeAny();
 
         Label notOsr;
         masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);
 
         Register numStackValues = regs.takeAny();
         masm.movq(numStackValuesAddr, numStackValues);
 
-        // Push return address
+        // Push return address, previous frame pointer.
         masm.mov(returnLabel.dest(), scratch);
         masm.push(scratch);
-
-        // Push previous frame pointer.
         masm.push(rbp);
 
         // Reserve frame.
         Register framePtr = rbp;
         masm.subPtr(Imm32(BaselineFrame::Size()), rsp);
         masm.mov(rsp, framePtr);
 
 #ifdef XP_WIN
@@ -225,29 +226,16 @@ JitRuntime::generateEnterJIT(JSContext *
 
         MOZ_ASSERT(reg_code != ReturnReg);
 
         Label error;
         masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), rsp);
         masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
         masm.branchIfFalseBool(ReturnReg, &error);
 
-        // If OSR-ing, then emit instrumentation for setting lastProfilingFrame
-        // if profiler instrumentation is enabled.
-        {
-            Label skipProfilingInstrumentation;
-            Register realFramePtr = numStackValues;
-            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
-                          &skipProfilingInstrumentation);
-            masm.lea(Operand(framePtr, sizeof(void*)), realFramePtr);
-            masm.profilerEnterFrame(realFramePtr, scratch);
-            masm.bind(&skipProfilingInstrumentation);
-        }
-
         masm.jump(reg_code);
 
         // OOM: load error value, discard return address and previous frame
         // pointer and return.
         masm.bind(&error);
         masm.mov(framePtr, rsp);
         masm.addPtr(Imm32(2 * sizeof(uintptr_t)), rsp);
         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
@@ -273,16 +261,19 @@ JitRuntime::generateEnterJIT(JSContext *
     masm.addq(r14, rsp);        // Remove arguments.
 
     /*****************************************************************
     Place return value where it belongs, pop all saved registers
     *****************************************************************/
     masm.pop(r12); // vp
     masm.storeValue(JSReturnOperand, Operand(r12, 0));
 
+    // Unwind the sps mark.
+    masm.spsUnmarkJit(&cx->runtime()->spsProfiler, rbx);
+
     // Restore non-volatile registers.
 #if defined(_WIN64)
     masm.vmovdqa(Operand(rsp, 16 * 0), xmm6);
     masm.vmovdqa(Operand(rsp, 16 * 1), xmm7);
     masm.vmovdqa(Operand(rsp, 16 * 2), xmm8);
     masm.vmovdqa(Operand(rsp, 16 * 3), xmm9);
     masm.vmovdqa(Operand(rsp, 16 * 4), xmm10);
     masm.vmovdqa(Operand(rsp, 16 * 5), xmm11);
@@ -779,27 +770,16 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
     masm.ret();
 
     masm.bind(&forcedReturn);
     masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
                    JSReturnOperand);
     masm.mov(rbp, rsp);
     masm.pop(rbp);
-
-    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
-    // is set to the correct caller frame.
-    {
-        Label skipProfilingInstrumentation;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        masm.profilerExitFrame();
-        masm.bind(&skipProfilingInstrumentation);
-    }
-
     masm.ret();
 
     Linker linker(masm);
     JitCode *codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
@@ -835,294 +815,8 @@ JitRuntime::generateBailoutTailStub(JSCo
     JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
 #endif
 
     return code;
 }
-
-JitCode *
-JitRuntime::generateProfilerExitFrameTailStub(JSContext *cx)
-{
-    MacroAssembler masm;
-
-    Register scratch1 = r8;
-    Register scratch2 = r9;
-    Register scratch3 = r10;
-    Register scratch4 = r11;
-
-    //
-    // The code generated below expects that the current stack pointer points
-    // to an Ion or Baseline frame, at the state it would be immediately
-    // before a ret().  Thus, after this stub's business is done, it executes
-    // a ret() and returns directly to the caller script, on behalf of the
-    // callee script that jumped to this code.
-    //
-    // Thus the expected stack is:
-    //
-    //                                   StackPointer ----+
-    //                                                    v
-    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
-    // MEM-HI                                       MEM-LOW
-    //
-    //
-    // The generated jitcode is responsible for overwriting the
-    // jitActivation->lastProfilingFrame field with a pointer to the previous
-    // Ion or Baseline jit-frame that was pushed before this one. It is also
-    // responsible for overwriting jitActivation->lastProfilingCallSite with
-    // the return address into that frame.  The frame could either be an
-    // immediate "caller" frame, or it could be a frame in a previous
-    // JitActivation (if the current frame was entered from C++, and the C++
-    // was entered by some caller jit-frame further down the stack).
-    //
-    // So this jitcode is responsible for "walking up" the jit stack, finding
-    // the previous Ion or Baseline JS frame, and storing its address and the
-    // return address into the appropriate fields on the current jitActivation.
-    //
-    // There are a fixed number of different path types that can lead to the
-    // current frame, which is either a baseline or ion frame:
-    //
-    // <Baseline-Or-Ion>
-    // ^
-    // |
-    // ^--- Ion
-    // |
-    // ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Argument Rectifier
-    // |    ^
-    // |    |
-    // |    ^--- Ion
-    // |    |
-    // |    ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Entry Frame (From C++)
-    //
-    Register actReg = scratch4;
-    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
-    masm.loadPtr(activationAddr, actReg);
-
-    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
-    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());
-
-#ifdef DEBUG
-    // Ensure that the frame we are exiting is the current lastProfilingFrame
-    {
-        masm.loadPtr(lastProfilingFrame, scratch1);
-        Label checkOk;
-        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
-        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
-        masm.assumeUnreachable(
-            "Mismatch between stored lastProfilingFrame and current stack pointer.");
-        masm.bind(&checkOk);
-    }
-#endif
-
-    // Load the frame descriptor into |scratch1|, figure out what to do depending on its type.
-    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);
-
-    // Going into the conditionals, we will have:
-    //      FrameDescriptor.size in scratch1
-    //      FrameDescriptor.type in scratch2
-    masm.movePtr(scratch1, scratch2);
-    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-    masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch2);
-
-    // Handling of each case is dependent on FrameDescriptor.type
-    Label handle_IonJS;
-    Label handle_BaselineStub;
-    Label handle_Rectifier;
-    Label handle_Entry;
-    Label end;
-
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);
-
-    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");
-
-    //
-    // JitFrame_IonJS
-    //
-    // Stack layout:
-    //                  ...
-    //                  Ion-Descriptor
-    //     Prev-FP ---> Ion-ReturnAddr
-    //                  ... previous frame data ... |- Descriptor.Size
-    //                  ... arguments ...           |
-    //                  ActualArgc          |
-    //                  CalleeToken         |- JitFrameLayout::Size()
-    //                  Descriptor          |
-    //        FP -----> ReturnAddr          |
-    //
-    masm.bind(&handle_IonJS);
-    {
-        // returning directly to an IonJS frame.  Store return addr to frame
-        // in lastProfilingCallSite.
-        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        // Store return frame in lastProfilingFrame.
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_BaselineStub
-    //
-    // Look past the stub and store the frame pointer to
-    // the baselineJS frame prior to it.
-    //
-    // Stack layout:
-    //              ...
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-PrevFramePointer
-    //      |       ... BL-FrameData ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
-    //              ... arguments ...           |
-    //              ActualArgc          |
-    //              CalleeToken         |- JitFrameLayout::Size()
-    //              Descriptor          |
-    //    FP -----> ReturnAddr          |
-    //
-    // We take advantage of the fact that the stub frame saves the frame
-    // pointer pointing to the baseline frame, so a bunch of calculation can
-    // be avoided.
-    //
-    masm.bind(&handle_BaselineStub);
-    {
-        BaseIndex stubFrameReturnAddr(StackPointer, scratch1, TimesOne,
-                                      JitFrameLayout::Size() +
-                                      BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        BaseIndex stubFrameSavedFramePtr(StackPointer, scratch1, TimesOne,
-                                         JitFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2); // Skip past BL-PrevFramePtr
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-
-    //
-    // JitFrame_Rectifier
-    //
-    // The rectifier frame can be preceded by either an IonJS or a
-    // BaselineStub frame.
-    //
-    // Stack layout if caller of rectifier was Ion:
-    //
-    //              Ion-Descriptor
-    //              Ion-ReturnAddr
-    //              ... ion frame data ... |- Rect-Descriptor.Size
-    //              < COMMON LAYOUT >
-    //
-    // Stack layout if caller of rectifier was Baseline:
-    //
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-SavedFramePointer
-    //      |       ... baseline frame data ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
-    //              ... args to rectifier ...   |
-    //              < COMMON LAYOUT >
-    //
-    // Common stack layout:
-    //
-    //              ActualArgc          |
-    //              CalleeToken         |- RectifierFrameLayout::Size()
-    //              Rect-Descriptor     |
-    //              Rect-ReturnAddr     |
-    //              ... rectifier data & args ... |- Descriptor.Size
-    //              ActualArgc      |
-    //              CalleeToken     |- JitFrameLayout::Size()
-    //              Descriptor      |
-    //    FP -----> ReturnAddr      |
-    //
-    masm.bind(&handle_Rectifier);
-    {
-        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
-        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
-        masm.movePtr(scratch3, scratch1);
-        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
-        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-
-        // Now |scratch1| contains Rect-Descriptor.Size
-        // and |scratch2| points to Rectifier frame
-        // and |scratch3| contains Rect-Descriptor.Type
-
-        // Check for either Ion or BaselineStub frame.
-        Label handle_Rectifier_BaselineStub;
-        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
-                      &handle_Rectifier_BaselineStub);
-
-        // Handle Rectifier <- IonJS
-        // scratch3 := RectFrame[ReturnAddr]
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
-        masm.lea(Operand(scratch2, scratch1, TimesOne, RectifierFrameLayout::Size()), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-
-        // Handle Rectifier <- BaselineStub <- BaselineJS
-        masm.bind(&handle_Rectifier_BaselineStub);
-#ifdef DEBUG
-        {
-            Label checkOk;
-            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
-            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
-            masm.bind(&checkOk);
-        }
-#endif
-        BaseIndex stubFrameReturnAddr(scratch2, scratch1, TimesOne,
-                                         RectifierFrameLayout::Size() +
-                                         BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        BaseIndex stubFrameSavedFramePtr(scratch2, scratch1, TimesOne,
-                                         RectifierFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch3);
-        masm.addPtr(Imm32(sizeof(void *)), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_Entry
-    //
-    // If at an entry frame, store null into both fields.
-    //
-    masm.bind(&handle_Entry);
-    {
-        masm.movePtr(ImmPtr(nullptr), scratch1);
-        masm.storePtr(scratch1, lastProfilingCallSite);
-        masm.storePtr(scratch1, lastProfilingFrame);
-        masm.ret();
-    }
-
-    Linker linker(masm);
-    JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
-}
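
A rough C++ sketch of what the two simplest cases of the tail stub above compute; readWord() and the JitActivation setters are illustrative names chosen for this sketch (only setLastProfilingFrame appears elsewhere in this patch), while the layout classes and offsets are the ones used in the removed assembly:

    // Sketch only: mirrors the address arithmetic emitted by the stub above.
    static void *readWord(uint8_t *addr) {
        return *reinterpret_cast<void **>(addr);
    }

    // Caller is an IonJS/BaselineJS frame: its frame starts just past this
    // frame's header (JitFrameLayout) plus Descriptor.Size bytes of data.
    static void updateForJSCaller(uint8_t *sp, size_t descriptorSize, jit::JitActivation *act) {
        act->setLastProfilingCallSite(readWord(sp + jit::JitFrameLayout::offsetOfReturnAddress()));
        act->setLastProfilingFrame(sp + descriptorSize + jit::JitFrameLayout::Size());
    }

    // Caller is a BaselineStub frame: reuse the frame pointer the stub frame
    // saved, skipping the BL-PrevFramePointer word, instead of recomputing it.
    static void updateForBaselineStubCaller(uint8_t *sp, size_t descriptorSize, jit::JitActivation *act) {
        uint8_t *stubFrame = sp + descriptorSize + jit::JitFrameLayout::Size();
        act->setLastProfilingCallSite(
            readWord(stubFrame + jit::BaselineStubFrameLayout::offsetOfReturnAddress()));
        uint8_t *savedFramePtr = static_cast<uint8_t *>(readWord(stubFrame - 2 * sizeof(void *)));
        act->setLastProfilingFrame(savedFramePtr + sizeof(void *));
    }
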
--- a/js/src/jit/x86/MacroAssembler-x86.cpp
+++ b/js/src/jit/x86/MacroAssembler-x86.cpp
@@ -416,28 +416,16 @@ MacroAssemblerX86::handleFailureWithHand
 
     // Only used in debug mode. Return BaselineFrame->returnValue() to the caller.
     bind(&return_);
     loadPtr(Address(esp, offsetof(ResumeFromException, framePointer)), ebp);
     loadPtr(Address(esp, offsetof(ResumeFromException, stackPointer)), esp);
     loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()), JSReturnOperand);
     movl(ebp, esp);
     pop(ebp);
-
-    // If profiling is enabled, then update the lastProfilingFrame to refer to caller
-    // frame before returning.
-    {
-        Label skipProfilingInstrumentation;
-        // Test if profiler enabled.
-        AbsoluteAddress addressOfEnabled(GetJitContext()->runtime->spsProfiler().addressOfEnabled());
-        branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        profilerExitFrame();
-        bind(&skipProfilingInstrumentation);
-    }
-
     ret();
 
     // If we are bailing out to baseline to handle an exception, jump to
     // the bailout tail stub.
     bind(&bailout);
     loadPtr(Address(esp, offsetof(ResumeFromException, bailoutInfo)), ecx);
     movl(Imm32(BAILOUT_RETURN_OK), eax);
     jmp(Operand(esp, offsetof(ResumeFromException, target)));
@@ -521,23 +509,8 @@ MacroAssemblerX86::branchValueIsNurseryO
 
     Label done;
 
     branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
     branchPtrInNurseryRange(cond, value.payloadReg(), temp, label);
 
     bind(&done);
 }
-
-void
-MacroAssemblerX86::profilerEnterFrame(Register framePtr, Register scratch)
-{
-    AbsoluteAddress activation(GetJitContext()->runtime->addressOfProfilingActivation());
-    loadPtr(activation, scratch);
-    storePtr(framePtr, Address(scratch, JitActivation::offsetOfLastProfilingFrame()));
-    storePtr(ImmPtr(nullptr), Address(scratch, JitActivation::offsetOfLastProfilingCallSite()));
-}
-
-void
-MacroAssemblerX86::profilerExitFrame()
-{
-    jmp(GetJitContext()->runtime->jitRuntime()->getProfilerExitFrameTail());
-}
--- a/js/src/jit/x86/MacroAssembler-x86.h
+++ b/js/src/jit/x86/MacroAssembler-x86.h
@@ -1190,20 +1190,16 @@ class MacroAssemblerX86 : public MacroAs
         addPtr(ImmWord(framePushed()), dynStack);
         makeFrameDescriptor(dynStack, JitFrame_IonJS);
         Push(dynStack);
         call(target);
     }
 
     void branchPtrInNurseryRange(Condition cond, Register ptr, Register temp, Label *label);
     void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label *label);
-
-    // Instrumentation for entering and leaving the profiler.
-    void profilerEnterFrame(Register framePtr, Register scratch);
-    void profilerExitFrame();
 };
 
 typedef MacroAssemblerX86 MacroAssemblerSpecific;
 
 } // namespace jit
 } // namespace js
 
 #endif /* jit_x86_MacroAssembler_x86_h */
--- a/js/src/jit/x86/Trampoline-x86.cpp
+++ b/js/src/jit/x86/Trampoline-x86.cpp
@@ -55,16 +55,19 @@ JitRuntime::generateEnterJIT(JSContext *
 
     // Save non-volatile registers. These must be saved by the trampoline,
     // rather than the JIT'd code, because they are scanned by the conservative
     // scanner.
     masm.push(ebx);
     masm.push(esi);
     masm.push(edi);
 
+    // Push the EnterJIT sps mark.
+    masm.spsMarkJit(&cx->runtime()->spsProfiler, ebp, ebx);
+
     // Keep track of the stack which has to be unwound after returning from the
     // compiled function.
     masm.movl(esp, esi);
 
     // eax <- 8*argc, eax is now the offset between argv and the last
     masm.loadPtr(Address(ebp, ARG_ARGC), eax);
     masm.shll(Imm32(3), eax);
 
@@ -151,21 +154,19 @@ JitRuntime::generateEnterJIT(JSContext *
         masm.branchTestPtr(Assembler::Zero, OsrFrameReg, OsrFrameReg, &notOsr);
 
         Register numStackValues = regs.takeAny();
         masm.loadPtr(Address(ebp, ARG_STACKVALUES), numStackValues);
 
         Register jitcode = regs.takeAny();
         masm.loadPtr(Address(ebp, ARG_JITCODE), jitcode);
 
-        // Push return address.
+        // Push return address, previous frame pointer.
         masm.mov(returnLabel.dest(), scratch);
         masm.push(scratch);
-
-        // Push previous frame pointer.
         masm.push(ebp);
 
         // Reserve frame.
         Register framePtr = ebp;
         masm.subPtr(Imm32(BaselineFrame::Size()), esp);
         masm.mov(esp, framePtr);
 
 #ifdef XP_WIN
@@ -216,29 +217,16 @@ JitRuntime::generateEnterJIT(JSContext *
 
         MOZ_ASSERT(jitcode != ReturnReg);
 
         Label error;
         masm.addPtr(Imm32(ExitFrameLayout::SizeWithFooter()), esp);
         masm.addPtr(Imm32(BaselineFrame::Size()), framePtr);
         masm.branchIfFalseBool(ReturnReg, &error);
 
-        // If OSR-ing, then emit instrumentation for setting lastProfilerFrame
-        // if profiler instrumentation is enabled.
-        {
-            Label skipProfilingInstrumentation;
-            Register realFramePtr = numStackValues;
-            AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-            masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0),
-                          &skipProfilingInstrumentation);
-            masm.lea(Operand(framePtr, sizeof(void*)), realFramePtr);
-            masm.profilerEnterFrame(realFramePtr, scratch);
-            masm.bind(&skipProfilingInstrumentation);
-        }
-
         masm.jump(jitcode);
 
         // OOM: load error value, discard return address and previous frame
         // pointer and return.
         masm.bind(&error);
         masm.mov(framePtr, esp);
         masm.addPtr(Imm32(2 * sizeof(uintptr_t)), esp);
         masm.moveValue(MagicValue(JS_ION_ERROR), JSReturnOperand);
@@ -264,28 +252,31 @@ JitRuntime::generateEnterJIT(JSContext *
     // Pop arguments off the stack.
     // eax <- 8*argc (size of all arguments we pushed on the stack)
     masm.pop(eax);
     masm.shrl(Imm32(FRAMESIZE_SHIFT), eax); // Unmark EntryFrame.
     masm.addl(eax, esp);
 
     // |ebp| could have been clobbered by the inner function.
     // Grab the address for the Value result from the argument stack.
-    //  +20 ... arguments ...
-    //  +16 <return>
-    //  +12 ebp <- original %ebp pointing here.
-    //  +8  ebx
-    //  +4  esi
-    //  +0  edi
-    masm.loadPtr(Address(esp, ARG_RESULT + 3 * sizeof(void *)), eax);
+    //  +24 ... arguments ...
+    //  +20 <return>
+    //  +16 ebp <- original %ebp pointing here.
+    //  +12 ebx
+    //  +8  esi
+    //  +4  edi
+    //  +0  hasSPSFrame
+    masm.loadPtr(Address(esp, ARG_RESULT + 4 * sizeof(void *)), eax);
     masm.storeValue(JSReturnOperand, Operand(eax, 0));
 
     /**************************************************************
         Return stack and registers to correct state
     **************************************************************/
+    // Unwind the sps mark.
+    masm.spsUnmarkJit(&cx->runtime()->spsProfiler, ebx);
 
     // Restore non-volatile registers
     masm.pop(edi);
     masm.pop(esi);
     masm.pop(ebx);
 
     // Restore old stack frame pointer
     masm.pop(ebp);
@@ -817,27 +808,16 @@ JitRuntime::generateDebugTrapHandler(JSC
     masm.branchTest32(Assembler::NonZero, ReturnReg, ReturnReg, &forcedReturn);
     masm.ret();
 
     masm.bind(&forcedReturn);
     masm.loadValue(Address(ebp, BaselineFrame::reverseOffsetOfReturnValue()),
                    JSReturnOperand);
     masm.mov(ebp, esp);
     masm.pop(ebp);
-
-    // Before returning, if profiling is turned on, make sure that lastProfilingFrame
-    // is set to the correct caller frame.
-    {
-        Label skipProfilingInstrumentation;
-        AbsoluteAddress addressOfEnabled(cx->runtime()->spsProfiler.addressOfEnabled());
-        masm.branch32(Assembler::Equal, addressOfEnabled, Imm32(0), &skipProfilingInstrumentation);
-        masm.profilerExitFrame();
-        masm.bind(&skipProfilingInstrumentation);
-    }
-
     masm.ret();
 
     Linker linker(masm);
     JitCode *codeDbg = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(codeDbg, "DebugTrapHandler");
 #endif
@@ -873,297 +853,8 @@ JitRuntime::generateBailoutTailStub(JSCo
     JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
 
 #ifdef JS_ION_PERF
     writePerfSpewerJitCodeProfile(code, "BailoutTailStub");
 #endif
 
     return code;
 }
-
-JitCode *
-JitRuntime::generateProfilerExitFrameTailStub(JSContext *cx)
-{
-    MacroAssembler masm;
-
-    Register scratch1 = eax;
-    Register scratch2 = ebx;
-    Register scratch3 = esi;
-    Register scratch4 = edi;
-
-    //
-    // The code generated below expects that the current stack pointer points
-    // to an Ion or Baseline frame, at the state it would be immediately
-    // before a ret().  Thus, after this stub's business is done, it executes
-    // a ret() and returns directly to the caller script, on behalf of the
-    // callee script that jumped to this code.
-    //
-    // Thus the expected stack is:
-    //
-    //                                   StackPointer ----+
-    //                                                    v
-    // ..., ActualArgc, CalleeToken, Descriptor, ReturnAddr
-    // MEM-HI                                       MEM-LOW
-    //
-    //
-    // The generated jitcode is responsible for overwriting the
-    // jitActivation->lastProfilingFrame field with a pointer to the previous
-    // Ion or Baseline jit-frame that was pushed before this one. It is also
-    // responsible for overwriting jitActivation->lastProfilingCallSite with
-    // the return address into that frame.  The frame could either be an
-    // immediate "caller" frame, or it could be a frame in a previous
-    // JitActivation (if the current frame was entered from C++, and the C++
-    // was entered by some caller jit-frame further down the stack).
-    //
-    // So this jitcode is responsible for "walking up" the jit stack, finding
-    // the previous Ion or Baseline JS frame, and storing its address and the
-    // return address into the appropriate fields on the current jitActivation.
-    //
-    // There are a fixed number of different path types that can lead to the
-    // current frame, which is either a baseline or ion frame:
-    //
-    // <Baseline-Or-Ion>
-    // ^
-    // |
-    // ^--- Ion
-    // |
-    // ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Argument Rectifier
-    // |    ^
-    // |    |
-    // |    ^--- Ion
-    // |    |
-    // |    ^--- Baseline Stub <---- Baseline
-    // |
-    // ^--- Entry Frame (From C++)
-    //
-    Register actReg = scratch4;
-    AbsoluteAddress activationAddr(GetJitContext()->runtime->addressOfProfilingActivation());
-    masm.loadPtr(activationAddr, actReg);
-
-    Address lastProfilingFrame(actReg, JitActivation::offsetOfLastProfilingFrame());
-    Address lastProfilingCallSite(actReg, JitActivation::offsetOfLastProfilingCallSite());
-
-#ifdef DEBUG
-    // Ensure that the frame we are exiting is the current lastProfilingFrame.
-    {
-        masm.loadPtr(lastProfilingFrame, scratch1);
-        Label checkOk;
-        masm.branchPtr(Assembler::Equal, scratch1, ImmWord(0), &checkOk);
-        masm.branchPtr(Assembler::Equal, StackPointer, scratch1, &checkOk);
-        masm.assumeUnreachable(
-            "Mismatch between stored lastProfilingFrame and current stack pointer.");
-        masm.bind(&checkOk);
-    }
-#endif
-
-    // Load the frame descriptor into |scratch1|, figure out what to do
-    // depending on its type.
-    masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfDescriptor()), scratch1);
-
-    // Going into the conditionals, we will have:
-    //      FrameDescriptor.size in scratch1
-    //      FrameDescriptor.type in scratch2
-    masm.movePtr(scratch1, scratch2);
-    masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-    masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch2);
-
-    // Handling of each case is dependent on FrameDescriptor.type
-    Label handle_IonJS;
-    Label handle_BaselineStub;
-    Label handle_Rectifier;
-    Label handle_Entry;
-    Label end;
-
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_IonJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineJS), &handle_IonJS);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_BaselineStub), &handle_BaselineStub);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Rectifier), &handle_Rectifier);
-    masm.branch32(Assembler::Equal, scratch2, Imm32(JitFrame_Entry), &handle_Entry);
-
-    masm.assumeUnreachable("Invalid caller frame type when exiting from Ion frame.");
-
-    //
-    // JitFrame_IonJS
-    //
-    // Stack layout:
-    //                  ...
-    //                  Ion-Descriptor
-    //     Prev-FP ---> Ion-ReturnAddr
-    //                  ... previous frame data ... |- Descriptor.Size
-    //                  ... arguments ...           |
-    //                  ActualArgc          |
-    //                  CalleeToken         |- JitFrameLayout::Size()
-    //                  Descriptor          |
-    //        FP -----> ReturnAddr          |
-    //
-    masm.bind(&handle_IonJS);
-    {
-        // |scratch1| contains Descriptor.size
-
-        // Returning directly to an IonJS frame.  Store the return address into
-        // lastProfilingCallSite.
-        masm.loadPtr(Address(StackPointer, JitFrameLayout::offsetOfReturnAddress()), scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        // Store return frame in lastProfilingFrame.
-        // scratch2 := StackPointer + Descriptor.size*1 + JitFrameLayout::Size();
-        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_BaselineStub
-    //
-    // Look past the stub and store the frame pointer to
-    // the baselineJS frame prior to it.
-    //
-    // Stack layout:
-    //              ...
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-PrevFramePointer
-    //      |       ... BL-FrameData ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Descriptor.Size
-    //              ... arguments ...           |
-    //              ActualArgc          |
-    //              CalleeToken         |- JitFrameLayout::Size()
-    //              Descriptor          |
-    //    FP -----> ReturnAddr          |
-    //
-    // We take advantage of the fact that the stub frame saves the frame
-    // pointer pointing to the baseline frame, so a bunch of calculations can
-    // be avoided.
-    //
-    masm.bind(&handle_BaselineStub);
-    {
-        BaseIndex stubFrameReturnAddr(StackPointer, scratch1, TimesOne,
-                                      JitFrameLayout::Size() +
-                                      BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch2);
-        masm.storePtr(scratch2, lastProfilingCallSite);
-
-        BaseIndex stubFrameSavedFramePtr(StackPointer, scratch1, TimesOne,
-                                         JitFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch2);
-        masm.addPtr(Imm32(sizeof(void *)), scratch2); // Skip past BL-PrevFramePtr
-        masm.storePtr(scratch2, lastProfilingFrame);
-        masm.ret();
-    }
-
-
-    //
-    // JitFrame_Rectifier
-    //
-    // The rectifier frame can be preceded by either an IonJS or a
-    // BaselineStub frame.
-    //
-    // Stack layout if caller of rectifier was Ion:
-    //
-    //              Ion-Descriptor
-    //              Ion-ReturnAddr
-    //              ... ion frame data ... |- Rect-Descriptor.Size
-    //              < COMMON LAYOUT >
-    //
-    // Stack layout if caller of rectifier was Baseline:
-    //
-    //              BL-Descriptor
-    // Prev-FP ---> BL-ReturnAddr
-    //      +-----> BL-SavedFramePointer
-    //      |       ... baseline frame data ...
-    //      |       BLStub-Descriptor
-    //      |       BLStub-ReturnAddr
-    //      |       BLStub-StubPointer          |
-    //      +------ BLStub-SavedFramePointer    |- Rect-Descriptor.Size
-    //              ... args to rectifier ...   |
-    //              < COMMON LAYOUT >
-    //
-    // Common stack layout:
-    //
-    //              ActualArgc          |
-    //              CalleeToken         |- RectifierFrameLayout::Size()
-    //              Rect-Descriptor     |
-    //              Rect-ReturnAddr     |
-    //              ... rectifier data & args ... |- Descriptor.Size
-    //              ActualArgc      |
-    //              CalleeToken     |- JitFrameLayout::Size()
-    //              Descriptor      |
-    //    FP -----> ReturnAddr      |
-    //
-    masm.bind(&handle_Rectifier);
-    {
-        // scratch2 := StackPointer + Descriptor.size + JitFrameLayout::Size()
-        masm.lea(Operand(StackPointer, scratch1, TimesOne, JitFrameLayout::Size()), scratch2);
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfDescriptor()), scratch3);
-        masm.movePtr(scratch3, scratch1);
-        masm.and32(Imm32((1 << FRAMETYPE_BITS) - 1), scratch3);
-        masm.rshiftPtr(Imm32(FRAMESIZE_SHIFT), scratch1);
-
-        // Now |scratch1| contains Rect-Descriptor.Size
-        // and |scratch2| points to Rectifier frame
-        // and |scratch3| contains Rect-Descriptor.Type
-
-        // Check for either Ion or BaselineStub frame.
-        Label handle_Rectifier_BaselineStub;
-        masm.branch32(Assembler::NotEqual, scratch3, Imm32(JitFrame_IonJS),
-                      &handle_Rectifier_BaselineStub);
-
-        // Handle Rectifier <- IonJS
-        // scratch3 := RectFrame[ReturnAddr]
-        masm.loadPtr(Address(scratch2, RectifierFrameLayout::offsetOfReturnAddress()), scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        // scratch3 := RectFrame + Rect-Descriptor.Size + RectifierFrameLayout::Size()
-        masm.lea(Operand(scratch2, scratch1, TimesOne, RectifierFrameLayout::Size()), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-
-        // Handle Rectifier <- BaselineStub <- BaselineJS
-        masm.bind(&handle_Rectifier_BaselineStub);
-#ifdef DEBUG
-        {
-            Label checkOk;
-            masm.branch32(Assembler::Equal, scratch3, Imm32(JitFrame_BaselineStub), &checkOk);
-            masm.assumeUnreachable("Unrecognized frame preceding baselineStub.");
-            masm.bind(&checkOk);
-        }
-#endif
-        BaseIndex stubFrameReturnAddr(scratch2, scratch1, TimesOne,
-                                         RectifierFrameLayout::Size() +
-                                         BaselineStubFrameLayout::offsetOfReturnAddress());
-        masm.loadPtr(stubFrameReturnAddr, scratch3);
-        masm.storePtr(scratch3, lastProfilingCallSite);
-
-        BaseIndex stubFrameSavedFramePtr(scratch2, scratch1, TimesOne,
-                                         RectifierFrameLayout::Size() - (2 * sizeof(void *)));
-        masm.loadPtr(stubFrameSavedFramePtr, scratch3);
-        masm.addPtr(Imm32(sizeof(void *)), scratch3);
-        masm.storePtr(scratch3, lastProfilingFrame);
-        masm.ret();
-    }
-
-    //
-    // JitFrame_Entry
-    //
-    // If at an entry frame, store null into both fields.
-    //
-    masm.bind(&handle_Entry);
-    {
-        masm.movePtr(ImmPtr(nullptr), scratch1);
-        masm.storePtr(scratch1, lastProfilingCallSite);
-        masm.storePtr(scratch1, lastProfilingFrame);
-        masm.ret();
-    }
-
-    Linker linker(masm);
-    JitCode *code = linker.newCode<NoGC>(cx, OTHER_CODE);
-
-#ifdef JS_ION_PERF
-    writePerfSpewerJitCodeProfile(code, "ProfilerExitFrameStub");
-#endif
-
-    return code;
-}
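
For reference, the descriptor word the stub decodes on entry packs the caller frame's size and its frame type into a single word; a minimal C++ sketch of that decoding, using the same FRAMESIZE_SHIFT / FRAMETYPE_BITS constants as the assembly above (the struct and function are illustrative, not tree types):

    struct DecodedDescriptor {
        size_t size;     // byte size of the caller frame's data and arguments
        uint32_t type;   // one of the JitFrame_* values (IonJS, BaselineStub, ...)
    };

    static DecodedDescriptor decodeDescriptor(uintptr_t descriptor) {
        DecodedDescriptor d;
        d.size = descriptor >> FRAMESIZE_SHIFT;              // matches rshiftPtr above
        d.type = descriptor & ((1 << FRAMETYPE_BITS) - 1);   // matches and32 above
        return d;
    }
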
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -4155,29 +4155,22 @@ SingleStepCallback(void *arg, jit::Simul
 
     JS::ProfilingFrameIterator::RegisterState state;
     state.pc = pc;
     state.sp = (void*)sim->get_register(jit::Simulator::sp);
     state.lr = (void*)sim->get_register(jit::Simulator::lr);
 
     DebugOnly<void*> lastStackAddress = nullptr;
     StackChars stack;
-    uint32_t frameNo = 0;
     for (JS::ProfilingFrameIterator i(rt, state); !i.done(); ++i) {
         MOZ_ASSERT(i.stackAddress() != nullptr);
         MOZ_ASSERT(lastStackAddress <= i.stackAddress());
         lastStackAddress = i.stackAddress();
-        JS::ProfilingFrameIterator::Frame frames[16];
-        uint32_t nframes = i.extractStack(frames, 0, 16);
-        for (uint32_t i = 0; i < nframes; i++) {
-            if (frameNo > 0)
-                stack.append(",", 1);
-            stack.append(frames[i].label, strlen(frames[i].label));
-            frameNo++;
-        }
+        const char *label = i.label();
+        stack.append(label, strlen(label));
     }
 
     // Only append the stack if it differs from the last stack.
     if (stacks.empty() ||
         stacks.back().length() != stack.length() ||
         !PodEqual(stacks.back().begin(), stack.begin(), stack.length()))
     {
         stacks.append(Move(stack));
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -1678,22 +1678,17 @@ END_CASE(JSOP_LABEL)
 CASE(JSOP_LOOPENTRY)
     // Attempt on-stack replacement with Baseline code.
     if (jit::IsBaselineEnabled(cx)) {
         jit::MethodStatus status = jit::CanEnterBaselineAtBranch(cx, REGS.fp(), false);
         if (status == jit::Method_Error)
             goto error;
         if (status == jit::Method_Compiled) {
             bool wasSPS = REGS.fp()->hasPushedSPSFrame();
-
-            jit::JitExecStatus maybeOsr;
-            {
-                SPSBaselineOSRMarker spsOSR(cx->runtime(), wasSPS);
-                maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
-            }
+            jit::JitExecStatus maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
 
             // We failed to call into baseline at all, so treat as an error.
             if (maybeOsr == jit::JitExec_Aborted)
                 goto error;
 
             interpReturnOK = (maybeOsr == jit::JitExec_Ok);
 
             // Pop the SPS frame pushed by the interpreter.  (The compiled version of the
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -70,17 +70,16 @@ js::DisableExtraThreads()
 
 const JSSecurityCallbacks js::NullSecurityCallbacks = { };
 
 PerThreadData::PerThreadData(JSRuntime *runtime)
   : PerThreadDataFriendFields(),
     runtime_(runtime),
     jitTop(nullptr),
     jitJSContext(nullptr),
-    jitActivation(nullptr),
     jitStackLimit_(0xbad),
 #ifdef JS_TRACE_LOGGING
     traceLogger(nullptr),
 #endif
     activation_(nullptr),
     profilingActivation_(nullptr),
     asmJSActivationStack_(nullptr),
     autoFlushICache_(nullptr),
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -82,17 +82,16 @@ class MathCache;
 
 namespace jit {
 class JitRuntime;
 class JitActivation;
 struct PcScriptCache;
 class Simulator;
 class SimulatorRuntime;
 struct AutoFlushICache;
-class CompileRuntime;
 }
 
 /*
  * GetSrcNote cache to avoid O(n^2) growth in finding a source note for a
  * given pc in a script. We use the script->code pointer to tag the cache,
  * instead of the script address itself, so that source notes are always found
  * by offset from the bytecode with which they were generated.
  */
@@ -515,22 +514,16 @@ class PerThreadData : public PerThreadDa
 
     /*
      * The current JSContext when entering JIT code. This field may only be used
      * from JIT code and C++ directly called by JIT code (otherwise it may refer
      * to the wrong JSContext).
      */
     JSContext           *jitJSContext;
 
-     /*
-     * Points to the most recent JitActivation pushed on the thread.
-     * See JitActivation constructor in vm/Stack.cpp
-     */
-    js::jit::JitActivation *jitActivation;
-
     /* See comment for JSRuntime::interrupt_. */
   private:
     mozilla::Atomic<uintptr_t, mozilla::Relaxed> jitStackLimit_;
     void resetJitStackLimit();
     friend struct ::JSRuntime;
   public:
     void initJitStackLimit();
     void initJitStackLimitPar(uintptr_t limit);
@@ -548,17 +541,16 @@ class PerThreadData : public PerThreadDa
     TraceLoggerThread   *traceLogger;
 #endif
 
   private:
     friend class js::Activation;
     friend class js::ActivationIterator;
     friend class js::jit::JitActivation;
     friend class js::AsmJSActivation;
-    friend class js::jit::CompileRuntime;
 #ifdef DEBUG
     friend void js::AssertCurrentThreadCanLock(RuntimeLock which);
 #endif
 
     /*
      * Points to the most recent activation running on the thread.
      * See Activation comment in vm/Stack.h.
      */
@@ -590,22 +582,16 @@ class PerThreadData : public PerThreadDa
     }
     static unsigned offsetOfActivation() {
         return offsetof(PerThreadData, activation_);
     }
 
     js::Activation *profilingActivation() const {
         return profilingActivation_;
     }
-    void *addressOfProfilingActivation() {
-        return (void*) &profilingActivation_;
-    }
-    static unsigned offsetOfProfilingActivation() {
-        return offsetof(PerThreadData, profilingActivation_);
-    }
 
     js::AsmJSActivation *asmJSActivationStack() const {
         return asmJSActivationStack_;
     }
     static js::AsmJSActivation *innermostAsmJSActivation() {
         PerThreadData *ptd = TlsPerThreadData.get();
         return ptd ? ptd->asmJSActivationStack_ : nullptr;
     }
@@ -1028,17 +1014,17 @@ struct JSRuntime : public JS::shadow::Ru
     /* SPS profiling metadata */
     js::SPSProfiler     spsProfiler;
 
     /* If true, new scripts must be created with PC counter information. */
     bool                profilingScripts;
 
     /* Whether sampling should be enabled or not. */
   private:
-    mozilla::Atomic<bool, mozilla::SequentiallyConsistent> suppressProfilerSampling;
+    bool                suppressProfilerSampling;
 
   public:
     bool isProfilerSamplingEnabled() const {
         return !suppressProfilerSampling;
     }
     void disableProfilerSampling() {
         suppressProfilerSampling = true;
     }
--- a/js/src/vm/SPSProfiler.cpp
+++ b/js/src/vm/SPSProfiler.cpp
@@ -7,20 +7,17 @@
 #include "vm/SPSProfiler.h"
 
 #include "mozilla/DebugOnly.h"
 
 #include "jsnum.h"
 #include "jsprf.h"
 #include "jsscript.h"
 
-#include "jit/BaselineFrame.h"
 #include "jit/BaselineJIT.h"
-#include "jit/JitFrameIterator.h"
-#include "jit/JitFrames.h"
 #include "vm/StringBuffer.h"
 
 using namespace js;
 
 using mozilla::DebugOnly;
 
 SPSProfiler::SPSProfiler(JSRuntime *rt)
   : rt(rt),
@@ -89,25 +86,17 @@ SPSProfiler::enable(bool enabled)
 
     enabled_ = enabled;
 
     /* Toggle SPS-related jumps on baseline jitcode.
      * The call to |ReleaseAllJITCode| above will release most baseline jitcode, but not
      * jitcode for scripts with active frames on the stack.  These scripts need to have
      * their profiler state toggled so they behave properly.
      */
-    jit::ToggleBaselineProfiling(rt, enabled);
-
-    /* Update lastProfilingFrame to point to the top-most JS jit-frame currently on
-     * stack.
-     */
-    if (rt->mainThread.jitActivation) {
-        void *lastProfilingFrame = GetTopProfilingJitFrame(rt->mainThread.jitTop);
-        rt->mainThread.jitActivation->setLastProfilingFrame(lastProfilingFrame);
-    }
+    jit::ToggleBaselineSPS(rt, enabled);
 }
 
 /* Lookup the string for the function/script, creating one if necessary */
 const char*
 SPSProfiler::profileString(JSScript *script, JSFunction *maybeFun)
 {
     AutoSPSLock lock(lock_);
     MOZ_ASSERT(strings.initialized());
@@ -205,28 +194,28 @@ SPSProfiler::exit(JSScript *script, JSFu
         MOZ_ASSERT(strcmp((const char*) stack_[*size_].label(), str) == 0);
         stack_[*size_].setLabel(nullptr);
         stack_[*size_].setPC(nullptr);
     }
 #endif
 }
 
 void
-SPSProfiler::beginPseudoJS(const char *string, void *sp)
+SPSProfiler::enterAsmJS(const char *string, void *sp)
 {
     /* these operations cannot be re-ordered, so volatile-ize operations */
     volatile ProfileEntry *stack = stack_;
     volatile uint32_t *size = size_;
     uint32_t current = *size;
 
-    MOZ_ASSERT(installed());
+    MOZ_ASSERT(enabled());
     if (current < max_) {
         stack[current].setLabel(string);
         stack[current].setCppFrame(sp, 0);
-        stack[current].setFlag(ProfileEntry::BEGIN_PSEUDO_JS);
+        stack[current].setFlag(ProfileEntry::ASMJS);
     }
     *size = current + 1;
 }
 
 void
 SPSProfiler::push(const char *string, void *sp, JSScript *script, jsbytecode *pc, bool copy)
 {
     MOZ_ASSERT_IF(sp != nullptr, script == nullptr && pc == nullptr);
@@ -328,61 +317,28 @@ SPSEntryMarker::SPSEntryMarker(JSRuntime
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     if (!profiler->installed()) {
         profiler = nullptr;
         return;
     }
     size_before = *profiler->size_;
     // We want to push a CPP frame so the profiler can correctly order JS and native stacks.
-    profiler->beginPseudoJS("js::RunScript", this);
+    profiler->push("js::RunScript", this, nullptr, nullptr, /* copy = */ false);
+    // We also want to push a JS frame so the hang monitor can catch script hangs.
     profiler->push("js::RunScript", nullptr, script, script->code(), /* copy = */ false);
 }
 
 SPSEntryMarker::~SPSEntryMarker()
 {
-    if (profiler == nullptr)
-        return;
-
-    profiler->pop();
-    profiler->endPseudoJS();
-    MOZ_ASSERT(size_before == *profiler->size_);
-}
-
-SPSBaselineOSRMarker::SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
-                                           MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
-    : profiler(&rt->spsProfiler)
-{
-    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
-    if (!hasSPSFrame || !profiler->enabled()) {
-        profiler = nullptr;
-        return;
+    if (profiler != nullptr) {
+        profiler->pop();
+        profiler->pop();
+        MOZ_ASSERT(size_before == *profiler->size_);
     }
-
-    size_before = profiler->size();
-    if (profiler->size() == 0)
-        return;
-
-    ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
-    MOZ_ASSERT(entry.isJs());
-    entry.setOSR();
-}
-
-SPSBaselineOSRMarker::~SPSBaselineOSRMarker()
-{
-    if (profiler == nullptr)
-        return;
-
-    MOZ_ASSERT(size_before == *profiler->size_);
-    if (profiler->size() == 0)
-        return;
-
-    ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
-    MOZ_ASSERT(entry.isJs());
-    entry.unsetOSR();
 }
 
 JS_FRIEND_API(jsbytecode*)
 ProfileEntry::pc() const volatile
 {
     MOZ_ASSERT(isJs());
     return lineOrPc == NullPCOffset ? nullptr : script()->offsetToPC(lineOrPc);
 }
@@ -414,16 +370,18 @@ js::RegisterRuntimeProfilingEventMarker(
 }
 
 JS_FRIEND_API(jsbytecode*)
 js::ProfilingGetPC(JSRuntime *rt, JSScript *script, void *ip)
 {
     return rt->spsProfiler.ipToPC(script, size_t(ip));
 }
 
+
+
 AutoSuppressProfilerSampling::AutoSuppressProfilerSampling(JSContext *cx
                                                            MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : rt_(cx->runtime()),
     previouslyEnabled_(rt_->isProfilerSamplingEnabled())
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     if (previouslyEnabled_)
         rt_->disableProfilerSampling();
@@ -439,20 +397,8 @@ AutoSuppressProfilerSampling::AutoSuppre
         rt_->disableProfilerSampling();
 }
 
 AutoSuppressProfilerSampling::~AutoSuppressProfilerSampling()
 {
         if (previouslyEnabled_)
             rt_->enableProfilerSampling();
 }
-
-void *
-js::GetTopProfilingJitFrame(uint8_t *exitFramePtr)
-{
-    // For a null exitFramePtr there is no previous exit frame; just return null.
-    if (!exitFramePtr)
-        return nullptr;
-
-    jit::JitProfilingFrameIterator iter(exitFramePtr);
-    MOZ_ASSERT(!iter.done());
-    return iter.fp();
-}
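
Taken together with the hunk removed from SPSProfiler::enable() above, the helper removed here re-seeds lastProfilingFrame when the profiler is toggled on mid-execution. A condensed sketch of that flow under the same assumptions (seedLastProfilingFrame is an illustrative name, not a function in the tree):

    // Sketch only: equivalent of the enable()-time hookup removed above.
    static void seedLastProfilingFrame(JSRuntime *rt) {
        jit::JitActivation *act = rt->mainThread.jitActivation;
        if (!act)
            return;                                // no JIT activation on this thread
        uint8_t *exitFramePtr = rt->mainThread.jitTop;
        if (!exitFramePtr) {
            act->setLastProfilingFrame(nullptr);   // nothing on the jit stack yet
            return;
        }
        jit::JitProfilingFrameIterator iter(exitFramePtr);
        act->setLastProfilingFrame(iter.fp());     // topmost Ion/Baseline frame
    }
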
--- a/js/src/vm/SPSProfiler.h
+++ b/js/src/vm/SPSProfiler.h
@@ -107,22 +107,20 @@
 namespace js {
 
 class ProfileEntry;
 
 typedef HashMap<JSScript*, const char*, DefaultHasher<JSScript*>, SystemAllocPolicy>
         ProfileStringMap;
 
 class SPSEntryMarker;
-class SPSBaselineOSRMarker;
 
 class SPSProfiler
 {
     friend class SPSEntryMarker;
-    friend class SPSBaselineOSRMarker;
 
     JSRuntime            *rt;
     ProfileStringMap     strings;
     ProfileEntry         *stack_;
     uint32_t             *size_;
     uint32_t             max_;
     bool                 slowAssertions;
     uint32_t             enabled_;
@@ -148,17 +146,16 @@ class SPSProfiler
     }
 
     ProfileEntry **addressOfStack() {
         return &stack_;
     }
 
     uint32_t *sizePointer() { return size_; }
     uint32_t maxSize() { return max_; }
-    uint32_t size() { MOZ_ASSERT(installed()); return *size_; }
     ProfileEntry *stack() { return stack_; }
 
     /* management of whether instrumentation is on or off */
     bool enabled() { MOZ_ASSERT_IF(enabled_, installed()); return enabled_; }
     bool installed() { return stack_ != nullptr && size_ != nullptr; }
     void enable(bool enabled);
     void enableSlowAssertions(bool enabled) { slowAssertions = enabled; }
     bool slowAssertionsEnabled() { return slowAssertions; }
@@ -178,18 +175,18 @@ class SPSProfiler
         if (enabled() && *size_ - 1 < max_) {
             MOZ_ASSERT(*size_ > 0);
             MOZ_ASSERT(stack_[*size_ - 1].script() == script);
             stack_[*size_ - 1].setPC(pc);
         }
     }
 
     /* Enter asm.js code */
-    void beginPseudoJS(const char *string, void *sp);
-    void endPseudoJS() { pop(); }
+    void enterAsmJS(const char *string, void *sp);
+    void exitAsmJS() { pop(); }
 
     jsbytecode *ipToPC(JSScript *script, size_t ip) { return nullptr; }
 
     void setProfilingStack(ProfileEntry *stack, uint32_t *size, uint32_t max);
     void setEventMarker(void (*fn)(const char *));
     const char *profileString(JSScript *script, JSFunction *maybeFun);
     void onScriptFinalized(JSScript *script);
 
@@ -270,63 +267,252 @@ class SPSEntryMarker
 
   private:
     SPSProfiler *profiler;
     mozilla::DebugOnly<uint32_t> size_before;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 /*
- * This class is used in the interpreter to bound regions where the baseline JIT
- * is being entered via OSR.  It marks the current top pseudostack entry as
- * OSR-ed.
- */
-class SPSBaselineOSRMarker
-{
-  public:
-    explicit SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
-                                  MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
-    ~SPSBaselineOSRMarker();
-
-  private:
-    SPSProfiler *profiler;
-    mozilla::DebugOnly<uint32_t> size_before;
-    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
-};
-
-/*
  * SPS is the profiling backend used by the JS engine to enable time profiling.
  * More information can be found in vm/SPSProfiler.{h,cpp}. This class manages
  * the instrumentation portion of the profiling for JIT code.
  *
  * The instrumentation tracks entry into functions, leaving those functions via
  * a function call, reentering the functions from a function call, and exiting
  * the functions from returning. This class also handles inline frames and
  * manages the instrumentation which needs to be attached to them as well.
  *
  * The basic methods which emit instrumentation are at the end of this class,
  * and the management functions are all described in the middle.
  */
 template<class Assembler, class Register>
 class SPSInstrumentation
 {
+    /* Because of inline frames, this is a nested structure in a vector */
+    struct FrameState {
+        JSScript *script; // script for this frame, nullptr if not pushed yet
+        jsbytecode *pc;   // pc at which this frame was left for entry into a callee
+        bool skipNext;    // should the next call to reenter be skipped?
+        int  left;        // number of leave() calls made without a matching reenter()
+    };
+
     SPSProfiler *profiler_; // Instrumentation location management
 
+    Vector<FrameState, 1, SystemAllocPolicy> frames;
+    FrameState *frame;
+
+    static void clearFrame(FrameState *frame) {
+        frame->script = nullptr;
+        frame->pc = nullptr;
+        frame->skipNext = false;
+        frame->left = 0;
+    }
+
   public:
     /*
     * Creates instrumentation which writes information out to the specified
      * profiler's stack and constituent fields.
      */
-    explicit SPSInstrumentation(SPSProfiler *profiler) : profiler_(profiler) {}
+    explicit SPSInstrumentation(SPSProfiler *profiler)
+      : profiler_(profiler), frame(nullptr)
+    {
+        enterInlineFrame(nullptr);
+    }
 
     /* Small proxies around SPSProfiler */
     bool enabled() { return profiler_ && profiler_->enabled(); }
     SPSProfiler *profiler() { MOZ_ASSERT(enabled()); return profiler_; }
     void disable() { profiler_ = nullptr; }
-};
+
+    /* Signals an inline function returned, reverting to the previous state */
+    void leaveInlineFrame() {
+        if (!enabled())
+            return;
+        MOZ_ASSERT(frame->left == 0);
+        MOZ_ASSERT(frame->script != nullptr);
+        frames.shrinkBy(1);
+        MOZ_ASSERT(frames.length() > 0);
+        frame = &frames[frames.length() - 1];
+    }
+
+    /* Saves the current state and assumes a fresh one for the inline function */
+    bool enterInlineFrame(jsbytecode *callerPC) {
+        if (!enabled())
+            return true;
+        MOZ_ASSERT_IF(frames.empty(), callerPC == nullptr);
+
+        MOZ_ASSERT_IF(frame != nullptr, frame->script != nullptr);
+        MOZ_ASSERT_IF(frame != nullptr, frame->left == 1);
+        if (!frames.empty()) {
+            MOZ_ASSERT(frame == &frames[frames.length() - 1]);
+            frame->pc = callerPC;
+        }
+        if (!frames.growBy(1))
+            return false;
+        frame = &frames[frames.length() - 1];
+        clearFrame(frame);
+        return true;
+    }
+
+    /* Prepares the instrumenter state for generating OOL code, by
+     * setting up the frame state to seem as if there are exactly
+     * two pushed frames: a frame for the top-level script, and
+     * a frame for the OOL code being generated.  Any
+     * vm-calls from the OOL code will "leave" the OOL frame and
+     * return back to it.
+     */
+    bool prepareForOOL() {
+        if (!enabled())
+            return true;
+        MOZ_ASSERT(!frames.empty());
+        if (frames.length() >= 2) {
+            frames.shrinkBy(frames.length() - 2);
 
+        } else { // frames.length() == 1
+            if (!frames.growBy(1))
+                return false;
+        }
+        frames[0].pc = frames[0].script->code();
+        frame = &frames[1];
+        clearFrame(frame);
+        return true;
+    }
+    void finishOOL() {
+        if (!enabled())
+            return;
+        MOZ_ASSERT(!frames.empty());
+        frames.shrinkBy(frames.length() - 1);
+    }
+
+    /* Number of inline frames currently active (doesn't include original one) */
+    unsigned inliningDepth() {
+        return frames.length() - 1;
+    }
+
+    /*
+     * When debugging or with slow assertions, sometimes a C++ method will be
+     * invoked to perform the pop operation from the SPS stack. When we leave
+     * JIT code, we need to record the current PC, but upon reentering JIT
+     * code, no update back to nullptr should happen. This method exists to
+     * flag this behavior. The next leave() will emit instrumentation, but the
+     * following reenter() will be a no-op.
+     */
+    void skipNextReenter() {
+        /* If we've left the frame, the reenter will be skipped anyway */
+        if (!enabled() || frame->left != 0)
+            return;
+        MOZ_ASSERT(frame->script);
+        MOZ_ASSERT(!frame->skipNext);
+        frame->skipNext = true;
+    }
+
+    /*
+     * In some cases, a frame needs to be flagged as having been pushed, but no
+     * instrumentation should be emitted. This updates internal state to flag
+     * that further instrumentation should actually be emitted.
+     */
+    void setPushed(JSScript *script) {
+        if (!enabled())
+            return;
+        MOZ_ASSERT(frame->left == 0);
+        frame->script = script;
+    }
 
-/* Get a pointer to the top-most profiling frame, given the exit frame pointer. */
-void *GetTopProfilingJitFrame(uint8_t *exitFramePtr);
+    JSScript *getPushed() {
+        if (!enabled())
+            return nullptr;
+        return frame->script;
+    }
+
+    /*
+     * Flags entry into a JS function for the first time. Before this is called,
+     * no instrumentation is emitted; after this call, instrumentation is emitted.
+     */
+    bool push(JSScript *script, Assembler &masm, Register scratch, bool inlinedFunction = false) {
+        if (!enabled())
+            return true;
+        if (!inlinedFunction) {
+            const char *string = profiler_->profileString(script, script->functionNonDelazifying());
+            if (string == nullptr)
+                return false;
+            masm.spsPushFrame(profiler_, string, script, scratch);
+        }
+        setPushed(script);
+        return true;
+    }
+
+    /*
+     * Signifies that C++ performed the push() for this function. C++ always
+     * sets the current PC to something non-null, however, so as soon as JIT
+     * code is reentered this updates the current pc to nullptr.
+     */
+    void pushManual(JSScript *script, Assembler &masm, Register scratch,
+                    bool inlinedFunction = false)
+    {
+        if (!enabled())
+            return;
+
+        if (!inlinedFunction)
+            masm.spsUpdatePCIdx(profiler_, ProfileEntry::NullPCOffset, scratch);
+
+        setPushed(script);
+    }
+
+    /*
+     * Signals that the current function is leaving for a function call. This
+     * can happen both on JS function calls and also calls to C++. This
+     * internally manages how many leave() calls have been seen, and only the
+     * first leave() emits instrumentation. Similarly, only the last
+     * corresponding reenter() actually emits instrumentation.
+     */
+    void leave(jsbytecode *pc, Assembler &masm, Register scratch, bool inlinedFunction = false) {
+        if (enabled() && frame->script && frame->left++ == 0) {
+            jsbytecode *updatePC = pc;
+            JSScript *script = frame->script;
+            if (!inlinedFunction) {
+                // We may be leaving an inlined frame for entry into a C++ frame.
+                // Use the top script's pc offset instead of the innermost script's.
+                if (inliningDepth() > 0) {
+                    MOZ_ASSERT(frames[0].pc);
+                    updatePC = frames[0].pc;
+                    script = frames[0].script;
+                }
+            }
+
+            if (!inlinedFunction)
+                masm.spsUpdatePCIdx(profiler_, script->pcToOffset(updatePC), scratch);
+        }
+    }
+
+    /*
+     * Flags that the call the current function left for has returned. This tracks
+     * state with leave() to only emit instrumentation at proper times.
+     */
+    void reenter(Assembler &masm, Register scratch, bool inlinedFunction = false) {
+        if (!enabled() || !frame->script || frame->left-- != 1)
+            return;
+        if (frame->skipNext) {
+            frame->skipNext = false;
+        } else {
+            if (!inlinedFunction)
+                masm.spsUpdatePCIdx(profiler_, ProfileEntry::NullPCOffset, scratch);
+        }
+    }
+
+    /*
+     * Signifies exiting a JS frame, popping the SPS entry. Because there can be
+     * multiple return sites of a function, this does not cease instrumentation
+     * emission.
+     */
+    void pop(Assembler &masm, Register scratch, bool inlinedFunction = false) {
+        if (enabled()) {
+            MOZ_ASSERT(frame->left == 0);
+            MOZ_ASSERT(frame->script);
+            if (!inlinedFunction)
+                masm.spsPopFrame(profiler_, scratch);
+        }
+    }
+};
 
 } /* namespace js */
 
 #endif /* vm_SPSProfiler_h */
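
As a usage sketch of the leave()/reenter() bookkeeping restored above; sps_, masm, scratch, pc, and calleeScript are assumed locals of a surrounding code generator, not names from this patch:

    // Around a call out to C++: only the first leave() and the matching last
    // reenter() emit spsUpdatePCIdx; nested pairs only adjust frame->left.
    sps_.leave(pc, masm, scratch);    // record the call-site pc in the profile entry
    // ... emit the VM call here ...
    sps_.reenter(masm, scratch);      // reset the recorded pc to NullPCOffset

    // Around an inlined call: the caller is marked as left, a fresh FrameState
    // is pushed for the callee, and no instrumentation is emitted for it.
    sps_.leave(pc, masm, scratch);
    if (!sps_.enterInlineFrame(pc))
        return false;                 // OOM growing the frames vector
    if (!sps_.push(calleeScript, masm, scratch, /* inlinedFunction = */ true))
        return false;
    // ... generate the inlined body ...
    sps_.pop(masm, scratch, /* inlinedFunction = */ true);
    sps_.leaveInlineFrame();
    sps_.reenter(masm, scratch);
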
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -9,17 +9,16 @@
 #include "mozilla/PodOperations.h"
 
 #include "jscntxt.h"
 
 #include "asmjs/AsmJSFrameIterator.h"
 #include "asmjs/AsmJSModule.h"
 #include "gc/Marking.h"
 #include "jit/BaselineFrame.h"
-#include "jit/JitcodeMap.h"
 #include "jit/JitCompartment.h"
 #include "js/GCAPI.h"
 #include "vm/Opcodes.h"
 
 #include "jit/JitFrameIterator-inl.h"
 #include "vm/Interpreter-inl.h"
 #include "vm/Probes-inl.h"
 #include "vm/ScopeObject-inl.h"
@@ -1369,73 +1368,54 @@ AbstractFramePtr::evalPrevScopeChain(JSC
     return iter.scopeChain(cx);
 }
 
 bool
 AbstractFramePtr::hasPushedSPSFrame() const
 {
     if (isInterpreterFrame())
         return asInterpreterFrame()->hasPushedSPSFrame();
-    MOZ_ASSERT(isBaselineFrame());
-    return false;
+    return asBaselineFrame()->hasPushedSPSFrame();
 }
 
 jit::JitActivation::JitActivation(JSContext *cx, bool active)
   : Activation(cx, Jit),
     active_(active),
     rematerializedFrames_(nullptr),
     ionRecovery_(cx),
-    bailoutData_(nullptr),
-    lastProfilingFrame_(nullptr),
-    lastProfilingCallSite_(nullptr)
+    bailoutData_(nullptr)
 {
     if (active) {
         prevJitTop_ = cx->mainThread().jitTop;
         prevJitJSContext_ = cx->mainThread().jitJSContext;
-        prevJitActivation_ = cx->mainThread().jitActivation;
         cx->mainThread().jitJSContext = cx;
-        cx->mainThread().jitActivation = this;
-
-        registerProfiling();
     } else {
         prevJitTop_ = nullptr;
         prevJitJSContext_ = nullptr;
-        prevJitActivation_ = nullptr;
     }
 }
 
 jit::JitActivation::~JitActivation()
 {
     if (active_) {
-        if (isProfiling())
-            unregisterProfiling();
-
         cx_->perThreadData->jitTop = prevJitTop_;
         cx_->perThreadData->jitJSContext = prevJitJSContext_;
-        cx_->perThreadData->jitActivation = prevJitActivation_;
     }
 
     // All recovered values are taken from the activation during the bailout.
     MOZ_ASSERT(ionRecovery_.empty());
 
     // The BailoutFrameInfo should have unregistered itself from the
     // JitActivations.
     MOZ_ASSERT(!bailoutData_);
 
     clearRematerializedFrames();
     js_delete(rematerializedFrames_);
 }
 
-bool
-jit::JitActivation::isProfiling() const
-{
-    // All JitActivations can be profiled.
-    return true;
-}
-
 void
 jit::JitActivation::setBailoutData(jit::BailoutFrameInfo *bailoutData)
 {
     MOZ_ASSERT(!bailoutData_);
     bailoutData_ = bailoutData;
 }
 
 void
@@ -1450,35 +1430,25 @@ jit::JitActivation::cleanBailoutData()
 // and disable activation instruction sequences.
 void
 jit::JitActivation::setActive(JSContext *cx, bool active)
 {
     // Only allowed to deactivate/activate if activation is top.
     // (Not tested and will probably fail in other situations.)
     MOZ_ASSERT(cx->mainThread().activation_ == this);
     MOZ_ASSERT(active != active_);
+    active_ = active;
 
     if (active) {
-        *((volatile bool *) active_) = true;
         prevJitTop_ = cx->mainThread().jitTop;
         prevJitJSContext_ = cx->mainThread().jitJSContext;
-        prevJitActivation_ = cx->mainThread().jitActivation;
         cx->mainThread().jitJSContext = cx;
-        cx->mainThread().jitActivation = this;
-
-        registerProfiling();
-
     } else {
-        unregisterProfiling();
-
         cx->mainThread().jitTop = prevJitTop_;
         cx->mainThread().jitJSContext = prevJitJSContext_;
-        cx->mainThread().jitActivation = prevJitActivation_;
-
-        *((volatile bool *) active_) = false;
     }
 }
 
 void
 jit::JitActivation::removeRematerializedFrame(uint8_t *top)
 {
     if (!rematerializedFrames_)
         return;
@@ -1615,18 +1585,20 @@ AsmJSActivation::AsmJSActivation(JSConte
     resumePC_(nullptr),
     fp_(nullptr),
     exitReason_(AsmJSExit::None)
 {
     (void) entrySP_;  // squelch GCC warning
 
     // NB: this is a hack and can be removed once Ion switches over to
     // JS::ProfilingFrameIterator.
-    if (cx->runtime()->spsProfiler.enabled())
+    if (cx->runtime()->spsProfiler.enabled()) {
         profiler_ = &cx->runtime()->spsProfiler;
+        profiler_->enterAsmJS("asm.js code :0", this);
+    }
 
     prevAsmJSForModule_ = module.activation();
     module.activation() = this;
 
     prevAsmJS_ = cx->mainThread().asmJSActivationStack_;
     cx->mainThread().asmJSActivationStack_ = this;
 
     // Now that the AsmJSActivation is fully initialized, make it visible to
@@ -1634,16 +1606,19 @@ AsmJSActivation::AsmJSActivation(JSConte
     registerProfiling();
 }
 
 AsmJSActivation::~AsmJSActivation()
 {
     // Hide this activation from the profiler before it is destroyed.
     unregisterProfiling();
 
+    if (profiler_)
+        profiler_->exitAsmJS();
+
     MOZ_ASSERT(fp_ == nullptr);
 
     MOZ_ASSERT(module_.activation() == this);
     module_.activation() = prevAsmJSForModule_;
 
     JSContext *cx = cx_->asJSContext();
     MOZ_ASSERT(cx->mainThread().asmJSActivationStack_ == this);
 
@@ -1673,23 +1648,17 @@ Activation::registerProfiling()
     cx_->perThreadData->profilingActivation_ = this;
 }
 
 void
 Activation::unregisterProfiling()
 {
     MOZ_ASSERT(isProfiling());
     MOZ_ASSERT(cx_->perThreadData->profilingActivation_ == this);
-
-    // There may be a non-active jit activation in the linked list.  Skip past it.
-    Activation *prevProfiling = prevProfiling_;
-    while (prevProfiling && prevProfiling->isJit() && !prevProfiling->asJit()->isActive())
-        prevProfiling = prevProfiling->prevProfiling_;
-
-    cx_->perThreadData->profilingActivation_ = prevProfiling;
+    cx_->perThreadData->profilingActivation_ = prevProfiling_;
 }
 
 ActivationIterator::ActivationIterator(JSRuntime *rt)
   : jitTop_(rt->mainThread.jitTop),
     activation_(rt->mainThread.activation_)
 {
     settle();
 }
@@ -1717,34 +1686,24 @@ ActivationIterator::settle()
 {
     // Stop at the next active activation. No need to update jitTop_, since
     // we don't iterate over an active jit activation.
     while (!done() && activation_->isJit() && !activation_->asJit()->isActive())
         activation_ = activation_->prev();
 }
 
 JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
-  : rt_(rt),
-    activation_(rt->mainThread.profilingActivation()),
-    savedPrevJitTop_(nullptr)
+  : activation_(rt->mainThread.profilingActivation())
 {
     if (!activation_)
         return;
 
-    // If profiler sampling is not enabled, skip.
-    if (!rt_->isProfilerSamplingEnabled()) {
-        activation_ = nullptr;
-        return;
-    }
-
     MOZ_ASSERT(activation_->isProfiling());
 
-    static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace &&
-                  sizeof(jit::JitProfilingFrameIterator) <= StorageSpace,
-                  "Need to increase storage");
+    static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace, "Need to increase storage");
 
     iteratorConstruct(state);
     settle();
 }
 
 JS::ProfilingFrameIterator::~ProfilingFrameIterator()
 {
     if (!done()) {
@@ -1752,172 +1711,79 @@ JS::ProfilingFrameIterator::~ProfilingFr
         iteratorDestroy();
     }
 }
 
 void
 JS::ProfilingFrameIterator::operator++()
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS()) {
-        ++asmJSIter();
-        settle();
-        return;
-    }
-
-    ++jitIter();
+    MOZ_ASSERT(activation_->isAsmJS());
+    ++asmJSIter();
     settle();
 }
 
 void
 JS::ProfilingFrameIterator::settle()
 {
     while (iteratorDone()) {
         iteratorDestroy();
         activation_ = activation_->prevProfiling();
-
-        // Skip past any non-active jit activations in the list.
-        while (activation_ && activation_->isJit() && !activation_->asJit()->isActive())
-            activation_ = activation_->prevProfiling();
-
         if (!activation_)
             return;
         iteratorConstruct();
     }
 }
 
 void
 JS::ProfilingFrameIterator::iteratorConstruct(const RegisterState &state)
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS()) {
-        new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
-        // Set savedPrevJitTop_ to the actual jitTop_ from the runtime.
-        savedPrevJitTop_ = activation_->cx()->perThreadData->jitTop;
-        return;
-    }
-
-    MOZ_ASSERT(activation_->asJit()->isActive());
-    new (storage_.addr()) jit::JitProfilingFrameIterator(rt_, state);
+    MOZ_ASSERT(activation_->isAsmJS());
+    new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
 }
 
 void
 JS::ProfilingFrameIterator::iteratorConstruct()
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS()) {
-        new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
-        return;
-    }
-
-    MOZ_ASSERT(activation_->asJit()->isActive());
-    MOZ_ASSERT(savedPrevJitTop_ != nullptr);
-    new (storage_.addr()) jit::JitProfilingFrameIterator(savedPrevJitTop_);
+    MOZ_ASSERT(activation_->isAsmJS());
+    new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
 }
 
 void
 JS::ProfilingFrameIterator::iteratorDestroy()
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS()) {
-        asmJSIter().~AsmJSProfilingFrameIterator();
-        return;
-    }
-
-    // Save prevjitTop for later use
-    savedPrevJitTop_ = activation_->asJit()->prevJitTop();
-    jitIter().~JitProfilingFrameIterator();
+    MOZ_ASSERT(activation_->isAsmJS());
+    asmJSIter().~AsmJSProfilingFrameIterator();
 }
 
 bool
 JS::ProfilingFrameIterator::iteratorDone()
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS())
-        return asmJSIter().done();
-
-    return jitIter().done();
+    MOZ_ASSERT(activation_->isAsmJS());
+    return asmJSIter().done();
 }
 
 void *
 JS::ProfilingFrameIterator::stackAddress() const
 {
     MOZ_ASSERT(!done());
-    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    if (activation_->isAsmJS())
-        return asmJSIter().stackAddress();
-
-    return jitIter().stackAddress();
+    MOZ_ASSERT(activation_->isAsmJS());
+    return asmJSIter().stackAddress();
 }
 
-uint32_t
-JS::ProfilingFrameIterator::extractStack(Frame *frames, uint32_t offset, uint32_t end) const
-{
-    if (offset >= end)
-        return 0;
-
-    void *stackAddr = stackAddress();
-
-    if (isAsmJS()) {
-        frames[offset].kind = Frame_AsmJS;
-        frames[offset].stackAddress = stackAddr;
-        frames[offset].returnAddress = nullptr;
-        frames[offset].activation = activation_;
-        frames[offset].label = asmJSIter().label();
-        return 1;
-    }
-
-    MOZ_ASSERT(isJit());
-    void *returnAddr = jitIter().returnAddressToFp();
-
-    // Look up an entry for the return address.
-    jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
-    jit::JitcodeGlobalEntry entry;
-    mozilla::DebugOnly<bool> result = table->lookup(returnAddr, &entry, rt_);
-    MOZ_ASSERT(result);
-
-    MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());
-
-    // Dummy frames produce no stack frames.
-    if (entry.isDummy())
-        return 0;
-
-    FrameKind kind = entry.isBaseline() ? Frame_Baseline : Frame_Ion;
-
-    // Extract the stack for the entry.  Assume maximum inlining depth is <64
-    const char *labels[64];
-    uint32_t depth = entry.callStackAtAddr(rt_, returnAddr, labels, 64);
-    MOZ_ASSERT(depth < 64);
-    for (uint32_t i = 0; i < depth; i++) {
-        if (offset + i >= end)
-            return i;
-        frames[offset + i].kind = kind;
-        frames[offset + i].stackAddress = stackAddr;
-        frames[offset + i].returnAddress = returnAddr;
-        frames[offset + i].activation = activation_;
-        frames[offset + i].label = labels[i];
-    }
-    return depth;
-}
-
-bool
-JS::ProfilingFrameIterator::isAsmJS() const
+const char *
+JS::ProfilingFrameIterator::label() const
 {
     MOZ_ASSERT(!done());
-    return activation_->isAsmJS();
+
+    MOZ_ASSERT(activation_->isAsmJS());
+    return asmJSIter().label();
 }
-
-bool
-JS::ProfilingFrameIterator::isJit() const
-{
-    return activation_->isJit();
-}
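
With the backout applied, JS::ProfilingFrameIterator once again reports only asm.js activations, exposing a stack address and a static label per frame instead of extractStack(). The following is a minimal sketch of the consuming loop, assuming only a runtime pointer and a RegisterState captured at sample time; the helper name is illustrative and not part of this patch (TableTicker.cpp below reverts to essentially this shape).

// Illustrative only: walk the restored asm.js-only profiling frame iterator.
#include "js/ProfilingFrameIterator.h"

static void
CollectJSFrames(JSRuntime *rt, const JS::ProfilingFrameIterator::RegisterState &state)
{
    for (JS::ProfilingFrameIterator iter(rt, state); !iter.done(); ++iter) {
        // After this backout each frame exposes just a stack address and a
        // static label; there is no extractStack() and no jit frame kinds.
        void *stackAddress = iter.stackAddress();
        const char *label = iter.label();
        (void) stackAddress;
        (void) label;
    }
}
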
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -2,17 +2,16 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef vm_Stack_h
 #define vm_Stack_h
 
-#include "mozilla/Atomics.h"
 #include "mozilla/MemoryReporting.h"
 
 #include "jsfun.h"
 #include "jsscript.h"
 
 #include "asmjs/AsmJSFrameIterator.h"
 #include "jit/JitFrameIterator.h"
 #ifdef CHECK_OSIPOINT_REGISTERS
@@ -1126,20 +1125,16 @@ class Activation
     void unhideScriptedCaller() {
         MOZ_ASSERT(hideScriptedCallerCount_ > 0);
         hideScriptedCallerCount_--;
     }
     bool scriptedCallerIsHidden() const {
         return hideScriptedCallerCount_ > 0;
     }
 
-    static size_t offsetOfPrevProfiling() {
-        return offsetof(Activation, prevProfiling_);
-    }
-
   private:
     Activation(const Activation &other) = delete;
     void operator=(const Activation &other) = delete;
 };
 
 // This variable holds a special opcode value which is greater than all normal
 // opcodes, and is chosen such that the bitwise or of this value with any
 // opcode is this value.
@@ -1242,17 +1237,16 @@ class ActivationIterator
 namespace jit {
 
 class BailoutFrameInfo;
 
 // A JitActivation is used for frames running in Baseline or Ion.
 class JitActivation : public Activation
 {
     uint8_t *prevJitTop_;
-    JitActivation *prevJitActivation_;
     JSContext *prevJitJSContext_;
     bool active_;
 
     // Rematerialized Ion frames which have info copied out of snapshots. Maps
     // frame pointers (i.e. jitTop) to a vector of rematerializations of all
     // inline frames associated with that frame.
     //
     // This table is lazily initialized by calling getRematerializedFrame.
@@ -1272,24 +1266,16 @@ class JitActivation : public Activation
 
     // If we are bailing out from Ion, then this field should be a non-null
     // pointer which references the BailoutFrameInfo used to walk the inner
     // frames. This field is used for all newly constructed JitFrameIterators to
     // read the innermost frame information from this bailout data instead of
     // reading it from the stack.
     BailoutFrameInfo *bailoutData_;
 
-    // When profiling is enabled, these fields will be updated to reflect the
-    // last pushed frame for this activation, and if that frame has been
-    // left for a call, the native code site of the call.
-    mozilla::Atomic<void *, mozilla::Relaxed> lastProfilingFrame_;
-    mozilla::Atomic<void *, mozilla::Relaxed> lastProfilingCallSite_;
-    static_assert(sizeof(mozilla::Atomic<void *, mozilla::Relaxed>) == sizeof(void *),
-                  "Atomic should have same memory format as underlying type.");
-
     void clearRematerializedFrames();
 
 #ifdef CHECK_OSIPOINT_REGISTERS
   protected:
     // Used to verify that live registers don't change between a VM call and
     // the OsiPoint that follows it. Protected to silence Clang warning.
     uint32_t checkRegs_;
     RegisterDump regs_;
@@ -1299,30 +1285,29 @@ class JitActivation : public Activation
     explicit JitActivation(JSContext *cx, bool active = true);
     ~JitActivation();
 
     bool isActive() const {
         return active_;
     }
     void setActive(JSContext *cx, bool active = true);
 
-    bool isProfiling() const;
+    bool isProfiling() const {
+        return false;
+    }
 
     uint8_t *prevJitTop() const {
         return prevJitTop_;
     }
     static size_t offsetOfPrevJitTop() {
         return offsetof(JitActivation, prevJitTop_);
     }
     static size_t offsetOfPrevJitJSContext() {
         return offsetof(JitActivation, prevJitJSContext_);
     }
-    static size_t offsetOfPrevJitActivation() {
-        return offsetof(JitActivation, prevJitActivation_);
-    }
     static size_t offsetOfActiveUint8() {
         MOZ_ASSERT(sizeof(bool) == 1);
         return offsetof(JitActivation, active_);
     }
 
 #ifdef CHECK_OSIPOINT_REGISTERS
     void setCheckRegs(bool check) {
         checkRegs_ = check;
@@ -1373,36 +1358,16 @@ class JitActivation : public Activation
     // Return the bailout information if it is registered.
     const BailoutFrameInfo *bailoutData() const { return bailoutData_; }
 
     // Register the bailout data when it is constructed.
     void setBailoutData(BailoutFrameInfo *bailoutData);
 
     // Unregister the bailout data when the frame is reconstructed.
     void cleanBailoutData();
-
-    static size_t offsetOfLastProfilingFrame() {
-        return offsetof(JitActivation, lastProfilingFrame_);
-    }
-    void *lastProfilingFrame() {
-        return lastProfilingFrame_;
-    }
-    void setLastProfilingFrame(void *ptr) {
-        lastProfilingFrame_ = ptr;
-    }
-
-    static size_t offsetOfLastProfilingCallSite() {
-        return offsetof(JitActivation, lastProfilingCallSite_);
-    }
-    void *lastProfilingCallSite() {
-        return lastProfilingCallSite_;
-    }
-    void setLastProfilingCallSite(void *ptr) {
-        lastProfilingCallSite_ = ptr;
-    }
 };
 
 // A filtering of the ActivationIterator to only stop at JitActivations.
 class JitActivationIterator : public ActivationIterator
 {
     void settle() {
         while (!done() && !activation_->isJit())
             ActivationIterator::operator++();
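
Because JitActivation::isProfiling() is pinned back to false, only AsmJSActivations register themselves on the per-thread profiling chain, which is why the Stack.cpp asserts above can require isAsmJS() on every profiling activation. Below is a small illustrative walk of that chain, assuming a js::PerThreadData pointer is at hand; the function name is made up for the example and does not exist in the tree.

// Illustrative only: count the activations visible to the profiler after this
// backout. Every activation reached via prevProfiling() is expected to be an
// AsmJSActivation, matching the assertions restored in Stack.cpp.
#include "vm/Runtime.h"
#include "vm/Stack.h"

static size_t
CountProfilingAsmJSActivations(js::PerThreadData *pt)
{
    size_t n = 0;
    for (js::Activation *act = pt->profilingActivation(); act; act = act->prevProfiling()) {
        MOZ_ASSERT(act->isProfiling());
        MOZ_ASSERT(act->isAsmJS());
        n++;
    }
    return n;
}
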
--- a/toolkit/devtools/server/tests/unit/test_profiler_data.js
+++ b/toolkit/devtools/server/tests/unit/test_profiler_data.js
@@ -101,21 +101,22 @@ function test_data(client, actor, callba
           do_check_true(false);
           return;
         }
       }
 
       // Now check the samples. At least one sample is expected to
       // have been in the busy wait above.
       let loc = stack.name + " (" + stack.filename + ":" + funcLine + ")";
+      let line = stack.lineNumber;
 
       do_check_true(response.profile.threads[0].samples.some(sample => {
         return typeof sample.frames == "object" &&
                sample.frames.length != 0 &&
-               sample.frames.some(f => (f.location == loc));
+               sample.frames.some(f => (f.line == line) && (f.location == loc));
       }));
 
       callback();
     });
   }
 
   // Start off with a 100 millisecond delay.
   attempt(INITIAL_WAIT_TIME);
--- a/tools/profiler/TableTicker.cpp
+++ b/tools/profiler/TableTicker.cpp
@@ -386,19 +386,19 @@ void addDynamicTag(ThreadProfile &aProfi
     aProfile.addTag(ProfileEntry('d', *((void**)(&text[0]))));
   }
 }
 
 static
 void addPseudoEntry(volatile StackEntry &entry, ThreadProfile &aProfile,
                     PseudoStack *stack, void *lastpc)
 {
-  // Pseudo-frames with the BEGIN_PSEUDO_JS flag are just annotations
-  // and should not be recorded in the profile.
-  if (entry.hasFlag(StackEntry::BEGIN_PSEUDO_JS))
+  // Pseudo-frames with the ASMJS flag are just annotations and should not be
+  // recorded in the profile.
+  if (entry.hasFlag(StackEntry::ASMJS))
     return;
 
   int lineno = -1;
 
   // First entry has tagName 's' (start)
   // Check for magic pointer bit 1 to indicate copy
   const char* sampleLabel = entry.label();
   if (entry.isCopyLabel()) {
@@ -450,145 +450,143 @@ void addPseudoEntry(volatile StackEntry 
 struct NativeStack
 {
   void** pc_array;
   void** sp_array;
   size_t size;
   size_t count;
 };
 
-mozilla::Atomic<bool> WALKING_JS_STACK(false);
-
-struct AutoWalkJSStack {
-  bool walkAllowed;
-
-  AutoWalkJSStack() : walkAllowed(false) {
-    walkAllowed = WALKING_JS_STACK.compareExchange(false, true);
-  }
-
-  ~AutoWalkJSStack() {
-    if (walkAllowed)
-        WALKING_JS_STACK = false;
-  }
+struct JSFrame
+{
+    void* stackAddress;
+    const char* label;
 };
 
 static
 void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, NativeStack& aNativeStack)
 {
   PseudoStack* pseudoStack = aProfile.GetPseudoStack();
   volatile StackEntry *pseudoFrames = pseudoStack->mStack;
   uint32_t pseudoCount = pseudoStack->stackSize();
 
   // Make a copy of the JS stack into a JSFrame array. This is necessary since,
   // like the native stack, the JS stack is iterated youngest-to-oldest and we
   // need to iterate oldest-to-youngest when adding entries to aProfile.
 
+  JSFrame jsFrames[1000];
   uint32_t jsCount = 0;
-  JS::ProfilingFrameIterator::Frame jsFrames[1000];
-  {
-    AutoWalkJSStack autoWalkJSStack;
-    const uint32_t maxFrames = mozilla::ArrayLength(jsFrames);
-
-    if (aSample && pseudoStack->mRuntime && autoWalkJSStack.walkAllowed) {
-      JS::ProfilingFrameIterator::RegisterState registerState;
-      registerState.pc = aSample->pc;
-      registerState.sp = aSample->sp;
+  if (aSample && pseudoStack->mRuntime) {
+    JS::ProfilingFrameIterator::RegisterState registerState;
+    registerState.pc = aSample->pc;
+    registerState.sp = aSample->sp;
 #ifdef ENABLE_ARM_LR_SAVING
-      registerState.lr = aSample->lr;
+    registerState.lr = aSample->lr;
 #endif
 
-      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
-      for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
-        uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
-        MOZ_ASSERT(extracted <= (maxFrames - jsCount));
-        jsCount += extracted;
-        if (jsCount == maxFrames)
-          break;
-      }
+    JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
+    for (; jsCount < mozilla::ArrayLength(jsFrames) && !jsIter.done(); ++jsCount, ++jsIter) {
+      jsFrames[jsCount].stackAddress = jsIter.stackAddress();
+      jsFrames[jsCount].label = jsIter.label();
     }
   }
 
   // Start the sample with a root entry.
   aProfile.addTag(ProfileEntry('s', "(root)"));
 
   // While the pseudo-stack array is ordered oldest-to-youngest, the JS and
   // native arrays are ordered youngest-to-oldest. We must add frames to
   // aProfile oldest-to-youngest. Thus, iterate over the pseudo-stack forwards
   // and JS and native arrays backwards. Note: this means the terminating
   // condition for jsIndex and nativeIndex is being < 0.
   uint32_t pseudoIndex = 0;
   int32_t jsIndex = jsCount - 1;
   int32_t nativeIndex = aNativeStack.count - 1;
 
-  uint8_t *lastPseudoCppStackAddr = nullptr;
-
   // Iterate as long as there is at least one frame remaining.
   while (pseudoIndex != pseudoCount || jsIndex >= 0 || nativeIndex >= 0) {
-    // There are 1 to 3 frames available. Find and add the oldest.
-
-    uint8_t *pseudoStackAddr = nullptr;
-    uint8_t *jsStackAddr = nullptr;
-    uint8_t *nativeStackAddr = nullptr;
-
+    // There are 1 to 3 frames available. Find and add the oldest. Handle pseudo
+    // frames first, since there are two special cases that must be considered
+    // before everything else.
     if (pseudoIndex != pseudoCount) {
       volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
 
-      if (pseudoFrame.isCpp())
-        lastPseudoCppStackAddr = (uint8_t *) pseudoFrame.stackAddress();
-
-      // Skip any pseudo-stack JS frames which are marked isOSR
-      // Pseudostack frames are marked isOSR when the JS interpreter
-      // enters a jit frame on a loop edge (via on-stack-replacement,
-      // or OSR).  To avoid both the pseudoframe and jit frame being
-      // recorded (and showing up twice), the interpreter marks the
-      // interpreter pseudostack entry with the OSR flag to ensure that
-      // it doesn't get counted.
-      if (pseudoFrame.isJs() && pseudoFrame.isOSR()) {
+      // isJs pseudo-stack frames assume the stackAddress of the preceding isCpp
+      // pseudo-stack frame. If we arrive at an isJs pseudo frame, we have already
+      // encountered the preceding isCpp frame and found it to be the oldest, so we
+      // can assume the isJs frame is also the oldest without checking other frames.
+      if (pseudoFrame.isJs()) {
+          addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
           pseudoIndex++;
           continue;
       }
 
-      MOZ_ASSERT(lastPseudoCppStackAddr);
-      pseudoStackAddr = lastPseudoCppStackAddr;
+      // Currently, only asm.js frames use the JS stack and Ion/Baseline/Interp
+      // frames use the pseudo stack. In the optimized asm.js->Ion call path, no
+      // isCpp frame is pushed, leading to the callstack:
+      //   old | pseudo isCpp | asm.js | pseudo isJs | new
+      // Since there is no interleaving isCpp pseudo frame between the asm.js
+      // and isJs pseudo frame, the above isJs logic will render the callstack:
+      //   old | pseudo isCpp | pseudo isJs | asm.js | new
+      // which is wrong. To deal with this, a pseudo isCpp frame is pushed right
+      // before entering asm.js, flagged with StackEntry::ASMJS. When we see this
+      // flag, we first push all the asm.js frames (up to the next frame with a
+      // stackAddress) before pushing the isJs frames. There is no Ion->asm.js
+      // fast path, so we don't have to worry about asm.js->Ion->asm.js.
+      //
+      // (This and the isJs special case above can be removed once all JS
+      // execution modes switch from the pseudo stack to the JS stack.)
+      if (pseudoFrame.hasFlag(StackEntry::ASMJS)) {
+        void *stopStackAddress = nullptr;
+        for (uint32_t i = pseudoIndex + 1; i != pseudoCount; i++) {
+          if (pseudoFrames[i].isCpp()) {
+            stopStackAddress = pseudoFrames[i].stackAddress();
+            break;
+          }
+        }
+
+        if (nativeIndex >= 0) {
+          stopStackAddress = std::max(stopStackAddress, aNativeStack.sp_array[nativeIndex]);
+        }
+
+        while (jsIndex >= 0 && jsFrames[jsIndex].stackAddress > stopStackAddress) {
+          addDynamicTag(aProfile, 'c', jsFrames[jsIndex].label);
+          jsIndex--;
+        }
+
+        pseudoIndex++;
+        continue;
+      }
+
+      // Finally, consider the normal case of a plain C++ pseudo-frame.
+      if ((jsIndex < 0 || pseudoFrame.stackAddress() > jsFrames[jsIndex].stackAddress) &&
+          (nativeIndex < 0 || pseudoFrame.stackAddress() > aNativeStack.sp_array[nativeIndex]))
+      {
+        // The (C++) pseudo-frame is the oldest.
+        addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
+        pseudoIndex++;
+        continue;
+      }
     }
 
-    if (jsIndex >= 0)
-      jsStackAddr = (uint8_t *) jsFrames[jsIndex].stackAddress;
-
-    if (nativeIndex >= 0)
-      nativeStackAddr = (uint8_t *) aNativeStack.sp_array[nativeIndex];
-
-    // Sanity checks.
-    MOZ_ASSERT_IF(pseudoStackAddr, pseudoStackAddr != jsStackAddr &&
-                                   pseudoStackAddr != nativeStackAddr);
-    MOZ_ASSERT_IF(jsStackAddr, jsStackAddr != pseudoStackAddr &&
-                               jsStackAddr != nativeStackAddr);
-    MOZ_ASSERT_IF(nativeStackAddr, nativeStackAddr != pseudoStackAddr &&
-                                   nativeStackAddr != jsStackAddr);
-
-    // Check to see if pseudoStack frame is top-most.
-    if (pseudoStackAddr > jsStackAddr && pseudoStackAddr > nativeStackAddr) {
-      MOZ_ASSERT(pseudoIndex < pseudoCount);
-      volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
-      addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
-      pseudoIndex++;
-      continue;
+    if (jsIndex >= 0) {
+      // Test whether the JS frame is the oldest.
+      JSFrame &jsFrame = jsFrames[jsIndex];
+      if ((pseudoIndex == pseudoCount || jsFrame.stackAddress > pseudoFrames[pseudoIndex].stackAddress()) &&
+          (nativeIndex < 0 || jsFrame.stackAddress > aNativeStack.sp_array[nativeIndex]))
+      {
+        // The JS frame is the oldest.
+        addDynamicTag(aProfile, 'c', jsFrame.label);
+        jsIndex--;
+        continue;
+      }
     }
 
-    // Check to see if JS jit stack frame is top-most
-    if (jsStackAddr > nativeStackAddr) {
-      MOZ_ASSERT(jsIndex >= 0);
-      addDynamicTag(aProfile, 'c', jsFrames[jsIndex].label);
-      jsIndex--;
-      continue;
-    }
-
-    // If we reach here, there must be a native stack entry and it must be the
-    // greatest entry.
-    MOZ_ASSERT(nativeStackAddr);
+    // If execution reaches this point, there must be a native frame and it must
+    // be the oldest.
     MOZ_ASSERT(nativeIndex >= 0);
     aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
     nativeIndex--;
   }
 }
 
 #ifdef USE_NS_STACKWALK
 static
@@ -734,17 +732,16 @@ void doSampleStackTrace(ThreadProfile &a
     aProfile.addTag(ProfileEntry('L', (void*)aSample->lr));
 #endif
   }
 #endif
 }
 
 void TableTicker::Tick(TickSample* sample)
 {
-  // Don't allow for ticks to happen within other ticks.
   if (HasUnwinderThread()) {
     UnwinderTick(sample);
   } else {
     InplaceTick(sample);
   }
 }
 
 void TableTicker::InplaceTick(TickSample* sample)
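
The restored mergeStacksIntoProfile() above interleaves pseudo, JS, and native frames by comparing stack addresses, relying on the usual assumption that the stack grows downwards, so a numerically higher address belongs to an older frame. The standalone sketch below isolates that selection rule for the two youngest-to-oldest arrays; the types, names, and labels are illustrative and not taken from the tree.

// Illustrative sketch of the address-based ordering used above: walk two
// youngest-to-oldest frame arrays and emit entries oldest-first by picking
// the frame with the higher stack address at each step.
#include <cstdint>
#include <cstdio>

struct SampleFrame { uintptr_t stackAddress; const char *label; };

static void
EmitOldestFirst(const SampleFrame *js, int jsCount,
                const SampleFrame *native, int nativeCount)
{
    int jsIndex = jsCount - 1;          // oldest JS frame sits at the end
    int nativeIndex = nativeCount - 1;  // same for the native stack
    while (jsIndex >= 0 || nativeIndex >= 0) {
        bool takeJs = jsIndex >= 0 &&
                      (nativeIndex < 0 ||
                       js[jsIndex].stackAddress > native[nativeIndex].stackAddress);
        const SampleFrame &f = takeJs ? js[jsIndex--] : native[nativeIndex--];
        std::printf("%s\n", f.label);
    }
}
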
--- a/tools/profiler/tests/test_enterjit_osr.js
+++ b/tools/profiler/tests/test_enterjit_osr.js
@@ -1,58 +1,70 @@
 // Check that the EnterJIT frame, added by the JIT trampoline and
 // usable by a native unwinder to resume unwinding after encountering
 // JIT code, is pushed as expected.
 function run_test() {
     let p = Cc["@mozilla.org/tools/profiler;1"];
     // Just skip the test if the profiler component isn't present.
     if (!p)
-        return;
+	return;
     p = p.getService(Ci.nsIProfiler);
     if (!p)
-        return;
+	return;
 
     // This test assumes that it's starting on an empty SPS stack.
     // (Note that the other profiler tests also assume the profiler
     // isn't already started.)
     do_check_true(!p.IsActive());
 
     const ms = 5;
     p.StartProfiler(100, ms, ["js"], 1);
 
     function arbitrary_name(){
         // A frame for |arbitrary_name| has been pushed.  Do a sequence of
         // increasingly long spins until we get a sample.
         var delayMS = 5;
         while (1) {
             do_print("loop: ms = " + delayMS);
-            let then = Date.now();
-            do {
-                let n = 10000;
-                while (--n); // OSR happens here
-                // Spin in the hope of getting a sample.
-            } while (Date.now() - then < delayMS);
+	    let then = Date.now();
+	    do {
+	        let n = 10000;
+	        while (--n); // OSR happens here
+	        // Spin in the hope of getting a sample.
+	    } while (Date.now() - then < delayMS);
             let pr = p.getProfileData().threads[0].samples;
             if (pr.length > 0 || delayMS > 30000)
                 return pr;
             delayMS *= 2;
         }
     };
 
     var profile = arbitrary_name();
 
     do_check_neq(profile.length, 0);
     let stack = profile[profile.length - 1].frames.map(f => f.location);
-    do_print(stack);
+    stack = stack.slice(stack.lastIndexOf("js::RunScript") + 1);
 
-    // All we can really check here is ensure that there is exactly
-    // one arbitrary_name frame in the list.
-    var gotName = false;
-    for (var i = 0; i < stack.length; i++) {
-        if (stack[i].match(/arbitrary_name/)) {
-            do_check_eq(gotName, false);
-            gotName = true;
-        }
+    do_print(stack);
+    // This test must not break on platforms and configurations where
+    // IonMonkey isn't available or enabled.
+    if (stack.length < 2 || stack[1] != "EnterJIT") {
+	do_print("No JIT?");
+	// Try to check what we can....
+	do_check_eq(Math.min(stack.length, 1), 1);
+	let thisInterp = stack[0];
+	do_check_eq(thisInterp.split(" ")[0], "arbitrary_name");
+	if (stack.length >= 2) {
+	    let nextFrame = stack[1];
+	    do_check_neq(nextFrame.split(" ")[0], "arbitrary_name");
+	}
+    } else {
+	do_check_eq(Math.min(stack.length, 3), 3);
+	let thisInterp = stack[0];
+	let enterJit = stack[1];
+	let thisBC = stack[2];
+	do_check_eq(thisInterp.split(" ")[0], "arbitrary_name");
+	do_check_eq(enterJit, "EnterJIT");
+	do_check_eq(thisBC.split(" ")[0], "arbitrary_name");
     }
-    do_check_eq(gotName, true);
 
     p.StopProfiler();
 }