Bug 1057082 - 6/7 - Modify profiler sampler to use jit stack walking instead of pseudostack. r=jandem r=BenWa
authorKannan Vijayan <kvijayan@mozilla.com>
Thu, 15 Jan 2015 20:11:22 -0500
changeset 224227 97c0c777233db3f3eb42b3d5c2bebea9987b3fc3
parent 224226 ea8cce9f66303f415a8bc27693bca6582efd3cca
child 224228 809520c9cb0a28f5bd820c25dc531ac6d965dcd4
push id54160
push userkvijayan@mozilla.com
push dateFri, 16 Jan 2015 16:05:00 +0000
treeherdermozilla-inbound@809520c9cb0a [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersjandem, BenWa
bugs1057082
milestone38.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1057082 - 6/7 - Modify profiler sampler to use jit stack walking instead of pseudostack. r=jandem r=BenWa
js/public/ProfilingFrameIterator.h
js/public/ProfilingStack.h
js/src/asmjs/AsmJSFrameIterator.cpp
js/src/asmjs/AsmJSValidate.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonCaches.cpp
js/src/jit/IonCode.h
js/src/jit/JitFrameIterator-inl.h
js/src/jit/JitFrameIterator.h
js/src/jit/JitFrames.cpp
js/src/jit/JitcodeMap.cpp
js/src/jit/JitcodeMap.h
js/src/shell/js.cpp
js/src/vm/Interpreter.cpp
js/src/vm/Runtime.h
js/src/vm/SPSProfiler.cpp
js/src/vm/SPSProfiler.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
tools/profiler/TableTicker.cpp
--- a/js/public/ProfilingFrameIterator.h
+++ b/js/public/ProfilingFrameIterator.h
@@ -14,39 +14,63 @@
 #include "js/Utility.h"
 
 class JSAtom;
 struct JSRuntime;
 
 namespace js {
     class Activation;
     class AsmJSProfilingFrameIterator;
+    namespace jit {
+        class JitActivation;
+        class JitProfilingFrameIterator;
+    }
 }
 
 namespace JS {
 
 // This iterator can be used to walk the stack of a thread suspended at an
 // arbitrary pc. To provide acurate results, profiling must have been enabled
 // (via EnableRuntimeProfilingStack) before executing the callstack being
 // unwound.
 class JS_PUBLIC_API(ProfilingFrameIterator)
 {
+    JSRuntime *rt_;
     js::Activation *activation_;
 
+    // When moving past a JitActivation, we need to save the prevJitTop
+    // from it to use as the exit-frame pointer when the next caller jit
+    // activation (if any) comes around.
+    void *savedPrevJitTop_;
+
     static const unsigned StorageSpace = 6 * sizeof(void*);
     mozilla::AlignedStorage<StorageSpace> storage_;
     js::AsmJSProfilingFrameIterator &asmJSIter() {
         MOZ_ASSERT(!done());
+        MOZ_ASSERT(isAsmJS());
         return *reinterpret_cast<js::AsmJSProfilingFrameIterator*>(storage_.addr());
     }
     const js::AsmJSProfilingFrameIterator &asmJSIter() const {
         MOZ_ASSERT(!done());
+        MOZ_ASSERT(isAsmJS());
         return *reinterpret_cast<const js::AsmJSProfilingFrameIterator*>(storage_.addr());
     }
 
+    js::jit::JitProfilingFrameIterator &jitIter() {
+        MOZ_ASSERT(!done());
+        MOZ_ASSERT(isJit());
+        return *reinterpret_cast<js::jit::JitProfilingFrameIterator*>(storage_.addr());
+    }
+
+    const js::jit::JitProfilingFrameIterator &jitIter() const {
+        MOZ_ASSERT(!done());
+        MOZ_ASSERT(isJit());
+        return *reinterpret_cast<const js::jit::JitProfilingFrameIterator*>(storage_.addr());
+    }
+
     void settle();
 
   public:
     struct RegisterState
     {
         RegisterState() : pc(nullptr), sp(nullptr), lr(nullptr) {}
         void *pc;
         void *sp;
@@ -60,22 +84,38 @@ class JS_PUBLIC_API(ProfilingFrameIterat
 
     // Assuming the stack grows down (we do), the return value:
     //  - always points into the stack
     //  - is weakly monotonically increasing (may be equal for successive frames)
     //  - will compare greater than newer native and psuedo-stack frame addresses
     //    and less than older native and psuedo-stack frame addresses
     void *stackAddress() const;
 
-    // Return a label suitable for regexp-matching as performed by
-    // browser/devtools/profiler/cleopatra/js/parserWorker.js
-    const char *label() const;
+    enum FrameKind
+    {
+      Frame_Baseline,
+      Frame_Ion,
+      Frame_AsmJS
+    };
+
+    struct Frame
+    {
+        FrameKind kind;
+        void *stackAddress;
+        void *returnAddress;
+        void *activation;
+        const char *label;
+    };
+    uint32_t extractStack(Frame *frames, uint32_t offset, uint32_t end) const;
 
   private:
     void iteratorConstruct(const RegisterState &state);
     void iteratorConstruct();
     void iteratorDestroy();
     bool iteratorDone();
+
+    bool isAsmJS() const;
+    bool isJit() const;
 };
 
 } // namespace JS
 
 #endif  /* js_ProfilingFrameIterator_h */
--- a/js/public/ProfilingStack.h
+++ b/js/public/ProfilingStack.h
@@ -55,34 +55,39 @@ class ProfileEntry
         // a JS frame is assumed by default. You're not allowed to publicly
         // change the frame type. Instead, call `setJsFrame` or `setCppFrame`.
         IS_CPP_ENTRY = 0x01,
 
         // Indicate that copying the frame label is not necessary when taking a
         // sample of the pseudostack.
         FRAME_LABEL_COPY = 0x02,
 
-        // This ProfileEntry was pushed immediately before calling into asm.js.
-        ASMJS = 0x04,
+        // This ProfileEntry is a dummy entry indicating the start of a run
+        // of JS pseudostack entries.
+        BEGIN_PSEUDO_JS = 0x04,
+
+        // This flag is used to indicate that an interpreter JS entry has OSR-ed
+        // into baseline.
+        OSR = 0x08,
 
         // Mask for removing all flags except the category information.
-        CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~ASMJS
+        CATEGORY_MASK = ~IS_CPP_ENTRY & ~FRAME_LABEL_COPY & ~BEGIN_PSEUDO_JS & ~OSR
     };
 
     // Keep these in sync with browser/devtools/profiler/utils/global.js
     MOZ_BEGIN_NESTED_ENUM_CLASS(Category, uint32_t)
-        OTHER    = 0x08,
-        CSS      = 0x10,
-        JS       = 0x20,
-        GC       = 0x40,
-        CC       = 0x80,
-        NETWORK  = 0x100,
-        GRAPHICS = 0x200,
-        STORAGE  = 0x400,
-        EVENTS   = 0x800,
+        OTHER    = 0x10,
+        CSS      = 0x20,
+        JS       = 0x40,
+        GC       = 0x80,
+        CC       = 0x100,
+        NETWORK  = 0x200,
+        GRAPHICS = 0x400,
+        STORAGE  = 0x800,
+        EVENTS   = 0x1000,
 
         FIRST    = OTHER,
         LAST     = EVENTS
     MOZ_END_NESTED_ENUM_CLASS(Category)
 
     // All of these methods are marked with the 'volatile' keyword because SPS's
     // representation of the stack is stored such that all ProfileEntry
     // instances are volatile. These methods would not be available unless they
@@ -121,16 +126,28 @@ class ProfileEntry
 
     uint32_t flags() const volatile {
         return flags_;
     }
     uint32_t category() const volatile {
         return flags_ & CATEGORY_MASK;
     }
 
+    void setOSR() volatile {
+        MOZ_ASSERT(isJs());
+        setFlag(OSR);
+    }
+    void unsetOSR() volatile {
+        MOZ_ASSERT(isJs());
+        unsetFlag(OSR);
+    }
+    bool isOSR() const volatile {
+        return hasFlag(OSR);
+    }
+
     void *stackAddress() const volatile {
         MOZ_ASSERT(!isJs());
         return spOrScript;
     }
     JSScript *script() const volatile {
         MOZ_ASSERT(isJs());
         return (JSScript *)spOrScript;
     }
--- a/js/src/asmjs/AsmJSFrameIterator.cpp
+++ b/js/src/asmjs/AsmJSFrameIterator.cpp
@@ -409,16 +409,26 @@ js::GenerateAsmJSExitEpilogue(MacroAssem
 AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation &activation)
   : module_(&activation.module()),
     callerFP_(nullptr),
     callerPC_(nullptr),
     stackAddress_(nullptr),
     exitReason_(AsmJSExit::None),
     codeRange_(nullptr)
 {
+    // If profiling hasn't been enabled for this module, then CallerFPFromFP
+    // will be trash, so ignore the entire activation. In practice, this only
+    // happens if profiling is enabled while module->active() (in this case,
+    // profiling will be enabled when the module becomes inactive and gets
+    // called again).
+    if (!module_->profilingEnabled()) {
+        MOZ_ASSERT(done());
+        return;
+    }
+
     initFromFP(activation);
 }
 
 static inline void
 AssertMatchesCallSite(const AsmJSModule &module, const AsmJSModule::CodeRange *calleeCodeRange,
                       void *callerPC, void *callerFP, void *fp)
 {
 #ifdef DEBUG
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -8506,24 +8506,28 @@ GenerateFFIIonExit(ModuleCompiler &m, co
         //   JSContext *cx = activation->cx();
         //   Activation *act = cx->mainThread().activation();
         //   act.active_ = true;
         //   act.prevJitTop_ = cx->mainThread().jitTop;
         //   act.prevJitJSContext_ = cx->mainThread().jitJSContext;
         //   cx->mainThread().jitJSContext = cx;
         //   act.prevJitActivation_ = cx->mainThread().jitActivation;
         //   cx->mainThread().jitActivation = act;
+        //   act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
+        //   cx->mainThread().profilingActivation_ = act;
         // On the ARM store8() uses the secondScratchReg (lr) as a temp.
         size_t offsetOfActivation = offsetof(JSRuntime, mainThread) +
                                     PerThreadData::offsetOfActivation();
         size_t offsetOfJitTop = offsetof(JSRuntime, mainThread) + offsetof(PerThreadData, jitTop);
         size_t offsetOfJitJSContext = offsetof(JSRuntime, mainThread) +
                                       offsetof(PerThreadData, jitJSContext);
         size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
                                        offsetof(PerThreadData, jitActivation);
+        size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
+                                             PerThreadData::offsetOfProfilingActivation();
         masm.loadAsmJSActivation(reg0);
         masm.loadPtr(Address(reg0, AsmJSActivation::offsetOfContext()), reg3);
         masm.loadPtr(Address(reg3, JSContext::offsetOfRuntime()), reg0);
         masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
 
         //   act.active_ = true;
         masm.store8(Imm32(1), Address(reg1, JitActivation::offsetOfActiveUint8()));
 
@@ -8537,16 +8541,22 @@ GenerateFFIIonExit(ModuleCompiler &m, co
         //   cx->mainThread().jitJSContext = cx;
         masm.storePtr(reg3, Address(reg0, offsetOfJitJSContext));
 
         //   act.prevJitActivation_ = cx->mainThread().jitActivation;
         masm.loadPtr(Address(reg0, offsetOfJitActivation), reg2);
         masm.storePtr(reg2, Address(reg1, JitActivation::offsetOfPrevJitActivation()));
         //   cx->mainThread().jitActivation = act;
         masm.storePtr(reg1, Address(reg0, offsetOfJitActivation));
+
+        //   act.prevProfilingActivation_ = cx->mainThread().profilingActivation;
+        masm.loadPtr(Address(reg0, offsetOfProfilingActivation), reg2);
+        masm.storePtr(reg2, Address(reg1, Activation::offsetOfPrevProfiling()));
+        //   cx->mainThread().profilingActivation_ = act;
+        masm.storePtr(reg1, Address(reg0, offsetOfProfilingActivation));
     }
 
     // 2. Call
     AssertStackAlignment(masm, AsmJSStackAlignment);
     masm.callJitFromAsmJS(callee);
     AssertStackAlignment(masm, AsmJSStackAlignment);
 
     {
@@ -8556,39 +8566,46 @@ GenerateFFIIonExit(ModuleCompiler &m, co
         // JSReturnReg_Type, so there are five live registers.
         MOZ_ASSERT(JSReturnReg_Data == AsmJSIonExitRegReturnData);
         MOZ_ASSERT(JSReturnReg_Type == AsmJSIonExitRegReturnType);
         Register reg0 = AsmJSIonExitRegD0;
         Register reg1 = AsmJSIonExitRegD1;
         Register reg2 = AsmJSIonExitRegD2;
 
         // The following is inlined:
+        //   rt->mainThread.profilingActivation = prevProfilingActivation_;
         //   rt->mainThread.activation()->active_ = false;
         //   rt->mainThread.jitTop = prevJitTop_;
         //   rt->mainThread.jitJSContext = prevJitJSContext_;
         //   rt->mainThread.jitActivation = prevJitActivation_;
         // On the ARM store8() uses the secondScratchReg (lr) as a temp.
         size_t offsetOfActivation = offsetof(JSRuntime, mainThread) +
                                     PerThreadData::offsetOfActivation();
         size_t offsetOfJitTop = offsetof(JSRuntime, mainThread) + offsetof(PerThreadData, jitTop);
         size_t offsetOfJitJSContext = offsetof(JSRuntime, mainThread) +
                                       offsetof(PerThreadData, jitJSContext);
         size_t offsetOfJitActivation = offsetof(JSRuntime, mainThread) +
                                        offsetof(PerThreadData, jitActivation);
+        size_t offsetOfProfilingActivation = offsetof(JSRuntime, mainThread) +
+                                             PerThreadData::offsetOfProfilingActivation();
 
         masm.movePtr(AsmJSImmPtr(AsmJSImm_Runtime), reg0);
         masm.loadPtr(Address(reg0, offsetOfActivation), reg1);
 
-        //   rt->mainThread.activation()->active_ = false;
-        masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
-
         //   rt->mainThread.jitTop = prevJitTop_;
         masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitTop()), reg2);
         masm.storePtr(reg2, Address(reg0, offsetOfJitTop));
 
+        //   rt->mainThread.profilingActivation = rt->mainThread.activation()->prevProfiling_;
+        masm.loadPtr(Address(reg1, Activation::offsetOfPrevProfiling()), reg2);
+        masm.storePtr(reg2, Address(reg0, offsetOfProfilingActivation));
+
+        //   rt->mainThread.activation()->active_ = false;
+        masm.store8(Imm32(0), Address(reg1, JitActivation::offsetOfActiveUint8()));
+
         //   rt->mainThread.jitJSContext = prevJitJSContext_;
         masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitJSContext()), reg2);
         masm.storePtr(reg2, Address(reg0, offsetOfJitJSContext));
 
         //   rt->mainThread.jitActivation = prevJitActivation_;
         masm.loadPtr(Address(reg1, JitActivation::offsetOfPrevJitActivation()), reg2);
         masm.storePtr(reg2, Address(reg0, offsetOfJitActivation));
     }
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -256,33 +256,42 @@ BaselineCompiler::compile()
     // searches for the sought entry when queries are in linear order.
     bytecodeMap[script->nTypeSets()] = 0;
 
     baselineScript->copyYieldEntries(script, yieldOffsets_);
 
     if (compileDebugInstrumentation_)
         baselineScript->setHasDebugInstrumentation();
 
-    // If profiler instrumentation is enabled, register a native => bytecode mapping entry,
-    // and toggle profiling on
-    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
+    // If profiler instrumentation is enabled, toggle instrumentation on.
+    if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
+        baselineScript->toggleProfilerInstrumentation(true);
+
+    // Always register a native => bytecode mapping entry, since profiler can be
+    // turned on with baseline jitcode on stack, and baseline jitcode cannot be invalidated.
+    {
         JitSpew(JitSpew_Profiling, "Added JitcodeGlobalEntry for baseline script %s:%d (%p)",
                     script->filename(), script->lineno(), baselineScript.get());
+
+        // Generate profiling string.
+        char *str = JitcodeGlobalEntry::createScriptString(cx, script);
+        if (!str)
+            return Method_Error;
+
         JitcodeGlobalEntry::BaselineEntry entry;
-        entry.init(code->raw(), code->raw() + code->instructionsSize(), script);
+        entry.init(code->raw(), code->rawEnd(), script, str);
 
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry))
+        if (!globalTable->addEntry(entry, cx->runtime())) {
+            entry.destroy();
             return Method_Error;
+        }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
-
-        // Toggle profiler instrumentation on in the jitcode.
-        baselineScript->toggleProfilerInstrumentation(true);
     }
 
     script->setBaselineScript(cx, baselineScript.release());
 
     return Method_Compiled;
 }
 
 void
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -7175,17 +7175,32 @@ CodeGenerator::link(JSContext *cx, types
             return false;
         }
 
         // nativeToBytecodeScriptList_ is no longer needed.
         js_free(nativeToBytecodeScriptList_);
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry)) {
+        if (!globalTable->addEntry(entry, cx->runtime())) {
+            // Memory may have been allocated for the entry.
+            entry.destroy();
+            return false;
+        }
+
+        // Mark the jitcode as having a bytecode map.
+        code->setHasBytecodeMap();
+    } else {
+        // Add a dummy jitcodeGlobalTable entry.
+        JitcodeGlobalEntry::DummyEntry entry;
+        entry.init(code->raw(), code->rawEnd());
+
+        // Add entry to the global table.
+        JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
+        if (!globalTable->addEntry(entry, cx->runtime())) {
             // Memory may have been allocated for the entry.
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     }
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -651,27 +651,28 @@ JitCode::trace(JSTracer *trc)
         CompactBufferReader reader(start, start + dataRelocTableBytes_);
         MacroAssembler::TraceDataRelocations(trc, this, reader);
     }
 }
 
 void
 JitCode::finalize(FreeOp *fop)
 {
+    JSRuntime *rt = fop->runtime();
+
     // If this jitcode has a bytecode map, de-register it.
     if (hasBytecodeMap_) {
-        MOZ_ASSERT(fop->runtime()->jitRuntime()->hasJitcodeGlobalTable());
-        fop->runtime()->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw());
+        MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
+        rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
     }
 
     // Buffer can be freed at any time hereafter. Catch use-after-free bugs.
     // Don't do this if the Ion code is protected, as the signal handler will
     // deadlock trying to reacquire the interrupt lock.
-    if (fop->runtime()->jitRuntime())
-        memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
+    memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
     code_ = nullptr;
 
     // Code buffers are stored inside JSC pools.
     // Pools are refcounted. Releasing the pool may free it.
     if (pool_) {
         // Horrible hack: if we are using perf integration, we don't
         // want to reuse code addresses, so we just leak the memory instead.
         if (!PerfEnabled())
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -426,21 +426,34 @@ IonCache::linkAndAttachStub(JSContext *c
     writePerfSpewerJitCodeProfile(code, "IonCache");
 #endif
 
     attachStub(masm, attacher, code);
 
     // Add entry to native => bytecode mapping for this stub if needed.
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
         JitcodeGlobalEntry::IonCacheEntry entry;
-        entry.init(code->raw(), code->raw() + code->instructionsSize(), rejoinAddress());
+        entry.init(code->raw(), code->rawEnd(), rejoinAddress());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
-        if (!globalTable->addEntry(entry)) {
+        if (!globalTable->addEntry(entry, cx->runtime())) {
+            entry.destroy();
+            return false;
+        }
+
+        // Mark the jitcode as having a bytecode map.
+        code->setHasBytecodeMap();
+    } else {
+        JitcodeGlobalEntry::DummyEntry entry;
+        entry.init(code->raw(), code->rawEnd());
+
+        // Add entry to the global table.
+        JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
+        if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     }
 
--- a/js/src/jit/IonCode.h
+++ b/js/src/jit/IonCode.h
@@ -93,16 +93,20 @@ class JitCode : public gc::TenuredCell
 
   public:
     uint8_t *raw() const {
         return code_;
     }
     uint8_t *rawEnd() const {
         return code_ + insnSize_;
     }
+    bool containsNativePC(const void *addr) const {
+        const uint8_t *addr_u8 = (const uint8_t *) addr;
+        return raw() <= addr_u8 && addr_u8 < rawEnd();
+    }
     size_t instructionsSize() const {
         return insnSize_;
     }
     void trace(JSTracer *trc);
     void finalize(FreeOp *fop);
     void fixupAfterMovingGC() {}
     void setInvalidated() {
         invalidated_ = true;
--- a/js/src/jit/JitFrameIterator-inl.h
+++ b/js/src/jit/JitFrameIterator-inl.h
@@ -11,16 +11,29 @@
 
 #include "jit/Bailouts.h"
 #include "jit/BaselineFrame.h"
 #include "jit/JitFrames.h"
 
 namespace js {
 namespace jit {
 
+inline JitFrameLayout *
+JitProfilingFrameIterator::framePtr()
+{
+    MOZ_ASSERT(!done());
+    return (JitFrameLayout *) fp_;
+}
+
+inline JSScript *
+JitProfilingFrameIterator::frameScript()
+{
+    return ScriptFromCalleeToken(framePtr()->calleeToken());
+}
+
 inline BaselineFrame *
 JitFrameIterator::baselineFrame() const
 {
     MOZ_ASSERT(isBaselineJS());
     return (BaselineFrame *)(fp() - BaselineFrame::FramePointerOffset - BaselineFrame::Size());
 }
 
 template <typename T>
--- a/js/src/jit/JitFrameIterator.h
+++ b/js/src/jit/JitFrameIterator.h
@@ -9,16 +9,18 @@
 
 #include "jsfun.h"
 #include "jsscript.h"
 #include "jstypes.h"
 
 #include "jit/IonCode.h"
 #include "jit/Snapshots.h"
 
+#include "js/ProfilingFrameIterator.h"
+
 namespace js {
     class ActivationIterator;
 };
 
 namespace js {
 namespace jit {
 
 enum FrameType
@@ -250,16 +252,43 @@ class JitFrameIterator
 
 #ifdef DEBUG
     bool verifyReturnAddressUsingNativeToBytecodeMap();
 #else
     inline bool verifyReturnAddressUsingNativeToBytecodeMap() { return true; }
 #endif
 };
 
+class JitcodeGlobalTable;
+
+class JitProfilingFrameIterator
+{
+    uint8_t *fp_;
+    FrameType type_;
+    void *returnAddressToFp_;
+
+    inline JitFrameLayout *framePtr();
+    inline JSScript *frameScript();
+    bool tryInitWithPC(void *pc);
+    bool tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt);
+
+  public:
+    JitProfilingFrameIterator(JSRuntime *rt,
+                              const JS::ProfilingFrameIterator::RegisterState &state);
+    explicit JitProfilingFrameIterator(void *exitFrame);
+
+    void operator++();
+    bool done() const { return fp_ == nullptr; }
+
+    void *fp() const { MOZ_ASSERT(!done()); return fp_; }
+    void *stackAddress() const { return fp(); }
+    FrameType frameType() const { MOZ_ASSERT(!done()); return type_; }
+    void *returnAddressToFp() const { MOZ_ASSERT(!done()); return returnAddressToFp_; }
+};
+
 class RInstructionResults
 {
     // Vector of results of recover instructions.
     typedef mozilla::Vector<RelocatableValue, 1, SystemAllocPolicy> Values;
     mozilla::UniquePtr<Values, JS::DeletePolicy<Values> > results_;
 
     // The frame pointer is used as a key to check if the current frame already
     // bailed out.
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -2666,17 +2666,17 @@ JitFrameIterator::verifyReturnAddressUsi
 
     if (rt->isHeapMinorCollecting())
         return true;
 
     JitRuntime *jitrt = rt->jitRuntime();
 
     // Look up and print bytecode info for the native address.
     JitcodeGlobalEntry entry;
-    if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry))
+    if (!jitrt->getJitcodeGlobalTable()->lookup(returnAddressToFp_, &entry, rt))
         return true;
 
     JitSpew(JitSpew_Profiling, "Found nativeToBytecode entry for %p: %p - %p",
             returnAddressToFp_, entry.nativeStartAddr(), entry.nativeEndAddr());
 
     JitcodeGlobalEntry::BytecodeLocationVector location;
     uint32_t depth = UINT32_MAX;
     if (!entry.callStackAtAddr(rt, returnAddressToFp_, location, &depth))
@@ -2713,16 +2713,288 @@ JitFrameIterator::verifyReturnAddressUsi
                 ++inlineFrames;
         }
     }
 
     return true;
 }
 #endif // DEBUG
 
+JitProfilingFrameIterator::JitProfilingFrameIterator(
+        JSRuntime *rt, const JS::ProfilingFrameIterator::RegisterState &state)
+{
+    // If no profilingActivation is live, initialize directly to
+    // end-of-iteration state.
+    if (!rt->mainThread.profilingActivation()) {
+        type_ = JitFrame_Entry;
+        fp_ = nullptr;
+        returnAddressToFp_ = nullptr;
+        return;
+    }
+
+    MOZ_ASSERT(rt->mainThread.profilingActivation()->isJit());
+
+    JitActivation *act = rt->mainThread.profilingActivation()->asJit();
+
+    // If the top JitActivation has a null lastProfilingFrame, assume that
+    // it's a trivially empty activation, and initialize directly
+    // to end-of-iteration state.
+    if (!act->lastProfilingFrame()) {
+        type_ = JitFrame_Entry;
+        fp_ = nullptr;
+        returnAddressToFp_ = nullptr;
+        return;
+    }
+
+    // Get the fp from the current profilingActivation
+    fp_ = (uint8_t *) act->lastProfilingFrame();
+    void *lastCallSite = act->lastProfilingCallSite();
+
+    JitcodeGlobalTable *table = rt->jitRuntime()->getJitcodeGlobalTable();
+
+    // Profiler sampling must NOT be suppressed if we are here.
+    MOZ_ASSERT(rt->isProfilerSamplingEnabled());
+
+    // Since the frame is on stack, and is a jit frame, it MUST have Baseline jitcode.
+    MOZ_ASSERT(frameScript()->hasBaselineScript());
+
+    // Try initializing with sampler pc
+    if (tryInitWithPC(state.pc))
+        return;
+
+    // Try initializing with sampler pc using native=>bytecode table.
+    if (tryInitWithTable(table, state.pc, rt))
+        return;
+
+    // Try initializing with lastProfilingCallSite pc
+    if (lastCallSite) {
+        if (tryInitWithPC(lastCallSite))
+            return;
+
+        // Try initializing with lastProfilingCallSite pc using native=>bytecode table.
+        if (tryInitWithTable(table, lastCallSite, rt))
+            return;
+    }
+
+    // If nothing matches, for now just assume we are at the start of the last frame's
+    // baseline jit code.
+    type_ = JitFrame_BaselineJS;
+    returnAddressToFp_ = frameScript()->baselineScript()->method()->raw();
+    //++(*this);
+}
+
+template <typename FrameType, typename ReturnType=CommonFrameLayout*>
+inline ReturnType
+GetPreviousRawFrame(FrameType *frame)
+{
+    size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
+    return (ReturnType) (((uint8_t *) frame) + prevSize);
+}
+
+JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
+{
+    // Unwind from an exit frame: inspect its prevType to find the caller jit frame.
+    ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
+    FrameType prevType = frame->prevType();
+
+    if (prevType == JitFrame_IonJS || prevType == JitFrame_BaselineJS ||
+        prevType == JitFrame_Unwound_IonJS)
+    {
+        returnAddressToFp_ = frame->returnAddress();
+        fp_ = GetPreviousRawFrame<ExitFrameLayout, uint8_t *>(frame);
+        type_ = JitFrame_IonJS;
+        return;
+    }
+
+    if (prevType == JitFrame_BaselineStub || prevType == JitFrame_Unwound_BaselineStub) {
+        BaselineStubFrameLayout *stubFrame =
+            GetPreviousRawFrame<ExitFrameLayout, BaselineStubFrameLayout *>(frame);
+        MOZ_ASSERT_IF(prevType == JitFrame_BaselineStub,
+                      stubFrame->prevType() == JitFrame_BaselineJS);
+        MOZ_ASSERT_IF(prevType == JitFrame_Unwound_BaselineStub,
+                      stubFrame->prevType() == JitFrame_BaselineJS ||
+                      stubFrame->prevType() == JitFrame_IonJS);
+        returnAddressToFp_ = stubFrame->returnAddress();
+        fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+                + jit::BaselineFrame::FramePointerOffset;
+        type_ = JitFrame_BaselineJS;
+        return;
+    }
+
+    MOZ_CRASH("Invalid frame type prior to exit frame.");
+}
+
+bool
+JitProfilingFrameIterator::tryInitWithPC(void *pc)
+{
+    JSScript *callee = frameScript();
+
+    // Check for Ion first, since it's more likely for hot code.
+    if (callee->hasIonScript() && callee->ionScript()->method()->containsNativePC(pc)) {
+        type_ = JitFrame_IonJS;
+        returnAddressToFp_ = pc;
+        return true;
+    }
+
+    // Check for containment in Baseline jitcode second.
+    if (callee->baselineScript()->method()->containsNativePC(pc)) {
+        type_ = JitFrame_BaselineJS;
+        returnAddressToFp_ = pc;
+        return true;
+    }
+
+    return false;
+}
+
+bool
+JitProfilingFrameIterator::tryInitWithTable(JitcodeGlobalTable *table, void *pc, JSRuntime *rt)
+{
+    if (!pc)
+        return false;
+
+    JitcodeGlobalEntry entry;
+    if (!table->lookup(pc, &entry, rt))
+        return false;
+
+    JSScript *callee = frameScript();
+
+    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
+    if (entry.isIon()) {
+        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
+        if (entry.ionEntry().getScript(0) != callee)
+            return false;
+
+        type_ = JitFrame_IonJS;
+        returnAddressToFp_ = pc;
+        return true;
+    }
+
+    if (entry.isBaseline()) {
+        // If looked-up callee doesn't match frame callee, don't accept lastProfilingCallSite
+        if (entry.baselineEntry().script() != callee)
+            return false;
+
+        type_ = JitFrame_BaselineJS;
+        returnAddressToFp_ = pc;
+        return true;
+    }
+
+    if (entry.isIonCache()) {
+        JitcodeGlobalEntry ionEntry;
+        table->lookupInfallible(entry.ionCacheEntry().rejoinAddr(), &ionEntry, rt);
+        MOZ_ASSERT(ionEntry.isIon());
+
+        if (ionEntry.ionEntry().getScript(0) != callee)
+            return false;
+
+        type_ = JitFrame_IonJS;
+        returnAddressToFp_ = entry.ionCacheEntry().rejoinAddr();
+        return true;
+    }
+
+    return false;
+}
+
+void
+JitProfilingFrameIterator::operator++()
+{
+    /*
+     * fp_ points to a Baseline or Ion frame.  The possible call-stacks
+     * patterns occurring between this frame and a previous Ion or Baseline
+     * frame are as follows:
+     *
+     * <Baseline-Or-Ion>
+     * ^
+     * |
+     * ^--- Ion
+     * |
+     * ^--- Baseline Stub <---- Baseline
+     * |
+     * ^--- Argument Rectifier
+     * |    ^
+     * |    |
+     * |    ^--- Ion
+     * |    |
+     * |    ^--- Baseline Stub <---- Baseline
+     * |
+     * ^--- Entry Frame (From C++)
+     *      Exit Frame (From previous JitActivation)
+     *      ^
+     *      |
+     *      ^--- Ion
+     *      |
+     *      ^--- Baseline
+     *      |
+     *      ^--- Baseline Stub <---- Baseline
+     */
+    JitFrameLayout *frame = framePtr();
+    FrameType prevType = frame->prevType();
+
+    if (prevType == JitFrame_IonJS) {
+        returnAddressToFp_ = frame->returnAddress();
+        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
+        type_ = JitFrame_IonJS;
+        return;
+    }
+
+    if (prevType == JitFrame_BaselineJS) {
+        returnAddressToFp_ = frame->returnAddress();
+        fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(frame);
+        type_ = JitFrame_BaselineJS;
+        return;
+    }
+
+    if (prevType == JitFrame_BaselineStub) {
+        BaselineStubFrameLayout *stubFrame =
+            GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(frame);
+        MOZ_ASSERT(stubFrame->prevType() == JitFrame_BaselineJS);
+
+        returnAddressToFp_ = stubFrame->returnAddress();
+        fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+                + jit::BaselineFrame::FramePointerOffset;
+        type_ = JitFrame_BaselineJS;
+        return;
+    }
+
+    if (prevType == JitFrame_Rectifier) {
+        RectifierFrameLayout *rectFrame =
+            GetPreviousRawFrame<JitFrameLayout, RectifierFrameLayout *>(frame);
+        FrameType rectPrevType = rectFrame->prevType();
+
+        if (rectPrevType == JitFrame_IonJS) {
+            returnAddressToFp_ = rectFrame->returnAddress();
+            fp_ = GetPreviousRawFrame<JitFrameLayout, uint8_t *>(rectFrame);
+            type_ = JitFrame_IonJS;
+            return;
+        }
+
+        if (rectPrevType == JitFrame_BaselineStub) {
+            BaselineStubFrameLayout *stubFrame =
+                GetPreviousRawFrame<JitFrameLayout, BaselineStubFrameLayout *>(rectFrame);
+            returnAddressToFp_ = stubFrame->returnAddress();
+            fp_ = ((uint8_t *) stubFrame->reverseSavedFramePtr())
+                    + jit::BaselineFrame::FramePointerOffset;
+            type_ = JitFrame_BaselineJS;
+            return;
+        }
+
+        MOZ_CRASH("Bad frame type prior to rectifier frame.");
+    }
+
+    if (prevType == JitFrame_Entry) {
+        // No previous frame, set to null to indicate that JitFrameIterator is done()
+        returnAddressToFp_ = nullptr;
+        fp_ = nullptr;
+        type_ = JitFrame_Entry;
+        return;
+    }
+
+    MOZ_CRASH("Bad frame type.");
+}
+
 JitFrameLayout *
 InvalidationBailoutStack::fp() const
 {
     return (JitFrameLayout *) (sp() + ionScript_->frameSize());
 }
 
 void
 InvalidationBailoutStack::checkInvariants() const
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -2,20 +2,25 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/JitcodeMap.h"
 
 #include "mozilla/DebugOnly.h"
+#include "mozilla/UniquePtr.h"
+#include "jsprf.h"
+
 #include "jit/BaselineJIT.h"
 #include "jit/JitSpewer.h"
 
 #include "js/Vector.h"
+#include "vm/SPSProfiler.h"
+#include "jsscriptinlines.h"
 
 namespace js {
 namespace jit {
 
 bool
 JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                               BytecodeLocationVector &results,
                                               uint32_t *depth) const
@@ -46,33 +51,69 @@ JitcodeGlobalEntry::IonEntry::callStackA
         jsbytecode *pc = script->offsetToPC(pcOffset);
         if (!results.append(BytecodeLocation(script, pc)))
             return false;
     }
 
     return true;
 }
 
+uint32_t
+JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
+                                              const char **results,
+                                              uint32_t maxResults) const
+{
+    MOZ_ASSERT(containsPointer(ptr));
+    MOZ_ASSERT(maxResults >= 1);
+    uint32_t ptrOffset = reinterpret_cast<uint8_t *>(ptr) -
+                         reinterpret_cast<uint8_t *>(nativeStartAddr());
+
+    uint32_t regionIdx = regionTable()->findRegionEntry(ptrOffset);
+    MOZ_ASSERT(regionIdx < regionTable()->numRegions());
+
+    JitcodeRegionEntry region = regionTable()->regionEntry(regionIdx);
+
+    JitcodeRegionEntry::ScriptPcIterator locationIter = region.scriptPcIterator();
+    MOZ_ASSERT(locationIter.hasMore());
+    uint32_t count = 0;
+    while (locationIter.hasMore()) {
+        uint32_t scriptIdx, pcOffset;
+
+        locationIter.readNext(&scriptIdx, &pcOffset);
+        MOZ_ASSERT(getStr(scriptIdx));
+
+        results[count++] = getStr(scriptIdx);
+        if (count >= maxResults)
+            break;
+    }
+
+    return count;
+}
+
 void
 JitcodeGlobalEntry::IonEntry::destroy()
 {
     // The region table is stored at the tail of the compacted data,
     // which means the start of the region table is a pointer to
     // the _middle_ of the memory space allocated for it.
     //
     // When freeing it, obtain the payload start pointer first.
     if (regionTable_)
         js_free((void*) (regionTable_->payloadStart()));
     regionTable_ = nullptr;
 
-    // Single tag is just pointer-to-jsscript, no memory to free.
-    ScriptListTag tag = scriptListTag();
-    if (tag > Single)
-        js_free(scriptListPointer());
-    scriptList_ = 0;
+    // Free the profiling strings owned by the scriptList.
+    for (uint32_t i = 0; i < scriptList_->size; i++)  {
+        js_free(scriptList_->pairs[i].str);
+        scriptList_->pairs[i].str = nullptr;
+    }
+
+    // Free the script list
+    js_free(scriptList_);
+    scriptList_ = nullptr;
 }
 
 bool
 JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                                    BytecodeLocationVector &results,
                                                    uint32_t *depth) const
 {
     MOZ_ASSERT(containsPointer(ptr));
@@ -83,32 +124,70 @@ JitcodeGlobalEntry::BaselineEntry::callS
     if (!results.append(BytecodeLocation(script_, pc)))
         return false;
 
     *depth = 1;
 
     return true;
 }
 
+uint32_t
+JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
+                                                   const char **results,
+                                                   uint32_t maxResults) const
+{
+    MOZ_ASSERT(containsPointer(ptr));
+    MOZ_ASSERT(script_->hasBaselineScript());
+    MOZ_ASSERT(maxResults >= 1);
+
+    results[0] = str();
+    return 1;
+}
+
+void
+JitcodeGlobalEntry::BaselineEntry::destroy()
+{
+    if (!str_)
+        return;
+    js_free(str_);
+    str_ = nullptr;
+}
+
 bool
 JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
                                                    BytecodeLocationVector &results,
                                                    uint32_t *depth) const
 {
     MOZ_ASSERT(containsPointer(ptr));
 
     // There must exist an entry for the rejoin addr if this entry exists.
     JitRuntime *jitrt = rt->jitRuntime();
     JitcodeGlobalEntry entry;
-    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry);
+    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
     MOZ_ASSERT(entry.isIon());
 
     return entry.callStackAtAddr(rt, rejoinAddr(), results, depth);
 }
 
+uint32_t
+JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime *rt, void *ptr,
+                                                   const char **results,
+                                                   uint32_t maxResults) const
+{
+    MOZ_ASSERT(containsPointer(ptr));
+
+    // There must exist an entry for the rejoin addr if this entry exists.
+    JitRuntime *jitrt = rt->jitRuntime();
+    JitcodeGlobalEntry entry;
+    jitrt->getJitcodeGlobalTable()->lookupInfallible(rejoinAddr(), &entry, rt);
+    MOZ_ASSERT(entry.isIon());
+
+    return entry.callStackAtAddr(rt, rejoinAddr(), results, maxResults);
+}
+
 
 static int ComparePointers(const void *a, const void *b) {
     const uint8_t *a_ptr = reinterpret_cast<const uint8_t *>(a);
     const uint8_t *b_ptr = reinterpret_cast<const uint8_t *>(b);
     if (a_ptr < b_ptr)
         return -1;
     if (a_ptr > b_ptr)
         return 1;
@@ -139,44 +218,145 @@ JitcodeGlobalEntry::compare(const Jitcod
         // query ptr > entry
         return flip * 1;
     }
 
     // query ptr < entry
     return flip * -1;
 }
 
+/* static */ char *
+JitcodeGlobalEntry::createScriptString(JSContext *cx, JSScript *script, size_t *length)
+{
+    // If the script has a function, try calculating its name.
+    bool hasName = false;
+    size_t nameLength = 0;
+    mozilla::UniquePtr<char, JS::FreePolicy> nameStr = nullptr;
+    JSFunction *func = script->functionDelazifying();
+    if (func && func->displayAtom()) {
+        JSAtom *atom = func->displayAtom();
+
+        JS::AutoCheckCannotGC nogc;
+        nameStr = mozilla::UniquePtr<char, JS::FreePolicy>(
+            atom->hasLatin1Chars() ?
+                JS::CharsToNewUTF8CharsZ(cx, atom->latin1Range(nogc)).c_str()
+              : JS::CharsToNewUTF8CharsZ(cx, atom->twoByteRange(nogc)).c_str());
+        if (!nameStr)
+            return nullptr;
+
+        nameLength = strlen(nameStr.get());
+        hasName = true;
+    }
+
+    // Calculate filename length
+    const char *filenameStr = script->filename() ? script->filename() : "(null)";
+    size_t filenameLength = strlen(filenameStr);
+
+    // Calculate lineno length
+    bool hasLineno = false;
+    size_t linenoLength = 0;
+    char linenoStr[15];
+    if (hasName || (script->functionNonDelazifying() || script->isForEval())) {
+        linenoLength = JS_snprintf(linenoStr, 15, "%u", (unsigned) script->lineno());
+        hasLineno = true;
+    }
+
+    // Full profile string for scripts with functions is:
+    //      FuncName (FileName:Lineno)
+    // Full profile string for scripts without functions is:
+    //      FileName:Lineno
+    // Full profile string for scripts without functions and without linenos is:
+    //      FileName
+
+    // Calculate full string length.
+    size_t fullLength = 0;
+    if (hasName) {
+        MOZ_ASSERT(hasLineno);
+        fullLength = nameLength + 2 + filenameLength + 1 + linenoLength + 1;
+    } else if (hasLineno) {
+        fullLength = filenameLength + 1 + linenoLength;
+    } else {
+        fullLength = filenameLength;
+    }
+
+    // Allocate string.
+    char *str = cx->pod_malloc<char>(fullLength + 1);
+    if (!str)
+        return nullptr;
+
+    size_t cur = 0;
+
+    // Fill string with func name if needed.
+    if (hasName) {
+        memcpy(str + cur, nameStr.get(), nameLength);
+        cur += nameLength;
+        str[cur++] = ' ';
+        str[cur++] = '(';
+    }
+
+    // Fill string with filename chars.
+    memcpy(str + cur, filenameStr, filenameLength);
+    cur += filenameLength;
+
+    // Fill lineno chars.
+    if (hasLineno) {
+        str[cur++] = ':';
+        memcpy(str + cur, linenoStr, linenoLength);
+        cur += linenoLength;
+    }
+
+    // Terminal ')' if necessary.
+    if (hasName)
+        str[cur++] = ')';
+
+    MOZ_ASSERT(cur == fullLength);
+    str[cur] = 0;
+
+    if (length)
+        *length = fullLength;
+
+    return str;
+}
+
 bool
-JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result)
+JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
 {
     MOZ_ASSERT(result);
 
     // Construct a JitcodeGlobalEntry::Query to do the lookup
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
+
+    // Lookups on the tree do mutation (splaying).  Suppress sampling while this is happening.
+    AutoSuppressProfilerSampling suppressSampling(rt);
     return tree_.contains(query, result);
 }
 
 void
-JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result)
+JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
 {
-    mozilla::DebugOnly<bool> success = lookup(ptr, result);
+    mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
     MOZ_ASSERT(success);
 }
 
 bool
-JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry)
+JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
 {
-    // Should only add Main entries for now.
-    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache());
+    // Suppress profiler sampling while table is being mutated.
+    AutoSuppressProfilerSampling suppressSampling(rt);
+
+    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
     return tree_.insert(entry);
 }
 
 void
-JitcodeGlobalTable::removeEntry(void *startAddr)
+JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
 {
+    // Suppress profiler sampling while table is being mutated.
+    AutoSuppressProfilerSampling suppressSampling(rt);
+
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
     JitcodeGlobalEntry result;
     mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
     MOZ_ASSERT(success);
 
     // Destroy entry before removing it from tree.
     result.destroy();
     tree_.remove(query);
@@ -551,44 +731,71 @@ JitcodeRegionEntry::findPcOffset(uint32_
         if (queryNativeOffset <= curNativeOffset + nativeDelta)
             break;
         curNativeOffset += nativeDelta;
         curPcOffset += pcDelta;
     }
     return curPcOffset;
 }
 
+typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
+
+struct AutoFreeProfilingStrings {
+    ProfilingStringVector &profilingStrings_;
+    bool keep_;
+    explicit AutoFreeProfilingStrings(ProfilingStringVector &vec)
+        : profilingStrings_(vec),
+          keep_(false)
+    {}
+
+    void keepStrings() { keep_ = true; }
+
+    ~AutoFreeProfilingStrings() {
+        if (keep_)
+            return;
+        for (size_t i = 0; i < profilingStrings_.length(); i++)
+            js_free(profilingStrings_[i]);
+    }
+};
+
 bool
 JitcodeIonTable::makeIonEntry(JSContext *cx, JitCode *code,
                               uint32_t numScripts, JSScript **scripts,
                               JitcodeGlobalEntry::IonEntry &out)
 {
     typedef JitcodeGlobalEntry::IonEntry::SizedScriptList SizedScriptList;
 
     MOZ_ASSERT(numScripts > 0);
 
-    if (numScripts == 1) {
-        out.init(code->raw(), code->rawEnd(), scripts[0], this);
-        return true;
-    }
+    // Create a profiling string for each script, collected in a vector.
+    typedef js::Vector<char *, 32, SystemAllocPolicy> ProfilingStringVector;
+
+    ProfilingStringVector profilingStrings;
+    if (!profilingStrings.reserve(numScripts))
+        return false;
 
-    if (numScripts < uint32_t(JitcodeGlobalEntry::IonEntry::Multi)) {
-        JSScript **scriptsCopy = cx->pod_malloc<JSScript *>(numScripts);
-        if (!scriptsCopy)
+    AutoFreeProfilingStrings autoFreeProfilingStrings(profilingStrings);
+    for (uint32_t i = 0; i < numScripts; i++) {
+        char *str = JitcodeGlobalEntry::createScriptString(cx, scripts[i]);
+        if (!str)
             return false;
-        memcpy(scriptsCopy, scripts, sizeof(JSScript *) * numScripts);
-        out.init(code->raw(), code->rawEnd(), numScripts, scriptsCopy, this);
-        return true;
+        if (!profilingStrings.append(str))
+            return false;
     }
 
     // Create SizedScriptList
     void *mem = (void *)cx->pod_malloc<uint8_t>(SizedScriptList::AllocSizeFor(numScripts));
     if (!mem)
         return false;
-    SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts);
+
+    // The SizedScriptList below takes ownership of the strings, so keep them on destruct.
+    autoFreeProfilingStrings.keepStrings();
+
+    SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
+                                                            &profilingStrings[0]);
     out.init(code->raw(), code->rawEnd(), scriptList, this);
     return true;
 }
 
 uint32_t
 JitcodeIonTable::findRegionEntry(uint32_t nativeOffset) const
 {
     static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
--- a/js/src/jit/JitcodeMap.h
+++ b/js/src/jit/JitcodeMap.h
@@ -36,27 +36,29 @@ class JitcodeRegionEntry;
 class JitcodeGlobalEntry
 {
   public:
     enum Kind {
         INVALID = 0,
         Ion,
         Baseline,
         IonCache,
+        Dummy,
         Query,
         LIMIT
     };
     JS_STATIC_ASSERT(LIMIT <= 8);
 
     struct BytecodeLocation {
         JSScript *script;
         jsbytecode *pc;
         BytecodeLocation(JSScript *script, jsbytecode *pc) : script(script), pc(pc) {}
     };
     typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
+    typedef Vector<const char *, 0, SystemAllocPolicy> ProfileStringVector;
 
     struct BaseEntry
     {
         void *nativeStartAddr_;
         void *nativeEndAddr_;
         Kind kind_;
 
         void init() {
@@ -92,130 +94,71 @@ class JitcodeGlobalEntry
         }
         bool containsPointer(void *ptr) const {
             return startsBelowPointer(ptr) && endsAbovePointer(ptr);
         }
     };
 
     struct IonEntry : public BaseEntry
     {
-        uintptr_t scriptList_;
-
         // regionTable_ points to the start of the region table within the
         // packed map for compile represented by this entry.  Since the
         // region table occurs at the tail of the memory region, this pointer
         // points somewhere inside the region memory space, and not to the start
         // of the memory space.
         JitcodeIonTable *regionTable_;
 
-        static const unsigned LowBits = 3;
-        static const uintptr_t LowMask = (uintptr_t(1) << LowBits) - 1;
-
-        enum ScriptListTag {
-            Single = 0,
-            Multi = 7
+        struct ScriptNamePair {
+            JSScript *script;
+            char *str;
         };
 
         struct SizedScriptList {
             uint32_t size;
-            JSScript *scripts[0];
-            SizedScriptList(uint32_t sz, JSScript **scr) : size(sz) {
-                for (uint32_t i = 0; i < size; i++)
-                    scripts[i] = scr[i];
+            ScriptNamePair pairs[0];
+            SizedScriptList(uint32_t sz, JSScript **scrs, char **strs) : size(sz) {
+                for (uint32_t i = 0; i < size; i++) {
+                    pairs[i].script = scrs[i];
+                    pairs[i].str = strs[i];
+                }
             }
 
             static uint32_t AllocSizeFor(uint32_t nscripts) {
-                return sizeof(SizedScriptList) + (nscripts * sizeof(JSScript *));
+                return sizeof(SizedScriptList) + (nscripts * sizeof(ScriptNamePair));
             }
         };
 
-        void init(void *nativeStartAddr, void *nativeEndAddr,
-                  JSScript *script, JitcodeIonTable *regionTable)
-        {
-            MOZ_ASSERT((uintptr_t(script) & LowMask) == 0);
-            MOZ_ASSERT(script);
-            MOZ_ASSERT(regionTable);
-            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
-            scriptList_ = uintptr_t(script);
-            regionTable_ = regionTable;
-        }
-
-        void init(void *nativeStartAddr, void *nativeEndAddr,
-                  unsigned numScripts, JSScript **scripts, JitcodeIonTable *regionTable)
-        {
-            MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
-            MOZ_ASSERT(numScripts >= 1);
-            MOZ_ASSERT(numScripts <= 6);
-            MOZ_ASSERT(scripts);
-            MOZ_ASSERT(regionTable);
-            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
-            scriptList_ = uintptr_t(scripts) | numScripts;
-            regionTable_ = regionTable;
-        }
+        SizedScriptList *scriptList_;
 
         void init(void *nativeStartAddr, void *nativeEndAddr,
-                  SizedScriptList *scripts, JitcodeIonTable *regionTable)
+                  SizedScriptList *scriptList, JitcodeIonTable *regionTable)
         {
-            MOZ_ASSERT((uintptr_t(scripts) & LowMask) == 0);
-            MOZ_ASSERT(scripts->size > 6);
-            MOZ_ASSERT(scripts);
+            MOZ_ASSERT(scriptList);
             MOZ_ASSERT(regionTable);
-
             BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
-            scriptList_ = uintptr_t(scripts) | uintptr_t(Multi);
             regionTable_ = regionTable;
+            scriptList_ = scriptList;
         }
 
-        ScriptListTag scriptListTag() const {
-            return static_cast<ScriptListTag>(scriptList_ & LowMask);
-        }
-        void *scriptListPointer() const {
-            return reinterpret_cast<void *>(scriptList_ & ~LowMask);
-        }
-
-        JSScript *singleScript() const {
-            MOZ_ASSERT(scriptListTag() == Single);
-            return reinterpret_cast<JSScript *>(scriptListPointer());
-        }
-        JSScript **rawScriptArray() const {
-            MOZ_ASSERT(scriptListTag() < Multi);
-            return reinterpret_cast<JSScript **>(scriptListPointer());
-        }
         SizedScriptList *sizedScriptList() const {
-            MOZ_ASSERT(scriptListTag() == Multi);
-            return reinterpret_cast<SizedScriptList *>(scriptListPointer());
+            return scriptList_;
         }
 
         unsigned numScripts() const {
-            ScriptListTag tag = scriptListTag();
-            if (tag == Single)
-                return 1;
-
-            if (tag < Multi) {
-                MOZ_ASSERT(int(tag) >= 2);
-                return static_cast<unsigned>(tag);
-            }
-
-            return sizedScriptList()->size;
+            return scriptList_->size;
         }
 
         JSScript *getScript(unsigned idx) const {
             MOZ_ASSERT(idx < numScripts());
-
-            ScriptListTag tag = scriptListTag();
-
-            if (tag == Single)
-                return singleScript();
+            return sizedScriptList()->pairs[idx].script;
+        }
 
-            if (tag < Multi) {
-                MOZ_ASSERT(int(tag) >= 2);
-                return rawScriptArray()[idx];
-            }
-
-            return sizedScriptList()->scripts[idx];
+        const char *getStr(unsigned idx) const {
+            MOZ_ASSERT(idx < numScripts());
+            return sizedScriptList()->pairs[idx].str;
         }
 
         void destroy();
 
         JitcodeIonTable *regionTable() const {
             return regionTable_;
         }
 
@@ -225,37 +168,49 @@ class JitcodeGlobalEntry
                 if (getScript(i) == script)
                     return i;
             }
             return -1;
         }
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
+
+        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
+                                 uint32_t maxResults) const;
     };
 
     struct BaselineEntry : public BaseEntry
     {
         JSScript *script_;
+        const char *str_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script)
+        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
         {
             MOZ_ASSERT(script != nullptr);
             BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
             script_ = script;
+            str_ = str;
         }
 
         JSScript *script() const {
             return script_;
         }
 
-        void destroy() {}
+        const char *str() const {
+            return str_;
+        }
+
+        void destroy();
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
+
+        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
+                                 uint32_t maxResults) const;
     };
 
     struct IonCacheEntry : public BaseEntry
     {
         void *rejoinAddr_;
 
         void init(void *nativeStartAddr, void *nativeEndAddr, void *rejoinAddr)
         {
@@ -267,16 +222,43 @@ class JitcodeGlobalEntry
         void *rejoinAddr() const {
             return rejoinAddr_;
         }
 
         void destroy() {}
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const;
+
+        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
+                                 uint32_t maxResults) const;
+    };
+
+    // Dummy entries are created for jitcode generated when profiling is not turned on,
+    // so that they have representation in the global table if they are on the
+    // stack when profiling is enabled.
+    struct DummyEntry : public BaseEntry
+    {
+        void init(void *nativeStartAddr, void *nativeEndAddr) {
+            BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
+        }
+
+        void destroy() {}
+
+        bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
+                             uint32_t *depth) const
+        {
+            return true;
+        }
+
+        uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
+                                 uint32_t maxResults) const
+        {
+            return 0;
+        }
     };
 
     // QueryEntry is never stored in the table, just used for queries
     // where an instance of JitcodeGlobalEntry is required to do tree
     // lookups.
     struct QueryEntry : public BaseEntry
     {
         void init(void *addr) {
@@ -299,16 +281,19 @@ class JitcodeGlobalEntry
         IonEntry ion_;
 
         // Baseline jitcode.
         BaselineEntry baseline_;
 
         // IonCache stubs.
         IonCacheEntry ionCache_;
 
+        // Dummy entries.
+        DummyEntry dummy_;
+
         // When doing queries on the SplayTree for particular addresses,
         // the query addresses are representd using a QueryEntry.
         QueryEntry query_;
     };
 
   public:
     JitcodeGlobalEntry() {
         base_.init();
@@ -321,16 +306,20 @@ class JitcodeGlobalEntry
     explicit JitcodeGlobalEntry(const BaselineEntry &baseline) {
         baseline_ = baseline;
     }
 
     explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache) {
         ionCache_ = ionCache;
     }
 
+    explicit JitcodeGlobalEntry(const DummyEntry &dummy) {
+        dummy_ = dummy;
+    }
+
     explicit JitcodeGlobalEntry(const QueryEntry &query) {
         query_ = query;
     }
 
     static JitcodeGlobalEntry MakeQuery(void *ptr) {
         QueryEntry query;
         query.init(ptr);
         return JitcodeGlobalEntry(query);
@@ -342,16 +331,19 @@ class JitcodeGlobalEntry
             ionEntry().destroy();
             break;
           case Baseline:
             baselineEntry().destroy();
             break;
           case IonCache:
             ionCacheEntry().destroy();
             break;
+          case Dummy:
+            dummyEntry().destroy();
+            break;
           case Query:
             queryEntry().destroy();
             break;
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
     }
 
@@ -392,32 +384,39 @@ class JitcodeGlobalEntry
         return kind() == Ion;
     }
     bool isBaseline() const {
         return kind() == Baseline;
     }
     bool isIonCache() const {
         return kind() == IonCache;
     }
+    bool isDummy() const {
+        return kind() == Dummy;
+    }
     bool isQuery() const {
         return kind() == Query;
     }
 
     IonEntry &ionEntry() {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     BaselineEntry &baselineEntry() {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
     IonCacheEntry &ionCacheEntry() {
         MOZ_ASSERT(isIonCache());
         return ionCache_;
     }
+    DummyEntry &dummyEntry() {
+        MOZ_ASSERT(isDummy());
+        return dummy_;
+    }
     QueryEntry &queryEntry() {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
     const IonEntry &ionEntry() const {
         MOZ_ASSERT(isIon());
         return ion_;
@@ -425,16 +424,20 @@ class JitcodeGlobalEntry
     const BaselineEntry &baselineEntry() const {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
     const IonCacheEntry &ionCacheEntry() const {
         MOZ_ASSERT(isIonCache());
         return ionCache_;
     }
+    const DummyEntry &dummyEntry() const {
+        MOZ_ASSERT(isDummy());
+        return dummy_;
+    }
     const QueryEntry &queryEntry() const {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
     // Read the inline call stack at a given point in the native code and append into
     // the given vector.  Innermost (script,pc) pair will be appended first, and
     // outermost appended last.
@@ -445,28 +448,51 @@ class JitcodeGlobalEntry
     {
         switch (kind()) {
           case Ion:
             return ionEntry().callStackAtAddr(rt, ptr, results, depth);
           case Baseline:
             return baselineEntry().callStackAtAddr(rt, ptr, results, depth);
           case IonCache:
             return ionCacheEntry().callStackAtAddr(rt, ptr, results, depth);
+          case Dummy:
+            return dummyEntry().callStackAtAddr(rt, ptr, results, depth);
+          default:
+            MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
+        }
+        return false;
+    }
+
+    uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
+                             uint32_t maxResults) const
+    {
+        switch (kind()) {
+          case Ion:
+            return ionEntry().callStackAtAddr(rt, ptr, results, maxResults);
+          case Baseline:
+            return baselineEntry().callStackAtAddr(rt, ptr, results, maxResults);
+          case IonCache:
+            return ionCacheEntry().callStackAtAddr(rt, ptr, results, maxResults);
+          case Dummy:
+            return dummyEntry().callStackAtAddr(rt, ptr, results, maxResults);
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
         return false;
     }
 
     // Figure out the number of the (JSScript *, jsbytecode *) pairs that are active
     // at this location.
     uint32_t lookupInlineCallDepth(void *ptr);
 
     // Compare two global entries.
     static int compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalEntry &ent2);
+
+    // Compute a profiling string for a given script.
+    static char *createScriptString(JSContext *cx, JSScript *script, size_t *length=nullptr);
 };
 
 /*
  * Global table of JitcodeGlobalEntry values sorted by native address range.
  */
 class JitcodeGlobalTable
 {
   public:
@@ -487,33 +513,36 @@ class JitcodeGlobalTable
         tree_.disableCheckCoherency();
     }
     ~JitcodeGlobalTable() {}
 
     bool empty() const {
         return tree_.empty();
     }
 
-    bool lookup(void *ptr, JitcodeGlobalEntry *result);
-    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result);
+    bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
+    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
 
-    bool addEntry(const JitcodeGlobalEntry::IonEntry &entry) {
-        return addEntry(JitcodeGlobalEntry(entry));
+    bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
+        return addEntry(JitcodeGlobalEntry(entry), rt);
     }
-    bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry) {
-        return addEntry(JitcodeGlobalEntry(entry));
+    bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry, JSRuntime *rt) {
+        return addEntry(JitcodeGlobalEntry(entry), rt);
     }
-    bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry) {
-        return addEntry(JitcodeGlobalEntry(entry));
+    bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry, JSRuntime *rt) {
+        return addEntry(JitcodeGlobalEntry(entry), rt);
+    }
+    bool addEntry(const JitcodeGlobalEntry::DummyEntry &entry, JSRuntime *rt) {
+        return addEntry(JitcodeGlobalEntry(entry), rt);
     }
 
-    void removeEntry(void *startAddr);
+    void removeEntry(void *startAddr, JSRuntime *rt);
 
   private:
-    bool addEntry(const JitcodeGlobalEntry &entry);
+    bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
 };
 
 
 /*
  * Container class for main jitcode table.
  * The Region table's memory is structured as follows:
  *
  *      +------------------------------------------------+   |
@@ -810,18 +839,18 @@ class JitcodeIonTable
   public:
     explicit JitcodeIonTable(uint32_t numRegions)
       : numRegions_(numRegions)
     {
         for (uint32_t i = 0; i < numRegions; i++)
             regionOffsets_[i] = 0;
     }
 
-    bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts, JSScript **scripts,
-                      JitcodeGlobalEntry::IonEntry &out);
+    bool makeIonEntry(JSContext *cx, JitCode *code, uint32_t numScripts,
+                      JSScript **scripts, JitcodeGlobalEntry::IonEntry &out);
 
     uint32_t numRegions() const {
         return numRegions_;
     }
 
     uint32_t regionOffset(uint32_t regionIndex) const {
         MOZ_ASSERT(regionIndex < numRegions());
         return regionOffsets_[regionIndex];
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -4159,18 +4159,20 @@ SingleStepCallback(void *arg, jit::Simul
     state.lr = (void*)sim->get_register(jit::Simulator::lr);
 
     DebugOnly<void*> lastStackAddress = nullptr;
     StackChars stack;
     for (JS::ProfilingFrameIterator i(rt, state); !i.done(); ++i) {
         MOZ_ASSERT(i.stackAddress() != nullptr);
         MOZ_ASSERT(lastStackAddress <= i.stackAddress());
         lastStackAddress = i.stackAddress();
-        const char *label = i.label();
-        stack.append(label, strlen(label));
+        JS::ProfilingFrameIterator::Frame frames[16];
+        uint32_t nframes = i.extractStack(frames, 0, 16);
+        for (uint32_t i = 0; i < nframes; i++)
+            stack.append(frames[i].label, strlen(frames[i].label));
     }
 
     // Only append the stack if it differs from the last stack.
     if (stacks.empty() ||
         stacks.back().length() != stack.length() ||
         !PodEqual(stacks.back().begin(), stack.begin(), stack.length()))
     {
         stacks.append(Move(stack));
--- a/js/src/vm/Interpreter.cpp
+++ b/js/src/vm/Interpreter.cpp
@@ -1679,17 +1679,22 @@ END_CASE(JSOP_LABEL)
 CASE(JSOP_LOOPENTRY)
     // Attempt on-stack replacement with Baseline code.
     if (jit::IsBaselineEnabled(cx)) {
         jit::MethodStatus status = jit::CanEnterBaselineAtBranch(cx, REGS.fp(), false);
         if (status == jit::Method_Error)
             goto error;
         if (status == jit::Method_Compiled) {
             bool wasSPS = REGS.fp()->hasPushedSPSFrame();
-            jit::JitExecStatus maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
+
+            jit::JitExecStatus maybeOsr;
+            {
+                SPSBaselineOSRMarker spsOSR(cx->runtime(), wasSPS);
+                maybeOsr = jit::EnterBaselineAtBranch(cx, REGS.fp(), REGS.pc);
+            }
 
             // We failed to call into baseline at all, so treat as an error.
             if (maybeOsr == jit::JitExec_Aborted)
                 goto error;
 
             interpReturnOK = (maybeOsr == jit::JitExec_Ok);
 
             // Pop the SPS frame pushed by the interpreter.  (The compiled version of the
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -592,16 +592,19 @@ class PerThreadData : public PerThreadDa
     }
 
     js::Activation *profilingActivation() const {
         return profilingActivation_;
     }
     void *addressOfProfilingActivation() {
         return (void*) &profilingActivation_;
     }
+    static unsigned offsetOfProfilingActivation() {
+        return offsetof(PerThreadData, profilingActivation_);
+    }
 
     js::AsmJSActivation *asmJSActivationStack() const {
         return asmJSActivationStack_;
     }
     static js::AsmJSActivation *innermostAsmJSActivation() {
         PerThreadData *ptd = TlsPerThreadData.get();
         return ptd ? ptd->asmJSActivationStack_ : nullptr;
     }
@@ -1024,17 +1027,17 @@ struct JSRuntime : public JS::shadow::Ru
     /* SPS profiling metadata */
     js::SPSProfiler     spsProfiler;
 
     /* If true, new scripts must be created with PC counter information. */
     bool                profilingScripts;
 
     /* Whether sampling should be enabled or not. */
   private:
-    bool                suppressProfilerSampling;
+    mozilla::Atomic<bool, mozilla::SequentiallyConsistent> suppressProfilerSampling;
 
   public:
     bool isProfilerSamplingEnabled() const {
         return !suppressProfilerSampling;
     }
     void disableProfilerSampling() {
         suppressProfilerSampling = true;
     }
--- a/js/src/vm/SPSProfiler.cpp
+++ b/js/src/vm/SPSProfiler.cpp
@@ -9,16 +9,17 @@
 #include "mozilla/DebugOnly.h"
 
 #include "jsnum.h"
 #include "jsprf.h"
 #include "jsscript.h"
 
 #include "jit/BaselineFrame.h"
 #include "jit/BaselineJIT.h"
+#include "jit/JitFrameIterator.h"
 #include "jit/JitFrames.h"
 #include "vm/StringBuffer.h"
 
 using namespace js;
 
 using mozilla::DebugOnly;
 
 SPSProfiler::SPSProfiler(JSRuntime *rt)
@@ -204,28 +205,28 @@ SPSProfiler::exit(JSScript *script, JSFu
         MOZ_ASSERT(strcmp((const char*) stack_[*size_].label(), str) == 0);
         stack_[*size_].setLabel(nullptr);
         stack_[*size_].setPC(nullptr);
     }
 #endif
 }
 
 void
-SPSProfiler::enterAsmJS(const char *string, void *sp)
+SPSProfiler::beginPseudoJS(const char *string, void *sp)
 {
     /* these operations cannot be re-ordered, so volatile-ize operations */
     volatile ProfileEntry *stack = stack_;
     volatile uint32_t *size = size_;
     uint32_t current = *size;
 
-    MOZ_ASSERT(enabled());
+    MOZ_ASSERT(installed());
     if (current < max_) {
         stack[current].setLabel(string);
         stack[current].setCppFrame(sp, 0);
-        stack[current].setFlag(ProfileEntry::ASMJS);
+        stack[current].setFlag(ProfileEntry::BEGIN_PSEUDO_JS);
     }
     *size = current + 1;
 }
 
 void
 SPSProfiler::push(const char *string, void *sp, JSScript *script, jsbytecode *pc, bool copy)
 {
     MOZ_ASSERT_IF(sp != nullptr, script == nullptr && pc == nullptr);
@@ -327,27 +328,61 @@ SPSEntryMarker::SPSEntryMarker(JSRuntime
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     if (!profiler->installed()) {
         profiler = nullptr;
         return;
     }
     size_before = *profiler->size_;
     // We want to push a CPP frame so the profiler can correctly order JS and native stacks.
-    profiler->push("js::RunScript", this, nullptr, nullptr, /* copy = */ false);
+    profiler->beginPseudoJS("js::RunScript", this);
     profiler->push("js::RunScript", nullptr, script, script->code(), /* copy = */ false);
 }
 
 SPSEntryMarker::~SPSEntryMarker()
 {
-    if (profiler != nullptr) {
-        profiler->pop();
-        profiler->pop();
-        MOZ_ASSERT(size_before == *profiler->size_);
+    if (profiler == nullptr)
+        return;
+
+    profiler->pop();
+    profiler->endPseudoJS();
+    MOZ_ASSERT(size_before == *profiler->size_);
+}
+
+SPSBaselineOSRMarker::SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
+                                           MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
+    : profiler(&rt->spsProfiler)
+{
+    MOZ_GUARD_OBJECT_NOTIFIER_INIT;
+    if (!hasSPSFrame || !profiler->enabled()) {
+        profiler = nullptr;
+        return;
     }
+
+    size_before = profiler->size();
+    if (profiler->size() == 0)
+        return;
+
+    ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
+    MOZ_ASSERT(entry.isJs());
+    entry.setOSR();
+}
+
+SPSBaselineOSRMarker::~SPSBaselineOSRMarker()
+{
+    if (profiler == nullptr)
+        return;
+
+    MOZ_ASSERT(size_before == *profiler->size_);
+    if (profiler->size() == 0)
+        return;
+
+    ProfileEntry &entry = profiler->stack()[profiler->size() - 1];
+    MOZ_ASSERT(entry.isJs());
+    entry.unsetOSR();
 }
 
 JS_FRIEND_API(jsbytecode*)
 ProfileEntry::pc() const volatile
 {
     MOZ_ASSERT(isJs());
     return lineOrPc == NullPCOffset ? nullptr : script()->offsetToPC(lineOrPc);
 }
@@ -379,18 +414,16 @@ js::RegisterRuntimeProfilingEventMarker(
 }
 
 JS_FRIEND_API(jsbytecode*)
 js::ProfilingGetPC(JSRuntime *rt, JSScript *script, void *ip)
 {
     return rt->spsProfiler.ipToPC(script, size_t(ip));
 }
 
-
-
 AutoSuppressProfilerSampling::AutoSuppressProfilerSampling(JSContext *cx
                                                            MOZ_GUARD_OBJECT_NOTIFIER_PARAM_IN_IMPL)
   : rt_(cx->runtime()),
     previouslyEnabled_(rt_->isProfilerSamplingEnabled())
 {
     MOZ_GUARD_OBJECT_NOTIFIER_INIT;
     if (previouslyEnabled_)
         rt_->disableProfilerSampling();
@@ -414,33 +447,12 @@ AutoSuppressProfilerSampling::~AutoSuppr
 
 void *
 js::GetTopProfilingJitFrame(uint8_t *exitFramePtr)
 {
     // For null exitFrame, there is no previous exit frame, just return.
     if (!exitFramePtr)
         return nullptr;
 
-    jit::ExitFrameLayout *exitFrame = (jit::ExitFrameLayout *) exitFramePtr;
-    size_t prevSize = exitFrame->prevFrameLocalSize();
-    jit::FrameType prevType = exitFrame->prevType();
-
-    uint8_t *prev = exitFramePtr + (jit::ExitFrameLayout::Size() + prevSize);
-
-    // previous frame type must be one of IonJS, BaselineJS, or BaselineStub,
-    // or unwound variants thereof.
-    switch (prevType) {
-      case jit::JitFrame_IonJS:
-      case jit::JitFrame_Unwound_IonJS:
-      case jit::JitFrame_BaselineJS:
-        return prev;
-
-      case jit::JitFrame_BaselineStub:
-      case jit::JitFrame_Unwound_BaselineStub: {
-        void *framePtr = ((jit::BaselineStubFrameLayout *) prev)->reverseSavedFramePtr();
-        return ((uint8_t *) framePtr) + jit::BaselineFrame::FramePointerOffset;
-      }
-
-      default:
-        MOZ_CRASH("unknown callee token type");
-        return nullptr;
-    }
+    jit::JitProfilingFrameIterator iter(exitFramePtr);
+    MOZ_ASSERT(!iter.done());
+    return iter.fp();
 }
--- a/js/src/vm/SPSProfiler.h
+++ b/js/src/vm/SPSProfiler.h
@@ -107,20 +107,22 @@
 namespace js {
 
 class ProfileEntry;
 
 typedef HashMap<JSScript*, const char*, DefaultHasher<JSScript*>, SystemAllocPolicy>
         ProfileStringMap;
 
 class SPSEntryMarker;
+class SPSBaselineOSRMarker;
 
 class SPSProfiler
 {
     friend class SPSEntryMarker;
+    friend class SPSBaselineOSRMarker;
 
     JSRuntime            *rt;
     ProfileStringMap     strings;
     ProfileEntry         *stack_;
     uint32_t             *size_;
     uint32_t             max_;
     bool                 slowAssertions;
     uint32_t             enabled_;
@@ -146,16 +148,17 @@ class SPSProfiler
     }
 
     ProfileEntry **addressOfStack() {
         return &stack_;
     }
 
     uint32_t *sizePointer() { return size_; }
     uint32_t maxSize() { return max_; }
+    uint32_t size() { MOZ_ASSERT(installed()); return *size_; }
     ProfileEntry *stack() { return stack_; }
 
     /* management of whether instrumentation is on or off */
     bool enabled() { MOZ_ASSERT_IF(enabled_, installed()); return enabled_; }
     bool installed() { return stack_ != nullptr && size_ != nullptr; }
     void enable(bool enabled);
     void enableSlowAssertions(bool enabled) { slowAssertions = enabled; }
     bool slowAssertionsEnabled() { return slowAssertions; }
@@ -175,18 +178,18 @@ class SPSProfiler
         if (enabled() && *size_ - 1 < max_) {
             MOZ_ASSERT(*size_ > 0);
             MOZ_ASSERT(stack_[*size_ - 1].script() == script);
             stack_[*size_ - 1].setPC(pc);
         }
     }
 
     /* Enter asm.js code */
-    void enterAsmJS(const char *string, void *sp);
-    void exitAsmJS() { pop(); }
+    void beginPseudoJS(const char *string, void *sp);
+    void endPseudoJS() { pop(); }
 
     jsbytecode *ipToPC(JSScript *script, size_t ip) { return nullptr; }
 
     void setProfilingStack(ProfileEntry *stack, uint32_t *size, uint32_t max);
     void setEventMarker(void (*fn)(const char *));
     const char *profileString(JSScript *script, JSFunction *maybeFun);
     void onScriptFinalized(JSScript *script);
 
@@ -267,16 +270,34 @@ class SPSEntryMarker
 
   private:
     SPSProfiler *profiler;
     mozilla::DebugOnly<uint32_t> size_before;
     MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 /*
+ * This class is used in the interpreter to bound regions where the baseline JIT
+ * is being entered via OSR.  It marks the current top pseudostack entry as
+ * OSR'd.
+ */
+class SPSBaselineOSRMarker
+{
+  public:
+    explicit SPSBaselineOSRMarker(JSRuntime *rt, bool hasSPSFrame
+                                  MOZ_GUARD_OBJECT_NOTIFIER_PARAM);
+    ~SPSBaselineOSRMarker();
+
+  private:
+    SPSProfiler *profiler;
+    mozilla::DebugOnly<uint32_t> size_before;
+    MOZ_DECL_USE_GUARD_OBJECT_NOTIFIER
+};
+
+/*
  * SPS is the profiling backend used by the JS engine to enable time profiling.
  * More information can be found in vm/SPSProfiler.{h,cpp}. This class manages
  * the instrumentation portion of the profiling for JIT code.
  *
  * The instrumentation tracks entry into functions, leaving those functions via
  * a function call, reentering the functions from a function call, and exiting
  * the functions from returning. This class also handles inline frames and
  * manages the instrumentation which needs to be attached to them as well.
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -9,16 +9,17 @@
 #include "mozilla/PodOperations.h"
 
 #include "jscntxt.h"
 
 #include "asmjs/AsmJSFrameIterator.h"
 #include "asmjs/AsmJSModule.h"
 #include "gc/Marking.h"
 #include "jit/BaselineFrame.h"
+#include "jit/JitcodeMap.h"
 #include "jit/JitCompartment.h"
 #include "js/GCAPI.h"
 #include "vm/Opcodes.h"
 
 #include "jit/JitFrameIterator-inl.h"
 #include "vm/Interpreter-inl.h"
 #include "vm/Probes-inl.h"
 #include "vm/ScopeObject-inl.h"
@@ -1387,42 +1388,54 @@ jit::JitActivation::JitActivation(JSCont
     lastProfilingCallSite_(nullptr)
 {
     if (active) {
         prevJitTop_ = cx->mainThread().jitTop;
         prevJitJSContext_ = cx->mainThread().jitJSContext;
         prevJitActivation_ = cx->mainThread().jitActivation;
         cx->mainThread().jitJSContext = cx;
         cx->mainThread().jitActivation = this;
+
+        registerProfiling();
     } else {
         prevJitTop_ = nullptr;
         prevJitJSContext_ = nullptr;
         prevJitActivation_ = nullptr;
     }
 }
 
 jit::JitActivation::~JitActivation()
 {
     if (active_) {
+        if (isProfiling())
+            unregisterProfiling();
+
         cx_->perThreadData->jitTop = prevJitTop_;
         cx_->perThreadData->jitJSContext = prevJitJSContext_;
         cx_->perThreadData->jitActivation = prevJitActivation_;
     }
 
     // All reocvered value are taken from activation during the bailout.
     MOZ_ASSERT(ionRecovery_.empty());
 
     // The BailoutFrameInfo should have unregistered itself from the
     // JitActivations.
     MOZ_ASSERT(!bailoutData_);
 
     clearRematerializedFrames();
     js_delete(rematerializedFrames_);
 }
 
+bool
+jit::JitActivation::isProfiling() const
+{
+    // All JitActivations can be profiled.
+    return true;
+}
+
 void
 jit::JitActivation::setBailoutData(jit::BailoutFrameInfo *bailoutData)
 {
     MOZ_ASSERT(!bailoutData_);
     bailoutData_ = bailoutData;
 }
 
 void
@@ -1437,28 +1450,35 @@ jit::JitActivation::cleanBailoutData()
 // and disable activation instruction sequences.
 void
 jit::JitActivation::setActive(JSContext *cx, bool active)
 {
     // Only allowed to deactivate/activate if activation is top.
     // (Not tested and will probably fail in other situations.)
     MOZ_ASSERT(cx->mainThread().activation_ == this);
     MOZ_ASSERT(active != active_);
-    active_ = active;
 
     if (active) {
+        *((volatile bool *) active_) = true;
         prevJitTop_ = cx->mainThread().jitTop;
         prevJitJSContext_ = cx->mainThread().jitJSContext;
         prevJitActivation_ = cx->mainThread().jitActivation;
         cx->mainThread().jitJSContext = cx;
         cx->mainThread().jitActivation = this;
+
+        registerProfiling();
+
     } else {
+        unregisterProfiling();
+
         cx->mainThread().jitTop = prevJitTop_;
         cx->mainThread().jitJSContext = prevJitJSContext_;
         cx->mainThread().jitActivation = prevJitActivation_;
+
+        *((volatile bool *) active_) = false;
     }
 }
 
 void
 jit::JitActivation::removeRematerializedFrame(uint8_t *top)
 {
     if (!rematerializedFrames_)
         return;
@@ -1595,20 +1615,18 @@ AsmJSActivation::AsmJSActivation(JSConte
     resumePC_(nullptr),
     fp_(nullptr),
     exitReason_(AsmJSExit::None)
 {
     (void) entrySP_;  // squelch GCC warning
 
     // NB: this is a hack and can be removed once Ion switches over to
     // JS::ProfilingFrameIterator.
-    if (cx->runtime()->spsProfiler.enabled()) {
+    if (cx->runtime()->spsProfiler.enabled())
         profiler_ = &cx->runtime()->spsProfiler;
-        profiler_->enterAsmJS("asm.js code :0", this);
-    }
 
     prevAsmJSForModule_ = module.activation();
     module.activation() = this;
 
     prevAsmJS_ = cx->mainThread().asmJSActivationStack_;
     cx->mainThread().asmJSActivationStack_ = this;
 
     // Now that the AsmJSActivation is fully initialized, make it visible to
@@ -1616,19 +1634,16 @@ AsmJSActivation::AsmJSActivation(JSConte
     registerProfiling();
 }
 
 AsmJSActivation::~AsmJSActivation()
 {
     // Hide this activation from the profiler before is is destroyed.
     unregisterProfiling();
 
-    if (profiler_)
-        profiler_->exitAsmJS();
-
     MOZ_ASSERT(fp_ == nullptr);
 
     MOZ_ASSERT(module_.activation() == this);
     module_.activation() = prevAsmJSForModule_;
 
     JSContext *cx = cx_->asJSContext();
     MOZ_ASSERT(cx->mainThread().asmJSActivationStack_ == this);
 
@@ -1658,17 +1673,23 @@ Activation::registerProfiling()
     cx_->perThreadData->profilingActivation_ = this;
 }
 
 void
 Activation::unregisterProfiling()
 {
     MOZ_ASSERT(isProfiling());
     MOZ_ASSERT(cx_->perThreadData->profilingActivation_ == this);
-    cx_->perThreadData->profilingActivation_ = prevProfiling_;
+
+    // There may be a non-active jit activation in the linked list.  Skip past it.
+    Activation *prevProfiling = prevProfiling_;
+    while (prevProfiling && prevProfiling->isJit() && !prevProfiling->asJit()->isActive())
+        prevProfiling = prevProfiling->prevProfiling_;
+
+    cx_->perThreadData->profilingActivation_ = prevProfiling;
 }
 
 ActivationIterator::ActivationIterator(JSRuntime *rt)
   : jitTop_(rt->mainThread.jitTop),
     activation_(rt->mainThread.activation_)
 {
     settle();
 }
@@ -1696,24 +1717,34 @@ ActivationIterator::settle()
 {
     // Stop at the next active activation. No need to update jitTop_, since
     // we don't iterate over an active jit activation.
     while (!done() && activation_->isJit() && !activation_->asJit()->isActive())
         activation_ = activation_->prev();
 }
 
 JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
-  : activation_(rt->mainThread.profilingActivation())
+  : rt_(rt),
+    activation_(rt->mainThread.profilingActivation()),
+    savedPrevJitTop_(nullptr)
 {
     if (!activation_)
         return;
 
+    // If profiler sampling is not enabled, skip.
+    if (!rt_->isProfilerSamplingEnabled()) {
+        activation_ = nullptr;
+        return;
+    }
+
     MOZ_ASSERT(activation_->isProfiling());
 
-    static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace, "Need to increase storage");
+    static_assert(sizeof(AsmJSProfilingFrameIterator) <= StorageSpace &&
+                  sizeof(jit::JitProfilingFrameIterator) <= StorageSpace,
+                  "Need to increase storage");
 
     iteratorConstruct(state);
     settle();
 }
 
 JS::ProfilingFrameIterator::~ProfilingFrameIterator()
 {
     if (!done()) {
@@ -1721,79 +1752,172 @@ JS::ProfilingFrameIterator::~ProfilingFr
         iteratorDestroy();
     }
 }
 
 void
 JS::ProfilingFrameIterator::operator++()
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    ++asmJSIter();
+    if (activation_->isAsmJS()) {
+        ++asmJSIter();
+        settle();
+        return;
+    }
+
+    ++jitIter();
     settle();
 }
 
 void
 JS::ProfilingFrameIterator::settle()
 {
     while (iteratorDone()) {
         iteratorDestroy();
         activation_ = activation_->prevProfiling();
+
+        // Skip past any non-active jit activations in the list.
+        while (activation_ && activation_->isJit() && !activation_->asJit()->isActive())
+            activation_ = activation_->prevProfiling();
+
         if (!activation_)
             return;
         iteratorConstruct();
     }
 }
 
 void
 JS::ProfilingFrameIterator::iteratorConstruct(const RegisterState &state)
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
+    if (activation_->isAsmJS()) {
+        new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS(), state);
+        // Set savedPrevJitTop_ to the current jitTop from the per-thread data.
+        savedPrevJitTop_ = activation_->cx()->perThreadData->jitTop;
+        return;
+    }
+
+    MOZ_ASSERT(activation_->asJit()->isActive());
+    new (storage_.addr()) jit::JitProfilingFrameIterator(rt_, state);
 }
 
 void
 JS::ProfilingFrameIterator::iteratorConstruct()
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
+    if (activation_->isAsmJS()) {
+        new (storage_.addr()) AsmJSProfilingFrameIterator(*activation_->asAsmJS());
+        return;
+    }
+
+    MOZ_ASSERT(activation_->asJit()->isActive());
+    MOZ_ASSERT(savedPrevJitTop_ != nullptr);
+    new (storage_.addr()) jit::JitProfilingFrameIterator(savedPrevJitTop_);
 }
 
 void
 JS::ProfilingFrameIterator::iteratorDestroy()
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    asmJSIter().~AsmJSProfilingFrameIterator();
+    if (activation_->isAsmJS()) {
+        asmJSIter().~AsmJSProfilingFrameIterator();
+        return;
+    }
+
+    // Save prevJitTop_ for constructing the jit iterator on the next (older) activation.
+    savedPrevJitTop_ = activation_->asJit()->prevJitTop();
+    jitIter().~JitProfilingFrameIterator();
 }
 
 bool
 JS::ProfilingFrameIterator::iteratorDone()
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    return asmJSIter().done();
+    if (activation_->isAsmJS())
+        return asmJSIter().done();
+
+    return jitIter().done();
 }
 
 void *
 JS::ProfilingFrameIterator::stackAddress() const
 {
     MOZ_ASSERT(!done());
+    MOZ_ASSERT(activation_->isAsmJS() || activation_->isJit());
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    return asmJSIter().stackAddress();
+    if (activation_->isAsmJS())
+        return asmJSIter().stackAddress();
+
+    return jitIter().stackAddress();
 }
 
-const char *
-JS::ProfilingFrameIterator::label() const
+uint32_t
+JS::ProfilingFrameIterator::extractStack(Frame *frames, uint32_t offset, uint32_t end) const
+{
+    if (offset >= end)
+        return 0;
+
+    void *stackAddr = stackAddress();
+
+    if (isAsmJS()) {
+        frames[offset].kind = Frame_AsmJS;
+        frames[offset].stackAddress = stackAddr;
+        frames[offset].returnAddress = nullptr;
+        frames[offset].activation = activation_;
+        frames[offset].label = asmJSIter().label();
+        return 1;
+    }
+
+    MOZ_ASSERT(isJit());
+    void *returnAddr = jitIter().returnAddressToFp();
+
+    // Look up an entry for the return address.
+    jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
+    jit::JitcodeGlobalEntry entry;
+    mozilla::DebugOnly<bool> result = table->lookup(returnAddr, &entry, rt_);
+    MOZ_ASSERT(result);
+
+    MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());
+
+    // Dummy frames produce no stack frames.
+    if (entry.isDummy())
+        return 0;
+
+    FrameKind kind = entry.isBaseline() ? Frame_Baseline : Frame_Ion;
+
+    // Extract the stack for the entry.  Assume maximum inlining depth is <64
+    const char *labels[64];
+    uint32_t depth = entry.callStackAtAddr(rt_, returnAddr, labels, 64);
+    MOZ_ASSERT(depth < 64);
+    for (uint32_t i = 0; i < depth; i++) {
+        if (offset + i >= end)
+            return i;
+        frames[offset + i].kind = kind;
+        frames[offset + i].stackAddress = stackAddr;
+        frames[offset + i].returnAddress = returnAddr;
+        frames[offset + i].activation = activation_;
+        frames[offset + i].label = labels[i];
+    }
+    return depth;
+}
+
+bool
+JS::ProfilingFrameIterator::isAsmJS() const
 {
     MOZ_ASSERT(!done());
+    return activation_->isAsmJS();
+}
 
-    MOZ_ASSERT(activation_->isAsmJS());
-    return asmJSIter().label();
+bool
+JS::ProfilingFrameIterator::isJit() const
+{
+    return activation_->isJit();
 }
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1126,16 +1126,20 @@ class Activation
     void unhideScriptedCaller() {
         MOZ_ASSERT(hideScriptedCallerCount_ > 0);
         hideScriptedCallerCount_--;
     }
     bool scriptedCallerIsHidden() const {
         return hideScriptedCallerCount_ > 0;
     }
 
+    static size_t offsetOfPrevProfiling() {
+        return offsetof(Activation, prevProfiling_);
+    }
+
   private:
     Activation(const Activation &other) = delete;
     void operator=(const Activation &other) = delete;
 };
 
 // This variable holds a special opcode value which is greater than all normal
 // opcodes, and is chosen such that the bitwise or of this value with any
 // opcode is this value.
@@ -1295,19 +1299,17 @@ class JitActivation : public Activation
     explicit JitActivation(JSContext *cx, bool active = true);
     ~JitActivation();
 
     bool isActive() const {
         return active_;
     }
     void setActive(JSContext *cx, bool active = true);
 
-    bool isProfiling() const {
-        return false;
-    }
+    bool isProfiling() const;
 
     uint8_t *prevJitTop() const {
         return prevJitTop_;
     }
     static size_t offsetOfPrevJitTop() {
         return offsetof(JitActivation, prevJitTop_);
     }
     static size_t offsetOfPrevJitJSContext() {
--- a/tools/profiler/TableTicker.cpp
+++ b/tools/profiler/TableTicker.cpp
@@ -386,19 +386,19 @@ void addDynamicTag(ThreadProfile &aProfi
     aProfile.addTag(ProfileEntry('d', *((void**)(&text[0]))));
   }
 }
 
 static
 void addPseudoEntry(volatile StackEntry &entry, ThreadProfile &aProfile,
                     PseudoStack *stack, void *lastpc)
 {
-  // Pseudo-frames with the ASMJS flag are just annotations and should not be
-  // recorded in the profile.
-  if (entry.hasFlag(StackEntry::ASMJS))
+  // Pseudo-frames with the BEGIN_PSEUDO_JS flag are just annotations
+  // and should not be recorded in the profile.
+  if (entry.hasFlag(StackEntry::BEGIN_PSEUDO_JS))
     return;
 
   int lineno = -1;
 
   // First entry has tagName 's' (start)
   // Check for magic pointer bit 1 to indicate copy
   const char* sampleLabel = entry.label();
   if (entry.isCopyLabel()) {
@@ -450,143 +450,145 @@ void addPseudoEntry(volatile StackEntry 
 struct NativeStack
 {
   void** pc_array;
   void** sp_array;
   size_t size;
   size_t count;
 };
 
-struct JSFrame
-{
-    void* stackAddress;
-    const char* label;
+mozilla::Atomic<bool> WALKING_JS_STACK(false);
+
+struct AutoWalkJSStack {
+  bool walkAllowed;
+
+  AutoWalkJSStack() : walkAllowed(false) {
+    walkAllowed = WALKING_JS_STACK.compareExchange(false, true);
+  }
+
+  ~AutoWalkJSStack() {
+    if (walkAllowed)
+        WALKING_JS_STACK = false;
+  }
 };
 
 static
 void mergeStacksIntoProfile(ThreadProfile& aProfile, TickSample* aSample, NativeStack& aNativeStack)
 {
   PseudoStack* pseudoStack = aProfile.GetPseudoStack();
   volatile StackEntry *pseudoFrames = pseudoStack->mStack;
   uint32_t pseudoCount = pseudoStack->stackSize();
 
   // Make a copy of the JS stack into a JSFrame array. This is necessary since,
   // like the native stack, the JS stack is iterated youngest-to-oldest and we
   // need to iterate oldest-to-youngest when adding entries to aProfile.
 
-  JSFrame jsFrames[1000];
   uint32_t jsCount = 0;
-  if (aSample && pseudoStack->mRuntime) {
-    JS::ProfilingFrameIterator::RegisterState registerState;
-    registerState.pc = aSample->pc;
-    registerState.sp = aSample->sp;
+  JS::ProfilingFrameIterator::Frame jsFrames[1000];
+  {
+    AutoWalkJSStack autoWalkJSStack;
+    const uint32_t maxFrames = mozilla::ArrayLength(jsFrames);
+
+    if (aSample && pseudoStack->mRuntime && autoWalkJSStack.walkAllowed) {
+      JS::ProfilingFrameIterator::RegisterState registerState;
+      registerState.pc = aSample->pc;
+      registerState.sp = aSample->sp;
 #ifdef ENABLE_ARM_LR_SAVING
-    registerState.lr = aSample->lr;
+      registerState.lr = aSample->lr;
 #endif
 
-    JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
-    for (; jsCount < mozilla::ArrayLength(jsFrames) && !jsIter.done(); ++jsCount, ++jsIter) {
-      jsFrames[jsCount].stackAddress = jsIter.stackAddress();
-      jsFrames[jsCount].label = jsIter.label();
+      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
+      for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
+        uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
+        MOZ_ASSERT(extracted <= (maxFrames - jsCount));
+        jsCount += extracted;
+        if (jsCount == maxFrames)
+          break;
+      }
     }
   }
 
   // Start the sample with a root entry.
   aProfile.addTag(ProfileEntry('s', "(root)"));
 
   // While the pseudo-stack array is ordered oldest-to-youngest, the JS and
   // native arrays are ordered youngest-to-oldest. We must add frames to
   // aProfile oldest-to-youngest. Thus, iterate over the pseudo-stack forwards
   // and JS and native arrays backwards. Note: this means the terminating
   // condition jsIndex and nativeIndex is being < 0.
   uint32_t pseudoIndex = 0;
   int32_t jsIndex = jsCount - 1;
   int32_t nativeIndex = aNativeStack.count - 1;
 
+  uint8_t *lastPseudoCppStackAddr = nullptr;
+
   // Iterate as long as there is at least one frame remaining.
   while (pseudoIndex != pseudoCount || jsIndex >= 0 || nativeIndex >= 0) {
-    // There are 1 to 3 frames available. Find and add the oldest. Handle pseudo
-    // frames first, since there are two special cases that must be considered
-    // before everything else.
+    // There are 1 to 3 frames available. Find and add the oldest.
+
+    uint8_t *pseudoStackAddr = nullptr;
+    uint8_t *jsStackAddr = nullptr;
+    uint8_t *nativeStackAddr = nullptr;
+
     if (pseudoIndex != pseudoCount) {
       volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
 
-      // isJs pseudo-stack frames assume the stackAddress of the preceding isCpp
-      // pseudo-stack frame. If we arrive at an isJs pseudo frame, we've already
-      // encountered the preceding isCpp stack frame and it was oldest, we can
-      // assume the isJs frame is oldest without checking other frames.
-      if (pseudoFrame.isJs()) {
-          addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
+      if (pseudoFrame.isCpp())
+        lastPseudoCppStackAddr = (uint8_t *) pseudoFrame.stackAddress();
+
+      // Skip any pseudo-stack JS frames which are marked isOSR
+      // Pseudostack frames are marked isOSR when the JS interpreter
+      // enters a jit frame on a loop edge (via on-stack-replacement,
+      // or OSR).  To avoid both the pseudoframe and jit frame being
+      // recorded (and showing up twice), the interpreter marks the
+      // interpreter pseudostack entry with the OSR flag to ensure that
+      // it doesn't get counted.
+      if (pseudoFrame.isJs() && pseudoFrame.isOSR()) {
           pseudoIndex++;
           continue;
       }
 
-      // Currently, only asm.js frames use the JS stack and Ion/Baseline/Interp
-      // frames use the pseudo stack. In the optimized asm.js->Ion call path, no
-      // isCpp frame is pushed, leading to the callstack:
-      //   old | pseudo isCpp | asm.js | pseudo isJs | new
-      // Since there is no interleaving isCpp pseudo frame between the asm.js
-      // and isJs pseudo frame, the above isJs logic will render the callstack:
-      //   old | pseudo isCpp | pseudo isJs | asm.js | new
-      // which is wrong. To deal with this, a pseudo isCpp frame pushed right
-      // before entering asm.js flagged with StackEntry::ASMJS. When we see this
-      // flag, we first push all the asm.js frames (up to the next frame with a
-      // stackAddress) before pushing the isJs frames. There is no Ion->asm.js
-      // fast path, so we don't have to worry about asm.js->Ion->asm.js.
-      //
-      // (This and the above isJs special cases can be removed once all JS
-      // execution modes switch from the pseudo stack to the JS stack.)
-      if (pseudoFrame.hasFlag(StackEntry::ASMJS)) {
-        void *stopStackAddress = nullptr;
-        for (uint32_t i = pseudoIndex + 1; i != pseudoCount; i++) {
-          if (pseudoFrames[i].isCpp()) {
-            stopStackAddress = pseudoFrames[i].stackAddress();
-            break;
-          }
-        }
-
-        if (nativeIndex >= 0) {
-          stopStackAddress = std::max(stopStackAddress, aNativeStack.sp_array[nativeIndex]);
-        }
-
-        while (jsIndex >= 0 && jsFrames[jsIndex].stackAddress > stopStackAddress) {
-          addDynamicTag(aProfile, 'c', jsFrames[jsIndex].label);
-          jsIndex--;
-        }
-
-        pseudoIndex++;
-        continue;
-      }
-
-      // Finally, consider the normal case of a plain C++ pseudo-frame.
-      if ((jsIndex < 0 || pseudoFrame.stackAddress() > jsFrames[jsIndex].stackAddress) &&
-          (nativeIndex < 0 || pseudoFrame.stackAddress() > aNativeStack.sp_array[nativeIndex]))
-      {
-        // The (C++) pseudo-frame is the oldest.
-        addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
-        pseudoIndex++;
-        continue;
-      }
+      MOZ_ASSERT(lastPseudoCppStackAddr);
+      pseudoStackAddr = lastPseudoCppStackAddr;
     }
 
-    if (jsIndex >= 0) {
-      // Test whether the JS frame is the oldest.
-      JSFrame &jsFrame = jsFrames[jsIndex];
-      if ((pseudoIndex == pseudoCount || jsFrame.stackAddress > pseudoFrames[pseudoIndex].stackAddress()) &&
-          (nativeIndex < 0 || jsFrame.stackAddress > aNativeStack.sp_array[nativeIndex]))
-      {
-        // The JS frame is the oldest.
-        addDynamicTag(aProfile, 'c', jsFrame.label);
-        jsIndex--;
-        continue;
-      }
+    if (jsIndex >= 0)
+      jsStackAddr = (uint8_t *) jsFrames[jsIndex].stackAddress;
+
+    if (nativeIndex >= 0)
+      nativeStackAddr = (uint8_t *) aNativeStack.sp_array[nativeIndex];
+
+    // Sanity checks.
+    MOZ_ASSERT_IF(pseudoStackAddr, pseudoStackAddr != jsStackAddr &&
+                                   pseudoStackAddr != nativeStackAddr);
+    MOZ_ASSERT_IF(jsStackAddr, jsStackAddr != pseudoStackAddr &&
+                               jsStackAddr != nativeStackAddr);
+    MOZ_ASSERT_IF(nativeStackAddr, nativeStackAddr != pseudoStackAddr &&
+                                   nativeStackAddr != jsStackAddr);
+
+    // Check to see if pseudoStack frame is top-most.
+    if (pseudoStackAddr > jsStackAddr && pseudoStackAddr > nativeStackAddr) {
+      MOZ_ASSERT(pseudoIndex < pseudoCount);
+      volatile StackEntry &pseudoFrame = pseudoFrames[pseudoIndex];
+      addPseudoEntry(pseudoFrame, aProfile, pseudoStack, nullptr);
+      pseudoIndex++;
+      continue;
     }
 
-    // If execution reaches this point, there must be a native frame and it must
-    // be the oldest.
+    // Check to see if JS jit stack frame is top-most
+    if (jsStackAddr > nativeStackAddr) {
+      MOZ_ASSERT(jsIndex >= 0);
+      addDynamicTag(aProfile, 'c', jsFrames[jsIndex].label);
+      jsIndex--;
+      continue;
+    }
+
+    // If we reach here, there must be a native stack entry and it must be the
+    // greatest entry.
+    MOZ_ASSERT(nativeStackAddr);
     MOZ_ASSERT(nativeIndex >= 0);
     aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
     nativeIndex--;
   }
 }
 
 #ifdef USE_NS_STACKWALK
 static
@@ -732,16 +734,17 @@ void doSampleStackTrace(ThreadProfile &a
     aProfile.addTag(ProfileEntry('L', (void*)aSample->lr));
 #endif
   }
 #endif
 }
 
 void TableTicker::Tick(TickSample* sample)
 {
+  // Don't allow for ticks to happen within other ticks.
   if (HasUnwinderThread()) {
     UnwinderTick(sample);
   } else {
     InplaceTick(sample);
   }
 }
 
 void TableTicker::InplaceTick(TickSample* sample)