Bug 1129510 - Trace references to JS heap from Profiler buffers. r=shu
☠☠ backed out by 56efbd155062 ☠☠
author Kannan Vijayan <kvijayan@mozilla.com>
Fri, 20 Feb 2015 17:50:02 -0500
changeset 257152 e8cfd49ac790eb7a8db2ff79f6fad74c7f567129
parent 257151 4a083a63de4d89b101ccfba3d52c1c713d0b5b69
child 257153 e9a8fe34405ccb260609e18ff885921502a78327
push id 4610
push user jlund@mozilla.com
push date Mon, 30 Mar 2015 18:32:55 +0000
reviewers shu
bugs 1129510
milestone 38.0a1
Bug 1129510 - Trace references to JS heap from Profiler buffers. r=shu
js/public/ProfilingFrameIterator.h
js/src/ds/SplayTree.h
js/src/gc/Marking.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonCaches.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitcodeMap.cpp
js/src/jit/JitcodeMap.h
js/src/jsgc.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/Stack.cpp
js/src/vm/TypeInference.h
tools/profiler/ProfileEntry.h
tools/profiler/TableTicker.cpp
--- a/js/public/ProfilingFrameIterator.h
+++ b/js/public/ProfilingFrameIterator.h
@@ -29,16 +29,17 @@ namespace JS {
 
 // This iterator can be used to walk the stack of a thread suspended at an
 // arbitrary pc. To provide accurate results, profiling must have been enabled
 // (via EnableRuntimeProfilingStack) before executing the callstack being
 // unwound.
 class JS_PUBLIC_API(ProfilingFrameIterator)
 {
     JSRuntime *rt_;
+    uint32_t sampleBufferGen_;
     js::Activation *activation_;
 
     // When moving past a JitActivation, we need to save the prevJitTop
     // from it to use as the exit-frame pointer when the next caller jit
     // activation (if any) comes around.
     void *savedPrevJitTop_;
 
     static const unsigned StorageSpace = 6 * sizeof(void*);
@@ -63,26 +64,31 @@ class JS_PUBLIC_API(ProfilingFrameIterat
     const js::jit::JitProfilingFrameIterator &jitIter() const {
         MOZ_ASSERT(!done());
         MOZ_ASSERT(isJit());
         return *reinterpret_cast<const js::jit::JitProfilingFrameIterator*>(storage_.addr());
     }
 
     void settle();
 
+    bool hasSampleBufferGen() const {
+        return sampleBufferGen_ != UINT32_MAX;
+    }
+
   public:
     struct RegisterState
     {
         RegisterState() : pc(nullptr), sp(nullptr), lr(nullptr) {}
         void *pc;
         void *sp;
         void *lr;
     };
 
-    ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state);
+    ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
+                           uint32_t sampleBufferGen = UINT32_MAX);
     ~ProfilingFrameIterator();
     void operator++();
     bool done() const { return !activation_; }
 
     // Assuming the stack grows down (we do), the return value:
     //  - always points into the stack
     //  - is weakly monotonically increasing (may be equal for successive frames)
 //  - will compare greater than newer native and pseudo-stack frame addresses
@@ -112,11 +118,23 @@ class JS_PUBLIC_API(ProfilingFrameIterat
     void iteratorConstruct();
     void iteratorDestroy();
     bool iteratorDone();
 
     bool isAsmJS() const;
     bool isJit() const;
 };
 
+/**
+ * After each sample run, this function should be called with the latest
+ * sample buffer generation and lap count. It updates the corresponding
+ * fields on JSRuntime.
+ *
+ * See fields |profilerSampleBufferGen|, |profilerSampleBufferLapCount| on
+ * JSRuntime for documentation about what these values are used for.
+ */
+JS_FRIEND_API(void)
+UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
+                                       uint32_t lapCount);
+
 } // namespace JS
 
 #endif  /* js_ProfilingFrameIterator_h */
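
For context, the intended calling pattern (mirrored by the TableTicker.cpp changes below): the embedder's sampler records the buffer generation before walking the stack, passes it to the iterator so sampled jitcode entries can be stamped, and reports the final generation and lap count back to the runtime afterwards. A minimal sketch, assuming hypothetical samplePC/sampleSP values and a profile object with a bufferGeneration() accessor:

    uint32_t startGen = profile.bufferGeneration();

    JS::ProfilingFrameIterator::RegisterState state;
    state.pc = samplePC;   // registers captured from the suspended thread
    state.sp = sampleSP;

    // Passing a generation (anything but UINT32_MAX) routes lookups through
    // the sampler path, which stamps each referenced entry with this gen.
    for (JS::ProfilingFrameIterator iter(rt, state, startGen); !iter.done(); ++iter) {
        // ... extract frames into the profile buffer ...
    }

    // Report how far the buffer generation advanced during this sample run.
    uint32_t lapCount = profile.bufferGeneration() - startGen;
    JS::UpdateJSRuntimeProfilerSampleBufferGen(rt, profile.bufferGeneration(), lapCount);
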
--- a/js/src/ds/SplayTree.h
+++ b/js/src/ds/SplayTree.h
@@ -60,16 +60,24 @@ class SplayTree
         enableCheckCoherency = false;
 #endif
     }
 
     bool empty() const {
         return !root;
     }
 
+    T *maybeLookup(const T &v)
+    {
+        if (!root)
+            return nullptr;
+        Node *last = lookup(v);
+        return (C::compare(v, last->item) == 0) ? &(last->item) : nullptr;
+    }
+
     bool contains(const T &v, T *res)
     {
         if (!root)
             return false;
         Node *last = lookup(v);
         splay(last);
         checkCoherency(root, nullptr);
         if (C::compare(v, last->item) == 0) {
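
Unlike contains(), which copies the matching item out and splays the tree, the new maybeLookup() returns a pointer to the item stored in the tree (or nullptr), letting callers update an entry in place without restructuring the tree; this is what allows the jitcode table below to stamp generations on entries during sampling. A minimal sketch, assuming a hypothetical integer instantiation and an already-initialized LifoAlloc:

    struct IntOps {
        static int compare(int a, int b) { return (a > b) - (a < b); }
    };

    js::SplayTree<int, IntOps> tree(&alloc);  // `alloc`: a js::LifoAlloc, assumed set up

    if (int *item = tree.maybeLookup(7)) {
        // Pointer into the tree; valid until the tree is mutated.
        // In-place updates must not change how the item compares.
        *item = 7;
    }
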
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -785,21 +785,21 @@ gc::MarkValueRoot(JSTracer *trc, Value *
     MarkValueInternal(trc, v);
 }
 
 void
 TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
 {
     JS_ROOT_MARKING_ASSERT(trc);
     trc->setTracingName(name);
-    if (v->isSingleton()) {
+    if (v->isSingletonUnchecked()) {
         JSObject *obj = v->singleton();
         MarkInternal(trc, &obj);
         *v = TypeSet::ObjectType(obj);
-    } else if (v->isGroup()) {
+    } else if (v->isGroupUnchecked()) {
         ObjectGroup *group = v->group();
         MarkInternal(trc, &group);
         *v = TypeSet::ObjectType(group);
     }
 }
 
 void
 gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredBase<Value> *vec, const char *name)
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -272,17 +272,17 @@ BaselineCompiler::compile()
                     script->filename(), script->lineno(), baselineScript.get());
 
         // Generate profiling string.
         char *str = JitcodeGlobalEntry::createScriptString(cx, script);
         if (!str)
             return Method_Error;
 
         JitcodeGlobalEntry::BaselineEntry entry;
-        entry.init(code->raw(), code->rawEnd(), script, str);
+        entry.init(code, code->raw(), code->rawEnd(), script, str);
 
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return Method_Error;
         }
 
         // Mark the jitcode as having a bytecode map.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -7475,17 +7475,17 @@ CodeGenerator::link(JSContext *cx, Compi
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     } else {
         // Add a dummy jitcodeGlobalTable entry.
         JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
+        entry.init(code, code->raw(), code->rawEnd());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             // Memory may have been allocated for the entry.
             entry.destroy();
             return false;
         }
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -490,16 +490,23 @@ jit::LazyLinkTopActivation(JSContext *cx
 JitRuntime::Mark(JSTracer *trc)
 {
     MOZ_ASSERT(!trc->runtime()->isHeapMinorCollecting());
     Zone *zone = trc->runtime()->atomsCompartment()->zone();
     for (gc::ZoneCellIterUnderGC i(zone, gc::FINALIZE_JITCODE); !i.done(); i.next()) {
         JitCode *code = i.get<JitCode>();
         MarkJitCodeRoot(trc, &code, "wrapper");
     }
+
+    // Mark references to the JS heap from the jitcode global table.
+    if (trc->runtime()->hasJitRuntime() &&
+        trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
+    {
+        trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->mark(trc);
+    }
 }
 
 void
 JitCompartment::mark(JSTracer *trc, JSCompartment *compartment)
 {
     // Free temporary OSR buffer.
     trc->runtime()->jitRuntime()->freeOsrTempData();
 }
@@ -655,17 +662,17 @@ JitCode::fixupNurseryObjects(JSContext *
 void
 JitCode::finalize(FreeOp *fop)
 {
     JSRuntime *rt = fop->runtime();
 
     // If this jitcode has a bytecode map, de-register it.
     if (hasBytecodeMap_) {
         MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
-        rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
+        rt->jitRuntime()->getJitcodeGlobalTable()->releaseEntry(raw(), rt);
     }
 
     // Buffer can be freed at any time hereafter. Catch use-after-free bugs.
     // Don't do this if the Ion code is protected, as the signal handler will
     // deadlock trying to reacquire the interrupt lock.
     memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
     code_ = nullptr;
 
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -426,30 +426,30 @@ IonCache::linkAndAttachStub(JSContext *c
     writePerfSpewerJitCodeProfile(code, "IonCache");
 #endif
 
     attachStub(masm, attacher, code);
 
     // Add entry to native => bytecode mapping for this stub if needed.
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
         JitcodeGlobalEntry::IonCacheEntry entry;
-        entry.init(code->raw(), code->rawEnd(), rejoinAddress());
+        entry.init(code, code->raw(), code->rawEnd(), rejoinAddress());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     } else {
         JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
+        entry.init(code, code->raw(), code->rawEnd());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return false;
         }
 
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -2846,17 +2846,16 @@ inline ReturnType
 GetPreviousRawFrame(FrameType *frame)
 {
     size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
     return (ReturnType) (((uint8_t *) frame) + prevSize);
 }
 
 JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
 {
-    // Exit frame was en
     ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
     FrameType prevType = frame->prevType();
 
     if (prevType == JitFrame_IonJS || prevType == JitFrame_BaselineJS ||
         prevType == JitFrame_Unwound_IonJS)
     {
         returnAddressToFp_ = frame->returnAddress();
         fp_ = GetPreviousRawFrame<ExitFrameLayout, uint8_t *>(frame);
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -4,16 +4,17 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/JitcodeMap.h"
 
 #include "mozilla/DebugOnly.h"
 #include "mozilla/UniquePtr.h"
 #include "jsprf.h"
+#include "gc/Marking.h"
 
 #include "jit/BaselineJIT.h"
 #include "jit/JitSpewer.h"
 
 #include "js/Vector.h"
 #include "vm/SPSProfiler.h"
 #include "jsscriptinlines.h"
 
@@ -329,31 +330,54 @@ JitcodeGlobalEntry::createScriptString(J
     return str;
 }
 
 bool
 JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
 {
     MOZ_ASSERT(result);
 
-    // Construct a JitcodeGlobalEntry::Query to do the lookup
-    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
+    JitcodeGlobalEntry *entry = lookupInPlace(ptr);
+    if (!entry)
+        return false;
+
+    *result = *entry;
+    return true;
+}
 
-    // Lookups on tree does mutation.  Suppress sampling when this is happening.
-    AutoSuppressProfilerSampling suppressSampling(rt);
-    return tree_.contains(query, result);
+bool
+JitcodeGlobalTable::lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
+                                     uint32_t sampleBufferGen)
+{
+    MOZ_ASSERT(result);
+
+    JitcodeGlobalEntry *entry = lookupInPlace(ptr);
+    if (!entry)
+        return false;
+
+    entry->setGeneration(sampleBufferGen);
+
+    *result = *entry;
+    return true;
 }
 
 void
 JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
 {
     mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
     MOZ_ASSERT(success);
 }
 
+JitcodeGlobalEntry *
+JitcodeGlobalTable::lookupInPlace(void *ptr)
+{
+    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
+    return tree_.maybeLookup(query);
+}
+
 bool
 JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
 {
     // Suppress profiler sampling while table is being mutated.
     AutoSuppressProfilerSampling suppressSampling(rt);
 
     MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
     return tree_.insert(entry);
@@ -361,25 +385,95 @@ JitcodeGlobalTable::addEntry(const Jitco
 
 void
 JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
 {
     // Suppress profiler sampling while table is being mutated.
     AutoSuppressProfilerSampling suppressSampling(rt);
 
     JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
-    JitcodeGlobalEntry result;
-    mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
-    MOZ_ASSERT(success);
+    JitcodeGlobalEntry *entry = tree_.maybeLookup(query);
+    MOZ_ASSERT(entry);
 
     // Destroy entry before removing it from tree.
-    result.destroy();
+    entry->destroy();
     tree_.remove(query);
 }
 
+void
+JitcodeGlobalTable::releaseEntry(void *startAddr, JSRuntime *rt)
+{
+    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
+    mozilla::DebugOnly<JitcodeGlobalEntry *> entry = tree_.maybeLookup(query);
+    mozilla::DebugOnly<uint32_t> gen = rt->profilerSampleBufferGen();
+    mozilla::DebugOnly<uint32_t> lapCount = rt->profilerSampleBufferLapCount();
+    MOZ_ASSERT(entry);
+    MOZ_ASSERT_IF(gen != UINT32_MAX, !entry->isSampled(gen, lapCount));
+    removeEntry(startAddr, rt);
+}
+
+struct JitcodeMapEntryTraceCallback
+{
+    JSTracer *trc;
+    uint32_t gen;
+    uint32_t lapCount;
+
+    JitcodeMapEntryTraceCallback(JSTracer *trc)
+      : trc(trc),
+        gen(trc->runtime()->profilerSampleBufferGen()),
+        lapCount(trc->runtime()->profilerSampleBufferLapCount())
+    {
+        if (!trc->runtime()->spsProfiler.enabled())
+            gen = UINT32_MAX;
+    }
+
+    void operator()(JitcodeGlobalEntry &entry) {
+        // If an entry is not sampled, reset its generation to
+        // the invalid generation, and skip it.
+        if (!entry.isSampled(gen, lapCount)) {
+            entry.setGeneration(UINT32_MAX);
+            return;
+        }
+
+        // Mark jitcode pointed to by this entry.
+        entry.baseEntry().markJitcode(trc);
+
+        // Mark ion entry if necessary.
+        if (entry.isIon())
+            entry.ionEntry().mark(trc);
+    }
+};
+
+void
+JitcodeGlobalTable::mark(JSTracer *trc)
+{
+    AutoSuppressProfilerSampling suppressSampling(trc->runtime());
+    JitcodeMapEntryTraceCallback traceCallback(trc);
+    tree_.forEach<JitcodeMapEntryTraceCallback &>(traceCallback);
+}
+
+void
+JitcodeGlobalEntry::BaseEntry::markJitcode(JSTracer *trc)
+{
+    MarkJitCodeRoot(trc, &jitcode_, "jitcodeglobaltable-baseentry-jitcode");
+}
+
+void
+JitcodeGlobalEntry::IonEntry::mark(JSTracer *trc)
+{
+    if (!optsAllTypes_)
+        return;
+
+    for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
+         iter != optsAllTypes_->end(); iter++)
+    {
+        TypeSet::MarkTypeRoot(trc, &(iter->type), "jitcodeglobaltable-ionentry-type");
+    }
+}
+
 /* static */ void
 JitcodeRegionEntry::WriteHead(CompactBufferWriter &writer,
                               uint32_t nativeOffset, uint8_t scriptDepth)
 {
     writer.writeUnsigned(nativeOffset);
     writer.writeByte(scriptDepth);
 }
 
@@ -799,17 +893,17 @@ JitcodeIonTable::makeIonEntry(JSContext 
     if (!mem)
         return false;
 
     // Keep allocated profiling strings on destruct.
     autoFreeProfilingStrings.keepStrings();
 
     SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
                                                             &profilingStrings[0]);
-    out.init(code->raw(), code->rawEnd(), scriptList, this);
+    out.init(code, code->raw(), code->rawEnd(), scriptList, this);
     return true;
 }
 
 uint32_t
 JitcodeIonTable::findRegionEntry(uint32_t nativeOffset) const
 {
     static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
     uint32_t regions = numRegions();
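
The trace callback above reduces to simple generation arithmetic: an entry is live if it was stamped at most lapCount generations before the current sample buffer generation, with UINT32_MAX reserved to mean "never sampled" (or "sampling disabled"). A standalone restatement of the rule, matching isSampled() in JitcodeMap.h below:

    #include <cstdint>

    // Same predicate as JitcodeGlobalEntry::BaseEntry::isSampled().
    static bool isSampled(uint32_t entryGen, uint32_t currentGen, uint32_t lapCount) {
        if (entryGen == UINT32_MAX || currentGen == UINT32_MAX)
            return false;                          // never sampled, or sampling off
        return currentGen - entryGen <= lapCount;  // within the lap window
    }

    // With currentGen == 10 and lapCount == 1, entries stamped at gen 9 or 10
    // are marked; an entry stamped at gen 7 fails the test, has its generation
    // reset to UINT32_MAX by the callback, and its jitcode can be collected.
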
--- a/js/src/jit/JitcodeMap.h
+++ b/js/src/jit/JitcodeMap.h
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jit_JitcodeMap_h
 #define jit_JitcodeMap_h
 
 #include "ds/SplayTree.h"
 #include "jit/CompactBuffer.h"
 #include "jit/CompileInfo.h"
+#include "jit/ExecutableAllocator.h"
 #include "jit/OptimizationTracking.h"
 #include "jit/shared/CodeGenerator-shared.h"
 
 namespace js {
 namespace jit {
 
 /*
  * The Ion jitcode map implements tables to allow mapping from addresses in ion jitcode
@@ -53,54 +54,81 @@ class JitcodeGlobalEntry
         jsbytecode *pc;
         BytecodeLocation(JSScript *script, jsbytecode *pc) : script(script), pc(pc) {}
     };
     typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
     typedef Vector<const char *, 0, SystemAllocPolicy> ProfileStringVector;
 
     struct BaseEntry
     {
+        JitCode *jitcode_;
         void *nativeStartAddr_;
         void *nativeEndAddr_;
-        Kind kind_;
+        uint32_t gen_;
+        Kind kind_ : 7;
 
         void init() {
+            jitcode_ = nullptr;
             nativeStartAddr_ = nullptr;
             nativeEndAddr_ = nullptr;
+            gen_ = UINT32_MAX;
             kind_ = INVALID;
         }
 
-        void init(Kind kind, void *nativeStartAddr, void *nativeEndAddr) {
+        void init(Kind kind, JitCode *code,
+                  void *nativeStartAddr, void *nativeEndAddr)
+        {
+            MOZ_ASSERT_IF(kind != Query, code);
             MOZ_ASSERT(nativeStartAddr);
             MOZ_ASSERT(nativeEndAddr);
             MOZ_ASSERT(kind > INVALID && kind < LIMIT);
+            jitcode_ = code;
             nativeStartAddr_ = nativeStartAddr;
             nativeEndAddr_ = nativeEndAddr;
+            gen_ = UINT32_MAX;
             kind_ = kind;
         }
 
+        uint32_t generation() const {
+            return gen_;
+        }
+        void setGeneration(uint32_t gen) {
+            gen_ = gen;
+        }
+        bool isSampled(uint32_t currentGen, uint32_t lapCount) {
+            if (gen_ == UINT32_MAX || currentGen == UINT32_MAX)
+                return false;
+            MOZ_ASSERT(currentGen >= gen_);
+            return (currentGen - gen_) <= lapCount;
+        }
+
         Kind kind() const {
             return kind_;
         }
+        JitCode *jitcode() const {
+            return jitcode_;
+        }
         void *nativeStartAddr() const {
             return nativeStartAddr_;
         }
         void *nativeEndAddr() const {
             return nativeEndAddr_;
         }
 
         bool startsBelowPointer(void *ptr) const {
             return ((uint8_t *)nativeStartAddr()) <= ((uint8_t *) ptr);
         }
         bool endsAbovePointer(void *ptr) const {
             return ((uint8_t *)nativeEndAddr()) > ((uint8_t *) ptr);
         }
         bool containsPointer(void *ptr) const {
             return startsBelowPointer(ptr) && endsAbovePointer(ptr);
         }
+
+        void markJitcode(JSTracer *trc);
     };
 
     struct IonEntry : public BaseEntry
     {
         // regionTable_ points to the start of the region table within the
         // packed map for the compile represented by this entry.  Since the
         // region table occurs at the tail of the memory region, this pointer
         // points somewhere inside the region memory space, and not to the start
@@ -141,22 +169,22 @@ class JitcodeGlobalEntry
 
             static uint32_t AllocSizeFor(uint32_t nscripts) {
                 return sizeof(SizedScriptList) + (nscripts * sizeof(ScriptNamePair));
             }
         };
 
         SizedScriptList *scriptList_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr,
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
                   SizedScriptList *scriptList, JitcodeIonTable *regionTable)
         {
             MOZ_ASSERT(scriptList);
             MOZ_ASSERT(regionTable);
-            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(Ion, code, nativeStartAddr, nativeEndAddr);
             regionTable_ = regionTable;
             scriptList_ = scriptList;
             optsRegionTable_ = nullptr;
             optsTypesTable_ = nullptr;
             optsAllTypes_ = nullptr;
             optsAttemptsTable_ = nullptr;
         }
 
@@ -225,33 +253,36 @@ class JitcodeGlobalEntry
         }
 
         const IonTrackedTypeVector *allTrackedTypes() {
             MOZ_ASSERT(hasTrackedOptimizations());
             return optsAllTypes_;
         }
 
         mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr);
+
+        void mark(JSTracer *trc);
     };
 
     struct BaselineEntry : public BaseEntry
     {
         JSScript *script_;
         const char *str_;
 
         // Last location that caused Ion to abort compilation and the reason
         // therein, if any. Only actionable aborts are tracked. Internal
         // errors like OOMs are not.
         jsbytecode *ionAbortPc_;
         const char *ionAbortMessage_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
+                  JSScript *script, const char *str)
         {
             MOZ_ASSERT(script != nullptr);
-            BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(Baseline, code, nativeStartAddr, nativeEndAddr);
             script_ = script;
             str_ = str;
         }
 
         JSScript *script() const {
             return script_;
         }
 
@@ -279,20 +310,21 @@ class JitcodeGlobalEntry
         uint32_t callStackAtAddr(JSRuntime *rt, void *ptr, const char **results,
                                  uint32_t maxResults) const;
     };
 
     struct IonCacheEntry : public BaseEntry
     {
         void *rejoinAddr_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, void *rejoinAddr)
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
+                  void *rejoinAddr)
         {
             MOZ_ASSERT(rejoinAddr != nullptr);
-            BaseEntry::init(IonCache, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(IonCache, code, nativeStartAddr, nativeEndAddr);
             rejoinAddr_ = rejoinAddr;
         }
 
         void *rejoinAddr() const {
             return rejoinAddr_;
         }
 
         void destroy() {}
@@ -304,18 +336,18 @@ class JitcodeGlobalEntry
                                  uint32_t maxResults) const;
     };
 
     // Dummy entries are created for jitcode generated when profiling is not
     // turned on, so that such code is represented in the global table if it
     // is on the stack when profiling is enabled.
     struct DummyEntry : public BaseEntry
     {
-        void init(void *nativeStartAddr, void *nativeEndAddr) {
-            BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr) {
+            BaseEntry::init(Dummy, code, nativeStartAddr, nativeEndAddr);
         }
 
         void destroy() {}
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const
         {
             return true;
@@ -329,17 +361,17 @@ class JitcodeGlobalEntry
     };
 
     // QueryEntry is never stored in the table, just used for queries
     // where an instance of JitcodeGlobalEntry is required to do tree
     // lookups.
     struct QueryEntry : public BaseEntry
     {
         void init(void *addr) {
-            BaseEntry::init(Query, addr, addr);
+            BaseEntry::init(Query, nullptr, addr, addr);
         }
         uint8_t *addr() const {
             return reinterpret_cast<uint8_t *>(nativeStartAddr());
         }
         void destroy() {}
     };
 
   private:
@@ -414,23 +446,36 @@ class JitcodeGlobalEntry
           case Query:
             queryEntry().destroy();
             break;
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
     }
 
+    JitCode *jitcode() const {
+        return baseEntry().jitcode();
+    }
     void *nativeStartAddr() const {
         return base_.nativeStartAddr();
     }
     void *nativeEndAddr() const {
         return base_.nativeEndAddr();
     }
 
+    uint32_t generation() const {
+        return baseEntry().generation();
+    }
+    void setGeneration(uint32_t gen) {
+        baseEntry().setGeneration(gen);
+    }
+    bool isSampled(uint32_t currentGen, uint32_t lapCount) {
+        return baseEntry().isSampled(currentGen, lapCount);
+    }
+
     bool startsBelowPointer(void *ptr) const {
         return base_.startsBelowPointer(ptr);
     }
     bool endsAbovePointer(void *ptr) const {
         return base_.endsAbovePointer(ptr);
     }
     bool containsPointer(void *ptr) const {
         return base_.containsPointer(ptr);
@@ -447,32 +492,39 @@ class JitcodeGlobalEntry
 
         return false;
     }
 
     Kind kind() const {
         return base_.kind();
     }
 
+    bool isValid() const {
+        return (kind() > INVALID) && (kind() < LIMIT);
+    }
     bool isIon() const {
         return kind() == Ion;
     }
     bool isBaseline() const {
         return kind() == Baseline;
     }
     bool isIonCache() const {
         return kind() == IonCache;
     }
     bool isDummy() const {
         return kind() == Dummy;
     }
     bool isQuery() const {
         return kind() == Query;
     }
 
+    BaseEntry &baseEntry() {
+        MOZ_ASSERT(isValid());
+        return base_;
+    }
     IonEntry &ionEntry() {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     BaselineEntry &baselineEntry() {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
@@ -484,16 +536,20 @@ class JitcodeGlobalEntry
         MOZ_ASSERT(isDummy());
         return dummy_;
     }
     QueryEntry &queryEntry() {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
+    const BaseEntry &baseEntry() const {
+        MOZ_ASSERT(isValid());
+        return base_;
+    }
     const IonEntry &ionEntry() const {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     const BaselineEntry &baselineEntry() const {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
@@ -605,56 +661,61 @@ class JitcodeGlobalEntry
 /*
  * Global table of JitcodeGlobalEntry values sorted by native address range.
  */
 class JitcodeGlobalTable
 {
   public:
     typedef SplayTree<JitcodeGlobalEntry, JitcodeGlobalEntry> EntryTree;
 
-    typedef Vector<JitcodeGlobalEntry, 0, SystemAllocPolicy> EntryVector;
-
   private:
     static const size_t LIFO_CHUNK_SIZE = 16 * 1024;
     LifoAlloc treeAlloc_;
     EntryTree tree_;
-    EntryVector entries_;
 
   public:
-    JitcodeGlobalTable() : treeAlloc_(LIFO_CHUNK_SIZE), tree_(&treeAlloc_), entries_() {
+    JitcodeGlobalTable() : treeAlloc_(LIFO_CHUNK_SIZE), tree_(&treeAlloc_) {
         // Always checking coherency in DEBUG builds may cause tests to time
         // out under --baseline-eager or --ion-eager.
         tree_.disableCheckCoherency();
     }
     ~JitcodeGlobalTable() {}
 
     bool empty() const {
         return tree_.empty();
     }
 
     bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
+    bool lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
+                          uint32_t sampleBufferGen);
+
     void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
 
     bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::DummyEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
 
     void removeEntry(void *startAddr, JSRuntime *rt);
+    void releaseEntry(void *startAddr, JSRuntime *rt);
+
+    void mark(JSTracer *trc);
 
   private:
     bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
+
+    JitcodeGlobalEntry *lookupInPlace(void *ptr);
 };
 
 
 /*
  * Container class for main jitcode table.
  * The Region table's memory is structured as follows:
  *
  *      +------------------------------------------------+   |
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -214,16 +214,17 @@
 
 #include "gc/FindSCCs.h"
 #include "gc/GCInternals.h"
 #include "gc/GCTrace.h"
 #include "gc/Marking.h"
 #include "gc/Memory.h"
 #include "jit/BaselineJIT.h"
 #include "jit/IonCode.h"
+#include "jit/JitcodeMap.h"
 #include "js/SliceBudget.h"
 #include "proxy/DeadObjectProxy.h"
 #include "vm/Debugger.h"
 #include "vm/ProxyObject.h"
 #include "vm/Shape.h"
 #include "vm/String.h"
 #include "vm/Symbol.h"
 #include "vm/TraceLogging.h"
@@ -5122,16 +5123,17 @@ GCRuntime::beginSweepPhase(bool lastGC)
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key().kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value().get().toObject());
         }
     }
 #endif
 
     DropStringWrappers(rt);
+
     findZoneGroups();
     endMarkingZoneGroup();
     beginSweepingZoneGroup();
 }
 
 bool
 ArenaLists::foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget,
                                SortedArenaList &sweepList)
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -115,16 +115,18 @@ ReturnZeroSize(const void *p)
 JSRuntime::JSRuntime(JSRuntime *parentRuntime)
   : mainThread(this),
     jitTop(nullptr),
     jitJSContext(nullptr),
     jitActivation(nullptr),
     jitStackLimit_(0xbad),
     activation_(nullptr),
     profilingActivation_(nullptr),
+    profilerSampleBufferGen_(0),
+    profilerSampleBufferLapCount_(1),
     asmJSActivationStack_(nullptr),
     parentRuntime(parentRuntime),
     interrupt_(false),
     telemetryCallback(nullptr),
     handlingSignal(false),
     interruptCallback(nullptr),
     exclusiveAccessLock(nullptr),
     exclusiveAccessOwner(nullptr),
@@ -367,16 +369,19 @@ JSRuntime::~JSRuntime()
          * Flag us as being destroyed. This allows the GC to free things like
          * interned atoms and Ion trampolines.
          */
         beingDestroyed_ = true;
 
         /* Allow the GC to release scripts that were being profiled. */
         profilingScripts = false;
 
+        /* Set the profiler sampler buffer generation to invalid. */
+        profilerSampleBufferGen_ = UINT32_MAX;
+
         JS::PrepareForFullGC(this);
         gc.gc(GC_NORMAL, JS::gcreason::DESTROY_RUNTIME);
     }
 
     /*
      * Clear the self-hosted global and delete self-hosted classes *after*
      * GC, as finalizers for objects check for clasp->finalize during GC.
      */
@@ -829,8 +834,16 @@ void
 js::AssertCurrentThreadCanLock(RuntimeLock which)
 {
     PerThreadData *pt = TlsPerThreadData.get();
     if (pt && pt->runtime_)
         pt->runtime_->assertCanLock(which);
 }
 
 #endif // DEBUG
+
+JS_FRIEND_API(void)
+JS::UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
+                                           uint32_t lapCount)
+{
+    runtime->setProfilerSampleBufferGen(generation);
+    runtime->updateProfilerSampleBufferLapCount(lapCount);
+}
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -636,16 +636,31 @@ struct JSRuntime : public JS::shadow::Ru
     js::Activation *activation_;
 
     /*
      * Points to the most recent profiling activation running on the
      * thread.
      */
     js::Activation * volatile profilingActivation_;
 
+    /*
+     * The profiler sampler generation after the latest sample.
+     *
+     * The lapCount indicates the largest number of 'laps'
+     * (wrapping from high to low) that occurred when writing entries
+     * into the sample buffer.  All JitcodeGlobalMap entries referenced
+     * from a given sample are assigned the generation of the sample buffer
+     * at the START of the run.  If multiple laps occur, then some entries
+     * (towards the end) will be written out with the "wrong" generation.
+     * The lapCount indicates the required fudge factor to use to compare
+     * entry generations with the sample buffer generation.
+     */
+    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferGen_;
+    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferLapCount_;
+
     /* See AsmJSActivation comment. */
     js::AsmJSActivation * volatile asmJSActivationStack_;
 
   public:
     js::Activation *const *addressOfActivation() const {
         return &activation_;
     }
     static unsigned offsetOfActivation() {
@@ -657,16 +672,41 @@ struct JSRuntime : public JS::shadow::Ru
     }
     void *addressOfProfilingActivation() {
         return (void*) &profilingActivation_;
     }
     static unsigned offsetOfProfilingActivation() {
         return offsetof(JSRuntime, profilingActivation_);
     }
 
+    uint32_t profilerSampleBufferGen() {
+        return profilerSampleBufferGen_;
+    }
+    void setProfilerSampleBufferGen(uint32_t gen) {
+        profilerSampleBufferGen_ = gen;
+    }
+
+    uint32_t profilerSampleBufferLapCount() {
+        MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);
+        return profilerSampleBufferLapCount_;
+    }
+    void updateProfilerSampleBufferLapCount(uint32_t lapCount) {
+        MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);
+
+        // Use compareExchange to ensure the lap count increases monotonically.
+        for (;;) {
+            uint32_t curLapCount = profilerSampleBufferLapCount_;
+            if (curLapCount >= lapCount)
+                break;
+
+            if (profilerSampleBufferLapCount_.compareExchange(curLapCount, lapCount))
+                break;
+        }
+    }
+
     js::AsmJSActivation *asmJSActivationStack() const {
         return asmJSActivationStack_;
     }
     static js::AsmJSActivation *innermostAsmJSActivation() {
         js::PerThreadData *ptd = js::TlsPerThreadData.get();
         return ptd ? ptd->runtimeFromMainThread()->asmJSActivationStack_ : nullptr;
     }
 
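
updateProfilerSampleBufferLapCount() needs the stored lap count to only ever grow, even when threads race, so a plain store will not do. The compare-exchange loop above is the standard monotonic-maximum pattern; a self-contained equivalent using std::atomic in place of mozilla::Atomic:

    #include <atomic>
    #include <cstdint>

    std::atomic<uint32_t> lapCount{1};

    // Raise lapCount to at least newCount, never lowering it.
    void updateLapCount(uint32_t newCount) {
        uint32_t cur = lapCount.load();
        while (cur < newCount) {
            // On failure, `cur` is reloaded with the current value and the
            // loop re-checks whether our update is still needed.
            if (lapCount.compare_exchange_weak(cur, newCount))
                break;
        }
    }
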
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -1709,18 +1709,20 @@ void
 ActivationIterator::settle()
 {
     // Stop at the next active activation. No need to update jitTop_, since
     // we don't iterate over an active jit activation.
     while (!done() && activation_->isJit() && !activation_->asJit()->isActive())
         activation_ = activation_->prev();
 }
 
-JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
+JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
+                                                   uint32_t sampleBufferGen)
   : rt_(rt),
+    sampleBufferGen_(sampleBufferGen),
     activation_(rt->profilingActivation()),
     savedPrevJitTop_(nullptr)
 {
     if (!activation_)
         return;
 
     // If profiler sampling is not enabled, skip.
     if (!rt_->isProfilerSamplingEnabled()) {
@@ -1872,16 +1874,20 @@ JS::ProfilingFrameIterator::extractStack
 
     MOZ_ASSERT(isJit());
     void *returnAddr = jitIter().returnAddressToFp();
 
     // Look up an entry for the return address.
     jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
     jit::JitcodeGlobalEntry entry;
     table->lookupInfallible(returnAddr, &entry, rt_);
+    if (hasSampleBufferGen())
+        table->lookupForSampler(returnAddr, &entry, rt_, sampleBufferGen_);
+    else
+        table->lookup(returnAddr, &entry, rt_);
 
     MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());
 
     // Dummy frames produce no stack frames.
     if (entry.isDummy())
         return 0;
 
     FrameKind kind = entry.isBaseline() ? Frame_Baseline : Frame_Ion;
--- a/js/src/vm/TypeInference.h
+++ b/js/src/vm/TypeInference.h
@@ -310,25 +310,31 @@ class TypeSet
 
         inline ObjectKey *objectKey() const;
 
         /* Accessors for JSObject types */
 
         bool isSingleton() const {
             return isObject() && !!(data & 1);
         }
+        bool isSingletonUnchecked() const {
+            return isObjectUnchecked() && !!(data & 1);
+        }
 
         inline JSObject *singleton() const;
         inline JSObject *singletonNoBarrier() const;
 
         /* Accessors for ObjectGroup types */
 
         bool isGroup() const {
             return isObject() && !(data & 1);
         }
+        bool isGroupUnchecked() const {
+            return isObjectUnchecked() && !(data & 1);
+        }
 
         inline ObjectGroup *group() const;
         inline ObjectGroup *groupNoBarrier() const;
 
         bool operator == (Type o) const { return data == o.data; }
         bool operator != (Type o) const { return data != o.data; }
 
         static ThingRootKind rootKind() { return THING_ROOT_TYPE; }
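
The Unchecked variants presumably exist so that TypeSet::MarkTypeRoot (in Marking.cpp above) can inspect a type's tag during GC without tripping the stricter assertions behind isObject(), which may not hold for an unmarked or relocated pointer. The underlying representation is just low-bit tagging on a word-sized field; a sketch of the scheme, with hypothetical stand-in types (the real accessors live on TypeSet::Type):

    #include <cstdint>

    struct JSObjectStub {};  // stand-in for JSObject / ObjectGroup

    // Word-aligned pointers leave bit 0 free: set => singleton object,
    // clear => object group, matching the (data & 1) tests above.
    uintptr_t tagSingleton(JSObjectStub *o) { return reinterpret_cast<uintptr_t>(o) | 1; }
    uintptr_t tagGroup(JSObjectStub *g)     { return reinterpret_cast<uintptr_t>(g); }

    bool isSingletonBit(uintptr_t data) { return (data & 1) != 0; }
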
--- a/tools/profiler/ProfileEntry.h
+++ b/tools/profiler/ProfileEntry.h
@@ -147,16 +147,22 @@ public:
 
   ThreadInfo* GetThreadInfo() const { return mThreadInfo; }
   ThreadResponsiveness* GetThreadResponsiveness() { return &mRespInfo; }
   void SetPendingDelete()
   {
     mPseudoStack = nullptr;
     mPlatformData = nullptr;
   }
+
+  uint32_t bufferGeneration() const {
+    MOZ_ASSERT(mBuffer->mGeneration >= 0);
+    return mBuffer->mGeneration;
+  }
+
 private:
   FRIEND_TEST(ThreadProfile, InsertOneTag);
   FRIEND_TEST(ThreadProfile, InsertOneTagWithTinyBuffer);
   FRIEND_TEST(ThreadProfile, InsertTagsNoWrap);
   FRIEND_TEST(ThreadProfile, InsertTagsWrap);
   FRIEND_TEST(ThreadProfile, MemoryMeasure);
   ThreadInfo* mThreadInfo;
 
--- a/tools/profiler/TableTicker.cpp
+++ b/tools/profiler/TableTicker.cpp
@@ -481,31 +481,34 @@ void mergeStacksIntoProfile(ThreadProfil
   PseudoStack* pseudoStack = aProfile.GetPseudoStack();
   volatile StackEntry *pseudoFrames = pseudoStack->mStack;
   uint32_t pseudoCount = pseudoStack->stackSize();
 
   // Make a copy of the JS stack into a JSFrame array. This is necessary since,
   // like the native stack, the JS stack is iterated youngest-to-oldest and we
   // need to iterate oldest-to-youngest when adding entries to aProfile.
 
+  uint32_t startBufferGen = aProfile.bufferGeneration();
   uint32_t jsCount = 0;
   JS::ProfilingFrameIterator::Frame jsFrames[1000];
   {
     AutoWalkJSStack autoWalkJSStack;
     const uint32_t maxFrames = mozilla::ArrayLength(jsFrames);
 
     if (aSample && pseudoStack->mRuntime && autoWalkJSStack.walkAllowed) {
       JS::ProfilingFrameIterator::RegisterState registerState;
       registerState.pc = aSample->pc;
       registerState.sp = aSample->sp;
 #ifdef ENABLE_ARM_LR_SAVING
       registerState.lr = aSample->lr;
 #endif
 
-      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
+      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime,
+                                        registerState,
+                                        startBufferGen);
       for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
         uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
         MOZ_ASSERT(extracted <= (maxFrames - jsCount));
         jsCount += extracted;
         if (jsCount == maxFrames)
           break;
       }
     }
@@ -596,16 +599,26 @@ void mergeStacksIntoProfile(ThreadProfil
 
     // If we reach here, there must be a native stack entry and it must be the
     // greatest entry.
     MOZ_ASSERT(nativeStackAddr);
     MOZ_ASSERT(nativeIndex >= 0);
     aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
     nativeIndex--;
   }
+
+  MOZ_ASSERT(aProfile.bufferGeneration() >= startBufferGen);
+  uint32_t lapCount = aProfile.bufferGeneration() - startBufferGen;
+
+  // Update the JS runtime with the current profile sample buffer generation.
+  if (pseudoStack->mRuntime) {
+    JS::UpdateJSRuntimeProfilerSampleBufferGen(pseudoStack->mRuntime,
+                                               aProfile.bufferGeneration(),
+                                               lapCount);
+  }
 }
 
 #ifdef USE_NS_STACKWALK
 static
 void StackWalkCallback(uint32_t aFrameNumber, void* aPC, void* aSP,
                        void* aClosure)
 {
   NativeStack* nativeStack = static_cast<NativeStack*>(aClosure);
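
Finally, the lap-count bookkeeping added at the end of mergeStacksIntoProfile above is worth spelling out: the lap count is simply how many generations the circular sample buffer advanced while this sample was being written, and it becomes the comparison slack used by isSampled(). A worked example with illustrative numbers:

    // Suppose the buffer was at generation 7 when the sample run started
    // and wrapped twice while frames were appended:
    uint32_t startBufferGen = 7;
    uint32_t endBufferGen = 9;                           // aProfile.bufferGeneration()
    uint32_t lapCount = endBufferGen - startBufferGen;   // == 2

    // Entries stamped with generation 7 during this run must still count as
    // live at generation 9, so the runtime keeps the largest lap count seen
    // and tests (currentGen - entryGen) <= lapCount when marking.
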