Bug 1129510 - Trace references to JS heap from Profiler buffers. r=shu
authorKannan Vijayan <kvijayan@mozilla.com>
Wed, 25 Feb 2015 16:43:39 -0500
changeset 230831 49f1f94b73af6943a55233e57cbff35b41185c7a
parent 230830 18365d823fdf8c524c2be59507f43e3aa47ee0a8
child 230832 da8e7a594541258f4c57f063044d2b717c8862c0
push id28337
push usercbook@mozilla.com
push dateThu, 26 Feb 2015 10:57:44 +0000
treeherdermozilla-central@599b84826bf3 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersshu
bugs1129510
milestone39.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1129510 - Trace references to JS heap from Profiler buffers. r=shu
js/public/ProfilingFrameIterator.h
js/src/ds/SplayTree.h
js/src/gc/Marking.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/CodeGenerator.cpp
js/src/jit/Ion.cpp
js/src/jit/IonCaches.cpp
js/src/jit/JitFrames.cpp
js/src/jit/JitcodeMap.cpp
js/src/jit/JitcodeMap.h
js/src/jsgc.cpp
js/src/vm/Runtime.cpp
js/src/vm/Runtime.h
js/src/vm/Stack.cpp
js/src/vm/TypeInference.h
tools/profiler/ProfileEntry.h
tools/profiler/TableTicker.cpp
--- a/js/public/ProfilingFrameIterator.h
+++ b/js/public/ProfilingFrameIterator.h
@@ -29,16 +29,17 @@ namespace JS {
 
 // This iterator can be used to walk the stack of a thread suspended at an
 // arbitrary pc. To provide acurate results, profiling must have been enabled
 // (via EnableRuntimeProfilingStack) before executing the callstack being
 // unwound.
 class JS_PUBLIC_API(ProfilingFrameIterator)
 {
     JSRuntime *rt_;
+    uint32_t sampleBufferGen_;
     js::Activation *activation_;
 
     // When moving past a JitActivation, we need to save the prevJitTop
     // from it to use as the exit-frame pointer when the next caller jit
     // activation (if any) comes around.
     void *savedPrevJitTop_;
 
     static const unsigned StorageSpace = 6 * sizeof(void*);
@@ -63,26 +64,31 @@ class JS_PUBLIC_API(ProfilingFrameIterat
     const js::jit::JitProfilingFrameIterator &jitIter() const {
         MOZ_ASSERT(!done());
         MOZ_ASSERT(isJit());
         return *reinterpret_cast<const js::jit::JitProfilingFrameIterator*>(storage_.addr());
     }
 
     void settle();
 
+    bool hasSampleBufferGen() const {
+        return sampleBufferGen_ != UINT32_MAX;
+    }
+
   public:
     struct RegisterState
     {
         RegisterState() : pc(nullptr), sp(nullptr), lr(nullptr) {}
         void *pc;
         void *sp;
         void *lr;
     };
 
-    ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state);
+    ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
+                           uint32_t sampleBufferGen = UINT32_MAX);
     ~ProfilingFrameIterator();
     void operator++();
     bool done() const { return !activation_; }
 
     // Assuming the stack grows down (we do), the return value:
     //  - always points into the stack
     //  - is weakly monotonically increasing (may be equal for successive frames)
     //  - will compare greater than newer native and psuedo-stack frame addresses
@@ -112,11 +118,23 @@ class JS_PUBLIC_API(ProfilingFrameIterat
     void iteratorConstruct();
     void iteratorDestroy();
     bool iteratorDone();
 
     bool isAsmJS() const;
     bool isJit() const;
 };
 
+/**
+ * After each sample run, this method should be called with the latest sample
+ * buffer generation, and the lapCount.  It will update corresponding fields on
+ * JSRuntime.
+ *
+ * See fields |profilerSampleBufferGen|, |profilerSampleBufferLapCount| on
+ * JSRuntime for documentation about what these values are used for.
+ */
+JS_FRIEND_API(void)
+UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
+                                       uint32_t lapCount);
+
 } // namespace JS
 
 #endif  /* js_ProfilingFrameIterator_h */
--- a/js/src/ds/SplayTree.h
+++ b/js/src/ds/SplayTree.h
@@ -60,16 +60,24 @@ class SplayTree
         enableCheckCoherency = false;
 #endif
     }
 
     bool empty() const {
         return !root;
     }
 
+    T *maybeLookup(const T &v)
+    {
+        if (!root)
+            return nullptr;
+        Node *last = lookup(v);
+        return (C::compare(v, last->item) == 0) ? &(last->item) : nullptr;
+    }
+
     bool contains(const T &v, T *res)
     {
         if (!root)
             return false;
         Node *last = lookup(v);
         splay(last);
         checkCoherency(root, nullptr);
         if (C::compare(v, last->item) == 0) {
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -797,21 +797,21 @@ gc::MarkValueRoot(JSTracer *trc, Value *
     MarkValueInternal(trc, v);
 }
 
 void
 TypeSet::MarkTypeRoot(JSTracer *trc, TypeSet::Type *v, const char *name)
 {
     JS_ROOT_MARKING_ASSERT(trc);
     trc->setTracingName(name);
-    if (v->isSingleton()) {
+    if (v->isSingletonUnchecked()) {
         JSObject *obj = v->singleton();
         MarkInternal(trc, &obj);
         *v = TypeSet::ObjectType(obj);
-    } else if (v->isGroup()) {
+    } else if (v->isGroupUnchecked()) {
         ObjectGroup *group = v->group();
         MarkInternal(trc, &group);
         *v = TypeSet::ObjectType(group);
     }
 }
 
 void
 gc::MarkValueRange(JSTracer *trc, size_t len, BarrieredBase<Value> *vec, const char *name)
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -272,17 +272,17 @@ BaselineCompiler::compile()
                     script->filename(), script->lineno(), baselineScript.get());
 
         // Generate profiling string.
         char *str = JitcodeGlobalEntry::createScriptString(cx, script);
         if (!str)
             return Method_Error;
 
         JitcodeGlobalEntry::BaselineEntry entry;
-        entry.init(code->raw(), code->rawEnd(), script, str);
+        entry.init(code, code->raw(), code->rawEnd(), script, str);
 
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return Method_Error;
         }
 
         // Mark the jitcode as having a bytecode map.
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -7492,17 +7492,17 @@ CodeGenerator::link(JSContext *cx, Compi
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     } else {
         // Add a dumy jitcodeGlobalTable entry.
         JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
+        entry.init(code, code->raw(), code->rawEnd());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             // Memory may have been allocated for the entry.
             entry.destroy();
             return false;
         }
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -490,16 +490,23 @@ jit::LazyLinkTopActivation(JSContext *cx
 JitRuntime::Mark(JSTracer *trc)
 {
     MOZ_ASSERT(!trc->runtime()->isHeapMinorCollecting());
     Zone *zone = trc->runtime()->atomsCompartment()->zone();
     for (gc::ZoneCellIterUnderGC i(zone, gc::FINALIZE_JITCODE); !i.done(); i.next()) {
         JitCode *code = i.get<JitCode>();
         MarkJitCodeRoot(trc, &code, "wrapper");
     }
+
+    // Mark references to the JS heap held by the jitcode global table.
+    if (trc->runtime()->hasJitRuntime() &&
+        trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
+    {
+        trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->mark(trc);
+    }
 }
 
 void
 JitCompartment::mark(JSTracer *trc, JSCompartment *compartment)
 {
     // Free temporary OSR buffer.
     trc->runtime()->jitRuntime()->freeOsrTempData();
 }
@@ -655,17 +662,17 @@ JitCode::fixupNurseryObjects(JSContext *
 void
 JitCode::finalize(FreeOp *fop)
 {
     JSRuntime *rt = fop->runtime();
 
     // If this jitcode has a bytecode map, de-register it.
     if (hasBytecodeMap_) {
         MOZ_ASSERT(rt->jitRuntime()->hasJitcodeGlobalTable());
-        rt->jitRuntime()->getJitcodeGlobalTable()->removeEntry(raw(), rt);
+        rt->jitRuntime()->getJitcodeGlobalTable()->releaseEntry(raw(), rt);
     }
 
     // Buffer can be freed at any time hereafter. Catch use-after-free bugs.
     // Don't do this if the Ion code is protected, as the signal handler will
     // deadlock trying to reacquire the interrupt lock.
     memset(code_, JS_SWEPT_CODE_PATTERN, bufferSize_);
     code_ = nullptr;
 
--- a/js/src/jit/IonCaches.cpp
+++ b/js/src/jit/IonCaches.cpp
@@ -426,30 +426,30 @@ IonCache::linkAndAttachStub(JSContext *c
     writePerfSpewerJitCodeProfile(code, "IonCache");
 #endif
 
     attachStub(masm, attacher, code);
 
     // Add entry to native => bytecode mapping for this stub if needed.
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime())) {
         JitcodeGlobalEntry::IonCacheEntry entry;
-        entry.init(code->raw(), code->rawEnd(), rejoinAddress());
+        entry.init(code, code->raw(), code->rawEnd(), rejoinAddress());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return false;
         }
 
         // Mark the jitcode as having a bytecode map.
         code->setHasBytecodeMap();
     } else {
         JitcodeGlobalEntry::DummyEntry entry;
-        entry.init(code->raw(), code->rawEnd());
+        entry.init(code, code->raw(), code->rawEnd());
 
         // Add entry to the global table.
         JitcodeGlobalTable *globalTable = cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
         if (!globalTable->addEntry(entry, cx->runtime())) {
             entry.destroy();
             return false;
         }
 
--- a/js/src/jit/JitFrames.cpp
+++ b/js/src/jit/JitFrames.cpp
@@ -2851,17 +2851,16 @@ inline ReturnType
 GetPreviousRawFrame(FrameType *frame)
 {
     size_t prevSize = frame->prevFrameLocalSize() + FrameType::Size();
     return (ReturnType) (((uint8_t *) frame) + prevSize);
 }
 
 JitProfilingFrameIterator::JitProfilingFrameIterator(void *exitFrame)
 {
-    // Exit frame was en
     ExitFrameLayout *frame = (ExitFrameLayout *) exitFrame;
     FrameType prevType = frame->prevType();
 
     if (prevType == JitFrame_IonJS || prevType == JitFrame_BaselineJS ||
         prevType == JitFrame_Unwound_IonJS)
     {
         returnAddressToFp_ = frame->returnAddress();
         fp_ = GetPreviousRawFrame<ExitFrameLayout, uint8_t *>(frame);
--- a/js/src/jit/JitcodeMap.cpp
+++ b/js/src/jit/JitcodeMap.cpp
@@ -2,18 +2,20 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/JitcodeMap.h"
 
 #include "mozilla/DebugOnly.h"
+#include "mozilla/MathAlgorithms.h"
 #include "mozilla/UniquePtr.h"
 #include "jsprf.h"
+#include "gc/Marking.h"
 
 #include "jit/BaselineJIT.h"
 #include "jit/JitSpewer.h"
 
 #include "js/Vector.h"
 #include "vm/SPSProfiler.h"
 #include "jsscriptinlines.h"
 
@@ -370,55 +372,374 @@ JitcodeGlobalEntry::createScriptString(J
     return str;
 }
 
 bool
 JitcodeGlobalTable::lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
 {
     MOZ_ASSERT(result);
 
-    // Construct a JitcodeGlobalEntry::Query to do the lookup
-    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
+    JitcodeGlobalEntry *entry = lookupInternal(ptr);
+    if (!entry)
+        return false;
+
+    *result = *entry;
+    return true;
+}
 
-    // Lookups on tree does mutation.  Suppress sampling when this is happening.
-    AutoSuppressProfilerSampling suppressSampling(rt);
-    return tree_.contains(query, result);
+bool
+JitcodeGlobalTable::lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
+                                     uint32_t sampleBufferGen)
+{
+    MOZ_ASSERT(result);
+
+    JitcodeGlobalEntry *entry = lookupInternal(ptr);
+    if (!entry)
+        return false;
+
+    entry->setGeneration(sampleBufferGen);
+
+    *result = *entry;
+    return true;
 }
 
-void
-JitcodeGlobalTable::lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt)
+JitcodeGlobalEntry *
+JitcodeGlobalTable::lookupInternal(void *ptr)
 {
-    mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
-    MOZ_ASSERT(success);
+    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(ptr);
+    JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
+    searchInternal(query, searchTower);
+
+    if (searchTower[0] == nullptr) {
+        // Check startTower
+        if (startTower_[0] == nullptr)
+            return nullptr;
+
+        MOZ_ASSERT(startTower_[0]->compareTo(query) >= 0);
+        int cmp = startTower_[0]->compareTo(query);
+        MOZ_ASSERT(cmp >= 0);
+        return (cmp == 0) ? startTower_[0] : nullptr;
+    }
+
+    JitcodeGlobalEntry *bottom = searchTower[0];
+    MOZ_ASSERT(bottom->compareTo(query) < 0);
+
+    JitcodeGlobalEntry *bottomNext = bottom->tower_->next(0);
+    if (bottomNext == nullptr)
+        return nullptr;
+
+    int cmp = bottomNext->compareTo(query);
+    MOZ_ASSERT(cmp >= 0);
+    return (cmp == 0) ? bottomNext : nullptr;
 }
 
 bool
 JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt)
 {
-    // Suppress profiler sampling while table is being mutated.
+    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
+
+    JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
+    searchInternal(entry, searchTower);
+
+    // Allocate a new entry and tower.
+    JitcodeSkiplistTower *newTower = allocateTower(generateTowerHeight());
+    if (!newTower)
+        return false;
+
+    JitcodeGlobalEntry *newEntry = allocateEntry();
+    if (!newEntry)
+        return false;
+
+    *newEntry = entry;
+    newEntry->tower_ = newTower;
+
+    // Suppress profiler sampling while skiplist is being mutated.
     AutoSuppressProfilerSampling suppressSampling(rt);
 
-    MOZ_ASSERT(entry.isIon() || entry.isBaseline() || entry.isIonCache() || entry.isDummy());
-    return tree_.insert(entry);
+    // Link up entry with forward entries taken from tower.
+    for (int level = newTower->height() - 1; level >= 0; level--) {
+        JitcodeGlobalEntry *searchTowerEntry = searchTower[level];
+        if (searchTowerEntry) {
+            MOZ_ASSERT(searchTowerEntry->compareTo(*newEntry) < 0);
+            JitcodeGlobalEntry *searchTowerNextEntry = searchTowerEntry->tower_->next(level);
+
+            MOZ_ASSERT_IF(searchTowerNextEntry, searchTowerNextEntry->compareTo(*newEntry) > 0);
+
+            newTower->setNext(level, searchTowerNextEntry);
+            searchTowerEntry->tower_->setNext(level, newEntry);
+        } else {
+            newTower->setNext(level, startTower_[level]);
+            startTower_[level] = newEntry;
+        }
+    }
+    skiplistSize_++;
+    // verifySkiplist(); - disabled for release.
+    return true;
 }
 
 void
 JitcodeGlobalTable::removeEntry(void *startAddr, JSRuntime *rt)
 {
-    // Suppress profiler sampling while table is being mutated.
-    AutoSuppressProfilerSampling suppressSampling(rt);
+    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
+    JitcodeGlobalEntry *searchTower[JitcodeSkiplistTower::MAX_HEIGHT];
+    searchInternal(query, searchTower);
+
+    JitcodeGlobalEntry *queryEntry;
+    if (searchTower[0]) {
+        MOZ_ASSERT(searchTower[0]->compareTo(query) < 0);
+        queryEntry = searchTower[0]->tower_->next(0);
+    } else {
+        MOZ_ASSERT(startTower_[0]);
+        queryEntry = startTower_[0];
+    }
+    MOZ_ASSERT(queryEntry->compareTo(query) == 0);
+
+    {
+        // Suppress profiler sampling while table is being mutated.
+        AutoSuppressProfilerSampling suppressSampling(rt);
+
+        // Unlink query entry.
+        for (int level = queryEntry->tower_->height() - 1; level >= 0; level--) {
+            JitcodeGlobalEntry *searchTowerEntry = searchTower[level];
+            if (searchTowerEntry) {
+                MOZ_ASSERT(searchTowerEntry);
+                searchTowerEntry->tower_->setNext(level, queryEntry->tower_->next(level));
+            } else {
+                startTower_[level] = queryEntry->tower_->next(level);
+            }
+        }
+        skiplistSize_--;
+        // verifySkiplist(); - disabled for release.
+    }
+
+    // Entry has been unlinked.
+    queryEntry->destroy();
+    queryEntry->tower_->addToFreeList(&(freeTowers_[queryEntry->tower_->height() - 1]));
+    queryEntry->tower_ = nullptr;
+    *queryEntry = JitcodeGlobalEntry();
+    queryEntry->addToFreeList(&freeEntries_);
+}
+
+void
+JitcodeGlobalTable::releaseEntry(void *startAddr, JSRuntime *rt)
+{
+    mozilla::DebugOnly<JitcodeGlobalEntry *> entry = lookupInternal(startAddr);
+    mozilla::DebugOnly<uint32_t> gen = rt->profilerSampleBufferGen();
+    mozilla::DebugOnly<uint32_t> lapCount = rt->profilerSampleBufferLapCount();
+    MOZ_ASSERT(entry);
+    MOZ_ASSERT_IF(gen != UINT32_MAX, !entry->isSampled(gen, lapCount));
+    removeEntry(startAddr, rt);
+}
+
+void
+JitcodeGlobalTable::searchInternal(const JitcodeGlobalEntry &query, JitcodeGlobalEntry **towerOut)
+{
+    JitcodeGlobalEntry *cur = nullptr;
+    for (int level = JitcodeSkiplistTower::MAX_HEIGHT - 1; level >= 0; level--) {
+        JitcodeGlobalEntry *entry = searchAtHeight(level, cur, query);
+        MOZ_ASSERT_IF(entry == nullptr, cur == nullptr);
+        towerOut[level] = entry;
+        cur = entry;
+    }
+
+    // Validate the resulting tower.
+#ifdef DEBUG
+    for (int level = JitcodeSkiplistTower::MAX_HEIGHT - 1; level >= 0; level--) {
+        if (towerOut[level] == nullptr) {
+            // If we got NULL for a given level, then we should have gotten NULL
+            // for the level above as well.
+            MOZ_ASSERT_IF(unsigned(level) < (JitcodeSkiplistTower::MAX_HEIGHT - 1),
+                          towerOut[level + 1] == nullptr);
+            continue;
+        }
+
+        JitcodeGlobalEntry *cur = towerOut[level];
+
+        // Non-null result at a given level must sort < query.
+        MOZ_ASSERT(cur->compareTo(query) < 0);
+
+        // The entry must have a tower height that accommodates level.
+        if (!cur->tower_->next(level))
+            continue;
+
+        JitcodeGlobalEntry *next = cur->tower_->next(level);
+
+        // Next entry must have tower height that accommodates level.
+        MOZ_ASSERT(unsigned(level) < next->tower_->height());
+
+        // Next entry must sort >= query.
+        MOZ_ASSERT(next->compareTo(query) >= 0);
+    }
+#endif // DEBUG
+}
+
+JitcodeGlobalEntry *
+JitcodeGlobalTable::searchAtHeight(unsigned level, JitcodeGlobalEntry *start,
+                                   const JitcodeGlobalEntry &query)
+{
+    JitcodeGlobalEntry *cur = start;
+
+    // If starting with nullptr, use the start tower.
+    if (start == nullptr) {
+        cur = startTower_[level];
+        if (cur == nullptr || cur->compareTo(query) >= 0)
+            return nullptr;
+    }
+
+    // Keep skipping at |level| until we reach an entry < query whose
+    // successor is an entry >= query.
+    for (;;) {
+        JitcodeGlobalEntry *next = cur->tower_->next(level);
+        if (next == nullptr || next->compareTo(query) >= 0)
+            return cur;
+
+        cur = next;
+    }
+}
+
+unsigned
+JitcodeGlobalTable::generateTowerHeight()
+{
+    // Implementation taken from Hars L. and Pteruska G.,
+    // "Pseudorandom Recursions: Small and fast Pseudorandom number generators for
+    //  embedded applications."
+    rand_ ^= mozilla::RotateLeft(rand_, 5) ^ mozilla::RotateLeft(rand_, 24);
+    rand_ += 0x37798849;
 
-    JitcodeGlobalEntry query = JitcodeGlobalEntry::MakeQuery(startAddr);
-    JitcodeGlobalEntry result;
-    mozilla::DebugOnly<bool> success = tree_.contains(query, &result);
-    MOZ_ASSERT(success);
+    // Return number of lowbit zeros in new randval.
+    unsigned result = 0;
+    for (unsigned i = 0; i < 32; i++) {
+        if ((rand_ >> i) & 0x1)
+            break;
+        result++;
+    }
+    return result + 1;
+}
+
+JitcodeSkiplistTower *
+JitcodeGlobalTable::allocateTower(unsigned height)
+{
+    MOZ_ASSERT(height >= 1);
+    JitcodeSkiplistTower *tower = JitcodeSkiplistTower::PopFromFreeList(&freeTowers_[height - 1]);
+    if (tower)
+        return tower;
+
+    size_t size = JitcodeSkiplistTower::CalculateSize(height);
+    tower = (JitcodeSkiplistTower *) alloc_.alloc(size);
+    if (!tower)
+        return nullptr;
+
+    return new (tower) JitcodeSkiplistTower(height);
+}
+
+JitcodeGlobalEntry *
+JitcodeGlobalTable::allocateEntry()
+{
+    JitcodeGlobalEntry *entry = JitcodeGlobalEntry::PopFromFreeList(&freeEntries_);
+    if (entry)
+        return entry;
+
+    return alloc_.new_<JitcodeGlobalEntry>();
+}
+
+#ifdef DEBUG
+void
+JitcodeGlobalTable::verifySkiplist()
+{
+    JitcodeGlobalEntry *curTower[JitcodeSkiplistTower::MAX_HEIGHT];
+    for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
+        curTower[i] = startTower_[i];
+
+    uint32_t count = 0;
+    JitcodeGlobalEntry *curEntry = startTower_[0];
+    while (curEntry) {
+        count++;
+        unsigned curHeight = curEntry->tower_->height();
+        MOZ_ASSERT(curHeight >= 1);
+
+        for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++) {
+            if (i < curHeight) {
+                MOZ_ASSERT(curTower[i] == curEntry);
+                JitcodeGlobalEntry *nextEntry = curEntry->tower_->next(i);
+                MOZ_ASSERT_IF(nextEntry, curEntry->compareTo(*nextEntry) < 0);
+                curTower[i] = nextEntry;
+            } else {
+                MOZ_ASSERT_IF(curTower[i], curTower[i]->compareTo(*curEntry) > 0);
+            }
+        }
+        curEntry = curEntry->tower_->next(0);
+    }
 
-    // Destroy entry before removing it from tree.
-    result.destroy();
-    tree_.remove(query);
+    MOZ_ASSERT(count == skiplistSize_);
+}
+#endif // DEBUG
+
+struct JitcodeMapEntryTraceCallback
+{
+    JSTracer *trc;
+    uint32_t gen;
+    uint32_t lapCount;
+
+    explicit JitcodeMapEntryTraceCallback(JSTracer *trc)
+      : trc(trc),
+        gen(trc->runtime()->profilerSampleBufferGen()),
+        lapCount(trc->runtime()->profilerSampleBufferLapCount())
+    {
+        if (!trc->runtime()->spsProfiler.enabled())
+            gen = UINT32_MAX;
+    }
+
+    void operator()(JitcodeGlobalEntry &entry) {
+        // If an entry is not sampled, reset its generation to
+        // the invalid generation, and skip it.
+        if (!entry.isSampled(gen, lapCount)) {
+            entry.setGeneration(UINT32_MAX);
+            return;
+        }
+
+        // Mark jitcode pointed to by this entry.
+        entry.baseEntry().markJitcode(trc);
+
+        // Mark ion entry if necessary.
+        if (entry.isIon())
+            entry.ionEntry().mark(trc);
+    }
+};
+
+void
+JitcodeGlobalTable::mark(JSTracer *trc)
+{
+    AutoSuppressProfilerSampling suppressSampling(trc->runtime());
+    JitcodeMapEntryTraceCallback traceCallback(trc);
+
+    // Find start entry.
+    JitcodeGlobalEntry *entry = startTower_[0];
+    while (entry != nullptr) {
+        traceCallback(*entry);
+        entry = entry->tower_->next(0);
+    }
+}
+
+void
+JitcodeGlobalEntry::BaseEntry::markJitcode(JSTracer *trc)
+{
+    MarkJitCodeRoot(trc, &jitcode_, "jitcodeglobaltable-baseentry-jitcode");
+}
+
+void
+JitcodeGlobalEntry::IonEntry::mark(JSTracer *trc)
+{
+    if (!optsAllTypes_)
+        return;
+
+    for (IonTrackedTypeWithAddendum *iter = optsAllTypes_->begin();
+         iter != optsAllTypes_->end(); iter++)
+    {
+        TypeSet::MarkTypeRoot(trc, &(iter->type), "jitcodeglobaltable-ionentry-type");
+    }
 }
 
 /* static */ void
 JitcodeRegionEntry::WriteHead(CompactBufferWriter &writer,
                               uint32_t nativeOffset, uint8_t scriptDepth)
 {
     writer.writeUnsigned(nativeOffset);
     writer.writeByte(scriptDepth);
@@ -840,17 +1161,17 @@ JitcodeIonTable::makeIonEntry(JSContext 
     if (!mem)
         return false;
 
     // Keep allocated profiling strings on destruct.
     autoFreeProfilingStrings.keepStrings();
 
     SizedScriptList *scriptList = new (mem) SizedScriptList(numScripts, scripts,
                                                             &profilingStrings[0]);
-    out.init(code->raw(), code->rawEnd(), scriptList, this);
+    out.init(code, code->raw(), code->rawEnd(), scriptList, this);
     return true;
 }
 
 uint32_t
 JitcodeIonTable::findRegionEntry(uint32_t nativeOffset) const
 {
     static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
     uint32_t regions = numRegions();
--- a/js/src/jit/JitcodeMap.h
+++ b/js/src/jit/JitcodeMap.h
@@ -2,19 +2,19 @@
  * vim: set ts=8 sts=4 et sw=4 tw=99:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef jit_JitcodeMap_h
 #define jit_JitcodeMap_h
 
-#include "ds/SplayTree.h"
 #include "jit/CompactBuffer.h"
 #include "jit/CompileInfo.h"
+#include "jit/ExecutableAllocator.h"
 #include "jit/OptimizationTracking.h"
 #include "jit/shared/CodeGenerator-shared.h"
 
 namespace js {
 namespace jit {
 
 /*
  * The Ion jitcode map implements tables to allow mapping from addresses in ion jitcode
@@ -26,21 +26,107 @@ namespace jit {
  * At the top level, a global splay-tree of JitcodeGlobalEntry describings the mapping for
  * each individual IonCode script generated by compiles.  The entries are ordered by their
  * nativeStartAddr.
  *
  * Every entry in the table is of fixed size, but there are different entry types,
  * distinguished by the kind field.
  */
 
+class JitcodeGlobalTable;
 class JitcodeIonTable;
 class JitcodeRegionEntry;
 
+class JitcodeGlobalEntry;
+
+class JitcodeSkiplistTower
+{
+  public:
+    static const unsigned MAX_HEIGHT = 32;
+
+  private:
+    uint8_t height_;
+    bool isFree_;
+    JitcodeGlobalEntry *ptrs_[1];
+
+  public:
+    explicit JitcodeSkiplistTower(unsigned height)
+      : height_(height),
+        isFree_(false)
+    {
+        MOZ_ASSERT(height >= 1 && height <= MAX_HEIGHT);
+        clearPtrs();
+    }
+
+    unsigned height() const {
+        return height_;
+    }
+
+    JitcodeGlobalEntry **ptrs(unsigned level) {
+        return ptrs_;
+    }
+
+    JitcodeGlobalEntry *next(unsigned level) const {
+        MOZ_ASSERT(!isFree_);
+        MOZ_ASSERT(level < height());
+        return ptrs_[level];
+    }
+    void setNext(unsigned level, JitcodeGlobalEntry *entry) {
+        MOZ_ASSERT(!isFree_);
+        MOZ_ASSERT(level < height());
+        ptrs_[level] = entry;
+    }
+
+    //
+    // When stored in a free-list, towers use 'ptrs_[0]' to store a
+    // pointer to the next tower.  In this context only, 'ptrs_[0]'
+    // may refer to a |JitcodeSkiplistTower *| instead of a
+    // |JitcodeGlobalEntry *|.
+    //
+
+    void addToFreeList(JitcodeSkiplistTower **freeList) {
+        JitcodeSkiplistTower *nextFreeTower = *freeList;
+        MOZ_ASSERT_IF(nextFreeTower, nextFreeTower->isFree_ &&
+                                     nextFreeTower->height() == height_);
+        ptrs_[0] = (JitcodeGlobalEntry *) nextFreeTower;
+        isFree_ = true;
+        *freeList = this;
+    }
+
+    static JitcodeSkiplistTower *PopFromFreeList(JitcodeSkiplistTower **freeList) {
+        if (!*freeList)
+            return nullptr;
+
+        JitcodeSkiplistTower *tower = *freeList;
+        MOZ_ASSERT(tower->isFree_);
+        JitcodeSkiplistTower *nextFreeTower = (JitcodeSkiplistTower *) tower->ptrs_[0];
+        tower->clearPtrs();
+        tower->isFree_ = false;
+        *freeList = nextFreeTower;
+        return tower;
+    }
+
+    static size_t CalculateSize(unsigned height) {
+        MOZ_ASSERT(height >= 1);
+        return sizeof(JitcodeSkiplistTower) +
+               (sizeof(JitcodeGlobalEntry *) * (height - 1));
+    }
+
+  private:
+    void clearPtrs() {
+        for (unsigned i = 0; i < height_; i++)
+            ptrs_[i] = nullptr;
+    }
+};
+
+
 class JitcodeGlobalEntry
 {
+    friend class JitcodeGlobalTable;
+
   public:
     enum Kind {
         INVALID = 0,
         Ion,
         Baseline,
         IonCache,
         Dummy,
         Query,
@@ -53,54 +139,81 @@ class JitcodeGlobalEntry
         jsbytecode *pc;
         BytecodeLocation(JSScript *script, jsbytecode *pc) : script(script), pc(pc) {}
     };
     typedef Vector<BytecodeLocation, 0, SystemAllocPolicy> BytecodeLocationVector;
     typedef Vector<const char *, 0, SystemAllocPolicy> ProfileStringVector;
 
     struct BaseEntry
     {
+        JitCode *jitcode_;
         void *nativeStartAddr_;
         void *nativeEndAddr_;
-        Kind kind_;
+        uint32_t gen_;
+        Kind kind_ : 7;
 
         void init() {
+            jitcode_ = nullptr;
             nativeStartAddr_ = nullptr;
             nativeEndAddr_ = nullptr;
+            gen_ = UINT32_MAX;
             kind_ = INVALID;
         }
 
-        void init(Kind kind, void *nativeStartAddr, void *nativeEndAddr) {
+        void init(Kind kind, JitCode *code,
+                  void *nativeStartAddr, void *nativeEndAddr)
+        {
+            MOZ_ASSERT_IF(kind != Query, code);
             MOZ_ASSERT(nativeStartAddr);
             MOZ_ASSERT(nativeEndAddr);
             MOZ_ASSERT(kind > INVALID && kind < LIMIT);
+            jitcode_ = code;
             nativeStartAddr_ = nativeStartAddr;
             nativeEndAddr_ = nativeEndAddr;
+            gen_ = UINT32_MAX;
             kind_ = kind;
         }
 
+        uint32_t generation() const {
+            return gen_;
+        }
+        void setGeneration(uint32_t gen) {
+            gen_ = gen;
+        }
+        bool isSampled(uint32_t currentGen, uint32_t lapCount) {
+            if (gen_ == UINT32_MAX || currentGen == UINT32_MAX)
+                return false;
+            MOZ_ASSERT(currentGen >= gen_);
+            return (currentGen - gen_) <= lapCount;
+        }
+
         Kind kind() const {
             return kind_;
         }
+        JitCode *jitcode() const {
+            return jitcode_;
+        }
         void *nativeStartAddr() const {
             return nativeStartAddr_;
         }
         void *nativeEndAddr() const {
             return nativeEndAddr_;
         }
 
         bool startsBelowPointer(void *ptr) const {
             return ((uint8_t *)nativeStartAddr()) <= ((uint8_t *) ptr);
         }
         bool endsAbovePointer(void *ptr) const {
             return ((uint8_t *)nativeEndAddr()) > ((uint8_t *) ptr);
         }
         bool containsPointer(void *ptr) const {
             return startsBelowPointer(ptr) && endsAbovePointer(ptr);
         }
+
+        void markJitcode(JSTracer *trc);
     };
 
     struct IonEntry : public BaseEntry
     {
         // regionTable_ points to the start of the region table within the
         // packed map for compile represented by this entry.  Since the
         // region table occurs at the tail of the memory region, this pointer
         // points somewhere inside the region memory space, and not to the start
@@ -141,22 +254,22 @@ class JitcodeGlobalEntry
 
             static uint32_t AllocSizeFor(uint32_t nscripts) {
                 return sizeof(SizedScriptList) + (nscripts * sizeof(ScriptNamePair));
             }
         };
 
         SizedScriptList *scriptList_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr,
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
                   SizedScriptList *scriptList, JitcodeIonTable *regionTable)
         {
             MOZ_ASSERT(scriptList);
             MOZ_ASSERT(regionTable);
-            BaseEntry::init(Ion, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(Ion, code, nativeStartAddr, nativeEndAddr);
             regionTable_ = regionTable;
             scriptList_ = scriptList;
             optsRegionTable_ = nullptr;
             optsTypesTable_ = nullptr;
             optsAllTypes_ = nullptr;
             optsAttemptsTable_ = nullptr;
         }
 
@@ -238,33 +351,36 @@ class JitcodeGlobalEntry
         }
 
         const IonTrackedTypeVector *allTrackedTypes() {
             MOZ_ASSERT(hasTrackedOptimizations());
             return optsAllTypes_;
         }
 
         mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(void *ptr);
+
+        void mark(JSTracer *trc);
     };
 
     struct BaselineEntry : public BaseEntry
     {
         JSScript *script_;
         const char *str_;
 
         // Last location that caused Ion to abort compilation and the reason
         // therein, if any. Only actionable aborts are tracked. Internal
         // errors like OOMs are not.
         jsbytecode *ionAbortPc_;
         const char *ionAbortMessage_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, JSScript *script, const char *str)
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
+                  JSScript *script, const char *str)
         {
             MOZ_ASSERT(script != nullptr);
-            BaseEntry::init(Baseline, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(Baseline, code, nativeStartAddr, nativeEndAddr);
             script_ = script;
             str_ = str;
         }
 
         JSScript *script() const {
             return script_;
         }
 
@@ -295,20 +411,21 @@ class JitcodeGlobalEntry
         void youngestFrameLocationAtAddr(JSRuntime *rt, void *ptr,
                                          JSScript **script, jsbytecode **pc) const;
     };
 
     struct IonCacheEntry : public BaseEntry
     {
         void *rejoinAddr_;
 
-        void init(void *nativeStartAddr, void *nativeEndAddr, void *rejoinAddr)
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr,
+                  void *rejoinAddr)
         {
             MOZ_ASSERT(rejoinAddr != nullptr);
-            BaseEntry::init(IonCache, nativeStartAddr, nativeEndAddr);
+            BaseEntry::init(IonCache, code, nativeStartAddr, nativeEndAddr);
             rejoinAddr_ = rejoinAddr;
         }
 
         void *rejoinAddr() const {
             return rejoinAddr_;
         }
 
         void destroy() {}
@@ -323,18 +440,18 @@ class JitcodeGlobalEntry
                                          JSScript **script, jsbytecode **pc) const;
     };
 
     // Dummy entries are created for jitcode generated when profiling is not turned on,
     // so that they have representation in the global table if they are on the
     // stack when profiling is enabled.
     struct DummyEntry : public BaseEntry
     {
-        void init(void *nativeStartAddr, void *nativeEndAddr) {
-            BaseEntry::init(Dummy, nativeStartAddr, nativeEndAddr);
+        void init(JitCode *code, void *nativeStartAddr, void *nativeEndAddr) {
+            BaseEntry::init(Dummy, code, nativeStartAddr, nativeEndAddr);
         }
 
         void destroy() {}
 
         bool callStackAtAddr(JSRuntime *rt, void *ptr, BytecodeLocationVector &results,
                              uint32_t *depth) const
         {
             return true;
@@ -355,25 +472,27 @@ class JitcodeGlobalEntry
     };
 
     // QueryEntry is never stored in the table, just used for queries
     // where an instance of JitcodeGlobalEntry is required to do tree
     // lookups.
     struct QueryEntry : public BaseEntry
     {
         void init(void *addr) {
-            BaseEntry::init(Query, addr, addr);
+            BaseEntry::init(Query, nullptr, addr, addr);
         }
         uint8_t *addr() const {
             return reinterpret_cast<uint8_t *>(nativeStartAddr());
         }
         void destroy() {}
     };
 
   private:
+    JitcodeSkiplistTower *tower_;
+
     union {
         // Shadowing BaseEntry instance to allow access to base fields
         // and type extraction.
         BaseEntry base_;
 
         // The most common entry type: describing jitcode generated by
         // Ion main-line code.
         IonEntry ion_;
@@ -388,37 +507,49 @@ class JitcodeGlobalEntry
         DummyEntry dummy_;
 
         // When doing queries on the SplayTree for particular addresses,
         // the query addresses are representd using a QueryEntry.
         QueryEntry query_;
     };
 
   public:
-    JitcodeGlobalEntry() {
+    JitcodeGlobalEntry()
+      : tower_(nullptr)
+    {
         base_.init();
     }
 
-    explicit JitcodeGlobalEntry(const IonEntry &ion) {
+    explicit JitcodeGlobalEntry(const IonEntry &ion)
+      : tower_(nullptr)
+    {
         ion_ = ion;
     }
 
-    explicit JitcodeGlobalEntry(const BaselineEntry &baseline) {
+    explicit JitcodeGlobalEntry(const BaselineEntry &baseline)
+      : tower_(nullptr)
+    {
         baseline_ = baseline;
     }
 
-    explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache) {
+    explicit JitcodeGlobalEntry(const IonCacheEntry &ionCache)
+      : tower_(nullptr)
+    {
         ionCache_ = ionCache;
     }
 
-    explicit JitcodeGlobalEntry(const DummyEntry &dummy) {
+    explicit JitcodeGlobalEntry(const DummyEntry &dummy)
+      : tower_(nullptr)
+    {
         dummy_ = dummy;
     }
 
-    explicit JitcodeGlobalEntry(const QueryEntry &query) {
+    explicit JitcodeGlobalEntry(const QueryEntry &query)
+      : tower_(nullptr)
+    {
         query_ = query;
     }
 
     static JitcodeGlobalEntry MakeQuery(void *ptr) {
         QueryEntry query;
         query.init(ptr);
         return JitcodeGlobalEntry(query);
     }
@@ -440,23 +571,36 @@ class JitcodeGlobalEntry
           case Query:
             queryEntry().destroy();
             break;
           default:
             MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
         }
     }
 
+    JitCode *jitcode() const {
+        return baseEntry().jitcode();
+    }
     void *nativeStartAddr() const {
         return base_.nativeStartAddr();
     }
     void *nativeEndAddr() const {
         return base_.nativeEndAddr();
     }
 
+    uint32_t generation() const {
+        return baseEntry().generation();
+    }
+    void setGeneration(uint32_t gen) {
+        baseEntry().setGeneration(gen);
+    }
+    bool isSampled(uint32_t currentGen, uint32_t lapCount) {
+        return baseEntry().isSampled(currentGen, lapCount);
+    }
+
     bool startsBelowPointer(void *ptr) const {
         return base_.startsBelowPointer(ptr);
     }
     bool endsAbovePointer(void *ptr) const {
         return base_.endsAbovePointer(ptr);
     }
     bool containsPointer(void *ptr) const {
         return base_.containsPointer(ptr);
@@ -473,32 +617,39 @@ class JitcodeGlobalEntry
 
         return false;
     }
 
     Kind kind() const {
         return base_.kind();
     }
 
+    bool isValid() const {
+        return (kind() > INVALID) && (kind() < LIMIT);
+    }
     bool isIon() const {
         return kind() == Ion;
     }
     bool isBaseline() const {
         return kind() == Baseline;
     }
     bool isIonCache() const {
         return kind() == IonCache;
     }
     bool isDummy() const {
         return kind() == Dummy;
     }
     bool isQuery() const {
         return kind() == Query;
     }
 
+    BaseEntry &baseEntry() {
+        MOZ_ASSERT(isValid());
+        return base_;
+    }
     IonEntry &ionEntry() {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     BaselineEntry &baselineEntry() {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
@@ -510,16 +661,20 @@ class JitcodeGlobalEntry
         MOZ_ASSERT(isDummy());
         return dummy_;
     }
     QueryEntry &queryEntry() {
         MOZ_ASSERT(isQuery());
         return query_;
     }
 
+    const BaseEntry &baseEntry() const {
+        MOZ_ASSERT(isValid());
+        return base_;
+    }
     const IonEntry &ionEntry() const {
         MOZ_ASSERT(isIon());
         return ion_;
     }
     const BaselineEntry &baselineEntry() const {
         MOZ_ASSERT(isBaseline());
         return baseline_;
     }
@@ -595,16 +750,19 @@ class JitcodeGlobalEntry
     }
 
     // Figure out the number of the (JSScript *, jsbytecode *) pairs that are active
     // at this location.
     uint32_t lookupInlineCallDepth(void *ptr);
 
     // Compare two global entries.
     static int compare(const JitcodeGlobalEntry &ent1, const JitcodeGlobalEntry &ent2);
+    int compareTo(const JitcodeGlobalEntry &other) {
+        return compare(*this, other);
+    }
 
     // Compute a profiling string for a given script.
     static char *createScriptString(JSContext *cx, JSScript *script, size_t *length=nullptr);
 
     bool hasTrackedOptimizations() const {
         switch (kind()) {
           case Ion:
             return ionEntry().hasTrackedOptimizations();
@@ -638,66 +796,132 @@ class JitcodeGlobalEntry
 
     IonTrackedOptimizationsTypeInfo trackedOptimizationTypeInfo(uint8_t index) {
         return ionEntry().trackedOptimizationTypeInfo(index);
     }
 
     const IonTrackedTypeVector *allTrackedTypes() {
         return ionEntry().allTrackedTypes();
     }
+
+    //
+    // When stored in a free-list, entries use 'tower_' to store a
+    // pointer to the next entry.  In this context only, 'tower_'
+    // may refer to a |JitcodeGlobalEntry *| instead of a
+    // |JitcodeSkiplistTower *|.
+    //
+
+    void addToFreeList(JitcodeGlobalEntry **freeList) {
+        MOZ_ASSERT(!isValid());
+
+        JitcodeGlobalEntry *nextFreeEntry = *freeList;
+        MOZ_ASSERT_IF(nextFreeEntry, !nextFreeEntry->isValid());
+
+        tower_ = (JitcodeSkiplistTower *) nextFreeEntry;
+        *freeList = this;
+    }
+
+    static JitcodeGlobalEntry *PopFromFreeList(JitcodeGlobalEntry **freeList) {
+        if (!*freeList)
+            return nullptr;
+
+        JitcodeGlobalEntry *entry = *freeList;
+        MOZ_ASSERT(!entry->isValid());
+        JitcodeGlobalEntry *nextFreeEntry = (JitcodeGlobalEntry *) entry->tower_;
+        entry->tower_ = nullptr;
+        *freeList = nextFreeEntry;
+        return entry;
+    }
 };
 
 /*
  * Global table of JitcodeGlobalEntry values sorted by native address range.
  */
 class JitcodeGlobalTable
 {
-  public:
-    typedef SplayTree<JitcodeGlobalEntry, JitcodeGlobalEntry> EntryTree;
-
-    typedef Vector<JitcodeGlobalEntry, 0, SystemAllocPolicy> EntryVector;
-
   private:
     static const size_t LIFO_CHUNK_SIZE = 16 * 1024;
-    LifoAlloc treeAlloc_;
-    EntryTree tree_;
-    EntryVector entries_;
+
+    LifoAlloc alloc_;
+    JitcodeGlobalEntry *freeEntries_;
+    uint32_t rand_;
+    uint32_t skiplistSize_;
+
+    JitcodeGlobalEntry *startTower_[JitcodeSkiplistTower::MAX_HEIGHT];
+    JitcodeSkiplistTower *freeTowers_[JitcodeSkiplistTower::MAX_HEIGHT];
 
   public:
-    JitcodeGlobalTable() : treeAlloc_(LIFO_CHUNK_SIZE), tree_(&treeAlloc_), entries_() {
-        // Always checking coherency in DEBUG builds may cause tests to time
-        // out under --baseline-eager or --ion-eager.
-        tree_.disableCheckCoherency();
+    JitcodeGlobalTable()
+      : alloc_(LIFO_CHUNK_SIZE), freeEntries_(nullptr), rand_(0), skiplistSize_(0)
+    {
+        for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
+            startTower_[i] = nullptr;
+        for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++)
+            freeTowers_[i] = nullptr;
     }
     ~JitcodeGlobalTable() {}
 
     bool empty() const {
-        return tree_.empty();
+        return skiplistSize_ == 0;
     }
 
     bool lookup(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
-    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt);
+    bool lookupForSampler(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt,
+                          uint32_t sampleBufferGen);
+
+    void lookupInfallible(void *ptr, JitcodeGlobalEntry *result, JSRuntime *rt) {
+        mozilla::DebugOnly<bool> success = lookup(ptr, result, rt);
+        MOZ_ASSERT(success);
+    }
 
     bool addEntry(const JitcodeGlobalEntry::IonEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::BaselineEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::IonCacheEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
     bool addEntry(const JitcodeGlobalEntry::DummyEntry &entry, JSRuntime *rt) {
         return addEntry(JitcodeGlobalEntry(entry), rt);
     }
 
     void removeEntry(void *startAddr, JSRuntime *rt);
+    void releaseEntry(void *startAddr, JSRuntime *rt);
+
+    void mark(JSTracer *trc);
 
   private:
     bool addEntry(const JitcodeGlobalEntry &entry, JSRuntime *rt);
+
+    JitcodeGlobalEntry *lookupInternal(void *ptr);
+
+    // Initialize towerOut such that towerOut[i] (for i in [0, MAX_HEIGHT-1])
+    // is a JitcodeGlobalEntry that is sorted to be <query, whose successor at
+    // level i is either null, or sorted to be >= query.
+    //
+    // If entry with the given properties does not exist for level i, then
+    // towerOut[i] is initialized to nullptr.
+    void searchInternal(const JitcodeGlobalEntry &query, JitcodeGlobalEntry **towerOut);
+
+    JitcodeGlobalEntry *searchAtHeight(unsigned level, JitcodeGlobalEntry *start,
+                                       const JitcodeGlobalEntry &query);
+
+    // Calculate next random tower height.
+    unsigned generateTowerHeight();
+
+    JitcodeSkiplistTower *allocateTower(unsigned height);
+    JitcodeGlobalEntry *allocateEntry();
+
+#ifdef DEBUG
+    void verifySkiplist();
+#else
+    void verifySkiplist() {}
+#endif
 };
 
 
 /*
  * Container class for main jitcode table.
  * The Region table's memory is structured as follows:
  *
  *      +------------------------------------------------+   |
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -214,16 +214,17 @@
 
 #include "gc/FindSCCs.h"
 #include "gc/GCInternals.h"
 #include "gc/GCTrace.h"
 #include "gc/Marking.h"
 #include "gc/Memory.h"
 #include "jit/BaselineJIT.h"
 #include "jit/IonCode.h"
+#include "jit/JitcodeMap.h"
 #include "js/SliceBudget.h"
 #include "proxy/DeadObjectProxy.h"
 #include "vm/Debugger.h"
 #include "vm/ProxyObject.h"
 #include "vm/Shape.h"
 #include "vm/String.h"
 #include "vm/Symbol.h"
 #include "vm/TraceLogging.h"
@@ -5173,16 +5174,17 @@ GCRuntime::beginSweepPhase(bool lastGC)
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key().kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value().get().toObject());
         }
     }
 #endif
 
     DropStringWrappers(rt);
+
     findZoneGroups();
     endMarkingZoneGroup();
     beginSweepingZoneGroup();
 }
 
 bool
 ArenaLists::foregroundFinalize(FreeOp *fop, AllocKind thingKind, SliceBudget &sliceBudget,
                                SortedArenaList &sweepList)
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -115,16 +115,18 @@ ReturnZeroSize(const void *p)
 JSRuntime::JSRuntime(JSRuntime *parentRuntime)
   : mainThread(this),
     jitTop(nullptr),
     jitJSContext(nullptr),
     jitActivation(nullptr),
     jitStackLimit_(0xbad),
     activation_(nullptr),
     profilingActivation_(nullptr),
+    profilerSampleBufferGen_(0),
+    profilerSampleBufferLapCount_(1),
     asmJSActivationStack_(nullptr),
     parentRuntime(parentRuntime),
     interrupt_(false),
     telemetryCallback(nullptr),
     handlingSignal(false),
     interruptCallback(nullptr),
     exclusiveAccessLock(nullptr),
     exclusiveAccessOwner(nullptr),
@@ -367,16 +369,19 @@ JSRuntime::~JSRuntime()
          * Flag us as being destroyed. This allows the GC to free things like
          * interned atoms and Ion trampolines.
          */
         beingDestroyed_ = true;
 
         /* Allow the GC to release scripts that were being profiled. */
         profilingScripts = false;
 
+        /* Set the profiler sampler buffer generation to invalid. */
+        profilerSampleBufferGen_ = UINT32_MAX;
+
         JS::PrepareForFullGC(this);
         gc.gc(GC_NORMAL, JS::gcreason::DESTROY_RUNTIME);
     }
 
     /*
      * Clear the self-hosted global and delete self-hosted classes *after*
      * GC, as finalizers for objects check for clasp->finalize during GC.
      */
@@ -829,8 +834,16 @@ void
 js::AssertCurrentThreadCanLock(RuntimeLock which)
 {
     PerThreadData *pt = TlsPerThreadData.get();
     if (pt && pt->runtime_)
         pt->runtime_->assertCanLock(which);
 }
 
 #endif // DEBUG
+
+JS_FRIEND_API(void)
+JS::UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime *runtime, uint32_t generation,
+                                           uint32_t lapCount)
+{
+    runtime->setProfilerSampleBufferGen(generation);
+    runtime->updateProfilerSampleBufferLapCount(lapCount);
+}
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -636,16 +636,31 @@ struct JSRuntime : public JS::shadow::Ru
     js::Activation *activation_;
 
     /*
      * Points to the most recent profiling activation running on the
      * thread.
      */
     js::Activation * volatile profilingActivation_;
 
+    /*
+     * The profiler sampler generation after the latest sample.
+     *
+     * The lapCount indicates the largest number of 'laps'
+     * (wrapping from high to low) that occurred when writing entries
+     * into the sample buffer.  All JitcodeGlobalMap entries referenced
+     * from a given sample are assigned the generation of the sample buffer
+     * at the START of the run.  If multiple laps occur, then some entries
+     * (towards the end) will be written out with the "wrong" generation.
+     * The lapCount indicates the required fudge factor to use to compare
+     * entry generations with the sample buffer generation.
+     */
+    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferGen_;
+    mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> profilerSampleBufferLapCount_;
+
     /* See AsmJSActivation comment. */
     js::AsmJSActivation * volatile asmJSActivationStack_;
 
   public:
     js::Activation *const *addressOfActivation() const {
         return &activation_;
     }
     static unsigned offsetOfActivation() {
@@ -657,16 +672,41 @@ struct JSRuntime : public JS::shadow::Ru
     }
     void *addressOfProfilingActivation() {
         return (void*) &profilingActivation_;
     }
     static unsigned offsetOfProfilingActivation() {
         return offsetof(JSRuntime, profilingActivation_);
     }
 
+    uint32_t profilerSampleBufferGen() {
+        return profilerSampleBufferGen_;
+    }
+    void setProfilerSampleBufferGen(uint32_t gen) {
+        profilerSampleBufferGen_ = gen;
+    }
+
+    uint32_t profilerSampleBufferLapCount() {
+        MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);
+        return profilerSampleBufferLapCount_;
+    }
+    void updateProfilerSampleBufferLapCount(uint32_t lapCount) {
+        MOZ_ASSERT(profilerSampleBufferLapCount_ > 0);
+
+        // Use compareExchange to make sure we have monotonic increase.
+        for (;;) {
+            uint32_t curLapCount = profilerSampleBufferLapCount_;
+            if (curLapCount >= lapCount)
+                break;
+
+            if (profilerSampleBufferLapCount_.compareExchange(curLapCount, lapCount))
+                break;
+        }
+    }
+
     js::AsmJSActivation *asmJSActivationStack() const {
         return asmJSActivationStack_;
     }
     static js::AsmJSActivation *innermostAsmJSActivation() {
         js::PerThreadData *ptd = js::TlsPerThreadData.get();
         return ptd ? ptd->runtimeFromMainThread()->asmJSActivationStack_ : nullptr;
     }
 
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -1709,18 +1709,20 @@ void
 ActivationIterator::settle()
 {
     // Stop at the next active activation. No need to update jitTop_, since
     // we don't iterate over an active jit activation.
     while (!done() && activation_->isJit() && !activation_->asJit()->isActive())
         activation_ = activation_->prev();
 }
 
-JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state)
+JS::ProfilingFrameIterator::ProfilingFrameIterator(JSRuntime *rt, const RegisterState &state,
+                                                   uint32_t sampleBufferGen)
   : rt_(rt),
+    sampleBufferGen_(sampleBufferGen),
     activation_(rt->profilingActivation()),
     savedPrevJitTop_(nullptr)
 {
     if (!activation_)
         return;
 
     // If profiler sampling is not enabled, skip.
     if (!rt_->isProfilerSamplingEnabled()) {
@@ -1872,16 +1874,19 @@ JS::ProfilingFrameIterator::extractStack
 
     MOZ_ASSERT(isJit());
     void *returnAddr = jitIter().returnAddressToFp();
 
     // Look up an entry for the return address.
     jit::JitcodeGlobalTable *table = rt_->jitRuntime()->getJitcodeGlobalTable();
     jit::JitcodeGlobalEntry entry;
-    table->lookupInfallible(returnAddr, &entry, rt_);
+    if (hasSampleBufferGen())
+        table->lookupForSampler(returnAddr, &entry, rt_, sampleBufferGen_);
+    else
+        table->lookup(returnAddr, &entry, rt_);
 
     MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline() || entry.isDummy());
 
     // Dummy frames produce no stack frames.
     if (entry.isDummy())
         return 0;
 
     FrameKind kind = entry.isBaseline() ? Frame_Baseline : Frame_Ion;
--- a/js/src/vm/TypeInference.h
+++ b/js/src/vm/TypeInference.h
@@ -319,25 +319,31 @@ class TypeSet
 
         inline ObjectKey *objectKey() const;
 
         /* Accessors for JSObject types */
 
         bool isSingleton() const {
             return isObject() && !!(data & 1);
         }
+        bool isSingletonUnchecked() const {
+            return isObjectUnchecked() && !!(data & 1);
+        }
 
         inline JSObject *singleton() const;
         inline JSObject *singletonNoBarrier() const;
 
         /* Accessors for ObjectGroup types */
 
         bool isGroup() const {
             return isObject() && !(data & 1);
         }
+        bool isGroupUnchecked() const {
+            return isObjectUnchecked() && !(data & 1);
+        }
 
         inline ObjectGroup *group() const;
         inline ObjectGroup *groupNoBarrier() const;
 
         bool operator == (Type o) const { return data == o.data; }
         bool operator != (Type o) const { return data != o.data; }
 
         static ThingRootKind rootKind() { return THING_ROOT_TYPE; }
--- a/tools/profiler/ProfileEntry.h
+++ b/tools/profiler/ProfileEntry.h
@@ -147,16 +147,22 @@ public:
 
   ThreadInfo* GetThreadInfo() const { return mThreadInfo; }
   ThreadResponsiveness* GetThreadResponsiveness() { return &mRespInfo; }
   void SetPendingDelete()
   {
     mPseudoStack = nullptr;
     mPlatformData = nullptr;
   }
+
+  uint32_t bufferGeneration() const {
+    MOZ_ASSERT(mBuffer->mGeneration >= 0);
+    return mBuffer->mGeneration;
+  }
+
 private:
   FRIEND_TEST(ThreadProfile, InsertOneTag);
   FRIEND_TEST(ThreadProfile, InsertOneTagWithTinyBuffer);
   FRIEND_TEST(ThreadProfile, InsertTagsNoWrap);
   FRIEND_TEST(ThreadProfile, InsertTagsWrap);
   FRIEND_TEST(ThreadProfile, MemoryMeasure);
   ThreadInfo* mThreadInfo;
 
--- a/tools/profiler/TableTicker.cpp
+++ b/tools/profiler/TableTicker.cpp
@@ -481,31 +481,34 @@ void mergeStacksIntoProfile(ThreadProfil
   PseudoStack* pseudoStack = aProfile.GetPseudoStack();
   volatile StackEntry *pseudoFrames = pseudoStack->mStack;
   uint32_t pseudoCount = pseudoStack->stackSize();
 
   // Make a copy of the JS stack into a JSFrame array. This is necessary since,
   // like the native stack, the JS stack is iterated youngest-to-oldest and we
   // need to iterate oldest-to-youngest when adding entries to aProfile.
 
+  uint32_t startBufferGen = aProfile.bufferGeneration();
   uint32_t jsCount = 0;
   JS::ProfilingFrameIterator::Frame jsFrames[1000];
   {
     AutoWalkJSStack autoWalkJSStack;
     const uint32_t maxFrames = mozilla::ArrayLength(jsFrames);
 
     if (aSample && pseudoStack->mRuntime && autoWalkJSStack.walkAllowed) {
       JS::ProfilingFrameIterator::RegisterState registerState;
       registerState.pc = aSample->pc;
       registerState.sp = aSample->sp;
 #ifdef ENABLE_ARM_LR_SAVING
       registerState.lr = aSample->lr;
 #endif
 
-      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime, registerState);
+      JS::ProfilingFrameIterator jsIter(pseudoStack->mRuntime,
+                                        registerState,
+                                        startBufferGen);
       for (; jsCount < maxFrames && !jsIter.done(); ++jsIter) {
         uint32_t extracted = jsIter.extractStack(jsFrames, jsCount, maxFrames);
         MOZ_ASSERT(extracted <= (maxFrames - jsCount));
         jsCount += extracted;
         if (jsCount == maxFrames)
           break;
       }
     }
@@ -596,16 +599,26 @@ void mergeStacksIntoProfile(ThreadProfil
 
     // If we reach here, there must be a native stack entry and it must be the
     // greatest entry.
     MOZ_ASSERT(nativeStackAddr);
     MOZ_ASSERT(nativeIndex >= 0);
     aProfile.addTag(ProfileEntry('l', (void*)aNativeStack.pc_array[nativeIndex]));
     nativeIndex--;
   }
+
+  MOZ_ASSERT(aProfile.bufferGeneration() >= startBufferGen);
+  uint32_t lapCount = aProfile.bufferGeneration() - startBufferGen;
+
+  // Update the JS runtime with the current profile sample buffer generation.
+  if (pseudoStack->mRuntime) {
+    JS::UpdateJSRuntimeProfilerSampleBufferGen(pseudoStack->mRuntime,
+                                               aProfile.bufferGeneration(),
+                                               lapCount);
+  }
 }
 
 #ifdef USE_NS_STACKWALK
 static
 void StackWalkCallback(uint32_t aFrameNumber, void* aPC, void* aSP,
                        void* aClosure)
 {
   NativeStack* nativeStack = static_cast<NativeStack*>(aClosure);