Bug 1551796 part 11 - Move more JitScript code into JitScript.cpp. r=tcampbell
author: Jan de Mooij <jdemooij@mozilla.com>
Fri, 24 May 2019 12:03:13 +0000
changeset 475387 ec5f175f4f2c30c9446920856cba63ed7d2df99c
parent 475386 87aaa29b2fff6efbf469828597fc9ddffac4f3ef
child 475388 c347f878342197b00ce58f30d86d1b436abfd6ac
push id: 113208
push user: cbrindusan@mozilla.com
push date: Fri, 24 May 2019 21:57:39 +0000
treeherder: mozilla-inbound@5d3e1ea77693 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: tcampbell
bugs: 1551796
milestone: 69.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1551796 part 11 - Move more JitScript code into JitScript.cpp. r=tcampbell JitScript::initICEntriesAndBytecodeTypeMap is still in BaselineIC.cpp because it depends on things defined there (like FallbackStubAllocator) and I think it's not unreasonable to keep it there. Differential Revision: https://phabricator.services.mozilla.com/D32303
js/src/jit/BaselineIC.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/JitScript.cpp
js/src/jit/JitScript.h
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -1288,25 +1288,16 @@ void ICStubCompilerBase::pushStubPayload
 }
 
 void ICStubCompilerBase::PushStubPayload(MacroAssembler& masm,
                                          Register scratch) {
   pushStubPayload(masm, scratch);
   masm.adjustFrame(sizeof(intptr_t));
 }
 
-void JitScript::noteAccessedGetter(uint32_t pcOffset) {
-  ICEntry& entry = icEntryFromPCOffset(pcOffset);
-  ICFallbackStub* stub = entry.fallbackStub();
-
-  if (stub->isGetProp_Fallback()) {
-    stub->toGetProp_Fallback()->noteAccessedGetter();
-  }
-}
-
 // TypeMonitor_Fallback
 //
 
 bool ICTypeMonitor_Fallback::addMonitorStubForValue(JSContext* cx,
                                                     BaselineFrame* frame,
                                                     StackTypeSet* types,
                                                     HandleValue val) {
   MOZ_ASSERT(types);
@@ -2484,25 +2475,16 @@ bool FallbackICCodeCompiler::emit_SetEle
   masm.push(ICStubReg);
   pushStubPayload(masm, R0.scratchReg());
 
   using Fn = bool (*)(JSContext*, BaselineFrame*, ICSetElem_Fallback*, Value*,
                       HandleValue, HandleValue, HandleValue);
   return tailCallVM<Fn, DoSetElemFallback>(masm);
 }
 
-void JitScript::noteHasDenseAdd(uint32_t pcOffset) {
-  ICEntry& entry = icEntryFromPCOffset(pcOffset);
-  ICFallbackStub* stub = entry.fallbackStub();
-
-  if (stub->isSetElem_Fallback()) {
-    stub->toSetElem_Fallback()->noteHasDenseAdd();
-  }
-}
-
 template <typename T>
 void StoreToTypedArray(JSContext* cx, MacroAssembler& masm, Scalar::Type type,
                        const ValueOperand& value, const T& dest,
                        Register scratch, Label* failure) {
   Label done;
 
   if (type == Scalar::Float32 || type == Scalar::Float64) {
     masm.ensureDouble(value, FloatReg0, failure);
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -512,24 +512,16 @@ BaselineScript* BaselineScript::New(
   return script;
 }
 
 void BaselineScript::trace(JSTracer* trc) {
   TraceEdge(trc, &method_, "baseline-method");
   TraceNullableEdge(trc, &templateEnv_, "baseline-template-environment");
 }
 
-void JitScript::trace(JSTracer* trc) {
-  // Mark all IC stub codes hanging off the IC stub entries.
-  for (size_t i = 0; i < numICEntries(); i++) {
-    ICEntry& ent = icEntry(i);
-    ent.trace(trc);
-  }
-}
-
 /* static */
 void BaselineScript::writeBarrierPre(Zone* zone, BaselineScript* script) {
   if (zone->needsIncrementalBarrier()) {
     script->trace(zone->barrierTracer());
   }
 }
 
 void BaselineScript::Trace(JSTracer* trc, BaselineScript* script) {
@@ -614,25 +606,16 @@ CompactBufferReader BaselineScript::pcMa
   uint8_t* dataEnd =
       (indexEntry == numPCMappingIndexEntries() - 1)
           ? pcMappingData() + pcMappingSize_
           : pcMappingData() + pcMappingIndexEntry(indexEntry + 1).bufferOffset;
 
   return CompactBufferReader(dataStart, dataEnd);
 }
 
-struct ICEntries {
-  JitScript* const jitScript_;
-
-  explicit ICEntries(JitScript* jitScript) : jitScript_(jitScript) {}
-
-  size_t numEntries() const { return jitScript_->numICEntries(); }
-  ICEntry& operator[](size_t index) const { return jitScript_->icEntry(index); }
-};
-
 struct RetAddrEntries {
   BaselineScript* const baseline_;
 
   explicit RetAddrEntries(BaselineScript* baseline) : baseline_(baseline) {}
 
   size_t numEntries() const { return baseline_->numRetAddrEntries(); }
   RetAddrEntry& operator[](size_t index) const {
     return baseline_->retAddrEntry(index);
@@ -662,40 +645,16 @@ RetAddrEntry& BaselineScript::retAddrEnt
 
   MOZ_ASSERT(found);
   MOZ_ASSERT(loc < numRetAddrEntries());
   MOZ_ASSERT(retAddrEntry(loc).returnOffset().offset() ==
              returnOffset.offset());
   return retAddrEntry(loc);
 }
 
-static bool ComputeBinarySearchMid(ICEntries entries, uint32_t pcOffset,
-                                   size_t* loc) {
-  return BinarySearchIf(
-      entries, 0, entries.numEntries(),
-      [pcOffset](const ICEntry& entry) {
-        uint32_t entryOffset = entry.pcOffset();
-        if (pcOffset < entryOffset) {
-          return -1;
-        }
-        if (entryOffset < pcOffset) {
-          return 1;
-        }
-        if (entry.isForPrologue()) {
-          // Prologue ICEntries are used for function argument type checks.
-          // Ignore these entries and return 1 because these entries appear in
-          // the ICEntry list before the other ICEntry (if any) at offset 0.
-          MOZ_ASSERT(entryOffset == 0);
-          return 1;
-        }
-        return 0;
-      },
-      loc);
-}
-
 static bool ComputeBinarySearchMid(RetAddrEntries entries, uint32_t pcOffset,
                                    size_t* loc) {
   return BinarySearchIf(
       entries, 0, entries.numEntries(),
       [pcOffset](const RetAddrEntry& entry) {
         uint32_t entryOffset = entry.pcOffset();
         if (pcOffset < entryOffset) {
           return -1;
@@ -707,92 +666,16 @@ static bool ComputeBinarySearchMid(RetAd
       },
       loc);
 }
 
 uint8_t* BaselineScript::returnAddressForEntry(const RetAddrEntry& ent) {
   return method()->raw() + ent.returnOffset().offset();
 }
 
-ICEntry* JitScript::maybeICEntryFromPCOffset(uint32_t pcOffset) {
-  // This method ignores prologue IC entries. There can be at most one
-  // non-prologue IC per bytecode op.
-
-  size_t mid;
-  if (!ComputeBinarySearchMid(ICEntries(this), pcOffset, &mid)) {
-    return nullptr;
-  }
-
-  MOZ_ASSERT(mid < numICEntries());
-
-  ICEntry& entry = icEntry(mid);
-  MOZ_ASSERT(!entry.isForPrologue());
-  MOZ_ASSERT(entry.pcOffset() == pcOffset);
-  return &entry;
-}
-
-ICEntry& JitScript::icEntryFromPCOffset(uint32_t pcOffset) {
-  ICEntry* entry = maybeICEntryFromPCOffset(pcOffset);
-  MOZ_RELEASE_ASSERT(entry);
-  return *entry;
-}
-
-ICEntry* JitScript::maybeICEntryFromPCOffset(uint32_t pcOffset,
-                                             ICEntry* prevLookedUpEntry) {
-  // Do a linear forward search from the last queried PC offset, or fallback to
-  // a binary search if the last offset is too far away.
-  if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
-      (pcOffset - prevLookedUpEntry->pcOffset()) <= 10) {
-    ICEntry* firstEntry = &icEntry(0);
-    ICEntry* lastEntry = &icEntry(numICEntries() - 1);
-    ICEntry* curEntry = prevLookedUpEntry;
-    while (curEntry >= firstEntry && curEntry <= lastEntry) {
-      if (curEntry->pcOffset() == pcOffset && !curEntry->isForPrologue()) {
-        return curEntry;
-      }
-      curEntry++;
-    }
-    return nullptr;
-  }
-
-  return maybeICEntryFromPCOffset(pcOffset);
-}
-
-ICEntry& JitScript::icEntryFromPCOffset(uint32_t pcOffset,
-                                        ICEntry* prevLookedUpEntry) {
-  ICEntry* entry = maybeICEntryFromPCOffset(pcOffset, prevLookedUpEntry);
-  MOZ_RELEASE_ASSERT(entry);
-  return *entry;
-}
-
-ICEntry* JitScript::interpreterICEntryFromPCOffset(uint32_t pcOffset) {
-  // We have to return the entry to store in BaselineFrame::interpreterICEntry
-  // when resuming in the Baseline Interpreter at pcOffset. The bytecode op at
-  // pcOffset does not necessarily have an ICEntry, so we want to return the
-  // first ICEntry for which the following is true:
-  //
-  //    !entry.isForPrologue() && entry.pcOffset() >= pcOffset
-  //
-  // Fortunately, ComputeBinarySearchMid returns exactly this entry.
-
-  size_t mid;
-  ComputeBinarySearchMid(ICEntries(this), pcOffset, &mid);
-
-  if (mid < numICEntries()) {
-    ICEntry& entry = icEntry(mid);
-    MOZ_ASSERT(!entry.isForPrologue());
-    MOZ_ASSERT(entry.pcOffset() >= pcOffset);
-    return &entry;
-  }
-
-  // Resuming at a pc after the last ICEntry. Just return nullptr:
-  // BaselineFrame::interpreterICEntry will never be used in this case.
-  return nullptr;
-}
-
 RetAddrEntry& BaselineScript::retAddrEntryFromPCOffset(
     uint32_t pcOffset, RetAddrEntry::Kind kind) {
   size_t mid;
   MOZ_ALWAYS_TRUE(ComputeBinarySearchMid(RetAddrEntries(this), pcOffset, &mid));
   MOZ_ASSERT(mid < numRetAddrEntries());
 
   for (size_t i = mid; retAddrEntry(i).pcOffset() == pcOffset; i--) {
     if (retAddrEntry(i).kind() == kind) {
@@ -1192,160 +1075,16 @@ void BaselineInterpreter::toggleCodeCove
   if (coverage::IsLCovEnabled()) {
     // Instrumentation is enabled no matter what.
     return;
   }
 
   toggleCodeCoverageInstrumentationUnchecked(enable);
 }
 
-void JitScript::purgeOptimizedStubs(JSScript* script) {
-  MOZ_ASSERT(script->jitScript() == this);
-
-  Zone* zone = script->zone();
-  if (zone->isGCSweeping() && IsAboutToBeFinalizedDuringSweep(*script)) {
-    // We're sweeping and the script is dead. Don't purge optimized stubs
-    // because (1) accessing CacheIRStubInfo pointers in ICStubs is invalid
-    // because we may have swept them already when we started (incremental)
-    // sweeping and (2) it's unnecessary because this script will be finalized
-    // soon anyway.
-    return;
-  }
-
-  JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
-
-  for (size_t i = 0; i < numICEntries(); i++) {
-    ICEntry& entry = icEntry(i);
-    ICStub* lastStub = entry.firstStub();
-    while (lastStub->next()) {
-      lastStub = lastStub->next();
-    }
-
-    if (lastStub->isFallback()) {
-      // Unlink all stubs allocated in the optimized space.
-      ICStub* stub = entry.firstStub();
-      ICStub* prev = nullptr;
-
-      while (stub->next()) {
-        if (!stub->allocatedInFallbackSpace()) {
-          lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
-          stub = stub->next();
-          continue;
-        }
-
-        prev = stub;
-        stub = stub->next();
-      }
-
-      if (lastStub->isMonitoredFallback()) {
-        // Monitor stubs can't make calls, so are always in the
-        // optimized stub space.
-        ICTypeMonitor_Fallback* lastMonStub =
-            lastStub->toMonitoredFallbackStub()->maybeFallbackMonitorStub();
-        if (lastMonStub) {
-          lastMonStub->resetMonitorStubChain(zone);
-        }
-      }
-    } else if (lastStub->isTypeMonitor_Fallback()) {
-      lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
-    } else {
-      MOZ_CRASH("Unknown fallback stub");
-    }
-  }
-
-#ifdef DEBUG
-  // All remaining stubs must be allocated in the fallback space.
-  for (size_t i = 0; i < numICEntries(); i++) {
-    ICEntry& entry = icEntry(i);
-    ICStub* stub = entry.firstStub();
-    while (stub->next()) {
-      MOZ_ASSERT(stub->allocatedInFallbackSpace());
-      stub = stub->next();
-    }
-  }
-#endif
-}
-
-#ifdef JS_STRUCTURED_SPEW
-static bool GetStubEnteredCount(ICStub* stub, uint32_t* count) {
-  switch (stub->kind()) {
-    case ICStub::CacheIR_Regular:
-      *count = stub->toCacheIR_Regular()->enteredCount();
-      return true;
-    case ICStub::CacheIR_Updated:
-      *count = stub->toCacheIR_Updated()->enteredCount();
-      return true;
-    case ICStub::CacheIR_Monitored:
-      *count = stub->toCacheIR_Monitored()->enteredCount();
-      return true;
-    default:
-      return false;
-  }
-}
-
-bool HasEnteredCounters(ICEntry& entry) {
-  ICStub* stub = entry.firstStub();
-  while (stub && !stub->isFallback()) {
-    uint32_t count;
-    if (GetStubEnteredCount(stub, &count)) {
-      return true;
-    }
-    stub = stub->next();
-  }
-  return false;
-}
-
-void jit::JitSpewBaselineICStats(JSScript* script, const char* dumpReason) {
-  MOZ_ASSERT(script->hasJitScript());
-  JSContext* cx = TlsContext.get();
-  AutoStructuredSpewer spew(cx, SpewChannel::BaselineICStats, script);
-  if (!spew) {
-    return;
-  }
-
-  JitScript* jitScript = script->jitScript();
-  spew->property("reason", dumpReason);
-  spew->beginListProperty("entries");
-  for (size_t i = 0; i < jitScript->numICEntries(); i++) {
-    ICEntry& entry = jitScript->icEntry(i);
-    if (!HasEnteredCounters(entry)) {
-      continue;
-    }
-
-    uint32_t pcOffset = entry.pcOffset();
-    jsbytecode* pc = entry.pc(script);
-
-    unsigned column;
-    unsigned int line = PCToLineNumber(script, pc, &column);
-
-    spew->beginObject();
-    spew->property("op", CodeName[*pc]);
-    spew->property("pc", pcOffset);
-    spew->property("line", line);
-    spew->property("column", column);
-
-    spew->beginListProperty("counts");
-    ICStub* stub = entry.firstStub();
-    while (stub && !stub->isFallback()) {
-      uint32_t count;
-      if (GetStubEnteredCount(stub, &count)) {
-        spew->value(count);
-      } else {
-        spew->value("?");
-      }
-      stub = stub->next();
-    }
-    spew->endList();
-    spew->property("fallback_count", entry.fallbackStub()->enteredCount());
-    spew->endObject();
-  }
-  spew->endList();
-}
-#endif
-
 void jit::FinishDiscardBaselineScript(FreeOp* fop, JSScript* script) {
   MOZ_ASSERT(script->hasBaselineScript());
   MOZ_ASSERT(!script->jitScript()->active());
 
   BaselineScript* baseline = script->baselineScript();
   script->setBaselineScript(fop->runtime(), nullptr);
   BaselineScript::Destroy(fop, baseline);
 }
@@ -1405,61 +1144,16 @@ void jit::ToggleBaselineTraceLoggerEngin
         continue;
       }
       script->baselineScript()->toggleTraceLoggerEngine(enable);
     }
   }
 }
 #endif
 
-static void MarkActiveJitScripts(JSContext* cx,
-                                 const JitActivationIterator& activation) {
-  for (OnlyJSJitFrameIter iter(activation); !iter.done(); ++iter) {
-    const JSJitFrameIter& frame = iter.frame();
-    switch (frame.type()) {
-      case FrameType::BaselineJS:
-        frame.script()->jitScript()->setActive();
-        break;
-      case FrameType::Exit:
-        if (frame.exitFrame()->is<LazyLinkExitFrameLayout>()) {
-          LazyLinkExitFrameLayout* ll =
-              frame.exitFrame()->as<LazyLinkExitFrameLayout>();
-          JSScript* script =
-              ScriptFromCalleeToken(ll->jsFrame()->calleeToken());
-          script->jitScript()->setActive();
-        }
-        break;
-      case FrameType::Bailout:
-      case FrameType::IonJS: {
-        // Keep the JitScript and BaselineScript around, since bailouts from
-        // the ion jitcode need to re-enter into the Baseline code.
-        frame.script()->jitScript()->setActive();
-        for (InlineFrameIterator inlineIter(cx, &frame); inlineIter.more();
-             ++inlineIter) {
-          inlineIter.script()->jitScript()->setActive();
-        }
-        break;
-      }
-      default:;
-    }
-  }
-}
-
-void jit::MarkActiveJitScripts(Zone* zone) {
-  if (zone->isAtomsZone()) {
-    return;
-  }
-  JSContext* cx = TlsContext.get();
-  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
-    if (iter->compartment()->zone() == zone) {
-      MarkActiveJitScripts(cx, iter);
-    }
-  }
-}
-
 void BaselineInterpreter::init(JitCode* code, uint32_t interpretOpOffset,
                                uint32_t profilerEnterToggleOffset,
                                uint32_t profilerExitToggleOffset,
                                uint32_t debuggeeCheckOffset,
                                CodeOffsetVector&& debugTrapOffsets,
                                CodeOffsetVector&& codeCoverageOffsets) {
   code_ = code;
   interpretOpOffset_ = interpretOpOffset;
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -633,27 +633,19 @@ struct BaselineBailoutInfo {
   BailoutKind bailoutKind;
 };
 
 MOZ_MUST_USE bool BailoutIonToBaseline(
     JSContext* cx, JitActivation* activation, const JSJitFrameIter& iter,
     bool invalidate, BaselineBailoutInfo** bailoutInfo,
     const ExceptionBailoutInfo* exceptionInfo);
 
-// Mark JitScripts on the stack as active, so that they are not discarded
-// during GC.
-void MarkActiveJitScripts(Zone* zone);
-
 MethodStatus BaselineCompile(JSContext* cx, JSScript* script,
                              bool forceDebugInstrumentation = false);
 
-#ifdef JS_STRUCTURED_SPEW
-void JitSpewBaselineICStats(JSScript* script, const char* dumpReason);
-#endif
-
 static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;
 
 // Class storing the generated Baseline Interpreter code for the runtime.
 class BaselineInterpreter {
   // The interpreter code.
   JitCode* code_ = nullptr;
 
   // Offset of the code to start interpreting a bytecode op.
--- a/js/src/jit/JitScript.cpp
+++ b/js/src/jit/JitScript.cpp
@@ -1,24 +1,27 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
  * vim: set ts=8 sts=2 et sw=2 tw=80:
  * This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "jit/JitScript-inl.h"
 
+#include "mozilla/BinarySearch.h"
 #include "mozilla/IntegerPrintfMacros.h"
 #include "mozilla/Move.h"
 #include "mozilla/ScopeExit.h"
 
 #include "jit/BaselineIC.h"
 #include "vm/JSScript.h"
+#include "vm/Stack.h"
 #include "vm/TypeInference.h"
 
+#include "jit/JSJitFrameIter-inl.h"
 #include "vm/JSScript-inl.h"
 #include "vm/TypeInference-inl.h"
 
 using namespace js;
 using namespace js::jit;
 
 static size_t NumTypeSets(JSScript* script) {
   // We rely on |num| not overflowing below.
@@ -148,20 +151,22 @@ void JSScript::maybeReleaseJitScript() {
 
   MOZ_ASSERT(!hasIonScript());
 
   JitScript::Destroy(zone(), jitScript_);
   jitScript_ = nullptr;
   updateJitCodeRaw(runtimeFromMainThread());
 }
 
-/* static */
-void JitScript::Destroy(Zone* zone, JitScript* script) {
-  script->prepareForDestruction(zone);
-  js_delete(script);
+void JitScript::trace(JSTracer* trc) {
+  // Mark all IC stub codes hanging off the IC stub entries.
+  for (size_t i = 0; i < numICEntries(); i++) {
+    ICEntry& ent = icEntry(i);
+    ent.trace(trc);
+  }
 }
 
 #ifdef DEBUG
 void JitScript::printTypes(JSContext* cx, HandleScript script) {
   AutoSweepJitScript sweep(script);
   MOZ_ASSERT(script->jitScript() == this);
 
   AutoEnterAnalysis enter(nullptr, script->zone());
@@ -212,8 +217,330 @@ void JitScript::printTypes(JSContext* cx
       types->print();
       fprintf(stderr, "\n");
     }
   }
 
   fprintf(stderr, "\n");
 }
 #endif /* DEBUG */
+
+/* static */
+void JitScript::Destroy(Zone* zone, JitScript* script) {
+  script->prepareForDestruction(zone);
+  js_delete(script);
+}
+
+struct ICEntries {
+  JitScript* const jitScript_;
+
+  explicit ICEntries(JitScript* jitScript) : jitScript_(jitScript) {}
+
+  size_t numEntries() const { return jitScript_->numICEntries(); }
+  ICEntry& operator[](size_t index) const { return jitScript_->icEntry(index); }
+};
+
+static bool ComputeBinarySearchMid(ICEntries entries, uint32_t pcOffset,
+                                   size_t* loc) {
+  return mozilla::BinarySearchIf(
+      entries, 0, entries.numEntries(),
+      [pcOffset](const ICEntry& entry) {
+        uint32_t entryOffset = entry.pcOffset();
+        if (pcOffset < entryOffset) {
+          return -1;
+        }
+        if (entryOffset < pcOffset) {
+          return 1;
+        }
+        if (entry.isForPrologue()) {
+          // Prologue ICEntries are used for function argument type checks.
+          // Ignore these entries and return 1 because these entries appear in
+          // the ICEntry list before the other ICEntry (if any) at offset 0.
+          MOZ_ASSERT(entryOffset == 0);
+          return 1;
+        }
+        return 0;
+      },
+      loc);
+}
+
+ICEntry* JitScript::maybeICEntryFromPCOffset(uint32_t pcOffset) {
+  // This method ignores prologue IC entries. There can be at most one
+  // non-prologue IC per bytecode op.
+
+  size_t mid;
+  if (!ComputeBinarySearchMid(ICEntries(this), pcOffset, &mid)) {
+    return nullptr;
+  }
+
+  MOZ_ASSERT(mid < numICEntries());
+
+  ICEntry& entry = icEntry(mid);
+  MOZ_ASSERT(!entry.isForPrologue());
+  MOZ_ASSERT(entry.pcOffset() == pcOffset);
+  return &entry;
+}
+
+ICEntry& JitScript::icEntryFromPCOffset(uint32_t pcOffset) {
+  ICEntry* entry = maybeICEntryFromPCOffset(pcOffset);
+  MOZ_RELEASE_ASSERT(entry);
+  return *entry;
+}
+
+ICEntry* JitScript::maybeICEntryFromPCOffset(uint32_t pcOffset,
+                                             ICEntry* prevLookedUpEntry) {
+  // Do a linear forward search from the last queried PC offset, or fallback to
+  // a binary search if the last offset is too far away.
+  if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
+      (pcOffset - prevLookedUpEntry->pcOffset()) <= 10) {
+    ICEntry* firstEntry = &icEntry(0);
+    ICEntry* lastEntry = &icEntry(numICEntries() - 1);
+    ICEntry* curEntry = prevLookedUpEntry;
+    while (curEntry >= firstEntry && curEntry <= lastEntry) {
+      if (curEntry->pcOffset() == pcOffset && !curEntry->isForPrologue()) {
+        return curEntry;
+      }
+      curEntry++;
+    }
+    return nullptr;
+  }
+
+  return maybeICEntryFromPCOffset(pcOffset);
+}
+
+ICEntry& JitScript::icEntryFromPCOffset(uint32_t pcOffset,
+                                        ICEntry* prevLookedUpEntry) {
+  ICEntry* entry = maybeICEntryFromPCOffset(pcOffset, prevLookedUpEntry);
+  MOZ_RELEASE_ASSERT(entry);
+  return *entry;
+}
+
+ICEntry* JitScript::interpreterICEntryFromPCOffset(uint32_t pcOffset) {
+  // We have to return the entry to store in BaselineFrame::interpreterICEntry
+  // when resuming in the Baseline Interpreter at pcOffset. The bytecode op at
+  // pcOffset does not necessarily have an ICEntry, so we want to return the
+  // first ICEntry for which the following is true:
+  //
+  //    !entry.isForPrologue() && entry.pcOffset() >= pcOffset
+  //
+  // Fortunately, ComputeBinarySearchMid returns exactly this entry.
+
+  size_t mid;
+  ComputeBinarySearchMid(ICEntries(this), pcOffset, &mid);
+
+  if (mid < numICEntries()) {
+    ICEntry& entry = icEntry(mid);
+    MOZ_ASSERT(!entry.isForPrologue());
+    MOZ_ASSERT(entry.pcOffset() >= pcOffset);
+    return &entry;
+  }
+
+  // Resuming at a pc after the last ICEntry. Just return nullptr:
+  // BaselineFrame::interpreterICEntry will never be used in this case.
+  return nullptr;
+}
+
+void JitScript::purgeOptimizedStubs(JSScript* script) {
+  MOZ_ASSERT(script->jitScript() == this);
+
+  Zone* zone = script->zone();
+  if (zone->isGCSweeping() && IsAboutToBeFinalizedDuringSweep(*script)) {
+    // We're sweeping and the script is dead. Don't purge optimized stubs
+    // because (1) accessing CacheIRStubInfo pointers in ICStubs is invalid
+    // because we may have swept them already when we started (incremental)
+    // sweeping and (2) it's unnecessary because this script will be finalized
+    // soon anyway.
+    return;
+  }
+
+  JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
+
+  for (size_t i = 0; i < numICEntries(); i++) {
+    ICEntry& entry = icEntry(i);
+    ICStub* lastStub = entry.firstStub();
+    while (lastStub->next()) {
+      lastStub = lastStub->next();
+    }
+
+    if (lastStub->isFallback()) {
+      // Unlink all stubs allocated in the optimized space.
+      ICStub* stub = entry.firstStub();
+      ICStub* prev = nullptr;
+
+      while (stub->next()) {
+        if (!stub->allocatedInFallbackSpace()) {
+          lastStub->toFallbackStub()->unlinkStub(zone, prev, stub);
+          stub = stub->next();
+          continue;
+        }
+
+        prev = stub;
+        stub = stub->next();
+      }
+
+      if (lastStub->isMonitoredFallback()) {
+        // Monitor stubs can't make calls, so are always in the
+        // optimized stub space.
+        ICTypeMonitor_Fallback* lastMonStub =
+            lastStub->toMonitoredFallbackStub()->maybeFallbackMonitorStub();
+        if (lastMonStub) {
+          lastMonStub->resetMonitorStubChain(zone);
+        }
+      }
+    } else if (lastStub->isTypeMonitor_Fallback()) {
+      lastStub->toTypeMonitor_Fallback()->resetMonitorStubChain(zone);
+    } else {
+      MOZ_CRASH("Unknown fallback stub");
+    }
+  }
+
+#ifdef DEBUG
+  // All remaining stubs must be allocated in the fallback space.
+  for (size_t i = 0; i < numICEntries(); i++) {
+    ICEntry& entry = icEntry(i);
+    ICStub* stub = entry.firstStub();
+    while (stub->next()) {
+      MOZ_ASSERT(stub->allocatedInFallbackSpace());
+      stub = stub->next();
+    }
+  }
+#endif
+}
+
+void JitScript::noteAccessedGetter(uint32_t pcOffset) {
+  ICEntry& entry = icEntryFromPCOffset(pcOffset);
+  ICFallbackStub* stub = entry.fallbackStub();
+
+  if (stub->isGetProp_Fallback()) {
+    stub->toGetProp_Fallback()->noteAccessedGetter();
+  }
+}
+
+void JitScript::noteHasDenseAdd(uint32_t pcOffset) {
+  ICEntry& entry = icEntryFromPCOffset(pcOffset);
+  ICFallbackStub* stub = entry.fallbackStub();
+
+  if (stub->isSetElem_Fallback()) {
+    stub->toSetElem_Fallback()->noteHasDenseAdd();
+  }
+}
+
+#ifdef JS_STRUCTURED_SPEW
+static bool GetStubEnteredCount(ICStub* stub, uint32_t* count) {
+  switch (stub->kind()) {
+    case ICStub::CacheIR_Regular:
+      *count = stub->toCacheIR_Regular()->enteredCount();
+      return true;
+    case ICStub::CacheIR_Updated:
+      *count = stub->toCacheIR_Updated()->enteredCount();
+      return true;
+    case ICStub::CacheIR_Monitored:
+      *count = stub->toCacheIR_Monitored()->enteredCount();
+      return true;
+    default:
+      return false;
+  }
+}
+
+static bool HasEnteredCounters(ICEntry& entry) {
+  ICStub* stub = entry.firstStub();
+  while (stub && !stub->isFallback()) {
+    uint32_t count;
+    if (GetStubEnteredCount(stub, &count)) {
+      return true;
+    }
+    stub = stub->next();
+  }
+  return false;
+}
+
+void jit::JitSpewBaselineICStats(JSScript* script, const char* dumpReason) {
+  MOZ_ASSERT(script->hasJitScript());
+  JSContext* cx = TlsContext.get();
+  AutoStructuredSpewer spew(cx, SpewChannel::BaselineICStats, script);
+  if (!spew) {
+    return;
+  }
+
+  JitScript* jitScript = script->jitScript();
+  spew->property("reason", dumpReason);
+  spew->beginListProperty("entries");
+  for (size_t i = 0; i < jitScript->numICEntries(); i++) {
+    ICEntry& entry = jitScript->icEntry(i);
+    if (!HasEnteredCounters(entry)) {
+      continue;
+    }
+
+    uint32_t pcOffset = entry.pcOffset();
+    jsbytecode* pc = entry.pc(script);
+
+    unsigned column;
+    unsigned int line = PCToLineNumber(script, pc, &column);
+
+    spew->beginObject();
+    spew->property("op", CodeName[*pc]);
+    spew->property("pc", pcOffset);
+    spew->property("line", line);
+    spew->property("column", column);
+
+    spew->beginListProperty("counts");
+    ICStub* stub = entry.firstStub();
+    while (stub && !stub->isFallback()) {
+      uint32_t count;
+      if (GetStubEnteredCount(stub, &count)) {
+        spew->value(count);
+      } else {
+        spew->value("?");
+      }
+      stub = stub->next();
+    }
+    spew->endList();
+    spew->property("fallback_count", entry.fallbackStub()->enteredCount());
+    spew->endObject();
+  }
+  spew->endList();
+}
+#endif
+
+static void MarkActiveJitScripts(JSContext* cx,
+                                 const JitActivationIterator& activation) {
+  for (OnlyJSJitFrameIter iter(activation); !iter.done(); ++iter) {
+    const JSJitFrameIter& frame = iter.frame();
+    switch (frame.type()) {
+      case FrameType::BaselineJS:
+        frame.script()->jitScript()->setActive();
+        break;
+      case FrameType::Exit:
+        if (frame.exitFrame()->is<LazyLinkExitFrameLayout>()) {
+          LazyLinkExitFrameLayout* ll =
+              frame.exitFrame()->as<LazyLinkExitFrameLayout>();
+          JSScript* script =
+              ScriptFromCalleeToken(ll->jsFrame()->calleeToken());
+          script->jitScript()->setActive();
+        }
+        break;
+      case FrameType::Bailout:
+      case FrameType::IonJS: {
+        // Keep the JitScript and BaselineScript around, since bailouts from
+        // the ion jitcode need to re-enter into the Baseline code.
+        frame.script()->jitScript()->setActive();
+        for (InlineFrameIterator inlineIter(cx, &frame); inlineIter.more();
+             ++inlineIter) {
+          inlineIter.script()->jitScript()->setActive();
+        }
+        break;
+      }
+      default:;
+    }
+  }
+}
+
+void jit::MarkActiveJitScripts(Zone* zone) {
+  if (zone->isAtomsZone()) {
+    return;
+  }
+  JSContext* cx = TlsContext.get();
+  for (JitActivationIterator iter(cx); !iter.done(); ++iter) {
+    if (iter->compartment()->zone() == zone) {
+      MarkActiveJitScripts(cx, iter);
+    }
+  }
+}
--- a/js/src/jit/JitScript.h
+++ b/js/src/jit/JitScript.h
@@ -309,12 +309,20 @@ class MOZ_RAII AutoKeepJitScripts {
   AutoKeepJitScripts(const AutoKeepJitScripts&) = delete;
   void operator=(const AutoKeepJitScripts&) = delete;
 
  public:
   explicit inline AutoKeepJitScripts(JSContext* cx);
   inline ~AutoKeepJitScripts();
 };
 
+// Mark JitScripts on the stack as active, so that they are not discarded
+// during GC.
+void MarkActiveJitScripts(Zone* zone);
+
+#ifdef JS_STRUCTURED_SPEW
+void JitSpewBaselineICStats(JSScript* script, const char* dumpReason);
+#endif
+
 }  // namespace jit
 }  // namespace js
 
 #endif /* jit_JitScript_h */