Bug 1296649: Split ICEntry between Ion and Baseline version, r=jandem a=ritu
authorHannes Verschore <hv1989@gmail.com>
Mon, 10 Oct 2016 14:02:28 -0700
changeset 350628 f4385d3a94d43a159e42326ff9ad3615260fa22a
parent 350627 99caef326a26a027c42fdf6ac8a28a4d7d76e927
child 350629 3d2ba22eee02c115699d0558d1f3a15a159051b6
push id1230
push user jlund@mozilla.com
push date Mon, 31 Oct 2016 18:13:35 +0000
treeherder mozilla-release@5e06e3766db2 [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers jandem, ritu
bugs 1296649
milestone 50.0
Bug 1296649: Split ICEntry between Ion and Baseline version, r=jandem a=ritu
js/src/jit/BaselineBailouts.cpp
js/src/jit/BaselineCompiler.cpp
js/src/jit/BaselineDebugModeOSR.cpp
js/src/jit/BaselineInspector.h
js/src/jit/BaselineJIT.cpp
js/src/jit/BaselineJIT.h
js/src/jit/Ion.cpp
js/src/jit/SharedIC.cpp
js/src/jit/SharedIC.h
js/src/jit/shared/BaselineCompiler-shared.h
--- a/js/src/jit/BaselineBailouts.cpp
+++ b/js/src/jit/BaselineBailouts.cpp
@@ -1008,32 +1008,32 @@ InitFromBailout(JSContext* cx, HandleScr
         // If the bailout was a resumeAfter, and the opcode is monitored,
         // then the bailed out state should be in a position to enter
         // into the ICTypeMonitor chain for the op.
         bool enterMonitorChain = false;
         if (resumeAfter && (CodeSpec[op].format & JOF_TYPESET)) {
             // Not every monitored op has a monitored fallback stub, e.g.
             // JSOP_NEWOBJECT, which always returns the same type for a
             // particular script/pc location.
-            ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+            BaselineICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
             ICFallbackStub* fallbackStub = icEntry.firstStub()->getChainFallback();
             if (fallbackStub->isMonitoredFallback())
                 enterMonitorChain = true;
         }
 
         uint32_t numCallArgs = isCall ? GET_ARGC(pc) : 0;
 
         if (resumeAfter && !enterMonitorChain)
             pc = GetNextPc(pc);
 
         builder.setResumePC(pc);
         builder.setResumeFramePtr(prevFramePtr);
 
         if (enterMonitorChain) {
-            ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+            BaselineICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
             ICFallbackStub* fallbackStub = icEntry.firstStub()->getChainFallback();
             MOZ_ASSERT(fallbackStub->isMonitoredFallback());
             JitSpew(JitSpew_BaselineBailouts, "      [TYPE-MONITOR CHAIN]");
             ICMonitoredFallbackStub* monFallbackStub = fallbackStub->toMonitoredFallbackStub();
             ICStub* firstMonStub = monFallbackStub->fallbackMonitorStub()->firstMonitorStub();
 
             // To enter a monitoring chain, we load the top stack value into R0
             JitSpew(JitSpew_BaselineBailouts, "      Popping top stack value into R0.");
@@ -1172,17 +1172,17 @@ InitFromBailout(JSContext* cx, HandleScr
     size_t baselineFrameDescr = MakeFrameDescriptor((uint32_t) builder.framePushed(),
                                                     JitFrame_BaselineJS,
                                                     BaselineStubFrameLayout::Size());
     if (!builder.writeWord(baselineFrameDescr, "Descriptor"))
         return false;
 
     // Calculate and write out return address.
     // The icEntry in question MUST have an inlinable fallback stub.
-    ICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
+    BaselineICEntry& icEntry = baselineScript->icEntryFromPCOffset(pcOff);
     MOZ_ASSERT(IsInlinableFallback(icEntry.firstStub()->getChainFallback()));
     if (!builder.writePtr(baselineScript->returnAddressForIC(icEntry), "ReturnAddr"))
         return false;
 
     // Build baseline stub frame:
     // +===============+
     // |    StubPtr    |
     // +---------------+
--- a/js/src/jit/BaselineCompiler.cpp
+++ b/js/src/jit/BaselineCompiler.cpp
@@ -246,17 +246,17 @@ BaselineCompiler::compile()
     // If profiler instrumentation is enabled, toggle instrumentation on.
     if (cx->runtime()->jitRuntime()->isProfilerInstrumentationEnabled(cx->runtime()))
         baselineScript->toggleProfilerInstrumentation(true);
 
     // Patch IC loads using IC entries.
     for (size_t i = 0; i < icLoadLabels_.length(); i++) {
         CodeOffset label = icLoadLabels_[i].label;
         size_t icEntry = icLoadLabels_[i].icEntry;
-        ICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
+        BaselineICEntry* entryAddr = &(baselineScript->icEntry(icEntry));
         Assembler::PatchDataWithValueCheck(CodeLocationLabel(code, label),
                                            ImmPtr(entryAddr),
                                            ImmPtr((void*)-1));
     }
 
     if (modifiesArguments_)
         baselineScript->setModifiesArguments();
 
@@ -495,17 +495,17 @@ BaselineCompiler::emitOutOfLinePostBarri
     masm.popValue(R0);
     masm.ret();
     return true;
 }
 
 bool
 BaselineCompiler::emitIC(ICStub* stub, ICEntry::Kind kind)
 {
-    ICEntry* entry = allocateICEntry(stub, kind);
+    BaselineICEntry* entry = allocateICEntry(stub, kind);
     if (!entry)
         return false;
 
     CodeOffset patchOffset;
     EmitCallIC(&patchOffset, masm);
     entry->setReturnOffset(CodeOffset(masm.currentOffset()));
     if (!addICLoadLabel(patchOffset))
         return false;
--- a/js/src/jit/BaselineDebugModeOSR.cpp
+++ b/js/src/jit/BaselineDebugModeOSR.cpp
@@ -214,17 +214,17 @@ CollectJitStackScripts(JSContext* cx, co
                 // We are in the middle of handling an exception and the frame
                 // must have an override pc.
                 uint32_t offset = script->pcToOffset(frame->overridePc());
                 if (!entries.append(DebugModeOSREntry(script, offset)))
                     return false;
             } else {
                 // The frame must be settled on a pc with an ICEntry.
                 uint8_t* retAddr = iter.returnAddressToFp();
-                ICEntry& icEntry = script->baselineScript()->icEntryFromReturnAddress(retAddr);
+                BaselineICEntry& icEntry = script->baselineScript()->icEntryFromReturnAddress(retAddr);
                 if (!entries.append(DebugModeOSREntry(script, icEntry)))
                     return false;
             }
 
             if (entries.back().needsRecompileInfo()) {
                 if (!entries.back().allocateRecompileInfo(cx))
                     return false;
 
@@ -488,43 +488,43 @@ PatchBaselineFramesForDebugMode(JSContex
                 //
                 // Patching returns from a VM call. After fixing up the the
                 // continuation for unsynced values (the frame register is
                 // popped by the callVM trampoline), we resume at the
                 // return-from-callVM address. The assumption here is that all
                 // callVMs which can trigger debug mode OSR are the *only*
                 // callVMs generated for their respective pc locations in the
                 // baseline JIT code.
-                ICEntry& callVMEntry = bl->callVMEntryFromPCOffset(pcOffset);
+                BaselineICEntry& callVMEntry = bl->callVMEntryFromPCOffset(pcOffset);
                 recompInfo->resumeAddr = bl->returnAddressForIC(callVMEntry);
                 popFrameReg = false;
                 break;
               }
 
               case ICEntry::Kind_WarmupCounter: {
                 // Case J above.
                 //
                 // Patching mechanism is identical to a CallVM. This is
                 // handled especially only because the warmup counter VM call is
                 // part of the prologue, and not tied an opcode.
-                ICEntry& warmupCountEntry = bl->warmupCountICEntry();
+                BaselineICEntry& warmupCountEntry = bl->warmupCountICEntry();
                 recompInfo->resumeAddr = bl->returnAddressForIC(warmupCountEntry);
                 popFrameReg = false;
                 break;
               }
 
               case ICEntry::Kind_StackCheck:
               case ICEntry::Kind_EarlyStackCheck: {
                 // Case I above.
                 //
                 // Patching mechanism is identical to a CallVM. This is
                 // handled especially only because the stack check VM call is
                 // part of the prologue, and not tied an opcode.
                 bool earlyCheck = kind == ICEntry::Kind_EarlyStackCheck;
-                ICEntry& stackCheckEntry = bl->stackCheckICEntry(earlyCheck);
+                BaselineICEntry& stackCheckEntry = bl->stackCheckICEntry(earlyCheck);
                 recompInfo->resumeAddr = bl->returnAddressForIC(stackCheckEntry);
                 popFrameReg = false;
                 break;
               }
 
               case ICEntry::Kind_DebugTrap:
                 // Case C above.
                 //
--- a/js/src/jit/BaselineInspector.h
+++ b/js/src/jit/BaselineInspector.h
@@ -40,17 +40,17 @@ class SetElemICInspector : public ICInsp
     bool sawDenseWrite() const;
     bool sawTypedArrayWrite() const;
 };
 
 class BaselineInspector
 {
   private:
     JSScript* script;
-    ICEntry* prevLookedUpEntry;
+    BaselineICEntry* prevLookedUpEntry;
 
   public:
     explicit BaselineInspector(JSScript* script)
       : script(script), prevLookedUpEntry(nullptr)
     {
         MOZ_ASSERT(script);
     }
 
@@ -64,28 +64,29 @@ class BaselineInspector
 
   private:
 #ifdef DEBUG
     bool isValidPC(jsbytecode* pc) {
         return script->containsPC(pc);
     }
 #endif
 
-    ICEntry& icEntryFromPC(jsbytecode* pc) {
+    BaselineICEntry& icEntryFromPC(jsbytecode* pc) {
         MOZ_ASSERT(hasBaselineScript());
         MOZ_ASSERT(isValidPC(pc));
-        ICEntry& ent = baselineScript()->icEntryFromPCOffset(script->pcToOffset(pc), prevLookedUpEntry);
+        BaselineICEntry& ent =
+            baselineScript()->icEntryFromPCOffset(script->pcToOffset(pc), prevLookedUpEntry);
         MOZ_ASSERT(ent.isForOp());
         prevLookedUpEntry = &ent;
         return ent;
     }
 
     template <typename ICInspectorType>
     ICInspectorType makeICInspector(jsbytecode* pc, ICStub::Kind expectedFallbackKind) {
-        ICEntry* ent = nullptr;
+        BaselineICEntry* ent = nullptr;
         if (hasBaselineScript()) {
             ent = &icEntryFromPC(pc);
             MOZ_ASSERT(ent->fallbackStub()->kind() == expectedFallbackKind);
         }
         return ICInspectorType(this, pc, ent);
     }
 
     ICStub* monomorphicStub(jsbytecode* pc);
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -403,17 +403,17 @@ BaselineScript::New(JSScript* jsscript, 
                     uint32_t profilerEnterToggleOffset, uint32_t profilerExitToggleOffset,
                     uint32_t traceLoggerEnterToggleOffset, uint32_t traceLoggerExitToggleOffset,
                     uint32_t postDebugPrologueOffset,
                     size_t icEntries, size_t pcMappingIndexEntries, size_t pcMappingSize,
                     size_t bytecodeTypeMapEntries, size_t yieldEntries)
 {
     static const unsigned DataAlignment = sizeof(uintptr_t);
 
-    size_t icEntriesSize = icEntries * sizeof(ICEntry);
+    size_t icEntriesSize = icEntries * sizeof(BaselineICEntry);
     size_t pcMappingIndexEntriesSize = pcMappingIndexEntries * sizeof(PCMappingIndexEntry);
     size_t bytecodeTypeMapSize = bytecodeTypeMapEntries * sizeof(uint32_t);
     size_t yieldEntriesSize = yieldEntries * sizeof(uintptr_t);
 
     size_t paddedICEntriesSize = AlignBytes(icEntriesSize, DataAlignment);
     size_t paddedPCMappingIndexEntriesSize = AlignBytes(pcMappingIndexEntriesSize, DataAlignment);
     size_t paddedPCMappingSize = AlignBytes(pcMappingSize, DataAlignment);
     size_t paddedBytecodeTypesMapSize = AlignBytes(bytecodeTypeMapSize, DataAlignment);
@@ -461,17 +461,17 @@ BaselineScript::New(JSScript* jsscript, 
 void
 BaselineScript::trace(JSTracer* trc)
 {
     TraceEdge(trc, &method_, "baseline-method");
     TraceNullableEdge(trc, &templateScope_, "baseline-template-scope");
 
     // Mark all IC stub codes hanging off the IC stub entries.
     for (size_t i = 0; i < numICEntries(); i++) {
-        ICEntry& ent = icEntry(i);
+        BaselineICEntry& ent = icEntry(i);
         ent.trace(trc);
     }
 }
 
 /* static */
 void
 BaselineScript::writeBarrierPre(Zone* zone, BaselineScript* script)
 {
@@ -556,17 +556,17 @@ BaselineScript::removeDependentWasmImpor
     for (DependentWasmImport& dep : *dependentWasmImports_) {
         if (dep.instance == &instance && dep.importIndex == idx) {
             dependentWasmImports_->erase(&dep);
             break;
         }
     }
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::icEntry(size_t index)
 {
     MOZ_ASSERT(index < numICEntries());
     return icEntryList()[index];
 }
 
 PCMappingIndexEntry&
 BaselineScript::pcMappingIndexEntry(size_t index)
@@ -589,30 +589,30 @@ BaselineScript::pcMappingReader(size_t i
 }
 
 struct ICEntries
 {
     BaselineScript* const baseline_;
 
     explicit ICEntries(BaselineScript* baseline) : baseline_(baseline) {}
 
-    ICEntry& operator[](size_t index) const {
+    BaselineICEntry& operator[](size_t index) const {
         return baseline_->icEntry(index);
     }
 };
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::icEntryFromReturnOffset(CodeOffset returnOffset)
 {
     size_t loc;
 #ifdef DEBUG
     bool found =
 #endif
         BinarySearchIf(ICEntries(this), 0, numICEntries(),
-                       [&returnOffset](ICEntry& entry) {
+                       [&returnOffset](BaselineICEntry& entry) {
                            size_t roffset = returnOffset.offset();
                            size_t entryRoffset = entry.returnOffset().offset();
                            if (roffset < entryRoffset)
                                return -1;
                            if (entryRoffset < roffset)
                                return 1;
                            return 0;
                        },
@@ -624,35 +624,35 @@ BaselineScript::icEntryFromReturnOffset(
     return icEntry(loc);
 }
 
 static inline size_t
 ComputeBinarySearchMid(BaselineScript* baseline, uint32_t pcOffset)
 {
     size_t loc;
     BinarySearchIf(ICEntries(baseline), 0, baseline->numICEntries(),
-                   [pcOffset](ICEntry& entry) {
+                   [pcOffset](BaselineICEntry& entry) {
                        uint32_t entryOffset = entry.pcOffset();
                        if (pcOffset < entryOffset)
                            return -1;
                        if (entryOffset < pcOffset)
                            return 1;
                        return 0;
                    },
                    &loc);
     return loc;
 }
 
 uint8_t*
-BaselineScript::returnAddressForIC(const ICEntry& ent)
+BaselineScript::returnAddressForIC(const BaselineICEntry& ent)
 {
     return method()->raw() + ent.returnOffset().offset();
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::icEntryFromPCOffset(uint32_t pcOffset)
 {
     // Multiple IC entries can have the same PC offset, but this method only looks for
     // those which have isForOp() set.
     size_t mid = ComputeBinarySearchMid(this, pcOffset);
 
     // Found an IC entry with a matching PC offset.  Search backward, and then
     // forward from this IC entry, looking for one with the same PC offset which
@@ -663,40 +663,40 @@ BaselineScript::icEntryFromPCOffset(uint
     }
     for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
         if (icEntry(i).isForOp())
             return icEntry(i);
     }
     MOZ_CRASH("Invalid PC offset for IC entry.");
 }
 
-ICEntry&
-BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry)
+BaselineICEntry&
+BaselineScript::icEntryFromPCOffset(uint32_t pcOffset, BaselineICEntry* prevLookedUpEntry)
 {
     // Do a linear forward search from the last queried PC offset, or fallback to a
     // binary search if the last offset is too far away.
     if (prevLookedUpEntry && pcOffset >= prevLookedUpEntry->pcOffset() &&
         (pcOffset - prevLookedUpEntry->pcOffset()) <= 10)
     {
-        ICEntry* firstEntry = &icEntry(0);
-        ICEntry* lastEntry = &icEntry(numICEntries() - 1);
-        ICEntry* curEntry = prevLookedUpEntry;
+        BaselineICEntry* firstEntry = &icEntry(0);
+        BaselineICEntry* lastEntry = &icEntry(numICEntries() - 1);
+        BaselineICEntry* curEntry = prevLookedUpEntry;
         while (curEntry >= firstEntry && curEntry <= lastEntry) {
             if (curEntry->pcOffset() == pcOffset && curEntry->isForOp())
                 break;
             curEntry++;
         }
         MOZ_ASSERT(curEntry->pcOffset() == pcOffset && curEntry->isForOp());
         return *curEntry;
     }
 
     return icEntryFromPCOffset(pcOffset);
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::callVMEntryFromPCOffset(uint32_t pcOffset)
 {
     // Like icEntryFromPCOffset, but only looks for the fake ICEntries
     // inserted by VM calls.
     size_t mid = ComputeBinarySearchMid(this, pcOffset);
 
     for (size_t i = mid; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i--) {
         if (icEntry(i).kind() == ICEntry::Kind_CallVM)
@@ -704,44 +704,44 @@ BaselineScript::callVMEntryFromPCOffset(
     }
     for (size_t i = mid+1; i < numICEntries() && icEntry(i).pcOffset() == pcOffset; i++) {
         if (icEntry(i).kind() == ICEntry::Kind_CallVM)
             return icEntry(i);
     }
     MOZ_CRASH("Invalid PC offset for callVM entry.");
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::stackCheckICEntry(bool earlyCheck)
 {
     // The stack check will always be at offset 0, so just do a linear search
     // from the beginning. This is only needed for debug mode OSR, when
     // patching a frame that has invoked a Debugger hook via the interrupt
     // handler via the stack check, which is part of the prologue.
     ICEntry::Kind kind = earlyCheck ? ICEntry::Kind_EarlyStackCheck : ICEntry::Kind_StackCheck;
     for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
         if (icEntry(i).kind() == kind)
             return icEntry(i);
     }
     MOZ_CRASH("No stack check ICEntry found.");
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::warmupCountICEntry()
 {
     // The stack check will be at a very low offset, so just do a linear search
     // from the beginning.
     for (size_t i = 0; i < numICEntries() && icEntry(i).pcOffset() == 0; i++) {
         if (icEntry(i).kind() == ICEntry::Kind_WarmupCounter)
             return icEntry(i);
     }
     MOZ_CRASH("No warmup count ICEntry found.");
 }
 
-ICEntry&
+BaselineICEntry&
 BaselineScript::icEntryFromReturnAddress(uint8_t* returnAddr)
 {
     MOZ_ASSERT(returnAddr > method_->raw());
     MOZ_ASSERT(returnAddr < method_->raw() + method_->instructionsSize());
     CodeOffset offset(returnAddr - method_->raw());
     return icEntryFromReturnOffset(offset);
 }
 
@@ -752,22 +752,22 @@ BaselineScript::copyYieldEntries(JSScrip
 
     for (size_t i = 0; i < yieldOffsets.length(); i++) {
         uint32_t offset = yieldOffsets[i];
         entries[i] = nativeCodeForPC(script, script->offsetToPC(offset));
     }
 }
 
 void
-BaselineScript::copyICEntries(JSScript* script, const ICEntry* entries, MacroAssembler& masm)
+BaselineScript::copyICEntries(JSScript* script, const BaselineICEntry* entries, MacroAssembler& masm)
 {
     // Fix up the return offset in the IC entries and copy them in.
     // Also write out the IC entry ptrs in any fallback stubs that were added.
     for (uint32_t i = 0; i < numICEntries(); i++) {
-        ICEntry& realEntry = icEntry(i);
+        BaselineICEntry& realEntry = icEntry(i);
         realEntry = entries[i];
 
         if (!realEntry.hasStub()) {
             // VM call without any stubs.
             continue;
         }
 
         // If the attached stub is a fallback stub, then fix it up with
@@ -1069,17 +1069,17 @@ BaselineScript::toggleProfilerInstrument
 }
 
 void
 BaselineScript::purgeOptimizedStubs(Zone* zone)
 {
     JitSpew(JitSpew_BaselineIC, "Purging optimized stubs");
 
     for (size_t i = 0; i < numICEntries(); i++) {
-        ICEntry& entry = icEntry(i);
+        BaselineICEntry& entry = icEntry(i);
         if (!entry.hasStub())
             continue;
 
         ICStub* lastStub = entry.firstStub();
         while (lastStub->next())
             lastStub = lastStub->next();
 
         if (lastStub->isFallback()) {
@@ -1110,17 +1110,17 @@ BaselineScript::purgeOptimizedStubs(Zone
         } else {
             MOZ_ASSERT(lastStub->isTableSwitch());
         }
     }
 
 #ifdef DEBUG
     // All remaining stubs must be allocated in the fallback space.
     for (size_t i = 0; i < numICEntries(); i++) {
-        ICEntry& entry = icEntry(i);
+        BaselineICEntry& entry = icEntry(i);
         if (!entry.hasStub())
             continue;
 
         ICStub* stub = entry.firstStub();
         while (stub->next()) {
             MOZ_ASSERT(stub->allocatedInFallbackSpace());
             stub = stub->next();
         }
--- a/js/src/jit/BaselineJIT.h
+++ b/js/src/jit/BaselineJIT.h
@@ -17,17 +17,17 @@
 #include "jit/IonCode.h"
 #include "jit/MacroAssembler.h"
 #include "vm/TraceLogging.h"
 
 namespace js {
 namespace jit {
 
 class StackValue;
-class ICEntry;
+class BaselineICEntry;
 class ICStub;
 
 class PCMappingSlotInfo
 {
     uint8_t slotInfo_;
 
   public:
     // SlotInfo encoding:
@@ -333,18 +333,18 @@ struct BaselineScript
 
     uint32_t postDebugPrologueOffset() const {
         return postDebugPrologueOffset_;
     }
     uint8_t* postDebugPrologueAddr() const {
         return method_->raw() + postDebugPrologueOffset_;
     }
 
-    ICEntry* icEntryList() {
-        return (ICEntry*)(reinterpret_cast<uint8_t*>(this) + icEntriesOffset_);
+    BaselineICEntry* icEntryList() {
+        return (BaselineICEntry*)(reinterpret_cast<uint8_t*>(this) + icEntriesOffset_);
     }
     uint8_t** yieldEntryList() {
         return (uint8_t**)(reinterpret_cast<uint8_t*>(this) + yieldEntriesOffset_);
     }
     PCMappingIndexEntry* pcMappingIndexEntryList() {
         return (PCMappingIndexEntry*)(reinterpret_cast<uint8_t*>(this) + pcMappingIndexOffset_);
     }
     uint8_t* pcMappingData() {
@@ -373,31 +373,31 @@ struct BaselineScript
     void toggleBarriers(bool enabled, ReprotectCode reprotect = Reprotect) {
         method()->togglePreBarriers(enabled, reprotect);
     }
 
     bool containsCodeAddress(uint8_t* addr) const {
         return method()->raw() <= addr && addr <= method()->raw() + method()->instructionsSize();
     }
 
-    ICEntry& icEntry(size_t index);
-    ICEntry& icEntryFromReturnOffset(CodeOffset returnOffset);
-    ICEntry& icEntryFromPCOffset(uint32_t pcOffset);
-    ICEntry& icEntryFromPCOffset(uint32_t pcOffset, ICEntry* prevLookedUpEntry);
-    ICEntry& callVMEntryFromPCOffset(uint32_t pcOffset);
-    ICEntry& stackCheckICEntry(bool earlyCheck);
-    ICEntry& warmupCountICEntry();
-    ICEntry& icEntryFromReturnAddress(uint8_t* returnAddr);
-    uint8_t* returnAddressForIC(const ICEntry& ent);
+    BaselineICEntry& icEntry(size_t index);
+    BaselineICEntry& icEntryFromReturnOffset(CodeOffset returnOffset);
+    BaselineICEntry& icEntryFromPCOffset(uint32_t pcOffset);
+    BaselineICEntry& icEntryFromPCOffset(uint32_t pcOffset, BaselineICEntry* prevLookedUpEntry);
+    BaselineICEntry& callVMEntryFromPCOffset(uint32_t pcOffset);
+    BaselineICEntry& stackCheckICEntry(bool earlyCheck);
+    BaselineICEntry& warmupCountICEntry();
+    BaselineICEntry& icEntryFromReturnAddress(uint8_t* returnAddr);
+    uint8_t* returnAddressForIC(const BaselineICEntry& ent);
 
     size_t numICEntries() const {
         return icEntries_;
     }
 
-    void copyICEntries(JSScript* script, const ICEntry* entries, MacroAssembler& masm);
+    void copyICEntries(JSScript* script, const BaselineICEntry* entries, MacroAssembler& masm);
     void adoptFallbackStubs(FallbackICStubSpace* stubSpace);
 
     void copyYieldEntries(JSScript* script, Vector<uint32_t>& yieldOffsets);
 
     PCMappingIndexEntry& pcMappingIndexEntry(size_t index);
     CompactBufferReader pcMappingReader(size_t indexEntry);
 
     size_t numPCMappingIndexEntries() const {
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1039,17 +1039,17 @@ IonScript::trace(JSTracer* trc)
     if (deoptTable_)
         TraceEdge(trc, &deoptTable_, "deoptimizationTable");
 
     for (size_t i = 0; i < numConstants(); i++)
         TraceEdge(trc, &getConstant(i), "constant");
 
     // Mark all IC stub codes hanging off the IC stub entries.
     for (size_t i = 0; i < numSharedStubs(); i++) {
-        ICEntry& ent = sharedStubList()[i];
+        IonICEntry& ent = sharedStubList()[i];
         ent.trace(trc);
     }
 
     // Trace caches so that the JSScript pointer can be updated if moved.
     for (size_t i = 0; i < numCaches(); i++)
         getCacheFromIndex(i).trace(trc);
 }
 
--- a/js/src/jit/SharedIC.cpp
+++ b/js/src/jit/SharedIC.cpp
@@ -90,17 +90,30 @@ TypeFallbackICSpew(JSContext* cx, ICType
 
 ICFallbackStub*
 ICEntry::fallbackStub() const
 {
     return firstStub()->getChainFallback();
 }
 
 void
-ICEntry::trace(JSTracer* trc)
+IonICEntry::trace(JSTracer* trc)
+{
+    TraceManuallyBarrieredEdge(trc, &script_, "IonICEntry::script_");
+    traceEntry(trc);
+}
+
+void
+BaselineICEntry::trace(JSTracer* trc)
+{
+    traceEntry(trc);
+}
+
+void
+ICEntry::traceEntry(JSTracer* trc)
 {
     if (!hasStub())
         return;
     for (ICStub* stub = firstStub(); stub; stub = stub->next())
         stub->trace(trc);
 }
 
 ICStubConstIterator&
--- a/js/src/jit/SharedIC.h
+++ b/js/src/jit/SharedIC.h
@@ -332,32 +332,45 @@ class ICEntry
     static inline size_t offsetOfFirstStub() {
         return offsetof(ICEntry, firstStub_);
     }
 
     inline ICStub** addressOfFirstStub() {
         return &firstStub_;
     }
 
+  protected:
+    void traceEntry(JSTracer* trc);
+};
+
+class BaselineICEntry : public ICEntry
+{
+  public:
+    BaselineICEntry(uint32_t pcOffset, Kind kind)
+      : ICEntry(pcOffset, kind)
+    { }
+
     void trace(JSTracer* trc);
 };
 
 class IonICEntry : public ICEntry
 {
     JSScript* script_;
 
   public:
     IonICEntry(uint32_t pcOffset, Kind kind, JSScript* script)
       : ICEntry(pcOffset, kind),
         script_(script)
     { }
 
     JSScript* script() {
         return script_;
     }
+
+    void trace(JSTracer* trc);
 };
 
 class ICMonitoredStub;
 class ICMonitoredFallbackStub;
 class ICUpdatedStub;
 
 // Constant iterator that traverses arbitrary chains of ICStubs.
 // No requirements are made of the ICStub used to construct this
--- a/js/src/jit/shared/BaselineCompiler-shared.h
+++ b/js/src/jit/shared/BaselineCompiler-shared.h
@@ -26,17 +26,17 @@ class BaselineCompilerShared
     bool ionOSRCompileable_;
     bool compileDebugInstrumentation_;
 
     TempAllocator& alloc_;
     BytecodeAnalysis analysis_;
     FrameInfo frame;
 
     FallbackICStubSpace stubSpace_;
-    js::Vector<ICEntry, 16, SystemAllocPolicy> icEntries_;
+    js::Vector<BaselineICEntry, 16, SystemAllocPolicy> icEntries_;
 
     // Stores the native code offset for a bytecode pc.
     struct PCMappingEntry
     {
         uint32_t pcOffset;
         uint32_t nativeOffset;
         PCMappingSlotInfo slotInfo;
 
@@ -66,37 +66,37 @@ class BaselineCompilerShared
     CodeOffset profilerEnterFrameToggleOffset_;
     CodeOffset profilerExitFrameToggleOffset_;
     CodeOffset traceLoggerEnterToggleOffset_;
     CodeOffset traceLoggerExitToggleOffset_;
     CodeOffset traceLoggerScriptTextIdOffset_;
 
     BaselineCompilerShared(JSContext* cx, TempAllocator& alloc, JSScript* script);
 
-    ICEntry* allocateICEntry(ICStub* stub, ICEntry::Kind kind) {
+    BaselineICEntry* allocateICEntry(ICStub* stub, ICEntry::Kind kind) {
         if (!stub)
             return nullptr;
 
         // Create the entry and add it to the vector.
-        if (!icEntries_.append(ICEntry(script->pcToOffset(pc), kind))) {
+        if (!icEntries_.append(BaselineICEntry(script->pcToOffset(pc), kind))) {
             ReportOutOfMemory(cx);
             return nullptr;
         }
-        ICEntry& vecEntry = icEntries_.back();
+        BaselineICEntry& vecEntry = icEntries_.back();
 
         // Set the first stub for the IC entry to the fallback stub
         vecEntry.setFirstStub(stub);
 
         // Return pointer to the IC entry
         return &vecEntry;
     }
 
     // Append an ICEntry without a stub.
     bool appendICEntry(ICEntry::Kind kind, uint32_t returnOffset) {
-        ICEntry entry(script->pcToOffset(pc), kind);
+        BaselineICEntry entry(script->pcToOffset(pc), kind);
         entry.setReturnOffset(CodeOffset(returnOffset));
         if (!icEntries_.append(entry)) {
             ReportOutOfMemory(cx);
             return false;
         }
         return true;
     }