Bug 1533890: Migrate ConstStringSplit to CacheIR r=mgaudet
author Iain Ireland <iireland@mozilla.com>
date Mon, 08 Apr 2019 15:29:19 +0000
changeset 468396 b698d91f51ddcf94064cc7befd810f9c0b25d16d
parent 468395 90f3b654d3ac69515e688942af08bf5480b5f0fa
child 468397 0d7d1ef8d08e18a27fa9449985b3fd45c774456a
push id 35835
push user aciure@mozilla.com
push date Mon, 08 Apr 2019 19:00:29 +0000
treeherder mozilla-central@40456af7da1c
reviewers mgaudet
bugs 1533890
milestone 68.0a1
Bug 1533890: Migrate ConstStringSplit to CacheIR r=mgaudet

This patch adds support for ConstStringSplit. ConstStringSplit is attached after the call is performed, so that it can grab and cache the result. To make this work, I added a new AttachDecision value, Deferred.

Notes:

1. Testing this code made me realize that a previous change I made was wrong. Even in call ICs that don't use argc, we have to initialize the input location. If we don't, the register can be allocated and clobbered, which will break subsequent ICs.

2. We only want to attach a ConstStringSplit stub if it is the first stub we try to attach. (If another stub has already been attached, then the call site can't be a constant string split.) The old implementation would remove any existing ConstStringSplit stubs when it realized that the optimization had failed, but there isn't a particularly nice way of doing that in CacheIR, and I don't think it matters much.

3. For unclear reasons, we had an implementation of CallStringSplitResult in IonCacheIRCompiler. It was dead code, so I removed it.

Differential Revision: https://phabricator.services.mozilla.com/D25876
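For readers skimming the patch, here is a minimal, self-contained C++ sketch of the control flow that the new Deferred decision enables. This is not SpiderMonkey code; every name and signature below is an illustrative stand-in. The point it demonstrates is the one in the commit message: the generator defers before the call, the fallback performs the call, and only then is the deferred stub attached, with the call's result in hand.

    #include <cstdio>

    // Mirrors the enum added in CacheIR.h; the surrounding code is hypothetical.
    enum class AttachDecision { NoAction, Attach, TemporarilyUnoptimizable, Deferred };

    // Stand-in for CallIRGenerator::tryAttachStub(): before the call runs, a
    // const-string-split candidate (two atom arguments, first stub) can only defer.
    AttachDecision tryAttachStub(bool bothArgsAreAtoms, bool isFirstStub) {
      if (bothArgsAreAtoms && isFirstStub) {
        return AttachDecision::Deferred;  // need the call's result to build the stub
      }
      return AttachDecision::NoAction;
    }

    // Stand-in for tryAttachDeferredStub(result): after the call, the result array
    // can be copied into the stub and guarded on the two atom arguments.
    AttachDecision tryAttachDeferredStub(int /*result*/) {
      return AttachDecision::Attach;
    }

    int main() {
      // Stand-in for DoCallFallback: remember the deferral, do the call, then attach.
      bool deferred =
          tryAttachStub(/*bothArgsAreAtoms=*/true, /*isFirstStub=*/true) ==
          AttachDecision::Deferred;

      int result = 42;  // pretend the call was performed here

      if (deferred && tryAttachDeferredStub(result) == AttachDecision::Attach) {
        std::puts("attached const-string-split stub after the call");
      }
      return 0;
    }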
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/BaselineIC.cpp
js/src/jit/CacheIR.cpp
js/src/jit/CacheIR.h
js/src/jit/IonCacheIRCompiler.cpp
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -910,16 +910,37 @@ bool BaselineCacheIRCompiler::emitCallSt
   using Fn = bool (*)(JSContext*, HandleString, HandleString, HandleObjectGroup,
                       uint32_t limit, MutableHandleValue);
   callVM<Fn, StringSplitHelper>(masm);
 
   stubFrame.leave(masm);
   return true;
 }
 
+bool BaselineCacheIRCompiler::emitCallConstStringSplitResult() {
+  JitSpew(JitSpew_Codegen, __FUNCTION__);
+  Address resultTemplateAddr(stubAddress(reader.stubOffset()));
+
+  AutoScratchRegister scratch(allocator, masm);
+  allocator.discardStack(masm);
+
+  AutoStubFrame stubFrame(*this);
+  stubFrame.enter(masm, scratch);
+
+  // Push argument
+  masm.loadPtr(resultTemplateAddr, scratch);
+  masm.Push(scratch);
+
+  using Fn = bool (*)(JSContext*, HandleArrayObject, MutableHandleValue);
+  callVM<Fn, CopyStringSplitArray>(masm);
+
+  stubFrame.leave(masm);
+  return true;
+}
+
 bool BaselineCacheIRCompiler::emitCompareStringResult() {
   JitSpew(JitSpew_Codegen, __FUNCTION__);
   AutoOutputRegister output(*this);
 
   Register left = allocator.useRegister(masm, reader.stringOperandId());
   Register right = allocator.useRegister(masm, reader.stringOperandId());
   JSOp op = reader.jsop();
 
@@ -2134,26 +2155,23 @@ bool BaselineCacheIRCompiler::init(Cache
       allocator.initInputLocation(0, R0.scratchReg(), JSVAL_TYPE_OBJECT);
 #if defined(JS_NUNBOX32)
       // availableGeneralRegs can't know that GetName/BindName is only using
       // the payloadReg and not typeReg on x86.
       available.add(R0.typeReg());
 #endif
       break;
     case CacheKind::Call:
-      // Calls pass argc in R0. CallICs may or may not use it.
-      MOZ_ASSERT(numInputs == 0 || numInputs == 1);
-      if (numInputs == 1) {
-        allocator.initInputLocation(0, R0.scratchReg(), JSVAL_TYPE_INT32);
+      MOZ_ASSERT(numInputs == 1);
+      allocator.initInputLocation(0, R0.scratchReg(), JSVAL_TYPE_INT32);
 #if defined(JS_NUNBOX32)
-        // availableGeneralRegs can't know that Call is only using
-        // the payloadReg and not typeReg on x86.
-        available.add(R0.typeReg());
+      // availableGeneralRegs can't know that Call is only using
+      // the payloadReg and not typeReg on x86.
+      available.add(R0.typeReg());
 #endif
-      }
       break;
   }
 
   // Baseline doesn't allocate float registers so none of them are live.
   liveFloatRegs_ = LiveFloatRegisterSet(FloatRegisterSet());
 
   allocator.initAvailableRegs(available);
   outputUnchecked_.emplace(R0);
--- a/js/src/jit/BaselineIC.cpp
+++ b/js/src/jit/BaselineIC.cpp
@@ -3802,24 +3802,26 @@ bool DoCallFallback(JSContext* cx, Basel
   }
 
   // Transition stub state to megamorphic or generic if warranted.
   if (stub->state().maybeTransition()) {
     stub->discardStubs(cx);
   }
 
   bool canAttachStub = stub->state().canAttachStub();
+  bool isFirstStub = stub->numOptimizedStubs() == 0;
   bool handled = false;
+  bool deferred = false;
 
   // Only bother to try optimizing JSOP_CALL with CacheIR if the chain is still
   // allowed to attach stubs.
   if (canAttachStub) {
     HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2);
     CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), argc, callee,
-                        callArgs.thisv(), newTarget, args);
+                        callArgs.thisv(), newTarget, args, isFirstStub);
     switch (gen.tryAttachStub()) {
       case AttachDecision::NoAction:
         break;
       case AttachDecision::Attach: {
         ICStub* newStub = AttachBaselineCacheIRStub(
             cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
             stub, &handled);
         if (newStub) {
@@ -3830,21 +3832,23 @@ bool DoCallFallback(JSContext* cx, Basel
             SetUpdateStubData(newStub->toCacheIR_Updated(),
                               gen.typeCheckInfo());
           }
         }
       } break;
       case AttachDecision::TemporarilyUnoptimizable:
         handled = true;
         break;
+      case AttachDecision::Deferred:
+        deferred = true;
     }
 
     // Try attaching a regular call stub, but only if the CacheIR attempt didn't
     // add any stubs.
-    if (!handled) {
+    if (!handled && !deferred) {
       bool createSingleton =
           ObjectGroup::useSingletonForNewObject(cx, script, pc);
       if (!TryAttachCallStub(cx, stub, script, pc, op, argc, vp, constructing,
                              false, createSingleton, &handled)) {
         return false;
       }
     }
   }
@@ -3885,16 +3889,44 @@ bool DoCallFallback(JSContext* cx, Basel
   }
 
   // Try to transition again in case we called this IC recursively.
   if (stub->state().maybeTransition()) {
     stub->discardStubs(cx);
   }
   canAttachStub = stub->state().canAttachStub();
 
+  if (deferred && canAttachStub) {
+    HandleValueArray args = HandleValueArray::fromMarkedLocation(argc, vp + 2);
+    CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), argc, callee,
+                        callArgs.thisv(), newTarget, args, isFirstStub);
+    switch (gen.tryAttachDeferredStub(res)) {
+      case AttachDecision::Attach: {
+        ICStub* newStub = AttachBaselineCacheIRStub(
+            cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
+            stub, &handled);
+        if (newStub) {
+          JitSpew(JitSpew_BaselineIC, "  Attached Call CacheIR stub");
+
+          // If it's an updated stub, initialize it.
+          if (gen.cacheIRStubKind() == BaselineCacheIRStubKind::Updated) {
+            SetUpdateStubData(newStub->toCacheIR_Updated(),
+                              gen.typeCheckInfo());
+          }
+        }
+      } break;
+      case AttachDecision::NoAction:
+        break;
+      case AttachDecision::TemporarilyUnoptimizable:
+      case AttachDecision::Deferred:
+        MOZ_ASSERT_UNREACHABLE("Impossible attach decision");
+        break;
+    }
+  }
+
   if (!handled && canAttachStub && !constructing) {
     // If 'callee' is a potential Call_ConstStringSplit, try to attach an
     // optimized ConstStringSplit stub. Note that vp[0] now holds the return
     // value instead of the callee, so we pass the callee as well.
     if (!TryAttachConstStringSplit(cx, stub, script, argc, callee, vp, pc, res,
                                    &handled)) {
       return false;
     }
@@ -3929,26 +3961,27 @@ bool DoSpreadCallFallback(JSContext* cx,
 
   // Transition stub state to megamorphic or generic if warranted.
   if (stub->state().maybeTransition()) {
     stub->discardStubs(cx);
   }
 
   // Try attaching a call stub.
   bool handled = false;
+  bool isFirstStub = stub->numOptimizedStubs() == 0;
   if (op != JSOP_SPREADEVAL && op != JSOP_STRICTSPREADEVAL &&
       stub->state().canAttachStub()) {
     // Try CacheIR first:
     RootedArrayObject aobj(cx, &arr.toObject().as<ArrayObject>());
     MOZ_ASSERT(aobj->length() == aobj->getDenseInitializedLength());
 
     HandleValueArray args = HandleValueArray::fromMarkedLocation(
         aobj->length(), aobj->getDenseElements());
     CallIRGenerator gen(cx, script, pc, op, stub->state().mode(), 1, callee,
-                        thisv, newTarget, args);
+                        thisv, newTarget, args, isFirstStub);
     switch (gen.tryAttachStub()) {
       case AttachDecision::NoAction:
         break;
       case AttachDecision::Attach: {
         ICStub* newStub = AttachBaselineCacheIRStub(
             cx, gen.writerRef(), gen.cacheKind(), gen.cacheIRStubKind(), script,
             stub, &handled);
 
@@ -3960,16 +3993,19 @@ bool DoSpreadCallFallback(JSContext* cx,
             SetUpdateStubData(newStub->toCacheIR_Updated(),
                               gen.typeCheckInfo());
           }
         }
       } break;
       case AttachDecision::TemporarilyUnoptimizable:
         handled = true;
         break;
+      case AttachDecision::Deferred:
+        MOZ_ASSERT_UNREACHABLE("No deferred optimizations for spread calls");
+        break;
     }
 
     // Try attaching a regular call stub, but only if the CacheIR attempt didn't
     // add any stubs.
     if (!handled) {
       if (!TryAttachCallStub(cx, stub, script, pc, op, 1, vp, constructing,
                              true, false, &handled)) {
         return false;
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -4673,38 +4673,38 @@ void GetIteratorIRGenerator::trackAttach
   }
 #endif
 }
 
 CallIRGenerator::CallIRGenerator(JSContext* cx, HandleScript script,
                                  jsbytecode* pc, JSOp op, ICState::Mode mode,
                                  uint32_t argc, HandleValue callee,
                                  HandleValue thisval, HandleValue newTarget,
-                                 HandleValueArray args)
+                                 HandleValueArray args, bool isFirstStub)
     : IRGenerator(cx, script, pc, CacheKind::Call, mode),
       op_(op),
       argc_(argc),
       callee_(callee),
       thisval_(thisval),
       newTarget_(newTarget),
       args_(args),
       typeCheckInfo_(cx, /* needsTypeBarrier = */ true),
-      cacheIRStubKind_(BaselineCacheIRStubKind::Regular) {}
+      cacheIRStubKind_(BaselineCacheIRStubKind::Regular),
+      isFirstStub_(isFirstStub) {}
 
 AttachDecision CallIRGenerator::tryAttachStringSplit() {
   // Only optimize StringSplitString(str, str)
   if (argc_ != 2 || !args_[0].isString() || !args_[1].isString()) {
     return AttachDecision::NoAction;
   }
 
-  // Just for now: if they're both atoms, then do not optimize using
-  // CacheIR and allow the legacy "ConstStringSplit" BaselineIC optimization
-  // to proceed.
-  if (args_[0].toString()->isAtom() && args_[1].toString()->isAtom()) {
-    return AttachDecision::NoAction;
+  // If we have not previously attached a stub and both arguments are atoms,
+  // defer until after the call and attach a const string split stub.
+  if (isOptimizableConstStringSplit()) {
+    return AttachDecision::Deferred;
   }
 
   // Get the object group to use for this location.
   RootedObjectGroup group(cx_,
                           ObjectGroupRealm::getStringSplitStringGroup(cx_));
   if (!group) {
     cx_->clearPendingException();
     return AttachDecision::NoAction;
@@ -4898,16 +4898,18 @@ AttachDecision CallIRGenerator::tryAttac
 
 AttachDecision CallIRGenerator::tryAttachIsSuspendedGenerator() {
   // The IsSuspendedGenerator intrinsic is only called in
   // self-hosted code, so it's safe to assume we have a single
   // argument and the callee is our intrinsic.
 
   MOZ_ASSERT(argc_ == 1);
 
+  Int32OperandId argcId(writer.setInputOperandId(0));
+
   // Stack layout here is (bottom to top):
   //  2: Callee
   //  1: ThisValue
   //  0: Arg <-- Top of stack.
   // We only care about the argument.
   ValOperandId valId = writer.loadArgumentFixedSlot(ArgumentKind::Arg0, argc_);
 
   // Check whether the argument is a suspended generator.
@@ -5526,16 +5528,139 @@ AttachDecision CallIRGenerator::tryAttac
   // Check for native-function optimizations.
   if (calleeFunc->isNative()) {
     return tryAttachCallNative(calleeFunc);
   }
 
   return AttachDecision::NoAction;
 }
 
+bool CallIRGenerator::isOptimizableConstStringSplit() {
+  // If we have not yet attached any stubs to this IC...
+  if (!isFirstStub_) {
+    return false;
+  }
+
+  // And we have two arguments, both of which are strings...
+  if (argc_ != 2 || !args_[0].isString() || !args_[1].isString()) {
+    return false;
+  }
+
+  // And the strings are atoms...
+  if (!args_[0].toString()->isAtom() || !args_[1].toString()->isAtom()) {
+    return false;
+  }
+
+  // And we are calling a function in the current realm...
+  RootedFunction calleeFunc(cx_, &callee_.toObject().as<JSFunction>());
+  if (calleeFunc->realm() != cx_->realm()) {
+    return false;
+  }
+
+  // Which is the String split intrinsic...
+  if (!calleeFunc->isNative() ||
+      calleeFunc->native() != js::intrinsic_StringSplitString) {
+    return false;
+  }
+
+  // Then this might be a call of the form:
+  //  "literal list".split("literal separator")
+  // If so, we can cache the result and avoid having to perform the operation
+  // each time.
+  return true;
+}
+
+AttachDecision CallIRGenerator::tryAttachConstStringSplit(HandleValue result) {
+  if (!isOptimizableConstStringSplit()) {
+    return AttachDecision::NoAction;
+  }
+
+  RootedString str(cx_, args_[0].toString());
+  RootedString sep(cx_, args_[1].toString());
+  RootedArrayObject resultObj(cx_, &result.toObject().as<ArrayObject>());
+  uint32_t initLength = resultObj->getDenseInitializedLength();
+  MOZ_ASSERT(initLength == resultObj->length(),
+             "string-split result is a fully initialized array");
+
+  // Copy the array before storing in stub.
+  RootedArrayObject arrObj(cx_);
+  arrObj = NewFullyAllocatedArrayTryReuseGroup(cx_, resultObj, initLength,
+                                               TenuredObject);
+  if (!arrObj) {
+    cx_->clearPendingException();
+    return AttachDecision::NoAction;
+  }
+  arrObj->ensureDenseInitializedLength(cx_, 0, initLength);
+
+  // Atomize all elements of the array.
+  if (initLength > 0) {
+    // Mimic NewFullyAllocatedStringArray() and directly inform TI about
+    // the element type.
+    AddTypePropertyId(cx_, arrObj, JSID_VOID, TypeSet::StringType());
+
+    for (uint32_t i = 0; i < initLength; i++) {
+      JSAtom* str =
+          js::AtomizeString(cx_, resultObj->getDenseElement(i).toString());
+      if (!str) {
+        cx_->clearPendingException();
+        return AttachDecision::NoAction;
+      }
+      arrObj->initDenseElement(i, StringValue(str));
+    }
+  }
+
+  Int32OperandId argcId(writer.setInputOperandId(0));
+
+  // Guard that callee is the |intrinsic_StringSplitString| native function.
+  ValOperandId calleeValId =
+      writer.loadArgumentFixedSlot(ArgumentKind::Callee, 2);
+  ObjOperandId calleeObjId = writer.guardIsObject(calleeValId);
+  writer.guardSpecificNativeFunction(calleeObjId, intrinsic_StringSplitString);
+
+  // Guard that the first argument is the expected string
+  ValOperandId strValId = writer.loadArgumentFixedSlot(ArgumentKind::Arg0, 2);
+  StringOperandId strStringId = writer.guardIsString(strValId);
+  writer.guardSpecificAtom(strStringId, &str->asAtom());
+
+  // Guard that the second argument is the expected string
+  ValOperandId sepValId = writer.loadArgumentFixedSlot(ArgumentKind::Arg1, 2);
+  StringOperandId sepStringId = writer.guardIsString(sepValId);
+  writer.guardSpecificAtom(sepStringId, &sep->asAtom());
+
+  writer.callConstStringSplitResult(arrObj);
+
+  writer.typeMonitorResult();
+  cacheIRStubKind_ = BaselineCacheIRStubKind::Monitored;
+
+  trackAttached("Const string split");
+
+  return AttachDecision::Attach;
+}
+
+AttachDecision CallIRGenerator::tryAttachDeferredStub(HandleValue result) {
+  AutoAssertNoPendingException aanpe(cx_);
+
+  // Ensure that the opcode makes sense.
+  MOZ_ASSERT(op_ == JSOP_CALL || op_ == JSOP_CALL_IGNORES_RV);
+
+  // Ensure that the mode makes sense.
+  MOZ_ASSERT(mode_ == ICState::Mode::Specialized);
+
+  // We currently only defer native functions.
+  RootedFunction calleeFunc(cx_, &callee_.toObject().as<JSFunction>());
+  MOZ_ASSERT(calleeFunc->isNative());
+
+  if (calleeFunc->native() == js::intrinsic_StringSplitString) {
+    return tryAttachConstStringSplit(result);
+  }
+
+  MOZ_ASSERT_UNREACHABLE("Unexpected deferred function");
+  return AttachDecision::NoAction;
+}
+
 void CallIRGenerator::trackAttached(const char* name) {
 #ifdef JS_CACHEIR_SPEW
   if (const CacheIRSpewer::Guard& sp = CacheIRSpewer::Guard(*this, name)) {
     sp.valueProperty("callee", callee_);
     sp.valueProperty("thisval", thisval_);
     sp.valueProperty("argc", Int32Value(argc_));
   }
 #endif
--- a/js/src/jit/CacheIR.h
+++ b/js/src/jit/CacheIR.h
@@ -362,16 +362,17 @@ extern const uint32_t ArgLengths[];
   _(LoadInt32TruthyResult, Id)                                                 \
   _(LoadDoubleTruthyResult, Id)                                                \
   _(LoadStringTruthyResult, Id)                                                \
   _(LoadObjectTruthyResult, Id)                                                \
   _(LoadValueResult, Field)                                                    \
   _(LoadNewObjectFromTemplateResult, Field, UInt32, UInt32)                    \
                                                                                \
   _(CallStringSplitResult, Id, Id, Field)                                      \
+  _(CallConstStringSplitResult, Field)                                         \
   _(CallStringConcatResult, Id, Id)                                            \
   _(CallStringObjectConcatResult, Id, Id)                                      \
   _(CallIsSuspendedGeneratorResult, Id)                                        \
                                                                                \
   _(CompareStringResult, Id, Id, Byte)                                         \
   _(CompareObjectResult, Id, Id, Byte)                                         \
   _(CompareSymbolResult, Id, Id, Byte)                                         \
   _(CompareInt32Result, Id, Id, Byte)                                          \
@@ -503,17 +504,22 @@ class CallFlags {
   static_assert(LastArgFormat <= ArgFormatMask, "Not enough arg format bits");
   static const uint8_t IsConstructing = 1 << 5;
   static const uint8_t IsSameRealm = 1 << 6;
 
   friend class CacheIRReader;
   friend class CacheIRWriter;
 };
 
-enum class AttachDecision { NoAction, Attach, TemporarilyUnoptimizable };
+enum class AttachDecision {
+  NoAction,
+  Attach,
+  TemporarilyUnoptimizable,
+  Deferred
+};
 
 // Set of arguments supported by GetIndexOfArgument.
 // Support for Arg2 and up can be added easily, but is currently unneeded.
 enum class ArgumentKind : uint8_t { Callee, This, NewTarget, Arg0, Arg1 };
 
 // This function calculates the index of an argument based on the call flags.
 // addArgc is an out-parameter, indicating whether the value of argc should
 // be added to the return value to find the actual index.
@@ -1683,16 +1689,20 @@ class MOZ_RAII CacheIRWriter : public JS
     writeOperandId(rhs);
   }
   void callStringSplitResult(StringOperandId str, StringOperandId sep,
                              ObjectGroup* group) {
     writeOpWithOperandId(CacheOp::CallStringSplitResult, str);
     writeOperandId(sep);
     addStubField(uintptr_t(group), StubField::Type::ObjectGroup);
   }
+  void callConstStringSplitResult(ArrayObject* resultTemplate) {
+    writeOp(CacheOp::CallConstStringSplitResult);
+    addStubField(uintptr_t(resultTemplate), StubField::Type::JSObject);
+  }
 
   void compareStringResult(uint32_t op, StringOperandId lhs,
                            StringOperandId rhs) {
     writeOpWithOperandId(CacheOp::CompareStringResult, lhs);
     writeOperandId(rhs);
     buffer_.writeByte(uint32_t(op));
   }
   void compareObjectResult(uint32_t op, ObjOperandId lhs, ObjOperandId rhs) {
@@ -2292,46 +2302,54 @@ class MOZ_RAII CallIRGenerator : public 
   JSOp op_;
   uint32_t argc_;
   HandleValue callee_;
   HandleValue thisval_;
   HandleValue newTarget_;
   HandleValueArray args_;
   PropertyTypeCheckInfo typeCheckInfo_;
   BaselineCacheIRStubKind cacheIRStubKind_;
+  bool isFirstStub_;
 
   bool getTemplateObjectForScripted(HandleFunction calleeFunc,
                                     MutableHandleObject result,
                                     bool* skipAttach);
   bool getTemplateObjectForNative(HandleFunction calleeFunc,
                                   MutableHandleObject result);
   bool getTemplateObjectForClassHook(HandleObject calleeObj,
                                      MutableHandleObject result);
 
+  // Regular stubs
   AttachDecision tryAttachStringSplit();
   AttachDecision tryAttachArrayPush();
   AttachDecision tryAttachArrayJoin();
   AttachDecision tryAttachIsSuspendedGenerator();
   AttachDecision tryAttachFunCall();
   AttachDecision tryAttachFunApply();
   AttachDecision tryAttachCallScripted(HandleFunction calleeFunc);
   AttachDecision tryAttachSpecialCaseCallNative(HandleFunction calleeFunc);
   AttachDecision tryAttachCallNative(HandleFunction calleeFunc);
   AttachDecision tryAttachCallHook(HandleObject calleeObj);
 
+  // Deferred stubs
+  AttachDecision tryAttachConstStringSplit(HandleValue result);
+
   void trackAttached(const char* name);
 
  public:
   CallIRGenerator(JSContext* cx, HandleScript script, jsbytecode* pc, JSOp op,
                   ICState::Mode mode, uint32_t argc, HandleValue callee,
                   HandleValue thisval, HandleValue newTarget,
-                  HandleValueArray args);
+                  HandleValueArray args, bool isFirstStub);
 
   AttachDecision tryAttachStub();
 
+  bool isOptimizableConstStringSplit();
+  AttachDecision tryAttachDeferredStub(HandleValue result);
+
   BaselineCacheIRStubKind cacheIRStubKind() const { return cacheIRStubKind_; }
 
   const PropertyTypeCheckInfo* typeCheckInfo() const { return &typeCheckInfo_; }
 };
 
 class MOZ_RAII CompareIRGenerator : public IRGenerator {
   JSOp op_;
   HandleValue lhsVal_;
--- a/js/src/jit/IonCacheIRCompiler.cpp
+++ b/js/src/jit/IonCacheIRCompiler.cpp
@@ -1274,38 +1274,22 @@ bool IonCacheIRCompiler::emitLoadEnviron
 
 bool IonCacheIRCompiler::emitLoadStringResult() {
   JitSpew(JitSpew_Codegen, __FUNCTION__);
   MOZ_CRASH("not used in ion");
 }
 
 bool IonCacheIRCompiler::emitCallStringSplitResult() {
   JitSpew(JitSpew_Codegen, __FUNCTION__);
-  AutoSaveLiveRegisters save(*this);
-  AutoOutputRegister output(*this);
-
-  Register str = allocator.useRegister(masm, reader.stringOperandId());
-  Register sep = allocator.useRegister(masm, reader.stringOperandId());
-  ObjectGroup* group = groupStubField(reader.stubOffset());
-
-  allocator.discardStack(masm);
-
-  prepareVMCall(masm, save);
-
-  masm.Push(str);
-  masm.Push(sep);
-  masm.Push(ImmGCPtr(group));
-  masm.Push(Imm32(INT32_MAX));
-
-  using Fn = bool (*)(JSContext*, HandleString, HandleString, HandleObjectGroup,
-                      uint32_t limit, MutableHandleValue);
-  callVM<Fn, StringSplitHelper>(masm);
-
-  masm.storeCallResultValue(output);
-  return true;
+  MOZ_CRASH("not used in ion");
+}
+
+bool IonCacheIRCompiler::emitCallConstStringSplitResult() {
+  JitSpew(JitSpew_Codegen, __FUNCTION__);
+  MOZ_CRASH("not used in ion");
 }
 
 bool IonCacheIRCompiler::emitCompareStringResult() {
   JitSpew(JitSpew_Codegen, __FUNCTION__);
   AutoSaveLiveRegisters save(*this);
   AutoOutputRegister output(*this);
 
   Register left = allocator.useRegister(masm, reader.stringOperandId());