Bug 1533890: Add megamorphic scripted stubs to CacheIR. r=mgaudet
author Iain Ireland <iireland@mozilla.com>
Mon, 08 Apr 2019 15:28:59 +0000
changeset 468393 8664fa8a8a10098284899e8ca8e843072fcce9ab
parent 468392 66ea618f7b136044ceb240b1ed8190f6fe42a4b9
child 468394 190f9fd0c2a7d6f6a411646ba1c25194b8574a01
push id 35835
push user aciure@mozilla.com
push date Mon, 08 Apr 2019 19:00:29 +0000
treeherder mozilla-central@40456af7da1c
reviewers mgaudet
bugs 1533890
milestone 68.0a1
Bug 1533890: Add megamorphic scripted stubs to CacheIR. r=mgaudet

This patch moves CallAnyScripted over to CacheIR. Most of the pieces were already in place to implement FunCall/FunApply. The only new CacheIR op is GuardFunctionIsConstructor. I also took the opportunity to refactor the masm function flag code a little bit.

Differential Revision: https://phabricator.services.mozilla.com/D25873
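
For orientation, here is a condensed, stand-alone sketch of the guard sequence the new megamorphic path emits, pulled from the CacheIR.cpp hunk below. The writer method names come from that hunk; the wrapper function and its signature are hypothetical and only group the calls.

#include "jit/CacheIR.h"  // CacheIRWriter, ObjOperandId, GuardClassKind

using namespace js::jit;

// Sketch only, not part of the patch (hypothetical helper): the guards a
// megamorphic scripted-call stub performs. Unlike the specialized path, no
// specific callee is pinned, so a single stub covers any scripted function
// that reaches this call site.
static void EmitMegamorphicScriptedCallGuards(CacheIRWriter& writer,
                                              ObjOperandId calleeObjId,
                                              bool isConstructing) {
  // Accept any JSFunction that has a usable JIT entry (not relazified).
  writer.guardClass(calleeObjId, GuardClassKind::JSFunction);
  writer.guardFunctionHasJitEntry(calleeObjId, isConstructing);

  if (isConstructing) {
    // |new f()| on a non-constructor must throw; this is the op the patch adds.
    writer.guardFunctionIsConstructor(calleeObjId);
  } else {
    // Calling a class constructor without |new| must throw.
    writer.guardNotClassConstructor(calleeObjId);
  }
}
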
js/src/jit/CacheIR.cpp
js/src/jit/CacheIR.h
js/src/jit/CacheIRCompiler.cpp
js/src/jit/CacheIRCompiler.h
js/src/jit/MacroAssembler-inl.h
js/src/jit/MacroAssembler.cpp
js/src/jit/MacroAssembler.h
--- a/js/src/jit/CacheIR.cpp
+++ b/js/src/jit/CacheIR.cpp
@@ -5039,16 +5039,17 @@ bool CallIRGenerator::tryAttachFunApply(
   } else {
     trackAttached("Native fun_apply");
   }
 
   return true;
 }
 
 bool CallIRGenerator::tryAttachSpecialCaseCallNative(HandleFunction callee) {
+  MOZ_ASSERT(mode_ == ICState::Mode::Specialized);
   MOZ_ASSERT(callee->isNative());
 
   if (op_ == JSOP_FUNCALL && callee->native() == fun_call) {
     if (tryAttachFunCall()) {
       return true;
     }
   }
 
@@ -5156,19 +5157,21 @@ bool CallIRGenerator::tryAttachCallScrip
   }
 
   // Never attach optimized scripted call stubs for JSOP_FUNAPPLY.
   // MagicArguments may escape the frame through them.
   if (op_ == JSOP_FUNAPPLY) {
     return false;
   }
 
+  bool isSpecialized = mode_ == ICState::Mode::Specialized;
+
   bool isConstructing = IsConstructorCallPC(pc_);
   bool isSpread = IsSpreadCallPC(pc_);
-  bool isSameRealm = cx_->realm() == calleeFunc->realm();
+  bool isSameRealm = isSpecialized && cx_->realm() == calleeFunc->realm();
   CallFlags flags(isConstructing, isSpread, isSameRealm);
 
   // If callee is not an interpreted constructor, we have to throw.
   if (isConstructing && !calleeFunc->isConstructor()) {
     return false;
   }
 
   // Likewise, if the callee is a class constructor, we have to throw.
@@ -5195,49 +5198,67 @@ bool CallIRGenerator::tryAttachCallScrip
   // Keep track of the function's |prototype| property in type
   // information, for use during Ion compilation.
   if (IsIonEnabled(cx_)) {
     EnsureTrackPropertyTypes(cx_, calleeFunc, NameToId(cx_->names().prototype));
   }
 
   RootedObject templateObj(cx_);
   bool skipAttach = false;
-  if (isConstructing &&
+  if (isConstructing && isSpecialized &&
       !getTemplateObjectForScripted(calleeFunc, &templateObj, &skipAttach)) {
     return false;
   }
   if (skipAttach) {
     // TODO: this should mark "handled" somehow
     return false;
   }
 
   // Load argc.
   Int32OperandId argcId(writer.setInputOperandId(0));
 
   // Load the callee and ensure it is an object
   ValOperandId calleeValId =
       writer.loadArgumentDynamicSlot(ArgumentKind::Callee, argcId, flags);
   ObjOperandId calleeObjId = writer.guardIsObject(calleeValId);
 
-  // Ensure callee matches this stub's callee
-  FieldOffset calleeOffset =
-      writer.guardSpecificObject(calleeObjId, calleeFunc);
-
-  // Guard against relazification
-  writer.guardFunctionHasJitEntry(calleeObjId, isConstructing);
+  FieldOffset calleeOffset = 0;
+  if (isSpecialized) {
+    // Ensure callee matches this stub's callee
+    calleeOffset = writer.guardSpecificObject(calleeObjId, calleeFunc);
+    // Guard against relazification
+    writer.guardFunctionHasJitEntry(calleeObjId, isConstructing);
+  } else {
+    // Guard that object is a scripted function
+    writer.guardClass(calleeObjId, GuardClassKind::JSFunction);
+    writer.guardFunctionHasJitEntry(calleeObjId, isConstructing);
+
+    if (isConstructing) {
+      // If callee is not a constructor, we have to throw.
+      writer.guardFunctionIsConstructor(calleeObjId);
+    } else {
+      // If callee is a class constructor, we have to throw.
+      writer.guardNotClassConstructor(calleeObjId);
+    }
+  }
 
   writer.callScriptedFunction(calleeObjId, argcId, flags);
   writer.typeMonitorResult();
 
   if (templateObj) {
+    MOZ_ASSERT(isSpecialized);
     writer.metaScriptedTemplateObject(templateObj, calleeOffset);
   }
 
   cacheIRStubKind_ = BaselineCacheIRStubKind::Monitored;
-  trackAttached("Call scripted func");
+  if (isSpecialized) {
+    trackAttached("Call scripted func");
+  } else {
+    trackAttached("Call any scripted func");
+  }
 
   return true;
 }
 
 bool CallIRGenerator::getTemplateObjectForNative(HandleFunction calleeFunc,
                                                  MutableHandleObject res) {
   if (!calleeFunc->hasJitInfo() ||
       calleeFunc->jitInfo()->type() != JSJitInfo::InlinableNative) {
@@ -5321,19 +5342,23 @@ bool CallIRGenerator::getTemplateObjectF
     }
 
     default:
       return true;
   }
 }
 
 bool CallIRGenerator::tryAttachCallNative(HandleFunction calleeFunc) {
-  MOZ_ASSERT(mode_ == ICState::Mode::Specialized);
   MOZ_ASSERT(calleeFunc->isNative());
 
+  bool isSpecialized = mode_ == ICState::Mode::Specialized;
+  if (!isSpecialized) {
+    return false;
+  }
+
   bool isSpread = IsSpreadCallPC(pc_);
   bool isSameRealm = cx_->realm() == calleeFunc->realm();
   bool isConstructing = IsConstructorCallPC(pc_);
   CallFlags flags(isConstructing, isSpread, isSameRealm);
 
   if (isConstructing && !calleeFunc->isConstructor()) {
     return false;
   }
@@ -5401,16 +5426,23 @@ bool CallIRGenerator::tryAttachCallHook(
   if (JitOptions.disableCacheIRCalls) {
     return false;
   }
 
   if (op_ == JSOP_FUNAPPLY) {
     return false;
   }
 
+  if (mode_ != ICState::Mode::Specialized) {
+    // We do not have megamorphic call hook stubs.
+    // TODO: Should we attach specialized call hook stubs in
+    // megamorphic mode to avoid going generic?
+    return false;
+  }
+
   bool isSpread = IsSpreadCallPC(pc_);
   bool isConstructing = IsConstructorCallPC(pc_);
   CallFlags flags(isConstructing, isSpread);
   JSNative hook =
       isConstructing ? calleeObj->constructHook() : calleeObj->callHook();
   if (!hook) {
     return false;
   }
@@ -5458,20 +5490,17 @@ bool CallIRGenerator::tryAttachStub() {
     case JSOP_SPREADNEW:
     case JSOP_FUNCALL:
     case JSOP_FUNAPPLY:
       break;
     default:
       return false;
   }
 
-  // Only optimize when the mode is Specialized.
-  if (mode_ != ICState::Mode::Specialized) {
-    return false;
-  }
+  MOZ_ASSERT(mode_ != ICState::Mode::Generic);
 
   // Ensure callee is a function.
   if (!callee_.isObject()) {
     return false;
   }
 
   RootedObject calleeObj(cx_, &callee_.toObject());
   if (!calleeObj->is<JSFunction>()) {
--- a/js/src/jit/CacheIR.h
+++ b/js/src/jit/CacheIR.h
@@ -244,16 +244,17 @@ extern const uint32_t ArgLengths[];
   _(GuardIndexGreaterThanDenseInitLength, Id, Id)                              \
   _(GuardTagNotEqual, Id, Id)                                                  \
   _(GuardXrayExpandoShapeAndDefaultProto, Id, Byte, Field)                     \
   _(GuardFunctionPrototype, Id, Id, Field)                                     \
   _(GuardNoAllocationMetadataBuilder, None)                                    \
   _(GuardObjectGroupNotPretenured, Field)                                      \
   _(GuardFunctionHasJitEntry, Id, Byte)                                        \
   _(GuardFunctionIsNative, Id)                                                 \
+  _(GuardFunctionIsConstructor, Id)                                            \
   _(GuardNotClassConstructor, Id)                                              \
   _(GuardFunApply, Id, Byte)                                                   \
   _(LoadObject, Id, Field)                                                     \
   _(LoadProto, Id, Id)                                                         \
   _(LoadEnclosingEnvironment, Id, Id)                                          \
   _(LoadWrapperTarget, Id, Id)                                                 \
   _(LoadValueTag, Id, Id)                                                      \
   _(LoadArgumentFixedSlot, Id, Byte)                                           \
@@ -900,16 +901,19 @@ class MOZ_RAII CacheIRWriter : public JS
   }
   FieldOffset guardAnyClass(ObjOperandId obj, const Class* clasp) {
     writeOpWithOperandId(CacheOp::GuardAnyClass, obj);
     return addStubField(uintptr_t(clasp), StubField::Type::RawWord);
   }
   void guardFunctionIsNative(ObjOperandId obj) {
     writeOpWithOperandId(CacheOp::GuardFunctionIsNative, obj);
   }
+  void guardFunctionIsConstructor(ObjOperandId obj) {
+    writeOpWithOperandId(CacheOp::GuardFunctionIsConstructor, obj);
+  }
   void guardSpecificNativeFunction(ObjOperandId obj, JSNative nativeFunc) {
     writeOpWithOperandId(CacheOp::GuardSpecificNativeFunction, obj);
     writePointer(JS_FUNC_TO_DATA_PTR(void*, nativeFunc));
   }
   void guardIsNativeObject(ObjOperandId obj) {
     writeOpWithOperandId(CacheOp::GuardIsNativeObject, obj);
   }
   void guardIsProxy(ObjOperandId obj) {
--- a/js/src/jit/CacheIRCompiler.cpp
+++ b/js/src/jit/CacheIRCompiler.cpp
@@ -3068,16 +3068,32 @@ bool CacheIRCompiler::emitGuardFunctionI
     return false;
   }
 
   // Ensure obj is not an interpreted function.
   masm.branchIfInterpreted(obj, /*isConstructing =*/false, failure->label());
   return true;
 }
 
+bool CacheIRCompiler::emitGuardFunctionIsConstructor() {
+  JitSpew(JitSpew_Codegen, __FUNCTION__);
+  Register funcReg = allocator.useRegister(masm, reader.objOperandId());
+  AutoScratchRegister scratch(allocator, masm);
+
+  FailurePath* failure;
+  if (!addFailurePath(&failure)) {
+    return false;
+  }
+
+  // Ensure obj is a constructor
+  masm.branchTestFunctionFlags(funcReg, JSFunction::CONSTRUCTOR,
+                               Assembler::Zero, failure->label());
+  return true;
+}
+
 bool CacheIRCompiler::emitGuardNotClassConstructor() {
   Register fun = allocator.useRegister(masm, reader.objOperandId());
   AutoScratchRegister scratch(allocator, masm);
 
   FailurePath* failure;
   if (!addFailurePath(&failure)) {
     return false;
   }
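
In plain terms, the new guard bails to the next stub whenever the callee's CONSTRUCTOR flag is clear (Assembler::Zero on the masked test means the bit was not set). Below is a trivial stand-alone sketch of the C++-level predicate the generated code mirrors; the helper name is hypothetical and the header path is assumed.

#include "vm/JSFunction.h"

// Sketch only, not part of the patch: the condition under which
// GuardFunctionIsConstructor succeeds. The emitted code tests
// JSFunction::CONSTRUCTOR in the function's flag word and jumps to the
// failure path when the bit is zero.
static bool GuardFunctionIsConstructorWouldSucceed(JSFunction* fun) {
  return fun->isConstructor();  // flags & JSFunction::CONSTRUCTOR
}
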
--- a/js/src/jit/CacheIRCompiler.h
+++ b/js/src/jit/CacheIRCompiler.h
@@ -30,16 +30,17 @@ namespace jit {
   _(GuardIsNumber)                        \
   _(GuardIsInt32)                         \
   _(GuardIsInt32Index)                    \
   _(GuardType)                            \
   _(GuardClass)                           \
   _(GuardGroupHasUnanalyzedNewScript)     \
   _(GuardIsExtensible)                    \
   _(GuardFunctionIsNative)                \
+  _(GuardFunctionIsConstructor)           \
   _(GuardSpecificNativeFunction)          \
   _(GuardFunctionPrototype)               \
   _(GuardIsNativeObject)                  \
   _(GuardIsProxy)                         \
   _(GuardNotDOMProxy)                     \
   _(GuardSpecificInt32Immediate)          \
   _(GuardMagicValue)                      \
   _(GuardNoDetachedTypedObjects)          \
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -344,48 +344,48 @@ void MacroAssembler::branchLatin1String(
                Imm32(JSString::LATIN1_CHARS_BIT), label);
 }
 
 void MacroAssembler::branchTwoByteString(Register string, Label* label) {
   branchTest32(Assembler::Zero, Address(string, JSString::offsetOfFlags()),
                Imm32(JSString::LATIN1_CHARS_BIT), label);
 }
 
-void MacroAssembler::branchIfFunctionHasNoJitEntry(Register fun,
-                                                   bool isConstructing,
-                                                   Label* label) {
+void MacroAssembler::branchTestFunctionFlags(Register fun, uint32_t flags,
+                                             Condition cond, Label* label) {
   // 16-bit loads are slow and unaligned 32-bit loads may be too so
   // perform an aligned 32-bit load and adjust the bitmask accordingly.
 
   static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0,
                 "The code in this function and the ones below must change");
   static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
                 "The code in this function and the ones below must change");
 
+  int32_t bit = IMM32_16ADJ(flags);
   Address address(fun, JSFunction::offsetOfNargs());
-  int32_t bit = JSFunction::INTERPRETED;
+  branchTest32(cond, address, Imm32(bit), label);
+}
+
+void MacroAssembler::branchIfFunctionHasNoJitEntry(Register fun,
+                                                   bool isConstructing,
+                                                   Label* label) {
+  int32_t flags = JSFunction::INTERPRETED;
   if (!isConstructing) {
-    bit |= JSFunction::WASM_JIT_ENTRY;
+    flags |= JSFunction::WASM_JIT_ENTRY;
   }
-  bit = IMM32_16ADJ(bit);
-  branchTest32(Assembler::Zero, address, Imm32(bit), label);
+  branchTestFunctionFlags(fun, flags, Assembler::Zero, label);
 }
 
 void MacroAssembler::branchIfInterpreted(Register fun, bool isConstructing,
                                          Label* label) {
-  // 16-bit loads are slow and unaligned 32-bit loads may be too so
-  // perform an aligned 32-bit load and adjust the bitmask accordingly.
-
-  Address address(fun, JSFunction::offsetOfNargs());
-  int32_t bit = JSFunction::INTERPRETED | JSFunction::INTERPRETED_LAZY;
+  int32_t flags = JSFunction::INTERPRETED | JSFunction::INTERPRETED_LAZY;
   if (!isConstructing) {
-    bit |= JSFunction::WASM_JIT_ENTRY;
+    flags |= JSFunction::WASM_JIT_ENTRY;
   }
-  bit = IMM32_16ADJ(bit);
-  branchTest32(Assembler::NonZero, address, Imm32(bit), label);
+  branchTestFunctionFlags(fun, flags, Assembler::NonZero, label);
 }
 
 void MacroAssembler::branchIfObjectEmulatesUndefined(Register objReg,
                                                      Register scratch,
                                                      Label* slowCheck,
                                                      Label* label) {
   // The branches to out-of-line code here implement a conservative version
   // of the JSObject::isWrapper test performed in EmulatesUndefined.
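
For reference, a self-contained sketch of the trick branchTestFunctionFlags factors out: rather than doing a slow 16-bit load of the flags, it performs an aligned 32-bit load of the word holding both nargs and flags and shifts the test mask instead (what IMM32_16ADJ does on little-endian targets). The struct and flag values below are illustrative assumptions consistent with the static_asserts above, not the real JSFunction layout.

#include <cassert>
#include <cstdint>
#include <cstring>

// Illustrative layout only: 16-bit nargs followed by 16-bit flags, packed in
// one aligned 32-bit word (offsetOfFlags() == offsetOfNargs() + 2).
struct FakeFunctionHeader {
  uint16_t nargs;
  uint16_t flags;
};

// Assuming a little-endian target, the flags half lands in the upper 16 bits
// of the loaded word, so the mask is shifted up; this mirrors
// branchTest32(cond, address, Imm32(IMM32_16ADJ(flags)), label).
static bool testFlags32(const FakeFunctionHeader& fn, uint16_t mask) {
  uint32_t word;
  std::memcpy(&word, &fn, sizeof(word));  // the aligned 32-bit load
  return (word & (uint32_t(mask) << 16)) != 0;
}

int main() {
  FakeFunctionHeader fn{/*nargs=*/2, /*flags=*/0x0002 /* hypothetical bit */};
  assert(testFlags32(fn, 0x0002));
  assert(!testFlags32(fn, 0x0004));
  return 0;
}
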
--- a/js/src/jit/MacroAssembler.cpp
+++ b/js/src/jit/MacroAssembler.cpp
@@ -2969,31 +2969,21 @@ void MacroAssembler::moveRegPair(Registe
 }
 
 // ===============================================================
 // Branch functions
 
 void MacroAssembler::branchIfNotInterpretedConstructor(Register fun,
                                                        Register scratch,
                                                        Label* label) {
-  // 16-bit loads are slow and unaligned 32-bit loads may be too so
-  // perform an aligned 32-bit load and adjust the bitmask accordingly.
-  static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0,
-                "JSFunction nargs are aligned to uint32_t");
-  static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
-                "JSFunction nargs and flags are stored next to each other");
-
   // First, ensure it's a scripted function.
-  load32(Address(fun, JSFunction::offsetOfNargs()), scratch);
-  int32_t bits = IMM32_16ADJ(JSFunction::INTERPRETED);
-  branchTest32(Assembler::Zero, scratch, Imm32(bits), label);
+  branchTestFunctionFlags(fun, JSFunction::INTERPRETED, Assembler::Zero, label);
 
   // Check if the CONSTRUCTOR bit is set.
-  bits = IMM32_16ADJ(JSFunction::CONSTRUCTOR);
-  branchTest32(Assembler::Zero, scratch, Imm32(bits), label);
+  branchTestFunctionFlags(fun, JSFunction::CONSTRUCTOR, Assembler::Zero, label);
 }
 
 void MacroAssembler::branchTestObjGroupNoSpectreMitigations(
     Condition cond, Register obj, const Address& group, Register scratch,
     Label* label) {
   // Note: obj and scratch registers may alias.
   MOZ_ASSERT(group.base != scratch);
   MOZ_ASSERT(group.base != obj);
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -1290,16 +1290,19 @@ class MacroAssembler : public MacroAssem
   inline void branchIfRope(Register str, Label* label);
   inline void branchIfRopeOrExternal(Register str, Register temp, Label* label);
 
   inline void branchIfNotRope(Register str, Label* label);
 
   inline void branchLatin1String(Register string, Label* label);
   inline void branchTwoByteString(Register string, Label* label);
 
+  inline void branchTestFunctionFlags(Register fun, uint32_t flags,
+                                      Condition cond, Label* label);
+
   inline void branchIfFunctionHasNoJitEntry(Register fun, bool isConstructing,
                                             Label* label);
   inline void branchIfInterpreted(Register fun, bool isConstructing,
                                   Label* label);
 
   inline void branchFunctionKind(Condition cond, JSFunction::FunctionKind kind,
                                  Register fun, Register scratch, Label* label);