author | Jan de Mooij <jdemooij@mozilla.com> |
date | Thu, 28 Feb 2019 12:31:01 +0000 |
changeset 461717 | 756272e36e32264332e6b95aa804b69581295c7e |
parent 461716 | d629f7abeca07b44babffed909a38120fe24aceb |
child 461718 | 9d1e9834a326d6b9ebdef03cf4622d0482aa17a9 |
push id | 35627 |
push user | opoprus@mozilla.com |
push date | Thu, 28 Feb 2019 21:44:07 +0000 |
treeherder | mozilla-central@db533ea3d561 |
perfherder | talos, build metrics, platform microbench (compared to previous push) |
reviewers | nbp, tcampbell |
bugs | 1530937 |
milestone | 67.0a1 |
first release with | nightly linux32 / nightly linux64 / nightly mac / nightly win32 / nightly win64 |
last release without | nightly linux32 / nightly linux64 / nightly mac / nightly win32 / nightly win64 |
--- a/js/src/jit/BaselineCompiler.cpp +++ b/js/src/jit/BaselineCompiler.cpp @@ -26,16 +26,17 @@ #include "vm/EnvironmentObject.h" #include "vm/Interpreter.h" #include "vm/JSFunction.h" #include "vm/TraceLogging.h" #include "vtune/VTuneWrapper.h" #include "jit/BaselineFrameInfo-inl.h" #include "jit/MacroAssembler-inl.h" +#include "jit/VMFunctionList-inl.h" #include "vm/Interpreter-inl.h" #include "vm/JSScript-inl.h" #include "vm/NativeObject-inl.h" #include "vm/TypeInference-inl.h" using namespace js; using namespace js::jit; @@ -579,20 +580,18 @@ void BaselineInterpreterCodeGen::storeFr // Push frame descriptor based on the full frame size. masm.makeFrameDescriptor(scratch1, FrameType::BaselineJS, ExitFrameLayout::Size()); masm.push(scratch1); } template <typename Handler> -bool BaselineCodeGen<Handler>::callVM(const VMFunction& fun, - CallVMPhase phase) { - TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun); - +bool BaselineCodeGen<Handler>::callVM(const VMFunctionData& fun, + TrampolinePtr code, CallVMPhase phase) { #ifdef DEBUG // Assert prepareVMCall() has been called. MOZ_ASSERT(inCall_); inCall_ = false; // Assert the frame does not have an override pc when we're executing JIT // code. { @@ -660,16 +659,31 @@ bool BaselineCodeGen<Handler>::callVM(co masm.assumeUnreachable("BaselineFrame shouldn't override pc after VM call"); masm.bind(&ok); } #endif return handler.appendRetAddrEntry(cx, RetAddrEntry::Kind::CallVM, callOffset); } +template <typename Handler> +bool BaselineCodeGen<Handler>::callVM(const VMFunction& fun, + CallVMPhase phase) { + TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fun); + return callVM(fun, code, phase); +} + +template <typename Handler> +template <typename Fn, Fn fn> +bool BaselineCodeGen<Handler>::callVM(CallVMPhase phase) { + VMFunctionId fnId = VMFunctionToId<Fn, fn>::id; + TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(fnId); + return callVM(GetVMFunction(fnId), code, phase); +} + typedef bool (*CheckOverRecursedBaselineFn)(JSContext*, BaselineFrame*); static const VMFunction CheckOverRecursedBaselineInfo = FunctionInfo<CheckOverRecursedBaselineFn>(CheckOverRecursedBaseline, "CheckOverRecursedBaseline"); template <typename Handler> bool BaselineCodeGen<Handler>::emitStackCheck() { // If this is the late stack check for a frame which contains an early stack @@ -882,30 +896,28 @@ void BaselineCompilerCodeGen::loadResume masm.move32(Imm32(GET_RESUMEINDEX(handler.pc())), dest); } template <> void BaselineInterpreterCodeGen::loadResumeIndexBytecodeOperand(Register dest) { MOZ_CRASH("NYI: interpreter loadResumeIndexBytecodeOperand"); } -typedef bool (*DebugPrologueFn)(JSContext*, BaselineFrame*, jsbytecode*, bool*); -static const VMFunction DebugPrologueInfo = - FunctionInfo<DebugPrologueFn>(jit::DebugPrologue, "DebugPrologue"); - template <typename Handler> bool BaselineCodeGen<Handler>::emitDebugPrologue() { auto ifDebuggee = [this]() { // Load pointer to BaselineFrame in R0. masm.loadBaselineFramePtr(BaselineFrameReg, R0.scratchReg()); prepareVMCall(); pushBytecodePCArg(); pushArg(R0.scratchReg()); - if (!callVM(DebugPrologueInfo)) { + + using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*, bool*); + if (!callVM<Fn, jit::DebugPrologue>()) { return false; } // Fix up the RetAddrEntry appended by callVM for on-stack recompilation. handler.markLastRetAddrEntryKind(RetAddrEntry::Kind::DebugPrologue); // If the stub returns |true|, we have to return the value stored in the // frame's return value slot. 
@@ -1004,43 +1016,35 @@ bool BaselineCompilerCodeGen::initEnviro
   return true;
 }
 
 template <>
 bool BaselineInterpreterCodeGen::initEnvironmentChain() {
   MOZ_CRASH("NYI: interpreter initEnvironmentChain");
 }
 
-typedef bool (*InterruptCheckFn)(JSContext*);
-static const VMFunction InterruptCheckInfo =
-    FunctionInfo<InterruptCheckFn>(InterruptCheck, "InterruptCheck");
-
 template <typename Handler>
 bool BaselineCodeGen<Handler>::emitInterruptCheck() {
   frame.syncStack(0);
 
   Label done;
   masm.branch32(Assembler::Equal,
                 AbsoluteAddress(cx->addressOfInterruptBits()), Imm32(0),
                 &done);
 
   prepareVMCall();
-  if (!callVM(InterruptCheckInfo)) {
+
+  using Fn = bool (*)(JSContext*);
+  if (!callVM<Fn, InterruptCheck>()) {
     return false;
   }
 
   masm.bind(&done);
   return true;
 }
 
-typedef bool (*IonCompileScriptForBaselineFn)(JSContext*, BaselineFrame*,
-                                              jsbytecode*);
-static const VMFunction IonCompileScriptForBaselineInfo =
-    FunctionInfo<IonCompileScriptForBaselineFn>(IonCompileScriptForBaseline,
-                                                "IonCompileScriptForBaseline");
-
 template <>
 bool BaselineCompilerCodeGen::emitWarmUpCounterIncrement() {
   // Emit no warm-up counter increments or bailouts if Ion is not
   // enabled, or if the script will never be Ion-compileable
   if (!handler.maybeIonCompileable()) {
     return true;
   }
@@ -1094,17 +1098,18 @@ bool BaselineCompilerCodeGen::emitWarmUp
     // To call stubs we need to have an opcode. This code handles the
     // prologue and there is no dedicated opcode present. Therefore use an
     // annotated vm call.
     prepareVMCall();
 
     pushBytecodePCArg();
     masm.PushBaselineFramePtr(BaselineFrameReg, R0.scratchReg());
 
-    if (!callVM(IonCompileScriptForBaselineInfo)) {
+    using Fn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*);
+    if (!callVM<Fn, IonCompileScriptForBaseline>()) {
       return false;
     }
 
     // Annotate the RetAddrEntry as warmup counter.
handler.markLastRetAddrEntryKind(RetAddrEntry::Kind::WarmupCounter); } masm.bind(&skipCall); @@ -1777,70 +1782,55 @@ bool BaselineCodeGen<Handler>::emit_JSOP } template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_NULL() { frame.push(NullValue()); return true; } -typedef bool (*ThrowCheckIsObjectFn)(JSContext*, CheckIsObjectKind); -static const VMFunction ThrowCheckIsObjectInfo = - FunctionInfo<ThrowCheckIsObjectFn>(ThrowCheckIsObject, - "ThrowCheckIsObject"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_CHECKISOBJ() { frame.syncStack(0); masm.loadValue(frame.addressOfStackValue(-1), R0); Label ok; masm.branchTestObject(Assembler::Equal, R0, &ok); prepareVMCall(); pushUint8BytecodeOperandArg(); - if (!callVM(ThrowCheckIsObjectInfo)) { + + using Fn = bool (*)(JSContext*, CheckIsObjectKind); + if (!callVM<Fn, ThrowCheckIsObject>()) { return false; } masm.bind(&ok); return true; } -typedef bool (*CheckIsCallableFn)(JSContext*, HandleValue, CheckIsCallableKind); -static const VMFunction CheckIsCallableInfo = - FunctionInfo<CheckIsCallableFn>(CheckIsCallable, "CheckIsCallable"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_CHECKISCALLABLE() { frame.syncStack(0); masm.loadValue(frame.addressOfStackValue(-1), R0); prepareVMCall(); pushUint8BytecodeOperandArg(); pushArg(R0); - if (!callVM(CheckIsCallableInfo)) { + + using Fn = bool (*)(JSContext*, HandleValue, CheckIsCallableKind); + if (!callVM<Fn, CheckIsCallable>()) { return false; } return true; } -typedef bool (*ThrowUninitializedThisFn)(JSContext*, BaselineFrame* frame); -static const VMFunction ThrowUninitializedThisInfo = - FunctionInfo<ThrowUninitializedThisFn>(BaselineThrowUninitializedThis, - "BaselineThrowUninitializedThis"); - -typedef bool (*ThrowInitializedThisFn)(JSContext*); -static const VMFunction ThrowInitializedThisInfo = - FunctionInfo<ThrowInitializedThisFn>(BaselineThrowInitializedThis, - "BaselineThrowInitializedThis"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_CHECKTHIS() { frame.syncStack(0); masm.loadValue(frame.addressOfStackValue(-1), R0); return emitCheckThis(R0); } @@ -1859,53 +1849,52 @@ bool BaselineCodeGen<Handler>::emitCheck masm.branchTestMagic(Assembler::Equal, val, &thisOK); } else { masm.branchTestMagic(Assembler::NotEqual, val, &thisOK); } prepareVMCall(); if (reinit) { - if (!callVM(ThrowInitializedThisInfo)) { + using Fn = bool (*)(JSContext*); + if (!callVM<Fn, BaselineThrowInitializedThis>()) { return false; } } else { masm.loadBaselineFramePtr(BaselineFrameReg, val.scratchReg()); pushArg(val.scratchReg()); - if (!callVM(ThrowUninitializedThisInfo)) { + using Fn = bool (*)(JSContext*, BaselineFrame*); + if (!callVM<Fn, BaselineThrowUninitializedThis>()) { return false; } } masm.bind(&thisOK); return true; } -typedef bool (*ThrowBadDerivedReturnFn)(JSContext*, HandleValue); -static const VMFunction ThrowBadDerivedReturnInfo = - FunctionInfo<ThrowBadDerivedReturnFn>(jit::ThrowBadDerivedReturn, - "ThrowBadDerivedReturn"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_CHECKRETURN() { MOZ_ASSERT_IF(handler.maybeScript(), handler.maybeScript()->isDerivedClassConstructor()); // Load |this| in R0, return value in R1. 
frame.popRegsAndSync(1); emitLoadReturnValue(R1); Label done, returnOK; masm.branchTestObject(Assembler::Equal, R1, &done); masm.branchTestUndefined(Assembler::Equal, R1, &returnOK); prepareVMCall(); pushArg(R1); - if (!callVM(ThrowBadDerivedReturnInfo)) { + + using Fn = bool (*)(JSContext*, HandleValue); + if (!callVM<Fn, ThrowBadDerivedReturn>()) { return false; } masm.assumeUnreachable("Should throw on bad derived constructor return"); masm.bind(&returnOK); if (!emitCheckThis(R0)) { return false; @@ -1914,21 +1903,16 @@ bool BaselineCodeGen<Handler>::emit_JSOP // Store |this| in the return value slot. masm.storeValue(R0, frame.addressOfReturnValue()); masm.or32(Imm32(BaselineFrame::HAS_RVAL), frame.addressOfFlags()); masm.bind(&done); return true; } -typedef bool (*GetFunctionThisFn)(JSContext*, BaselineFrame*, - MutableHandleValue); -static const VMFunction GetFunctionThisInfo = FunctionInfo<GetFunctionThisFn>( - jit::BaselineGetFunctionThis, "BaselineGetFunctionThis"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_FUNCTIONTHIS() { MOZ_ASSERT_IF(handler.maybeFunction(), !handler.maybeFunction()->isArrow()); frame.pushThis(); auto boxThis = [this]() { // Load |thisv| in R0. Skip the call if it's already an object. @@ -1936,47 +1920,43 @@ bool BaselineCodeGen<Handler>::emit_JSOP frame.popRegsAndSync(1); masm.branchTestObject(Assembler::Equal, R0, &skipCall); prepareVMCall(); masm.loadBaselineFramePtr(BaselineFrameReg, R1.scratchReg()); pushArg(R1.scratchReg()); - if (!callVM(GetFunctionThisInfo)) { + using Fn = bool (*)(JSContext*, BaselineFrame*, MutableHandleValue); + if (!callVM<Fn, BaselineGetFunctionThis>()) { return false; } masm.bind(&skipCall); frame.push(R0); return true; }; // In strict mode code, |this| is left alone. return emitTestScriptFlag(JSScript::ImmutableFlags::Strict, false, boxThis, R2.scratchReg()); } -typedef void (*GetNonSyntacticGlobalThisFn)(JSContext*, HandleObject, - MutableHandleValue); -static const VMFunction GetNonSyntacticGlobalThisInfo = - FunctionInfo<GetNonSyntacticGlobalThisFn>(js::GetNonSyntacticGlobalThis, - "GetNonSyntacticGlobalThis"); - template <typename Handler> bool BaselineCodeGen<Handler>::emit_JSOP_GLOBALTHIS() { frame.syncStack(0); auto getNonSyntacticThis = [this]() { prepareVMCall(); masm.loadPtr(frame.addressOfEnvironmentChain(), R0.scratchReg()); pushArg(R0.scratchReg()); - if (!callVM(GetNonSyntacticGlobalThisInfo)) { + using Fn = void (*)(JSContext*, HandleObject, MutableHandleValue); + if (!callVM<Fn, GetNonSyntacticGlobalThis>()) { return false; } frame.push(R0); return true; }; auto getGlobalThis = [this]() { loadGlobalThisValue(R0);
--- a/js/src/jit/BaselineCompiler.h
+++ b/js/src/jit/BaselineCompiler.h
@@ -341,18 +341,23 @@ class BaselineCodeGen {
   void prepareVMCall();
 
   void storeFrameSizeAndPushDescriptor(uint32_t frameBaseSize, uint32_t argSize,
                                        const Address& frameSizeAddr,
                                        Register scratch1, Register scratch2);
 
   enum CallVMPhase { POST_INITIALIZE, CHECK_OVER_RECURSED };
 
+  bool callVM(const VMFunctionData& fun, TrampolinePtr code,
+              CallVMPhase phase = POST_INITIALIZE);
   bool callVM(const VMFunction& fun, CallVMPhase phase = POST_INITIALIZE);
 
+  template <typename Fn, Fn fn>
+  bool callVM(CallVMPhase phase = POST_INITIALIZE);
+
   bool callVMNonOp(const VMFunction& fun, CallVMPhase phase = POST_INITIALIZE) {
     if (!callVM(fun, phase)) {
       return false;
     }
     handler.markLastRetAddrEntryKind(RetAddrEntry::Kind::NonOpCallVM);
     return true;
   }
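For orientation, this is roughly how the three callVM overloads declared above fit together from an op handler's point of view. The handler name and the VM function it targets are invented for illustration only; the overload bodies are the ones added to BaselineCompiler.cpp earlier in this patch.

    // Hypothetical op handler, sketched to show the new calling convention.
    template <typename Handler>
    bool BaselineCodeGen<Handler>::emit_JSOP_EXAMPLE() {
      prepareVMCall();
      pushArg(R0.scratchReg());

      // The signature plus the fully-qualified C++ function select a
      // VMFunctionId at compile time through VMFunctionToId<Fn, fn>::id.
      using Fn = bool (*)(JSContext*, BaselineFrame*);
      if (!callVM<Fn, jit::SomeVMFunction>()) {  // SomeVMFunction: placeholder
        return false;
      }
      // The templated overload expands to roughly:
      //   VMFunctionId id = VMFunctionToId<Fn, fn>::id;
      //   TrampolinePtr code = cx->runtime()->jitRuntime()->getVMWrapper(id);
      //   return callVM(GetVMFunction(id), code, phase);
      // The legacy overload, callVM(const VMFunction&, phase), routes through
      // the same callVM(const VMFunctionData&, TrampolinePtr, phase) helper,
      // so both styles can coexist while the list is being populated.
      return true;
    }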
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -8907,18 +8907,18 @@ void JitRuntime::generateDoubleToInt32Va
   masm.convertDoubleToInt32(FloatReg0, R1.scratchReg(), &done,
                             /* negativeZeroCheck = */ false);
   masm.tagValue(JSVAL_TYPE_INT32, R1.scratchReg(), R0);
 
   masm.bind(&done);
   masm.abiret();
 }
 
-bool JitRuntime::generateTLEventVM(MacroAssembler& masm, const VMFunction& f,
-                                   bool enter) {
+bool JitRuntime::generateTLEventVM(MacroAssembler& masm,
+                                   const VMFunctionData& f, bool enter) {
 #ifdef JS_TRACE_LOGGING
   bool vmEventEnabled = TraceLogTextIdEnabled(TraceLogger_VM);
   bool vmSpecificEventEnabled = TraceLogTextIdEnabled(TraceLogger_VMSpecific);
 
   if (vmEventEnabled || vmSpecificEventEnabled) {
     AllocatableRegisterSet regs(RegisterSet::Volatile());
     Register loggerReg = regs.takeAnyGeneral();
     masm.Push(loggerReg);
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -281,23 +281,32 @@ bool JitRuntime::initialize(JSContext* c
   JitSpew(JitSpew_Codegen, "# Emitting interpreter stub");
   generateInterpreterStub(masm);
 
   JitSpew(JitSpew_Codegen, "# Emitting double-to-int32-value stub");
   generateDoubleToInt32ValueStub(masm);
 
   JitSpew(JitSpew_Codegen, "# Emitting VM function wrappers");
+  if (!generateVMWrappers(cx, masm)) {
+    return false;
+  }
+
+  // TODO(bug 1530937): remove this after converting all VM functions.
   for (VMFunction* fun = VMFunction::functions; fun; fun = fun->next) {
     if (functionWrappers_->has(fun)) {
       // Duplicate VMFunction definition. See VMFunction::hash.
       continue;
     }
 
     JitSpew(JitSpew_Codegen, "# VM function wrapper (%s)", fun->name());
 
-    if (!generateVMWrapper(cx, masm, *fun)) {
+    uint32_t offset;
+    if (!generateVMWrapper(cx, masm, *fun, &offset)) {
+      return false;
+    }
+    if (!functionWrappers_->putNew(fun, offset)) {
       return false;
     }
   }
 
   JitSpew(JitSpew_Codegen, "# Emitting profiler exit frame tail stub");
   Label profilerExitTail;
   generateProfilerExitFrameTailStub(masm, &profilerExitTail);
--- a/js/src/jit/JitFrames.cpp +++ b/js/src/jit/JitFrames.cpp @@ -1017,17 +1017,18 @@ static void TraceIonICCallFrame(JSTracer #ifdef JS_CODEGEN_MIPS32 uint8_t* alignDoubleSpillWithOffset(uint8_t* pointer, int32_t offset) { uint32_t address = reinterpret_cast<uint32_t>(pointer); address = (address - offset) & ~(ABIStackAlignment - 1); return reinterpret_cast<uint8_t*>(address); } -static void TraceJitExitFrameCopiedArguments(JSTracer* trc, const VMFunction* f, +static void TraceJitExitFrameCopiedArguments(JSTracer* trc, + const VMFunctionData* f, ExitFooterFrame* footer) { uint8_t* doubleArgs = reinterpret_cast<uint8_t*>(footer); doubleArgs = alignDoubleSpillWithOffset(doubleArgs, sizeof(intptr_t)); if (f->outParam == Type_Handle) { doubleArgs -= sizeof(Value); } doubleArgs -= f->doubleByRefArgs() * sizeof(double); @@ -1039,17 +1040,18 @@ static void TraceJitExitFrameCopiedArgum } else { MOZ_ASSERT(f->argRootType(explicitArg) == VMFunction::RootNone); } doubleArgs += sizeof(double); } } } #else -static void TraceJitExitFrameCopiedArguments(JSTracer* trc, const VMFunction* f, +static void TraceJitExitFrameCopiedArguments(JSTracer* trc, + const VMFunctionData* f, ExitFooterFrame* footer) { // This is NO-OP on other platforms. } #endif static void TraceJitExitFrame(JSTracer* trc, const JSJitFrameIter& frame) { ExitFooterFrame* footer = frame.exitFrame()->footer(); @@ -1122,17 +1124,17 @@ static void TraceJitExitFrame(JSTracer* if (frame.isBareExit()) { // Nothing to trace. Fake exit frame pushed for VM functions with // nothing to trace on the stack. return; } MOZ_ASSERT(frame.exitFrame()->isWrapperExit()); - const VMFunction* f = footer->function(); + const VMFunctionData* f = footer->function(); MOZ_ASSERT(f); // Trace arguments of the VM wrapper. uint8_t* argBase = frame.exitFrame()->argBase(); for (uint32_t explicitArg = 0; explicitArg < f->explicitArgs; explicitArg++) { switch (f->argRootType(explicitArg)) { case VMFunction::RootNone: break;
--- a/js/src/jit/JitFrames.h +++ b/js/src/jit/JitFrames.h @@ -12,16 +12,17 @@ #include "jit/JSJitFrameIter.h" #include "vm/JSContext.h" #include "vm/JSFunction.h" namespace js { namespace jit { struct SafepointSlotEntry; +struct VMFunctionData; enum CalleeTokenTag { CalleeToken_Function = 0x0, // untagged CalleeToken_FunctionConstructing = 0x1, CalleeToken_Script = 0x2 }; // Any CalleeToken with this bit set must be CalleeToken_Script. @@ -413,34 +414,34 @@ enum class ExitFrameType : uint8_t { VMFunction = 0xFD, LazyLink = 0xFE, Bare = 0xFF, }; // GC related data used to keep alive data surrounding the Exit frame. class ExitFooterFrame { // Stores the ExitFrameType or, for ExitFrameType::VMFunction, the - // VMFunction*. + // VMFunctionData*. uintptr_t data_; public: static inline size_t Size() { return sizeof(ExitFooterFrame); } void setBareExitFrame() { data_ = uintptr_t(ExitFrameType::Bare); } ExitFrameType type() const { static_assert(sizeof(ExitFrameType) == sizeof(uint8_t), "Code assumes ExitFrameType fits in a byte"); if (data_ > UINT8_MAX) { return ExitFrameType::VMFunction; } MOZ_ASSERT(ExitFrameType(data_) != ExitFrameType::VMFunction); return ExitFrameType(data_); } - inline const VMFunction* function() const { + inline const VMFunctionData* function() const { MOZ_ASSERT(type() == ExitFrameType::VMFunction); - return reinterpret_cast<const VMFunction*>(data_); + return reinterpret_cast<const VMFunctionData*>(data_); } // This should only be called for function()->outParam == Type_Handle template <typename T> T* outParam() { uint8_t* address = reinterpret_cast<uint8_t*>(this); address = alignDoubleSpillWithOffset(address, sizeof(intptr_t)); return reinterpret_cast<T*>(address - sizeof(T));
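The ExitFooterFrame::data_ field above doubles as a type tag and a pointer. A compressed sketch of that scheme, not code from the patch, assuming only that ExitFrameType values fit in a byte while heap pointers never do:

    // Writers store either a small enum value or a VMFunctionData pointer:
    //   data_ = uintptr_t(ExitFrameType::Bare);        // fits in one byte
    //   data_ = uintptr_t(vmFunctionDataPtr);          // always > UINT8_MAX
    // Readers recover the case from the magnitude alone:
    ExitFrameType type = (data_ > UINT8_MAX) ? ExitFrameType::VMFunction
                                             : ExitFrameType(data_);
    const VMFunctionData* f =
        (type == ExitFrameType::VMFunction)
            ? reinterpret_cast<const VMFunctionData*>(data_)
            : nullptr;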
--- a/js/src/jit/JitRealm.h
+++ b/js/src/jit/JitRealm.h
@@ -23,16 +23,18 @@
 #include "js/GCHashTable.h"
 #include "js/Value.h"
 #include "vm/Stack.h"
 
 namespace js {
 namespace jit {
 
 class FrameSizeClass;
+struct VMFunctionData;
+enum class VMFunctionId;
 
 struct EnterJitData {
   explicit EnterJitData(JSContext* cx)
       : jitcode(nullptr),
         osrFrame(nullptr),
         calleeToken(nullptr),
         maxArgv(nullptr),
         maxArgc(0),
@@ -138,16 +140,20 @@ class JitRuntime {
   // Code for trampolines and VMFunction wrappers.
   WriteOnceData<JitCode*> trampolineCode_;
 
   // Map VMFunction addresses to the offset of the wrapper in
   // trampolineCode_.
   using VMWrapperMap = HashMap<const VMFunction*, uint32_t, VMFunction>;
   WriteOnceData<VMWrapperMap*> functionWrappers_;
 
+  // Maps VMFunctionId to the offset of the wrapper code in trampolineCode_.
+  using VMWrapperOffsets = Vector<uint32_t, 0, SystemAllocPolicy>;
+  VMWrapperOffsets functionWrapperOffsets_;
+
   // Global table of jitcode native address => bytecode address mappings.
   UnprotectedData<JitcodeGlobalTable*> jitcodeGlobalTable_;
 
 #ifdef DEBUG
   // The number of possible bailing places encounters before forcefully bailing
   // in that place. Zero means inactive.
   MainThreadData<uint32_t> ionBailAfter_;
 #endif
@@ -185,25 +191,28 @@ class JitRuntime {
   void generateInvalidator(MacroAssembler& masm, Label* bailoutTail);
   uint32_t generatePreBarrier(JSContext* cx, MacroAssembler& masm,
                               MIRType type);
   void generateMallocStub(MacroAssembler& masm);
   void generateFreeStub(MacroAssembler& masm);
   JitCode* generateDebugTrapHandler(JSContext* cx);
   JitCode* generateBaselineDebugModeOSRHandler(
       JSContext* cx, uint32_t* noFrameRegPopOffsetOut);
+
   bool generateVMWrapper(JSContext* cx, MacroAssembler& masm,
-                         const VMFunction& f);
+                         const VMFunctionData& f, uint32_t* wrapperOffset);
+  bool generateVMWrappers(JSContext* cx, MacroAssembler& masm);
 
-  bool generateTLEventVM(MacroAssembler& masm, const VMFunction& f, bool enter);
+  bool generateTLEventVM(MacroAssembler& masm, const VMFunctionData& f,
+                         bool enter);
 
-  inline bool generateTLEnterVM(MacroAssembler& masm, const VMFunction& f) {
+  inline bool generateTLEnterVM(MacroAssembler& masm, const VMFunctionData& f) {
     return generateTLEventVM(masm, f, /* enter = */ true);
   }
-  inline bool generateTLExitVM(MacroAssembler& masm, const VMFunction& f) {
+  inline bool generateTLExitVM(MacroAssembler& masm, const VMFunctionData& f) {
     return generateTLEventVM(masm, f, /* enter = */ false);
   }
 
   uint32_t startTrampolineCode(MacroAssembler& masm);
 
   TrampolinePtr trampolineCode(uint32_t offset) const {
     MOZ_ASSERT(offset > 0);
     MOZ_ASSERT(offset < trampolineCode_->instructionsSize());
@@ -222,16 +231,22 @@ class JitRuntime {
   ExecutableAllocator& execAlloc() { return execAlloc_.ref(); }
 
   IonCompilationId nextCompilationId() {
     return IonCompilationId(nextCompilationId_++);
   }
 
   TrampolinePtr getVMWrapper(const VMFunction& f) const;
+
+  TrampolinePtr getVMWrapper(const VMFunctionId funId) const {
+    MOZ_ASSERT(trampolineCode_);
+    return trampolineCode(functionWrapperOffsets_[size_t(funId)]);
+  }
+
   JitCode* debugTrapHandler(JSContext* cx);
   JitCode* getBaselineDebugModeOSRHandler(JSContext* cx);
   void* getBaselineDebugModeOSRHandlerAddress(JSContext* cx, bool popFrameReg);
 
   TrampolinePtr getGenericBailoutHandler() const {
     return trampolineCode(bailoutHandlerOffset_);
   }
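The new getVMWrapper overload above is the payoff of the id scheme: the trampoline lookup becomes a constant-time vector index instead of a hash-map probe. A hedged sketch of both caller-side paths (the jitRuntime variable stands in for cx->runtime()->jitRuntime()):

    // Legacy path: hash the VMFunction and probe the functionWrappers_ map
    // (getVMWrapper(const VMFunction&), unchanged by this patch).
    TrampolinePtr legacy =
        jitRuntime->getVMWrapper(CheckOverRecursedBaselineInfo);

    // New path: the id is a compile-time constant, so the lookup is a plain
    // index into functionWrapperOffsets_ followed by an offset into the
    // single trampolineCode_ blob.
    TrampolinePtr viaId =
        jitRuntime->getVMWrapper(VMFunctionId::CheckOverRecursedBaseline);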
--- a/js/src/jit/MacroAssembler-inl.h
+++ b/js/src/jit/MacroAssembler-inl.h
@@ -255,17 +255,17 @@ uint32_t MacroAssembler::buildFakeExitFr
   MOZ_ASSERT(framePushed() == initialDepth + ExitFrameLayout::Size());
   return retAddr;
 }
 
 // ===============================================================
 // Exit frame footer.
 
 void MacroAssembler::enterExitFrame(Register cxreg, Register scratch,
-                                    const VMFunction* f) {
+                                    const VMFunctionData* f) {
   MOZ_ASSERT(f);
   linkExitFrame(cxreg, scratch);
   // Push VMFunction pointer, to mark arguments.
   Push(ImmPtr(f));
 }
 
 void MacroAssembler::enterFakeExitFrame(Register cxreg, Register scratch,
                                         ExitFrameType type) {
--- a/js/src/jit/MacroAssembler.h
+++ b/js/src/jit/MacroAssembler.h
@@ -709,19 +709,19 @@ class MacroAssembler : public MacroAssem
   // Exit frame footer.
   //
   // When calling outside the Jit we push an exit frame. To mark the stack
   // correctly, we have to push additional information, called the Exit frame
   // footer, which is used to identify how the stack is marked.
   //
   // See JitFrames.h, and MarkJitExitFrame in JitFrames.cpp.
 
-  // Push stub code and the VMFunction pointer.
+  // Push stub code and the VMFunctionData pointer.
   inline void enterExitFrame(Register cxreg, Register scratch,
-                             const VMFunction* f);
+                             const VMFunctionData* f);
 
   // Push an exit frame token to identify which fake exit frame this footer
   // corresponds to.
   inline void enterFakeExitFrame(Register cxreg, Register scratch,
                                  ExitFrameType type);
 
   // Push an exit frame token for a native call.
   inline void enterFakeExitFrameForNative(Register cxreg, Register scratch,
new file mode 100644
--- /dev/null
+++ b/js/src/jit/VMFunctionList-inl.h
@@ -0,0 +1,69 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
+ * vim: set ts=8 sts=2 et sw=2 tw=80:
+ * This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "jit/BaselineIC.h"
+#include "jit/JitRealm.h"
+#include "jit/VMFunctions.h"
+#include "vm/Interpreter.h"
+
+#include "jit/BaselineFrame-inl.h"
+#include "vm/Interpreter-inl.h"
+
+namespace js {
+namespace jit {
+
+// List of all VM functions to be used with callVM. Each entry stores the name
+// (must be unique, used for the VMFunctionId enum and profiling) and the C++
+// function to be called. This list must be sorted on the name field.
+#define VMFUNCTION_LIST(_) \
+  _(BaselineDebugPrologue, js::jit::DebugPrologue) \
+  _(BaselineGetFunctionThis, js::jit::BaselineGetFunctionThis) \
+  _(BaselineThrowInitializedThis, js::jit::BaselineThrowInitializedThis) \
+  _(BaselineThrowUninitializedThis, js::jit::BaselineThrowUninitializedThis) \
+  _(CheckIsCallable, js::jit::CheckIsCallable) \
+  _(CheckOverRecursedBaseline, js::jit::CheckOverRecursedBaseline) \
+  _(GetNonSyntacticGlobalThis, js::GetNonSyntacticGlobalThis) \
+  _(InterruptCheck, js::jit::InterruptCheck) \
+  _(IonCompileScriptForBaseline, js::jit::IonCompileScriptForBaseline) \
+  _(ThrowBadDerivedReturn, js::jit::ThrowBadDerivedReturn) \
+  _(ThrowCheckIsObject, js::ThrowCheckIsObject)
+
+enum class VMFunctionId {
+#define DEF_ID(name, fp) name,
+  VMFUNCTION_LIST(DEF_ID)
+#undef DEF_ID
+  Count
+};
+
+// Define the VMFunctionToId template to map from signature + function to
+// the VMFunctionId. This lets us verify the consumer/codegen code matches
+// the C++ signature.
+template <typename Function, Function fun>
+struct VMFunctionToId; // Error on this line? Forgot to update VMFUNCTION_LIST?
+
+// GCC warns when the signature does not have matching attributes (for example
+// MOZ_MUST_USE). Squelch this warning to avoid a GCC-only footgun.
+#if MOZ_IS_GCC
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wignored-attributes"
+#endif
+
+// Note: the use of ::fp instead of fp is intentional to enforce use of
+// fully-qualified names in the list above.
+#define DEF_TEMPLATE(name, fp) \
+  template <> \
+  struct VMFunctionToId<decltype(&(::fp)), ::fp> { \
+    static constexpr VMFunctionId id = VMFunctionId::name; \
+  };
+VMFUNCTION_LIST(DEF_TEMPLATE)
+#undef DEF_TEMPLATE
+
+#if MOZ_IS_GCC
+# pragma GCC diagnostic pop
+#endif
+
+} // namespace jit
+} // namespace js
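The list above is an X-macro: the single table drives the VMFunctionId enum, the constexpr data array in VMFunctions.cpp below, and the compile-time function-to-id mapping. A stripped-down, self-contained model of the same pattern, using toy names rather than SpiderMonkey types:

    #include <cstddef>

    inline bool Foo(int) { return true; }
    inline bool Bar(double) { return false; }

    // One list drives everything; keeping it sorted mirrors VMFUNCTION_LIST.
    #define MY_FUNCTION_LIST(_) \
      _(Bar, Bar) \
      _(Foo, Foo)

    // 1. An enum with one entry per listed function, plus Count.
    enum class MyId : std::size_t {
    #define DEF_ID(name, fp) name,
      MY_FUNCTION_LIST(DEF_ID)
    #undef DEF_ID
      Count
    };

    // 2. A compile-time map from (signature, function) to the id. Leaving the
    //    primary template undefined makes using an unlisted function a
    //    compile error at the call site.
    template <typename Fn, Fn fn>
    struct ToId;

    #define DEF_TEMPLATE(name, fp) \
      template <> \
      struct ToId<decltype(&fp), fp> { \
        static constexpr MyId id = MyId::name; \
      };
    MY_FUNCTION_LIST(DEF_TEMPLATE)
    #undef DEF_TEMPLATE

    static_assert(ToId<decltype(&Foo), Foo>::id == MyId::Foo,
                  "the lookup is resolved entirely at compile time");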
--- a/js/src/jit/VMFunctions.cpp +++ b/js/src/jit/VMFunctions.cpp @@ -19,29 +19,137 @@ #include "vm/Debugger.h" #include "vm/EqualityOperations.h" // js::StrictlyEqual #include "vm/Interpreter.h" #include "vm/SelfHosting.h" #include "vm/TraceLogging.h" #include "jit/BaselineFrame-inl.h" #include "jit/JitFrames-inl.h" +#include "jit/VMFunctionList-inl.h" #include "vm/Debugger-inl.h" #include "vm/Interpreter-inl.h" #include "vm/NativeObject-inl.h" #include "vm/StringObject-inl.h" #include "vm/TypeInference-inl.h" #include "vm/UnboxedObject-inl.h" using namespace js; using namespace js::jit; namespace js { namespace jit { +// Helper template to build the VMFunctionData for a function. +template <typename... Args> +struct VMFunctionDataHelper; + +template <class R, typename... Args> +struct VMFunctionDataHelper<R (*)(JSContext*, Args...)> + : public VMFunctionData { + using Fun = R (*)(JSContext*, Args...); + + static constexpr DataType returnType() { return TypeToDataType<R>::result; } + static constexpr DataType outParam() { + return OutParamToDataType<typename LastArg<Args...>::Type>::result; + } + static constexpr RootType outParamRootType() { + return OutParamToRootType<typename LastArg<Args...>::Type>::result; + } + static constexpr size_t NbArgs() { return LastArg<Args...>::nbArgs; } + static constexpr size_t explicitArgs() { + return NbArgs() - (outParam() != Type_Void ? 1 : 0); + } + static constexpr uint32_t argumentProperties() { + return BitMask<TypeToArgProperties, uint32_t, 2, Args...>::result; + } + static constexpr uint32_t argumentPassedInFloatRegs() { + return BitMask<TypeToPassInFloatReg, uint32_t, 2, Args...>::result; + } + static constexpr uint64_t argumentRootTypes() { + return BitMask<TypeToRootType, uint64_t, 3, Args...>::result; + } + constexpr VMFunctionDataHelper(Fun fun, const char* name, + PopValues extraValuesToPop = PopValues(0)) + : VMFunctionData((void*)fun, name, explicitArgs(), argumentProperties(), + argumentPassedInFloatRegs(), argumentRootTypes(), + outParam(), outParamRootType(), returnType(), + extraValuesToPop.numValues, NonTailCall) {} + constexpr VMFunctionDataHelper(Fun fun, const char* name, + MaybeTailCall expectTailCall, + PopValues extraValuesToPop = PopValues(0)) + : VMFunctionData((void*)fun, name, explicitArgs(), argumentProperties(), + argumentPassedInFloatRegs(), argumentRootTypes(), + outParam(), outParamRootType(), returnType(), + extraValuesToPop.numValues, expectTailCall) {} +}; + +// GCC warns when the signature does not have matching attributes (for example +// MOZ_MUST_USE). Squelch this warning to avoid a GCC-only footgun. +#if MOZ_IS_GCC +# pragma GCC diagnostic push +# pragma GCC diagnostic ignored "-Wignored-attributes" +#endif + +// Generate VMFunctionData array. +static constexpr VMFunctionData vmFunctions[] = { +#define DEF_VMFUNCTION(name, fp) \ + VMFunctionDataHelper<decltype(&(::fp))>(::fp, #name), + VMFUNCTION_LIST(DEF_VMFUNCTION) +#undef DEF_VMFUNCTION +}; + +#if MOZ_IS_GCC +# pragma GCC diagnostic pop +#endif + +const VMFunctionData& GetVMFunction(VMFunctionId id) { + return vmFunctions[size_t(id)]; +} + +bool JitRuntime::generateVMWrappers(JSContext* cx, MacroAssembler& masm) { + // Generate all VM function wrappers. 
+
+  static constexpr size_t NumVMFunctions = size_t(VMFunctionId::Count);
+
+  if (!functionWrapperOffsets_.reserve(NumVMFunctions)) {
+    return false;
+  }
+
+#ifdef DEBUG
+  const char* lastName = nullptr;
+#endif
+
+  for (size_t i = 0; i < NumVMFunctions; i++) {
+    VMFunctionId id = VMFunctionId(i);
+    const VMFunctionData& fun = GetVMFunction(id);
+
+#ifdef DEBUG
+    // Assert the list is sorted by name.
+    if (lastName) {
+      MOZ_ASSERT(strcmp(lastName, fun.name()) < 0,
+                 "VM function list must be sorted by name");
+    }
+    lastName = fun.name();
+#endif
+
+    JitSpew(JitSpew_Codegen, "# VM function wrapper (%s)", fun.name());
+
+    uint32_t offset;
+    if (!generateVMWrapper(cx, masm, fun, &offset)) {
+      return false;
+    }
+
+    MOZ_ASSERT(functionWrapperOffsets_.length() == size_t(id));
+    functionWrapperOffsets_.infallibleAppend(offset);
+  }
+
+  return true;
+}
+
 // Statics are initialized to null.
 /* static */ VMFunction* VMFunction::functions;
 
 AutoDetectInvalidation::AutoDetectInvalidation(JSContext* cx,
                                                MutableHandleValue rval)
     : cx_(cx),
       ionScript_(GetTopJitJSScript(cx)->ionScript()),
       rval_(rval),
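VMFunctionDataHelper above derives every VMFunctionData field from the C++ signature alone. As a worked illustration of that deduction (these asserts are not in the patch, and they assume the existing TypeToDataType/OutParamToDataType specializations in VMFunctions.h), something like the following could sit next to the vmFunctions[] array:

    // DebugPrologue's signature, as used by emitDebugPrologue above.
    using DebugPrologueFn = bool (*)(JSContext*, BaselineFrame*, jsbytecode*,
                                     bool*);
    using Helper = VMFunctionDataHelper<DebugPrologueFn>;

    // Three arguments follow the implicit JSContext* ...
    static_assert(Helper::NbArgs() == 3, "BaselineFrame*, jsbytecode*, bool*");
    // ... the trailing bool* is recognized as the out parameter ...
    static_assert(Helper::outParam() == Type_Bool, "bool* out param");
    // ... so only two arguments are explicit and pushed by the JIT caller.
    static_assert(Helper::explicitArgs() == 2, "out param is not explicit");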
--- a/js/src/jit/VMFunctions.h
+++ b/js/src/jit/VMFunctions.h
@@ -50,16 +50,18 @@ struct PopValues {
   explicit constexpr PopValues(uint8_t numValues) : numValues(numValues) {}
 };
 
 enum MaybeTailCall : bool { TailCall, NonTailCall };
 
 // [SMDOC] JIT-to-C++ Function Calls. (callVM)
 //
+// TODO(bug 1530937): update this comment after converting all VM functions.
+//
 // Sometimes it is easier to reuse C++ code by calling VM's functions. Calling a
 // function from the VM can be achieved with the use of callWithABI but this is
 // discouraged when the called functions might trigger exceptions and/or
 // garbage collections which are expecting to walk the stack. VMFunctions and
 // callVM are interfaces provided to handle the exception handling and register
 // the stack end (JITActivation) such that walking the stack is made possible.
 //
 // A VMFunction is a structure which contains the necessary information needed
@@ -118,21 +120,19 @@ enum MaybeTailCall : bool { TailCall, No
 //
 //    masm.Push(id);
 //    masm.Push(obj);
 //    if (!callVM(FooInfo)) {
 //      return false;
 //    }
 //
 // After this, the result value is in the return value register.
-struct VMFunction {
-  // Global linked list of all VMFunctions.
-  static VMFunction* functions;
-  VMFunction* next;
 
+// Data for a VM function. All VMFunctionDatas are stored in a constexpr array.
+struct VMFunctionData {
   // Address of the C function.
   void* wrapped;
 
 #if defined(JS_JITSPEW) || defined(JS_TRACE_LOGGING)
   // Informative name of the wrapped function. The name should not be present
   // in release builds in order to save memory.
   const char* name_;
 #endif
@@ -288,62 +288,86 @@ struct VMFunction {
     // few loop iterations)
     while (n) {
       count++;
       n &= n - 1;
     }
     return count;
   }
 
-  constexpr VMFunction(void* wrapped, const char* name, uint32_t explicitArgs,
-                       uint32_t argumentProperties,
-                       uint32_t argumentPassedInFloatRegs,
-                       uint64_t argRootTypes, DataType outParam,
-                       RootType outParamRootType, DataType returnType,
-                       uint8_t extraValuesToPop = 0,
-                       MaybeTailCall expectTailCall = NonTailCall)
-      : next(nullptr),
-        wrapped(wrapped),
+  constexpr VMFunctionData(void* wrapped, const char* name,
+                           uint32_t explicitArgs, uint32_t argumentProperties,
+                           uint32_t argumentPassedInFloatRegs,
+                           uint64_t argRootTypes, DataType outParam,
+                           RootType outParamRootType, DataType returnType,
+                           uint8_t extraValuesToPop = 0,
+                           MaybeTailCall expectTailCall = NonTailCall)
+      : wrapped(wrapped),
 #if defined(JS_JITSPEW) || defined(JS_TRACE_LOGGING)
         name_(name),
 #endif
         argumentRootTypes(argRootTypes),
         argumentProperties(argumentProperties),
         argumentPassedInFloatRegs(argumentPassedInFloatRegs),
         explicitArgs(explicitArgs),
        outParamRootType(outParamRootType),
        outParam(outParam),
        returnType(returnType),
        extraValuesToPop(extraValuesToPop),
        expectTailCall(expectTailCall) {
+    // Check for valid failure/return type.
+    MOZ_ASSERT_IF(outParam != Type_Void,
+                  returnType == Type_Void || returnType == Type_Bool);
+    MOZ_ASSERT(returnType == Type_Void || returnType == Type_Bool ||
+               returnType == Type_Object);
   }
 
-  VMFunction(const VMFunction& o)
-      : next(functions),
-        wrapped(o.wrapped),
+  // Note: clang-tidy suggests using |= default| here but that generates extra
+  // static initializers for old-style VMFunction definitions with Clang. We
+  // can do this after bug 1530937 converts all of them.
+  constexpr VMFunctionData(const VMFunctionData& o)
+      : wrapped(o.wrapped),
 #if defined(JS_JITSPEW) || defined(JS_TRACE_LOGGING)
        name_(o.name_),
 #endif
        argumentRootTypes(o.argumentRootTypes),
        argumentProperties(o.argumentProperties),
        argumentPassedInFloatRegs(o.argumentPassedInFloatRegs),
        explicitArgs(o.explicitArgs),
        outParamRootType(o.outParamRootType),
        outParam(o.outParam),
        returnType(o.returnType),
        extraValuesToPop(o.extraValuesToPop),
        expectTailCall(o.expectTailCall) {
+  }
+};
+
+// TODO(bug 1530937): remove VMFunction and FunctionInfo after converting all
+// VM functions to the new design.
+struct VMFunction : public VMFunctionData {
+  // Global linked list of all VMFunctions.
+  static VMFunction* functions;
+  VMFunction* next;
+
+  constexpr VMFunction(void* wrapped, const char* name, uint32_t explicitArgs,
+                       uint32_t argumentProperties,
+                       uint32_t argumentPassedInFloatRegs,
+                       uint64_t argRootTypes, DataType outParam,
+                       RootType outParamRootType, DataType returnType,
+                       uint8_t extraValuesToPop = 0,
+                       MaybeTailCall expectTailCall = NonTailCall)
+      : VMFunctionData(wrapped, name, explicitArgs, argumentProperties,
+                       argumentPassedInFloatRegs, argRootTypes, outParam,
+                       outParamRootType, returnType, extraValuesToPop,
+                       expectTailCall),
+        next(nullptr) {}
+
+  VMFunction(const VMFunction& o) : VMFunctionData(o), next(functions) {
     // Add this to the global list of VMFunctions.
    functions = this;
-
-    // Check for valid failure/return type.
-    MOZ_ASSERT_IF(outParam != Type_Void,
-                  returnType == Type_Void || returnType == Type_Bool);
-    MOZ_ASSERT(returnType == Type_Void || returnType == Type_Bool ||
-               returnType == Type_Object);
   }
 
   typedef const VMFunction* Lookup;
 
   static HashNumber hash(const VMFunction* f) {
     // The hash is based on the wrapped function, not the VMFunction*, to
     // avoid generating duplicate wrapper code.
     HashNumber hash = 0;
@@ -1210,12 +1234,16 @@ extern const VMFunction AddOrUpdateSpars
 extern const VMFunction GetSparseElementHelperInfo;
 extern const VMFunction ToNumberInfo;
 extern const VMFunction ToNumericInfo;
 
 // TailCall VMFunctions
 extern const VMFunction DoConcatStringObjectInfo;
 
+enum class VMFunctionId;
+
+extern const VMFunctionData& GetVMFunction(VMFunctionId id);
+
 } // namespace jit
 } // namespace js
 
 #endif /* jit_VMFunctions_h */
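The split above is what makes the migration incremental: VMFunctionData carries only constexpr-friendly data, while the legacy VMFunction subclass keeps the self-registering linked list. To make the cost difference concrete (the old-style block is quoted from the BaselineCompiler.cpp hunk near the top of this patch; the new-style lookup is how the same function is reached once it is on the list):

    // Old style: a namespace-scope VMFunction whose copy constructor runs at
    // static-initialization time and links the object into
    // VMFunction::functions, which JitRuntime::initialize later walks.
    typedef bool (*CheckOverRecursedBaselineFn)(JSContext*, BaselineFrame*);
    static const VMFunction CheckOverRecursedBaselineInfo =
        FunctionInfo<CheckOverRecursedBaselineFn>(CheckOverRecursedBaseline,
                                                  "CheckOverRecursedBaseline");

    // New style: no per-function global object and no runtime registration;
    // the data lives in the constexpr vmFunctions[] array, indexed by id.
    const VMFunctionData& data =
        GetVMFunction(VMFunctionId::CheckOverRecursedBaseline);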
--- a/js/src/jit/arm/Trampoline-arm.cpp +++ b/js/src/jit/arm/Trampoline-arm.cpp @@ -699,20 +699,21 @@ JitRuntime::BailoutTable JitRuntime::gen void JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail) { bailoutHandlerOffset_ = startTrampolineCode(masm); GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail); } bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, - const VMFunction& f) { + const VMFunctionData& f, + uint32_t* wrapperOffset) { MOZ_ASSERT(functionWrappers_); - uint32_t wrapperOffset = startTrampolineCode(masm); + *wrapperOffset = startTrampolineCode(masm); AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask); static_assert( (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0, "Wrapper register set must be a superset of Volatile register set."); // The context is the first argument; r0 is the first argument register. @@ -888,17 +889,17 @@ bool JitRuntime::generateVMWrapper(JSCon masm.speculationBarrier(); } masm.leaveExitFrame(); masm.retn(Imm32(sizeof(ExitFrameLayout) + f.explicitStackSlots() * sizeof(void*) + f.extraValuesToPop * sizeof(Value))); - return functionWrappers_->putNew(&f, wrapperOffset); + return true; } uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type) { uint32_t offset = startTrampolineCode(masm); masm.pushReturnAddress();
--- a/js/src/jit/arm64/Trampoline-arm64.cpp +++ b/js/src/jit/arm64/Trampoline-arm64.cpp @@ -528,20 +528,21 @@ JitRuntime::BailoutTable JitRuntime::gen void JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail) { bailoutHandlerOffset_ = startTrampolineCode(masm); GenerateBailoutThunk(masm, bailoutTail); } bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, - const VMFunction& f) { + const VMFunctionData& f, + uint32_t* wrapperOffset) { MOZ_ASSERT(functionWrappers_); - uint32_t wrapperOffset = startTrampolineCode(masm); + *wrapperOffset = startTrampolineCode(masm); // Avoid conflicts with argument registers while discarding the result after // the function call. AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask); static_assert( (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0, "Wrapper register set must be a superset of the Volatile register set."); @@ -728,17 +729,17 @@ bool JitRuntime::generateVMWrapper(JSCon masm.speculationBarrier(); } masm.leaveExitFrame(); masm.retn(Imm32(sizeof(ExitFrameLayout) + f.explicitStackSlots() * sizeof(void*) + f.extraValuesToPop * sizeof(Value))); - return functionWrappers_->putNew(&f, wrapperOffset); + return true; } uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type) { uint32_t offset = startTrampolineCode(masm); MOZ_ASSERT(PreBarrierReg == r1); Register temp1 = r2;
--- a/js/src/jit/none/Trampoline-none.cpp
+++ b/js/src/jit/none/Trampoline-none.cpp
@@ -35,17 +35,17 @@ void JitRuntime::generateExceptionTailSt
 void JitRuntime::generateBailoutTailStub(MacroAssembler&, Label*) {
   MOZ_CRASH();
 }
 
 void JitRuntime::generateProfilerExitFrameTailStub(MacroAssembler&, Label*) {
   MOZ_CRASH();
 }
 
 bool JitRuntime::generateVMWrapper(JSContext*, MacroAssembler&,
-                                   const VMFunction&) {
+                                   const VMFunctionData&, uint32_t*) {
   MOZ_CRASH();
 }
 
 FrameSizeClass FrameSizeClass::FromDepth(uint32_t) { MOZ_CRASH(); }
 FrameSizeClass FrameSizeClass::ClassLimit() { MOZ_CRASH(); }
 uint32_t FrameSizeClass::frameSize() const { MOZ_CRASH(); }
 
 BailoutFrameInfo::BailoutFrameInfo(const JitActivationIterator& iter,
--- a/js/src/jit/x64/Trampoline-x64.cpp +++ b/js/src/jit/x64/Trampoline-x64.cpp @@ -586,20 +586,21 @@ JitRuntime::BailoutTable JitRuntime::gen void JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail) { bailoutHandlerOffset_ = startTrampolineCode(masm); GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail); } bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, - const VMFunction& f) { + const VMFunctionData& f, + uint32_t* wrapperOffset) { MOZ_ASSERT(functionWrappers_); - uint32_t wrapperOffset = startTrampolineCode(masm); + *wrapperOffset = startTrampolineCode(masm); // Avoid conflicts with argument registers while discarding the result after // the function call. AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask); static_assert( (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0, "Wrapper register set must be a superset of Volatile register set"); @@ -767,17 +768,17 @@ bool JitRuntime::generateVMWrapper(JSCon masm.speculationBarrier(); } masm.leaveExitFrame(); masm.retn(Imm32(sizeof(ExitFrameLayout) + f.explicitStackSlots() * sizeof(void*) + f.extraValuesToPop * sizeof(Value))); - return functionWrappers_->putNew(&f, wrapperOffset); + return true; } uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type) { uint32_t offset = startTrampolineCode(masm); MOZ_ASSERT(PreBarrierReg == rdx); Register temp1 = rax;
--- a/js/src/jit/x86/Trampoline-x86.cpp +++ b/js/src/jit/x86/Trampoline-x86.cpp @@ -603,20 +603,21 @@ JitRuntime::BailoutTable JitRuntime::gen void JitRuntime::generateBailoutHandler(MacroAssembler& masm, Label* bailoutTail) { bailoutHandlerOffset_ = startTrampolineCode(masm); GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID, bailoutTail); } bool JitRuntime::generateVMWrapper(JSContext* cx, MacroAssembler& masm, - const VMFunction& f) { + const VMFunctionData& f, + uint32_t* wrapperOffset) { MOZ_ASSERT(functionWrappers_); - uint32_t wrapperOffset = startTrampolineCode(masm); + *wrapperOffset = startTrampolineCode(masm); // Avoid conflicts with argument registers while discarding the result after // the function call. AllocatableGeneralRegisterSet regs(Register::Codes::WrapperMask); static_assert( (Register::Codes::VolatileMask & ~Register::Codes::WrapperMask) == 0, "Wrapper register set must be a superset of Volatile register set."); @@ -781,17 +782,17 @@ bool JitRuntime::generateVMWrapper(JSCon masm.speculationBarrier(); } masm.leaveExitFrame(); masm.retn(Imm32(sizeof(ExitFrameLayout) + f.explicitStackSlots() * sizeof(void*) + f.extraValuesToPop * sizeof(Value))); - return functionWrappers_->putNew(&f, wrapperOffset); + return true; } uint32_t JitRuntime::generatePreBarrier(JSContext* cx, MacroAssembler& masm, MIRType type) { uint32_t offset = startTrampolineCode(masm); MOZ_ASSERT(PreBarrierReg == edx); Register temp1 = eax;
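All five platform trampolines now share the same contract: generateVMWrapper only emits the wrapper and reports its start offset, and the caller decides where that offset is recorded. A condensed sketch of the caller side, mirroring the Ion.cpp and VMFunctions.cpp hunks earlier in this patch (error handling abbreviated):

    uint32_t offset;

    // New, id-indexed functions (JitRuntime::generateVMWrappers): the slot in
    // functionWrapperOffsets_ is simply size_t(id).
    if (!generateVMWrapper(cx, masm, GetVMFunction(id), &offset)) {
      return false;
    }
    functionWrapperOffsets_.infallibleAppend(offset);

    // Legacy VMFunction-based functions (loop in JitRuntime::initialize),
    // kept until bug 1530937 converts the remaining callers.
    if (!generateVMWrapper(cx, masm, *fun, &offset)) {
      return false;
    }
    if (!functionWrappers_->putNew(fun, offset)) {
      return false;
    }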