author | Brian Hackett <bhackett1024@gmail.com> |
date | Sat, 10 Sep 2011 22:03:01 +0200 |
changeset 78375 | 323595f354b119ccc91ea41ac7ab9ca25fc7c991 |
parent 78374 | f1c585415dd4532e6d64b8ac8323d935e92b8553 |
child 78376 | 7c89b0ff453da8d3547278b50569497f920f8429 |
push id | 78 |
push user | clegnitto@mozilla.com |
push date | Fri, 16 Dec 2011 17:32:24 +0000 |
treeherder | mozilla-release@79d24e644fdd |
bugs | 683804 |
milestone | 9.0a1 |
first release with | nightly linux32 / linux64 / mac / win32 / win64 |
last release without | nightly linux32 / linux64 / mac / win32 / win64 |
--- a/js/src/methodjit/BaseAssembler.h
+++ b/js/src/methodjit/BaseAssembler.h
@@ -601,17 +601,17 @@ static const JSC::MacroAssembler::Regist
         return getLinkerCallReturnOffset(call);
     }

 #define STUB_CALL_TYPE(type)                                                \
     Call callWithVMFrame(bool inlining, type stub, jsbytecode *pc,          \
                          DataLabelPtr *pinlined, uint32 fd) {               \
         return fallibleVMCall(inlining, JS_FUNC_TO_DATA_PTR(void *, stub),  \
-                              pc, pinlined, fd);                            \
+                              pc, NULL, pinlined, fd);                      \
     }

     STUB_CALL_TYPE(JSObjStub);
     STUB_CALL_TYPE(VoidPtrStubUInt32);
     STUB_CALL_TYPE(VoidStubUInt32);
     STUB_CALL_TYPE(VoidStub);

 #undef STUB_CALL_TYPE

@@ -633,48 +633,50 @@ static const JSC::MacroAssembler::Regist
         setupFrameDepth(frameDepth);

         // The JIT has moved Arg1 already, and we've guaranteed to not clobber
         // it. Move ArgReg0 into place now. setupFallibleVMFrame will not
         // clobber it either.
         move(MacroAssembler::stackPointerRegister, Registers::ArgReg0);
     }

-    void setupFallibleVMFrame(bool inlining, jsbytecode *pc,
+    void setupFallibleVMFrame(bool inlining, jsbytecode *pc, CallSite *inlined,
                               DataLabelPtr *pinlined, int32 frameDepth) {
         setupInfallibleVMFrame(frameDepth);

         /* regs->fp = fp */
         storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));

         /* PC -> regs->pc :( */
         storePtr(ImmPtr(pc), FrameAddress(offsetof(VMFrame, regs.pc)));

         if (inlining) {
             /* inlined -> regs->inlined :( */
-            DataLabelPtr ptr = storePtrWithPatch(ImmPtr(NULL),
-                                                 FrameAddress(VMFrame::offsetOfInlined));
-            if (pinlined)
-                *pinlined = ptr;
+            if (inlined) {
+                storePtr(ImmPtr(inlined), FrameAddress(VMFrame::offsetOfInlined));
+            } else {
+                DataLabelPtr ptr = storePtrWithPatch(ImmPtr(NULL),
+                                                     FrameAddress(VMFrame::offsetOfInlined));
+                if (pinlined)
+                    *pinlined = ptr;
+            }
         }

         restoreStackBase();
     }

-    void setupFallibleABICall(bool inlining, jsbytecode *pc, int32 frameDepth) {
+    void setupFallibleABICall(bool inlining, jsbytecode *pc, CallSite *inlined, int32 frameDepth) {
         setupFrameDepth(frameDepth);

-        /* Store fp and pc */
+        /* Store fp/pc/inlined */
         storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
         storePtr(ImmPtr(pc), FrameAddress(offsetof(VMFrame, regs.pc)));

-        if (inlining) {
-            /* ABI calls cannot be made from inlined frames. */
-            storePtr(ImmPtr(NULL), FrameAddress(VMFrame::offsetOfInlined));
-        }
+        if (inlining)
+            storePtr(ImmPtr(inlined), FrameAddress(VMFrame::offsetOfInlined));
     }

     void restoreStackBase() {
 #if defined(JS_CPU_X86)
         /*
          * We use the %ebp base stack pointer on x86 to store the JSStackFrame.
          * Restore this before calling so that debuggers can construct a
          * coherent stack if we crash outside of JIT code.
@@ -692,18 +694,18 @@ static const JSC::MacroAssembler::Regist
         setupInfallibleVMFrame(frameDepth);
         return wrapVMCall(ptr);
     }

     // A fallible VM call is a stub call (taking a VMFrame & and one optional
     // parameter) that needs the entire VMFrame to be coherent, meaning that
     // |pc|, |inlined| and |fp| are guaranteed to be up-to-date.
     Call fallibleVMCall(bool inlining, void *ptr, jsbytecode *pc,
-                        DataLabelPtr *pinlined, int32 frameDepth) {
-        setupFallibleVMFrame(inlining, pc, pinlined, frameDepth);
+                        CallSite *inlined, DataLabelPtr *pinlined, int32 frameDepth) {
+        setupFallibleVMFrame(inlining, pc, inlined, pinlined, frameDepth);

         Call call = wrapVMCall(ptr);

         // Restore the frame pointer from the VM.
         loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);

         return call;
     }
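For orientation (not part of the patch itself): the widened fallibleVMCall signature is exercised in two ways by the hunks below. A minimal sketch of both caller patterns, using identifiers from this changeset; the surrounding stub setup is elided and |frameDepth| stands in for whatever depth the caller computes:

    // Runtime-generated stub (cf. MonoIC.cpp below): the inline call site is
    // already known, so it is written into VMFrame's |inlined| slot directly.
    masm.fallibleVMCall(cx->typeInferenceEnabled(), compilePtr,
                        f.regs.pc, f.regs.inlined(), /* pinlined = */ NULL,
                        frameDepth);

    // Compile-time stub call (cf. Compiler.cpp / StubCompiler.cpp below): no
    // CallSite exists yet, so pass NULL plus a patchable DataLabelPtr that
    // recompilation can fill in later.
    DataLabelPtr inlinePatch;
    masm.fallibleVMCall(cx->typeInferenceEnabled(), ptr,
                        outerPC(), /* inlined = */ NULL, &inlinePatch,
                        frame.totalDepth());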
--- a/js/src/methodjit/BaseCompiler.h
+++ b/js/src/methodjit/BaseCompiler.h
@@ -179,33 +179,34 @@ class NativeStubLinker : public LinkerHe
 {
   public:
 #ifdef JS_CPU_X64
     typedef JSC::MacroAssembler::DataLabelPtr FinalJump;
 #else
     typedef JSC::MacroAssembler::Jump FinalJump;
 #endif

-    NativeStubLinker(Assembler &masm, JITScript *jit, jsbytecode *pc, FinalJump done)
-      : LinkerHelper(masm, JSC::METHOD_CODE), jit(jit), pc(pc), done(done)
+    NativeStubLinker(Assembler &masm, JITScript *jit, jsbytecode *pc, CallSite *inlined, FinalJump done)
+      : LinkerHelper(masm, JSC::METHOD_CODE), jit(jit), pc(pc), inlined(inlined), done(done)
     {}

     bool init(JSContext *cx);

     void patchJump(JSC::CodeLocationLabel target) {
 #ifdef JS_CPU_X64
         patch(done, target);
 #else
         link(done, target);
 #endif
     }

   private:
     JITScript *jit;
     jsbytecode *pc;
+    CallSite *inlined;
     FinalJump done;
 };

 bool
 NativeStubEpilogue(VMFrame &f, Assembler &masm, NativeStubLinker::FinalJump *result,
                    int32 initialFrameDepth, int32 vpOffset,
                    MaybeRegisterID typeReg, MaybeRegisterID dataReg);
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -3199,17 +3199,17 @@ mjit::Compiler::prepareStubCall(Uses use

 JSC::MacroAssembler::Call
 mjit::Compiler::emitStubCall(void *ptr, DataLabelPtr *pinline)
 {
     JaegerSpew(JSpew_Insns, " ---- CALLING STUB ---- \n");

     masm.bumpStubCounter(script, PC, Registers::tempCallReg());

     Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
-                                  ptr, outerPC(), pinline, frame.totalDepth());
+                                  ptr, outerPC(), NULL, pinline, frame.totalDepth());

     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }

 void
 mjit::Compiler::interruptCheckHelper()
 {
     /*
--- a/js/src/methodjit/LoopState.cpp
+++ b/js/src/methodjit/LoopState.cpp
@@ -254,17 +254,17 @@ LoopState::flushLoop(StubCompiler &stubc
                 patch->hasPatch = true;
                 patch->codePatch = masm.storePtrWithPatch(ImmPtr(NULL),
                                                           FrameAddress(offsetof(VMFrame, scratch)));
                 JS_STATIC_ASSERT(Registers::ReturnReg != Registers::ArgReg1);
                 masm.move(Registers::ReturnReg, Registers::ArgReg1);

                 if (call.entry) {
                     masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::InvariantFailure),
-                                        pc, NULL, 0);
+                                        pc, NULL, NULL, 0);
                 } else {
                     /* f.regs are already coherent, don't write new values to them. */
                     masm.infallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::InvariantFailure), -1);
                 }
             }
         }
     } else {
         for (unsigned i = 0; i < restoreInvariantCalls.length(); i++) {
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -1076,18 +1076,16 @@ JITScript::polyICSectionsLimit() const
 template <typename T>
 static inline void Destroy(T &t)
 {
     t.~T();
 }

 mjit::JITScript::~JITScript()
 {
-    code.release();
-
     if (pcLengths)
         Foreground::free_(pcLengths);

 #if defined JS_POLYIC
     ic::GetElementIC *getElems_ = getElems();
     ic::SetElementIC *setElems_ = setElems();
     ic::PICInfo *pics_ = pics();
     for (uint32 i = 0; i < nGetElems; i++)
@@ -1111,35 +1109,28 @@ mjit::JITScript::~JITScript()
     for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
         JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
         if (pool)
             pool->release();
     }

     ic::CallICInfo *callICs_ = callICs();
-    for (uint32 i = 0; i < nCallICs; i++) {
-        callICs_[i].releasePools();
-        if (callICs_[i].fastGuardedObject)
-            callICs_[i].purgeGuardedObject();
-    }
+    for (uint32 i = 0; i < nCallICs; i++)
+        callICs_[i].purge();

     // Fixup any ICs still referring to this JIT.
     while (!JS_CLIST_IS_EMPTY(&callers)) {
         JS_STATIC_ASSERT(offsetof(ic::CallICInfo, links) == 0);
         ic::CallICInfo *ic = (ic::CallICInfo *) callers.next;
-
-        uint8 *start = (uint8 *)ic->funGuard.executableAddress();
-        JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
-
-        repatch.repatch(ic->funGuard, NULL);
-        repatch.relink(ic->funJump, ic->slowPathStart);
-        ic->purgeGuardedObject();
+        ic->purge();
     }
 #endif
+
+    code.release();
 }

 size_t
 JSScript::jitDataSize(JSUsableSizeFun usf)
 {
     size_t n = 0;
     if (jitNormal)
         n += jitNormal->scriptDataSize(usf);
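The destructor reordering above is not otherwise commented in the patch; presumably code.release() moves to the end because CallICInfo::purge() (added in MonoIC.cpp below) repatches funGuard/funJump, and for this script's own call ICs those labels live inside |code|, so the executable pool has to stay alive until purging is done. A sketch of the resulting ordering, with unrelated teardown elided:

    ic::CallICInfo *callICs_ = callICs();
    for (uint32 i = 0; i < nCallICs; i++)
        callICs_[i].purge();      // may repatch jitcode owned by |code|
    /* ... unlink ICs in other scripts that still call into this one ... */
    code.release();               // only now release the executable pool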
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -556,18 +556,19 @@ struct PCLengthEntry {
 };

 /*
  * Pools and patch locations for managing stubs for non-FASTCALL C++ calls made
  * from native call and PropertyOp stubs. Ownership of these may be transferred
  * into the orphanedNativePools for the compartment.
  */
 struct NativeCallStub {
-    /* PC for the stub. Native call stubs cannot be added for inline frames. */
+    /* pc/inlined location of the stub. */
     jsbytecode *pc;
+    CallSite *inlined;

     /* Pool for the stub, NULL if it has been removed from the script. */
     JSC::ExecutablePool *pool;

     /*
      * Fallthrough jump returning to jitcode which may be patched during
      * recompilation. On x64 this is an indirect jump to avoid issues with far
      * jumps on relative branches.
@@ -659,16 +660,17 @@ struct JITScript {
     bool isValidCode(void *ptr) {
         char *jitcode = (char *)code.m_code.executableAddress();
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }

     void nukeScriptDependentICs();
+    void purgeGetterPICs();

     /* |usf| can be NULL here, in which case the fallback size computation will be used. */
     size_t scriptDataSize(JSUsableSizeFun usf);

     jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;

   private:
     /* Helpers used to navigate the variable-length sections. */
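Illustrative note on the new NativeCallStub::inlined field: stubs are now identified by the (pc, inlined) pair rather than pc alone, so a stub built while running an inlined frame records both the operation's pc and the active inline CallSite, and recompilation only steals stubs whose pair matches. A sketch mirroring the Retcon.cpp hunk below:

    for (unsigned i = 0; i < jit->nativeCallStubs.length(); i++) {
        NativeCallStub &stub = jit->nativeCallStubs[i];
        if (stub.pc != pc || stub.inlined != inlined)
            continue;
        /* ... steal stub.pool and patch its fallthrough jump ... */
    }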
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -550,25 +550,45 @@ SlowCallFromIC(VMFrame &f, ic::CallICInf
 static void * JS_FASTCALL
 SlowNewFromIC(VMFrame &f, ic::CallICInfo *ic)
 {
     stubs::SlowNew(f, ic->frameSize.staticArgc());
     return NULL;
 }

+void
+CallICInfo::purge()
+{
+    uint8 *start = (uint8 *)funGuard.executableAddress();
+    JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
+
+    repatch.repatch(funGuard, NULL);
+    repatch.relink(funJump, slowPathStart);
+
+    releasePools();
+    fastGuardedNative = NULL;
+
+    if (fastGuardedObject) {
+        hasJsFunCheck = false;
+        fastGuardedObject = NULL;
+        JS_REMOVE_LINK(&links);
+    }
+}
+
 bool
 NativeStubLinker::init(JSContext *cx)
 {
     JSC::ExecutablePool *pool = LinkerHelper::init(cx);
     if (!pool)
         return false;

     NativeCallStub stub;
     stub.pc = pc;
+    stub.inlined = inlined;
     stub.pool = pool;
     stub.jump = locationOf(done);
     if (!jit->nativeCallStubs.append(stub)) {
         pool->release();
         return false;
     }

     return true;
@@ -630,17 +650,17 @@ mjit::NativeStubEpilogue(VMFrame &f, Ass
 #endif

     /* Generate a call for type check failures on the native result. */
     if (!mismatches.empty()) {
         for (unsigned i = 0; i < mismatches.length(); i++)
             mismatches[i].linkTo(masm.label(), &masm);
         masm.addPtr(Imm32(vpOffset), JSFrameReg, Registers::ArgReg1);
         masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::TypeBarrierReturn),
-                            f.regs.pc, NULL, initialFrameDepth);
+                            f.regs.pc, f.regs.inlined(), NULL, initialFrameDepth);
         masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
         masm.jump().linkTo(finished, &masm);
     }

     /* Move JaegerThrowpoline into register for very far jump on x64. */
     hasException.linkTo(masm.label(), &masm);
     if (f.cx->typeInferenceEnabled())
         masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
@@ -761,25 +781,25 @@ class CallCompiler : public BaseCompiler
             masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
                           FrameAddress(offsetof(VMFrame, stubRejoin)));
         }

         masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());

         /* Try and compile. On success we get back the nmap pointer. */
         void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
-        DataLabelPtr inlined;
         if (ic.frameSize.isStatic()) {
             masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
             masm.fallibleVMCall(cx->typeInferenceEnabled(),
-                                compilePtr, f.regs.pc, &inlined, ic.frameSize.staticLocalSlots());
+                                compilePtr, f.regs.pc, f.regs.inlined(), NULL,
+                                ic.frameSize.staticLocalSlots());
         } else {
             masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
             masm.fallibleVMCall(cx->typeInferenceEnabled(),
-                                compilePtr, f.regs.pc, &inlined, -1);
+                                compilePtr, f.regs.pc, f.regs.inlined(), NULL, -1);
         }

         Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                               Registers::ReturnReg);
         masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), JSFrameReg);

         /* Compute the value of ncode to use at this call site. */
         ncode = (uint8 *) f.jit()->code.m_code.executableAddress() + ic.call->codeOffset;
@@ -807,21 +827,16 @@ class CallCompiler : public BaseCompiler
         }

         linker.link(notCompiled, ic.slowPathStart.labelAtOffset(ic.slowJoinOffset));
         JSC::CodeLocationLabel cs = linker.finalize();
         JaegerSpew(JSpew_PICs, "generated CALL stub %p (%lu bytes)\n", cs.executableAddress(),
                    (unsigned long) masm.size());

-        if (f.regs.inlined()) {
-            JSC::LinkBuffer code((uint8 *) cs.executableAddress(), masm.size(), JSC::METHOD_CODE);
-            code.patch(inlined, f.regs.inlined());
-        }
-
         Repatcher repatch(from);
         JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
         repatch.relink(oolJump, cs);

         return true;
     }

     bool patchInlinePath(JITScript *from, JSScript *script, JSObject *obj)
@@ -948,20 +963,16 @@ class CallCompiler : public BaseCompiler

         /* Don't touch the IC if the call triggered a recompilation. */
         if (monitor.recompiled())
             return true;

         /* Right now, take slow-path for IC misses or multiple stubs. */
         if (ic.fastGuardedNative || ic.hasJsFunCheck)
             return true;

-        /* Don't generate native MICs within inlined frames, we can't recompile them yet. */
-        if (f.regs.inlined())
-            return true;
-
         /* Native MIC needs to warm up first. */
         if (!ic.hit) {
             ic.hit = true;
             return true;
         }

         /* Generate fast-path for calling this native. */
         Assembler masm;
@@ -980,25 +991,25 @@ class CallCompiler : public BaseCompiler
                           FrameAddress(offsetof(VMFrame, stubRejoin)));
         }

         /* N.B. After this call, the frame will have a dynamic frame size. */
         if (ic.frameSize.isDynamic()) {
             masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());
             masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                 JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
-                                f.regs.pc, NULL, initialFrameDepth);
+                                f.pc(), NULL, NULL, initialFrameDepth);
         }

         Registers tempRegs = Registers::tempCallRegMask();
         RegisterID t0 = tempRegs.takeAnyReg().reg();
         masm.bumpStubCounter(f.script(), f.pc(), t0);

         int32 storeFrameDepth = ic.frameSize.isStatic() ? initialFrameDepth : -1;
-        masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, storeFrameDepth);
+        masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.pc(), f.regs.inlined(), storeFrameDepth);

         /* Grab cx. */
 #ifdef JS_CPU_X86
         RegisterID cxReg = tempRegs.takeAnyReg().reg();
 #else
         RegisterID cxReg = Registers::ArgReg0;
 #endif
         masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
@@ -1050,17 +1061,17 @@ class CallCompiler : public BaseCompiler
         if (native == js_regexp_exec && !CallResultEscapes(f.pc()))
             native = js_regexp_test;

         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, native), false);

         NativeStubLinker::FinalJump done;
         if (!NativeStubEpilogue(f, masm, &done, initialFrameDepth, vpOffset,
                                 MaybeRegisterID(), MaybeRegisterID()))
             return false;
-        NativeStubLinker linker(masm, f.jit(), f.regs.pc, done);
+        NativeStubLinker linker(masm, f.jit(), f.pc(), f.regs.inlined(), done);
         if (!linker.init(f.cx))
             THROWV(true);

         if (!linker.verifyRange(jit)) {
             disable(jit);
             return true;
         }
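The CallCompiler changes above drop the old "don't generate native MICs within inlined frames" bail-out; instead the generated stub records the active inline call site itself. A condensed sketch of the new pattern when emitting the ABI call for a native, with identifiers from the hunks above and other setup elided:

    /* Set regs->pc and regs->inlined for the stub's VM frame. */
    masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.pc(), f.regs.inlined(),
                              storeFrameDepth);
    /* ... marshal arguments and call the native via callWithABI ... */

    /* Remember (pc, inlined) with the stub so recompilation can find it. */
    NativeStubLinker linker(masm, f.jit(), f.pc(), f.regs.inlined(), done);
    if (!linker.init(f.cx))
        THROWV(true);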
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -279,23 +279,17 @@ struct CallICInfo {

     inline void releasePool(PoolIndex index) {
         if (pools[index]) {
             pools[index]->release();
             pools[index] = NULL;
         }
     }

-    inline void purgeGuardedObject() {
-        JS_ASSERT(fastGuardedObject);
-        releasePool(CallICInfo::Pool_ClosureStub);
-        hasJsFunCheck = false;
-        fastGuardedObject = NULL;
-        JS_REMOVE_LINK(&links);
-    }
+    void purge();
 };

 void * JS_FASTCALL New(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL Call(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeNew(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeCall(VMFrame &f, ic::CallICInfo *ic);

 JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -807,18 +807,16 @@ struct GetPropertyHelper {
                 return ic.disable(cx, "method valued shape");
             } else {
                 if (shape->hasGetterValue())
                     return ic.disable(cx, "getter value shape");
                 if (shape->hasSlot() && holder != obj)
                     return ic.disable(cx, "slotful getter hook through prototype");
                 if (!ic.canCallHook)
                     return ic.disable(cx, "can't call getter hook");
-                if (f.regs.inlined())
-                    return ic.disable(cx, "hook called from inline frame");
             }
         } else if (!shape->hasSlot()) {
             return ic.disable(cx, "no slot");
         }

         return Lookup_Cacheable;
     }

@@ -1197,17 +1195,17 @@ class GetPropCompiler : public PICStubCo
             masm.loadObjProp(obj, holdObjReg, shape, Registers::ClobberInCall, t0);
             masm.storeValueFromComponents(Registers::ClobberInCall, t0, Address(JSFrameReg, vpOffset));
         } else {
             masm.storeValue(UndefinedValue(), Address(JSFrameReg, vpOffset));
         }

         int32 initialFrameDepth = f.regs.sp - f.fp()->slots();
-        masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.regs.pc, initialFrameDepth);
+        masm.setupFallibleABICall(cx->typeInferenceEnabled(), f.pc(), f.regs.inlined(), initialFrameDepth);

         /* Grab cx. */
 #ifdef JS_CPU_X86
         RegisterID cxReg = tempRegs.takeAnyReg().reg();
 #else
         RegisterID cxReg = Registers::ArgReg0;
 #endif
         masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), cxReg);
@@ -1223,17 +1221,17 @@ class GetPropCompiler : public PICStubCo
         masm.storeArg(1, holdObjReg);
         masm.storeArg(0, cxReg);

         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, getter), false);

         NativeStubLinker::FinalJump done;
         if (!NativeStubEpilogue(f, masm, &done, 0, vpOffset, pic.shapeReg, pic.objReg))
             return;
-        NativeStubLinker linker(masm, f.jit(), f.regs.pc, done);
+        NativeStubLinker linker(masm, f.jit(), f.pc(), f.regs.inlined(), done);
         if (!linker.init(f.cx))
             THROW();

         if (!linker.verifyRange(f.jit())) {
             disable("code memory is out of range");
             return;
         }

@@ -3227,10 +3225,29 @@ ic::SetElement(VMFrame &f, ic::SetElemen
     }

     stubs::SetElem<strict>(f);
 }

 template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
 template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);

+void
+JITScript::purgeGetterPICs()
+{
+    Repatcher repatcher(this);
+    PICInfo *pics_ = pics();
+    for (uint32 i = 0; i < nPICs; i++) {
+        PICInfo &pic = pics_[i];
+        switch (pic.kind) {
+          case PICInfo::CALL: /* fall-through */
+          case PICInfo::GET:
+            GetPropCompiler::reset(repatcher, pic);
+            pic.reset();
+            break;
+          default:
+            break;
+        }
+    }
+}
+
 #endif /* JS_POLYIC */
--- a/js/src/methodjit/Retcon.cpp
+++ b/js/src/methodjit/Retcon.cpp
@@ -129,43 +129,40 @@ Recompiler::patchCall(JITScript *jit, St
         }
     }

     JS_NOT_REACHED("failed to find call site");
 }

 void
 Recompiler::patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *fp,
-                        jsbytecode *pc, RejoinState rejoin)
+                        jsbytecode *pc, CallSite *inlined, RejoinState rejoin)
 {
     /*
      * There is a native call or getter IC at pc which triggered recompilation.
      * The recompilation could have been triggered either by the native call
      * itself, or by a SplatApplyArgs preparing for the native call. Either
      * way, we don't want to patch up the call, but will instead steal the pool
      * for the IC so it doesn't get freed with the JITScript, and patch up the
      * jump at the end to go to the interpoline.
-     *
-     * When doing this, we do not reset the the IC itself; the JITScript must
-     * be dead and about to be released due to the recompilation (or a GC).
      */
     fp->setRejoin(StubRejoin(rejoin));

     /* :XXX: We might crash later if this fails. */
     compartment->jaegerCompartment()->orphanedNativeFrames.append(fp);

     DebugOnly<bool> found = false;

     /*
      * Find and patch all native call stubs attached to the given PC. There may
      * be multiple ones for getter stubs attached to e.g. a GETELEM.
      */
     for (unsigned i = 0; i < jit->nativeCallStubs.length(); i++) {
         NativeCallStub &stub = jit->nativeCallStubs[i];
-        if (stub.pc != pc)
+        if (stub.pc != pc || stub.inlined != inlined)
             continue;

         found = true;

         /* Check for pools that were already patched. */
         if (!stub.pool)
             continue;

@@ -189,36 +186,50 @@ Recompiler::patchNative(JSCompartment *c
         /* :XXX: We leak the pool if this fails. Oh well. */
         compartment->jaegerCompartment()->orphanedNativePools.append(stub.pool);

         /* Mark as stolen in case there are multiple calls on the stack. */
         stub.pool = NULL;
     }

     JS_ASSERT(found);
+
+    if (inlined) {
+        /*
+         * Purge all ICs in the script which can make native calls, to make
+         * sure the stolen stub is not reentered. This is only necessary if we
+         * are expanding inline frames, as in other circumstances the jitcode
+         * is about to be discarded.
+         */
+        jit->purgeGetterPICs();
+        ic::CallICInfo *callICs_ = jit->callICs();
+        for (uint32 i = 0; i < jit->nCallICs; i++)
+            callICs_[i].purge();
+    }
 }

 void
 Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script)
 {
     /*
      * Check if the VMFrame returns directly into the script's jitcode. This
      * depends on the invariant that f->fp() reflects the frame at the point
      * where the call occurred, irregardless of any frames which were pushed
      * inside the call.
      */
+    JS_ASSERT(!f->regs.inlined());
     StackFrame *fp = f->fp();
     void **addr = f->returnAddressLocation();
     RejoinState rejoin = (RejoinState) f->stubRejoin;
     if (rejoin == REJOIN_NATIVE ||
         rejoin == REJOIN_NATIVE_LOWERED ||
         rejoin == REJOIN_NATIVE_GETTER) {
         /* Native call. */
         if (fp->script() == script) {
-            patchNative(compartment, fp->jit(), fp, f->regs.pc, rejoin);
+            patchNative(compartment, fp->jit(), fp, f->regs.pc, NULL, rejoin);
             f->stubRejoin = REJOIN_NATIVE_PATCHED;
         }
     } else if (rejoin == REJOIN_NATIVE_PATCHED) {
         /* Already patched, don't do anything. */
     } else if (rejoin) {
         /* Recompilation triggered by CompileFunction. */
         if (fp->script() == script) {
             fp->setRejoin(StubRejoin(rejoin));
@@ -297,24 +308,30 @@ Recompiler::expandInlineFrames(JSCompart
     InlineFrame *inner = &fp->jit()->inlineFrames()[inlined->inlineIndex];
     jsbytecode *innerpc = inner->fun->script()->code + inlined->pcOffset;

     StackFrame *innerfp = expandInlineFrameChain(fp, inner);

     /* Check if the VMFrame returns into the inlined frame. */
     if (f->stubRejoin && f->fp() == fp) {
-        /* The VMFrame is calling CompileFunction. */
-        JS_ASSERT(f->stubRejoin != REJOIN_NATIVE &&
-                  f->stubRejoin != REJOIN_NATIVE_LOWERED &&
-                  f->stubRejoin != REJOIN_NATIVE_GETTER &&
-                  f->stubRejoin != REJOIN_NATIVE_PATCHED);
-        innerfp->setRejoin(StubRejoin((RejoinState) f->stubRejoin));
-        *frameAddr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
-        f->stubRejoin = 0;
+        RejoinState rejoin = (RejoinState) f->stubRejoin;
+        JS_ASSERT(rejoin != REJOIN_NATIVE_PATCHED);
+        if (rejoin == REJOIN_NATIVE ||
+            rejoin == REJOIN_NATIVE_LOWERED ||
+            rejoin == REJOIN_NATIVE_GETTER) {
+            /* The VMFrame is calling a native. */
+            patchNative(compartment, fp->jit(), innerfp, innerpc, inlined, rejoin);
+            f->stubRejoin = REJOIN_NATIVE_PATCHED;
+        } else {
+            /* The VMFrame is calling CompileFunction. */
+            innerfp->setRejoin(StubRejoin(rejoin));
+            *frameAddr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
+            f->stubRejoin = 0;
+        }
     }

     if (CallsiteMatches(codeStart, *inlined, *frameAddr)) {
         /* The VMFrame returns directly into the expanded frame. */
         SetRejoinState(innerfp, *inlined, frameAddr);
     }

     if (f->fp() == fp) {
         JS_ASSERT(f->regs.inlined() == inlined);
@@ -503,22 +520,17 @@ void
 Recompiler::cleanup(JITScript *jit)
 {
     while (!JS_CLIST_IS_EMPTY(&jit->callers)) {
         JaegerSpew(JSpew_Recompile, "Purging IC caller\n");

         JS_STATIC_ASSERT(offsetof(ic::CallICInfo, links) == 0);
         ic::CallICInfo *ic = (ic::CallICInfo *) jit->callers.next;

-        uint8 *start = (uint8 *)ic->funGuard.executableAddress();
-        JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
-
-        repatch.repatch(ic->funGuard, NULL);
-        repatch.relink(ic->funJump, ic->slowPathStart);
-        ic->purgeGuardedObject();
+        ic->purge();
     }
 }

 } /* namespace mjit */
 } /* namespace js */

 #endif /* JS_METHODJIT */
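Condensed view (not part of the patch) of how expandInlineFrames now handles a VMFrame sitting in a native call made from an inlined frame; asserts and the unrelated CompileFunction path are omitted:

    RejoinState rejoin = (RejoinState) f->stubRejoin;
    if (rejoin == REJOIN_NATIVE ||
        rejoin == REJOIN_NATIVE_LOWERED ||
        rejoin == REJOIN_NATIVE_GETTER) {
        /*
         * Steal the native stub recorded for (innerpc, inlined) and point its
         * fallthrough at the interpoline; patchNative also purges call ICs and
         * getter PICs so the orphaned stub cannot be re-entered while the
         * expanded frames are still on the stack.
         */
        patchNative(compartment, fp->jit(), innerfp, innerpc, inlined, rejoin);
        f->stubRejoin = REJOIN_NATIVE_PATCHED;
    }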
--- a/js/src/methodjit/Retcon.h
+++ b/js/src/methodjit/Retcon.h
@@ -95,17 +95,17 @@ public:
     static void
     patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script);

 private:
     JSContext *cx;
     JSScript *script;

     static void patchCall(JITScript *jit, StackFrame *fp, void **location);
     static void patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *fp,
-                            jsbytecode *pc, RejoinState rejoin);
+                            jsbytecode *pc, CallSite *inlined, RejoinState rejoin);

     static StackFrame *
     expandInlineFrameChain(StackFrame *outer, InlineFrame *inner);

     /* Detach jit from any IC callers. */
     static void cleanup(JITScript *jit);
 };
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -177,17 +177,17 @@ StubCompiler::emitStubCall(void *ptr, Re

 JSC::MacroAssembler::Call
 StubCompiler::emitStubCall(void *ptr, RejoinState rejoin, Uses uses, int32 slots)
 {
     JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
     masm.bumpStubCounter(cc.script, cc.PC, Registers::tempCallReg());
     DataLabelPtr inlinePatch;
     Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
-                                  ptr, cc.outerPC(), &inlinePatch, slots);
+                                  ptr, cc.outerPC(), NULL, &inlinePatch, slots);
     JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");

     /* Add the call site for debugging and recompilation. */
     Compiler::InternalCallSite site(masm.callReturnOffset(cl),
                                     cc.inlineIndex(), cc.inlinePC(),
                                     rejoin, true);
     site.inlinePatch = inlinePatch;
--- a/js/src/methodjit/TrampolineCompiler.cpp
+++ b/js/src/methodjit/TrampolineCompiler.cpp
@@ -117,17 +117,17 @@ TrampolineCompiler::compileTrampoline(Tr
  */
 bool
 TrampolineCompiler::generateForceReturn(Assembler &masm)
 {
     /* The JSStackFrame register may have been clobbered while returning, reload it. */
     masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);

     /* Perform the frame epilogue. */
-    masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::AnyFrameEpilogue), NULL, NULL, 0);
+    masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::AnyFrameEpilogue), NULL, NULL, NULL, 0);

     /* Store any known return value */
     masm.loadValueAsComponents(UndefinedValue(), JSReturnReg_Type, JSReturnReg_Data);
     Jump rvalClear = masm.branchTest32(Assembler::Zero,
                                        FrameFlagsAddress(), Imm32(StackFrame::HAS_RVAL));
     Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
     masm.loadValueAsComponents(rvalAddress, JSReturnReg_Type, JSReturnReg_Data);
     rvalClear.linkTo(masm.label(), &masm);