--- a/js/src/jsopcode.tbl
+++ b/js/src/jsopcode.tbl
@@ -249,20 +249,20 @@ OPDEF(JSOP_UINT16, 88, "uint16",
OPDEF(JSOP_NEWINIT, 89, "newinit", NULL, 5, 0, 1, 19, JOF_UINT16PAIR)
OPDEF(JSOP_ENDINIT, 90, "endinit", NULL, 1, 0, 0, 19, JOF_BYTE)
OPDEF(JSOP_INITPROP, 91, "initprop", NULL, 3, 2, 1, 3, JOF_ATOM|JOF_PROP|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_INITELEM, 92, "initelem", NULL, 1, 3, 1, 3, JOF_BYTE |JOF_ELEM|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_DEFSHARP, 93, "defsharp", NULL, 5, 0, 0, 0, JOF_UINT16PAIR|JOF_SHARPSLOT)
OPDEF(JSOP_USESHARP, 94, "usesharp", NULL, 5, 0, 1, 0, JOF_UINT16PAIR|JOF_SHARPSLOT)
/* Fast inc/dec ops for args and locals. */
-OPDEF(JSOP_INCARG, 95, "incarg", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_INC|JOF_TMPSLOT2)
-OPDEF(JSOP_DECARG, 96, "decarg", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_DEC|JOF_TMPSLOT2)
-OPDEF(JSOP_ARGINC, 97, "arginc", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT2)
-OPDEF(JSOP_ARGDEC, 98, "argdec", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT2)
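+/* The method JIT now compiles arg inc/dec with stack temporaries, like local inc/dec, so allow three temp slots. */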
+OPDEF(JSOP_INCARG, 95, "incarg", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_INC|JOF_TMPSLOT3)
+OPDEF(JSOP_DECARG, 96, "decarg", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_DEC|JOF_TMPSLOT3)
+OPDEF(JSOP_ARGINC, 97, "arginc", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3)
+OPDEF(JSOP_ARGDEC, 98, "argdec", NULL, 3, 0, 1, 15, JOF_QARG |JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3)
OPDEF(JSOP_INCLOCAL, 99, "inclocal", NULL, 3, 0, 1, 15, JOF_LOCAL|JOF_NAME|JOF_INC|JOF_TMPSLOT3)
OPDEF(JSOP_DECLOCAL, 100,"declocal", NULL, 3, 0, 1, 15, JOF_LOCAL|JOF_NAME|JOF_DEC|JOF_TMPSLOT3)
OPDEF(JSOP_LOCALINC, 101,"localinc", NULL, 3, 0, 1, 15, JOF_LOCAL|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3)
OPDEF(JSOP_LOCALDEC, 102,"localdec", NULL, 3, 0, 1, 15, JOF_LOCAL|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3)
OPDEF(JSOP_IMACOP, 103,"imacop", NULL, 1, 0, 0, 0, JOF_BYTE)
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -88,17 +88,17 @@ mjit::Compiler::Compiler(JSContext *cx,
script(fp->script()),
scopeChain(&fp->scopeChain()),
globalObj(scopeChain->getGlobal()),
fun(fp->isFunctionFrame() && !fp->isEvalFrame()
? fp->fun()
: NULL),
isConstructing(fp->isConstructing()),
analysis(NULL), jumpMap(NULL), savedTraps(NULL),
- frame(cx, script, masm),
+ frame(cx, script, fun, masm),
branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
#if defined JS_MONOIC
mics(CompilerAllocPolicy(cx, *thisFromCtor())),
callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
#endif
#if defined JS_POLYIC
@@ -169,18 +169,17 @@ mjit::Compiler::performCompilation(JITSc
return Compile_Error;
if (analysis.failed()) {
JaegerSpew(JSpew_Abort, "couldn't analyze bytecode; probably switchX or OOM\n");
return Compile_Abort;
}
this->analysis = &analysis;
- uint32 nargs = fun ? fun->nargs : 0;
- if (!frame.init(nargs) || !stubcc.init(nargs))
+ if (!frame.init())
return Compile_Abort;
jumpMap = (Label *)cx->malloc(sizeof(Label) * script->length);
if (!jumpMap)
return Compile_Error;
#ifdef DEBUG
for (uint32 i = 0; i < script->length; i++)
jumpMap[i] = Label();
@@ -195,16 +194,18 @@ mjit::Compiler::performCompilation(JITSc
PC = script->code;
#ifdef JS_METHODJIT
script->debugMode = debugMode();
#endif
for (uint32 i = 0; i < script->nClosedVars; i++)
frame.setClosedVar(script->getClosedVar(i));
+ for (uint32 i = 0; i < script->nClosedArgs; i++)
+ frame.setClosedArg(script->getClosedArg(i));
CHECK_STATUS(generatePrologue());
CHECK_STATUS(generateMethod());
CHECK_STATUS(generateEpilogue());
CHECK_STATUS(finishThisUp(jitp));
#ifdef JS_METHODJIT_SPEW
prof.stop();
@@ -936,17 +937,17 @@ mjit::Compiler::generateMethod()
applyTricks = LazyArgsObj;
else
jsop_arguments();
frame.pushSynced();
END_CASE(JSOP_ARGUMENTS)
BEGIN_CASE(JSOP_FORARG)
iterNext();
- jsop_setarg(GET_SLOTNO(PC), true);
+ frame.storeArg(GET_SLOTNO(PC), true);
frame.pop();
END_CASE(JSOP_FORARG)
BEGIN_CASE(JSOP_FORLOCAL)
iterNext();
frame.storeLocal(GET_SLOTNO(PC), true);
frame.pop();
END_CASE(JSOP_FORLOCAL)
@@ -1263,17 +1264,17 @@ mjit::Compiler::generateMethod()
/* Push thisv onto stack. */
jsop_this();
if (!jsop_getprop(script->getAtom(fullAtomIndex(PC))))
return Compile_Error;
END_CASE(JSOP_GETTHISPROP);
BEGIN_CASE(JSOP_GETARGPROP)
/* Push arg onto stack. */
- jsop_getarg(GET_SLOTNO(PC));
+ frame.pushArg(GET_SLOTNO(PC));
if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[ARGNO_LEN]))))
return Compile_Error;
END_CASE(JSOP_GETARGPROP)
BEGIN_CASE(JSOP_GETLOCALPROP)
frame.pushLocal(GET_SLOTNO(PC));
if (!jsop_getprop(script->getAtom(fullAtomIndex(&PC[SLOTNO_LEN]))))
return Compile_Error;
@@ -1447,28 +1448,37 @@ mjit::Compiler::generateMethod()
inlineCallHelper(GET_ARGC(PC), true);
JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
}
END_CASE(JSOP_NEW)
BEGIN_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_CALLARG)
{
- jsop_getarg(GET_SLOTNO(PC));
+ frame.pushArg(GET_SLOTNO(PC));
if (op == JSOP_CALLARG)
frame.push(UndefinedValue());
}
END_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_BINDGNAME)
jsop_bindgname();
END_CASE(JSOP_BINDGNAME)
BEGIN_CASE(JSOP_SETARG)
- jsop_setarg(GET_SLOTNO(PC), JSOp(PC[JSOP_SETARG_LENGTH]) == JSOP_POP);
+ {
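+ // If the next op is a POP that is not a jump target, fuse SETARG;POP and skip over the pop.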
+ jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
+ bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
+ frame.storeArg(GET_SLOTNO(PC), pop);
+ if (pop) {
+ frame.pop();
+ PC += JSOP_SETARG_LENGTH + JSOP_POP_LENGTH;
+ break;
+ }
+ }
END_CASE(JSOP_SETARG)
BEGIN_CASE(JSOP_GETLOCAL)
{
uint32 slot = GET_SLOTNO(PC);
frame.pushLocal(slot);
}
END_CASE(JSOP_GETLOCAL)
@@ -1717,17 +1727,17 @@ mjit::Compiler::generateMethod()
INLINE_STUBCALL(stubs::DefVar);
}
END_CASE(JSOP_DEFVAR)
BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
{
uint32 slot = GET_SLOTNO(PC);
JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
- prepareStubCall(Uses(frame.frameDepth()));
+ prepareStubCall(Uses(frame.frameSlots()));
masm.move(ImmPtr(fun), Registers::ArgReg1);
INLINE_STUBCALL(stubs::DefLocalFun_FC);
frame.takeReg(Registers::ReturnReg);
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
frame.storeLocal(slot, true);
frame.pop();
}
END_CASE(JSOP_DEFLOCALFUN_FC)
@@ -1745,17 +1755,17 @@ mjit::Compiler::generateMethod()
if (next == JSOP_INITMETHOD) {
stub = stubs::LambdaForInit;
} else if (next == JSOP_SETMETHOD) {
stub = stubs::LambdaForSet;
uses = 1;
} else if (fun->joinable()) {
if (next == JSOP_CALL) {
stub = stubs::LambdaJoinableForCall;
- uses = frame.frameDepth();
+ uses = frame.frameSlots();
} else if (next == JSOP_NULL) {
stub = stubs::LambdaJoinableForNull;
}
}
prepareStubCall(Uses(uses));
masm.move(ImmPtr(fun), Registers::ArgReg1);
@@ -1776,19 +1786,22 @@ mjit::Compiler::generateMethod()
BEGIN_CASE(JSOP_TRY)
frame.syncAndForgetEverything();
END_CASE(JSOP_TRY)
BEGIN_CASE(JSOP_GETFCSLOT)
BEGIN_CASE(JSOP_CALLFCSLOT)
{
uintN index = GET_UINT16(PC);
- // JSObject *obj = &fp->argv[-2].toObject();
- RegisterID reg = frame.allocReg();
- masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), reg);
+
+ // Load the callee's payload into a register.
+ frame.pushCallee();
+ RegisterID reg = frame.copyDataIntoReg(frame.peek(-1));
+ frame.pop();
+
// obj->getFlatClosureUpvars()
masm.loadPtr(Address(reg, offsetof(JSObject, slots)), reg);
Address upvarAddress(reg, JSObject::JSSLOT_FLAT_CLOSURE_UPVARS * sizeof(Value));
masm.loadPrivate(upvarAddress, reg);
// push ((Value *) reg)[index]
frame.freeReg(reg);
frame.push(Address(reg, index * sizeof(Value)));
if (op == JSOP_CALLFCSLOT)
@@ -1924,17 +1937,17 @@ mjit::Compiler::generateMethod()
BEGIN_CASE(JSOP_HOLE)
frame.push(MagicValue(JS_ARRAY_HOLE));
END_CASE(JSOP_HOLE)
BEGIN_CASE(JSOP_LAMBDA_FC)
{
JSFunction *fun = script->getFunction(fullAtomIndex(PC));
- prepareStubCall(Uses(frame.frameDepth()));
+ prepareStubCall(Uses(frame.frameSlots()));
masm.move(ImmPtr(fun), Registers::ArgReg1);
INLINE_STUBCALL(stubs::FlatLambda);
frame.takeReg(Registers::ReturnReg);
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
}
END_CASE(JSOP_LAMBDA_FC)
BEGIN_CASE(JSOP_TRACE)
@@ -2171,21 +2184,25 @@ mjit::Compiler::loadReturnValue(Assemble
// is returned, either explicitly or implicitly, the newly created object is
// loaded out of the frame. Otherwise, the explicitly returned object is kept.
//
void
mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
{
JS_ASSERT(isConstructing);
+ bool ool = (masm != &this->masm);
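+ // When emitting into the out-of-line assembler, load |thisv| from its frame slot; on the inline path the FrameState can supply it.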
Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));
// Easy cases - no return value, or known primitive, so just return thisv.
if (!fe || (fe->isTypeKnown() && fe->getKnownType() != JSVAL_TYPE_OBJECT)) {
- masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
+ if (ool)
+ masm->loadValueAsComponents(thisv, JSReturnReg_Type, JSReturnReg_Data);
+ else
+ frame.loadThisForReturn(JSReturnReg_Type, JSReturnReg_Data, Registers::ReturnReg);
return;
}
// If the type is known to be an object, just load the return value as normal.
if (fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT) {
loadReturnValue(masm, fe);
return;
}
@@ -2237,17 +2254,17 @@ mjit::Compiler::emitReturn(FrameEntry *f
/* There will always be a call object. */
prepareStubCall(Uses(fe ? 1 : 0));
INLINE_STUBCALL(stubs::PutActivationObjects);
} else {
/* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
- stubcc.linkExit(putObjs, Uses(frame.frameDepth()));
+ stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
stubcc.leave();
OOL_STUBCALL(stubs::PutActivationObjects);
emitReturnValue(&stubcc.masm, fe);
emitFinalReturn(stubcc.masm);
}
}
@@ -2419,18 +2436,18 @@ mjit::Compiler::checkCallApplySpeculatio
OOL_STUBCALL(stubs::Arguments);
frameDepthAdjust = +1;
} else {
frameDepthAdjust = 0;
}
stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
- OOL_STUBCALL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
- frame.frameDepth() + frameDepthAdjust);
+ OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
+ frame.localSlots() + frameDepthAdjust);
JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
RegisterID r0 = Registers::ReturnReg;
Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
uncachedCallPatch->hasSlowNcode = true;
@@ -2571,27 +2588,27 @@ mjit::Compiler::inlineCallHelper(uint32
/*
* For f.call(), since we compile the ic under the (checked)
* assumption that call == js_fun_call, we still have a static
* frame size. For f.apply(), the frame size depends on the dynamic
* length of the array passed to apply.
*/
if (*PC == JSOP_FUNCALL)
- callIC.frameSize.initStatic(frame.frameDepth(), speculatedArgc - 1);
+ callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc - 1);
else
callIC.frameSize.initDynamic();
} else {
/* Leaves pinned regs untouched. */
frame.syncAndKill(Registers(Registers::AvailRegs), Uses(speculatedArgc + 2));
icCalleeType = origCalleeType;
icCalleeData = origCalleeData;
icRvalAddr = frame.addressOf(origCallee);
- callIC.frameSize.initStatic(frame.frameDepth(), speculatedArgc);
+ callIC.frameSize.initStatic(frame.localSlots(), speculatedArgc);
}
}
/* Test the type if necessary. Failing this always takes a really slow path. */
MaybeJump notObjectJump;
if (icCalleeType.isSet())
notObjectJump = masm.testObject(Assembler::NotEqual, icCalleeType.reg());
@@ -2655,19 +2672,19 @@ mjit::Compiler::inlineCallHelper(uint32
/*
* At this point the function is definitely scripted, so we try to
* compile it and patch either funGuard/funJump or oolJump. This code
* is only executed once.
*/
callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
if (callIC.frameSize.isStatic())
- callIC.oolCall = OOL_STUBCALL_SLOTS(icFunPtr, frame.frameDepth());
+ callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.localSlots());
else
- callIC.oolCall = OOL_STUBCALL_SLOTS(icFunPtr, -1);
+ callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);
callIC.funObjReg = icCalleeData;
callIC.funPtrReg = funPtrReg;
/*
* The IC call either returns NULL, meaning call completed, or a
* function pointer to jump to. Caveat: Must restore JSFrameReg
* because a new frame has been pushed.
@@ -3109,17 +3126,17 @@ mjit::Compiler::jsop_callprop_generic(JS
pic.objReg = objReg;
pic.shapeReg = shapeReg;
pic.atom = atom;
/*
* Store the type and object back. Don't bother keeping them in registers,
* since a sync will be needed for the upcoming call.
*/
- uint32 thisvSlot = frame.frameDepth();
+ uint32 thisvSlot = frame.localSlots();
Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
#if defined JS_NUNBOX32
masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
#elif defined JS_PUNBOX64
masm.orPtr(pic.objReg, pic.typeReg);
masm.storePtr(pic.typeReg, thisv);
#endif
frame.freeReg(pic.typeReg);
@@ -3698,47 +3715,41 @@ mjit::Compiler::jsop_bindname(uint32 ind
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
stubcc.rejoin(Changes(1));
}
#endif
void
-mjit::Compiler::jsop_getarg(uint32 slot)
-{
- frame.push(Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot)));
-}
-
-void
-mjit::Compiler::jsop_setarg(uint32 slot, bool popped)
-{
- FrameEntry *top = frame.peek(-1);
- RegisterID reg = frame.allocReg();
- Address address = Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot));
- frame.storeTo(top, address, popped);
- frame.freeReg(reg);
-}
-
-void
mjit::Compiler::jsop_this()
{
- Address thisvAddr(JSFrameReg, JSStackFrame::offsetOfThis(fun));
- frame.push(thisvAddr);
+ frame.pushThis();
+
/*
* In strict mode code, we don't wrap 'this'.
* In direct-call eval code, we wrapped 'this' before entering the eval.
* In global code, 'this' is always an object.
*/
if (fun && !script->strictModeCode) {
- Jump notObj = frame.testObject(Assembler::NotEqual, frame.peek(-1));
- stubcc.linkExit(notObj, Uses(1));
- stubcc.leave();
- OOL_STUBCALL(stubs::This);
- stubcc.rejoin(Changes(1));
+ FrameEntry *thisFe = frame.peek(-1);
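+ // Only emit the object check when the type of |this| is not already known.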
+ if (!thisFe->isTypeKnown()) {
+ Jump notObj = frame.testObject(Assembler::NotEqual, thisFe);
+ stubcc.linkExit(notObj, Uses(1));
+ stubcc.leave();
+ OOL_STUBCALL(stubs::This);
+ stubcc.rejoin(Changes(1));
+
+ // Now we know that |this| is an object.
+ frame.pop();
+ frame.learnThisIsObject();
+ frame.pushThis();
+ }
+
+ JS_ASSERT(thisFe->isType(JSVAL_TYPE_OBJECT));
}
}
void
mjit::Compiler::jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index)
{
#if defined JS_MONOIC
jsbytecode *next = &PC[JSOP_GNAMEINC_LENGTH];
@@ -4753,17 +4764,17 @@ mjit::Compiler::enterBlock(JSObject *obj
{
// If this is an exception entry point, then jsl_InternalThrow has set
// VMFrame::fp to the correct fp for the entry point. We need to copy
// that value here to FpReg so that FpReg also has the correct sp.
// Otherwise, we would simply be using a stale FpReg value.
if (analysis->getCode(PC).exceptionEntry)
restoreFrameRegs(masm);
- uint32 oldFrameDepth = frame.frameDepth();
+ uint32 oldFrameDepth = frame.localSlots();
/* For now, don't bother doing anything for this opcode. */
frame.syncAndForgetEverything();
masm.move(ImmPtr(obj), Registers::ArgReg1);
uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
INLINE_STUBCALL(stubs::EnterBlock);
frame.enterBlock(n);
@@ -4801,20 +4812,17 @@ mjit::Compiler::leaveBlock()
// call js_CreateThisFromFunctionWithProto(...)
//
bool
mjit::Compiler::constructThis()
{
JS_ASSERT(isConstructing);
// Load the callee.
- Address callee(JSFrameReg, JSStackFrame::offsetOfCallee(fun));
- RegisterID calleeReg = frame.allocReg();
- masm.loadPayload(callee, calleeReg);
- frame.pushTypedPayload(JSVAL_TYPE_OBJECT, calleeReg);
+ frame.pushCallee();
// Get callee.prototype.
if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, false, false))
return false;
// Reach into the proto Value and grab a register for its data.
FrameEntry *protoFe = frame.peek(-1);
RegisterID protoReg = frame.ownRegForData(protoFe);
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -480,16 +480,16 @@ class Compiler : public BaseCompiler
// Given a stub call, emits the call into the out-of-line assembly path. If
// debug mode is on, adds the appropriate instrumentation for recompilation.
// Unlike the INLINE_STUBCALL variant, this returns the Call offset.
#define OOL_STUBCALL(stub) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), __LINE__) \
// Same as OOL_STUBCALL, but specifies a slot depth.
-#define OOL_STUBCALL_SLOTS(stub, slots) \
+#define OOL_STUBCALL_LOCAL_SLOTS(stub, slots) \
stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), (slots), __LINE__) \
} /* namespace js */
} /* namespace mjit */
#endif
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -986,17 +986,17 @@ mjit::Compiler::jsop_equality_int_string
ValueRemat lvr, rvr;
frame.pinEntry(lhs, lvr);
frame.pinEntry(rhs, rvr);
/*
* Sync everything except the top two entries.
* We will handle the lhs/rhs in the stub call path.
*/
- frame.syncAndKill(Registers(Registers::AvailRegs), Uses(frame.frameDepth()), Uses(2));
+ frame.syncAndKill(Registers(Registers::AvailRegs), Uses(frame.frameSlots()), Uses(2));
RegisterID tempReg = frame.allocReg();
frame.pop();
frame.pop();
frame.discardFrame();
/* Start of the slow path for equality stub call. */
@@ -1021,23 +1021,24 @@ mjit::Compiler::jsop_equality_int_string
ic.stub = stub;
bool useIC = !addTraceHints || target >= PC;
/* Call the IC stub, which may generate a fast path. */
if (useIC) {
/* Adjust for the two values just pushed. */
ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
- ic.stubCall = OOL_STUBCALL_SLOTS(ic::Equality, frame.stackDepth() + script->nfixed + 2);
+ ic.stubCall = OOL_STUBCALL_LOCAL_SLOTS(ic::Equality,
+ frame.stackDepth() + script->nfixed + 2);
needStub = false;
}
#endif
if (needStub)
- OOL_STUBCALL_SLOTS(stub, frame.stackDepth() + script->nfixed + 2);
+ OOL_STUBCALL_LOCAL_SLOTS(stub, frame.stackDepth() + script->nfixed + 2);
/*
* The stub call has no need to rejoin, since state is synced.
* Instead, we can just test the return value.
*/
Assembler::Condition ncond = (fused == JSOP_IFEQ)
? Assembler::Zero
: Assembler::NonZero;
@@ -1413,17 +1414,17 @@ mjit::Compiler::jsop_relational_full(JSO
*
* Note: doubles have not been swapped yet. Use original op.
*/
MaybeJump doubleTest, doubleFall;
Assembler::DoubleCondition dblCond = DoubleCondForOp(op, fused);
if (hasDoublePath) {
if (lhsUnknownDone.isSet())
lhsUnknownDone.get().linkTo(stubcc.masm.label(), &stubcc.masm);
- frame.sync(stubcc.masm, Uses(frame.frameDepth()));
+ frame.sync(stubcc.masm, Uses(frame.frameSlots()));
doubleTest = stubcc.masm.branchDouble(dblCond, fpLeft, fpRight);
doubleFall = stubcc.masm.jump();
/* Link all incoming slow paths to here. */
if (lhsNotDouble.isSet()) {
lhsNotDouble.get().linkTo(stubcc.masm.label(), &stubcc.masm);
if (rhsNotNumber.isSet())
rhsNotNumber.get().linkTo(stubcc.masm.label(), &stubcc.masm);
@@ -1431,17 +1432,17 @@ mjit::Compiler::jsop_relational_full(JSO
if (rhsNotNumber2.isSet())
rhsNotNumber2.get().linkTo(stubcc.masm.label(), &stubcc.masm);
/*
* For fusions, spill the tracker state. xmm* remain intact. Note
* that frame.sync() must be used directly, to avoid syncExit()'s
* jumping logic.
*/
- frame.sync(stubcc.masm, Uses(frame.frameDepth()));
+ frame.sync(stubcc.masm, Uses(frame.frameSlots()));
stubcc.leave();
OOL_STUBCALL(stub);
}
/* Forget the world, preserving data. */
frame.pinReg(cmpReg);
if (reg.isSet())
frame.pinReg(reg.reg());
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -967,17 +967,17 @@ mjit::Compiler::booleanJumpScript(JSOp o
/* OOL path: Conversion to boolean. */
MaybeJump jmpCvtExecScript;
MaybeJump jmpCvtRejoin;
Label lblCvtPath = stubcc.masm.label();
if (!fe->isTypeKnown() ||
!(fe->isType(JSVAL_TYPE_BOOLEAN) || fe->isType(JSVAL_TYPE_INT32))) {
stubcc.masm.infallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::ValueToBoolean),
- frame.frameDepth());
+ frame.localSlots());
jmpCvtExecScript.setJump(stubcc.masm.branchTest32(cond, Registers::ReturnReg,
Registers::ReturnReg));
jmpCvtRejoin.setJump(stubcc.masm.jump());
}
/* Rejoin tag. */
Label lblAfterScript = masm.label();
@@ -1139,55 +1139,69 @@ mjit::Compiler::jsop_localinc(JSOp op, u
frame.forgetType(frame.peek(-1));
stubcc.rejoin(Changes(0));
}
void
mjit::Compiler::jsop_arginc(JSOp op, uint32 slot, bool popped)
{
- int amt = (js_CodeSpec[op].format & JOF_INC) ? 1 : -1;
- bool post = !!(js_CodeSpec[op].format & JOF_POST);
- uint32 depth = frame.stackDepth();
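+ // If the result is popped, or this is a pre-increment/decrement, only the new value needs to reach the stack.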
+ if (popped || (op == JSOP_INCARG || op == JSOP_DECARG)) {
+ int amt = (op == JSOP_ARGINC || op == JSOP_INCARG) ? -1 : 1;
+
+ // Before:
+ // After: V
+ frame.pushArg(slot);
+
+ // Before: V
+ // After: V 1
+ frame.push(Int32Value(amt));
+
+ // Note, SUB will perform integer conversion for us.
+ // Before: V 1
+ // After: N+1
+ jsop_binary(JSOP_SUB, stubs::Sub);
- jsop_getarg(slot);
- if (post && !popped)
+ // Before: N+1
+ // After: N+1
+ frame.storeArg(slot, popped);
+
+ if (popped)
+ frame.pop();
+ } else {
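+ // Post-increment/decrement whose result is used: the old value stays on the stack while the new value is stored back.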
+ int amt = (op == JSOP_ARGINC || op == JSOP_INCARG) ? 1 : -1;
+
+ // Before:
+ // After: V
+ frame.pushArg(slot);
+
+ // Before: V
+ // After: N
+ jsop_pos();
+
+ // Before: N
+ // After: N N
frame.dup();
- FrameEntry *fe = frame.peek(-1);
- Jump notInt = frame.testInt32(Assembler::NotEqual, fe);
- stubcc.linkExit(notInt, Uses(0));
-
- RegisterID reg = frame.ownRegForData(fe);
- frame.pop();
+ // Before: N N
+ // After: N N 1
+ frame.push(Int32Value(amt));
- Jump ovf;
- if (amt > 0)
- ovf = masm.branchAdd32(Assembler::Overflow, Imm32(1), reg);
- else
- ovf = masm.branchSub32(Assembler::Overflow, Imm32(1), reg);
- stubcc.linkExit(ovf, Uses(0));
+ // Before: N N 1
+ // After: N N+1
+ jsop_binary(JSOP_ADD, stubs::Add);
- stubcc.leave();
- stubcc.masm.addPtr(Imm32(JSStackFrame::offsetOfFormalArg(fun, slot)),
- JSFrameReg, Registers::ArgReg1);
- stubcc.vpInc(op, depth);
-
- frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
- fe = frame.peek(-1);
+ // Before: N N+1
+ // After: N N+1
+ frame.storeArg(slot, true);
- Address address = Address(JSFrameReg, JSStackFrame::offsetOfFormalArg(fun, slot));
- frame.storeTo(fe, address, popped);
-
- if (post || popped)
+ // Before: N N+1
+ // After: N
frame.pop();
- else
- frame.forgetType(fe);
-
- stubcc.rejoin(Changes((post || popped) ? 0 : 1));
+ }
}
static inline bool
IsCacheableSetElem(FrameEntry *obj, FrameEntry *id, FrameEntry *value)
{
if (obj->isNotType(JSVAL_TYPE_OBJECT))
return false;
if (id->isNotType(JSVAL_TYPE_INT32))
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -44,17 +44,17 @@ namespace js {
namespace mjit {
inline void
FrameState::addToTracker(FrameEntry *fe)
{
JS_ASSERT(!fe->isTracked());
fe->track(tracker.nentries);
tracker.add(fe);
- JS_ASSERT(tracker.nentries <= script->nslots);
+ JS_ASSERT(tracker.nentries <= feLimit());
}
inline FrameEntry *
FrameState::peek(int32 depth)
{
JS_ASSERT(depth < 0);
JS_ASSERT(sp + depth >= spBase);
FrameEntry *fe = &sp[depth];
@@ -188,17 +188,17 @@ FrameState::syncAndForgetEverything(uint
{
syncAndForgetEverything();
sp = spBase + newStackDepth;
}
inline FrameEntry *
FrameState::rawPush()
{
- JS_ASSERT(unsigned(sp - entries) < nargs + script->nslots);
+ JS_ASSERT(unsigned(sp - entries) < feLimit());
if (!sp->isTracked())
addToTracker(sp);
return sp++;
}
inline void
@@ -696,20 +696,27 @@ FrameState::learnType(FrameEntry *fe, JS
fe->isNumber = false;
#endif
fe->setType(type);
}
inline JSC::MacroAssembler::Address
FrameState::addressOf(const FrameEntry *fe) const
{
- uint32 index = (fe - entries);
- JS_ASSERT(index >= nargs);
- index -= nargs;
- return Address(JSFrameReg, sizeof(JSStackFrame) + sizeof(Value) * index);
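+ // Compute the frame-relative offset from the entry's category: locals and stack slots via offsetOfFixed, formal args via offsetOfFormalArg, and the |this|/callee slots via their fixed offsets.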
+ int32 frameOffset = 0;
+ if (fe >= locals)
+ frameOffset = JSStackFrame::offsetOfFixed(uint32(fe - locals));
+ else if (fe >= args)
+ frameOffset = JSStackFrame::offsetOfFormalArg(fun, uint32(fe - args));
+ else if (fe == this_)
+ frameOffset = JSStackFrame::offsetOfThis(fun);
+ else if (fe == callee_)
+ frameOffset = JSStackFrame::offsetOfCallee(fun);
+ JS_ASSERT(frameOffset);
+ return Address(JSFrameReg, frameOffset);
}
inline JSC::MacroAssembler::Address
FrameState::addressForDataRemat(const FrameEntry *fe) const
{
if (fe->isCopy() && !fe->data.synced())
fe = fe->copyOf();
JS_ASSERT(fe->data.synced());
@@ -784,27 +791,59 @@ FrameState::testString(Assembler::Condit
{
JS_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
if (shouldAvoidTypeRemat(fe))
return masm.testString(cond, addressOf(fe));
return masm.testString(cond, tempRegForType(fe));
}
inline FrameEntry *
-FrameState::getLocal(uint32 slot)
+FrameState::getOrTrack(uint32 index)
{
- uint32 index = nargs + slot;
FrameEntry *fe = &entries[index];
if (!fe->isTracked()) {
addToTracker(fe);
fe->resetSynced();
}
return fe;
}
+inline FrameEntry *
+FrameState::getLocal(uint32 slot)
+{
+ JS_ASSERT(slot < script->nslots);
+ return getOrTrack(uint32(&locals[slot] - entries));
+}
+
+inline FrameEntry *
+FrameState::getArg(uint32 slot)
+{
+ JS_ASSERT(slot < nargs);
+ return getOrTrack(uint32(&args[slot] - entries));
+}
+
+inline FrameEntry *
+FrameState::getThis()
+{
+ return getOrTrack(uint32(this_ - entries));
+}
+
+inline FrameEntry *
+FrameState::getCallee()
+{
+ // Callee can only be used in function code, and it's always an object.
+ JS_ASSERT(fun);
+ if (!callee_->isTracked()) {
+ addToTracker(callee_);
+ callee_->resetSynced();
+ callee_->setType(JSVAL_TYPE_OBJECT);
+ }
+ return callee_;
+}
+
inline void
FrameState::pinReg(RegisterID reg)
{
regstate[reg].pin();
}
inline void
FrameState::unpinReg(RegisterID reg)
@@ -836,22 +875,16 @@ FrameState::swapInTracker(FrameEntry *lh
JS_ASSERT(tracker[li] == lhs);
JS_ASSERT(tracker[ri] == rhs);
tracker.entries[ri] = lhs;
tracker.entries[li] = rhs;
lhs->index_ = ri;
rhs->index_ = li;
}
-inline uint32
-FrameState::localIndex(uint32 n)
-{
- return nargs + n;
-}
-
inline void
FrameState::dup()
{
dupAt(-1);
}
inline void
FrameState::dup2()
@@ -868,36 +901,78 @@ FrameState::dupAt(int32 n)
JS_ASSERT(n < 0);
FrameEntry *fe = peek(n);
pushCopyOf(indexOfFe(fe));
}
inline void
FrameState::pushLocal(uint32 n)
{
- if (!eval && !isClosedVar(n)) {
- pushCopyOf(indexOfFe(getLocal(n)));
+ FrameEntry *fe = getLocal(n);
+ if (!isClosedVar(n)) {
+ pushCopyOf(indexOfFe(fe));
} else {
#ifdef DEBUG
/*
* We really want to assert on local variables, but in the presence of
* SETLOCAL equivocation of stack slots, and let expressions, just
* weakly assert on the fixed local vars.
*/
FrameEntry *fe = &locals[n];
if (fe->isTracked() && n < script->nfixed) {
JS_ASSERT(fe->type.inMemory());
JS_ASSERT(fe->data.inMemory());
}
#endif
- push(Address(JSFrameReg, sizeof(JSStackFrame) + n * sizeof(Value)));
+ push(addressOf(fe));
+ }
+}
+
+inline void
+FrameState::pushArg(uint32 n)
+{
+ FrameEntry *fe = getArg(n);
+ if (!isClosedArg(n)) {
+ pushCopyOf(indexOfFe(fe));
+ } else {
+#ifdef DEBUG
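+ // Closed-over args stay synced (see storeArg/finishStore), so assert they are in memory.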
+ FrameEntry *fe = &args[n];
+ if (fe->isTracked()) {
+ JS_ASSERT(fe->type.inMemory());
+ JS_ASSERT(fe->data.inMemory());
+ }
+#endif
+ push(addressOf(fe));
}
}
inline void
+FrameState::pushCallee()
+{
+ FrameEntry *fe = getCallee();
+ pushCopyOf(indexOfFe(fe));
+}
+
+inline void
+FrameState::pushThis()
+{
+ FrameEntry *fe = getThis();
+ pushCopyOf(indexOfFe(fe));
+}
+
+inline void
+FrameState::learnThisIsObject()
+{
+ // This is safe, albeit hacky. This is only called from the compiler,
+ // and only on the first use of |this| inside a basic block. Thus,
+ // there are no copies of |this| anywhere.
+ learnType(this_, JSVAL_TYPE_OBJECT);
+}
+
+inline void
FrameState::leaveBlock(uint32 n)
{
popn(n);
}
inline void
FrameState::enterBlock(uint32 n)
{
@@ -922,16 +997,23 @@ FrameState::eviscerate(FrameEntry *fe)
inline void
FrameState::setClosedVar(uint32 slot)
{
if (!eval)
closedVars[slot] = true;
}
+inline void
+FrameState::setClosedArg(uint32 slot)
+{
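+ // In eval frames, or scripts that use |arguments|, every arg is already treated as closed (see isClosedArg), so there is nothing to record.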
+ if (!eval && !usesArguments)
+ closedArgs[slot] = true;
+}
+
inline StateRemat
FrameState::dataRematInfo(const FrameEntry *fe) const
{
if (fe->isCopy())
fe = fe->copyOf();
if (fe->data.inRegister())
return StateRemat::FromRegister(fe->data.reg());
@@ -1008,17 +1090,23 @@ FrameState::loadDouble(FrameEntry *fe, F
ensureFeSynced(fe, masm);
masm.loadDouble(addressOf(fe), fpReg);
}
inline bool
FrameState::isClosedVar(uint32 slot)
{
- return closedVars[slot];
+ return eval || closedVars[slot];
+}
+
+inline bool
+FrameState::isClosedArg(uint32 slot)
+{
+ return eval || usesArguments || closedArgs[slot];
}
class PinRegAcrossSyncAndKill
{
typedef JSC::MacroAssembler::RegisterID RegisterID;
FrameState &frame;
MaybeRegisterID maybeReg;
public:
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -41,70 +41,87 @@
#include "FrameState-inl.h"
using namespace js;
using namespace js::mjit;
/* Because of Value alignment */
JS_STATIC_ASSERT(sizeof(FrameEntry) % 8 == 0);
-FrameState::FrameState(JSContext *cx, JSScript *script, Assembler &masm)
- : cx(cx), script(script), masm(masm), entries(NULL),
+FrameState::FrameState(JSContext *cx, JSScript *script, JSFunction *fun, Assembler &masm)
+ : cx(cx), script(script), fun(fun),
+ nargs(fun ? fun->nargs : 0),
+ masm(masm), entries(NULL),
#if defined JS_NUNBOX32
reifier(cx, *thisFromCtor()),
#endif
+ closedVars(NULL),
+ closedArgs(NULL),
+ usesArguments(script->usesArguments),
inTryBlock(false)
{
}
FrameState::~FrameState()
{
cx->free(entries);
}
bool
-FrameState::init(uint32 nargs)
+FrameState::init()
{
- this->nargs = nargs;
-
- uint32 nslots = script->nslots + nargs;
- if (!nslots) {
+ // nslots + nargs + 2 (callee, this)
+ uint32 nentries = feLimit();
+ if (!nentries) {
sp = spBase = locals = args = NULL;
return true;
}
eval = script->usesEval || cx->compartment->debugMode;
- size_t totalBytes = sizeof(FrameEntry) * nslots + // entries[]
- sizeof(FrameEntry *) * nslots + // tracker.entries
- (eval ? 0 : sizeof(JSPackedBool) * nslots); // closedVars[]
+ size_t totalBytes = sizeof(FrameEntry) * nentries + // entries[], w/ callee+this
+ sizeof(FrameEntry *) * nentries + // tracker.entries
+ (eval
+ ? 0
+ : sizeof(JSPackedBool) * script->nslots) + // closedVars[]
+ (eval || usesArguments
+ ? 0
+ : sizeof(JSPackedBool) * nargs); // closedArgs[]
uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
if (!cursor)
return false;
#if defined JS_NUNBOX32
- if (!reifier.init(nslots))
+ if (!reifier.init(nentries))
return false;
#endif
entries = (FrameEntry *)cursor;
- cursor += sizeof(FrameEntry) * nslots;
+ cursor += sizeof(FrameEntry) * nentries;
- args = entries;
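+ // Entry layout: [callee][this][formal args...][locals...][stack slots...]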
+ callee_ = entries;
+ this_ = entries + 1;
+ args = entries + 2;
locals = args + nargs;
spBase = locals + script->nfixed;
sp = spBase;
tracker.entries = (FrameEntry **)cursor;
- cursor += sizeof(FrameEntry *) * nslots;
+ cursor += sizeof(FrameEntry *) * nentries;
- if (!eval && nslots) {
- closedVars = (JSPackedBool *)cursor;
- cursor += sizeof(JSPackedBool) * nslots;
+ if (!eval) {
+ if (script->nslots) {
+ closedVars = (JSPackedBool *)cursor;
+ cursor += sizeof(JSPackedBool) * script->nslots;
+ }
+ if (!usesArguments && nargs) {
+ closedArgs = (JSPackedBool *)cursor;
+ cursor += sizeof(JSPackedBool) * nargs;
+ }
}
JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
return true;
}
void
@@ -175,17 +192,17 @@ FrameState::evictSomeReg(uint32 mask)
evictReg(fallback);
return fallback;
}
void
FrameState::syncAndForgetEverything()
{
- syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
+ syncAndKill(Registers(Registers::AvailRegs), Uses(frameSlots()));
forgetEverything();
}
void
FrameState::resetInternalState()
{
for (uint32 i = 0; i < tracker.nentries; i++)
tracker[i]->untrack();
@@ -321,16 +338,22 @@ FrameState::storeTo(FrameEntry *fe, Addr
if (popped)
freeReg(reg);
else
fe->type.setRegister(reg);
}
#endif
}
+void
+FrameState::loadThisForReturn(RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
+{
+ return loadForReturn(getThis(), typeReg, dataReg, tempReg);
+}
+
void FrameState::loadForReturn(FrameEntry *fe, RegisterID typeReg, RegisterID dataReg, RegisterID tempReg)
{
JS_ASSERT(dataReg != typeReg && dataReg != tempReg && typeReg != tempReg);
if (fe->isConstant()) {
masm.loadValueAsComponents(fe->getValue(), typeReg, dataReg);
return;
}
@@ -1134,35 +1157,50 @@ FrameState::uncopy(FrameEntry *original)
fe->data.inherit(original->data);
if (fe->data.inRegister())
regstate[fe->data.reg()].reassociate(fe);
return fe;
}
void
+FrameState::finishStore(FrameEntry *fe, bool closed)
+{
+ // Make sure the backing store entry is synced to memory, then if it's
+ // closed, forget it entirely (removing all copies) and reset it to a
+ // synced, in-memory state.
+ syncFe(fe);
+ if (closed) {
+ if (!fe->isCopy())
+ forgetEntry(fe);
+ fe->resetSynced();
+ }
+}
+
+void
FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
{
FrameEntry *local = getLocal(n);
-
storeTop(local, popGuaranteed, typeChange);
- bool closed = eval || isClosedVar(n);
+ bool closed = isClosedVar(n);
if (!closed && !inTryBlock)
return;
- /* Ensure that the local variable remains synced. */
- syncFe(local);
+ finishStore(local, closed);
+}
- if (closed) {
- /* If the FE can have registers, free them before resetting. */
- if (!local->isCopy())
- forgetEntry(local);
- local->resetSynced();
- }
+void
+FrameState::storeArg(uint32 n, bool popGuaranteed)
+{
+ // Note that args are always immediately synced, because they can be
+ // aliased (but not written to) via f.arguments.
+ FrameEntry *arg = getArg(n);
+ storeTop(arg, popGuaranteed, true);
+ finishStore(arg, isClosedArg(n));
}
void
FrameState::forgetEntry(FrameEntry *fe)
{
if (fe->isCopied()) {
uncopy(fe);
if (!fe->isCopied())
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -233,19 +233,19 @@ class FrameState
FrameEntry *save_;
/* Part of the FrameEntry that owns the FE. */
RematInfo::RematType type_;
};
FrameState *thisFromCtor() { return this; }
public:
- FrameState(JSContext *cx, JSScript *script, Assembler &masm);
+ FrameState(JSContext *cx, JSScript *script, JSFunction *fun, Assembler &masm);
~FrameState();
- bool init(uint32 nargs);
+ bool init();
/*
* Pushes a synced slot.
*/
inline void pushSynced();
/*
* Pushes a slot that has a known, synced type and payload.
@@ -326,20 +326,23 @@ class FrameState
inline bool haveSameBacking(FrameEntry *lhs, FrameEntry *rhs);
/*
* Temporarily increase and decrease local variable depth.
*/
inline void enterBlock(uint32 n);
inline void leaveBlock(uint32 n);
- /*
- * Pushes a copy of a local variable.
- */
+ // Pushes a copy of a slot (formal argument, local variable, or stack slot)
+ // onto the operand stack.
void pushLocal(uint32 n);
+ void pushArg(uint32 n);
+ void pushCallee();
+ void pushThis();
+ inline void learnThisIsObject();
/*
* Allocates a temporary register for a FrameEntry's type. The register
* can be spilled or clobbered by the frame. The compiler may only operate
* on it temporarily, and must take care not to clobber it.
*/
inline RegisterID tempRegForType(FrameEntry *fe);
@@ -541,22 +544,25 @@ class FrameState
*/
void storeTo(FrameEntry *fe, Address address, bool popHint = false);
/*
* Fully stores a FrameEntry into two arbitrary registers. tempReg may be
* used as a temporary.
*/
void loadForReturn(FrameEntry *fe, RegisterID typeReg, RegisterID dataReg, RegisterID tempReg);
+ void loadThisForReturn(RegisterID typeReg, RegisterID dataReg, RegisterID tempReg);
/*
* Stores the top stack slot back to a slot.
*/
void storeLocal(uint32 n, bool popGuaranteed = false, bool typeChange = true);
+ void storeArg(uint32 n, bool popGuaranteed = false);
void storeTop(FrameEntry *target, bool popGuaranteed = false, bool typeChange = true);
+ void finishStore(FrameEntry *fe, bool closed);
/*
* Restores state from a slow path.
*/
void merge(Assembler &masm, Changes changes) const;
/*
* Writes unsynced stores to an arbitrary buffer.
@@ -567,17 +573,17 @@ class FrameState
* Syncs all outstanding stores to memory and possibly kills regs in the
* process. The top [ignored..uses-1] frame entries will be synced.
*/
void syncAndKill(Registers kill, Uses uses, Uses ignored);
void syncAndKill(Registers kill, Uses uses) { syncAndKill(kill, uses, Uses(0)); }
/* Syncs and kills everything. */
void syncAndKillEverything() {
- syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
+ syncAndKill(Registers(Registers::AvailRegs), Uses(frameSlots()));
}
/*
* Clear all tracker entries, syncing all outstanding stores in the process.
* The stack depth is in case some merge points' edges did not immediately
* precede the current instruction.
*/
inline void syncAndForgetEverything(uint32 newStackDepth);
@@ -701,27 +707,42 @@ class FrameState
inline void dupAt(int32 n);
/*
* If the frameentry is a copy, give it its own registers.
* This may only be called on the topmost fe.
*/
inline void giveOwnRegs(FrameEntry *fe);
- /*
- * Returns the current stack depth of the frame.
- */
uint32 stackDepth() const { return sp - spBase; }
- uint32 frameDepth() const { return stackDepth() + script->nfixed; }
+
+ // Returns the number of entries in the frame, that is:
+ // 2 for callee, this +
+ // nargs +
+ // nfixed +
+ // currently pushed stack slots
+ uint32 frameSlots() const { return uint32(sp - entries); }
+
+ // Returns the number of local variables and active stack slots.
+ uint32 localSlots() const { return uint32(sp - locals); }
#ifdef DEBUG
void assertValidRegisterState() const;
#endif
+ // Returns an address, relative to the JSStackFrame, that represents where
+ // this FrameEntry is stored in memory. Note that this is its canonical
+ // address, not its backing store. There is no guarantee that the memory
+ // is coherent.
Address addressOf(const FrameEntry *fe) const;
+
+ // Returns an address, relative to the JSStackFrame, that represents where
+ // this FrameEntry is backed in memory. This is not necessarily its
+ // canonical address, but the address for which the payload has been synced
+ // to memory. The caller guarantees that the payload has been synced.
Address addressForDataRemat(const FrameEntry *fe) const;
inline StateRemat dataRematInfo(const FrameEntry *fe) const;
/*
* This is similar to freeReg(ownRegForData(fe)) - except no movement takes place.
* The fe is simply invalidated as if it were popped. This can be used to free
* registers in the working area of the stack. Obviously, this can only be called
@@ -737,20 +758,19 @@ class FrameState
/*
* Stores the top item on the stack to a stack slot, count down from the
* current stack depth. For example, to move the top (-1) to -3, you would
* call shift(-2).
*/
void shift(int32 n);
- /*
- * Notifies the frame of a slot that can escape.
- */
+ // Notifies the frame that a local variable or argument slot is closed over.
inline void setClosedVar(uint32 slot);
+ inline void setClosedArg(uint32 slot);
inline void setInTryBlock(bool inTryBlock) {
this->inTryBlock = inTryBlock;
}
private:
inline RegisterID allocReg(FrameEntry *fe, RematInfo::RematType type);
inline void forgetReg(RegisterID reg);
@@ -764,20 +784,23 @@ class FrameState
inline void ensureTypeSynced(const FrameEntry *fe, Assembler &masm) const;
inline void ensureDataSynced(const FrameEntry *fe, Assembler &masm) const;
/* Guarantee sync, even if register allocation is required, and set sync. */
inline void syncFe(FrameEntry *fe);
inline void syncType(FrameEntry *fe);
inline void syncData(FrameEntry *fe);
+ inline FrameEntry *getOrTrack(uint32 index);
inline FrameEntry *getLocal(uint32 slot);
+ inline FrameEntry *getArg(uint32 slot);
+ inline FrameEntry *getCallee();
+ inline FrameEntry *getThis();
inline void forgetAllRegs(FrameEntry *fe);
inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs);
- inline uint32 localIndex(uint32 n);
void pushCopyOf(uint32 index);
#if defined JS_NUNBOX32
void syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const;
#endif
inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const;
void resetInternalState();
@@ -799,42 +822,46 @@ class FrameState
*/
void forgetEntry(FrameEntry *fe);
FrameEntry *entryFor(uint32 index) const {
JS_ASSERT(entries[index].isTracked());
return &entries[index];
}
- RegisterID evictSomeReg() {
- return evictSomeReg(Registers::AvailRegs);
- }
-
- uint32 indexOf(int32 depth) {
+ RegisterID evictSomeReg() { return evictSomeReg(Registers::AvailRegs); }
+ uint32 indexOf(int32 depth) const {
+ JS_ASSERT(uint32((sp + depth) - entries) < feLimit());
return uint32((sp + depth) - entries);
}
-
uint32 indexOfFe(FrameEntry *fe) const {
+ JS_ASSERT(uint32(fe - entries) < feLimit());
return uint32(fe - entries);
}
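+ // Total number of FrameEntries: callee + this + nargs formals + script->nslots.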
+ uint32 feLimit() const { return script->nslots + nargs + 2; }
inline bool isClosedVar(uint32 slot);
+ inline bool isClosedArg(uint32 slot);
private:
JSContext *cx;
JSScript *script;
+ JSFunction *fun;
uint32 nargs;
Assembler &masm;
/* All allocated registers. */
Registers freeRegs;
/* Cache of FrameEntry objects. */
FrameEntry *entries;
+ FrameEntry *callee_;
+ FrameEntry *this_;
+
/* Base pointer for arguments. */
FrameEntry *args;
/* Base pointer for local variables. */
FrameEntry *locals;
/* Base pointer for the stack. */
FrameEntry *spBase;
@@ -851,17 +878,19 @@ class FrameState
*/
RegisterState regstate[Assembler::TotalRegisters];
#if defined JS_NUNBOX32
mutable ImmutableSync reifier;
#endif
JSPackedBool *closedVars;
+ JSPackedBool *closedArgs;
bool eval;
+ bool usesArguments;
bool inTryBlock;
};
class AutoPreserveAcrossSyncAndKill;
} /* namespace mjit */
} /* namespace js */
--- a/js/src/methodjit/InlineFrameAssembler.h
+++ b/js/src/methodjit/InlineFrameAssembler.h
@@ -106,17 +106,17 @@ class InlineFrameAssembler {
DataLabelPtr assemble(void *ncode)
{
JS_ASSERT((flags & ~JSFRAME_CONSTRUCTING) == 0);
/* Generate JSStackFrame::initCallFrameCallerHalf. */
DataLabelPtr ncodePatch;
if (frameSize.isStatic()) {
- uint32 frameDepth = frameSize.staticFrameDepth();
+ uint32 frameDepth = frameSize.staticLocalSlots();
AdjustedFrame newfp(sizeof(JSStackFrame) + frameDepth * sizeof(Value));
Address flagsAddr = newfp.addrOf(JSStackFrame::offsetOfFlags());
masm.store32(Imm32(JSFRAME_FUNCTION | flags), flagsAddr);
Address prevAddr = newfp.addrOf(JSStackFrame::offsetOfPrev());
masm.storePtr(JSFrameReg, prevAddr);
Address ncodeAddr = newfp.addrOf(JSStackFrame::offsetOfncode());
ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr);
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -523,17 +523,17 @@ class CallCompiler : public BaseCompiler
masm.loadPtr(Address(t0, offset), t0);
Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));
/* Try and compile. On success we get back the nmap pointer. */
masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
if (ic.frameSize.isStatic()) {
masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
- masm.fallibleVMCall(compilePtr, script->code, ic.frameSize.staticFrameDepth());
+ masm.fallibleVMCall(compilePtr, script->code, ic.frameSize.staticLocalSlots());
} else {
masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
masm.fallibleVMCall(compilePtr, script->code, -1);
}
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
@@ -637,17 +637,17 @@ class CallCompiler : public BaseCompiler
uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots();
/*
* SplatApplyArgs has not been called, so we call it here before
* potentially touching f.u.call.dynamicArgc.
*/
Value *vp;
if (ic.frameSize.isStatic()) {
- JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticFrameDepth());
+ JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticLocalSlots());
vp = f.regs.sp - (2 + ic.frameSize.staticArgc());
} else {
JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
if (!ic::SplatApplyArgs(f)) /* updates regs.sp */
THROWV(true);
vp = f.regs.sp - (2 + f.u.call.dynamicArgc);
}
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -68,17 +68,17 @@ class FrameSize
bool isStatic() const {
return frameDepth_ > 0;
}
bool isDynamic() const {
return frameDepth_ == 0;
}
- uint32 staticFrameDepth() const {
+ uint32 staticLocalSlots() const {
JS_ASSERT(isStatic());
return frameDepth_;
}
uint32 staticArgc() const {
JS_ASSERT(isStatic());
return argc_;
}
--- a/js/src/methodjit/RematInfo.h
+++ b/js/src/methodjit/RematInfo.h
@@ -90,17 +90,20 @@ struct StateRemat {
// the largest local:
// ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(JSStackFrame),
// And an extra bit for the sign on arguments.
#define MIN_STATE_REMAT_BITS 17
bool isConstant() const { return offset_ == CONSTANT; }
bool inRegister() const { return offset_ >= 0 &&
offset_ <= int32(JSC::MacroAssembler::TotalRegisters); }
- bool inMemory() const { return offset_ >= int32(sizeof(JSStackFrame)); }
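+ // Formal args (and the callee/this slots) are addressed at negative offsets from the frame pointer, so negative offsets are also in memory.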
+ bool inMemory() const {
+ return offset_ >= int32(sizeof(JSStackFrame)) ||
+ offset_ < 0;
+ }
int32 toInt32() const { return offset_; }
Address address() const {
JS_ASSERT(inMemory());
return Address(JSFrameReg, offset_);
}
RegisterID reg() const {
JS_ASSERT(inRegister());
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -59,22 +59,16 @@ StubCompiler::StubCompiler(JSContext *cx
scriptJoins(CompilerAllocPolicy(cx, cc)),
jumpList(SystemAllocPolicy())
{
#ifdef DEBUG
masm.setSpewPath(true);
#endif
}
-bool
-StubCompiler::init(uint32 nargs)
-{
- return true;
-}
-
void
StubCompiler::linkExitDirect(Jump j, Label L)
{
exits.append(CrossPatch(j, L));
}
JSC::MacroAssembler::Label
StubCompiler::syncExit(Uses uses)
@@ -131,17 +125,17 @@ StubCompiler::linkExit(Jump j, Uses uses
// Special version of linkExit that is used when there is a JavaScript
// control-flow branch after the slow path. Our compilation strategy
// requires the JS frame to be fully materialized in memory across branches.
// This function does a linkExit and also fully materializes the frame.
void
StubCompiler::linkExitForBranch(Jump j)
{
- Label l = syncExit(Uses(frame.frameDepth()));
+ Label l = syncExit(Uses(frame.frameSlots()));
linkExitDirect(j, l);
}
void
StubCompiler::leave()
{
JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW LEAVE CODE ---- \n");
for (size_t i = 0; i < jumpList.length(); i++)
--- a/js/src/methodjit/StubCompiler.h
+++ b/js/src/methodjit/StubCompiler.h
@@ -91,18 +91,16 @@ class StubCompiler
Vector<CrossPatch, 64, mjit::CompilerAllocPolicy> exits;
Vector<CrossPatch, 64, mjit::CompilerAllocPolicy> joins;
Vector<CrossJumpInScript, 64, mjit::CompilerAllocPolicy> scriptJoins;
Vector<Jump, 8, SystemAllocPolicy> jumpList;
public:
StubCompiler(JSContext *cx, mjit::Compiler &cc, FrameState &frame, JSScript *script);
- bool init(uint32 nargs);
-
size_t size() {
return masm.size();
}
uint8 *buffer() {
return masm.buffer();
}