--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -503,45 +503,29 @@ TraceRecorder::TraceRecorder(JSContext*
lirbuf->state = addName(lir->insParam(0), "state");
lirbuf->param1 = addName(lir->insParam(1), "param1");
}
lirbuf->sp = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, sp)), "sp");
lirbuf->rp = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, rp)), "rp");
cx_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, cx)), "cx");
gp_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, gp)), "gp");
- JSStackFrame* localFrame = NULL;
- jsuword* localNames = NULL;
-#ifdef DEBUG
- void* mark = NULL;
- if (cx->fp->fun) {
- mark = JS_ARENA_MARK(&cx->tempPool);
- localFrame = cx->fp;
- localNames = js_GetLocalNameArray(cx, localFrame->fun, &cx->tempPool);
- }
-#else
- localFrame = NULL;
- localNames = NULL;
-#endif
/* the first time we compile a tree this will be empty as we add entries lazily */
uint16* gslots = treeInfo->globalSlots.data();
uint8* m = globalTypeMap;
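+ // Globals are imported with a NULL frame: import() only consults the frame
+ // when naming argv/vars slots in debug builds.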
FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
- import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, localFrame, localNames);
+ import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
m++;
);
ptrdiff_t offset = -treeInfo->nativeStackBase + 8;
m = stackTypeMap;
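+ // Each pending frame (f) supplied by the macro is forwarded to import() so
+ // argv/vars slots can be named from that frame's locals in debug builds.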
FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
- import(lirbuf->sp, offset, vp, *m, vpname, vpnum, localFrame, localNames);
+ import(lirbuf->sp, offset, vp, *m, vpname, vpnum, f);
m++; offset += sizeof(double);
);
-#ifdef DEBUG
- JS_ARENA_RELEASE(&cx->tempPool, mark);
-#endif
recompileFlag = false;
backEdgeCount = 0;
}
TraceRecorder::~TraceRecorder()
{
#ifdef DEBUG
@@ -851,16 +835,22 @@ FlushNativeStackFrame(JSContext* cx, uns
uint8* mp_base = mp;
double* np_base = np;
/* Root all string and object references first (we don't need to call the GC for this). */
FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
if ((*mp == JSVAL_STRING || *mp == JSVAL_OBJECT) && !NativeToValue(cx, *vp, *mp, np))
return false;
++mp; ++np
);
+
+ // Restore thisp from the now-restored argv[-1] in each pending frame.
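+ // (In this frame layout argv[-2] is the callee and argv[-1] is the |this|
+ // value, so thisp can be re-derived once argv has been written back above.)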
+ unsigned n = callDepth;
+ for (JSStackFrame* f = cx->fp; n-- != 0; f = f->down)
+ f->thisp = JSVAL_TO_OBJECT(f->argv[-1]);
+
/* Now do this again, but this time for all values (probably quicker than actually checking
the type and excluding strings and objects). The GC might kick in when we store doubles,
but everything is rooted now (all strings/objects and all doubles we already boxed). */
mp = mp_base;
np = np_base;
FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
if (!NativeToValue(cx, *vp, *mp, np))
return false;
@@ -868,17 +858,17 @@ FlushNativeStackFrame(JSContext* cx, uns
);
debug_only(printf("\n");)
return true;
}
/* Emit load instructions onto the trace that read the initial stack state. */
void
TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
- const char *prefix, int index, JSStackFrame* fp, jsuword *localNames)
+ const char *prefix, int index, JSStackFrame *fp)
{
LIns* ins;
if (t == JSVAL_INT) { /* demoted */
JS_ASSERT(isInt32(*p));
/* Ok, we have a valid demotion attempt pending, so insert an integer
read and promote it to double since all arithmetic operations expect
to see doubles on entry. The first op to use this slot will emit an
f2i cast which will cancel out the i2f we insert here. */
@@ -889,27 +879,37 @@ TraceRecorder::import(LIns* base, ptrdif
JS_ASSERT(isNumber(*p) == (t == JSVAL_DOUBLE));
ins = lir->insLoad(t == JSVAL_DOUBLE ? LIR_ldq : LIR_ld, base, offset);
nativeFrameTracker.set(p, ins);
}
tracker.set(p, ins);
#ifdef DEBUG
char name[64];
JS_ASSERT(strlen(prefix) < 10);
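+ // For argv/vars slots, fetch the function's local name array (allocated from
+ // cx->tempPool and released below) so the debug name can show the local's
+ // identifier; other prefixes don't need it.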
+ void* mark = NULL;
+ jsuword* localNames = NULL;
+ if (*prefix == 'a' || *prefix == 'v') {
+ mark = JS_ARENA_MARK(&cx->tempPool);
+ localNames = js_GetLocalNameArray(cx, fp->fun, &cx->tempPool);
+ }
+
if (!strcmp(prefix, "argv")) {
JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index]);
JS_snprintf(name, sizeof name, "$%s.%s", js_AtomToPrintableString(cx, fp->fun->atom),
js_AtomToPrintableString(cx, atom));
} else if (!strcmp(prefix, "vars")) {
- JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[index + fp->fun->nargs]);
+ JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(localNames[fp->fun->nargs + index]);
JS_snprintf(name, sizeof name, "$%s.%s", js_AtomToPrintableString(cx, fp->fun->atom),
js_AtomToPrintableString(cx, atom));
} else {
JS_snprintf(name, sizeof name, "$%s%d", prefix, index);
}
+
+ if (mark)
+ JS_ARENA_RELEASE(&cx->tempPool, mark);
addName(ins, name);
static const char* typestr[] = {
"object", "int", "double", "3", "string", "5", "boolean", "any"
};
printf("import vp=%p name=%s type=%s flags=%d\n", p, name, typestr[t & 7], t >> 3);
#endif
}
@@ -922,17 +922,17 @@ TraceRecorder::lazilyImportGlobalSlot(un
return false;
jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
if (tracker.has(vp))
return true; /* we already have it */
unsigned index = treeInfo->globalSlots.length();
treeInfo->globalSlots.add(slot);
treeInfo->globalTypeMap.add(getCoercedType(*vp));
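+ // Global slots have no enclosing frame, so import() gets NULL here.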
import(gp_ins, slot*sizeof(double), vp, treeInfo->globalTypeMap.data()[index],
- "global", index, NULL, NULL);
+ "global", index, NULL);
return true;
}
/* Update the tracker, then issue a write back store. */
void
TraceRecorder::set(jsval* p, LIns* i, bool initializing)
{
JS_ASSERT(initializing || tracker.has(p));
@@ -1001,17 +1001,16 @@ js_IsLoopExit(JSContext* cx, JSScript* s
default:;
}
return false;
}
struct FrameInfo {
JSObject* callee; // callee function object
- JSObject* thisp; // |this| parameter
jsbytecode* callpc; // pc of JSOP_CALL in caller script
union {
struct {
uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
uint16 argc; // actual argument count, may be < fun->nargs
} s;
uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
};
@@ -1307,17 +1306,17 @@ js_SynthesizeFrame(JSContext* cx, const
newifp->frame.scopeChain = OBJ_GET_PARENT(cx, fi.callee);
newifp->frame.sharpDepth = 0;
newifp->frame.sharpArray = NULL;
newifp->frame.flags = 0;
newifp->frame.dormantNext = NULL;
newifp->frame.xmlNamespace = NULL;
newifp->frame.blockChain = NULL;
newifp->mark = newmark;
- newifp->frame.thisp = fi.thisp;
+ newifp->frame.thisp = NULL; // will be set by js_ExecuteTree -> FlushNativeStackFrame
newifp->frame.regs = cx->fp->regs;
newifp->frame.regs->pc = script->code;
newifp->frame.regs->sp = newsp + script->nfixed;
newifp->frame.slots = newsp;
#ifdef DEBUG
newifp->frame.pcDisabledSave = 0;
#endif
@@ -2669,44 +2668,41 @@ bool TraceRecorder::record_JSOP_CALL()
// TODO: make sure args are not copied, or track the copying via the tracker
if (argc < fun->nargs &&
jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
ABORT_TRACE("can't trace calls with too few args requiring argv move");
}
FrameInfo fi = {
JSVAL_TO_OBJECT(fval),
- JSVAL_TO_OBJECT(stackval(0 - (argc + 1))),
fp->regs->pc,
fp->regs->sp + (fun->nargs - argc) - fp->slots,
argc
};
unsigned callDepth = getCallDepth();
if (callDepth >= treeInfo->maxCallDepth)
treeInfo->maxCallDepth = callDepth + 1;
lir->insStorei(lir->insImmPtr(fi.callee), lirbuf->rp,
callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, callee));
- lir->insStorei(lir->insImmPtr(fi.thisp), lirbuf->rp,
- callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, thisp));
lir->insStorei(lir->insImmPtr(fi.callpc), lirbuf->rp,
callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, callpc));
lir->insStorei(lir->insImm(fi.word), lirbuf->rp,
callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, word));
atoms = fun->u.i.script->atomMap.vector;
return true;
}
if (FUN_SLOW_NATIVE(fun))
ABORT_TRACE("slow native");
enum JSTNErrType { INFALLIBLE, FAIL_NULL, FAIL_NEG };
- struct JSTraceableNative {
+ static struct JSTraceableNative {
JSFastNative native;
int builtin;
const char *prefix;
const char *argtypes;
JSTNErrType errtype;
} knownNatives[] = {
{ js_math_sin, F_Math_sin, "", "d", INFALLIBLE, },
{ js_math_cos, F_Math_cos, "", "d", INFALLIBLE, },