--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1981,17 +1981,17 @@ TraceRecorder::snapshot(ExitType exitTyp
exit->numStackSlotsBelowCurrentFrame = cx->fp->callee
? nativeStackOffset(&cx->fp->argv[-2])/sizeof(double)
: 0;
exit->exitType = exitType;
exit->addGuard(rec);
exit->block = fp->blockChain;
exit->ip_adj = ip_adj;
exit->sp_adj = (stackSlots * sizeof(double)) - treeInfo->nativeStackBase;
- exit->rp_adj = exit->calldepth * sizeof(FrameInfo);
+ exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
memcpy(getTypeMap(exit), typemap, typemap_size);
    /* BIG FAT WARNING: If compilation fails, we currently don't reset the lirbuf, so it's
       safe to keep references to the side exits here. If we ever start rewinding those
       lirbufs, we have to make sure we purge the side exits that will then no longer be
       in valid memory. */
if (exitType == LOOP_EXIT)
treeInfo->sideExits.add(exit);
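
The hunk above is the core of the patch: the call stack now holds FrameInfo pointers
instead of inline FrameInfo records, so a side exit measures its rp adjustment in
pointer-sized entries. A minimal sketch of the arithmetic, with a stand-in record
(the real FrameInfo is defined in jstracer.h):

    #include <stddef.h>

    // Stand-in record; the real FrameInfo lives in jstracer.h and, after
    // this patch, is trailed in memory by its variable-length typemap.
    struct FrameInfo { void* callee; void* block; long ip_adj; int word; };

    // rp now moves in pointer-sized steps: the call stack holds FrameInfo*
    // entries rather than inline FrameInfo records, so the record's size
    // no longer leaks into the side exit's rp adjustment.
    inline size_t rp_adj_bytes(unsigned calldepth) {
        return calldepth * sizeof(FrameInfo*);
    }
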
@@ -2502,29 +2502,29 @@ TraceRecorder::prepareTreeCall(Fragment*
trace) is currently inside inlined function code (calldepth > 0), we have to advance
the native stack pointer such that we match what the inner trace expects to see. We
move it back when we come out of the inner tree call. */
if (callDepth > 0) {
/* Calculate the amount we have to lift the native stack pointer by to compensate for
any outer frames that the inner tree doesn't expect but the outer tree has. */
ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-2]);
/* Calculate the amount we have to lift the call stack by */
- ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo);
+ ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo*);
/* Guard that we have enough stack space for the tree we are trying to call on top
of the new value for sp. */
debug_only_v(printf("sp_adj=%d outer=%d inner=%d\n",
sp_adj, treeInfo->nativeStackBase, ti->nativeStackBase));
LIns* sp_top = lir->ins2i(LIR_piadd, lirbuf->sp,
            - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
            + sp_adj /* adjust for stack in outer frame inner tree can't see */
            + ti->maxNativeStackSlots * sizeof(double)); /* plus the inner tree's stack */
guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
/* Guard that we have enough call stack space. */
LIns* rp_top = lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj +
- ti->maxCallDepth * sizeof(FrameInfo));
+ ti->maxCallDepth * sizeof(FrameInfo*));
guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
/* We have enough space, so adjust sp and rp to their new level. */
lir->insStorei(inner_sp_ins = lir->ins2i(LIR_piadd, lirbuf->sp,
            - treeInfo->nativeStackBase /* rebase sp to beginning of outer tree's stack */
            + sp_adj /* adjust for stack in outer frame inner tree can't see */
            + ti->nativeStackBase), /* plus the inner tree's stack base */
lirbuf->state, offsetof(InterpState, sp));
lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
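
For reference, the two OOM_EXIT guards above encode plain pointer arithmetic: sp is
rebased to the outer tree's stack base, lifted past the outer frames the inner tree
can't see, plus the inner tree's worst-case stack; rp is lifted by callDepth pointer
slots plus the inner tree's maximum call depth. A sketch of the same bounds check,
with invented names:

    #include <stddef.h>

    struct FrameInfo;

    // Mirrors the two OOM_EXIT guards above: true when both the inner
    // tree's native stack and its call stack fit below their ends.
    inline bool innerTreeFits(double* sp, double* eos,          // native stack
                              FrameInfo** rp, FrameInfo** eor,  // call stack
                              ptrdiff_t sp_adj,                 // outer frames, bytes
                              size_t outerStackBase,            // bytes
                              unsigned innerMaxSlots,
                              unsigned callDepth,
                              unsigned innerMaxCallDepth)
    {
        char* sp_top = (char*)sp - outerStackBase + sp_adj
                     + innerMaxSlots * sizeof(double);
        FrameInfo** rp_top = rp + callDepth + innerMaxCallDepth;
        return sp_top < (char*)eos && rp_top < eor;
    }
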
@@ -3548,18 +3548,18 @@ js_ExecuteTree(JSContext* cx, Fragment*
ti->maxNativeStackSlots,
f->code());)
if (ngslots)
BuildNativeGlobalFrame(cx, ngslots, gslots, tm->globalTypeMap->data(), global);
BuildNativeStackFrame(cx, 0/*callDepth*/, ti->stackTypeMap.data(), stack);
double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
- FrameInfo callstack_buffer[MAX_CALL_STACK_ENTRIES];
- FrameInfo* callstack = callstack_buffer;
+ FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
+ FrameInfo** callstack = callstack_buffer;
InterpState state;
state.sp = (void*)entry_sp;
state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
state.rp = callstack;
state.eor = callstack + MAX_CALL_STACK_ENTRIES;
state.gp = global;
state.cx = cx;
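
The buffer declaration above is the data-structure half of the change: js_ExecuteTree
now reserves pointer slots only. A sketch of the shape, using a placeholder value for
MAX_CALL_STACK_ENTRIES (the real constant is defined elsewhere in the file):

    struct FrameInfo;
    enum { kMaxCallStackEntries = 500 };  // placeholder for MAX_CALL_STACK_ENTRIES

    void sketchCallStack()
    {
        FrameInfo* callstack_buffer[kMaxCallStackEntries]; // pointer slots only
        FrameInfo** rp  = callstack_buffer;                // state.rp starts here
        FrameInfo** eor = callstack_buffer + kMaxCallStackEntries;  // state.eor
        // The records themselves (and their typemaps) stay in the LIR
        // buffer; nothing is copied here, the buffer only holds pointers.
        (void)rp; (void)eor;
    }
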
@@ -3607,49 +3607,49 @@ js_ExecuteTree(JSContext* cx, Fragment*
/* While executing a tree we do not update state.sp and state.rp even if they grow. Instead,
guards tell us by how much sp and rp should be incremented in case of a side exit. When
calling a nested tree, however, we actively adjust sp and rp. If we have such frames
from outer trees on the stack, then rp will have been adjusted. Before we can process
the stack of the frames of the tree we directly exited from, we have to first work our
way through the outer frames and generate interpreter frames for them. Once the call
stack (rp) is empty, we can process the final frames (which again are not directly
visible; only the guard we exited on tells us about them). */
- FrameInfo* rp = (FrameInfo*)state.rp;
+ FrameInfo** rp = (FrameInfo**)state.rp;
if (lr->exitType == NESTED_EXIT) {
VMSideExit* nested = state.lastTreeCallGuard;
if (!nested) {
/* If lastTreeCallGuard is not set in state, we only have a single level of
nesting in this exit, so lr itself is the innermost and outermost nested
guard, and hence we set nested to lr. The calldepth of the innermost guard
is not added to state.rp, so we do it here manually. For a nesting depth
greater than 1 the CallTree builtin already added the innermost guard's
calldepth to state.rpAtLastTreeCall. */
nested = lr;
rp += lr->calldepth;
} else {
/* During unwinding state.rp gets overwritten at every step and we restore
it here to its state at the innermost nested guard. The builtin already
added the calldepth of that innermost guard to rpAtLastTreeCall. */
- rp = (FrameInfo*)state.rpAtLastTreeCall;
+ rp = (FrameInfo**)state.rpAtLastTreeCall;
}
innermost = state.lastTreeExitGuard;
if (innermostNestedGuardp)
*innermostNestedGuardp = nested;
JS_ASSERT(nested);
JS_ASSERT(nested->exitType == NESTED_EXIT);
JS_ASSERT(state.lastTreeExitGuard);
JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
}
while (callstack < rp) {
/* Synthesize a stack frame and write out the values in it using the type map
   that trails the FrameInfo record each call stack entry points at. */
- if (js_SynthesizeFrame(cx, *callstack) < 0)
+ if (js_SynthesizeFrame(cx, **callstack) < 0)
return NULL;
- int slots = FlushNativeStackFrame(cx, 1/*callDepth*/, callstack->typemap, stack, cx->fp);
+ int slots = FlushNativeStackFrame(cx, 1/*callDepth*/, (uint8*)(*callstack+1), stack, cx->fp);
#ifdef DEBUG
JSStackFrame* fp = cx->fp;
debug_only_v(printf("synthesized deep frame for %s:%u@%u, slots=%d\n",
fp->script->filename,
js_FramePCToLineNumber(cx, fp),
FramePCOffset(fp),
slots);)
#endif
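
The (uint8*)(*callstack+1) expression above relies on the allocation layout
introduced later in this patch: each call stack entry points at a FrameInfo record
whose typemap bytes immediately follow it. A sketch of that convention:

    #include <stdint.h>

    struct FrameInfo { int fields_elided; };  // stand-in for the real record

    // One past the record is the first typemap byte, because skip()
    // reserves the record and its typemap as one contiguous chunk.
    inline uint8_t* frameTypeMap(FrameInfo* fi)
    {
        return (uint8_t*)(fi + 1);
    }
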
@@ -3663,17 +3663,17 @@ js_ExecuteTree(JSContext* cx, Fragment*
}
/* We already synthesized the frames around the innermost guard. Here we just deal
with additional frames inside the tree we are bailing out from. */
JS_ASSERT(rp == callstack);
unsigned calldepth = innermost->calldepth;
unsigned calldepth_slots = 0;
for (unsigned n = 0; n < calldepth; ++n) {
- int nslots = js_SynthesizeFrame(cx, callstack[n]);
+ int nslots = js_SynthesizeFrame(cx, *callstack[n]);
if (nslots < 0)
return NULL;
calldepth_slots += nslots;
++inlineCallCount;
#ifdef DEBUG
JSStackFrame* fp = cx->fp;
debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
fp->script->filename, js_FramePCToLineNumber(cx, fp),
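
With pointers on the call stack, the shallow-frame loop above just adds a
dereference: callstack[n] is now a FrameInfo*, not a record. Schematically, with
synthesize standing in for js_SynthesizeFrame:

    struct FrameInfo { int fields_elided; };  // stand-in

    // Reaching frame n costs one extra dereference compared to the old
    // inline-record layout.
    int synthesizeShallowFrames(FrameInfo** callstack, unsigned calldepth,
                                int (*synthesize)(const FrameInfo&))
    {
        int slots = 0;
        for (unsigned n = 0; n < calldepth; ++n) {
            int nslots = synthesize(*callstack[n]);
            if (nslots < 0)
                return -1;
            slots += nslots;
        }
        return slots;
    }
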
@@ -6690,52 +6690,41 @@ TraceRecorder::interpretedFunctionCall(j
// TODO: track the copying via the tracker...
if (argc < fun->nargs &&
jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
ABORT_TRACE("can't trace calls with too few args requiring argv move");
}
// Generate a type map for the outgoing frame and stash it in the LIR
unsigned stackSlots = js_NativeStackSlots(cx, 0/*callDepth*/);
- LIns* data = lir_buf_writer->skip(stackSlots * sizeof(uint8));
- uint8* typemap = (uint8 *)data->payload();
+ LIns* data = lir_buf_writer->skip(sizeof(FrameInfo) + stackSlots * sizeof(uint8));
+ FrameInfo* fi = (FrameInfo*)data->payload();
+ uint8* typemap = (uint8 *)(fi + 1);
uint8* m = typemap;
/* Determine the type of a store by looking at the current type of the actual value the
interpreter is using. For numbers we have to check what kind of store we used last
(integer or double) to figure out what the side exit should reflect in its typemap. */
FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0/*callDepth*/,
*m++ = determineSlotType(vp);
);
if (argc >= 0x8000)
ABORT_TRACE("too many arguments");
- FrameInfo fi = {
- JSVAL_TO_OBJECT(fval),
- fp->blockChain,
- ENCODE_IP_ADJ(fp, fp->regs->pc),
- typemap,
- { { fp->regs->sp - fp->slots, argc | (constructing ? 0x8000 : 0) } }
- };
+ fi->callee = JSVAL_TO_OBJECT(fval);
+ fi->block = fp->blockChain;
+ fi->ip_adj = ENCODE_IP_ADJ(fp, fp->regs->pc);
+ fi->s.spdist = fp->regs->sp - fp->slots;
+ fi->s.argc = argc | (constructing ? 0x8000 : 0);
unsigned callDepth = getCallDepth();
if (callDepth >= treeInfo->maxCallDepth)
treeInfo->maxCallDepth = callDepth + 1;
-#define STORE_AT_RP(name) \
- lir->insStorei(INS_CONSTPTR(fi.name), lirbuf->rp, \
- callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, name))
-
- STORE_AT_RP(callee);
- STORE_AT_RP(block);
- STORE_AT_RP(ip_adj);
- STORE_AT_RP(typemap);
- STORE_AT_RP(word);
-
-#undef STORE_AT_RP
+ lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
atoms = fun->u.i.script->atomMap.vector;
return true;
}
JS_REQUIRES_STACK bool
TraceRecorder::record_JSOP_CALL()
{
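
The final hunk replaces the old STORE_AT_RP macro's five per-field stores with one
chunk allocation and a single pointer store. A sketch of the new recording-side
protocol, where chunk stands in for the payload returned by lir_buf_writer->skip()
and the struct is a stand-in for the post-patch FrameInfo:

    #include <stdint.h>
    #include <string.h>

    // Stand-in: no typemap member; the typemap bytes simply trail the
    // record in the same allocation.
    struct FrameInfo {
        void*    callee;
        void*    block;
        intptr_t ip_adj;
        struct { uint16_t spdist, argc; } s;
    };

    // chunk must span sizeof(FrameInfo) + stackSlots bytes, as reserved
    // by skip(); the returned pointer is the single value stored at rp.
    inline FrameInfo* packFrameInfo(void* chunk, const FrameInfo& header,
                                    const uint8_t* typemap, unsigned stackSlots)
    {
        FrameInfo* fi = (FrameInfo*)chunk;
        *fi = header;
        memcpy(fi + 1, typemap, stackSlots);  // typemap trails the record
        return fi;
    }
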