Properly compute 'this' object on trace and wrap if necessary (488203, r=mrbkap).
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -4360,25 +4360,18 @@ js_Interpret(JSContext *cx)
OBJ_SET_SLOT(cx, fp->varobj, slot, rval);
len = JSOP_INCGVAR_LENGTH; /* all gvar incops are same length */
JS_ASSERT(len == js_CodeSpec[op].length);
DO_NEXT_OP(len);
}
#define COMPUTE_THIS(cx, fp, obj) \
JS_BEGIN_MACRO \
- if (fp->flags & JSFRAME_COMPUTED_THIS) { \
- obj = fp->thisp; \
- } else { \
- obj = js_ComputeThis(cx, JS_TRUE, fp->argv); \
- if (!obj) \
- goto error; \
- fp->thisp = obj; \
- fp->flags |= JSFRAME_COMPUTED_THIS; \
- } \
+ if (!(obj = js_ComputeThisForFrame(cx, fp))) \
+ goto error; \
JS_END_MACRO
BEGIN_CASE(JSOP_THIS)
COMPUTE_THIS(cx, fp, obj);
PUSH_OPND(OBJECT_TO_JSVAL(obj));
END_CASE(JSOP_THIS)
BEGIN_CASE(JSOP_GETTHISPROP)
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -461,16 +461,29 @@ js_ComputeThis(JSContext *cx, JSBool laz
extern const uint16 js_PrimitiveTestFlags[];
#define PRIMITIVE_THIS_TEST(fun,thisv) \
(JS_ASSERT(!JSVAL_IS_VOID(thisv)), \
JSFUN_THISP_TEST(JSFUN_THISP_FLAGS((fun)->flags), \
js_PrimitiveTestFlags[JSVAL_TAG(thisv) - 1]))
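+/*
+ * Lazily compute the frame's 'this' object: return the cached fp->thisp if
+ * JSFRAME_COMPUTED_THIS is already set; otherwise compute it via js_ComputeThis,
+ * cache it in fp->thisp, and mark the frame as computed.
+ */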
+static inline JSObject *
+js_ComputeThisForFrame(JSContext *cx, JSStackFrame *fp)
+{
+ if (fp->flags & JSFRAME_COMPUTED_THIS)
+ return fp->thisp;
+ JSObject* obj = js_ComputeThis(cx, JS_TRUE, fp->argv);
+ if (!obj)
+ return NULL;
+ fp->thisp = obj;
+ fp->flags |= JSFRAME_COMPUTED_THIS;
+ return obj;
+}
+
/*
* NB: js_Invoke requires that cx is currently running JS (i.e., that cx->fp
* is non-null), and that vp points to the callee, |this| parameter, and
* actual arguments of the call. [vp .. vp + 2 + argc) must belong to the last
* JS stack segment that js_AllocStack allocated. The function may use the
* space available after vp + 2 + argc in the stack segment for temporaries,
* so the caller should not use that space for values that must be preserved
* across the call.
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1734,19 +1734,21 @@ skip:
}
// Skip over stopFrame itself.
JS_ASSERT(n != 0);
--n;
fp = fp->down;
}
for (; n != 0; fp = fp->down) {
--n;
- if (fp->callee) { // might not have it if the entry frame is global
+ if (fp->callee) {
JS_ASSERT(JSVAL_IS_OBJECT(fp->argv[-1]));
fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
+ if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
+ fp->flags |= JSFRAME_COMPUTED_THIS;
}
}
}
debug_only_v(printf("\n");)
return mp - mp_base;
}
/* Emit load instructions onto the trace that read the initial stack state. */
@@ -3381,17 +3383,17 @@ js_SynthesizeFrame(JSContext* cx, const
newifp->frame.scopeChain = OBJ_GET_PARENT(cx, fi.callee);
newifp->frame.sharpDepth = 0;
newifp->frame.sharpArray = NULL;
newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
newifp->frame.dormantNext = NULL;
newifp->frame.xmlNamespace = NULL;
newifp->frame.blockChain = NULL;
newifp->mark = newmark;
- newifp->frame.thisp = NULL; // will be set by js_ExecuteTree -> FlushNativeStackFrame
+ newifp->frame.thisp = NULL; // will be updated in FlushNativeStackFrame
newifp->frame.regs = fp->regs;
newifp->frame.regs->pc = script->code;
newifp->frame.regs->sp = newsp + script->nfixed;
newifp->frame.imacpc = NULL;
newifp->frame.slots = newsp;
if (script->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[script->staticLevel];
@@ -4339,18 +4341,16 @@ LeaveTree(InterpState& state, VMSideExit
getStackTypeMap(innermost),
stack, NULL);
JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
#ifdef DEBUG
// Verify that our state restoration worked.
for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
JS_ASSERT_IF(fp->callee, JSVAL_IS_OBJECT(fp->argv[-1]));
- JS_ASSERT_IF(fp->callee && fp->thisp != JSVAL_TO_OBJECT(fp->argv[-1]),
- !(fp->flags & JSFRAME_COMPUTED_THIS) && !fp->thisp);
}
#endif
#ifdef JS_JIT_SPEW
if (innermost->exitType != TIMEOUT_EXIT)
AUDIT(sideExitIntoInterpreter);
else
AUDIT(timeoutIntoInterpreter);
#endif
@@ -6227,27 +6227,54 @@ TraceRecorder::unbox_jsval(jsval v, LIns
lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_TAGMASK)),
JSVAL_STRING),
exit);
v_ins = lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
return;
}
}
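+/*
+ * Trace-native helper called through ComputeThis_tn_ci: computes (and caches)
+ * 'this' for the current frame while on trace. Returns NULL on failure, which
+ * the recorded guard turns into a side exit.
+ */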
+static JSObject*
+ComputeThis_tn(JSContext* cx)
+{
+ return js_ComputeThisForFrame(cx, cx->fp);
+}
+
+JS_DEFINE_CALLINFO_1(static, OBJECT, ComputeThis_tn, CONTEXT, 1, 1) /* safe to CSE */
+
JS_REQUIRES_STACK bool
TraceRecorder::getThis(LIns*& this_ins)
{
- if (cx->fp->callee) { /* in a function */
- if (JSVAL_IS_NULL(cx->fp->argv[-1]))
- return false;
- this_ins = get(&cx->fp->argv[-1]);
- guard(false, lir->ins_eq0(this_ins), MISMATCH_EXIT);
- } else { /* in global code */
- this_ins = scopeChain();
- }
+ JSObject* thisObj = js_ComputeThisForFrame(cx, cx->fp);
+ if (!thisObj)
+ ABORT_TRACE("js_ComputeThis failed");
+ if (!cx->fp->callee || JSVAL_IS_NULL(cx->fp->argv[-1])) {
+ JS_ASSERT(callDepth == 0);
+ /*
+ * In global code, or if 'this' is null, wrap the global object and bake it directly
+ * into the trace.
+ */
+ this_ins = INS_CONSTPTR(thisObj);
+ set(&cx->fp->argv[-1], this_ins);
+ return true;
+ }
+ if (callDepth == 0) {
+ /*
+ * Check that we computed the 'this' object for the entry frame. We only have to worry
+ * about this for callDepth == 0 since inlined function calls are not subject to
+ * deferred wrapping as we do not trace across global objects. Note that for nested
+ * tree calls the current cx->fp might not be the current entry frame of the called
+ * tree; however, since we don't cross global objects, we can call js_ComputeThis
+ * on that frame to ensure that the global object was wrapped, and that guarantee then
+ * inductively extends to our current 'this' object.
+ */
+ LIns* args[] = { cx_ins };
+ guard(false, lir->ins_eq0(lir->insCall(&ComputeThis_tn_ci, args)), MISMATCH_EXIT);
+ }
+ this_ins = get(&cx->fp->argv[-1]);
return true;
}
JS_REQUIRES_STACK bool
TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, LIns* exit)
{
bool cond = STOBJ_GET_CLASS(obj) == clasp;