Allow JaegerShot to only partially execute frames, bug 665815. r=luke
author Brian Hackett <bhackett1024@gmail.com>
Thu, 07 Jul 2011 21:02:57 -0700
changeset 77341 4bb2b60db2e21a47badfd9676450a31a0e91d68a
parent 77340 9b9fd467eb5f5eeb3858d7805f968fbad5a9809f
child 77342 311372d6fdcd431abd8b675e4d6ce1517470886e
push id78
push user clegnitto@mozilla.com
push date Fri, 16 Dec 2011 17:32:24 +0000
treeherder mozilla-release@79d24e644fdd [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers luke
bugs 665815
milestone 8.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Allow JaegerShot to only partially execute frames, bug 665815. r=luke
js/src/jit-test/tests/basic/testApplyInterpretLowered.js
js/src/jit-test/tests/basic/testApplyInterpretLowered2.js
js/src/jsfun.h
js/src/jsinferinlines.h
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsinterpinlines.h
js/src/jstracer.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/InlineFrameAssembler.h
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT-inl.h
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/MonoIC.h
js/src/methodjit/Retcon.cpp
js/src/methodjit/StubCalls.h
js/src/vm/Stack-inl.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/basic/testApplyInterpretLowered.js
@@ -0,0 +1,15 @@
+
+/* Read correct return value when the interpreter pops a lowered call/apply. */
+
+function recompile() {}
+
+function foo() {
+  if (arguments[0] == 9)
+    recompile();
+  return arguments[0];
+}
+function bar() {
+  for (var i = 0; i < 10; i++)
+    assertEq(foo.apply(null, [i]), i);
+}
+bar();
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/basic/testApplyInterpretLowered2.js
@@ -0,0 +1,12 @@
+
+/* Make sure the interpreter can pop lowered frames pushed by CompileFunction. */
+
+function recompile() {}
+
+function bar() {
+  for (var i = 0; i < 50; i++) {
+    var foo = new Function("recompile(arguments[0] + " + i + "); return arguments[0]");
+    assertEq(foo.apply(null, [i]), i);
+  }
+}
+bar();
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -502,17 +502,17 @@ js_NewDebuggableFlatClosure(JSContext *c
 
 extern JSFunction *
 js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, js::Native native,
                   uintN nargs, uintN flags);
 
 /*
  * Flags for js_ValueToFunction and js_ReportIsNotFunction.
  */
-#define JSV2F_CONSTRUCT         CONSTRUCT
+#define JSV2F_CONSTRUCT         INITIAL_CONSTRUCT
 #define JSV2F_SEARCH_STACK      0x10000
 
 extern JSFunction *
 js_ValueToFunction(JSContext *cx, const js::Value *vp, uintN flags);
 
 extern JSObject *
 js_ValueToFunctionObject(JSContext *cx, js::Value *vp, uintN flags);
 
--- a/js/src/jsinferinlines.h
+++ b/js/src/jsinferinlines.h
@@ -386,16 +386,24 @@ FixObjectType(JSContext *cx, JSObject *o
     if (cx->typeInferenceEnabled())
         cx->compartment->types.fixObjectType(cx, obj);
 }
 
 /* Interface helpers for JSScript */
 extern void TypeMonitorResult(JSContext *cx, JSScript *script, jsbytecode *pc, const js::Value &rval);
 extern void TypeDynamicResult(JSContext *cx, JSScript *script, jsbytecode *pc, js::types::jstype type);
 
+inline bool
+UseNewTypeAtEntry(JSContext *cx, StackFrame *fp)
+{
+    return fp->isConstructing() && cx->typeInferenceEnabled() &&
+           fp->prev() && fp->prev()->isScriptFrame() &&
+           UseNewType(cx, fp->prev()->script(), fp->prev()->pcQuadratic(cx->stack, fp));
+}
+
 /////////////////////////////////////////////////////////////////////
 // Script interface functions
 /////////////////////////////////////////////////////////////////////
 
 inline JSScript *
 TypeScript::script()
 {
     /*
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -610,17 +610,17 @@ RunScript(JSContext *cx, JSScript *scrip
 
 #ifdef JS_METHODJIT
     mjit::CompileStatus status;
     status = mjit::CanMethodJIT(cx, script, fp, mjit::CompileRequest_Interpreter);
     if (status == mjit::Compile_Error)
         return false;
 
     if (status == mjit::Compile_Okay)
-        return mjit::JaegerShot(cx);
+        return mjit::JaegerShot(cx, false);
 #endif
 
     return Interpret(cx, fp);
 }
 
 /*
  * Find a function reference and its 'this' value implicit first parameter
  * under argc arguments on cx's stack, and call the function.  Push missing
@@ -632,33 +632,36 @@ Invoke(JSContext *cx, const CallArgs &ar
 {
     /* N.B. Must be kept in sync with InvokeSessionGuard::start/invoke */
 
     CallArgs args = argsRef;
     JS_ASSERT(args.argc() <= JS_ARGS_LENGTH_MAX);
 
     JS_ASSERT(!cx->compartment->activeAnalysis);
 
+    /* MaybeConstruct is a subset of InitialFrameFlags */
+    InitialFrameFlags initial = (InitialFrameFlags) construct;
+
     if (args.calleev().isPrimitive()) {
-        js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(construct));
+        js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(initial));
         return false;
     }
 
     JSObject &callee = args.callee();
     Class *clasp = callee.getClass();
 
     /* Invoke non-functions. */
     if (JS_UNLIKELY(clasp != &js_FunctionClass)) {
 #if JS_HAS_NO_SUCH_METHOD
         if (JS_UNLIKELY(clasp == &js_NoSuchMethodClass))
             return NoSuchMethod(cx, args.argc(), args.base());
 #endif
         JS_ASSERT_IF(construct, !clasp->construct);
         if (!clasp->call) {
-            js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(construct));
+            js_ReportIsNotFunction(cx, &args.calleev(), ToReportFlags(initial));
             return false;
         }
         return CallJSNative(cx, clasp->call, args);
     }
 
     /* Invoke native functions. */
     JSFunction *fun = callee.getFunctionPrivate();
     JS_ASSERT_IF(construct, !fun->isConstructor());
@@ -680,17 +683,17 @@ Invoke(JSContext *cx, const CallArgs &ar
         }
         return true;
     }
 
     TypeMonitorCall(cx, args, construct);
 
     /* Get pointer to new frame/slots, prepare arguments. */
     InvokeFrameGuard ifg;
-    if (!cx->stack.pushInvokeFrame(cx, args, construct, &ifg))
+    if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg))
         return false;
 
     /* Now that the new frame is rooted, maybe create a call object. */
     StackFrame *fp = ifg.fp();
     if (fun->isHeavyweight() && !CreateFunCallObject(cx, fp))
         return false;
 
     /* Run function until JSOP_STOP, JSOP_RETURN or error. */
@@ -740,17 +743,17 @@ InvokeSessionGuard::start(JSContext *cx,
         /*
          * The frame will remain pushed even when the callee isn't active which
          * will affect the observable current global, so avoid any change.
          */
         if (callee.getGlobal() != GetGlobalForScopeChain(cx))
             break;
 
         /* Push the stack frame once for the session. */
-        if (!stack.pushInvokeFrame(cx, args_, NO_CONSTRUCT, &ifg_))
+        if (!stack.pushInvokeFrame(cx, args_, INITIAL_NONE, &ifg_))
             return false;
 
         /*
          * Update the 'this' type of the callee according to the value given,
          * along with the types of any missing arguments. These will be the
          * same across all calls.
          */
         script_->types.setThis(cx, thisv);
@@ -1863,27 +1866,45 @@ Interpret(JSContext *cx, StackFrame *ent
     JS_BEGIN_MACRO                                                            \
         mjit::CompileStatus status =                                          \
             mjit::CanMethodJITAtBranch(cx, script, regs.fp(), regs.pc);       \
         if (status == mjit::Compile_Error)                                    \
             goto error;                                                       \
         if (status == mjit::Compile_Okay) {                                   \
             void *ncode =                                                     \
                 script->nativeCodeForPC(regs.fp()->isConstructing(), regs.pc);\
-            interpReturnOK = mjit::JaegerShotAtSafePoint(cx, ncode);          \
+            mjit::JaegerStatus status =                                       \
+                mjit::JaegerShotAtSafePoint(cx, ncode, true);                 \
+            CHECK_PARTIAL_METHODJIT(status);                                  \
+            interpReturnOK = (status == mjit::Jaeger_Returned);               \
             if (entryFrame != regs.fp())                                      \
                 goto jit_return;                                              \
             regs.fp()->setFinishedInInterpreter();                            \
             goto leave_on_safe_point;                                         \
         }                                                                     \
         if (status == mjit::Compile_Abort) {                                  \
             useMethodJIT = false;                                             \
         }                                                                     \
     JS_END_MACRO
 
+#define CHECK_PARTIAL_METHODJIT(status)                                       \
+    JS_BEGIN_MACRO                                                            \
+        if (status == mjit::Jaeger_Unfinished) {                              \
+            op = (JSOp) *regs.pc;                                             \
+            RESTORE_INTERP_VARS();                                            \
+            DO_OP();                                                          \
+        } else if (status == mjit::Jaeger_UnfinishedAtTrap) {                 \
+            interpMode = JSINTERP_SKIP_TRAP;                                  \
+            JS_ASSERT(JSOp(*regs.pc) == JSOP_TRAP);                           \
+            op = JSOP_TRAP;                                                   \
+            RESTORE_INTERP_VARS();                                            \
+            DO_OP();                                                          \
+        }                                                                     \
+    JS_END_MACRO
+
 #else
 
 #define RESET_USE_METHODJIT() ((void) 0)
 
 #define MONITOR_BRANCH_METHODJIT() ((void) 0)
 
 #endif
 
@@ -1904,16 +1925,17 @@ Interpret(JSContext *cx, StackFrame *ent
 #endif
 #else
 #define MONITOR_BRANCH_TRACEVIS
 #endif
 
 #define RESTORE_INTERP_VARS()                                                 \
     JS_BEGIN_MACRO                                                            \
         script = regs.fp()->script();                                         \
+        pcCounts = script->pcCounters.get(JSRUNMODE_INTERP);                  \
         argv = regs.fp()->maybeFormalArgs();                                  \
         atoms = FrameAtomBase(cx, regs.fp());                                 \
         JS_ASSERT(&cx->regs() == &regs);                                      \
         if (cx->isExceptionPending())                                         \
             goto error;                                                       \
     JS_END_MACRO
 
 #define MONITOR_BRANCH()                                                      \
@@ -2069,20 +2091,17 @@ Interpret(JSContext *cx, StackFrame *ent
     /* Any script we interpret needs to have its type sets filled in. */
     if (cx->typeInferenceEnabled() && !script->types.ensureTypeArray(cx))
         goto error;
 
     /* Don't call the script prologue if executing between Method and Trace JIT. */
     if (interpMode == JSINTERP_NORMAL) {
         StackFrame *fp = regs.fp();
         JS_ASSERT_IF(!fp->isGeneratorFrame(), regs.pc == script->code);
-        bool newType = fp->isConstructing() && cx->typeInferenceEnabled() &&
-            fp->prev() && fp->prev()->isScriptFrame() &&
-            UseNewType(cx, fp->prev()->script(), fp->prev()->pcQuadratic(cx->stack, fp));
-        if (!ScriptPrologueOrGeneratorResume(cx, fp, newType))
+        if (!ScriptPrologueOrGeneratorResume(cx, fp, UseNewTypeAtEntry(cx, fp)))
             goto error;
     }
 
     /* The REJOIN mode acts like the normal mode, except the prologue is skipped. */
     if (interpMode == JSINTERP_REJOIN)
         interpMode = JSINTERP_NORMAL;
 
     JS_ASSERT_IF(interpMode == JSINTERP_SKIP_TRAP, JSOp(*regs.pc) == JSOP_TRAP);
@@ -2398,32 +2417,40 @@ BEGIN_CASE(JSOP_STOP)
         JS_ASSERT(!js_IsActiveWithOrBlock(cx, &regs.fp()->scopeChain(), 0));
         interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK);
         CHECK_INTERRUPT_HANDLER();
 
         /* The JIT inlines ScriptEpilogue. */
 #ifdef JS_METHODJIT
   jit_return:
 #endif
+
+        /* The results of lowered call/apply frames need to be shifted. */
+        bool shiftResult = regs.fp()->loweredCallOrApply();
+
         cx->stack.popInlineFrame(regs);
 
-        /* Sync interpreter locals. */
-        script = regs.fp()->script();
-        pcCounts = script->pcCounters.get(JSRUNMODE_INTERP);
-        argv = regs.fp()->maybeFormalArgs();
-        atoms = FrameAtomBase(cx, regs.fp());
+        RESTORE_INTERP_VARS();
 
         /* Resume execution in the calling frame. */
         RESET_USE_METHODJIT();
         if (JS_LIKELY(interpReturnOK)) {
             JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length
                       == JSOP_CALL_LENGTH);
             TRACE_0(LeaveFrame);
             script->types.monitor(cx, regs.pc, regs.sp[-1]);
+
+            op = JSOp(*regs.pc);
             len = JSOP_CALL_LENGTH;
+
+            if (shiftResult) {
+                regs.sp[-2] = regs.sp[-1];
+                regs.sp--;
+            }
+
             DO_NEXT_OP(len);
         }
         goto error;
     } else {
         JS_ASSERT(regs.sp == regs.fp()->base());
     }
     interpReturnOK = true;
     goto exit;
@@ -4153,17 +4180,17 @@ END_CASE(JSOP_EVAL)
 BEGIN_CASE(JSOP_NEW)
 BEGIN_CASE(JSOP_CALL)
 BEGIN_CASE(JSOP_FUNCALL)
 BEGIN_CASE(JSOP_FUNAPPLY)
 {
     CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
     JS_ASSERT(args.base() >= regs.fp()->base());
 
-    MaybeConstruct construct = *regs.pc == JSOP_NEW ? CONSTRUCT : NO_CONSTRUCT;
+    bool construct = (*regs.pc == JSOP_NEW);
 
     JSObject *callee;
     JSFunction *fun;
 
     /* Don't bother trying to fast-path calls to scripted non-constructors. */
     if (!IsFunctionObject(args.calleev(), &callee, &fun) || !fun->isInterpretedConstructor()) {
         if (construct) {
             if (!InvokeConstructor(cx, args))
@@ -4177,25 +4204,23 @@ BEGIN_CASE(JSOP_FUNAPPLY)
         CHECK_INTERRUPT_HANDLER();
         TRACE_0(NativeCallComplete);
         len = JSOP_CALL_LENGTH;
         DO_NEXT_OP(len);
     }
 
     TypeMonitorCall(cx, args, construct);
 
+    InitialFrameFlags initial = construct ? INITIAL_CONSTRUCT : INITIAL_NONE;
+
     JSScript *newScript = fun->script();
-    if (!cx->stack.pushInlineFrame(cx, regs, args, *callee, fun, newScript, construct, OOMCheck()))
+    if (!cx->stack.pushInlineFrame(cx, regs, args, *callee, fun, newScript, initial, OOMCheck()))
         goto error;
 
-    /* Refresh local js::Interpret state. */
-    script = newScript;
-    pcCounts = script->pcCounters.get(JSRUNMODE_INTERP);
-    argv = regs.fp()->formalArgsEnd() - fun->nargs;
-    atoms = script->atomMap.vector;
+    RESTORE_INTERP_VARS();
 
     /* Only create call object after frame is rooted. */
     if (fun->isHeavyweight() && !CreateFunCallObject(cx, regs.fp()))
         goto error;
 
     RESET_USE_METHODJIT();
     TRACE_0(EnterFrame);
 
@@ -4204,17 +4229,19 @@ BEGIN_CASE(JSOP_FUNAPPLY)
         /* Try to ensure methods are method JIT'd.  */
         mjit::CompileRequest request = (interpMode == JSINTERP_NORMAL)
                                        ? mjit::CompileRequest_Interpreter
                                        : mjit::CompileRequest_JIT;
         mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, regs.fp(), request);
         if (status == mjit::Compile_Error)
             goto error;
         if (!TRACE_RECORDER(cx) && !TRACE_PROFILER(cx) && status == mjit::Compile_Okay) {
-            interpReturnOK = mjit::JaegerShot(cx);
+            mjit::JaegerStatus status = mjit::JaegerShot(cx, true);
+            CHECK_PARTIAL_METHODJIT(status);
+            interpReturnOK = (status == mjit::Jaeger_Returned);
             CHECK_INTERRUPT_HANDLER();
             goto jit_return;
         }
     }
 #endif
 
     bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
     if (!ScriptPrologue(cx, regs.fp(), newType))
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -139,16 +139,21 @@ BoxNonStrictThis(JSContext *cx, const Ca
  * Ensure that fp->thisValue() is the correct value of |this| for the scripted
  * call represented by |fp|. ComputeThis is necessary because fp->thisValue()
  * may be set to 'undefined' when 'this' should really be the global object (as
  * an optimization to avoid global-this computation).
  */
 inline bool
 ComputeThis(JSContext *cx, StackFrame *fp);
 
+enum MaybeConstruct {
+    NO_CONSTRUCT = INITIAL_NONE,
+    CONSTRUCT = INITIAL_CONSTRUCT
+};
+
 /*
  * The js::InvokeArgumentsGuard passed to js_Invoke must come from an
  * immediately-enclosing successful call to js::StackSpace::pushInvokeArgs,
  * i.e., there must have been no un-popped pushes to cx->stack. Furthermore,
  * |args.getvp()[0]| should be the callee, |args.getvp()[1]| should be |this|,
  * and the range [args.getvp() + 2, args.getvp() + 2 + args.getArgc()) should
  * be initialized actual arguments.
  */
--- a/js/src/jsinterpinlines.h
+++ b/js/src/jsinterpinlines.h
@@ -154,17 +154,17 @@ InvokeSessionGuard::invoke(JSContext *cx
     fp->resetCallFrame(script_);
 
     JSBool ok;
     {
         AutoPreserveEnumerators preserve(cx);
         args_.setActive();  /* From js::Invoke(InvokeArgsGuard) overload. */
         Probes::enterJSFun(cx, fp->fun(), script_);
 #ifdef JS_METHODJIT
-        ok = mjit::EnterMethodJIT(cx, fp, code, stackLimit_);
+        ok = mjit::EnterMethodJIT(cx, fp, code, stackLimit_, /* partial = */ false);
         cx->regs().pc = stop_;
 #else
         cx->regs().pc = script_->code;
         ok = Interpret(cx, cx->fp());
 #endif
         Probes::exitJSFun(cx, fp->fun(), script_);
         args_.setInactive();
     }
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -5738,17 +5738,17 @@ SynthesizeFrame(JSContext* cx, const Fra
     regs.sp = fp->slots() + fi.spdist;
     regs.pc = fi.pc;
     if (fi.imacpc)
         fp->setImacropc(fi.imacpc);
 
     /* Push a frame for the call. */
     CallArgs args = CallArgsFromSp(fi.get_argc(), regs.sp);
     cx->stack.pushInlineFrame(cx, regs, args, *callee, newfun, newscript,
-                              MaybeConstructFromBool(fi.is_constructing()),
+                              InitialFrameFlagsFromConstructing(fi.is_constructing()),
                               NoCheck());
 
 #ifdef DEBUG
     /* These should be initialized by FlushNativeStackFrame. */
     regs.fp()->thisValue().setMagic(JS_THIS_POISON);
     regs.fp()->setScopeChainNoCallObj(*StackFrame::sInvalidScopeChain);
 #endif
 
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -3629,17 +3629,17 @@ mjit::Compiler::inlineCallHelper(uint32 
     callIC.hotPathLabel = masm.label();
 
     uint32 flags = 0;
     if (callingNew)
         flags |= StackFrame::CONSTRUCTING;
 
     InlineFrameAssembler inlFrame(masm, callIC, flags);
     callPatch.hasFastNcode = true;
-    callPatch.fastNcodePatch = inlFrame.assemble(NULL);
+    callPatch.fastNcodePatch = inlFrame.assemble(NULL, PC);
 
     callIC.hotJump = masm.jump();
     callIC.joinPoint = callPatch.joinPoint = masm.label();
     callIC.callIndex = callSites.length();
     addReturnSite();
     if (lowerFunCallOrApply)
         uncachedCallPatch.joinPoint = callIC.joinPoint;
 
--- a/js/src/methodjit/InlineFrameAssembler.h
+++ b/js/src/methodjit/InlineFrameAssembler.h
@@ -97,29 +97,35 @@ class InlineFrameAssembler {
     InlineFrameAssembler(Assembler &masm, Compiler::CallGenInfo &gen, uint32 flags)
       : masm(masm), flags(flags), tempRegs(Registers::AvailRegs)
     {
         frameSize = gen.frameSize;
         funObjReg = gen.funObjReg;
         tempRegs.takeReg(funObjReg);
     }
 
-    DataLabelPtr assemble(void *ncode)
+    DataLabelPtr assemble(void *ncode, jsbytecode *pc)
     {
         JS_ASSERT((flags & ~StackFrame::CONSTRUCTING) == 0);
 
         /* Generate StackFrame::initCallFrameCallerHalf. */
 
+        /* Get the actual flags to write. */
+        JS_ASSERT(!(flags & ~StackFrame::CONSTRUCTING));
+        uint32 flags = this->flags | StackFrame::FUNCTION;
+        if (frameSize.lowered(pc))
+            flags |= StackFrame::LOWERED_CALL_APPLY;
+
         DataLabelPtr ncodePatch;
         if (frameSize.isStatic()) {
             uint32 frameDepth = frameSize.staticLocalSlots();
             AdjustedFrame newfp(sizeof(StackFrame) + frameDepth * sizeof(Value));
 
             Address flagsAddr = newfp.addrOf(StackFrame::offsetOfFlags());
-            masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr);
+            masm.store32(Imm32(flags), flagsAddr);
             Address prevAddr = newfp.addrOf(StackFrame::offsetOfPrev());
             masm.storePtr(JSFrameReg, prevAddr);
             Address ncodeAddr = newfp.addrOf(StackFrame::offsetOfNcode());
             ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr);
 
             masm.addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(Value)), JSFrameReg);
         } else {
             /*
@@ -129,17 +135,17 @@ class InlineFrameAssembler {
              * stores the dynamic stack pointer (i.e., regs.sp after pushing a
              * dynamic number of arguments) to VMFrame.regs, so we just load it
              * here to get the new frame pointer.
              */
             RegisterID newfp = tempRegs.takeAnyReg().reg();
             masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), newfp);
 
             Address flagsAddr(newfp, StackFrame::offsetOfFlags());
-            masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr);
+            masm.store32(Imm32(flags), flagsAddr);
             Address prevAddr(newfp, StackFrame::offsetOfPrev());
             masm.storePtr(JSFrameReg, prevAddr);
             Address ncodeAddr(newfp, StackFrame::offsetOfNcode());
             ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr);
 
             masm.move(newfp, JSFrameReg);
             tempRegs.putReg(newfp);
         }
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -245,28 +245,28 @@ stubs::FixupArity(VMFrame &f, uint32 nac
     JS_ASSERT(nactual != oldfp->numFormalArgs());
 
     /*
      * Grossssss! *move* the stack frame. If this ends up being perf-critical,
      * we can figure out how to spot-optimize it. Be careful to touch only the
      * members that have been initialized by initJitFrameCallerHalf and the
      * early prologue.
      */
-    MaybeConstruct construct = oldfp->isConstructing();
-    JSFunction *fun          = oldfp->fun();
-    JSScript *script         = fun->script();
-    void *ncode              = oldfp->nativeReturnAddress();
+    InitialFrameFlags initial = oldfp->initialFlags();
+    JSFunction *fun           = oldfp->fun();
+    JSScript *script          = fun->script();
+    void *ncode               = oldfp->nativeReturnAddress();
 
     /* Pop the inline frame. */
     f.regs.popPartialFrame((Value *)oldfp);
 
     /* Reserve enough space for a callee frame. */
     CallArgs args = CallArgsFromSp(nactual, f.regs.sp);
     StackFrame *fp = cx->stack.getFixupFrame(cx, f.regs, args, fun, script, ncode,
-                                             construct, LimitCheck(&f.stackLimit, ncode));
+                                             initial, LimitCheck(&f.stackLimit, ncode));
 
     /*
      * Note: this function is called without f.regs intact, but if the previous
      * call failed it will use ncode to set f.regs to reflect the state at the
      * call site. We can't use the value for ncode now as generating the
      * exception may have caused us to discard the caller's code.
      */
     if (!fp)
@@ -288,48 +288,56 @@ stubs::CompileFunction(VMFrame &f, uint3
     /*
      * Note: the stubRejoin kind for the frame was written before the call, and
      * needs to be cleared out on all return paths (doing this directly in the
      * IC stub will not handle cases where we recompiled or threw).
      */
     JS_ASSERT_IF(f.cx->typeInferenceEnabled(), f.stubRejoin);
     ResetStubRejoin reset(f);
 
-    bool isConstructing = f.fp()->isConstructing();
+    InitialFrameFlags initial = f.fp()->initialFlags();
     f.regs.popPartialFrame((Value *)f.fp());
 
-    return isConstructing ? UncachedNew(f, argc) : UncachedCall(f, argc);
+    if (InitialFrameFlagsAreConstructing(initial))
+        return UncachedNew(f, argc);
+    else if (InitialFrameFlagsAreLowered(initial))
+        return UncachedLoweredCall(f, argc);
+    else
+        return UncachedCall(f, argc);
 }
 
 static inline bool
-UncachedInlineCall(VMFrame &f, MaybeConstruct construct, void **pret, bool *unjittable, uint32 argc)
+UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
+                   void **pret, bool *unjittable, uint32 argc)
 {
     JSContext *cx = f.cx;
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
     JSObject &callee = args.callee();
     JSFunction *newfun = callee.getFunctionPrivate();
     JSScript *newscript = newfun->script();
 
+    bool construct = InitialFrameFlagsAreConstructing(initial);
+
     bool newType = construct && cx->typeInferenceEnabled() &&
         types::UseNewType(cx, f.script(), f.pc());
 
     types::TypeMonitorCall(cx, args, construct);
 
     /*
      * Preserve f.regs.fp while pushing the new frame, for the invariant that
      * f.regs reflects the state when we entered the stub call. This handoff is
      * tricky: we need to make sure that f.regs is not updated to the new
      * frame, and we also need to ensure that cx->regs still points to f.regs
      * when space is reserved, in case doing so throws an exception.
      */
     FrameRegs regs = f.regs;
 
     /* Get pointer to new frame/slots, prepare arguments. */
     LimitCheck check(&f.stackLimit, NULL);
-    if (!cx->stack.pushInlineFrame(cx, regs, args, callee, newfun, newscript, construct, check))
+    if (!cx->stack.pushInlineFrame(cx, regs, args, callee, newfun, newscript, initial, check))
         return false;
 
     /* Finish the handoff to the new frame regs. */
     PreserveRegsGuard regsGuard(cx, regs);
 
     /* Scope with a call object parented by callee's parent. */
     if (newfun->isHeavyweight() && !js::CreateFunCallObject(cx, regs.fp()))
         return false;
@@ -384,30 +392,38 @@ stubs::UncachedNewHelper(VMFrame &f, uin
 {
     ucr->init();
     JSContext *cx = f.cx;
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
 
     /* Try to do a fast inline call before the general Invoke path. */
     if (IsFunctionObject(args.calleev(), &ucr->fun) && ucr->fun->isInterpretedConstructor()) {
         ucr->callee = &args.callee();
-        if (!UncachedInlineCall(f, CONSTRUCT, &ucr->codeAddr, &ucr->unjittable, argc))
+        if (!UncachedInlineCall(f, INITIAL_CONSTRUCT, &ucr->codeAddr, &ucr->unjittable, argc))
             THROW();
     } else {
         if (!InvokeConstructor(cx, args))
             THROW();
         f.script()->types.monitor(cx, f.pc(), args.rval());
     }
 }
 
 void * JS_FASTCALL
 stubs::UncachedCall(VMFrame &f, uint32 argc)
 {
     UncachedCallResult ucr;
-    UncachedCallHelper(f, argc, &ucr);
+    UncachedCallHelper(f, argc, false, &ucr);
+    return ucr.codeAddr;
+}
+
+void * JS_FASTCALL
+stubs::UncachedLoweredCall(VMFrame &f, uint32 argc)
+{
+    UncachedCallResult ucr;
+    UncachedCallHelper(f, argc, true, &ucr);
     return ucr.codeAddr;
 }
 
 void JS_FASTCALL
 stubs::Eval(VMFrame &f, uint32 argc)
 {
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
 
@@ -422,29 +438,30 @@ stubs::Eval(VMFrame &f, uint32 argc)
     JS_ASSERT(f.fp() == f.cx->fp());
     if (!DirectEval(f.cx, args))
         THROW();
 
     f.script()->types.monitor(f.cx, f.pc(), args.rval());
 }
 
 void
-stubs::UncachedCallHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr)
+stubs::UncachedCallHelper(VMFrame &f, uint32 argc, bool lowered, UncachedCallResult *ucr)
 {
     ucr->init();
 
     JSContext *cx = f.cx;
     CallArgs args = CallArgsFromSp(argc, f.regs.sp);
 
     if (IsFunctionObject(args.calleev(), &ucr->callee)) {
         ucr->callee = &args.callee();
         ucr->fun = GET_FUNCTION_PRIVATE(cx, ucr->callee);
 
         if (ucr->fun->isInterpreted()) {
-            if (!UncachedInlineCall(f, NO_CONSTRUCT, &ucr->codeAddr, &ucr->unjittable, argc))
+            InitialFrameFlags initial = lowered ? INITIAL_LOWERED : INITIAL_NONE;
+            if (!UncachedInlineCall(f, initial, &ucr->codeAddr, &ucr->unjittable, argc))
                 THROW();
             return;
         }
 
         if (ucr->fun->isNative()) {
             if (!CallJSNative(cx, ucr->fun->u.n.native, args))
                 THROW();
             f.script()->types.monitor(cx, f.pc(), args.rval());
@@ -867,17 +884,17 @@ EvaluateExcessFrame(VMFrame &f, StackFra
      * RETURN, RETRVAL, etc. We check for finished frames BEFORE looking
      * for a safe point. If the frame was finished, we could have already
      * called ScriptEpilogue(), and entering the JIT could call it twice.
      */
     if (!fp->hasImacropc() && FrameIsFinished(cx))
         return HandleFinishedFrame(f, entryFrame);
 
     if (void *ncode = AtSafePoint(cx)) {
-        if (!JaegerShotAtSafePoint(cx, ncode))
+        if (!JaegerShotAtSafePoint(cx, ncode, false))
             return false;
         InlineReturn(f);
         AdvanceReturnPC(cx);
         return true;
     }
 
     return PartialInterpret(f);
 }
@@ -1266,18 +1283,17 @@ js_InternalInterpret(void *returnData, v
     JS_ASSERT(&cx->regs() == &f.regs);
 
 #ifdef JS_METHODJIT_SPEW
     JaegerSpew(JSpew_Recompile, "interpreter rejoin (file \"%s\") (line \"%d\") (op %s) (opline \"%d\")\n",
                script->filename, script->lineno, OpcodeNames[op], js_PCToLineNumber(cx, script, pc));
 #endif
 
     uint32 nextDepth = uint32(-1);
-
-    InterpMode interpMode = JSINTERP_REJOIN;
+    bool skipTrap = false;
 
     if ((cs->format & (JOF_INC | JOF_DEC)) &&
         rejoin != REJOIN_FALLTHROUGH &&
         rejoin != REJOIN_RESUME &&
         rejoin != REJOIN_THIS_PROTOTYPE &&
         rejoin != REJOIN_CHECK_ARGUMENTS) {
         /* We may reenter the interpreter while finishing the INC/DEC operation. */
         nextDepth = analysis->getCode(nextpc).stackDepth;
@@ -1354,18 +1370,22 @@ js_InternalInterpret(void *returnData, v
       case REJOIN_NONE:
         JS_NOT_REACHED("Unpossible rejoin!");
         break;
 
       case REJOIN_RESUME:
         break;
 
       case REJOIN_TRAP:
-        /* Watch out for the case where the TRAP removed itself. */
-        interpMode = untrap.trap ? JSINTERP_SKIP_TRAP : JSINTERP_REJOIN;
+        /*
+         * Make sure when resuming in the interpreter we do not execute the
+         * trap again. Watch out for the case where the TRAP removed itself.
+         */
+        if (untrap.trap)
+            skipTrap = true;
         break;
 
       case REJOIN_FALLTHROUGH:
         f.regs.pc = nextpc;
         break;
 
       case REJOIN_NATIVE:
       case REJOIN_NATIVE_LOWERED: {
@@ -1423,23 +1443,23 @@ js_InternalInterpret(void *returnData, v
          * the stack check and late prologue have not been performed.
          */
         if (!CheckStackQuota(f))
             return js_InternalThrow(f);
         if (fp->fun()->isHeavyweight()) {
             if (!js::CreateFunCallObject(cx, fp))
                 return js_InternalThrow(f);
         }
+
+        fp->scopeChain();
         SetValueRangeToUndefined(fp->slots(), script->nfixed);
 
-        /*
-         * Use the normal interpreter mode, which will construct the 'this'
-         * object if this is a constructor frame.
-         */
-        interpMode = JSINTERP_NORMAL;
+        /* Construct the 'this' object for the frame if necessary. */
+        if (!ScriptPrologueOrGeneratorResume(cx, fp, types::UseNewTypeAtEntry(cx, fp)))
+            return js_InternalThrow(f);
         break;
       }
 
       case REJOIN_CALL_PROLOGUE:
       case REJOIN_CALL_PROLOGUE_LOWERED_CALL:
       case REJOIN_CALL_PROLOGUE_LOWERED_APPLY:
         if (returnReg) {
             uint32 argc = 0;
@@ -1593,31 +1613,15 @@ js_InternalInterpret(void *returnData, v
       default:
         JS_NOT_REACHED("Missing rejoin");
     }
 
     if (nextDepth == uint32(-1))
         nextDepth = analysis->getCode(f.regs.pc).stackDepth;
     f.regs.sp = fp->base() + nextDepth;
 
-    /* Reinsert any trap before resuming in the interpreter. */
-    untrap.retrap();
-
-    /* Release lock on analysis data before resuming. */
-    enter.leave();
-
-    if (!Interpret(cx, NULL, interpMode))
-        return js_InternalThrow(f);
-
-    /* The interpreter should have finished its entry frame. */
-    JS_ASSERT(f.regs.fp() == fp);
+    /* Mark the entry frame as unfinished, and update the regs to resume at. */
+    JaegerStatus status = skipTrap ? Jaeger_UnfinishedAtTrap : Jaeger_Unfinished;
+    cx->compartment->jaegerCompartment()->setLastUnfinished(status);
+    *f.oldregs = f.regs;
 
-    /* Force construction of the frame's return value, if it was not set. */
-    fp->returnValue();
-
-    /*
-     * The frame is done, but if it finished in the interpreter the call/args
-     * objects need to be detached from the frame.
-     */
-    fp->putActivationObjects();
-
-    return fp->nativeReturnAddress();
+    return NULL;
 }
--- a/js/src/methodjit/MethodJIT-inl.h
+++ b/js/src/methodjit/MethodJIT-inl.h
@@ -75,46 +75,24 @@ CanMethodJIT(JSContext *cx, JSScript *sc
     {
         return Compile_Skipped;
     }
     if (status == JITScript_None)
         return TryCompile(cx, fp);
     return Compile_Okay;
 }
 
-static inline bool
-RecursiveMethodJIT(JSContext *cx, StackFrame *fp)
-{
-    if (!cx->compartment->hasJaegerCompartment())
-        return false;
-
-    /*
-     * We can recursively enter the method JIT on a single stack frame by
-     * taking back edges, compiling, getting kicked back into the interpreter
-     * and repeating. Watch for this case here, and finish the frame in the
-     * interpreter. :XXX: should be more robust.
-     */
-    static const unsigned RECURSIVE_METHODJIT_LIMIT = 10;
-    VMFrame *f = cx->compartment->jaegerCompartment()->activeFrame();
-    for (unsigned i = 0; i < RECURSIVE_METHODJIT_LIMIT; i++) {
-        if (!f || f->entryfp != fp)
-            return false;
-        f = f->previous;
-    }
-    return true;
-}
-
 /*
  * Called from a backedge in the interpreter to decide if we should transition to the
  * methodjit. If so, we compile the given function.
  */
 static inline CompileStatus
 CanMethodJITAtBranch(JSContext *cx, JSScript *script, StackFrame *fp, jsbytecode *pc)
 {
-    if (!cx->methodJitEnabled || RecursiveMethodJIT(cx, fp))
+    if (!cx->methodJitEnabled)
         return Compile_Abort;
     JITScriptStatus status = script->getJITStatus(fp->isConstructing());
     if (status == JITScript_Invalid)
         return Compile_Abort;
     if (status == JITScript_None && !cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS)) {
         /*
          * Backedges are counted differently with type inference vs. with the
          * tracer. For inference, we use the script's use count, so that we can
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -127,31 +127,31 @@ StackFrame::methodjitStaticAsserts()
 static const size_t STUB_CALLS_FOR_OP_COUNT = 255;
 static uint32 StubCallsForOp[STUB_CALLS_FOR_OP_COUNT];
 #endif
 
 extern "C" void JS_FASTCALL
 PushActiveVMFrame(VMFrame &f)
 {
     f.entryfp->script()->compartment->jaegerCompartment()->pushActiveFrame(&f);
-    f.entryncode = f.entryfp->nativeReturnAddress();
     f.entryfp->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
     f.regs.clearInlined();
 }
 
 extern "C" void JS_FASTCALL
 PopActiveVMFrame(VMFrame &f)
 {
     f.entryfp->script()->compartment->jaegerCompartment()->popActiveFrame();
-    f.entryfp->setNativeReturnAddress(f.entryncode);
 }
 
 extern "C" void JS_FASTCALL
 SetVMFrameRegs(VMFrame &f)
 {
+    f.oldregs = &f.cx->stack.regs();
+
     /* Restored on exit from EnterMethodJIT. */
     f.cx->stack.repointRegs(&f.regs);
 }
 
 #if defined(__APPLE__) || (defined(XP_WIN) && !defined(JS_CPU_X64)) || defined(XP_OS2)
 # define SYMBOL_STRING(name) "_" #name
 #else
 # define SYMBOL_STRING(name) #name
@@ -833,16 +833,17 @@ JaegerCompartment::Initialize()
     }
 
 #ifdef JS_METHODJIT_PROFILE_STUBS
     for (size_t i = 0; i < STUB_CALLS_FOR_OP_COUNT; ++i)
         StubCallsForOp[i] = 0;
 #endif
 
     activeFrame_ = NULL;
+    lastUnfinished_ = (JaegerStatus) 0;
 
     return true;
 }
 
 void
 JaegerCompartment::Finish()
 {
     TrampolineCompiler::release(&trampolines);
@@ -855,101 +856,123 @@ JaegerCompartment::Finish()
 # undef OPDEF
     fclose(fp);
 #endif
 }
 
 extern "C" JSBool
 JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit);
 
-JSBool
-mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit)
+JaegerStatus
+mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit, bool partial)
 {
 #ifdef JS_METHODJIT_SPEW
     Profiler prof;
     JSScript *script = fp->script();
 
     JaegerSpew(JSpew_Prof, "%s jaeger script, line %d\n",
                script->filename, script->lineno);
     prof.start();
 #endif
 
     JS_ASSERT(cx->fp() == fp);
     FrameRegs &oldRegs = cx->regs();
 
-    fp->scopeChain();
-    if (fp->isFunctionFrame() && fp->script()->usesArguments)
-        fp->ensureCoherentArgCount();
-
     JSBool ok;
     {
         AssertCompartmentUnchanged pcc(cx);
         JSAutoResolveFlags rf(cx, RESOLVE_INFER);
         ok = JaegerTrampoline(cx, fp, code, stackLimit);
     }
 
+#ifdef JS_METHODJIT_SPEW
+    prof.stop();
+    JaegerSpew(JSpew_Prof, "script run took %d ms\n", prof.time_ms());
+#endif
+
     /* Undo repointRegs in SetVMFrameRegs. */
     cx->stack.repointRegs(&oldRegs);
+
+    JaegerStatus status = cx->compartment->jaegerCompartment()->lastUnfinished();
+    if (status) {
+        if (partial) {
+            /*
+             * Being called from the interpreter, which will resume execution
+             * where the JIT left off.
+             */
+            return status;
+        }
+
+        /*
+         * Call back into the interpreter to finish the initial frame. This may
+         * invoke EnterMethodJIT again, but will allow partial execution for
+         * that recursive invocation, so we can have at most two VM frames for
+         * a range of inline frames.
+         */
+        InterpMode mode = (status == Jaeger_UnfinishedAtTrap)
+            ? JSINTERP_SKIP_TRAP
+            : JSINTERP_REJOIN;
+        ok = Interpret(cx, fp, mode);
+
+        return ok ? Jaeger_Returned : Jaeger_Throwing;
+    }
+
+    /* The entry frame should have finished. */
     JS_ASSERT(fp == cx->fp());
 
     if (ok) {
         /* The trampoline wrote the return value but did not set the HAS_RVAL flag. */
         fp->markReturnValue();
     }
 
     /* See comment in mjit::Compiler::emitReturn. */
     fp->markActivationObjectsAsPut();
 
-#ifdef JS_METHODJIT_SPEW
-    prof.stop();
-    JaegerSpew(JSpew_Prof, "script run took %d ms\n", prof.time_ms());
-#endif
-
-    return ok;
+    return ok ? Jaeger_Returned : Jaeger_Throwing;
 }
 
-static inline JSBool
-CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code)
+static inline JaegerStatus
+CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, bool partial)
 {
-    JS_CHECK_RECURSION(cx, return false);
+    JS_CHECK_RECURSION(cx, return Jaeger_Throwing);
 
     JS_ASSERT(!cx->compartment->activeAnalysis);
 
     Value *stackLimit = cx->stack.space().getStackLimit(cx);
     if (!stackLimit)
-        return false;
+        return Jaeger_Throwing;
 
-    return EnterMethodJIT(cx, fp, code, stackLimit);
+    return EnterMethodJIT(cx, fp, code, stackLimit, partial);
 }
 
-JSBool
-mjit::JaegerShot(JSContext *cx)
+JaegerStatus
+mjit::JaegerShot(JSContext *cx, bool partial)
 {
     StackFrame *fp = cx->fp();
     JSScript *script = fp->script();
     JITScript *jit = script->getJIT(fp->isConstructing());
 
 #ifdef JS_TRACER
     if (TRACE_RECORDER(cx))
         AbortRecording(cx, "attempt to enter method JIT while recording");
 #endif
 
     JS_ASSERT(cx->regs().pc == script->code);
 
-    return CheckStackAndEnterMethodJIT(cx, cx->fp(), jit->invokeEntry);
+    return CheckStackAndEnterMethodJIT(cx, cx->fp(), jit->invokeEntry, partial);
 }
 
-JSBool
-js::mjit::JaegerShotAtSafePoint(JSContext *cx, void *safePoint)
+JaegerStatus
+js::mjit::JaegerShotAtSafePoint(JSContext *cx, void *safePoint, bool partial)
 {
 #ifdef JS_TRACER
     JS_ASSERT(!TRACE_RECORDER(cx));
 #endif
 
-    return CheckStackAndEnterMethodJIT(cx, cx->fp(), safePoint);
+    return CheckStackAndEnterMethodJIT(cx, cx->fp(), safePoint, partial);
 }
 
 NativeMapEntry *
 JITScript::nmap() const
 {
     return (NativeMapEntry *)((char*)this + sizeof(JITScript));
 }
 
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -107,17 +107,17 @@ struct VMFrame
     } u;
 
     VMFrame      *previous;
     void         *scratch;
     FrameRegs    regs;
     JSContext    *cx;
     Value        *stackLimit;
     StackFrame   *entryfp;
-    void         *entryncode;
+    FrameRegs    *oldregs;
     JSRejoinState stubRejoin;  /* How to rejoin if inside a call from an IC stub. */
 
 #if defined(JS_CPU_X86)
     void         *unused0, *unused1;  /* For 16 byte alignment */
 #endif
 
 #if defined(JS_CPU_X86)
     void *savedEBX;
@@ -349,26 +349,51 @@ struct Trampolines {
     JSC::ExecutablePool *forceReturnPool;
 
 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
     TrampolinePtr       forceReturnFast;
     JSC::ExecutablePool *forceReturnFastPool;
 #endif
 };
 
+/* Result status of executing mjit code on a frame. */
+enum JaegerStatus
+{
+    /* Entry frame finished, and is throwing an exception. */
+    Jaeger_Throwing = 0,
+
+    /* Entry frame finished, and is returning. */
+    Jaeger_Returned = 1,
+
+    /*
+     * Entry frame did not finish. cx->regs reflects where to resume execution.
+     * This result is only possible if 'partial' is passed as true below.
+     */
+    Jaeger_Unfinished = 2,
+
+    /*
+     * As for Unfinished, but stopped after a TRAP triggered recompilation.
+     * The trap has been reinstalled, but should not execute again when
+     * resuming execution.
+     */
+    Jaeger_UnfinishedAtTrap = 3
+};
+
 /*
  * Method JIT compartment data. Currently, there is exactly one per
  * JS compartment. It would be safe for multiple JS compartments to
  * share a JaegerCompartment as long as only one thread can enter
  * the JaegerCompartment at a time.
  */
 class JaegerCompartment {
     JSC::ExecutableAllocator *execAlloc_;    // allocator for jit code
     Trampolines              trampolines;    // force-return trampolines
     VMFrame                  *activeFrame_;  // current active VMFrame
+    JaegerStatus             lastUnfinished_;// result status of last VM frame,
+                                             // if unfinished
 
     void Finish();
 
   public:
     bool Initialize();
 
     JaegerCompartment();
     ~JaegerCompartment() { Finish(); }
@@ -377,26 +402,38 @@ class JaegerCompartment {
         return execAlloc_;
     }
 
     VMFrame *activeFrame() {
         return activeFrame_;
     }
 
     void pushActiveFrame(VMFrame *f) {
+        JS_ASSERT(!lastUnfinished_);
         f->previous = activeFrame_;
         f->scratch = NULL;
         activeFrame_ = f;
     }
 
     void popActiveFrame() {
         JS_ASSERT(activeFrame_);
         activeFrame_ = activeFrame_->previous;
     }
 
+    void setLastUnfinished(JaegerStatus status) {
+        JS_ASSERT(!lastUnfinished_);
+        lastUnfinished_ = status;
+    }
+
+    JaegerStatus lastUnfinished() {
+        JaegerStatus result = lastUnfinished_;
+        lastUnfinished_ = (JaegerStatus) 0;
+        return result;
+    }
+
     void *forceReturnFromExternC() const {
         return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturn);
     }
 
     void *forceReturnFromFastCall() const {
 #if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
         return JS_FUNC_TO_DATA_PTR(void *, trampolines.forceReturnFast);
 #else
@@ -596,23 +633,24 @@ struct JITScript {
     char *monoICSectionsLimit() const;
     char *polyICSectionsLimit() const;
 };
 
 /*
  * Execute the given mjit code. This is a low-level call and callers must
  * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
  */
-JSBool EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit);
+JaegerStatus EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit,
+                            bool partial);
 
 /* Execute a method that has been JIT compiled. */
-JSBool JaegerShot(JSContext *cx);
+JaegerStatus JaegerShot(JSContext *cx, bool partial);
 
 /* Drop into the middle of a method at an arbitrary point, and execute. */
-JSBool JaegerShotAtSafePoint(JSContext *cx, void *safePoint);
+JaegerStatus JaegerShotAtSafePoint(JSContext *cx, void *safePoint, bool partial);
 
 enum CompileStatus
 {
     Compile_Okay,
     Compile_Abort,        // abort compilation
     Compile_InlineAbort,  // inlining attempt failed, continue compilation
     Compile_Retry,        // static overflow or failed inline, try to recompile
     Compile_Error,        // OOM
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -632,17 +632,18 @@ class CallCompiler : public BaseCompiler
          * checks for compilability. Perhaps this should be a separate, shared
          * trampoline, but for now we generate it dynamically.
          */
         Assembler masm;
         InlineFrameAssembler inlFrame(masm, ic, flags);
         RegisterID t0 = inlFrame.tempRegs.takeAnyReg().reg();
 
         /* Generate the inline frame creation. */
-        inlFrame.assemble(ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress());
+        void *ncode = ic.funGuard.labelAtOffset(ic.joinPointOffset).executableAddress();
+        inlFrame.assemble(ncode, f.pc());
 
         /* funPtrReg is still valid. Check if a compilation is needed. */
         Address scriptAddr(ic.funPtrReg, offsetof(JSFunction, u) +
                            offsetof(JSFunction::U::Scripted, script));
         masm.loadPtr(scriptAddr, t0);
 
         /*
          * Test if script->nmap is NULL - same as checking ncode, but faster
@@ -678,17 +679,17 @@ class CallCompiler : public BaseCompiler
                                 compilePtr, f.regs.pc, &inlined, -1);
         }
 
         Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                               Registers::ReturnReg);
         masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), JSFrameReg);
 
         /* Compute the value of ncode to use at this call site. */
-        uint8 *ncode = (uint8 *) f.jit()->code.m_code.executableAddress() + ic.call->codeOffset;
+        ncode = (uint8 *) f.jit()->code.m_code.executableAddress() + ic.call->codeOffset;
         masm.storePtr(ImmPtr(ncode), Address(JSFrameReg, StackFrame::offsetOfNcode()));
 
         masm.jump(Registers::ReturnReg);
 
         hasCode.linkTo(masm.label(), &masm);
 
         /* Get nmap[ARITY], set argc, call. */
         if (ic.frameSize.isStatic())
@@ -1059,21 +1060,24 @@ class CallCompiler : public BaseCompiler
     }
 
     void *update()
     {
         StackFrame *fp = f.fp();
         JITScript *jit = fp->jit();
         RecompilationMonitor monitor(cx);
 
+        bool lowered = ic.frameSize.lowered(f.pc());
+        JS_ASSERT_IF(lowered, !callingNew);
+
         stubs::UncachedCallResult ucr;
         if (callingNew)
             stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
         else
-            stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), &ucr);
+            stubs::UncachedCallHelper(f, ic.frameSize.getArgc(f), lowered, &ucr);
 
         // Watch out in case the IC was invalidated by a recompilation on the calling
         // script. This can happen either if the callee is executed or if it compiles
         // and the compilation has a static overflow.
         if (monitor.recompiled())
             return ucr.codeAddr;
 
         // If the function cannot be jitted (generally unjittable or empty script),
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -97,16 +97,20 @@ class FrameSize
         if (isStatic()) {
             if (staticArgc() == GET_ARGC(pc))
                 return native ? REJOIN_NATIVE : REJOIN_CALL_PROLOGUE;
             JS_ASSERT(staticArgc() == GET_ARGC(pc) - 1);
             return native ? REJOIN_NATIVE_LOWERED : REJOIN_CALL_PROLOGUE_LOWERED_CALL;
         }
         return native ? REJOIN_NATIVE_LOWERED : REJOIN_CALL_PROLOGUE_LOWERED_APPLY;
     }
+
+    bool lowered(jsbytecode *pc) {
+        return !isStatic() || staticArgc() != GET_ARGC(pc);
+    }
 };
 
 namespace ic {
 
 struct GlobalNameIC
 {
     typedef JSC::MacroAssembler::RegisterID RegisterID;
 
--- a/js/src/methodjit/Retcon.cpp
+++ b/js/src/methodjit/Retcon.cpp
@@ -403,20 +403,16 @@ Recompiler::recompile(bool resetUses)
                 // check for a scripted call returning into the recompiled script.
                 // this misses scanning the entry fp, which cannot return directly
                 // into JIT code.
                 void **addr = next->addressOfNativeReturnAddress();
 
                 if (JITCodeReturnAddress(*addr)) {
                     JS_ASSERT(fp->jit()->isValidCode(*addr));
                     patchCall(fp->jit(), fp, addr);
-                } else if (nextf && nextf->entryfp == next &&
-                           JITCodeReturnAddress(nextf->entryncode)) {
-                    JS_ASSERT(fp->jit()->isValidCode(nextf->entryncode));
-                    patchCall(fp->jit(), fp, &nextf->entryncode);
                 }
             }
 
             next = fp;
         }
 
         /*
          * Check if the VMFrame returns directly into the recompiled script.
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -66,16 +66,17 @@ void JS_FASTCALL InitMethod(VMFrame &f, 
 
 void JS_FASTCALL HitStackQuota(VMFrame &f);
 void * JS_FASTCALL FixupArity(VMFrame &f, uint32 argc);
 void * JS_FASTCALL CompileFunction(VMFrame &f, uint32 argc);
 void JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
 void JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
 void * JS_FASTCALL UncachedNew(VMFrame &f, uint32 argc);
 void * JS_FASTCALL UncachedCall(VMFrame &f, uint32 argc);
+void * JS_FASTCALL UncachedLoweredCall(VMFrame &f, uint32 argc);
 void JS_FASTCALL Eval(VMFrame &f, uint32 argc);
 void JS_FASTCALL ScriptDebugPrologue(VMFrame &f);
 void JS_FASTCALL ScriptDebugEpilogue(VMFrame &f);
 void JS_FASTCALL ScriptProbeOnlyPrologue(VMFrame &f);
 void JS_FASTCALL ScriptProbeOnlyEpilogue(VMFrame &f);
 
 /*
  * Result struct for UncachedXHelper.
@@ -102,17 +103,17 @@ struct UncachedCallResult {
     }        
 };
 
 /*
  * Helper functions for stubs and IC functions for calling functions.
  * These functions either execute the function, return a native code
  * pointer that can be used to call the function, or throw.
  */
-void UncachedCallHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr);
+void UncachedCallHelper(VMFrame &f, uint32 argc, bool lowered, UncachedCallResult *ucr);
 void UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr);
 
 void JS_FASTCALL CreateThis(VMFrame &f, JSObject *proto);
 void JS_FASTCALL Throw(VMFrame &f);
 void JS_FASTCALL PutActivationObjects(VMFrame &f);
 void JS_FASTCALL CreateFunCallObject(VMFrame &f);
 #if JS_MONOIC
 void * JS_FASTCALL InvokeTracer(VMFrame &f, ic::TraceICInfo *tic);
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -75,16 +75,20 @@ StackFrame::resetGeneratorPrev(JSContext
 {
     flags_ |= HAS_PREVPC;
     initPrev(cx);
 }
 
 inline void
 StackFrame::initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc)
 {
+    /*
+     * Note: no need to ensure the scopeChain is instantiated for inline
+     * frames. Functions which use the scope chain are never inlined.
+     */
     flags_ = StackFrame::FUNCTION;
     exec.fun = fun;
     resetInlinePrev(prevfp, prevpc);
 }
 
 inline void
 StackFrame::resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc)
 {
@@ -95,25 +99,26 @@ StackFrame::resetInlinePrev(StackFrame *
     prevInline_ = NULL;
 }
 
 inline void
 StackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun,
                           JSScript *script, uint32 nactual, StackFrame::Flags flagsArg)
 {
     JS_ASSERT((flagsArg & ~(CONSTRUCTING |
+                            LOWERED_CALL_APPLY |
                             OVERFLOW_ARGS |
                             UNDERFLOW_ARGS)) == 0);
     JS_ASSERT(fun == callee.getFunctionPrivate());
     JS_ASSERT(script == fun->script());
 
     /* Initialize stack frame members. */
     flags_ = FUNCTION | HAS_PREVPC | HAS_SCOPECHAIN | flagsArg;
     exec.fun = fun;
-    args.nactual = nactual;  /* only need to write if over/under-flow */
+    args.nactual = nactual;
     scopeChain_ = callee.getParent();
     ncode_ = NULL;
     initPrev(cx);
     JS_ASSERT(!hasImacropc());
     JS_ASSERT(!hasHookData());
     JS_ASSERT(annotation() == NULL);
     JS_ASSERT(!hasCallObj());
 
@@ -137,45 +142,36 @@ StackFrame::resetCallFrame(JSScript *scr
                            OVERFLOW_ARGS |
                            UNDERFLOW_ARGS |
                            OVERRIDE_ARGS |
                            HAS_PREVPC |
                            HAS_RVAL |
                            HAS_SCOPECHAIN |
                            HAS_ANNOTATION |
                            HAS_HOOK_DATA |
-                           HAS_CALL_OBJ |
-                           HAS_ARGS_OBJ |
                            FINISHED_IN_INTERP |
                            DOWN_FRAMES_EXPANDED)));
 
-    /*
-     * Since the stack frame is usually popped after PutActivationObjects,
-     * these bits aren't cleared. The activation objects must have actually
-     * been put, though.
-     */
-    JS_ASSERT_IF(flags_ & HAS_CALL_OBJ, callObj().getPrivate() == NULL);
-    JS_ASSERT_IF(flags_ & HAS_ARGS_OBJ, argsObj().getPrivate() == NULL);
-
     flags_ &= FUNCTION |
               OVERFLOW_ARGS |
               HAS_PREVPC |
               UNDERFLOW_ARGS;
 
     JS_ASSERT(exec.fun == callee().getFunctionPrivate());
     scopeChain_ = callee().getParent();
 
     SetValueRangeToUndefined(slots(), script->nfixed);
 }
 
 inline void
 StackFrame::initJitFrameCallerHalf(JSContext *cx, StackFrame::Flags flags,
                                     void *ncode)
 {
     JS_ASSERT((flags & ~(CONSTRUCTING |
+                         LOWERED_CALL_APPLY |
                          FUNCTION |
                          OVERFLOW_ARGS |
                          UNDERFLOW_ARGS)) == 0);
 
     flags_ = FUNCTION | flags;
     prev_ = cx->fp();
     ncode_ = ncode;
 }
@@ -183,18 +179,17 @@ StackFrame::initJitFrameCallerHalf(JSCon
 /*
  * The "early prologue" refers to either the fast path or arity check path up
  * to the "late prologue".
  */
 inline void
 StackFrame::initJitFrameEarlyPrologue(JSFunction *fun, uint32 nactual)
 {
     exec.fun = fun;
-    if (flags_ & (OVERFLOW_ARGS | UNDERFLOW_ARGS))
-        args.nactual = nactual;
+    args.nactual = nactual;
 }
 
 /*
  * The "late prologue" (in generatePrologue) extends from the join point of the
  * fast path and arity check to where the call object is (possibly) created.
  */
 inline bool
 StackFrame::initJitFrameLatePrologue(JSContext *cx, Value **limit)
@@ -205,16 +200,17 @@ StackFrame::initJitFrameLatePrologue(JSC
         ContextStack &stack = cx->stack;
         if (!stack.space().tryBumpLimit(NULL, slots(), nvals, limit)) {
             stack.popFrameAfterOverflow();
             js_ReportOverRecursed(cx);
             return false;
         }
     }
 
+    scopeChain();
     SetValueRangeToUndefined(slots(), script()->nfixed);
     return true;
 }
 
 inline Value &
 StackFrame::canonicalActualArg(uintN i) const
 {
     if (i < numFormalArgs())
@@ -282,29 +278,30 @@ struct CopyTo
         *dst++ = *src;
         return true;
     }
 };
 
 inline uintN
 StackFrame::numActualArgs() const
 {
+    /*
+     * args.nactual is always coherent, except for method JIT frames where the
+     * callee does not access its arguments and the number of actual arguments
+     * matches the number of formal arguments. The JIT requires that all frames
+     * which do not have an arguments object and use their arguments have a
+     * coherent args.nactual (even though the below code may not use it), as
+     * JIT code may access the field directly.
+     */
     JS_ASSERT(hasArgs());
     if (JS_UNLIKELY(flags_ & (OVERFLOW_ARGS | UNDERFLOW_ARGS)))
         return hasArgsObj() ? argsObj().initialLength() : args.nactual;
     return numFormalArgs();
 }
 
-inline void
-StackFrame::ensureCoherentArgCount()
-{
-    if (!hasArgsObj())
-        args.nactual = numActualArgs();
-}
-
 inline Value *
 StackFrame::actualArgs() const
 {
     JS_ASSERT(hasArgs());
     Value *argv = formalArgs();
     if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS)) {
         uintN nactual = hasArgsObj() ? argsObj().initialLength() : args.nactual;
         return argv - (2 + nactual);
@@ -540,25 +537,25 @@ ContextStack::getCallFrame(JSContext *cx
     PodCopy(dst, src, ncopy);
     return reinterpret_cast<StackFrame *>(firstUnused + ncopy);
 }
 
 template <class Check>
 JS_ALWAYS_INLINE bool
 ContextStack::pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
                               JSObject &callee, JSFunction *fun, JSScript *script,
-                              MaybeConstruct construct, Check check)
+                              InitialFrameFlags initial, Check check)
 {
     JS_ASSERT(onTop());
     JS_ASSERT(regs.sp == args.end());
     /* Cannot assert callee == args.callee() since this is called from LeaveTree. */
     JS_ASSERT(callee.getFunctionPrivate() == fun);
     JS_ASSERT(fun->script() == script);
 
-    StackFrame::Flags flags = ToFrameFlags(construct);
+    StackFrame::Flags flags = ToFrameFlags(initial);
     StackFrame *fp = getCallFrame(cx, args, fun, script, &flags, check);
     if (!fp)
         return false;
 
     /* Initialize frame, locals, regs. */
     fp->initCallFrame(cx, callee, fun, script, args.argc(), flags);
 
     /*
@@ -567,25 +564,25 @@ ContextStack::pushInlineFrame(JSContext 
      */
     regs.prepareToRun(*fp, script);
     return true;
 }
 
 JS_ALWAYS_INLINE StackFrame *
 ContextStack::getFixupFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
                             JSFunction *fun, JSScript *script, void *ncode,
-                            MaybeConstruct construct, LimitCheck check)
+                            InitialFrameFlags initial, LimitCheck check)
 {
     JS_ASSERT(onTop());
     JS_ASSERT(&regs == &cx->regs());
     JS_ASSERT(regs.sp == args.end());
     JS_ASSERT(args.callee().getFunctionPrivate() == fun);
     JS_ASSERT(fun->script() == script);
 
-    StackFrame::Flags flags = ToFrameFlags(construct);
+    StackFrame::Flags flags = ToFrameFlags(initial);
     StackFrame *fp = getCallFrame(cx, args, fun, script, &flags, check);
     if (!fp)
         return NULL;
 
     /* Do not init late prologue or regs; this is done by jit code. */
     fp->initJitFrameCallerHalf(cx, flags, ncode);
     fp->initJitFrameEarlyPrologue(fun, args.argc());
     return fp;
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -600,26 +600,26 @@ ContextStack::popInvokeArgs(const Invoke
 
     seg_->popCall();
     if (iag.pushedSeg_)
         popSegment();
 }
 
 bool
 ContextStack::pushInvokeFrame(JSContext *cx, const CallArgs &args,
-                              MaybeConstruct construct, InvokeFrameGuard *ifg)
+                              InitialFrameFlags initial, InvokeFrameGuard *ifg)
 {
     JS_ASSERT(onTop());
     JS_ASSERT(space().firstUnused() == args.end());
 
     JSObject &callee = args.callee();
     JSFunction *fun = callee.getFunctionPrivate();
     JSScript *script = fun->script();
 
-    StackFrame::Flags flags = ToFrameFlags(construct);
+    StackFrame::Flags flags = ToFrameFlags(initial);
     StackFrame *fp = getCallFrame(cx, args, fun, script, &flags, OOMCheck());
     if (!fp)
         return false;
 
     fp->initCallFrame(cx, callee, fun, script, args.argc(), flags);
     ifg->regs_.prepareToRun(*fp, script);
 
     ifg->prevRegs_ = seg_->pushRegs(ifg->regs_);
@@ -955,24 +955,22 @@ StackIter::settleOnNewState()
              *
              * Function.prototype.call will however appear, hence the debugger
              * can, by inspecting 'args.thisv', give some useful information.
              */
             JSOp op = js_GetOpcode(cx_, fp_->script(), pc_);
             if (op == JSOP_CALL || op == JSOP_FUNCALL) {
                 uintN argc = GET_ARGC(pc_);
                 DebugOnly<uintN> spoff = sp_ - fp_->base();
-#if 0
 #ifdef DEBUG
                 if (cx_->stackIterAssertionEnabled) {
                     JS_ASSERT_IF(!fp_->hasImacropc(),
                                  spoff == js_ReconstructStackDepth(cx_, fp_->script(), pc_));
                 }
 #endif
-#endif
                 Value *vp = sp_ - (2 + argc);
 
                 if (IsNativeFunction(*vp)) {
                     state_ = IMPLICIT_NATIVE;
                     args_ = CallArgsFromVp(argc, vp);
                     return;
                 }
             } else if (op == JSOP_FUNAPPLY) {
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -298,19 +298,21 @@ CallArgsListFromArgv(uintN argc, Value *
 JS_ALWAYS_INLINE CallArgsList
 CallArgsListFromVp(uintN argc, Value *vp, CallArgsList *prev)
 {
     return CallArgsListFromArgv(argc, vp + 2, prev);
 }
 
 /*****************************************************************************/
 
-enum MaybeConstruct {
-    NO_CONSTRUCT           =          0, /* == false */
-    CONSTRUCT              =       0x80  /* == StackFrame::CONSTRUCTING, asserted below */
+/* Flags specified for a frame as it is constructed. */
+enum InitialFrameFlags {
+    INITIAL_NONE           =          0,
+    INITIAL_CONSTRUCT      =       0x80, /* == StackFrame::CONSTRUCTING, asserted below */
+    INITIAL_LOWERED        =   0x400000  /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
 };
 
 enum ExecuteType {
     EXECUTE_GLOBAL         =        0x1, /* == StackFrame::GLOBAL */
     EXECUTE_DIRECT_EVAL    =        0x8, /* == StackFrame::EVAL */
     EXECUTE_INDIRECT_EVAL  =        0x9, /* == StackFrame::GLOBAL | EVAL */
     EXECUTE_DEBUG          =       0x18  /* == StackFrame::EVAL | DEBUGGER */
 };
@@ -347,17 +349,19 @@ class StackFrame
         HAS_CALL_OBJ       =     0x4000,  /* frame has a callobj reachable from scopeChain_ */
         HAS_ARGS_OBJ       =     0x8000,  /* frame has an argsobj in StackFrame::args */
         HAS_HOOK_DATA      =    0x10000,  /* frame has hookData_ set */
         HAS_ANNOTATION     =    0x20000,  /* frame has annotation_ set */
         HAS_RVAL           =    0x40000,  /* frame has rval_ set */
         HAS_SCOPECHAIN     =    0x80000,  /* frame has scopeChain_ set */
         HAS_PREVPC         =   0x100000,  /* frame has prevpc_ and prevInline_ set */
 
-        DOWN_FRAMES_EXPANDED = 0x200000   /* inlining in down frames has been expanded */
+        /* Method JIT state */
+        DOWN_FRAMES_EXPANDED = 0x200000,  /* inlining in down frames has been expanded */
+        LOWERED_CALL_APPLY   = 0x400000   /* frame pushed by a lowered call/apply */
     };
 
   private:
     mutable uint32      flags_;         /* bits described by Flags */
     union {                             /* describes what code is executing in a */
         JSScript        *script;        /*   global frame */
         JSFunction      *fun;           /*   function frame, pre GetScopeChain */
     } exec;
@@ -672,17 +676,16 @@ class StackFrame
         return (flags_ & (FUNCTION | EVAL)) == FUNCTION
                ? formalArgs()
                : NULL;
     }
 
     inline uintN numActualArgs() const;
     inline Value *actualArgs() const;
     inline Value *actualArgsEnd() const;
-    inline void ensureCoherentArgCount();
 
     inline Value &canonicalActualArg(uintN i) const;
     template <class Op>
     inline bool forEachCanonicalActualArg(Op op, uintN start = 0, uintN count = uintN(-1));
     template <class Op> inline bool forEachFormalArg(Op op);
 
     bool hasArgsObj() const {
         return !!(flags_ & HAS_ARGS_OBJ);
@@ -805,16 +808,22 @@ class StackFrame
      * performance reasons, call objects are created lazily for 'lightweight'
      * functions, i.e., functions which are not statically known to require a
      * call object. Thus, a given function frame may or may not have a call
      * object. When a function does have a call object, it is found by walking
      * up the scope chain until the first call object. Thus, it is important,
      * when setting the scope chain, to indicate whether the new scope chain
      * contains a new call object and thus changes the 'hasCallObj' state.
      *
+     * The method JIT requires that HAS_SCOPECHAIN be set for all frames which
+     * use NAME or related opcodes that can access the scope chain (so it does
+     * not have to test the bit). To ensure this, we always initialize the
+     * scope chain when pushing frames in the VM, but when pushing frames in
+     * JIT code we initialize it only if the above situation applies.
+     *
      * NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
      * the frame is popped. Since the scope chain of a non-strict eval frame
      * contains the call object of the parent (function) frame, it is possible
      * to have:
      *   !fp->hasCall() && fp->scopeChain().isCall()
      */
 
     JSObject &scopeChain() const {
@@ -932,17 +941,17 @@ class StackFrame
 
     /* Down frame expansion state */
 
     void setDownFramesExpanded() {
         flags_ |= DOWN_FRAMES_EXPANDED;
     }
 
     bool downFramesExpanded() {
-        return flags_ & DOWN_FRAMES_EXPANDED;
+        return !!(flags_ & DOWN_FRAMES_EXPANDED);
     }
 
     /* Debugger hook data */
 
     bool hasHookData() const {
         return !!(flags_ & HAS_HOOK_DATA);
     }
 
@@ -1036,20 +1045,37 @@ class StackFrame
     bool isFramePushedByExecute() const {
         return !!(flags_ & (GLOBAL | EVAL));
     }
 
     /*
      * Other flags
      */
 
-    MaybeConstruct isConstructing() const {
-        JS_STATIC_ASSERT((int)CONSTRUCT == (int)CONSTRUCTING);
-        JS_STATIC_ASSERT((int)NO_CONSTRUCT == 0);
-        return MaybeConstruct(flags_ & CONSTRUCTING);
+    InitialFrameFlags initialFlags() const {
+        JS_STATIC_ASSERT((int)INITIAL_NONE == 0);
+        JS_STATIC_ASSERT((int)INITIAL_CONSTRUCT == (int)CONSTRUCTING);
+        JS_STATIC_ASSERT((int)INITIAL_LOWERED == (int)LOWERED_CALL_APPLY);
+        uint32 mask = CONSTRUCTING | LOWERED_CALL_APPLY;
+        JS_ASSERT((flags_ & mask) != mask);
+        return InitialFrameFlags(flags_ & mask);
+    }
+
+    bool isConstructing() const {
+        return !!(flags_ & CONSTRUCTING);
+    }
+
+    /*
+     * The method JIT call/apply optimization can erase Function.{call,apply}
+     * invocations from the stack and push the callee frame directly. The base
+     * of these frames will be offset by one value, however, which the
+     * interpreter needs to account for if it ends up popping the frame.
+     */
+    bool loweredCallOrApply() const {
+        return !!(flags_ & LOWERED_CALL_APPLY);
     }
 
     bool isDebuggerFrame() const {
         return !!(flags_ & DEBUGGER);
     }
 
     bool hasOverriddenArgs() const {
         return !!(flags_ & OVERRIDE_ARGS);
@@ -1155,33 +1181,43 @@ class StackFrame
 #endif
 
     void methodjitStaticAsserts();
 };
 
 static const size_t VALUES_PER_STACK_FRAME = sizeof(StackFrame) / sizeof(Value);
 
 static inline uintN
-ToReportFlags(MaybeConstruct construct)
+ToReportFlags(InitialFrameFlags initial)
 {
-    return uintN(construct);
+    return uintN(initial & StackFrame::CONSTRUCTING);
 }
 
 static inline StackFrame::Flags
-ToFrameFlags(MaybeConstruct construct)
+ToFrameFlags(InitialFrameFlags initial)
 {
-    JS_STATIC_ASSERT((int)CONSTRUCT == (int)StackFrame::CONSTRUCTING);
-    JS_STATIC_ASSERT((int)NO_CONSTRUCT == 0);
-    return StackFrame::Flags(construct);
+    return StackFrame::Flags(initial);
+}
+
+static inline InitialFrameFlags
+InitialFrameFlagsFromConstructing(bool b)
+{
+    return b ? INITIAL_CONSTRUCT : INITIAL_NONE;
 }
 
-static inline MaybeConstruct
-MaybeConstructFromBool(bool b)
+static inline bool
+InitialFrameFlagsAreConstructing(InitialFrameFlags initial)
 {
-    return b ? CONSTRUCT : NO_CONSTRUCT;
+    return !!(initial & INITIAL_CONSTRUCT);
+}
+
+static inline bool
+InitialFrameFlagsAreLowered(InitialFrameFlags initial)
+{
+    return !!(initial & INITIAL_LOWERED);
 }
 
 inline StackFrame *          Valueify(JSStackFrame *fp) { return (StackFrame *)fp; }
 static inline JSStackFrame * Jsvalify(StackFrame *fp)   { return (JSStackFrame *)fp; }
 
 /*****************************************************************************/
 
 class FrameRegs
@@ -1577,17 +1613,17 @@ class ContextStack
      * the arguments to Invoke. A single allocation can be used for multiple
      * Invoke calls. The InvokeArgumentsGuard passed to Invoke must come from
      * an immediately-enclosing (stack-wise) call to pushInvokeArgs.
      */
     bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag);
 
     /* Called by Invoke for a scripted function call. */
     bool pushInvokeFrame(JSContext *cx, const CallArgs &args,
-                         MaybeConstruct construct, InvokeFrameGuard *ifg);
+                         InitialFrameFlags initial, InvokeFrameGuard *ifg);
 
     /* Called by Execute for execution of eval or global code. */
     bool pushExecuteFrame(JSContext *cx, JSScript *script, const Value &thisv,
                           JSObject &scopeChain, ExecuteType type,
                           StackFrame *evalInFrame, ExecuteFrameGuard *efg);
 
     /*
      * Called by SendToGenerator to resume a yielded generator. In addition to
@@ -1603,17 +1639,17 @@ class ContextStack
     /*
      * An "inline frame" may only be pushed from within the top, active
      * segment. This is the case for calls made inside mjit code and Interpret.
      * For the Check parameter, see OOMCheck et al above.
      */
     template <class Check>
     bool pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
                          JSObject &callee, JSFunction *fun, JSScript *script,
-                         MaybeConstruct construct, Check check);
+                         InitialFrameFlags initial, Check check);
     void popInlineFrame(FrameRegs &regs);
 
     /* Pop a partially-pushed frame after hitting the limit before throwing. */
     void popFrameAfterOverflow();
 
     /* Get the topmost script and optional pc on the stack. */
     inline JSScript *currentScript(jsbytecode **pc = NULL) const;
 
@@ -1625,17 +1661,17 @@ class ContextStack
      * hot, so getFixupFrame avoids doing call setup performed by jit code when
      * FixupArity returns. In terms of work done:
      *
      *   getFixupFrame = pushInlineFrame -
      *                   (fp->initJitFrameLatePrologue + regs->prepareToRun)
      */
     StackFrame *getFixupFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
                               JSFunction *fun, JSScript *script, void *ncode,
-                              MaybeConstruct construct, LimitCheck check);
+                              InitialFrameFlags flags, LimitCheck check);
 
     bool saveFrameChain();
     void restoreFrameChain();
 
     /*
      * As an optimization, the interpreter/mjit can operate on a local
      * FrameRegs instance repoint the ContextStack to this local instance.
      */