[JAEGER] Guard that calls have compiled code.
author: David Anderson <danderson@mozilla.com>
Thu, 17 Jun 2010 18:36:28 -0700
changeset 52872 c8f3c19d3b0f0bf1ee16951008a7f65185e41f7e
parent 52871 ea8fd44593573982f5055631025e37a4ef4de3b3
child 52873 da23e3e77a69bb28d9ceaea8220f7f12dcb4fac1
push id: 1
push user: root
push date: Tue, 26 Apr 2011 22:38:44 +0000
treeherder: mozilla-beta@bfdb6e623a36 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
milestone: 1.9.3a5pre
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
[JAEGER] Guard that calls have compiled code.
js/src/Makefile.in
js/src/assembler/assembler/X86Assembler.h
js/src/jsfun.h
js/src/jsscript.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCalls.h
js/src/methodjit/StubCompiler.cpp
js/src/methodjit/StubCompiler.h
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -309,16 +309,17 @@ CPPSRCS += 	Assertions.cpp \
 		Logging.cpp \
 		StubCalls.cpp \
 		Compiler.cpp \
 		FrameState.cpp \
 		FastOps.cpp \
 		StubCompiler.cpp \
 		MonoIC.cpp \
 		ImmutableSync.cpp \
+		InvokeHelpers.cpp \
 		$(NULL)
 #		PICStubCompiler.cpp \
 
 ifeq (86, $(findstring 86,$(TARGET_CPU)))
 ifeq (x86_64, $(TARGET_CPU))
 ifeq ($(OS_ARCH),WINNT)
 ASFILES +=	TrampolineMasmX64.asm
 endif
--- a/js/src/assembler/assembler/X86Assembler.h
+++ b/js/src/assembler/assembler/X86Assembler.h
@@ -712,17 +712,18 @@ public:
         } else {
             m_formatter.oneByteOp(OP_GROUP1_EvIz, GROUP1_OP_SUB, dst);
             m_formatter.immediate32(imm);
         }
     }
     
     void subl_im(int imm, int offset, RegisterID base)
     {
-        FIXME_INSN_PRINTING;
+        js::JaegerSpew(js::JSpew_Insns,
+                       IPFX "subl       $0x%x, %d(%s)\n", imm, offset, nameIReg(4, base));
         if (CAN_SIGN_EXTEND_8_32(imm)) {
             m_formatter.oneByteOp(OP_GROUP1_EvIb, GROUP1_OP_SUB, base, offset);
             m_formatter.immediate8(imm);
         } else {
             m_formatter.oneByteOp(OP_GROUP1_EvIz, GROUP1_OP_SUB, base, offset);
             m_formatter.immediate32(imm);
         }
     }
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -126,26 +126,26 @@ typedef union JSLocalNames {
                               JS_ASSERT((fun)->flags & JSFUN_TRCINFO),        \
                               fun->u.n.trcinfo)
 
 struct JSFunction : public JSObject
 {
     uint16          nargs;        /* maximum number of specified arguments,
                                      reflected as f.length/f.arity */
     uint16          flags;        /* flags, see JSFUN_* below and in jsapi.h */
-    union {
+    union U {
         struct {
             uint16      extra;    /* number of arg slots for local GC roots */
             uint16      spare;    /* reserved for future use */
             js::Native  native;   /* native method pointer or null */
             js::Class   *clasp;   /* class of objects constructed
                                      by this function */
             JSNativeTraceInfo *trcinfo;
         } n;
-        struct {
+        struct Scripted {
             uint16      nvars;    /* number of local variables */
             uint16      nupvars;  /* number of upvars (computable from script
                                      but here for faster access) */
             uint16       skipmin; /* net skip amount up (toward zero) from
                                      script->staticLevel to nearest upvar,
                                      including upvars in nested functions */
             JSPackedBool wrapper; /* true if this function is a wrapper that
                                      rewrites bytecode optimized for a function
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -74,17 +74,17 @@
 using namespace js;
 
 static const jsbytecode emptyScriptCode[] = {JSOP_STOP, SRC_NULL};
 
 /* static */ const JSScript JSScript::emptyScriptConst = {
     const_cast<jsbytecode*>(emptyScriptCode),
     1, JSVERSION_DEFAULT, 0, 0, 0, 0, 0, 0, 0, true, false, false, false, false,
     const_cast<jsbytecode*>(emptyScriptCode),
-    {0, NULL}, NULL, 0, 0, 0, NULL
+    {0, NULL}, NULL, 0, 0, 0, NULL, {NULL}, reinterpret_cast<void*>(1)
 };
 
 #if JS_HAS_XDR
 
 JSBool
 js_XDRScript(JSXDRState *xdr, JSScript **scriptp, bool needMutableScript,
              JSBool *hasMagic)
 {
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -165,36 +165,36 @@ mjit::TryCompile(JSContext *cx, JSScript
         script->ncode = JS_UNJITTABLE_METHOD;
 
     return status;
 }
 
 CompileStatus
 mjit::Compiler::generatePrologue()
 {
+    if (fun) {
+    }
+
+    invokeLabel = masm.label();
+    restoreFrameRegs();
+
 #ifdef JS_CPU_ARM
     /*
      * Unlike x86/x64, the return address is not pushed on the stack. To
      * compensate, we store the LR back into the stack on entry. This means
      * it's really done twice when called via the trampoline, but it's only
      * one instruction so probably not a big deal.
      *
      * The trampoline version goes through a veneer to make sure we can enter
      * scripts at any arbitrary point - i.e. we can't rely on this being here,
      * except for inline calls.
      */
     masm.storePtr(ARMRegisters::lr, FrameAddress(offsetof(VMFrame, scriptedReturn)));
 #endif
 
-    /*
-     * This saves us from having to load frame regs before every call, even if
-     * it's not always necessary.
-     */
-    restoreFrameRegs();
-
     return Compile_Okay;
 }
 
 CompileStatus
 mjit::Compiler::generateEpilogue()
 {
     return Compile_Okay;
 }
@@ -212,22 +212,23 @@ mjit::Compiler::finishThisUp()
         return Compile_Abort;
 
     uint8 *result = (uint8 *)execPool->alloc(masm.size() + stubcc.size());
     JSC::ExecutableAllocator::makeWritable(result, masm.size() + stubcc.size());
     memcpy(result, masm.buffer(), masm.size());
     memcpy(result + masm.size(), stubcc.buffer(), stubcc.size());
 
     /* Build the pc -> ncode mapping. */
-    void **nmap = (void **)cx->calloc(sizeof(void *) * script->length);
+    void **nmap = (void **)cx->calloc(sizeof(void *) * script->length + 1);
     if (!nmap) {
         execPool->release();
         return Compile_Error;
     }
 
+    *nmap++ = (uint8 *)(result + masm.distanceOf(invokeLabel));
     script->nmap = nmap;
 
     for (size_t i = 0; i < script->length; i++) {
         Label L = jumpMap[i];
         if (analysis[i].safePoint) {
             JS_ASSERT(L.isValid());
             nmap[i] = (uint8 *)(result + masm.distanceOf(L));
         }
@@ -1494,17 +1495,17 @@ mjit::Compiler::stubCall(void *ptr, Uses
 
 void
 mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
 {
     FrameEntry *fe = frame.peek(-int(argc + 2));
     bool typeKnown = fe->isTypeKnown();
 
     if (typeKnown && fe->getTypeTag() != JSVAL_MASK32_FUNOBJ) {
-        VoidStubUInt32 stub = callingNew ? stubs::SlowNew : stubs::SlowCall;
+        VoidPtrStubUInt32 stub = callingNew ? stubs::SlowNew : stubs::SlowCall;
         masm.move(Imm32(argc), Registers::ArgReg1);
         masm.stubCall(stub, PC, frame.stackDepth() + script->nfixed);
         frame.popn(argc + 2);
         frame.pushSynced();
         return;
     }
 
     bool hasTypeReg;
@@ -1525,113 +1526,139 @@ mjit::Compiler::inlineCallHelper(uint32 
     }
 
     /*
      * We rely on the fact that syncAndKill() is not allowed to touch the
      * registers we've preserved.
      */
     frame.forgetEverything();
 
-    Jump invokeCallDone;
+    Label invoke;
     if (!typeKnown) {
         Jump j;
         if (!hasTypeReg)
             j = masm.testFunObj(Assembler::NotEqual, frame.addressOf(fe));
         else
             j = masm.testFunObj(Assembler::NotEqual, type);
+        invoke = stubcc.masm.label();
         stubcc.linkExit(j);
         stubcc.leave();
         stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
         stubcc.call(callingNew ? stubs::SlowNew : stubs::SlowCall);
-        invokeCallDone = stubcc.masm.jump();
     }
 
     /* Get function private pointer. */
     Address funPrivate(data, offsetof(JSObject, fslots) +
                              JSSLOT_PRIVATE * sizeof(Value));
     masm.loadData32(funPrivate, data);
 
-    /* Test if it's interpreted. */
     frame.takeReg(data);
     RegisterID t0 = frame.allocReg();
     RegisterID t1 = frame.allocReg();
-    masm.load16(Address(data, offsetof(JSFunction, flags)), t0);
-    masm.move(t0, t1);
-    masm.and32(Imm32(JSFUN_KINDMASK), t1);
-    Jump notInterp = masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
-    stubcc.linkExit(notInterp);
+
+    /* Test if the function is interpreted, and if not, take a slow path. */
+    {
+        masm.load16(Address(data, offsetof(JSFunction, flags)), t0);
+        masm.move(t0, t1);
+        masm.and32(Imm32(JSFUN_KINDMASK), t1);
+        Jump notInterp = masm.branch32(Assembler::Below, t1, Imm32(JSFUN_INTERPRETED));
+
+        if (!typeKnown) {
+            /* Re-use the existing stub, if possible. */
+            stubcc.linkExitDirect(notInterp, invoke);
+        } else {
+            /* Create a new slow path. */
+            invoke = stubcc.masm.label();
+            stubcc.linkExit(notInterp);
+            stubcc.leave();
+            stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
+            stubcc.call(callingNew ? stubs::SlowNew : stubs::SlowCall);
+        }
+    }
+
+    /* Test if it's not got compiled code. */
+    Address scriptAddr(data, offsetof(JSFunction, u) + offsetof(JSFunction::U::Scripted, script));
+    masm.loadPtr(scriptAddr, data);
+    Jump notCompiled = masm.branchPtr(Assembler::BelowOrEqual,
+                                      Address(data, offsetof(JSScript, ncode)),
+                                      ImmIntPtr(1));
+    {
+        stubcc.linkExitDirect(notCompiled, invoke);
+    }
 
     frame.freeReg(t0);
     frame.freeReg(t1);
     frame.freeReg(data);
 
-    stubcc.leave();
-    stubcc.masm.move(Imm32(argc), Registers::ArgReg1);
-    stubcc.call(callingNew ? stubs::SlowNew : stubs::NativeCall);
-    Jump slowCallDone = stubcc.masm.jump();
-
     /* Scripted call. */
     masm.move(Imm32(argc), Registers::ArgReg1);
     masm.stubCall(callingNew ? stubs::New : stubs::Call,
                   PC, frame.stackDepth() + script->nfixed);
 
-    /*
-     * Stub call returns a pointer to JIT'd code, or NULL.
-     *
-     * If the function could not be JIT'd, it was already invoked using
-     * js_Interpret() or js_Invoke(). In that case, the stack frame has
-     * already been popped. We don't have to do any extra work, except
-     * update FpReg later on.
-     *
-     * Otherwise, pop the VMFrame's cached return address, then call
-     * (which realigns it to SP).
-     */
-    Jump j = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg, Registers::ReturnReg);
-    stubcc.linkExit(j);
+    Jump invokeCallDone;
+    {
+        /*
+         * Stub call returns a pointer to JIT'd code, or NULL.
+         *
+         * If the function could not be JIT'd, it was already invoked using
+         * js_Interpret() or js_Invoke(). In that case, the stack frame has
+         * already been popped. We don't have to do any extra work.
+         */
+        Jump j = stubcc.masm.branchTestPtr(Assembler::NonZero, Registers::ReturnReg, Registers::ReturnReg);
+        stubcc.crossJump(j, masm.label());
+        if (callingNew)
+            invokeCallDone = stubcc.masm.jump();
+    }
+
+    /* Fast-path: return address contains scripted call. */
 
 #ifndef JS_CPU_ARM
     /*
      * Since ARM does not push return addresses on the stack, we rely on the
      * scripted entry to store back the LR safely. Upon return we then write
      * back the LR to the VMFrame instead of pushing.
      */
     masm.addPtr(Imm32(sizeof(void*)), Registers::StackPointer);
 #endif
     masm.call(Registers::ReturnReg);
 
+    /*
+     * The scripted call returns a register triplet, containing the jsval and
+     * the current f.scriptedReturn.
+     */
 #ifdef JS_CPU_ARM
     masm.storePtr(Registers::ReturnReg, FrameAddress(offsetof(VMFrame, scriptedReturn)));
 #else
     masm.push(Registers::ReturnReg);
 #endif
 
+    /*
+     * Functions invoked with |new| can return, for some reason, primitive
+     * values. Just deal with this here.
+     */
     if (callingNew) {
-        /* Deal with primitive |this| */
         masm.move(JSReturnReg_Type, Registers::ReturnReg);
         masm.and32(Imm32(JSVAL_MASK32_OBJECT), Registers::ReturnReg);
         Jump primitive = masm.branch32(Assembler::BelowOrEqual, Registers::ReturnReg,
                                        Imm32(JSVAL_MASK32_CLEAR));
         stubcc.linkExit(primitive);
         FrameEntry *fe = frame.peek(-int(argc + 1));
         Address thisv(frame.addressOf(fe));
         stubcc.masm.loadTypeTag(thisv, JSReturnReg_Type);
         stubcc.masm.loadData32(thisv, JSReturnReg_Data);
         Jump primFix = stubcc.masm.jump();
         stubcc.crossJump(primFix, masm.label());
+        invokeCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
     }
 
     frame.popn(argc + 2);
     frame.takeReg(JSReturnReg_Type);
     frame.takeReg(JSReturnReg_Data);
     frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data);
 
-    stubcc.leave();
-    slowCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
-    if (!typeKnown)
-        invokeCallDone.linkTo(stubcc.masm.label(), &stubcc.masm);
     stubcc.rejoin(0);
 }
 
 void
 mjit::Compiler::restoreFrameRegs()
 {
     masm.loadPtr(FrameAddress(offsetof(VMFrame, fp)), JSFrameReg);
 }
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -107,16 +107,17 @@ class Compiler
     BytecodeAnalyzer analysis;
     Label *jumpMap;
     jsbytecode *PC;
     Assembler masm;
     FrameState frame;
     js::Vector<BranchPatch, 64> branchPatches;
     js::Vector<MICGenInfo, 64> mics;
     StubCompiler stubcc;
+    Label invokeLabel;
 
   public:
     // Special atom index used to indicate that the atom is 'length'. This
     // follows interpreter usage in JSOP_LENGTH.
     enum { LengthAtomIndex = uint32(-2) };
 
     Compiler(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *scopeChain);
     ~Compiler();
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -588,17 +588,17 @@ mjit::JaegerShot(JSContext *cx)
     JS_ASSERT(script->ncode && script->ncode != JS_UNJITTABLE_METHOD);
 
 #ifdef JS_TRACER
     if (TRACE_RECORDER(cx))
         AbortRecording(cx, "attempt to enter method JIT while recording");
 #endif
 
     if (pc == script->code)
-        code = script->ncode;
+        code = script->nmap[-1];
     else
         code = script->nmap[pc - script->code];
 
     JS_ASSERT(code);
 
 #ifdef JS_METHODJIT_SPEW
     Profiler prof;
 
@@ -647,17 +647,17 @@ mjit::ReleaseScriptCode(JSContext *cx, J
             script->pics = NULL;
             JS_METHODJIT_DATA(cx).removeScript(script);
         }
         script->npics = 0;
 #endif
     }
 
     if (script->nmap) {
-        cx->free(script->nmap);
+        cx->free(script->nmap - 1);
         script->nmap = NULL;
     }
     if (script->mics) {
         cx->free(script->mics);
         script->mics = NULL;
     }
 
 # if 0 /* def JS_TRACER */
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -171,17 +171,17 @@ typedef JSObject * (JS_FASTCALL *JSObjSt
 typedef JSObject * (JS_FASTCALL *JSObjStubUInt32)(VMFrame &, uint32);
 typedef JSObject * (JS_FASTCALL *JSObjStubFun)(VMFrame &, JSFunction *);
 typedef JSObject * (JS_FASTCALL *JSObjStubJSObj)(VMFrame &, JSObject *);
 typedef void (JS_FASTCALL *VoidStubAtom)(VMFrame &, JSAtom *);
 typedef JSString * (JS_FASTCALL *JSStrStub)(VMFrame &);
 typedef JSString * (JS_FASTCALL *JSStrStubUInt32)(VMFrame &, uint32);
 typedef void (JS_FASTCALL *VoidStubJSObj)(VMFrame &, JSObject *);
 
-#define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(-1))
+#define JS_UNJITTABLE_METHOD (reinterpret_cast<void*>(1))
 
 namespace mjit {
 
 JSBool
 JaegerShot(JSContext *cx);
 
 enum CompileStatus
 {
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -107,221 +107,31 @@ mjit::stubs::BindName(VMFrame &f)
 }
 
 JSObject * JS_FASTCALL
 mjit::stubs::BindGlobalName(VMFrame &f)
 {
     return f.fp->scopeChainObj()->getGlobal();
 }
 
-static bool
-InlineReturn(JSContext *cx)
-{
-    bool ok = true;
-
-    JSStackFrame *fp = cx->fp;
-
-    JS_ASSERT(!fp->blockChain);
-    JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChainObj(), 0));
-
-    if (fp->script->staticLevel < JS_DISPLAY_SIZE)
-        cx->display[fp->script->staticLevel] = fp->displaySave;
-
-    // Marker for debug support.
-    void *hookData = fp->hookData;
-    if (JS_UNLIKELY(hookData != NULL)) {
-        JSInterpreterHook hook;
-        JSBool status;
-
-        hook = cx->debugHooks->callHook;
-        if (hook) {
-            /*
-             * Do not pass &ok directly as exposing the address inhibits
-             * optimizations and uninitialised warnings.
-             */
-            status = ok;
-            hook(cx, fp, JS_FALSE, &status, hookData);
-            ok = (status == JS_TRUE);
-            // CHECK_INTERRUPT_HANDLER();
-        }
-    }
-
-    fp->putActivationObjects(cx);
-
-    /* :TODO: version stuff */
-
-    if (fp->flags & JSFRAME_CONSTRUCTING && fp->rval.isPrimitive())
-        fp->rval = fp->thisv;
-
-    cx->stack().popInlineFrame(cx, fp, fp->down);
-    cx->regs->sp[-1] = fp->rval;
-
-    return ok;
-}
-
 void JS_FASTCALL
 mjit::stubs::DebugHook(VMFrame &f)
 {
     JSContext *cx = f.cx;
     JSStackFrame *fp = f.fp;
 
     void *hookData = fp->hookData;
     JS_ASSERT(hookData);
 
     JSBool interpReturnOK = JS_TRUE;
     if (JSInterpreterHook hook = cx->debugHooks->callHook)
         hook(cx, fp, JS_FALSE, &interpReturnOK, hookData);
 
-    if (!interpReturnOK) {
-        stubs::Return(f);
+    if (!interpReturnOK)
         THROW();
-    }
-}
-
-void * JS_FASTCALL
-mjit::stubs::Return(VMFrame &f)
-{
-    if (!f.inlineCallCount)
-        return f.fp->ncode;
-
-    JSContext *cx = f.cx;
-    JS_ASSERT(f.fp == cx->fp);
-
-#ifdef DEBUG
-    bool wasInterp = f.fp->script->ncode == JS_UNJITTABLE_METHOD;
-#endif
-
-    bool ok = InlineReturn(cx);
-
-    f.inlineCallCount--;
-    JS_ASSERT(f.regs.sp == cx->regs->sp);
-    f.fp = cx->fp;
-
-    JS_ASSERT_IF(f.inlineCallCount > 1 && !wasInterp,
-                 f.fp->down->script->isValidJitCode(f.fp->ncode));
-
-    if (!ok)
-        THROWV(NULL);
-
-    return f.fp->ncode;
-}
-
-static jsbytecode *
-FindExceptionHandler(JSContext *cx)
-{
-    JSStackFrame *fp = cx->fp;
-    JSScript *script = fp->script;
-
-top:
-    if (cx->throwing && script->trynotesOffset) {
-        // The PC is updated before every stub call, so we can use it here.
-        unsigned offset = cx->regs->pc - script->main;
-
-        JSTryNoteArray *tnarray = script->trynotes();
-        for (unsigned i = 0; i < tnarray->length; ++i) {
-            JSTryNote *tn = &tnarray->vector[i];
-            if (offset - tn->start >= tn->length)
-                continue;
-            if (tn->stackDepth > cx->regs->sp - fp->base())
-                continue;
-
-            jsbytecode *pc = script->main + tn->start + tn->length;
-            JSBool ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE);
-            JS_ASSERT(cx->regs->sp == fp->base() + tn->stackDepth);
-
-            switch (tn->kind) {
-                case JSTRY_CATCH:
-                  JS_ASSERT(js_GetOpcode(cx, fp->script, pc) == JSOP_ENTERBLOCK);
-
-#if JS_HAS_GENERATORS
-                  /* Catch cannot intercept the closing of a generator. */
-                  if (JS_UNLIKELY(cx->exception.isMagic(JS_GENERATOR_CLOSING)))
-                      break;
-#endif
-
-                  /*
-                   * Don't clear cx->throwing to save cx->exception from GC
-                   * until it is pushed to the stack via [exception] in the
-                   * catch block.
-                   */
-                  return pc;
-
-                case JSTRY_FINALLY:
-                  /*
-                   * Push (true, exception) pair for finally to indicate that
-                   * [retsub] should rethrow the exception.
-                   */
-                  cx->regs->sp[0].setBoolean(true);
-                  cx->regs->sp[1] = cx->exception;
-                  cx->regs->sp += 2;
-                  cx->throwing = JS_FALSE;
-                  return pc;
-
-                case JSTRY_ITER:
-                {
-                  /*
-                   * This is similar to JSOP_ENDITER in the interpreter loop,
-                   * except the code now uses the stack slot normally used by
-                   * JSOP_NEXTITER, namely regs.sp[-1] before the regs.sp -= 2
-                   * adjustment and regs.sp[1] after, to save and restore the
-                   * pending exception.
-                   */
-                  AutoValueRooter tvr(cx, cx->exception);
-                  JS_ASSERT(js_GetOpcode(cx, fp->script, pc) == JSOP_ENDITER);
-                  cx->throwing = JS_FALSE;
-                  ok = !!js_CloseIterator(cx, cx->regs->sp[-1]);
-                  cx->regs->sp -= 1;
-                  if (!ok)
-                      goto top;
-                  cx->throwing = JS_TRUE;
-                  cx->exception = tvr.value();
-                }
-            }
-        }
-    }
-
-    return NULL;
-}
-
-extern "C" void *
-js_InternalThrow(VMFrame &f)
-{
-    JSContext *cx = f.cx;
-
-    // Make sure sp is up to date.
-    JS_ASSERT(cx->regs == &f.regs);
-
-    jsbytecode *pc = NULL;
-    for (;;) {
-        pc = FindExceptionHandler(cx);
-        if (pc)
-            break;
-
-        // If |f.inlineCallCount == 0|, then we are on the 'topmost' frame (where
-        // topmost means the first frame called into through js_Interpret). In this
-        // case, we still unwind, but we shouldn't return from a JS function, because
-        // we're not in a JS function.
-        bool lastFrame = (f.inlineCallCount == 0);
-        js_UnwindScope(cx, 0, cx->throwing);
-        if (lastFrame)
-            break;
-
-        JS_ASSERT(f.regs.sp == cx->regs->sp);
-        f.scriptedReturn = stubs::Return(f);
-    }
-
-    JS_ASSERT(f.regs.sp == cx->regs->sp);
-
-    if (!pc) {
-        *f.oldRegs = f.regs;
-        f.cx->setCurrentRegs(f.oldRegs);
-        return NULL;
-    }
-
-    return cx->fp->script->pcToNative(pc);
 }
 
 #define NATIVE_SET(cx,obj,sprop,entry,vp)                                     \
     JS_BEGIN_MACRO                                                            \
         if (sprop->hasDefaultSetter() &&                                      \
             (sprop)->slot != SPROP_INVALID_SLOT &&                            \
             !obj->scope()->brandedOrHasMethodBarrier()) {                     \
             /* Fast path for, e.g., plain Object instance properties. */      \
@@ -1022,279 +832,16 @@ stubs::DecVp(VMFrame &f, Value *vp)
 
 void JS_FASTCALL
 stubs::IncVp(VMFrame &f, Value *vp)
 {
     if (!PreInc<1>(f, vp))
         THROW();
 }
 
-static inline bool
-InlineCall(VMFrame &f, uint32 flags, void **pret, uint32 argc)
-{
-    JSContext *cx = f.cx;
-    JSStackFrame *fp = f.fp;
-    Value *vp = f.regs.sp - (argc + 2);
-    JSObject *funobj = &vp->asFunObj();
-    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
-
-    JS_ASSERT(FUN_INTERPRETED(fun));
-
-    JSScript *newscript = fun->u.i.script;
-
-    if (f.inlineCallCount >= JS_MAX_INLINE_CALL_COUNT) {
-        js_ReportOverRecursed(cx);
-        return false;
-    }
-
-    /* Allocate the frame. */
-    StackSpace &stack = cx->stack();
-    uintN nslots = newscript->nslots;
-    uintN funargs = fun->nargs;
-    Value *argv = vp + 2;
-    JSStackFrame *newfp;
-    if (argc < funargs) {
-        uintN missing = funargs - argc;
-        newfp = stack.getInlineFrame(cx, f.regs.sp, missing, nslots);
-        if (!newfp)
-            return false;
-        for (Value *v = argv + argc, *end = v + missing; v != end; ++v)
-            v->setUndefined();
-    } else {
-        newfp = stack.getInlineFrame(cx, f.regs.sp, 0, nslots);
-        if (!newfp)
-            return false;
-    }
-
-    /* Initialize the frame. */
-    newfp->ncode = NULL;
-    newfp->callobj = NULL;
-    newfp->argsval.setNull();
-    newfp->script = newscript;
-    newfp->fun = fun;
-    newfp->argc = argc;
-    newfp->argv = vp + 2;
-    newfp->rval.setUndefined();
-    newfp->annotation = NULL;
-    newfp->scopeChain.setNonFunObj(*funobj->getParent());
-    newfp->flags = flags;
-    newfp->blockChain = NULL;
-    JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags));
-    newfp->thisv = vp[1];
-    newfp->imacpc = NULL;
-
-    /* Push void to initialize local variables. */
-    Value *newslots = newfp->slots();
-    Value *newsp = newslots + fun->u.i.nvars;
-    for (Value *v = newslots; v != newsp; ++v)
-        v->setUndefined();
-
-    /* Scope with a call object parented by callee's parent. */
-    if (fun->isHeavyweight() && !js_GetCallObject(cx, newfp))
-        return false;
-
-    /* :TODO: Switch version if currentVersion wasn't overridden. */
-    newfp->callerVersion = (JSVersion)cx->version;
-
-    // Marker for debug support.
-    if (JSInterpreterHook hook = cx->debugHooks->callHook) {
-        newfp->hookData = hook(cx, fp, JS_TRUE, 0,
-                               cx->debugHooks->callHookData);
-        // CHECK_INTERRUPT_HANDLER();
-    } else {
-        newfp->hookData = NULL;
-    }
-
-    f.inlineCallCount++;
-    f.fp = newfp;
-    stack.pushInlineFrame(cx, fp, cx->regs->pc, newfp);
-
-    if (newscript->staticLevel < JS_DISPLAY_SIZE) {
-        JSStackFrame **disp = &cx->display[newscript->staticLevel];
-        newfp->displaySave = *disp;
-        *disp = newfp;
-    }
-
-    f.regs.pc = newscript->code;
-    f.regs.sp = newsp;
-
-    if (cx->options & JSOPTION_METHODJIT) {
-        if (!newscript->ncode) {
-            if (mjit::TryCompile(cx, newscript, fun, newfp->scopeChainObj()) == Compile_Error)
-                return false;
-        }
-        JS_ASSERT(newscript->ncode);
-        if (newscript->ncode != JS_UNJITTABLE_METHOD) {
-            fp->ncode = f.scriptedReturn;
-            *pret = newscript->ncode;
-            return true;
-        }
-    }
-
-    bool ok = !!Interpret(cx); //, newfp, f.inlineCallCount);
-    stubs::Return(f);
-
-    *pret = NULL;
-    return ok;
-}
-
-void JS_FASTCALL
-stubs::SlowCall(VMFrame &f, uint32 argc)
-{
-    Value *vp = f.regs.sp - (argc + 2);
-
-    JS_ASSERT(!vp->isFunObj());
-
-    if (!Invoke(f.cx, InvokeArgsGuard(vp, argc), 0))
-        THROW();
-}
-
-void JS_FASTCALL
-stubs::NativeCall(VMFrame &f, uint32 argc)
-{
-    Value *vp = f.regs.sp - (argc + 2);
-    JSContext *cx = f.cx;
-
-    JS_ASSERT(vp->isFunObj());
-    JSObject *obj = &vp->asFunObj();
-    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
-
-    JS_ASSERT(!fun->isInterpreted());
-    if (!fun->isFastNative()) {
-        if (!Invoke(cx, InvokeArgsGuard(vp, argc), 0))
-            THROW();
-        return;
-    }
-
-    FastNative fn = (FastNative)fun->u.n.native;
-    if (!fn(cx, argc, vp))
-        THROW();
-}
-
-void * JS_FASTCALL
-stubs::Call(VMFrame &f, uint32 argc)
-{
-    Value *vp = f.regs.sp - (argc + 2);
-
-    JS_ASSERT(vp->isFunObj());
-
-    JSObject *obj = &vp->asFunObj();
-    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, obj);
-
-    JS_ASSERT(FUN_INTERPRETED(fun));
-
-    if (fun->u.i.script->isEmpty()) {
-        vp->setUndefined();
-        f.regs.sp = vp + 1;
-        return NULL;
-    }
-
-    void *ret;
-    if (!InlineCall(f, 0, &ret, argc))
-        THROWV(NULL);
-
-    f.cx->regs->pc = f.fp->script->code;
-
-#if 0 /* def JS_TRACER */
-    if (ret && f.cx->jitEnabled && IsTraceableRecursion(f.cx)) {
-        /* Top of script should always have traceId 0. */
-        f.u.tracer.traceId = 0;
-        f.u.tracer.offs = 0;
-
-        /* cx.regs.sp is only set in InlineCall() if non-jittable. */
-        JS_ASSERT(f.cx->regs == &f.regs);
-
-        /*
-         * NB: Normally, the function address is returned, and the
-         * caller's JIT'd code will set f.scriptedReturn and jump.
-         * Invoking the tracer breaks this in two ways:
-         *  1) f.scriptedReturn is not yet set, so when pushing new
-         *     inline frames, the call stack would get corrupted.
-         *  2) If the tracer does not push new frames, but runs some
-         *     code, the JIT'd code to set f.scriptedReturn will not
-         *     be run.
-         *
-         * So, a simple hack: set f.scriptedReturn now.
-         */
-        f.scriptedReturn = GetReturnAddress(f, f.fp);
-
-        void *newRet = InvokeTracer(f, Record_Recursion);
-
-        /* 
-         * The tracer could have dropped us off anywhere. Hijack the
-         * stub return address to JaegerFromTracer, which will restore
-         * state correctly.
-         */
-        if (newRet) {
-            void *ptr = JS_FUNC_TO_DATA_PTR(void *, JaegerFromTracer);
-            f.setReturnAddress(ReturnAddressPtr(FunctionPtr(ptr)));
-            return newRet;
-        }
-    }
-#endif
-
-    return ret;
-}
-
-void JS_FASTCALL
-stubs::CopyThisv(VMFrame &f)
-{
-    JS_ASSERT(f.fp->flags & JSFRAME_CONSTRUCTING);
-    if (f.fp->rval.isPrimitive())
-        f.fp->rval = f.fp->thisv;
-}
-
-void JS_FASTCALL
-stubs::SlowNew(VMFrame &f, uint32 argc)
-{
-    JSContext *cx = f.cx;
-    Value *vp = f.regs.sp - (argc + 2);
-
-    JS_ASSERT_IF(vp[0].isFunObj(),
-              !(GET_FUNCTION_PRIVATE(cx, &vp[0].asFunObj()))->isInterpreted());
-
-    if (!InvokeConstructor(cx, InvokeArgsGuard(vp, argc), JS_TRUE))
-        THROW();
-}
-
-void * JS_FASTCALL
-stubs::New(VMFrame &f, uint32 argc)
-{
-    JSContext *cx = f.cx;
-    Value *vp = f.regs.sp - (argc + 2);
-
-    JS_ASSERT(vp[0].isFunObj());
-
-    JSObject *funobj = &vp[0].asFunObj();
-    JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
-
-    JS_ASSERT(fun->isInterpreted());
-
-    jsid id = ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom);
-    if (!funobj->getProperty(cx, id, &vp[1]))
-        THROWV(NULL);
-
-    JSObject *proto = vp[1].isObject() ? &vp[1].asObject() : NULL;
-    JSObject *obj2 = NewObject(cx, &js_ObjectClass, proto, funobj->getParent());
-    if (!obj2)
-        THROWV(NULL);
-
-    if (fun->u.i.script->isEmpty()) {
-        vp[0].setNonFunObj(*obj2);
-        f.regs.sp = vp + 1;
-        return NULL;
-    }
-
-    vp[1].setNonFunObj(*obj2);
-    void *pret;
-    if (!InlineCall(f, JSFRAME_CONSTRUCTING, &pret, argc))
-        THROWV(NULL);
-    return pret;
-}
-
 void JS_FASTCALL
 stubs::DefFun(VMFrame &f, uint32 index)
 {
     bool doSet;
     JSObject *pobj, *obj2;
     JSProperty *prop;
     uint32 old;
 
@@ -2916,23 +2463,8 @@ stubs::TableSwitch(VMFrame &f, jsbytecod
 
 finally:
     /* Provide the native address. */
     ptrdiff_t offset = (originalPC + jumpOffset) - script->code;
     JS_ASSERT(script->nmap[offset]);
     return script->nmap[offset];
 }
 
-void JS_FASTCALL
-stubs::PutCallObject(VMFrame &f)
-{
-    JS_ASSERT(f.fp->callobj);
-    js_PutCallObject(f.cx, f.fp);
-    JS_ASSERT(f.fp->argsval.isNull());
-}
-
-void JS_FASTCALL
-stubs::PutArgsObject(VMFrame &f)
-{
-    JS_ASSERT(f.fp->argsval.isNonFunObj());
-    js_PutArgsObject(f.cx, f.fp);
-}
-
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -55,21 +55,19 @@ void JS_FASTCALL Interrupt(VMFrame &f);
 void JS_FASTCALL InitElem(VMFrame &f, uint32 last);
 void JS_FASTCALL InitProp(VMFrame &f, JSAtom *atom);
 void JS_FASTCALL InitMethod(VMFrame &f, JSAtom *atom);
 void JS_FASTCALL EndInit(VMFrame &f);
 JSString * JS_FASTCALL ConcatN(VMFrame &f, uint32 argc);
 
 void * JS_FASTCALL Call(VMFrame &f, uint32 argc);
 void * JS_FASTCALL New(VMFrame &f, uint32 argc);
-void JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
-void JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
-void JS_FASTCALL NativeCall(VMFrame &f, uint32 argc);
+void * JS_FASTCALL SlowNew(VMFrame &f, uint32 argc);
+void * JS_FASTCALL SlowCall(VMFrame &f, uint32 argc);
 JSObject * JS_FASTCALL NewObject(VMFrame &f);
-void * JS_FASTCALL Return(VMFrame &f);
 void JS_FASTCALL Throw(VMFrame &f);
 void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
 void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);
 void JS_FASTCALL DebugHook(VMFrame &f);
 void JS_FASTCALL PutCallObject(VMFrame &f);
 void JS_FASTCALL PutArgsObject(VMFrame &f);
 void JS_FASTCALL CopyThisv(VMFrame &f);
 
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -54,32 +54,38 @@ StubCompiler::StubCompiler(JSContext *cx
 }
 
 bool
 StubCompiler::init(uint32 nargs)
 {
     return true;
 }
 
+void
+StubCompiler::linkExitDirect(Jump j, Label L)
+{
+    exits.append(CrossPatch(j, L));
+}
+
 /*
  * The "slow path" generation is interleaved with the main compilation phase,
  * though it is generated into a separate buffer. The fast path can "exit"
  * into the slow path, and the slow path rejoins into the fast path. The slow
  * path is kept in a separate buffer, but appended to the main one, for ideal
  * icache locality.
  */
 void
 StubCompiler::linkExit(Jump j)
 {
     JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW MERGE CODE ---- \n");
     if (lastGeneration == generation) {
         Jump j2 = masm.jump();
         jumpList.append(j2);
     }
-    exits.append(CrossPatch(j, masm.label()));
+    linkExitDirect(j, masm.label());
     frame.sync(masm);
     lastGeneration = generation;
     JaegerSpew(JSpew_Insns, " ---- END SLOW MERGE CODE ---- \n");
 }
 
 void
 StubCompiler::leave()
 {
--- a/js/src/methodjit/StubCompiler.h
+++ b/js/src/methodjit/StubCompiler.h
@@ -113,22 +113,24 @@ class StubCompiler
 #define STUB_CALL_TYPE(type)                                    \
     Call call(type stub) {                                      \
         return stubCall(JS_FUNC_TO_DATA_PTR(void *, stub));     \
     }
 
     STUB_CALL_TYPE(JSObjStub);
     STUB_CALL_TYPE(VoidStub);
     STUB_CALL_TYPE(VoidStubUInt32);
+    STUB_CALL_TYPE(VoidPtrStubUInt32);
     STUB_CALL_TYPE(BoolStub);
 
 #undef STUB_CALL_TYPE
 
     /* Exits from the fast path into the slow path. */
     void linkExit(Jump j);
+    void linkExitDirect(Jump j, Label L);
 
     void leave();
     void leaveWithDepth(uint32 depth);
 
     /*
      * Rejoins slow-path code back to the fast-path. The invalidation param
      * specifies how many stack slots below sp must not be reloaded from
      * registers.