[INFER] Coalesce rejoin sites for call ops, bug 648843.
author Brian Hackett <bhackett1024@gmail.com>
Sun, 10 Apr 2011 21:43:35 -0700
changeset 74921 74a8fb1bbec564ab45bbc12e516a595bfbdfe1d6
parent 74920 3816e4abb15803e3d612a883ab6f423f45f59e44
child 74922 dc855edb9bc5561183353bacdc1ec2f188f29b12
push id 2
push user bsmedberg@mozilla.com
push date Fri, 19 Aug 2011 14:38:13 +0000
bugs 648843
milestone 2.2a1pre
js/src/jit-test/tests/jaeger/recompile/bug648843.js
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.h
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/recompile/bug648843.js
@@ -0,0 +1,9 @@
+
+var HOTLOOP = 100;
+function jit(on)
+{
+  options().match
+}
+function options() { return "tracejit,methodjit"; }
+gczeal(2);
+for (i = 0; i < HOTLOOP ; ++i) { jit(jit(42, [])); }
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -1072,20 +1072,24 @@ mjit::Compiler::finishThisUp(JITScript *
         
         stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
     }
 #endif /* JS_MONOIC */
 
     for (size_t i = 0; i < callPatches.length(); i++) {
         CallPatchInfo &patch = callPatches[i];
 
+        CodeLocationLabel joinPoint = patch.joinSlow
+            ? stubCode.locationOf(patch.joinPoint)
+            : fullCode.locationOf(patch.joinPoint);
+
         if (patch.hasFastNcode)
-            fullCode.patch(patch.fastNcodePatch, fullCode.locationOf(patch.joinPoint));
+            fullCode.patch(patch.fastNcodePatch, joinPoint);
         if (patch.hasSlowNcode)
-            stubCode.patch(patch.slowNcodePatch, fullCode.locationOf(patch.joinPoint));
+            stubCode.patch(patch.slowNcodePatch, joinPoint);
     }
 
 #ifdef JS_POLYIC
     ic::GetElementIC *jitGetElems = (ic::GetElementIC *)cursor;
     jit->nGetElems = getElemICs.length();
     cursor += sizeof(ic::GetElementIC) * jit->nGetElems;
     for (size_t i = 0; i < jit->nGetElems; i++) {
         ic::GetElementIC &to = jitGetElems[i];
@@ -1983,41 +1987,102 @@ mjit::Compiler::generateMethod()
             REJOIN_SITE_ANY();
             JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
             emitEval(GET_ARGC(PC));
             JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
           }
           END_CASE(JSOP_EVAL)
 
           BEGIN_CASE(JSOP_CALL)
+          BEGIN_CASE(JSOP_NEW)
           BEGIN_CASE(JSOP_FUNAPPLY)
           BEGIN_CASE(JSOP_FUNCALL)
           {
             REJOIN_SITE_ANY();
+          {
+            bool callingNew = (op == JSOP_NEW);
+
+            AutoRejoinSite autoRejoinCall(this,
+                JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call),
+                JS_FUNC_TO_DATA_PTR(void *, callingNew ? stubs::UncachedNew : stubs::UncachedCall));
+            AutoRejoinSite autoRejoinNcode(this, (void *) CallSite::NCODE_RETURN_ID);
+
             bool done = false;
+            bool inlined = false;
             if (op == JSOP_CALL) {
-                CompileStatus status = inlineNativeFunction(GET_ARGC(PC), false);
+                CompileStatus status = inlineNativeFunction(GET_ARGC(PC), callingNew);
                 if (status == Compile_Okay)
                     done = true;
                 else if (status != Compile_InlineAbort)
                     return status;
             }
             if (!done && inlining) {
-                CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), false);
-                if (status == Compile_Okay)
+                CompileStatus status = inlineScriptedFunction(GET_ARGC(PC), callingNew);
+                if (status == Compile_Okay) {
                     done = true;
+                    inlined = true;
+                }
                 else if (status != Compile_InlineAbort)
                     return status;
             }
+
+            FrameSize frameSize;
+            frameSize.initStatic(frame.totalDepth(), GET_ARGC(PC));
+
             if (!done) {
                 JaegerSpew(JSpew_Insns, " --- SCRIPTED CALL --- \n");
-                inlineCallHelper(GET_ARGC(PC), false);
+                inlineCallHelper(GET_ARGC(PC), callingNew, frameSize);
                 JaegerSpew(JSpew_Insns, " --- END SCRIPTED CALL --- \n");
             }
-          }
+
+            /*
+             * Generate skeleton rejoin paths for calls which triggered a
+             * recompilation, either while compiling or in a scripted call.
+             * We always need this rejoin if we inlined frames at this site,
+             * in case we end up expanding those frames. Note that the ncode
+             * join is in the slow path, which will break nativeCodeForPC and
+             * keep us from computing the implicit prevpc/prevInline for the
+             * next frame. All next frames manipulated here must have had these
+             * set explicitly when they were pushed by a stub, expanded or
+             * redirected by a recompilation.
+             */
+            if (needRejoins(PC) || inlined) {
+                if (inlined)
+                    autoRejoinNcode.forceGeneration();
+
+                autoRejoinCall.oolRejoin(stubcc.masm.label());
+                Jump fallthrough = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
+                                                             Registers::ReturnReg);
+                if (frameSize.isStatic())
+                    stubcc.masm.move(Imm32(frameSize.staticArgc()), JSParamReg_Argc);
+                else
+                    stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
+
+                CallPatchInfo callPatch;
+                callPatch.hasSlowNcode = true;
+                callPatch.slowNcodePatch =
+                    stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
+                                                  Address(JSFrameReg, JSStackFrame::offsetOfncode()));
+                stubcc.masm.jump(Registers::ReturnReg);
+
+                callPatch.joinPoint = stubcc.masm.label();
+                callPatch.joinSlow = true;
+
+                stubcc.masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
+                autoRejoinNcode.oolRejoin(stubcc.masm.label());
+                stubcc.masm.storeValueFromComponents(JSReturnReg_Type, JSReturnReg_Data,
+                                                     frame.addressOf(frame.peek(-1)));
+                fallthrough.linkTo(stubcc.masm.label(), &stubcc.masm);
+                if (knownPushedType(0) == JSVAL_TYPE_DOUBLE)
+                    stubcc.masm.ensureInMemoryDouble(frame.addressOf(frame.peek(-1)));
+                stubcc.rejoin(Changes(1));
+
+                callPatches.append(callPatch);
+            }
+          } }
           END_CASE(JSOP_CALL)
 
           BEGIN_CASE(JSOP_NAME)
           {
             JSAtom *atom = script->getAtom(fullAtomIndex(PC));
             jsop_name(atom, knownPushedType(0));
             frame.extra(frame.peek(-1)).name = atom;
           }
@@ -2146,25 +2211,16 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_ENDITER)
             iterEnd();
           END_CASE(JSOP_ENDITER)
 
           BEGIN_CASE(JSOP_POP)
             frame.pop();
           END_CASE(JSOP_POP)
 
-          BEGIN_CASE(JSOP_NEW)
-          {
-            REJOIN_SITE_ANY();
-            JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
-            inlineCallHelper(GET_ARGC(PC), true);
-            JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
-          }
-          END_CASE(JSOP_NEW)
-
           BEGIN_CASE(JSOP_GETARG)
           {
             uint32 arg = GET_SLOTNO(PC);
             frame.pushArg(arg, knownArgumentType(arg));
           }
           END_CASE(JSOP_GETARG)
 
           BEGIN_CASE(JSOP_CALLARG)
@@ -3176,37 +3232,30 @@ mjit::Compiler::recompileCheckHelper()
 
 void
 mjit::Compiler::addReturnSite()
 {
     InternalCallSite site(masm.distanceOf(masm.label()), a->inlineIndex, PC,
                           CallSite::NCODE_RETURN_ID, false, true);
     addCallSite(site);
     masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
-
-    if (needRejoins(PC))
-        addRejoinSite((void *) CallSite::NCODE_RETURN_ID, false, Label());
 }
 
 void
 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
 {
     CallPatchInfo callPatch;
 
     RegisterID r0 = Registers::ReturnReg;
     VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
 
-    {
-        REJOIN_SITE_2(stub, callingNew ? ic::New : ic::Call);
-
-        frame.syncAndKill(Uses(argc + 2));
-        prepareStubCall(Uses(argc + 2));
-        masm.move(Imm32(argc), Registers::ArgReg1);
-        INLINE_STUBCALL(stub);
-    }
+    frame.syncAndKill(Uses(argc + 2));
+    prepareStubCall(Uses(argc + 2));
+    masm.move(Imm32(argc), Registers::ArgReg1);
+    INLINE_STUBCALL(stub);
 
     Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
 
     if (!cx->typeInferenceEnabled())
         masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
 
     callPatch.hasFastNcode = true;
     callPatch.fastNcodePatch =
@@ -3263,53 +3312,35 @@ mjit::Compiler::checkCallApplySpeculatio
                                    Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
                                    ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
 
     /*
      * If speculation fails, we can't use the ic, since it is compiled on the
      * assumption that speculation succeeds. Instead, just do an uncached call.
      */
     {
-        AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
-                                  JS_FUNC_TO_DATA_PTR(void *, ic::Call));
-
         if (isObj.isSet())
             stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
         stubcc.linkExitDirect(isFun, stubcc.masm.label());
         stubcc.linkExitDirect(isNative, stubcc.masm.label());
 
         int32 frameDepthAdjust;
         if (applyTricks == LazyArgsObj) {
             OOL_STUBCALL_NO_REJOIN(stubs::Arguments);
             frameDepthAdjust = +1;
         } else {
             frameDepthAdjust = 0;
         }
 
         stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
-        OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
+        OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::SlowCall),
                                  frame.totalDepth() + frameDepthAdjust);
         JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
 
-        autoRejoin.oolRejoin(stubcc.masm.label());
-
-        RegisterID r0 = Registers::ReturnReg;
-        Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
-
-        if (!cx->typeInferenceEnabled())
-            stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
-
-        Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
-        uncachedCallPatch->hasSlowNcode = true;
-        uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);
-
-        stubcc.masm.jump(r0);
-        notCompiled.linkTo(stubcc.masm.label(), &stubcc.masm);
-
         /*
          * inlineCallHelper will link uncachedCallSlowRejoin to the join point
          * at the end of the ic. At that join point, the return value of the
          * call is assumed to be in registers, so load them before jumping.
          */
         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW RESTORE CODE ---- \n");
         Address rval = frame.addressOf(origCallee);  /* vp[0] == rval */
         if (knownPushedType(0) == JSVAL_TYPE_DOUBLE)
@@ -3339,17 +3370,17 @@ mjit::Compiler::canUseApplyTricks()
     return *nextpc == JSOP_FUNAPPLY &&
            IsLowerableFunCallOrApply(nextpc) &&
            !a->analysis.jumpTarget(nextpc) &&
            !debugMode() && !a->parent;
 }
 
 /* See MonoIC.cpp, CallCompiler for more information on call ICs. */
 bool
-mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
+mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew, FrameSize &callFrameSize)
 {
     /* Check for interrupts on function call */
     interruptCheckHelper();
 
     int32 speculatedArgc;
     if (applyTricks == LazyArgsObj) {
         frame.pop();
         speculatedArgc = 1;
@@ -3386,35 +3417,32 @@ mjit::Compiler::inlineCallHelper(uint32 
      * their return values in a different slot, so when recompiling we need
      * to go down the exact same path.
      */
     bool lowerFunCallOrApply = IsLowerableFunCallOrApply(PC);
 
     bool newType = callingNew && cx->typeInferenceEnabled() && types::UseNewType(cx, script, PC);
 
 #ifdef JS_MONOIC
-    if (debugMode() || newType || origCallee->isNotType(JSVAL_TYPE_OBJECT)) {
+    if (debugMode() || newType) {
 #endif
         if (applyTricks == LazyArgsObj) {
             /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
             jsop_arguments();
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         }
         emitUncachedCall(callImmArgc, callingNew);
         applyTricks = NoApplyTricks;
         return true;
 #ifdef JS_MONOIC
     }
 
-    frame.forgetConstantData(origCallee);
-    if (lowerFunCallOrApply) {
-        frame.forgetConstantData(origThis);
-        if (origThis->isNotType(JSVAL_TYPE_OBJECT))
-            frame.forgetType(origThis);
-    }
+    frame.forgetMismatchedObject(origCallee);
+    if (lowerFunCallOrApply)
+        frame.forgetMismatchedObject(origThis);
 
     /* Initialized by both branches below. */
     CallGenInfo     callIC;
     CallPatchInfo   callPatch;
     MaybeRegisterID icCalleeType; /* type to test for function-ness */
     RegisterID      icCalleeData; /* data to call */
     Address         icRvalAddr;   /* return slot on slow-path rejoin */
 
@@ -3485,16 +3513,18 @@ mjit::Compiler::inlineCallHelper(uint32 
 
             icCalleeType = origCalleeType;
             icCalleeData = origCalleeData;
             icRvalAddr = frame.addressOf(origCallee);
             callIC.frameSize.initStatic(frame.totalDepth(), speculatedArgc);
         }
     }
 
+    callFrameSize = callIC.frameSize;
+
     callIC.argTypes = NULL;
     callIC.typeMonitored = monitored(PC);
     if (callIC.typeMonitored && callIC.frameSize.isStatic()) {
         unsigned argc = callIC.frameSize.staticArgc();
         callIC.argTypes = (types::ClonedTypeSet *)
             js_calloc((1 + argc) * sizeof(types::ClonedTypeSet));
         if (!callIC.argTypes) {
             js_ReportOutOfMemory(cx);
@@ -3538,19 +3568,20 @@ mjit::Compiler::inlineCallHelper(uint32 
     Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
     callIC.funJump = j;
 
     /* Reserve space just before initialization of slowPathStart. */
     RESERVE_OOL_SPACE(stubcc.masm);
 
     Jump rejoin1, rejoin2;
     {
-        AutoRejoinSite autoRejoin(this,
-            JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call),
-            JS_FUNC_TO_DATA_PTR(void *, callingNew ? stubs::UncachedNew : stubs::UncachedCall));
+        /*
+         * Rejoin site for recompiling from SplatApplyArgs. We ensure that this
+         * call is consistently emitted or omitted across compilations.
+         */
         AutoRejoinSite autoRejoinSplat(this,
             JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs));
 
         RESERVE_OOL_SPACE(stubcc.masm);
         stubcc.linkExitDirect(j, stubcc.masm.label());
         callIC.slowPathStart = stubcc.masm.label();
 
         /*
@@ -3596,18 +3627,16 @@ mjit::Compiler::inlineCallHelper(uint32 
         if (callIC.frameSize.isStatic())
             callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.totalDepth());
         else
             callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);
 
         callIC.funObjReg = icCalleeData;
         callIC.funPtrReg = funPtrReg;
 
-        autoRejoin.oolRejoin(stubcc.masm.label());
-
         /*
          * The IC call either returns NULL, meaning call completed, or a
          * function pointer to jump to.
          */
         rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                             Registers::ReturnReg);
         if (callIC.frameSize.isStatic())
             stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
@@ -3921,17 +3950,17 @@ mjit::Compiler::inlineScriptedFunction(u
 
     /*
      * If this is a polymorphic callsite, get a register for the callee too.
      * After this, do not touch the register state in the current frame until
      * stubs for all callees have been generated.
      */
     MaybeRegisterID calleeReg;
     if (count > 1) {
-        frame.forgetConstantData(origCallee);
+        frame.forgetMismatchedObject(origCallee);
         calleeReg = frame.tempRegForData(origCallee);
     }
     MaybeJump calleePrevious;
 
     /*
      * Registers for entries which will be popped after the call finishes do
      * not need to be preserved by the inline frames.
      */
@@ -4024,29 +4053,16 @@ mjit::Compiler::inlineScriptedFunction(u
         if (returnRegister.isReg())
             frame.pushTypedPayload(returnType, returnRegister.reg());
         else
             frame.pushDouble(returnRegister.fpreg());
     } else {
         frame.pushSynced(JSVAL_TYPE_UNKNOWN);
     }
 
-    /* If we end up expanding inline frames here, they will need a return site to rejoin at. */
-    if (a == outer) {
-        Label oolStart = stubcc.masm.label();
-        if (needReturnValue) {
-            stubcc.masm.storeValueFromComponents(JSReturnReg_Type, JSReturnReg_Data,
-                                                 frame.addressOf(frame.peek(-1)));
-            if (!syncReturnValue && !returnRegister.isReg())
-                stubcc.masm.ensureInMemoryDouble(frame.addressOf(frame.peek(-1)));
-        }
-        stubcc.rejoin(Changes(1));
-        addRejoinSite((void *) CallSite::NCODE_RETURN_ID, true, oolStart);
-    }
-
     JaegerSpew(JSpew_Inlining, "finished inlining call to script (file \"%s\") (line \"%d\")\n",
                script->filename, script->lineno);
 
     return Compile_Okay;
 }
 
 /*
  * This function must be called immediately after any instruction which could
@@ -4328,17 +4344,17 @@ mjit::Compiler::jsop_getprop(JSAtom *ato
     FrameEntry *top = frame.peek(-1);
 
     /* If the incoming type will never PIC, take slow path. */
     if (top->isNotType(JSVAL_TYPE_OBJECT)) {
         jsop_getprop_slow(atom, usePropCache);
         return true;
     }
 
-    frame.forgetConstantData(top);
+    frame.forgetMismatchedObject(top);
 
     /*
      * These two must be loaded first. The objReg because the string path
      * wants to read it, and the shapeReg because it could cause a spill that
      * the string path wouldn't sink back.
      */
     RegisterID objReg = Registers::ReturnReg;
     RegisterID shapeReg = Registers::ReturnReg;
@@ -4903,17 +4919,17 @@ mjit::Compiler::jsop_setprop(JSAtom *ato
         typeCheck = stubcc.masm.jump();
         pic.hasTypeCheck = true;
     } else {
         pic.fastPathStart = masm.label();
         pic.hasTypeCheck = false;
         pic.typeReg = Registers::ReturnReg;
     }
 
-    frame.forgetConstantData(lhs);
+    frame.forgetMismatchedObject(lhs);
 
     /* Get the object into a mutable register. */
     RegisterID objReg = frame.copyDataIntoReg(lhs);
     pic.objReg = objReg;
 
     /* Get info about the RHS and pin it. */
     ValueRemat vr;
     frame.pinEntry(rhs, vr);
@@ -5053,17 +5069,17 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
         return jsop_getprop(atom, knownPushedType(0));
     }
 
     if (!fe->isTypeKnown()) {
         Jump notObject = frame.testObject(Assembler::NotEqual, fe);
         stubcc.linkExit(notObject, Uses(1));
     }
 
-    frame.forgetConstantData(fe);
+    frame.forgetMismatchedObject(fe);
 
     RESERVE_IC_SPACE(masm);
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.copyDataIntoReg(fe);
     pic.typeReg = Registers::ReturnReg;
     pic.atom = atom;
     pic.hasTypeCheck = false;
@@ -5552,17 +5568,17 @@ mjit::Compiler::iter(uintN flags)
         return true;
     }
 
     if (!fe->isTypeKnown()) {
         Jump notObject = frame.testObject(Assembler::NotEqual, fe);
         stubcc.linkExit(notObject, Uses(1));
     }
 
-    frame.forgetConstantData(fe);
+    frame.forgetMismatchedObject(fe);
 
     RegisterID reg = frame.tempRegForData(fe);
 
     frame.pinReg(reg);
     RegisterID ioreg = frame.allocReg();  /* Will hold iterator JSObject */
     RegisterID nireg = frame.allocReg();  /* Will hold NativeIterator */
     RegisterID T1 = frame.allocReg();
     RegisterID T2 = frame.allocReg();
@@ -6171,18 +6187,18 @@ mjit::Compiler::jsop_instanceof()
     }
 
     MaybeJump firstSlow;
     if (!rhs->isTypeKnown()) {
         Jump j = frame.testObject(Assembler::NotEqual, rhs);
         stubcc.linkExit(j, Uses(2));
     }
 
-    frame.forgetConstantData(lhs);
-    frame.forgetConstantData(rhs);
+    frame.forgetMismatchedObject(lhs);
+    frame.forgetMismatchedObject(rhs);
 
     RegisterID obj = frame.tempRegForData(rhs);
     Jump notFunction = masm.testFunction(Assembler::NotEqual, obj);
     stubcc.linkExit(notFunction, Uses(2));
 
     /* Test for bound functions. */
     Jump isBound = masm.branchTest32(Assembler::NonZero, Address(obj, offsetof(JSObject, flags)),
                                      Imm32(JSObject::BOUND_FUNCTION));
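
The coalesced slow-path rejoin that the Compiler.cpp changes above emit for JSOP_CALL/JSOP_NEW/JSOP_FUNAPPLY/JSOP_FUNCALL always has the same shape: the call stub returns either NULL, meaning the callee already ran and left its return value behind, or a native-code entry to jump through, in which case argc is taken from an immediate (static frame size) or from the VMFrame's u.call.dynamicArgc (dynamic frame size) and the frame's ncode slot is patched with the join point. Below is a standalone sketch of that dispatch; StubResult and rejoinAfterStub are invented names for illustration, not methodjit types.

    // Standalone model of the coalesced rejoin path (invented names; this is
    // not methodjit code, just the control flow the patch emits).
    #include <cstdint>
    #include <cstdio>

    struct StubResult {
        void    *ncode;        // NULL => call completed inside the stub
        uint32_t dynamicArgc;  // used only when the frame size is dynamic
    };

    // Mirrors the out-of-line path: fall through when the stub finished the
    // call, otherwise materialize argc (immediate vs. VMFrame load) and jump
    // through the returned entry after patching the frame's ncode slot.
    static bool rejoinAfterStub(const StubResult &r, bool staticSize,
                                uint32_t staticArgc)
    {
        if (!r.ncode) {
            puts("call completed in stub; store rval and rejoin");
            return false;
        }
        uint32_t argc = staticSize ? staticArgc : r.dynamicArgc;
        printf("jump into compiled callee with argc=%u\n", (unsigned) argc);
        return true;
    }

    int main()
    {
        static int entry;  // stands in for a native-code entry point
        rejoinAfterStub(StubResult{nullptr, 0}, true, 2);  // completed path
        rejoinAfterStub(StubResult{&entry, 3}, false, 0);  // jump path
        return 0;
    }
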
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -166,22 +166,23 @@ class Compiler : public BaseCompiler
   private:
 #endif
 
     /*
     * Writes of call return addresses which need to be delayed until the final
      * absolute address of the join point is known.
      */
     struct CallPatchInfo {
-        CallPatchInfo() : hasFastNcode(false), hasSlowNcode(false) {}
+        CallPatchInfo() : hasFastNcode(false), hasSlowNcode(false), joinSlow(false) {}
         Label joinPoint;
         DataLabelPtr fastNcodePatch;
         DataLabelPtr slowNcodePatch;
         bool hasFastNcode;
         bool hasSlowNcode;
+        bool joinSlow;
     };
 
     struct BaseICInfo {
         BaseICInfo(JSOp op) : op(op)
         { }
         Label fastPathStart;
         Label fastPathRejoin;
         Label slowPathStart;
@@ -346,34 +347,40 @@ class Compiler : public BaseCompiler
             : label(label), pc(pc), id(id)
         { }
     };
 
     struct AutoRejoinSite {
         Compiler *cc;
         jsbytecode *pc;
 
+        bool force;
         bool ool;
         Label oolLabel;
 
         // number of call/rejoin sites when this AutoRejoinSite was created.
         uint32 startSites;
         uint32 rejoinSites;
 
         void *stub1;
         void *stub2;
         void *stub3;
 
         AutoRejoinSite(Compiler *cc, void *stub1, void *stub2 = NULL, void *stub3 = NULL)
-            : cc(cc), pc(cc->PC), ool(false),
+            : cc(cc), pc(cc->PC), force(false), ool(false),
               startSites(cc->callSites.length()),
               rejoinSites(cc->rejoinSites.length()),
               stub1(stub1), stub2(stub2), stub3(stub3)
         {}
 
+        void forceGeneration()
+        {
+            force = true;
+        }
+
         /*
          * Rejoin a particular slow path label in a synced state, rather than
          * the current point of the fast path when the AutoRejoinSite finishes.
          */
         void oolRejoin(Label label)
         {
             ool = true;
             oolLabel = label;
@@ -386,17 +393,17 @@ class Compiler : public BaseCompiler
 #ifdef DEBUG
             JS_ASSERT(pc == cc->PC);
             cc->checkRejoinSite(startSites, rejoinSites, stub1);
             if (stub2)
                 cc->checkRejoinSite(startSites, rejoinSites, stub2);
             if (stub3)
                 cc->checkRejoinSite(startSites, rejoinSites, stub3);
 #endif
-            if (cc->needRejoins(pc)) {
+            if (force || cc->needRejoins(pc)) {
                 cc->addRejoinSite(stub1, ool, oolLabel);
                 if (stub2)
                     cc->addRejoinSite(stub2, ool, oolLabel);
                 if (stub3)
                     cc->addRejoinSite(stub3, ool, oolLabel);
             }
         }
     };
@@ -657,17 +664,17 @@ class Compiler : public BaseCompiler
     void interruptCheckHelper();
     void recompileCheckHelper();
     void emitUncachedCall(uint32 argc, bool callingNew);
     void checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedArgc,
                                    FrameEntry *origCallee, FrameEntry *origThis,
                                    MaybeRegisterID origCalleeType, RegisterID origCalleeData,
                                    MaybeRegisterID origThisType, RegisterID origThisData,
                                    Jump *uncachedCallSlowRejoin, CallPatchInfo *uncachedCallPatch);
-    bool inlineCallHelper(uint32 argc, bool callingNew);
+    bool inlineCallHelper(uint32 argc, bool callingNew, FrameSize &callFrameSize);
     void fixPrimitiveReturn(Assembler *masm, FrameEntry *fe);
     bool jsop_gnameinc(JSOp op, VoidStubAtom stub, uint32 index);
     CompileStatus jsop_nameinc(JSOp op, VoidStubAtom stub, uint32 index);
     CompileStatus jsop_propinc(JSOp op, VoidStubAtom stub, uint32 index);
     void jsop_eleminc(JSOp op, VoidStub);
     void jsop_getgname(uint32 index, JSValueType type);
     void jsop_getgname_slow(uint32 index);
     void jsop_callgname_epilogue();
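
AutoRejoinSite, declared above, is a destructor-driven recorder: when it goes out of scope it registers its stubs as rejoin sites if the current PC needs rejoins, and the new forceGeneration() flag makes registration unconditional, as the JSOP_CALL path now requires for inlined call sites. A reduced RAII model follows, with invented names and none of the real Compiler state:

    // Reduced RAII model of AutoRejoinSite (invented names; the real class
    // carries labels, multiple stubs and debug checks).
    #include <cstdio>
    #include <vector>

    struct CompilerModel {
        bool pcNeedsRejoins;             // plays cc->needRejoins(pc)
        std::vector<void *> rejoinSites; // plays cc->rejoinSites
    };

    class AutoRejoinSiteModel {
        CompilerModel &cc;
        void *stub;
        bool force;
      public:
        AutoRejoinSiteModel(CompilerModel &c, void *s)
          : cc(c), stub(s), force(false) {}
        void forceGeneration() { force = true; }
        ~AutoRejoinSiteModel() {
            // Mirrors the patched condition: force || cc->needRejoins(pc).
            if (force || cc.pcNeedsRejoins)
                cc.rejoinSites.push_back(stub);
        }
    };

    int main()
    {
        CompilerModel cc = { false, {} };
        {
            AutoRejoinSiteModel site(cc, (void *) 0x1);
            site.forceGeneration();  // e.g. frames were inlined at this site
        }
        printf("rejoin sites recorded: %zu\n", cc.rejoinSites.size());  // 1
        return 0;
    }
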
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -489,18 +489,18 @@ mjit::Compiler::jsop_equality(JSOp op, B
         types::ObjectKind lhsKind =
             lhsTypes ? lhsTypes->getKnownObjectKind(cx) : types::OBJECT_UNKNOWN;
         types::ObjectKind rhsKind =
             rhsTypes ? rhsTypes->getKnownObjectKind(cx) : types::OBJECT_UNKNOWN;
 
         if (lhsKind != types::OBJECT_UNKNOWN && rhsKind != types::OBJECT_UNKNOWN) {
             /* :TODO: Merge with jsop_relational_int? */
             JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
-            frame.forgetConstantData(lhs);
-            frame.forgetConstantData(rhs);
+            frame.forgetMismatchedObject(lhs);
+            frame.forgetMismatchedObject(rhs);
             Assembler::Condition cond = GetCompareCondition(op, fused);
             if (target) {
                 fixDoubleTypes();
                 autoRejoin.oolRejoin(stubcc.masm.label());
                 Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
                                                    Registers::ReturnReg, Registers::ReturnReg);
                 if (!frame.syncForBranch(target, Uses(2)))
                     return false;
@@ -1204,17 +1204,17 @@ mjit::Compiler::jsop_setelem(bool popGua
     FrameEntry *id = frame.peek(-2);
     FrameEntry *value = frame.peek(-1);
 
     if (!IsCacheableSetElem(obj, id, value) || monitored(PC)) {
         jsop_setelem_slow();
         return true;
     }
 
-    frame.forgetConstantData(obj);
+    frame.forgetMismatchedObject(obj);
 
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.extra(obj).types;
         types::ObjectKind kind = types
             ? types->getKnownObjectKind(cx)
             : types::OBJECT_UNKNOWN;
         if (id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
@@ -1538,17 +1538,17 @@ mjit::Compiler::jsop_getelem(bool isCall
     if (!IsCacheableGetElem(obj, id)) {
         if (isCall)
             jsop_callelem_slow();
         else
             jsop_getelem_slow();
         return true;
     }
 
-    frame.forgetConstantData(obj);
+    frame.forgetMismatchedObject(obj);
 
     if (cx->typeInferenceEnabled()) {
         types::TypeSet *types = frame.extra(obj).types;
         types::ObjectKind kind = types
             ? types->getKnownObjectKind(cx)
             : types::OBJECT_UNKNOWN;
         if (!isCall && id->mightBeType(JSVAL_TYPE_INT32) &&
             (kind == types::OBJECT_DENSE_ARRAY || kind == types::OBJECT_PACKED_ARRAY) &&
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -508,27 +508,28 @@ FrameState::tempRegForData(FrameEntry *f
         return fe->data.reg();
 
     RegisterID reg = allocAndLoadReg(fe, false, RematInfo::DATA).reg();
     fe->data.setRegister(reg);
     return reg;
 }
 
 inline void
-FrameState::forgetConstantData(FrameEntry *fe)
+FrameState::forgetMismatchedObject(FrameEntry *fe)
 {
-    if (!fe->isConstant())
-        return;
-    JS_ASSERT(fe->isType(JSVAL_TYPE_OBJECT));
+    if (fe->isNotType(JSVAL_TYPE_OBJECT))
+        syncAndForgetFe(fe);
 
-    RegisterID reg = allocReg();
-    regstate(reg).associate(fe, RematInfo::DATA);
+    if (fe->isConstant()) {
+        RegisterID reg = allocReg();
+        regstate(reg).associate(fe, RematInfo::DATA);
 
-    masm.move(JSC::MacroAssembler::ImmPtr(&fe->getValue().toObject()), reg);
-    fe->data.setRegister(reg);
+        masm.move(JSC::MacroAssembler::ImmPtr(&fe->getValue().toObject()), reg);
+        fe->data.setRegister(reg);
+    }
 }
 
 inline JSC::MacroAssembler::FPRegisterID
 FrameState::tempFPRegForData(FrameEntry *fe)
 {
     JS_ASSERT(!fe->isConstant());
     JS_ASSERT(fe->isType(JSVAL_TYPE_DOUBLE));
 
@@ -856,20 +857,21 @@ FrameState::forgetType(FrameEntry *fe)
      * callers' lives simpler, bail out if the type is not known.
      */
     if (!fe->isTypeKnown())
         return;
 
     /*
      * Likewise, storeLocal() may have set this FE, with a known type,
      * to be a copy of another FE, which has an unknown type.
-     * Just forget the type, since the backing is used in all cases.
      */
     if (fe->isCopy()) {
-        fe->type.invalidate();
+        syncFe(fe);
+        fe->clear();
+        fe->resetSynced();
         return;
     }
 
     ensureTypeSynced(fe, masm);
     fe->type.setMemory();
 }
 
 inline void
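
The forgetType change above also rewires how copied entries are handled: instead of merely invalidating the known type while the copy relationship lingers, the entry is synced to its stack slot, severed from its backing, and reset. A toy model of the new behavior (EntryModel and forgetTypeModel are invented names):

    // Toy model of the new forgetType handling for copies (EntryModel is
    // invented; the real FrameEntry tracks registers and copies in detail).
    #include <cassert>

    struct EntryModel {
        bool isCopy;    // mirrors another entry's storage
        bool typeKnown; // type tag known at compile time
        bool inMemory;  // value synced to its stack slot
    };

    static void forgetTypeModel(EntryModel &fe)
    {
        if (!fe.typeKnown)
            return;
        if (fe.isCopy) {
            fe.inMemory = true;   // syncFe(fe)
            fe.isCopy = false;    // fe->clear()
            fe.typeKnown = false; // fe->resetSynced()
            return;
        }
        fe.typeKnown = false;     // type flushed to memory; payload kept
    }

    int main()
    {
        EntryModel copy = { true, true, false };
        forgetTypeModel(copy);
        assert(!copy.isCopy && copy.inMemory && !copy.typeKnown);
        return 0;
    }
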
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -401,21 +401,21 @@ class FrameState
 
     /*
      * Same as above, except loads into reg (using masm) if the entry does not
      * already have a register, and does not change the frame state in doing so.
      */
     inline RegisterID tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const;
 
     /*
-     * If fe is a constant, allocate a register and forget its payload. This
-     * function is a stopgap to cover missing paths in the Compiler, uses of it
-     * should be fixed.
+     * For opcodes which expect to operate on an object, forget the entry if it
+     * is either a known constant or a non-object. This simplifies path
+     * generation in the Compiler for such unusual cases.
      */
-    inline void forgetConstantData(FrameEntry *fe);
+    inline void forgetMismatchedObject(FrameEntry *fe);
 
     /*
      * Convert an integer to a double without applying
      * additional Register pressure.
      */
     inline void convertInt32ToDouble(Assembler &masm, FrameEntry *fe,
                                      FPRegisterID fpreg) const;
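
The forgetMismatchedObject contract documented above can be modeled in isolation: a known non-object entry is synced and forgotten outright, while a constant object has its payload materialized into a register, so the object-only fast paths that follow always see a register-backed, possibly-object entry. A sketch under those assumptions, with hypothetical types rather than the real FrameState:

    // Sketch of the forgetMismatchedObject contract (FeModel and friends are
    // hypothetical; the real entry state lives in FrameState/FrameEntry).
    #include <cassert>

    enum class Known { Unknown, Object, NotObject };

    struct FeModel {
        Known type;
        bool  isConstant;  // payload is a compile-time constant
        bool  inRegister;  // payload lives in a register
        bool  synced;      // value written back to its stack slot
    };

    static void forgetMismatchedObjectModel(FeModel &fe)
    {
        if (fe.type == Known::NotObject) {  // syncAndForgetFe(fe)
            fe.synced = true;
            fe.type = Known::Unknown;
            fe.isConstant = false;
            fe.inRegister = false;
            return;
        }
        if (fe.isConstant) {                // materialize the object pointer
            fe.inRegister = true;           // into a freshly allocated reg
            fe.isConstant = false;
        }
    }

    int main()
    {
        FeModel constObj = { Known::Object, true, false, false };
        forgetMismatchedObjectModel(constObj);
        assert(constObj.inRegister && !constObj.isConstant);

        FeModel notObj = { Known::NotObject, false, true, false };
        forgetMismatchedObjectModel(notObj);
        assert(notObj.type == Known::Unknown && notObj.synced);
        return 0;
    }
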