[INFER] Separate call sites from rejoin sites in JITScript, reconstruct doubles on each rejoin, bug 647991.
author      Brian Hackett <bhackett1024@gmail.com>
date        Sun, 10 Apr 2011 16:09:01 -0700
changeset   74920 3816e4abb15803e3d612a883ab6f423f45f59e44
parent      74919 d3215d1e985a03eb795203c3a6de4bc86c0b246c
child       74921 74a8fb1bbec564ab45bbc12e516a595bfbdfe1d6
push id     2
push user   bsmedberg@mozilla.com
push date   Fri, 19 Aug 2011 14:38:13 +0000
bugs        647991
milestone   2.2a1pre
js/src/jit-test/tests/jaeger/recompile/bug647991-1.js
js/src/jit-test/tests/jaeger/recompile/bug647991-2.js
js/src/jit-test/tests/jaeger/recompile/bug648502.js
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastArithmetic.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameState.cpp
js/src/methodjit/FrameState.h
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/Retcon.cpp
js/src/methodjit/Retcon.h
js/src/methodjit/StubCalls.cpp
js/src/methodjit/StubCalls.h
js/src/methodjit/StubCompiler.cpp
js/src/methodjit/StubCompiler.h
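Before the diff, a sketch of the structure at the heart of the change. The patch gives each JITScript a table of rejoin sites separate from its table of call sites; the shape below is inferred from the initialize() call in finishThisUp() and is illustration only. Field names and types are assumptions; the real declarations live in the methodjit headers, which this diff shows only in part.

#include <stddef.h>
#include <stdint.h>

typedef uint32_t uint32;

/*
 * Hypothetical sketch, not the patch's actual declaration. A rejoin site
 * records where in the generated stub code execution can resume, keyed by
 * the bytecode offset and the identity of the stub call being rejoined
 * from; compare the "Build the table of rejoin sites" loop below.
 */
struct RejoinSite {
    uint32 codeOffset;   /* offset of the rejoin entry within the JIT code */
    uint32 pcOffset;     /* bytecode offset within the outer script */
    size_t id;           /* stub rejoined from, or a magic id such as
                            RejoinSite::VARIADIC_ID */

    void initialize(uint32 codeOffset, uint32 pcOffset, size_t id) {
        this->codeOffset = codeOffset;
        this->pcOffset = pcOffset;
        this->id = id;
    }
};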
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/recompile/bug647991-1.js
@@ -0,0 +1,18 @@
+function f() {
+    function g() {
+        eval("");
+        gc();
+        Math.abs(4);
+        NaN;
+    }
+    g();
+}
+function h() {
+    var x, y;
+    x = Math.floor(-0);
+    y = parseInt("1");
+}
+
+f();
+h();
+
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/recompile/bug647991-2.js
@@ -0,0 +1,2 @@
+[""][NaN] = 2;
+-([][[""][String] = ""] = null);
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/jaeger/recompile/bug648502.js
@@ -0,0 +1,12 @@
+function f(x, y) {
+    -(undefined ? 0 : 0);
+    assertEq(y === y, true);
+    return 0;
+}
+f(1, 2);
+{
+    f(3, 3.14);
+    f(true, f(4, 5));
+
+    function g() {}
+}
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -85,17 +85,17 @@ static const char *OpcodeNames[] = {
 
 /*
  * Number of times a script must be called or have a backedge taken before we
  * try to inline its calls.
  */
 static const size_t CALLS_BACKEDGES_BEFORE_INLINING = 10000;
 
 mjit::Compiler::Compiler(JSContext *cx, JSScript *outerScript, bool isConstructing,
-                         const Vector<PatchableFrame> *patchFrames, bool recompiling)
+                         const Vector<PatchableFrame> *patchFrames)
   : BaseCompiler(cx),
     outerScript(outerScript),
     isConstructing(isConstructing),
     globalObj(outerScript->global),
     patchFrames(patchFrames),
     savedTraps(NULL),
     frame(cx, *this, masm, stubcc),
     a(NULL), outer(NULL), script(NULL), PC(NULL), loop(NULL),
@@ -109,29 +109,29 @@ mjit::Compiler::Compiler(JSContext *cx, 
     traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
 #if defined JS_POLYIC
     pics(CompilerAllocPolicy(cx, *thisFromCtor())), 
     getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
     callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
-    callSites(CompilerAllocPolicy(cx, *thisFromCtor())), 
+    callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
+    rejoinSites(CompilerAllocPolicy(cx, *thisFromCtor())),
     doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
     loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
     stubcc(cx, *thisFromCtor(), frame),
     debugMode_(cx->compartment->debugMode),
 #if defined JS_TRACER
     addTraceHints(cx->traceJitEnabled),
 #else
     addTraceHints(false),
 #endif
-    recompiling(recompiling),
     inlining(false),
     oomInVector(false),
     applyTricks(NoApplyTricks)
 {
     /* :FIXME: bug 637856 disabling traceJit if inference is enabled */
     if (cx->typeInferenceEnabled())
         addTraceHints = false;
 
@@ -251,16 +251,18 @@ mjit::Compiler::pushActiveFrame(JSScript
         if (status != Compile_Okay)
             return status;
     }
 
     this->script = script;
     this->PC = script->code;
     this->a = newa;
 
+    variadicRejoin = false;
+
     return Compile_Okay;
 }
 
 void
 mjit::Compiler::popActiveFrame()
 {
     JS_ASSERT(a->parent);
     this->PC = a->parentPC;
@@ -319,39 +321,44 @@ mjit::Compiler::performCompilation(JITSc
 
     JaegerSpew(JSpew_Scripts, "successfully compiled (code \"%p\") (size \"%ld\")\n",
                (*jitp)->code.m_code.executableAddress(), (*jitp)->code.m_size);
 
     if (!*jitp)
         return Compile_Abort;
 
     /*
-     * Make sure any inlined scripts have JIT code associated that we can
-     * rejoin into if we expand the inlined frames.
+     * Make sure any inlined scripts have JIT code emitted with all rejoin
+     * points included, so we can always expand the inlined frames.
      */
+    bool expanded = false;
     for (unsigned i = 0; i < (*jitp)->nInlineFrames; i++) {
         JSScript *script = (*jitp)->inlineFrames()[i].fun->script();
 
         script->inlineParents = true;
 
         /* We should have bailed out while inlining if the script is unjittable. */
         JS_ASSERT(script->jitArityCheckNormal != JS_UNJITTABLE_SCRIPT);
 
         if (script->jitNormal && !script->jitNormal->rejoinPoints) {
+            if (!expanded) {
+                ExpandInlineFrames(cx, true);
+                expanded = true;
+            }
             mjit::Recompiler recompiler(cx, script);
             if (!recompiler.recompile()) {
                 ReleaseScriptCode(cx, outerScript, true);
                 return Compile_Error;
             }
         }
 
         if (!script->jitNormal) {
             CompileStatus status = Compile_Retry;
             while (status == Compile_Retry) {
-                mjit::Compiler cc(cx, script, isConstructing, NULL, true);
+                mjit::Compiler cc(cx, script, isConstructing, NULL);
                 status = cc.compile();
             }
             if (status != Compile_Okay) {
                 ReleaseScriptCode(cx, outerScript, true);
                 return status;
             }
         }
     }
@@ -446,17 +453,17 @@ mjit::TryCompile(JSContext *cx, JSStackF
 
     types::AutoEnterTypeInference enter(cx, true);
 
     // If there were recoverable compilation failures in the function from
     // static overflow or bad inline callees, try recompiling a few times
     // before giving up.
     CompileStatus status = Compile_Retry;
     for (unsigned i = 0; status == Compile_Retry && i < 5; i++) {
-        Compiler cc(cx, fp->script(), fp->isConstructing(), NULL, fp->script()->inlineParents);
+        Compiler cc(cx, fp->script(), fp->isConstructing(), NULL);
         status = cc.compile();
     }
 
     if (!cx->compartment->types.checkPendingRecompiles(cx))
         return Compile_Error;
 
     return status;
 }
@@ -498,16 +505,18 @@ mjit::Compiler::generatePrologue()
         invokeLabel = masm.label();
 
         Label fastPath = masm.label();
 
         /* Store this early on so slow paths can access it. */
         masm.storePtr(ImmPtr(script->fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
 
         {
+            REJOIN_SITE(stubs::CheckArgumentTypes);
+
             /*
              * Entry point #3: The caller has partially constructed a frame,
              * but argc might be != nargs, so an arity check might be called.
              *
              * This loops back to entry point #2.
              */
             arityLabel = stubcc.masm.label();
 
@@ -515,17 +524,17 @@ mjit::Compiler::generatePrologue()
                                                  Imm32(script->fun->nargs));
 
             if (JSParamReg_Argc != Registers::ArgReg1)
                 stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
 
             /* Slow path - call the arity check function. Returns new fp. */
             stubcc.masm.storePtr(ImmPtr(script->fun),
                                  Address(JSFrameReg, JSStackFrame::offsetOfExec()));
-            OOL_STUBCALL(stubs::FixupArity);
+            OOL_STUBCALL_NO_REJOIN(stubs::FixupArity);
             stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
             argMatch.linkTo(stubcc.masm.label(), &stubcc.masm);
 
             /* Type check the arguments as well. */
             if (cx->typeInferenceEnabled()) {
 #ifdef JS_MONOIC
                 this->argsCheckJump = stubcc.masm.jump();
                 this->argsCheckStub = stubcc.masm.label();
@@ -550,17 +559,17 @@ mjit::Compiler::generatePrologue()
         uint32 nvals = script->nslots + VALUES_PER_STACK_FRAME + StackSpace::STACK_EXTRA;
         masm.addPtr(Imm32(nvals * sizeof(Value)), JSFrameReg, Registers::ReturnReg);
         Jump stackCheck = masm.branchPtr(Assembler::AboveOrEqual, Registers::ReturnReg,
                                          FrameAddress(offsetof(VMFrame, stackLimit)));
 
         /* If the stack check fails... */
         {
             stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
-            OOL_STUBCALL(stubs::HitStackQuota);
+            OOL_STUBCALL_NO_REJOIN(stubs::HitStackQuota);
             stubcc.crossJump(stubcc.masm.jump(), masm.label());
         }
 
         /*
          * Set locals to undefined, as in initCallFrameLatePrologue.
          * Skip locals which aren't closed and are known to be defined before used,
          * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
          */
@@ -569,17 +578,17 @@ mjit::Compiler::generatePrologue()
                 Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
                 masm.storeValue(UndefinedValue(), local);
             }
         }
 
         /* Create the call object. */
         if (script->fun->isHeavyweight()) {
             prepareStubCall(Uses(0));
-            INLINE_STUBCALL(stubs::CreateFunCallObject);
+            INLINE_STUBCALL_NO_REJOIN(stubs::CreateFunCallObject);
         }
 
         j.linkTo(masm.label(), &masm);
 
         if (a->analysis.usesScopeChain() && !script->fun->isHeavyweight()) {
             /*
              * Load the scope chain into the frame if necessary.  The scope chain
              * is always set for global and eval frames, and will have been set by
@@ -593,18 +602,20 @@ mjit::Compiler::generatePrologue()
             masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
             hasScope.linkTo(masm.label(), &masm);
         }
     }
 
     if (isConstructing)
         constructThis();
 
-    if (debugMode() || Probes::callTrackingActive(cx))
+    if (debugMode() || Probes::callTrackingActive(cx)) {
+        REJOIN_SITE(stubs::ScriptDebugPrologue);
         INLINE_STUBCALL(stubs::ScriptDebugPrologue);
+    }
 
     /*
      * Set initial types of locals with known type. These will stay synced
      * through the rest of the script, allowing us to avoid syncing the types
      * of locals after writing their payloads. Notes:
      *
      * - We don't call generatePrologue and perform this syncing when inlining
      *   frames; such locals are not assumed to be synced after being assigned.
@@ -735,16 +746,17 @@ mjit::Compiler::finishThisUp(JITScript *
     for (size_t i = 0; i < inlineFrames.length(); i++)
         nUnsyncedEntries += inlineFrames[i]->unsyncedEntries.length();
 
     /* Please keep in sync with JITScript::scriptDataSize! */
     size_t totalBytes = sizeof(JITScript) +
                         sizeof(NativeMapEntry) * nNmapLive +
                         sizeof(InlineFrame) * inlineFrames.length() +
                         sizeof(CallSite) * callSites.length() +
+                        sizeof(RejoinSite) * rejoinSites.length() +
 #if defined JS_MONOIC
                         sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
                         sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
                         sizeof(ic::CallICInfo) * callICs.length() +
                         sizeof(ic::EqualityICInfo) * equalityICs.length() +
                         sizeof(ic::TraceICInfo) * traceICs.length() +
 #endif
 #if defined JS_POLYIC
@@ -761,27 +773,21 @@ mjit::Compiler::finishThisUp(JITScript *
         return Compile_Error;
     }
 
     JITScript *jit = new(cursor) JITScript;
     cursor += sizeof(JITScript);
 
     JS_ASSERT(outerScript == script);
 
-    /*
-     * We always need to remit rejoin points when compiling a script with inline parents,
-     * so we can expand inline frames at any point.
-     */
-    JS_ASSERT_IF(outerScript->inlineParents, recompiling);
-
     jit->script = script;
     jit->code = JSC::MacroAssemblerCodeRef(result, execPool, masm.size() + stubcc.size());
     jit->invokeEntry = result;
     jit->singleStepMode = script->singleStepMode;
-    jit->rejoinPoints = recompiling;
+    jit->rejoinPoints = script->inlineParents;
     if (script->fun) {
         jit->arityCheckEntry = stubCode.locationOf(arityLabel).executableAddress();
         jit->fastEntry = fullCode.locationOf(invokeLabel).executableAddress();
     }
 
     /* 
      * WARNING: mics(), callICs() et al depend on the ordering of these
      * variable-length sections.  See JITScript's declaration for details.
@@ -842,16 +848,24 @@ mjit::Compiler::finishThisUp(JITScript *
     /* Build the table of call sites. */
     CallSite *jitCallSites = (CallSite *)cursor;
     jit->nCallSites = callSites.length();
     cursor += sizeof(CallSite) * jit->nCallSites;
     for (size_t i = 0; i < jit->nCallSites; i++) {
         CallSite &to = jitCallSites[i];
         InternalCallSite &from = callSites[i];
 
+        /*
+         * Make sure that we emitted rejoin sites for at least the calls in
+         * this compilation. This doesn't ensure we have rejoin sites for
+         * calls emitted in *other* compilations, but it catches many of the
+         * cases where we have insufficient code for rejoining.
+         */
+        JS_ASSERT_IF(cx->typeInferenceEnabled(), !from.needsRejoin);
+
         /* Patch stores of f.regs.inlined for stubs called from within inline frames. */
         if (cx->typeInferenceEnabled() &&
             from.id != CallSite::NCODE_RETURN_ID &&
             from.id != CallSite::MAGIC_TRAP_ID &&
             from.inlineIndex != uint32(-1)) {
             if (from.ool)
                 stubCode.patch(from.inlinePatch, &to);
             else
@@ -861,16 +875,28 @@ mjit::Compiler::finishThisUp(JITScript *
         JSScript *script =
             (from.inlineIndex == uint32(-1)) ? outerScript : inlineFrames[from.inlineIndex]->script;
         uint32 codeOffset = from.ool
                             ? masm.size() + from.returnOffset
                             : from.returnOffset;
         to.initialize(codeOffset, from.inlineIndex, from.inlinepc - script->code, from.id);
     }
 
+    /* Build the table of rejoin sites. */
+    RejoinSite *jitRejoinSites = (RejoinSite *)cursor;
+    jit->nRejoinSites = rejoinSites.length();
+    cursor += sizeof(RejoinSite) * jit->nRejoinSites;
+    for (size_t i = 0; i < jit->nRejoinSites; i++) {
+        RejoinSite &to = jitRejoinSites[i];
+        InternalRejoinSite &from = rejoinSites[i];
+
+        uint32 codeOffset = (uint8 *) stubCode.locationOf(from.label).executableAddress() - result;
+        to.initialize(codeOffset, from.pc - outerScript->code, from.id);
+    }
+
 #if defined JS_MONOIC
     JS_INIT_CLIST(&jit->callers);
 
     if (script->fun && cx->typeInferenceEnabled()) {
         jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
         jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
         jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
         jit->argsCheckPool = NULL;
@@ -1279,16 +1305,17 @@ mjit::Compiler::generateMethod()
         if (op == JSOP_TRAP) {
             if (!trapper.untrap(PC))
                 return Compile_Error;
             op = JSOp(*PC);
             trap |= stubs::JSTRAP_TRAP;
         }
         if (script->singleStepMode && scanner.firstOpInLine(PC - script->code))
             trap |= stubs::JSTRAP_SINGLESTEP;
+        variadicRejoin = false;
 
         analyze::Bytecode *opinfo = a->analysis.maybeCode(PC);
 
         if (!opinfo) {
             if (op == JSOP_STOP)
                 break;
             if (js_CodeSpec[op].length != -1)
                 PC += js_CodeSpec[op].length;
@@ -1299,17 +1326,17 @@ mjit::Compiler::generateMethod()
 
         if (loop)
             loop->PC = PC;
 
         frame.setPC(PC);
         frame.setInTryBlock(opinfo->inTryBlock);
         if (opinfo->jumpTarget || trap) {
             if (fallthrough) {
-                fixDoubleTypes(Uses(0));
+                fixDoubleTypes();
 
                 /*
                  * Watch for fallthrough to the head of a 'do while' loop.
                  * We don't know what register state we will be using at the head
                  * of the loop so sync, branch, and fix it up after the loop
                  * has been processed.
                  */
                 if (cx->typeInferenceEnabled() && a->liveness.getCode(PC).loop) {
@@ -1336,21 +1363,22 @@ mjit::Compiler::generateMethod()
         }
 
         a->jumpMap[uint32(PC - script->code)] = masm.label();
 
         SPEW_OPCODE();
         JS_ASSERT(frame.stackDepth() == opinfo->stackDepth);
 
         if (trap) {
+            REJOIN_SITE(CallSite::MAGIC_TRAP_ID);
             prepareStubCall(Uses(0));
             masm.move(Imm32(trap), Registers::ArgReg1);
             Call cl = emitStubCall(JS_FUNC_TO_DATA_PTR(void *, stubs::Trap), NULL);
             InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
-                                  CallSite::MAGIC_TRAP_ID, true, false);
+                                  CallSite::MAGIC_TRAP_ID, false, true);
             addCallSite(site);
         } else if (!a->parent && savedTraps && savedTraps[PC - script->code]) {
             // Normally when we patch return addresses, we have generated the
             // same exact code at that site. For example, patching a stub call's
             // return address will resume at the same stub call.
             //
             // In the case we're handling here, we could potentially be
             // recompiling to remove a trap, and therefore we won't generate
@@ -1367,63 +1395,29 @@ mjit::Compiler::generateMethod()
             // address.
             //
             // Unfortunately, this means that if a bytecode is ever trapped,
             // we will always generate a CallSite (either Trapped or not) for
             // every debug recompilation of the script thereafter. The reason
             // is that MAGIC_TRAP_ID callsites always propagate to the next
             // recompilation. That's okay, and not worth fixing - it's a small
             // amount of memory.
+            REJOIN_SITE(CallSite::MAGIC_TRAP_ID);
             uint32 offset = stubcc.masm.distanceOf(stubcc.masm.label());
             if (Assembler::ReturnStackAdjustment) {
                 stubcc.masm.addPtr(Imm32(Assembler::ReturnStackAdjustment),
                                    Assembler::stackPointerRegister);
             }
             stubcc.crossJump(stubcc.masm.jump(), masm.label());
 
             InternalCallSite site(offset, a->inlineIndex, PC,
-                                  CallSite::MAGIC_TRAP_ID, false, true);
+                                  CallSite::MAGIC_TRAP_ID, true, true);
             addCallSite(site);
         }
 
-        /*
-         * If we are recompiling, check for any frames on the stack at this
-         * opcode, and patch the types of any arg/local/stack slots which are
-         * integers but need to be doubles. Any value assumed to be a double in
-         * this compilation may instead be an int in the earlier compilation
-         * and stack frames. Other transitions between known types are not
-         * possible --- type sets can only grow, and if new non-double type
-         * tags become possible we will treat that slot as unknown in this
-         * compilation.
-         */
-        for (unsigned i = 0; patchFrames && i < patchFrames->length(); i++) {
-            if ((*patchFrames)[i].pc != PC)
-                continue;
-            JSStackFrame *patchfp = (*patchFrames)[i].fp;
-
-            for (unsigned j = 0; script->fun && j < script->fun->nargs; j++) {
-                FrameEntry *fe = frame.getArg(j);
-                if (fe->isType(JSVAL_TYPE_DOUBLE))
-                    FixDouble(patchfp->formalArg(j));
-            }
-
-            for (unsigned j = 0; j < script->nfixed; j++) {
-                FrameEntry *fe = frame.getLocal(j);
-                if (fe->isType(JSVAL_TYPE_DOUBLE))
-                    FixDouble(patchfp->varSlot(j));
-            }
-
-            unsigned depth = opinfo->stackDepth - analyze::GetUseCount(script, PC - script->code);
-            for (unsigned j = 0; j < depth; j++) {
-                FrameEntry *fe = frame.getStack(j);
-                if (fe->isType(JSVAL_TYPE_DOUBLE))
-                    FixDouble(patchfp->base()[j]);
-            }
-        }
-
     /**********************
      * BEGIN COMPILER OPS *
      **********************/ 
 
         jsbytecode *oldPC = PC;
 
         switch (op) {
           BEGIN_CASE(JSOP_NOP)
@@ -1455,17 +1449,17 @@ mjit::Compiler::generateMethod()
             emitReturn(frame.peek(-1));
             fallthrough = false;
           END_CASE(JSOP_RETURN)
 
           BEGIN_CASE(JSOP_GOTO)
           BEGIN_CASE(JSOP_DEFAULT)
           {
             jsbytecode *target = PC + GET_JUMP_OFFSET(PC);
-            fixDoubleTypes(Uses(0));
+            fixDoubleTypes();
 
             /*
              * Watch out for backward jumps linking 'continue' statements
              * together. These are jumping to another GOTO at the head of the
              * loop, which should be short circuited so we don't mistake this
              * for an actual loop back edge. :XXX: could there be a trap at
              * the target?
              */
@@ -1498,17 +1492,17 @@ mjit::Compiler::generateMethod()
                     return Compile_Error;
             }
             fallthrough = false;
           }
           END_CASE(JSOP_GOTO)
 
           BEGIN_CASE(JSOP_IFEQ)
           BEGIN_CASE(JSOP_IFNE)
-            fixDoubleTypes(Uses(1));
+            fixDoubleTypes();
             if (!jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC)))
                 return Compile_Error;
           END_CASE(JSOP_IFNE)
 
           BEGIN_CASE(JSOP_ARGUMENTS)
             /*
              * For calls of the form 'f.apply(x, arguments)' we can avoid
              * creating an args object by having ic::SplatApplyArgs pull
@@ -1567,17 +1561,17 @@ mjit::Compiler::generateMethod()
             JSOp fused = JSOp(*next);
             if ((fused != JSOP_IFEQ && fused != JSOP_IFNE) || a->analysis.jumpTarget(next))
                 fused = JSOP_NOP;
 
             /* Get jump target, if any. */
             jsbytecode *target = NULL;
             if (fused != JSOP_NOP) {
                 target = next + GET_JUMP_OFFSET(next);
-                fixDoubleTypes(Uses(2));
+                fixDoubleTypes();
             }
 
             BoolStub stub = NULL;
             switch (op) {
               case JSOP_LT:
                 stub = stubs::LessThan;
                 break;
               case JSOP_LE:
@@ -1595,60 +1589,73 @@ mjit::Compiler::generateMethod()
               case JSOP_NE:
                 stub = stubs::NotEqual;
                 break;
               default:
                 JS_NOT_REACHED("WAT");
                 break;
             }
 
+            /*
+             * We need to ensure in the target case that we always rejoin
+             * before the rval test. In the non-target case we will rejoin
+             * correctly after the op finishes.
+             */
+
             FrameEntry *rhs = frame.peek(-1);
             FrameEntry *lhs = frame.peek(-2);
 
             /* Check for easy cases that the parser does not constant fold. */
             if (lhs->isConstant() && rhs->isConstant()) {
                 /* Primitives can be trivially constant folded. */
                 const Value &lv = lhs->getValue();
                 const Value &rv = rhs->getValue();
 
+                AutoRejoinSite autoRejoin(this, (void *) RejoinSite::VARIADIC_ID);
+
                 if (lv.isPrimitive() && rv.isPrimitive()) {
                     bool result = compareTwoValues(cx, op, lv, rv);
 
                     frame.pop();
                     frame.pop();
 
                     if (!target) {
                         frame.push(Value(BooleanValue(result)));
                     } else {
                         if (fused == JSOP_IFEQ)
                             result = !result;
 
                         if (result) {
-                            fixDoubleTypes(Uses(0));
+                            fixDoubleTypes();
                             if (!frame.syncForBranch(target, Uses(0)))
                                 return Compile_Error;
+                            if (needRejoins(PC)) {
+                                autoRejoin.oolRejoin(stubcc.masm.label());
+                                stubcc.rejoin(Changes(0));
+                            }
                             Jump j = masm.jump();
                             if (!jumpAndTrace(j, target))
                                 return Compile_Error;
                         } else {
                             /*
                              * Branch is never taken, but clean up any loop
                              * if this is a backedge.
                              */
                             if (target < PC && !finishLoop(target))
                                 return Compile_Error;
                         }
                     }
                 } else {
-                    if (!emitStubCmpOp(stub, target, fused))
+                    if (!emitStubCmpOp(stub, autoRejoin, target, fused))
                         return Compile_Error;
                 }
             } else {
                 /* Anything else should go through the fast path generator. */
-                if (!jsop_relational(op, stub, target, fused))
+                AutoRejoinSite autoRejoin(this, (void *) RejoinSite::VARIADIC_ID);
+                if (!jsop_relational(op, stub, autoRejoin, target, fused))
                     return Compile_Error;
             }
 
             /* Advance PC manually. */
             JS_STATIC_ASSERT(JSOP_LT_LENGTH == JSOP_GE_LENGTH);
             JS_STATIC_ASSERT(JSOP_LE_LENGTH == JSOP_GE_LENGTH);
             JS_STATIC_ASSERT(JSOP_GT_LENGTH == JSOP_GE_LENGTH);
             JS_STATIC_ASSERT(JSOP_EQ_LENGTH == JSOP_GE_LENGTH);
@@ -1716,16 +1723,17 @@ mjit::Compiler::generateMethod()
             } else {
                 jsop_bitnot();
             }
           }
           END_CASE(JSOP_BITNOT)
 
           BEGIN_CASE(JSOP_NEG)
           {
+            REJOIN_SITE(stubs::Neg);
             FrameEntry *top = frame.peek(-1);
             if (top->isConstant() && top->getValue().isPrimitive()) {
                 double d;
                 ValueToNumber(cx, top->getValue(), &d);
                 d = -d;
                 Value v = NumberValue(d);
 
                 /* Watch for overflow in constant propagation. */
@@ -1744,44 +1752,49 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_NEG)
 
           BEGIN_CASE(JSOP_POS)
             jsop_pos();
           END_CASE(JSOP_POS)
 
           BEGIN_CASE(JSOP_DELNAME)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             JSAtom *atom = script->getAtom(index);
 
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(atom), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::DelName);
             pushSyncedEntry(0);
           }
           END_CASE(JSOP_DELNAME)
 
           BEGIN_CASE(JSOP_DELPROP)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             JSAtom *atom = script->getAtom(index);
 
             prepareStubCall(Uses(1));
             masm.move(ImmPtr(atom), Registers::ArgReg1);
             INLINE_STUBCALL(STRICT_VARIANT(stubs::DelProp));
             frame.pop();
             pushSyncedEntry(0);
           }
           END_CASE(JSOP_DELPROP) 
 
           BEGIN_CASE(JSOP_DELELEM)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(2));
             INLINE_STUBCALL(STRICT_VARIANT(stubs::DelElem));
             frame.popn(2);
             pushSyncedEntry(0);
+          }
           END_CASE(JSOP_DELELEM)
 
           BEGIN_CASE(JSOP_TYPEOF)
           BEGIN_CASE(JSOP_TYPEOFEXPR)
             jsop_typeof();
           END_CASE(JSOP_TYPEOF)
 
           BEGIN_CASE(JSOP_VOID)
@@ -1949,38 +1962,41 @@ mjit::Compiler::generateMethod()
             bool pop = (JSOp(*next) == JSOP_POP && !a->analysis.jumpTarget(next));
             if (!jsop_setelem(pop))
                 return Compile_Error;
           }
           END_CASE(JSOP_SETELEM);
 
           BEGIN_CASE(JSOP_CALLNAME)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             prepareStubCall(Uses(0));
             masm.move(Imm32(index), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::CallName);
             pushSyncedEntry(0);
             pushSyncedEntry(1);
             frame.extra(frame.peek(-2)).name = script->getAtom(index);
           }
           END_CASE(JSOP_CALLNAME)
 
           BEGIN_CASE(JSOP_EVAL)
           {
+            REJOIN_SITE_ANY();
             JaegerSpew(JSpew_Insns, " --- EVAL --- \n");
             emitEval(GET_ARGC(PC));
             JaegerSpew(JSpew_Insns, " --- END EVAL --- \n");
           }
           END_CASE(JSOP_EVAL)
 
           BEGIN_CASE(JSOP_CALL)
           BEGIN_CASE(JSOP_FUNAPPLY)
           BEGIN_CASE(JSOP_FUNCALL)
           {
+            REJOIN_SITE_ANY();
             bool done = false;
             if (op == JSOP_CALL) {
                 CompileStatus status = inlineNativeFunction(GET_ARGC(PC), false);
                 if (status == Compile_Okay)
                     done = true;
                 else if (status != Compile_InlineAbort)
                     return status;
             }
@@ -2040,68 +2056,69 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_FALSE)
 
           BEGIN_CASE(JSOP_TRUE)
             frame.push(Value(BooleanValue(true)));
           END_CASE(JSOP_TRUE)
 
           BEGIN_CASE(JSOP_OR)
           BEGIN_CASE(JSOP_AND)
-            fixDoubleTypes(Uses(0));
+            fixDoubleTypes();
             if (!jsop_andor(op, PC + GET_JUMP_OFFSET(PC)))
                 return Compile_Error;
           END_CASE(JSOP_AND)
 
           BEGIN_CASE(JSOP_TABLESWITCH)
             /*
             * Note: there is no need to syncForBranch for the various targets of
             * a switch statement.  The liveness analysis has already marked these as
              * allocated with no registers in use.
              */
-            fixDoubleTypes(Uses(1));
+            fixDoubleTypes();
 #if defined JS_CPU_ARM /* Need to implement jump(BaseIndex) for ARM */
-            frame.syncAndForgetEverything();
+            frame.syncAndKillEverything();
             masm.move(ImmPtr(PC), Registers::ArgReg1);
 
-            /* prepareStubCall() is not needed due to syncAndForgetEverything() */
+            /* prepareStubCall() is not needed due to syncAndKillEverything() */
-            INLINE_STUBCALL(stubs::TableSwitch);
+            INLINE_STUBCALL_NO_REJOIN(stubs::TableSwitch);
             frame.pop();
 
             masm.jump(Registers::ReturnReg);
 #else
             if (!jsop_tableswitch(PC))
                 return Compile_Error;
 #endif
             PC += js_GetVariableBytecodeLength(PC);
             break;
           END_CASE(JSOP_TABLESWITCH)
 
           BEGIN_CASE(JSOP_LOOKUPSWITCH)
-            fixDoubleTypes(Uses(1));
+            fixDoubleTypes();
             frame.syncAndForgetEverything();
             masm.move(ImmPtr(PC), Registers::ArgReg1);
 
             /* prepareStubCall() is not needed due to syncAndForgetEverything() */
-            INLINE_STUBCALL(stubs::LookupSwitch);
+            INLINE_STUBCALL_NO_REJOIN(stubs::LookupSwitch);
             frame.pop();
 
             masm.jump(Registers::ReturnReg);
             PC += js_GetVariableBytecodeLength(PC);
             break;
           END_CASE(JSOP_LOOKUPSWITCH)
 
           BEGIN_CASE(JSOP_CASE)
             // X Y
 
             frame.dupAt(-2);
             // X Y X
 
             jsop_stricteq(JSOP_STRICTEQ);
             // X cond
 
+            fixDoubleTypes();
             if (!jsop_ifneq(JSOP_IFNE, PC + GET_JUMP_OFFSET(PC)))
                 return Compile_Error;
           END_CASE(JSOP_CASE)
 
           BEGIN_CASE(JSOP_STRICTEQ)
             jsop_stricteq(op);
           END_CASE(JSOP_STRICTEQ)
 
@@ -2110,32 +2127,38 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_STRICTNE)
 
           BEGIN_CASE(JSOP_ITER)
             if (!iter(PC[1]))
                 return Compile_Error;
           END_CASE(JSOP_ITER)
 
           BEGIN_CASE(JSOP_MOREITER)
+          {
             /* At the byte level, this is always fused with IFNE or IFNEX. */
             if (!iterMore())
                 return Compile_Error;
+            JSOp next = JSOp(PC[JSOP_MOREITER_LENGTH]);
+            PC += JSOP_MOREITER_LENGTH;
+            PC += js_CodeSpec[next].length;
             break;
+          }
           END_CASE(JSOP_MOREITER)
 
           BEGIN_CASE(JSOP_ENDITER)
             iterEnd();
           END_CASE(JSOP_ENDITER)
 
           BEGIN_CASE(JSOP_POP)
             frame.pop();
           END_CASE(JSOP_POP)
 
           BEGIN_CASE(JSOP_NEW)
           {
+            REJOIN_SITE_ANY();
             JaegerSpew(JSpew_Insns, " --- NEW OPERATOR --- \n");
             inlineCallHelper(GET_ARGC(PC), true);
             JaegerSpew(JSpew_Insns, " --- END NEW OPERATOR --- \n");
           }
           END_CASE(JSOP_NEW)
 
           BEGIN_CASE(JSOP_GETARG)
           {
@@ -2306,37 +2329,43 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_SETNAME)
           BEGIN_CASE(JSOP_SETMETHOD)
             if (!jsop_setprop(script->getAtom(fullAtomIndex(PC)), true))
                 return Compile_Error;
           END_CASE(JSOP_SETNAME)
 
           BEGIN_CASE(JSOP_THROW)
             prepareStubCall(Uses(1));
-            INLINE_STUBCALL(stubs::Throw);
+            INLINE_STUBCALL_NO_REJOIN(stubs::Throw);
             frame.pop();
           END_CASE(JSOP_THROW)
 
           BEGIN_CASE(JSOP_IN)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(2));
             INLINE_STUBCALL(stubs::In);
             frame.popn(2);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
+          }
           END_CASE(JSOP_IN)
 
           BEGIN_CASE(JSOP_INSTANCEOF)
             if (!jsop_instanceof())
                 return Compile_Error;
           END_CASE(JSOP_INSTANCEOF)
 
           BEGIN_CASE(JSOP_EXCEPTION)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(0));
             INLINE_STUBCALL(stubs::Exception);
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
+          }
           END_CASE(JSOP_EXCEPTION)
 
           BEGIN_CASE(JSOP_LINENO)
           END_CASE(JSOP_LINENO)
 
           BEGIN_CASE(JSOP_ENUMELEM)
             // Normally, SETELEM transforms the stack
             //  from: OBJ ID VALUE
@@ -2368,56 +2397,60 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_NULLBLOCKCHAIN)
 
           BEGIN_CASE(JSOP_CONDSWITCH)
             /* No-op for the decompiler. */
           END_CASE(JSOP_CONDSWITCH)
 
           BEGIN_CASE(JSOP_DEFFUN)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             JSFunction *innerFun = script->getFunction(index);
 
             if (script->fun && script->bindings.hasBinding(cx, innerFun->atom))
                 frame.syncAndForgetEverything();
 
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(innerFun), Registers::ArgReg1);
             INLINE_STUBCALL(STRICT_VARIANT(stubs::DefFun));
           }
           END_CASE(JSOP_DEFFUN)
 
           BEGIN_CASE(JSOP_DEFVAR)
           BEGIN_CASE(JSOP_DEFCONST)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             JSAtom *atom = script->getAtom(index);
 
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(atom), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::DefVarOrConst);
           }
           END_CASE(JSOP_DEFVAR)
 
           BEGIN_CASE(JSOP_SETCONST)
           {
+            REJOIN_SITE_ANY();
             uint32 index = fullAtomIndex(PC);
             JSAtom *atom = script->getAtom(index);
 
             if (script->fun && script->bindings.hasBinding(cx, atom))
                 frame.syncAndForgetEverything();
 
             prepareStubCall(Uses(1));
             masm.move(ImmPtr(atom), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::SetConst);
           }
           END_CASE(JSOP_SETCONST)
 
           BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
           {
+            REJOIN_SITE_ANY();
             uint32 slot = GET_SLOTNO(PC);
             JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
             prepareStubCall(Uses(frame.frameSlots()));
             masm.move(ImmPtr(fun), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::DefLocalFun_FC);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
             frame.storeLocal(slot, JSVAL_TYPE_OBJECT, true);
@@ -2448,21 +2481,21 @@ mjit::Compiler::generateMethod()
                     stub = stubs::LambdaJoinableForNull;
                 }
             }
 
             prepareStubCall(Uses(uses));
             masm.move(ImmPtr(fun), Registers::ArgReg1);
 
             if (stub == stubs::Lambda) {
-                INLINE_STUBCALL(stub);
+                INLINE_STUBCALL_NO_REJOIN(stub);
             } else {
                 jsbytecode *savedPC = PC;
                 PC = pc2;
-                INLINE_STUBCALL(stub);
+                INLINE_STUBCALL_NO_REJOIN(stub);
                 PC = savedPC;
             }
 
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_LAMBDA)
 
@@ -2487,30 +2520,37 @@ mjit::Compiler::generateMethod()
             frame.freeReg(reg);
             frame.push(Address(reg, index * sizeof(Value)), knownPushedType(0));
             if (op == JSOP_CALLFCSLOT)
                 frame.push(UndefinedValue());
           }
           END_CASE(JSOP_CALLFCSLOT)
 
           BEGIN_CASE(JSOP_ARGSUB)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(0));
             masm.move(Imm32(GET_ARGNO(PC)), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::ArgSub);
             pushSyncedEntry(0);
+          }
           END_CASE(JSOP_ARGSUB)
 
           BEGIN_CASE(JSOP_ARGCNT)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(0));
             INLINE_STUBCALL(stubs::ArgCnt);
             pushSyncedEntry(0);
+          }
           END_CASE(JSOP_ARGCNT)
 
           BEGIN_CASE(JSOP_DEFLOCALFUN)
           {
+            REJOIN_SITE_ANY();
             uint32 slot = GET_SLOTNO(PC);
             JSFunction *fun = script->getFunction(fullAtomIndex(&PC[SLOTNO_LEN]));
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(fun), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::DefLocalFun);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
             frame.storeLocal(slot, JSVAL_TYPE_OBJECT, true);
@@ -2537,34 +2577,51 @@ mjit::Compiler::generateMethod()
             jsop_setgname(script->getAtom(fullAtomIndex(PC)), true);
           END_CASE(JSOP_SETGNAME)
 
           BEGIN_CASE(JSOP_REGEXP)
           {
             JSObject *regex = script->getRegExp(fullAtomIndex(PC));
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(regex), Registers::ArgReg1);
-            INLINE_STUBCALL(stubs::RegExp);
+            INLINE_STUBCALL_NO_REJOIN(stubs::RegExp);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_REGEXP)
 
           BEGIN_CASE(JSOP_OBJECT)
           {
             JSObject *object = script->getObject(fullAtomIndex(PC));
             RegisterID reg = frame.allocReg();
             masm.move(ImmPtr(object), reg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
           }
           END_CASE(JSOP_OBJECT)
 
           BEGIN_CASE(JSOP_CALLPROP)
-            if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
-                return Compile_Error;
+          {
+              /*
+               * We can rejoin from a getprop if we took the callprop_str case
+               * in an earlier compilation. We rejoin from this point differently,
+               * as after the getprop the top two stack values will be reversed.
+               */
+              AutoRejoinSite rejoinGetProp(this, JS_FUNC_TO_DATA_PTR(void *, stubs::GetProp),
+                                           JS_FUNC_TO_DATA_PTR(void *, ic::GetProp));
+
+              if (!jsop_callprop(script->getAtom(fullAtomIndex(PC))))
+                  return Compile_Error;
+
+              if (needRejoins(PC)) {
+                  rejoinGetProp.oolRejoin(stubcc.masm.label());
+                  stubcc.masm.infallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::CallPropSwap),
+                                               frame.totalDepth());
+                  stubcc.rejoin(Changes(2));
+              }
+          }
           END_CASE(JSOP_CALLPROP)
 
           BEGIN_CASE(JSOP_UINT24)
             frame.push(Value(Int32Value((int32_t) GET_UINT24(PC))));
           END_CASE(JSOP_UINT24)
 
           BEGIN_CASE(JSOP_CALLELEM)
             jsop_getelem(true);
@@ -2611,36 +2668,39 @@ mjit::Compiler::generateMethod()
             frame.push(MagicValue(JS_ARRAY_HOLE));
           END_CASE(JSOP_HOLE)
 
           BEGIN_CASE(JSOP_LAMBDA_FC)
           {
             JSFunction *fun = script->getFunction(fullAtomIndex(PC));
             prepareStubCall(Uses(frame.frameSlots()));
             masm.move(ImmPtr(fun), Registers::ArgReg1);
-            INLINE_STUBCALL(stubs::FlatLambda);
+            INLINE_STUBCALL_NO_REJOIN(stubs::FlatLambda);
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_LAMBDA_FC)
 
           BEGIN_CASE(JSOP_TRACE)
           BEGIN_CASE(JSOP_NOTRACE)
           {
             if (a->analysis.jumpTarget(PC)) {
                 interruptCheckHelper();
                 recompileCheckHelper();
             }
           }
           END_CASE(JSOP_TRACE)
 
           BEGIN_CASE(JSOP_DEBUGGER)
+          {
+            REJOIN_SITE_ANY();
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(PC), Registers::ArgReg1);
             INLINE_STUBCALL(stubs::Debugger);
+          }
           END_CASE(JSOP_DEBUGGER)
 
           BEGIN_CASE(JSOP_UNBRAND)
             jsop_unbrand();
           END_CASE(JSOP_UNBRAND)
 
           BEGIN_CASE(JSOP_UNBRANDTHIS)
             jsop_this();
@@ -2735,16 +2795,18 @@ mjit::Compiler::jumpInScript(Jump j, jsb
         return true;
     }
     return branchPatches.append(BranchPatch(j, pc, a->inlineIndex));
 }
 
 void
 mjit::Compiler::jsop_getglobal(uint32 index)
 {
+    REJOIN_SITE_ANY();
+
     JS_ASSERT(globalObj);
     uint32 slot = script->getGlobalSlot(index);
 
     JSObject *singleton = pushedSingleton(0);
     if (singleton && !globalObj->getSlot(slot).isUndefined()) {
         frame.push(ObjectValue(*singleton));
         return;
     }
@@ -2929,16 +2991,17 @@ void
 mjit::Compiler::emitReturn(FrameEntry *fe)
 {
     JS_ASSERT_IF(!script->fun, JSOp(*PC) == JSOP_STOP);
 
     /* Only the top of the stack can be returned. */
     JS_ASSERT_IF(fe, fe == frame.peek(-1));
 
     if (debugMode() || Probes::callTrackingActive(cx)) {
+        REJOIN_SITE(stubs::ScriptDebugEpilogue);
         prepareStubCall(Uses(0));
         INLINE_STUBCALL(stubs::ScriptDebugEpilogue);
     }
 
     if (a != outer) {
         /*
          * Returning from an inlined script. The checks we do for inlineability
          * and recompilation triggered by args object construction ensure that
@@ -2990,26 +3053,26 @@ mjit::Compiler::emitReturn(FrameEntry *f
      * perform this branch (by instead using a trampoline at the return address
      * to handle exiting mjit code) and thus always puts activation objects,
      * even on the entry frame. To avoid double-putting, EnterMethodJIT clears
      * out the entry frame's activation objects.
      */
     if (script->fun && script->fun->isHeavyweight()) {
         /* There will always be a call object. */
         prepareStubCall(Uses(fe ? 1 : 0));
-        INLINE_STUBCALL(stubs::PutActivationObjects);
+        INLINE_STUBCALL_NO_REJOIN(stubs::PutActivationObjects);
     } else {
         /* if (hasCallObj() || hasArgsObj()) */
         Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                          Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
                                          Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
         stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
 
         stubcc.leave();
-        OOL_STUBCALL(stubs::PutActivationObjects);
+        OOL_STUBCALL_NO_REJOIN(stubs::PutActivationObjects);
 
         emitReturnValue(&stubcc.masm, fe);
         emitFinalReturn(stubcc.masm);
     }
 
     emitReturnValue(&masm, fe);
     emitFinalReturn(masm);
 
@@ -3043,16 +3106,18 @@ mjit::Compiler::emitStubCall(void *ptr, 
     }
     JaegerSpew(JSpew_Insns, " ---- END STUB CALL ---- \n");
     return cl;
 }
 
 void
 mjit::Compiler::interruptCheckHelper()
 {
+    REJOIN_SITE(stubs::Interrupt);
+
     /*
      * Bake in and test the address of the interrupt counter for the runtime.
      * This is faster than doing two additional loads for the context's
      * thread data, but will cause this thread to run slower if there are
      * pending interrupts on some other thread.  For non-JS_THREADSAFE builds
      * we can skip this, as there is only one flag to poll.
      */
 #ifdef JS_THREADSAFE
@@ -3077,26 +3142,22 @@ mjit::Compiler::interruptCheckHelper()
     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
     OOL_STUBCALL(stubs::Interrupt);
     stubcc.rejoin(Changes(0));
 }
 
 void
 mjit::Compiler::recompileCheckHelper()
 {
-    if (!a->analysis.hasFunctionCalls() || !cx->typeInferenceEnabled())
+    REJOIN_SITE(stubs::RecompileForInline);
+
+    if (!a->analysis.hasFunctionCalls() || !cx->typeInferenceEnabled() ||
+        script->callCount() >= CALLS_BACKEDGES_BEFORE_INLINING) {
         return;
-
-    if (inlining) {
-        OOL_STUBCALL(stubs::RecompileForInline);
-        stubcc.rejoin(Changes(0));
-        return;
-    }
-
-    JS_ASSERT(script->callCount() < CALLS_BACKEDGES_BEFORE_INLINING);
+    }
 
     size_t *addr = script->addressOfCallCount();
     masm.add32(Imm32(1), AbsoluteAddress(addr));
 #if defined(JS_CPU_X86) || defined(JS_CPU_ARM)
     Jump jump = masm.branch32(Assembler::GreaterThanOrEqual, AbsoluteAddress(addr),
                               Imm32(CALLS_BACKEDGES_BEFORE_INLINING));
 #else
     /* Handle processors that can't load from absolute addresses. */
@@ -3109,68 +3170,64 @@ mjit::Compiler::recompileCheckHelper()
     stubcc.linkExit(jump, Uses(0));
     stubcc.leave();
 
     OOL_STUBCALL(stubs::RecompileForInline);
     stubcc.rejoin(Changes(0));
 }
 
 void
-mjit::Compiler::addReturnSite(Label joinPoint, bool ool)
+mjit::Compiler::addReturnSite()
 {
-    InternalCallSite site(masm.distanceOf(joinPoint), a->inlineIndex, PC,
-                          CallSite::NCODE_RETURN_ID, false, ool);
+    InternalCallSite site(masm.distanceOf(masm.label()), a->inlineIndex, PC,
+                          CallSite::NCODE_RETURN_ID, false, true);
     addCallSite(site);
+    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
+
+    if (needRejoins(PC))
+        addRejoinSite((void *) CallSite::NCODE_RETURN_ID, false, Label());
 }
 
 void
 mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
 {
     CallPatchInfo callPatch;
 
     RegisterID r0 = Registers::ReturnReg;
     VoidPtrStubUInt32 stub = callingNew ? stubs::UncachedNew : stubs::UncachedCall;
 
-    frame.syncAndKill(Uses(argc + 2));
-    prepareStubCall(Uses(argc + 2));
-    masm.move(Imm32(argc), Registers::ArgReg1);
-    INLINE_STUBCALL(stub);
-
-    if (recompiling) {
-        /* In case we recompiled this call to an uncached call. */
-        OOL_STUBCALL(JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call));
-        stubcc.crossJump(stubcc.masm.jump(), masm.label());
+    {
+        REJOIN_SITE_2(stub, callingNew ? ic::New : ic::Call);
+
+        frame.syncAndKill(Uses(argc + 2));
+        prepareStubCall(Uses(argc + 2));
+        masm.move(Imm32(argc), Registers::ArgReg1);
+        INLINE_STUBCALL(stub);
     }
 
     Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
 
     if (!cx->typeInferenceEnabled())
         masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
 
     callPatch.hasFastNcode = true;
     callPatch.fastNcodePatch =
         masm.storePtrWithPatch(ImmPtr(NULL),
                                Address(JSFrameReg, JSStackFrame::offsetOfncode()));
 
     masm.jump(r0);
     callPatch.joinPoint = masm.label();
-    addReturnSite(callPatch.joinPoint);
-    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
+    addReturnSite();
 
     frame.popn(argc + 2);
 
     frame.takeReg(JSReturnReg_Type);
     frame.takeReg(JSReturnReg_Data);
     frame.pushRegs(JSReturnReg_Type, JSReturnReg_Data, knownPushedType(0));
 
-    if (recompiling) {
-        /* Native call case for recompilation. */
-        OOL_STUBCALL(JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::NativeNew : ic::NativeCall));
-    }
-
     stubcc.linkExitDirect(notCompiled, stubcc.masm.label());
     stubcc.rejoin(Changes(1));
     callPatches.append(callPatch);
 }
 
 static bool
 IsLowerableFunCallOrApply(jsbytecode *pc)
 {
@@ -3206,35 +3263,40 @@ mjit::Compiler::checkCallApplySpeculatio
                                    Address(origCalleeData, JSFunction::offsetOfNativeOrScript()),
                                    ImmPtr(JS_FUNC_TO_DATA_PTR(void *, native)));
 
     /*
      * If speculation fails, we can't use the ic, since it is compiled on the
      * assumption that speculation succeeds. Instead, just do an uncached call.
      */
     {
+        AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
+                                  JS_FUNC_TO_DATA_PTR(void *, ic::Call));
+
         if (isObj.isSet())
             stubcc.linkExitDirect(isObj.getJump(), stubcc.masm.label());
         stubcc.linkExitDirect(isFun, stubcc.masm.label());
         stubcc.linkExitDirect(isNative, stubcc.masm.label());
 
         int32 frameDepthAdjust;
         if (applyTricks == LazyArgsObj) {
-            OOL_STUBCALL(stubs::Arguments);
+            OOL_STUBCALL_NO_REJOIN(stubs::Arguments);
             frameDepthAdjust = +1;
         } else {
             frameDepthAdjust = 0;
         }
 
         stubcc.masm.move(Imm32(callImmArgc), Registers::ArgReg1);
         JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
         OOL_STUBCALL_LOCAL_SLOTS(JS_FUNC_TO_DATA_PTR(void *, stubs::UncachedCall),
                                  frame.totalDepth() + frameDepthAdjust);
         JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
 
+        autoRejoin.oolRejoin(stubcc.masm.label());
+
         RegisterID r0 = Registers::ReturnReg;
         Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
 
         if (!cx->typeInferenceEnabled())
             stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
 
         Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
         uncachedCallPatch->hasSlowNcode = true;
@@ -3333,22 +3395,16 @@ mjit::Compiler::inlineCallHelper(uint32 
 #endif
         if (applyTricks == LazyArgsObj) {
             /* frame.pop() above reset us to pre-JSOP_ARGUMENTS state */
             jsop_arguments();
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         }
         emitUncachedCall(callImmArgc, callingNew);
         applyTricks = NoApplyTricks;
-
-        /* Rejoin from inlined native slow path. */
-        if (recompiling) {
-            OOL_STUBCALL(stubs::SlowCall);
-            stubcc.rejoin(Changes(1));
-        }
         return true;
 #ifdef JS_MONOIC
     }
 
     frame.forgetConstantData(origCallee);
     if (lowerFunCallOrApply) {
         frame.forgetConstantData(origThis);
         if (origThis->isNotType(JSVAL_TYPE_OBJECT))
@@ -3482,16 +3538,22 @@ mjit::Compiler::inlineCallHelper(uint32 
     Jump j = masm.branchPtrWithPatch(Assembler::NotEqual, icCalleeData, callIC.funGuard);
     callIC.funJump = j;
 
     /* Reserve space just before initialization of slowPathStart. */
     RESERVE_OOL_SPACE(stubcc.masm);
 
     Jump rejoin1, rejoin2;
     {
+        AutoRejoinSite autoRejoin(this,
+            JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call),
+            JS_FUNC_TO_DATA_PTR(void *, callingNew ? stubs::UncachedNew : stubs::UncachedCall));
+        AutoRejoinSite autoRejoinSplat(this,
+            JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs));
+
         RESERVE_OOL_SPACE(stubcc.masm);
         stubcc.linkExitDirect(j, stubcc.masm.label());
         callIC.slowPathStart = stubcc.masm.label();
 
         /*
          * Test if the callee is even a function. If this doesn't match, we
          * take a _really_ slow path later.
          */
@@ -3505,18 +3567,20 @@ mjit::Compiler::inlineCallHelper(uint32 
         Jump isNative = stubcc.masm.branch32(Assembler::Below, tmp, Imm32(JSFUN_INTERPRETED));
         tempRegs.putReg(tmp);
 
         /*
          * N.B. After this call, the frame will have a dynamic frame size.
          * Check after the function is known not to be a native so that the
          * catch-all/native path has a static depth.
          */
-        if (callIC.frameSize.isDynamic())
+        if (callIC.frameSize.isDynamic()) {
             OOL_STUBCALL(ic::SplatApplyArgs);
+            autoRejoinSplat.oolRejoin(stubcc.masm.label());
+        }
 
         /*
          * No-op jump that gets patched by ic::New/Call to the stub generated
          * by generateFullCallStub.
          */
         Jump toPatch = stubcc.masm.jump();
         toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
         callIC.oolJump = toPatch;
@@ -3532,16 +3596,18 @@ mjit::Compiler::inlineCallHelper(uint32 
         if (callIC.frameSize.isStatic())
             callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, frame.totalDepth());
         else
             callIC.oolCall = OOL_STUBCALL_LOCAL_SLOTS(icFunPtr, -1);
 
         callIC.funObjReg = icCalleeData;
         callIC.funPtrReg = funPtrReg;
 
+        autoRejoin.oolRejoin(stubcc.masm.label());
+
         /*
          * The IC call either returns NULL, meaning call completed, or a
          * function pointer to jump to.
          */
         rejoin1 = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                             Registers::ReturnReg);
         if (callIC.frameSize.isStatic())
             stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
@@ -3550,16 +3616,18 @@ mjit::Compiler::inlineCallHelper(uint32 
         if (!cx->typeInferenceEnabled())
             stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
         callPatch.hasSlowNcode = true;
         callPatch.slowNcodePatch =
             stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
                                           Address(JSFrameReg, JSStackFrame::offsetOfncode()));
         stubcc.masm.jump(Registers::ReturnReg);
 
+
+
         /*
          * This ool path is the catch-all for everything but scripted function
          * callees. For native functions, ic::NativeNew/NativeCall will repatch
          * funGuard/funJump with a fast call stub. All other cases
          * (non-function callable objects and invalid callees) take the slow
          * path through js::Invoke.
          */
         if (notObjectJump.isSet())
@@ -3585,20 +3653,19 @@ mjit::Compiler::inlineCallHelper(uint32 
 
     InlineFrameAssembler inlFrame(masm, callIC, flags);
     callPatch.hasFastNcode = true;
     callPatch.fastNcodePatch = inlFrame.assemble(NULL);
 
     callIC.hotJump = masm.jump();
     callIC.joinPoint = callPatch.joinPoint = masm.label();
     callIC.callIndex = callSites.length();
-    addReturnSite(callPatch.joinPoint);
+    addReturnSite();
     if (lowerFunCallOrApply)
         uncachedCallPatch.joinPoint = callIC.joinPoint;
-    masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
 
     /*
      * We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
      * in the in-line path so we can check the IC space now.
      */
     CHECK_IC_SPACE();
 
     JSValueType type = knownPushedType(0);
@@ -3630,30 +3697,17 @@ mjit::Compiler::inlineCallHelper(uint32 
     if (lowerFunCallOrApply)
         stubcc.crossJump(uncachedCallSlowRejoin, masm.label());
 
     callICs.append(callIC);
     callPatches.append(callPatch);
     if (lowerFunCallOrApply)
         callPatches.append(uncachedCallPatch);
 
-    if (!lowerFunCallOrApply && recompiling) {
-        /* Recompiled from inlined native slow path. */
-        if (!callingNew) {
-            OOL_STUBCALL(stubs::SlowCall);
-            stubcc.rejoin(Changes(1));
-        }
-
-        /* Recompiled uncached call to cached call. */
-        OOL_STUBCALL(callingNew ? stubs::UncachedNew : stubs::UncachedCall);
-        stubcc.rejoin(Changes(1));
-    }
-
     applyTricks = NoApplyTricks;
-
     return true;
 #endif
 }
 
 CompileStatus
 mjit::Compiler::callArrayBuiltin(uint32 argc, bool callingNew)
 {
     if (!script->compileAndGo)
@@ -3971,21 +4025,27 @@ mjit::Compiler::inlineScriptedFunction(u
             frame.pushTypedPayload(returnType, returnRegister.reg());
         else
             frame.pushDouble(returnRegister.fpreg());
     } else {
         frame.pushSynced(JSVAL_TYPE_UNKNOWN);
     }
 
     /* If we end up expanding inline frames here, they will need a return site to rejoin at. */
-    addReturnSite(stubcc.masm.label(), true);
-    stubcc.masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
-    stubcc.masm.storeValueFromComponents(JSReturnReg_Type, JSReturnReg_Data,
-                                         frame.addressOf(frame.peek(-1)));
-    stubcc.rejoin(Changes(1));
+    if (a == outer) {
+        Label oolStart = stubcc.masm.label();
+        if (needReturnValue) {
+            stubcc.masm.storeValueFromComponents(JSReturnReg_Type, JSReturnReg_Data,
+                                                 frame.addressOf(frame.peek(-1)));
+            if (!syncReturnValue && !returnRegister.isReg())
+                stubcc.masm.ensureInMemoryDouble(frame.addressOf(frame.peek(-1)));
+        }
+        stubcc.rejoin(Changes(1));
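+        /* Rejoin site for frames returning into this script after expansion. */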
+        addRejoinSite((void *) CallSite::NCODE_RETURN_ID, true, oolStart);
+    }
 
     JaegerSpew(JSpew_Inlining, "finished inlining call to script (file \"%s\") (line \"%d\")\n",
                script->filename, script->lineno);
 
     return Compile_Okay;
 }
 
 /*
@@ -3994,16 +4054,92 @@ mjit::Compiler::inlineScriptedFunction(u
  * being set. This includes any API callbacks and any scripted or native call.
  */
 void
 mjit::Compiler::addCallSite(const InternalCallSite &site)
 {
     callSites.append(site);
 }
 
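+/*
+ * Emit a stub call on the inline (fast) path and record its call site. If
+ * needsRejoin is set, an enclosing AutoRejoinSite must capture the site.
+ */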
+void
+mjit::Compiler::inlineStubCall(void *stub, bool needsRejoin)
+{
+    DataLabelPtr inlinePatch;
+    Call cl = emitStubCall(stub, &inlinePatch);
+    InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC,
+                          (size_t)stub, false, needsRejoin);
+    site.inlinePatch = inlinePatch;
+    addCallSite(site);
+}
+
+#ifdef DEBUG
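+/*
+ * Check that a rejoin site captures the call sites emitted during its scope,
+ * clearing needsRejoin on each site it matches.
+ */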
+void
+mjit::Compiler::checkRejoinSite(uint32 nCallSites, uint32 nRejoinSites, void *stub)
+{
+    JS_ASSERT(!variadicRejoin);
+    size_t id = (size_t) stub;
+
+    if (id == RejoinSite::VARIADIC_ID) {
+        for (unsigned i = nCallSites; i < callSites.length(); i++)
+            callSites[i].needsRejoin = false;
+        variadicRejoin = true;
+        return;
+    }
+
+    for (unsigned i = nCallSites; i < callSites.length(); i++) {
+        if (callSites[i].id == id)
+            callSites[i].needsRejoin = false;
+    }
+}
+#endif
+
+void
+mjit::Compiler::addRejoinSite(void *stub, bool ool, Label oolLabel)
+{
+    JS_ASSERT(a == outer);
+
+    InternalRejoinSite rejoin(stubcc.masm.label(), PC, (size_t) stub);
+    rejoinSites.append(rejoin);
+
+    /*
+     * Get the right frame to use for restoring doubles and for subsequent
+     * code. For returns from scripted calls we pop the frame first; for all
+     * other rejoins the frame register is reloaded from f.regs.fp.
+     */
+    if (stub == (void *) CallSite::NCODE_RETURN_ID)
+        stubcc.masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
+    else
+        stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
+
+    /*
+     * Ensure that all entries which we assume are doubles are in fact doubles.
+     * Any value assumed to be a double in this compilation may instead be an
+     * int in the earlier compilation and stack frames. Other transitions
+     * between known types are not possible --- type sets can only grow, and if
+     * new non-double type tags become possible we will treat that slot as
+     * unknown in this compilation.
+     */
+    frame.ensureInMemoryDoubles(stubcc.masm);
+
+    /* Regenerate any loop invariants. */
+    if (loop && loop->generatingInvariants()) {
+        Jump j = stubcc.masm.jump();
+        Label l = stubcc.masm.label();
+        loop->addInvariantCall(j, l, true);
+    }
+
+    if (ool) {
+        /* Jump to the specified label, without syncing. */
+        stubcc.masm.jump().linkTo(oolLabel, &stubcc.masm);
+    } else {
+        /* Rejoin as from an out of line stub call. */
+        stubcc.rejoin(Changes(0));
+    }
+}
+
 bool
 mjit::Compiler::compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs)
 {
     JS_ASSERT(lhs.isPrimitive());
     JS_ASSERT(rhs.isPrimitive());
 
     if (lhs.isString() && rhs.isString()) {
         int32 cmp;
@@ -4057,35 +4193,40 @@ mjit::Compiler::compareTwoValues(JSConte
         }
     }
 
     JS_NOT_REACHED("NYI");
     return false;
 }
 
 bool
-mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::emitStubCmpOp(BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
 {
-    fixDoubleTypes(Uses(2));
+    fixDoubleTypes();
     if (target)
-        frame.syncAndForgetEverything();
+        frame.syncAndKillEverything();
     else
         frame.syncAndKill(Uses(2));
 
     prepareStubCall(Uses(2));
     INLINE_STUBCALL(stub);
     frame.pop();
     frame.pop();
 
     if (!target) {
         frame.takeReg(Registers::ReturnReg);
         frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
         return true;
     }
 
+    if (needRejoins(PC)) {
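+        /*
+         * OOL rejoin for frames re-entering at this PC, so the fused branch
+         * below can test the stub's result.
+         */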
+        autoRejoin.oolRejoin(stubcc.masm.label());
+        stubcc.rejoin(Changes(0));
+    }
+
     JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
     Assembler::Condition cond = (fused == JSOP_IFEQ)
                                 ? Assembler::Zero
                                 : Assembler::NonZero;
     Jump j = masm.branchTest32(cond, Registers::ReturnReg,
                                Registers::ReturnReg);
     return jumpAndTrace(j, target);
 }
@@ -4110,42 +4251,27 @@ mjit::Compiler::jsop_getprop_slow(JSAtom
     if (usePropCache) {
         INLINE_STUBCALL(stubs::GetProp);
     } else {
         masm.move(ImmPtr(atom), Registers::ArgReg1);
         INLINE_STUBCALL(stubs::GetPropNoCache);
     }
     frame.pop();
     frame.pushSynced(JSVAL_TYPE_UNKNOWN);
-
-    if (recompiling) {
-        OOL_STUBCALL(usePropCache ? ic::GetProp : ic::GetPropNoCache);
-        stubcc.rejoin(Changes(1));
-    }
 }
 
 bool
 mjit::Compiler::jsop_callprop_slow(JSAtom *atom)
 {
     prepareStubCall(Uses(1));
     masm.move(ImmPtr(atom), Registers::ArgReg1);
     INLINE_STUBCALL(stubs::CallProp);
     frame.pop();
     pushSyncedEntry(0);
     pushSyncedEntry(1);
-
-    if (recompiling) {
-        OOL_STUBCALL(stubs::GetProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(ic::CallProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(ic::GetProp);
-        stubcc.rejoin(Changes(2));
-    }
-
     return true;
 }
 
 bool
 mjit::Compiler::jsop_length()
 {
     FrameEntry *top = frame.peek(-1);
 
@@ -4191,16 +4317,19 @@ mjit::Compiler::passICAddress(BaseICInfo
 {
     ic->paramAddr = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
 }
 
 bool
 mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType,
                              bool doTypeCheck, bool usePropCache)
 {
+    REJOIN_SITE_3(usePropCache ? ic::GetProp : ic::GetPropNoCache,
+                  stubs::GetProp, stubs::GetPropNoCache);
+
     FrameEntry *top = frame.peek(-1);
 
     /* If the incoming type will never PIC, take slow path. */
     if (top->isNotType(JSVAL_TYPE_OBJECT)) {
         jsop_getprop_slow(atom, usePropCache);
         return true;
     }
 
@@ -4294,25 +4423,16 @@ mjit::Compiler::jsop_getprop(JSAtom *ato
 #else
     labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
 #endif
 
     pic.objReg = objReg;
     frame.pushRegs(shapeReg, objReg, knownType);
 
     stubcc.rejoin(Changes(1));
-
-    if (recompiling) {
-        if (usePropCache)
-            OOL_STUBCALL(stubs::GetProp);
-        else
-            OOL_STUBCALL(stubs::GetPropNoCache);
-        stubcc.rejoin(Changes(1));
-    }
-
     pics.append(pic);
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
 {
     FrameEntry *top = frame.peek(-1);
@@ -4416,26 +4536,16 @@ mjit::Compiler::jsop_callprop_generic(JS
 #ifdef JS_CPU_X64
     labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
 #else
     labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
 #endif
 
     stubcc.rejoin(Changes(2));
     pics.append(pic);
-
-    if (recompiling) {
-        OOL_STUBCALL(stubs::CallProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(stubs::GetProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(ic::GetProp);
-        stubcc.rejoin(Changes(2));
-    }
-
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop_str(JSAtom *atom)
 {
     if (!script->compileAndGo) {
         jsop_callprop_slow(atom);
@@ -4479,23 +4589,16 @@ mjit::Compiler::jsop_callprop_str(JSAtom
         masm.move(ImmPtr(strFe->getValue().toString()), strReg);
     } else {
         strReg = frame.ownRegForData(strFe);
     }
     frame.pop();
     frame.pushTypedPayload(JSVAL_TYPE_STRING, strReg);
     frame.forgetType(frame.peek(-1));
 
-    if (recompiling) {
-        OOL_STUBCALL(stubs::CallProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(ic::CallProp);
-        stubcc.rejoin(Changes(2));
-    }
-
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop_obj(JSAtom *atom)
 {
     FrameEntry *top = frame.peek(-1);
 
@@ -4580,25 +4683,16 @@ mjit::Compiler::jsop_callprop_obj(JSAtom
     labels.setInlineShapeJump(masm, inlineShapeLabel, inlineShapeJump);
 #else
     labels.setInlineShapeJump(masm, pic.shapeGuard, inlineShapeJump);
 #endif
 
     stubcc.rejoin(Changes(2));
     pics.append(pic);
 
-    if (recompiling) {
-        OOL_STUBCALL(stubs::CallProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(stubs::GetProp);
-        stubcc.rejoin(Changes(2));
-        OOL_STUBCALL(ic::GetProp);
-        stubcc.rejoin(Changes(2));
-    }
-
     return true;
 }
 
 bool
 mjit::Compiler::testSingletonProperty(JSObject *obj, jsid id)
 {
     /*
      * We would like to completely no-op property/global accesses which can
@@ -4695,16 +4789,18 @@ mjit::Compiler::testSingletonPropertyTyp
         return NULL;
 
     return testSingletonProperty(proto, id);
 }
 
 bool
 mjit::Compiler::jsop_callprop(JSAtom *atom)
 {
+    REJOIN_SITE_2(stubs::CallProp, ic::CallProp);
+
     FrameEntry *top = frame.peek(-1);
 
     bool testObject;
     JSObject *singleton = pushedSingleton(0);
     if (singleton && singleton->isFunction() &&
         testSingletonPropertyTypes(top, ATOM_TO_JSID(atom), &testObject)) {
         MaybeJump notObject;
         if (testObject)
@@ -4742,16 +4838,21 @@ mjit::Compiler::jsop_callprop(JSAtom *at
     if (top->isTypeKnown())
         return jsop_callprop_obj(atom);
     return jsop_callprop_generic(atom);
 }
 
 bool
 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
 {
+    REJOIN_SITE_2(usePropCache
+                  ? STRICT_VARIANT(stubs::SetName)
+                  : STRICT_VARIANT(stubs::SetPropNoCache),
+                  ic::SetProp);
+
     FrameEntry *lhs = frame.peek(-2);
     FrameEntry *rhs = frame.peek(-1);
 
     /* If the incoming type will never PIC, take slow path. */
     if (lhs->isTypeKnown() && lhs->getKnownType() != JSVAL_TYPE_OBJECT) {
         jsop_setprop_slow(atom, usePropCache);
         return true;
     }
@@ -4874,16 +4975,18 @@ mjit::Compiler::jsop_setprop(JSAtom *ato
 
     pics.append(pic);
     return true;
 }
 
 void
 mjit::Compiler::jsop_name(JSAtom *atom, JSValueType type)
 {
+    REJOIN_SITE_2(ic::Name, stubs::UndefinedHelper);
+
     PICGenInfo pic(ic::PICInfo::NAME, JSOp(*PC), true);
 
     RESERVE_IC_SPACE(masm);
 
     pic.shapeReg = frame.allocReg();
     pic.objReg = frame.allocReg();
     pic.typeReg = Registers::ReturnReg;
     pic.atom = atom;
@@ -4937,16 +5040,17 @@ mjit::Compiler::jsop_name(JSAtom *atom, 
     }
 
     pics.append(pic);
 }
 
 bool
 mjit::Compiler::jsop_xname(JSAtom *atom)
 {
+    REJOIN_SITE_ANY();
     PICGenInfo pic(ic::PICInfo::XNAME, JSOp(*PC), true);
 
     FrameEntry *fe = frame.peek(-1);
     if (fe->isNotType(JSVAL_TYPE_OBJECT)) {
         return jsop_getprop(atom, knownPushedType(0));
     }
 
     if (!fe->isTypeKnown()) {
@@ -5005,16 +5109,17 @@ mjit::Compiler::jsop_xname(JSAtom *atom)
 
     pics.append(pic);
     return true;
 }
 
 void
 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
 {
+    REJOIN_SITE(ic::BindName);
     PICGenInfo pic(ic::PICInfo::BIND, JSOp(*PC), usePropCache);
 
     // This code does not check the frame flags to see if scopeChain has been
     // set. Rather, it relies on the up-front analysis statically determining
     // whether BINDNAME can be used, which reifies the scope chain at the
     // prologue.
     JS_ASSERT(a->analysis.usesScopeChain());
 
@@ -5055,51 +5160,59 @@ mjit::Compiler::jsop_bindname(JSAtom *at
     pics.append(pic);
 }
 
 #else /* !JS_POLYIC */
 
 void
 mjit::Compiler::jsop_name(JSAtom *atom, JSValueType type, types::TypeSet *typeSet)
 {
+    REJOIN_SITE(stubs::Name);
     prepareStubCall(Uses(0));
     INLINE_STUBCALL(stubs::Name);
     frame.pushSynced(type, typeSet);
 }
 
 bool
 mjit::Compiler::jsop_xname(JSAtom *atom)
 {
     return jsop_getprop(atom, knownPushedType(0), pushedTypeSet(0));
 }
 
 bool
 mjit::Compiler::jsop_getprop(JSAtom *atom, JSValueType knownType, types::TypeSet *typeSet,
                              bool typecheck, bool usePropCache)
 {
+    REJOIN_SITE_2(ic::GetProp, ic::GetPropNoCache);
     jsop_getprop_slow(atom, usePropCache);
     return true;
 }
 
 bool
 mjit::Compiler::jsop_callprop(JSAtom *atom)
 {
+    REJOIN_SITE(stubs::CallProp);
     return jsop_callprop_slow(atom);
 }
 
 bool
 mjit::Compiler::jsop_setprop(JSAtom *atom, bool usePropCache)
 {
+    REJOIN_SITE(usePropCache
+                ? STRICT_VARIANT(stubs::SetName)
+                : STRICT_VARIANT(stubs::SetPropNoCache));
+
     jsop_setprop_slow(atom, usePropCache);
     return true;
 }
 
 void
 mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
 {
+    REJOIN_SITE_2(stubs::BindName, stubs::BindNameNoCache);
     RegisterID reg = frame.allocReg();
     Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
     masm.loadPtr(scopeChain, reg);
 
     Address address(reg, offsetof(JSObject, parent));
 
     Jump j = masm.branchPtr(Assembler::NotEqual, masm.payloadOf(address), ImmPtr(0));
 
@@ -5116,16 +5229,18 @@ mjit::Compiler::jsop_bindname(JSAtom *at
 
     stubcc.rejoin(Changes(1));
 }
 #endif
 
 void
 mjit::Compiler::jsop_this()
 {
+    REJOIN_SITE(stubs::This);
+
     frame.pushThis();
 
     /* 
      * In strict mode code, we don't wrap 'this'.
      * In direct-call eval code, we wrapped 'this' before entering the eval.
      * In global code, 'this' is always an object.
      */
     if (script->fun && !script->strictModeCode) {
@@ -5416,16 +5531,17 @@ mjit::Compiler::jsop_propinc(JSOp op, Vo
 
     PC += JSOP_PROPINC_LENGTH;
     return Compile_Okay;
 }
 
 bool
 mjit::Compiler::iter(uintN flags)
 {
+    REJOIN_SITE_ANY();
     FrameEntry *fe = frame.peek(-1);
 
     /*
      * Stub the call if this is not a simple 'for in' loop or if the iterated
      * value is known to not be an object.
      */
     if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
         prepareStubCall(Uses(1));
@@ -5531,16 +5647,18 @@ mjit::Compiler::iter(uintN flags)
 
 /*
  * This big nasty function emits a fast-path for native iterators, producing
  * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
  */
 void
 mjit::Compiler::iterNext()
 {
+    REJOIN_SITE(stubs::IterNext);
+
     FrameEntry *fe = frame.peek(-1);
     RegisterID reg = frame.tempRegForData(fe);
 
     /* Is it worth trying to pin this longer? Prolly not. */
     frame.pinReg(reg);
     RegisterID T1 = frame.allocReg();
     frame.unpinReg(reg);
 
@@ -5586,25 +5704,27 @@ mjit::Compiler::iterNext()
 
     /* Join with the stub call. */
     stubcc.rejoin(Changes(1));
 }
 
 bool
 mjit::Compiler::iterMore()
 {
+    AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, stubs::IterMore));
+
     jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
     JSOp next = JSOp(*target);
     JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);
 
     target += (next == JSOP_IFNE)
               ? GET_JUMP_OFFSET(target)
               : GET_JUMPX_OFFSET(target);
 
-    fixDoubleTypes(Uses(0));
+    fixDoubleTypes();
     if (!frame.syncForBranch(target, Uses(1)))
         return false;
 
     FrameEntry *fe = frame.peek(-1);
     RegisterID reg = frame.tempRegForData(fe);
     RegisterID tempreg = frame.allocReg();
 
     /* Test clasp */
@@ -5622,31 +5742,30 @@ mjit::Compiler::iterMore()
     /* Get props_cursor, test */
     masm.loadPtr(Address(reg, offsetof(NativeIterator, props_cursor)), tempreg);
     masm.loadPtr(Address(reg, offsetof(NativeIterator, props_end)), reg);
 
     Jump jFast = masm.branchPtr(Assembler::LessThan, tempreg, reg);
 
     stubcc.leave();
     OOL_STUBCALL(stubs::IterMore);
+    autoRejoin.oolRejoin(stubcc.masm.label());
     Jump j = stubcc.masm.branchTest32(Assembler::NonZero, Registers::ReturnReg,
                                       Registers::ReturnReg);
 
-    PC += JSOP_MOREITER_LENGTH;
-    PC += js_CodeSpec[next].length;
-
     stubcc.rejoin(Changes(1));
     frame.freeReg(tempreg);
 
     return jumpAndTrace(jFast, target, &j);
 }
 
 void
 mjit::Compiler::iterEnd()
 {
+    REJOIN_SITE_ANY();
     FrameEntry *fe = frame.peek(-1);
     RegisterID reg = frame.tempRegForData(fe);
 
     frame.pinReg(reg);
     RegisterID T1 = frame.allocReg();
     frame.unpinReg(reg);
 
     /* Test clasp */
@@ -5688,16 +5807,17 @@ mjit::Compiler::iterEnd()
     frame.pop();
 
     stubcc.rejoin(Changes(1));
 }
 
 void
 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
 {
+    REJOIN_SITE_ANY();
     prepareStubCall(Uses(2));
     INLINE_STUBCALL(stub);
     frame.popn(2);
     pushSyncedEntry(0);
 }
 
 void
 mjit::Compiler::jsop_getgname_slow(uint32 index)
@@ -5705,31 +5825,35 @@ mjit::Compiler::jsop_getgname_slow(uint3
     prepareStubCall(Uses(0));
     INLINE_STUBCALL(stubs::GetGlobalName);
     frame.pushSynced(JSVAL_TYPE_UNKNOWN);
 }
 
 void
 mjit::Compiler::jsop_bindgname()
 {
+    REJOIN_SITE(stubs::BindGlobalName);
+
     if (script->compileAndGo && globalObj) {
         frame.push(ObjectValue(*globalObj));
         return;
     }
 
     /* :TODO: this is slower than it needs to be. */
     prepareStubCall(Uses(0));
     INLINE_STUBCALL(stubs::BindGlobalName);
     frame.takeReg(Registers::ReturnReg);
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
 }
 
 void
 mjit::Compiler::jsop_getgname(uint32 index, JSValueType type)
 {
+    REJOIN_SITE_2(ic::GetGlobalName, stubs::GetGlobalName);
+
     /* Optimize undefined, NaN and Infinity. */
     JSAtom *atom = script->getAtom(index);
     if (atom == cx->runtime->atomState.typeAtoms[JSTYPE_VOID]) {
         frame.push(UndefinedValue());
         return;
     }
     if (atom == cx->runtime->atomState.NaNAtom) {
         frame.push(cx->runtime->NaNValue);
@@ -5739,20 +5863,16 @@ mjit::Compiler::jsop_getgname(uint32 ind
         frame.push(cx->runtime->positiveInfinityValue);
         return;
     }
 
     /* Optimize singletons like Math for JSOP_CALLPROP. */
     JSObject *obj = pushedSingleton(0);
     if (obj && testSingletonProperty(globalObj, ATOM_TO_JSID(atom))) {
         frame.push(ObjectValue(*obj));
-        if (recompiling) {
-            OOL_STUBCALL(ic::GetGlobalName);
-            stubcc.rejoin(Changes(1));
-        }
         return;
     }
 
 #if defined JS_MONOIC
     jsop_bindgname();
 
     FrameEntry *fe = frame.peek(-1);
     JS_ASSERT(fe->isTypeKnown() && fe->getKnownType() == JSVAL_TYPE_OBJECT);
@@ -5829,17 +5949,17 @@ mjit::Compiler::jsop_getgname(uint32 ind
 void
 mjit::Compiler::jsop_callgname_epilogue()
 {
     /*
      * This slow path does the same thing as the interpreter.
      */
     if (!script->compileAndGo) {
         prepareStubCall(Uses(1));
-        INLINE_STUBCALL(stubs::PushImplicitThisForGlobal);
+        INLINE_STUBCALL_NO_REJOIN(stubs::PushImplicitThisForGlobal);
         frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         return;
     }
 
     /* Fast path for known-not-an-object callee. */
     FrameEntry *fval = frame.peek(-1);
     if (fval->isNotType(JSVAL_TYPE_OBJECT)) {
         frame.push(UndefinedValue());
@@ -5848,17 +5968,17 @@ mjit::Compiler::jsop_callgname_epilogue(
 
     /* Paths for known object callee. */
     if (fval->isConstant()) {
         JSObject *obj = &fval->getValue().toObject();
         if (obj->getParent() == globalObj) {
             frame.push(UndefinedValue());
         } else {
             prepareStubCall(Uses(1));
-            INLINE_STUBCALL(stubs::PushImplicitThisForGlobal);
+            INLINE_STUBCALL_NO_REJOIN(stubs::PushImplicitThisForGlobal);
             frame.pushSynced(JSVAL_TYPE_UNKNOWN);
         }
         return;
     }
 
     /*
      * Optimized version. This inlines the common case, calling a
      * (non-proxied) function that has the same global as the current
@@ -5891,17 +6011,17 @@ mjit::Compiler::jsop_callgname_epilogue(
      */
     masm.loadPtr(Address(objReg, offsetof(JSObject, parent)), objReg);
     Jump globalMismatch = masm.branchPtr(Assembler::NotEqual, objReg, ImmPtr(globalObj));
     stubcc.linkExit(globalMismatch, Uses(1));
     frame.freeReg(objReg);
 
     /* OOL stub call path. */
     stubcc.leave();
-    OOL_STUBCALL(stubs::PushImplicitThisForGlobal);
+    OOL_STUBCALL_NO_REJOIN(stubs::PushImplicitThisForGlobal);
 
     /* Fast path. */
     if (isNotObj.isSet())
         isNotObj.getJump().linkTo(masm.label(), &masm);
     frame.pushUntypedValue(UndefinedValue());
 
     stubcc.rejoin(Changes(1));
 }
@@ -5917,16 +6037,21 @@ mjit::Compiler::jsop_setgname_slow(JSAto
         INLINE_STUBCALL(STRICT_VARIANT(stubs::SetGlobalNameNoCache));
     frame.popn(2);
     pushSyncedEntry(0);
 }
 
 void
 mjit::Compiler::jsop_setgname(JSAtom *atom, bool usePropertyCache)
 {
+    REJOIN_SITE_2(ic::SetGlobalName,
+                  usePropertyCache
+                  ? STRICT_VARIANT(stubs::SetGlobalName)
+                  : STRICT_VARIANT(stubs::SetGlobalNameNoCache));
+
     if (monitored(PC)) {
         /* Global accesses are monitored only for a few names like __proto__. */
         jsop_setgname_slow(atom, usePropertyCache);
         return;
     }
 
 #if defined JS_MONOIC
     FrameEntry *objFe = frame.peek(-2);
@@ -6006,47 +6131,40 @@ mjit::Compiler::jsop_setgname(JSAtom *at
 
 void
 mjit::Compiler::jsop_setelem_slow()
 {
     prepareStubCall(Uses(3));
     INLINE_STUBCALL(STRICT_VARIANT(stubs::SetElem));
     frame.popn(3);
     frame.pushSynced(JSVAL_TYPE_UNKNOWN);
-
-    if (recompiling) {
-        OOL_STUBCALL(STRICT_VARIANT(ic::SetElement));
-        stubcc.rejoin(Changes(2));
-    }
 }
 
 void
 mjit::Compiler::jsop_getelem_slow()
 {
     prepareStubCall(Uses(2));
     INLINE_STUBCALL(stubs::GetElem);
     frame.popn(2);
     pushSyncedEntry(0);
-
-    if (recompiling) {
-        OOL_STUBCALL(ic::GetElement);
-        stubcc.rejoin(Changes(1));
-    }
 }
 
 void
 mjit::Compiler::jsop_unbrand()
 {
+    REJOIN_SITE(stubs::Unbrand);
     prepareStubCall(Uses(1));
     INLINE_STUBCALL(stubs::Unbrand);
 }
 
 bool
 mjit::Compiler::jsop_instanceof()
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *lhs = frame.peek(-2);
     FrameEntry *rhs = frame.peek(-1);
 
     // The fast path applies only when both operands are objects.
     if (rhs->isNotType(JSVAL_TYPE_OBJECT) || lhs->isNotType(JSVAL_TYPE_OBJECT)) {
         stubcc.linkExit(masm.jump(), Uses(2));
         frame.discardFe(lhs);
         frame.discardFe(rhs);
@@ -6140,16 +6258,17 @@ mjit::Compiler::emitEval(uint32 argc)
     INLINE_STUBCALL(stubs::Eval);
     frame.popn(argc + 2);
     pushSyncedEntry(0);
 }
 
 void
 mjit::Compiler::jsop_arguments()
 {
+    REJOIN_SITE(stubs::Arguments);
     prepareStubCall(Uses(0));
     INLINE_STUBCALL(stubs::Arguments);
 }
 
 bool
 mjit::Compiler::jsop_newinit()
 {
     bool isArray;
@@ -6180,20 +6299,20 @@ mjit::Compiler::jsop_newinit()
         type = script->getTypeInitObject(cx, PC, isArray);
         if (!type)
             return false;
     }
     masm.storePtr(ImmPtr(type), FrameAddress(offsetof(VMFrame, scratch)));
 
     if (isArray) {
         masm.move(Imm32(count), Registers::ArgReg1);
-        INLINE_STUBCALL(stubs::NewInitArray);
+        INLINE_STUBCALL_NO_REJOIN(stubs::NewInitArray);
     } else {
         masm.move(ImmPtr(baseobj), Registers::ArgReg1);
-        INLINE_STUBCALL(stubs::NewInitObject);
+        INLINE_STUBCALL_NO_REJOIN(stubs::NewInitObject);
     }
     frame.takeReg(Registers::ReturnReg);
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
 
     frame.extra(frame.peek(-1)).initArray = (*PC == JSOP_NEWARRAY);
     frame.extra(frame.peek(-1)).initObject = baseobj;
 
     return true;
@@ -6255,39 +6374,44 @@ mjit::Compiler::finishLoop(jsbytecode *h
         frame.dumpAllocation(alloc);
     }
 #endif
 
     Vector<Jump> hoistJumps(cx);
 
     loop->entryJump().linkTo(masm.label(), &masm);
 
-    if (!loop->checkHoistedBounds(entryTarget, masm, &hoistJumps))
-        return false;
-    for (unsigned i = 0; i < hoistJumps.length(); i++)
-        stubcc.linkExitDirect(hoistJumps[i], stubcc.masm.label());
-    OOL_STUBCALL(stubs::MissedBoundsCheckEntry);
-    stubcc.crossJump(stubcc.masm.jump(), masm.label());
-    hoistJumps.clear();
+    if (!loop->checkHoistedBounds(entryTarget, masm, &hoistJumps))
+        return false;
+    for (unsigned i = 0; i < hoistJumps.length(); i++)
+        stubcc.linkExitDirect(hoistJumps[i], stubcc.masm.label());
+
+    {
+        REJOIN_SITE(stubs::MissedBoundsCheckEntry);
+        OOL_STUBCALL(stubs::MissedBoundsCheckEntry);
+        stubcc.crossJump(stubcc.masm.jump(), masm.label());
+        hoistJumps.clear();
+    }
 
     frame.prepareForJump(entryTarget, masm, true);
 
     if (!jumpInScript(masm.jump(), entryTarget))
         return false;
 
     if (!a->analysis.getCode(head).safePoint) {
         /*
          * Emit a stub into the OOL path which loads registers from a synced state
          * and jumps to the loop head, for rejoining from the interpreter.
          */
         LoopEntry entry;
         entry.pcOffset = head - script->code;
         entry.label = stubcc.masm.label();
         loopEntries.append(entry);
 
+        REJOIN_SITE(stubs::MissedBoundsCheckHead);
         if (!loop->checkHoistedBounds(head, stubcc.masm, &hoistJumps))
             return false;
         Jump skipCall = stubcc.masm.jump();
         for (unsigned i = 0; i < hoistJumps.length(); i++)
             hoistJumps[i].linkTo(stubcc.masm.label(), &stubcc.masm);
         OOL_STUBCALL(stubs::MissedBoundsCheckHead);
         skipCall.linkTo(stubcc.masm.label(), &stubcc.masm);
         hoistJumps.clear();
@@ -6500,47 +6624,48 @@ mjit::Compiler::enterBlock(JSObject *obj
         masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
         interruptCheckHelper();
     }
 
     /* For now, don't bother doing anything for this opcode. */
     frame.syncAndForgetEverything();
     masm.move(ImmPtr(obj), Registers::ArgReg1);
     uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
-    INLINE_STUBCALL(stubs::EnterBlock);
+    INLINE_STUBCALL_NO_REJOIN(stubs::EnterBlock);
     frame.enterBlock(n);
 }
 
 void
 mjit::Compiler::leaveBlock()
 {
     /*
      * Note: After bug 535912, we can pass the block obj directly, inline
      * PutBlockObject, and do away with the muckiness in PutBlockObject.
      */
     uint32 n = js_GetVariableStackUses(JSOP_LEAVEBLOCK, PC);
     JSObject *obj = script->getObject(fullAtomIndex(PC + UINT16_LEN));
     prepareStubCall(Uses(n));
     masm.move(ImmPtr(obj), Registers::ArgReg1);
-    INLINE_STUBCALL(stubs::LeaveBlock);
+    INLINE_STUBCALL_NO_REJOIN(stubs::LeaveBlock);
     frame.leaveBlock(n);
 }
 
 // Creates the new object expected for constructors, and places it in |thisv|.
 // It is broken down into the following operations:
 //   CALLEE
 //   GETPROP "prototype"
 //   IFPRIMTOP:
 //       NULL
 //   call js_CreateThisFromFunctionWithProto(...)
 //
 bool
 mjit::Compiler::constructThis()
 {
     JS_ASSERT(isConstructing);
+    REJOIN_SITE(stubs::CreateThis);
 
     // Load the callee.
     frame.pushCallee();
 
     // Get callee.prototype.
     if (!jsop_getprop(cx->runtime->atomState.classPrototypeAtom, JSVAL_TYPE_UNKNOWN, false, false))
         return false;
 
@@ -6597,17 +6722,17 @@ mjit::Compiler::jsop_tableswitch(jsbytec
     }
 
     FrameEntry *fe = frame.peek(-1);
     if (fe->isNotType(JSVAL_TYPE_INT32) || numJumps > 256) {
         frame.syncAndForgetEverything();
         masm.move(ImmPtr(originalPC), Registers::ArgReg1);
 
         /* prepareStubCall() is not needed due to forgetEverything() */
-        INLINE_STUBCALL(stubs::TableSwitch);
+        INLINE_STUBCALL_NO_REJOIN(stubs::TableSwitch);
         frame.pop();
         masm.jump(Registers::ReturnReg);
         return true;
     }
 
     RegisterID dataReg;
     if (fe->isConstant()) {
         JS_ASSERT(fe->isType(JSVAL_TYPE_INT32));
@@ -6642,37 +6767,32 @@ mjit::Compiler::jsop_tableswitch(jsbytec
     Jump defaultCase = masm.branch32(Assembler::AboveOrEqual, dataReg, Imm32(numJumps));
     BaseIndex jumpTarget(reg, dataReg, Assembler::ScalePtr);
     masm.jump(jumpTarget);
 
     if (notInt.isSet()) {
         stubcc.linkExitDirect(notInt.get(), stubcc.masm.label());
         stubcc.leave();
         stubcc.masm.move(ImmPtr(originalPC), Registers::ArgReg1);
-        OOL_STUBCALL(stubs::TableSwitch);
+        OOL_STUBCALL_NO_REJOIN(stubs::TableSwitch);
         stubcc.masm.jump(Registers::ReturnReg);
     }
     frame.pop();
     return jumpAndTrace(defaultCase, originalPC + defaultTarget);
 #endif
 }
 
 void
 mjit::Compiler::jsop_callelem_slow()
 {
     prepareStubCall(Uses(2));
     INLINE_STUBCALL(stubs::CallElem);
     frame.popn(2);
     pushSyncedEntry(0);
     pushSyncedEntry(1);
-
-    if (recompiling) {
-        OOL_STUBCALL(ic::CallElement);
-        stubcc.rejoin(Changes(2));
-    }
 }
 
 void
 mjit::Compiler::jsop_forprop(JSAtom *atom)
 {
     // Before: ITER OBJ
     // After:  ITER OBJ ITER
     frame.dupAt(-2);
@@ -6733,60 +6853,81 @@ mjit::Compiler::jsop_forgname(JSAtom *at
 
 /*
  * For any locals or args which we know to be integers but are treated as
  * doubles by the type inference, convert to double.  These will be assumed to be
  * doubles at control flow join points.  This function must be called before branching
  * to another opcode.
  */
 
+/*
+ * Whether locals/args known to be ints or doubles should be preserved as
+ * doubles across control flow edges.
+ */
+
+inline bool
+mjit::Compiler::preserveLocalType(unsigned i)
+{
+    return !a->analysis.localEscapes(i);
+}
+
+inline bool
+mjit::Compiler::preserveArgType(unsigned i)
+{
+    /*
+     * Don't preserve double arguments in inline calls across branches, as we
+     * can't mutate them when inlining. :XXX: could be more precise here.
+     */
+    return !a->analysis.argEscapes(i) && !a->parent;
+}
+
 void
-mjit::Compiler::fixDoubleTypes(Uses uses)
+mjit::Compiler::fixDoubleTypes()
 {
     if (!cx->typeInferenceEnabled())
         return;
 
-    for (uint32 i = 0; script->fun && i < script->fun->nargs; i++) {
+    for (uint32 i = 0; !a->parent && script->fun && i < script->fun->nargs; i++) {
         JSValueType type = knownArgumentType(i);
-        if (type == JSVAL_TYPE_DOUBLE && !a->analysis.argEscapes(i)) {
+        if (type == JSVAL_TYPE_DOUBLE && preserveArgType(i)) {
             FrameEntry *fe = frame.getArg(i);
             if (!fe->isType(JSVAL_TYPE_DOUBLE))
                 frame.ensureDouble(fe);
         }
     }
 
     for (uint32 i = 0; i < script->nfixed; i++) {
         JSValueType type = knownLocalType(i);
-        if (type == JSVAL_TYPE_DOUBLE && !a->analysis.localEscapes(i)) {
+        if (type == JSVAL_TYPE_DOUBLE && preserveLocalType(i)) {
             FrameEntry *fe = frame.getLocal(i);
             if (!fe->isType(JSVAL_TYPE_DOUBLE))
                 frame.ensureDouble(fe);
         }
     }
 }
 
 void
 mjit::Compiler::restoreAnalysisTypes(uint32 stackDepth)
 {
     if (!cx->typeInferenceEnabled())
         return;
 
     /* Restore known types of locals/args, for join points or after forgetting everything. */
     for (uint32 i = 0; i < script->nfixed; i++) {
         JSValueType type = knownLocalType(i);
-        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !a->analysis.localEscapes(i))) {
+        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || preserveLocalType(i))) {
             FrameEntry *fe = frame.getLocal(i);
             JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
             if (!fe->isTypeKnown())
                 frame.learnType(fe, type, false);
         }
     }
     for (uint32 i = 0; script->fun && i < script->fun->nargs; i++) {
         JSValueType type = knownArgumentType(i);
-        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || !a->analysis.argEscapes(i))) {
+        if (type != JSVAL_TYPE_UNKNOWN && (type != JSVAL_TYPE_DOUBLE || preserveArgType(i))) {
             FrameEntry *fe = frame.getArg(i);
             JS_ASSERT_IF(fe->isTypeKnown(), fe->isType(type));
             if (!fe->isTypeKnown())
                 frame.learnType(fe, type, false);
         }
     }
 }
 
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -314,33 +314,98 @@ class Compiler : public BaseCompiler
         Defs(uint32 ndefs)
           : ndefs(ndefs)
         { }
         uint32 ndefs;
     };
 
     struct InternalCallSite {
         uint32 returnOffset;
-        DataLabelPtr callPatch;
         DataLabelPtr inlinePatch;
         uint32 inlineIndex;
         jsbytecode *inlinepc;
         size_t id;
-        bool call;
         bool ool;
 
+        // An AutoRejoinSite needs to capture this call site.
+        bool needsRejoin;
+
         InternalCallSite(uint32 returnOffset,
                          uint32 inlineIndex, jsbytecode *inlinepc, size_t id,
-                         bool call, bool ool)
+                         bool ool, bool needsRejoin)
           : returnOffset(returnOffset),
             inlineIndex(inlineIndex), inlinepc(inlinepc), id(id),
-            call(call), ool(ool)
+            ool(ool), needsRejoin(needsRejoin)
+        { }
+    };
+
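+    /*
+     * A label the interpreter or expanded frames may rejoin at, keyed by the
+     * id of the stub call it covers (or NCODE_RETURN_ID / VARIADIC_ID).
+     */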
+    struct InternalRejoinSite {
+        Label label;
+        jsbytecode *pc;
+        size_t id;
+
+        InternalRejoinSite(Label label, jsbytecode *pc, size_t id)
+            : label(label), pc(pc), id(id)
         { }
     };
 
+    struct AutoRejoinSite {
+        Compiler *cc;
+        jsbytecode *pc;
+
+        bool ool;
+        Label oolLabel;
+
+        // Lengths of the call site and rejoin site vectors when this
+        // AutoRejoinSite was created.
+        uint32 startSites;
+        uint32 rejoinSites;
+
+        void *stub1;
+        void *stub2;
+        void *stub3;
+
+        AutoRejoinSite(Compiler *cc, void *stub1, void *stub2 = NULL, void *stub3 = NULL)
+            : cc(cc), pc(cc->PC), ool(false),
+              startSites(cc->callSites.length()),
+              rejoinSites(cc->rejoinSites.length()),
+              stub1(stub1), stub2(stub2), stub3(stub3)
+        {}
+
+        /*
+         * Rejoin a particular slow path label in a synced state, rather than
+         * the current point of the fast path when the AutoRejoinSite finishes.
+         */
+        void oolRejoin(Label label)
+        {
+            ool = true;
+            oolLabel = label;
+        }
+
+        ~AutoRejoinSite()
+        {
+            if (cc->a != cc->outer)
+                return;
+#ifdef DEBUG
+            JS_ASSERT(pc == cc->PC);
+            cc->checkRejoinSite(startSites, rejoinSites, stub1);
+            if (stub2)
+                cc->checkRejoinSite(startSites, rejoinSites, stub2);
+            if (stub3)
+                cc->checkRejoinSite(startSites, rejoinSites, stub3);
+#endif
+            if (cc->needRejoins(pc)) {
+                cc->addRejoinSite(stub1, ool, oolLabel);
+                if (stub2)
+                    cc->addRejoinSite(stub2, ool, oolLabel);
+                if (stub3)
+                    cc->addRejoinSite(stub3, ool, oolLabel);
+            }
+        }
+    };
+
     struct DoublePatch {
         double d;
         DataLabelPtr label;
         bool ool;
     };
 
     struct JumpTable {
         DataLabelPtr label;
@@ -401,16 +466,17 @@ class Compiler : public BaseCompiler
         ActiveFrame(JSContext *cx);
         ~ActiveFrame();
     };
     ActiveFrame *a;
     ActiveFrame *outer;
 
     JSScript *script;
     jsbytecode *PC;
+    bool variadicRejoin;  /* A variadic rejoin site exists for the current PC. */
 
     LoopState *loop;
 
     /* State spanning all stack frames. */
 
     js::Vector<ActiveFrame*, 4, CompilerAllocPolicy> inlineFrames;
     js::Vector<BranchPatch, 64, CompilerAllocPolicy> branchPatches;
 #if defined JS_MONOIC
@@ -422,58 +488,82 @@ class Compiler : public BaseCompiler
 #endif
 #if defined JS_POLYIC
     js::Vector<PICGenInfo, 16, CompilerAllocPolicy> pics;
     js::Vector<GetElementICInfo, 16, CompilerAllocPolicy> getElemICs;
     js::Vector<SetElementICInfo, 16, CompilerAllocPolicy> setElemICs;
 #endif
     js::Vector<CallPatchInfo, 64, CompilerAllocPolicy> callPatches;
     js::Vector<InternalCallSite, 64, CompilerAllocPolicy> callSites;
+    js::Vector<InternalRejoinSite, 64, CompilerAllocPolicy> rejoinSites;
     js::Vector<DoublePatch, 16, CompilerAllocPolicy> doubleList;
     js::Vector<JumpTable, 16> jumpTables;
     js::Vector<uint32, 16> jumpTableOffsets;
     js::Vector<LoopEntry, 16> loopEntries;
     StubCompiler stubcc;
     Label invokeLabel;
     Label arityLabel;
 #ifdef JS_MONOIC
     Label argsCheckStub;
     Label argsCheckFallthrough;
     Jump argsCheckJump;
 #endif
     bool debugMode_;
     bool addTraceHints;
-    bool recompiling;
     bool inlining;
     bool oomInVector;       // True if we have OOM'd appending to a vector. 
     enum { NoApplyTricks, LazyArgsObj } applyTricks;
 
     Compiler *thisFromCtor() { return this; }
 
     friend class CompilerAllocPolicy;
   public:
     // Special atom index used to indicate that the atom is 'length'. This
     // follows interpreter usage in JSOP_LENGTH.
     enum { LengthAtomIndex = uint32(-2) };
 
     Compiler(JSContext *cx, JSScript *outerScript, bool isConstructing,
-             const Vector<PatchableFrame> *patchFrames, bool recompiling);
+             const Vector<PatchableFrame> *patchFrames);
     ~Compiler();
 
     CompileStatus compile();
 
     Label getLabel() { return masm.label(); }
     bool knownJump(jsbytecode *pc);
     Label labelOf(jsbytecode *target, uint32 inlineIndex);
     void addCallSite(const InternalCallSite &callSite);
-    void addReturnSite(Label joinPoint, bool ool = false);
+    void addReturnSite();
+    void inlineStubCall(void *stub, bool needsRejoin);
     bool loadOldTraps(const Vector<CallSite> &site);
 
     bool debugMode() { return debugMode_; }
 
+#ifdef DEBUG
+    void checkRejoinSite(uint32 nCallSites, uint32 nRejoinSites, void *stub);
+#endif
+    void addRejoinSite(void *stub, bool ool, Label oolLabel);
+
+    bool needRejoins(jsbytecode *pc)
+    {
+        // We'll never rejoin into an inlined frame.
+        if (a != outer)
+            return false;
+
+        // We need all rejoin points if we might expand an inline frame.
+        if (outerScript->inlineParents)
+            return true;
+
+        // Otherwise, only add rejoin points where there are active frames on stack.
+        for (unsigned i = 0; patchFrames && i < patchFrames->length(); i++) {
+            if ((*patchFrames)[i].pc == pc)
+                return true;
+        }
+        return false;
+    }
+
     jsbytecode *outerPC() {
         if (a == outer)
             return PC;
         ActiveFrame *scan = a;
         while (scan && scan->parent != outer)
             scan = scan->parent;
         return scan->parentPC;
     }
@@ -493,17 +583,19 @@ class Compiler : public BaseCompiler
     CompileStatus generateEpilogue();
     CompileStatus finishThisUp(JITScript **jitp);
     CompileStatus pushActiveFrame(JSScript *script, uint32 argc);
     void popActiveFrame();
     void generateInlinePrologue();
 
     /* Analysis helpers. */
     CompileStatus prepareInferenceTypes(JSScript *script, ActiveFrame *a);
-    void fixDoubleTypes(Uses uses);
+    inline bool preserveLocalType(unsigned i);
+    inline bool preserveArgType(unsigned i);
+    void fixDoubleTypes();
     void restoreAnalysisTypes(uint32 stackDepth);
     JSValueType knownThisType();
     JSValueType knownArgumentType(uint32 arg);
     JSValueType knownLocalType(uint32 local);
     JSValueType knownPushedType(uint32 pushed);
     bool arrayPrototypeHasIndexedProperty();
     bool mayPushUndefined(uint32 pushed);
     types::TypeSet *argTypeSet(uint32 arg);
@@ -516,17 +608,17 @@ class Compiler : public BaseCompiler
     /* Non-emitting helpers. */
     void pushSyncedEntry(uint32 pushed);
     uint32 fullAtomIndex(jsbytecode *pc);
     bool jumpInScript(Jump j, jsbytecode *pc);
     bool compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs);
     bool canUseApplyTricks();
 
     /* Emitting helpers. */
-    bool emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused);
+    bool emitStubCmpOp(BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
     bool iter(uintN flags);
     void iterNext();
     bool iterMore();
     void iterEnd();
     MaybeJump loadDouble(FrameEntry *fe, FPRegisterID *fpReg, bool *allocated);
 #ifdef JS_POLYIC
     void passICAddress(BaseICInfo *ic);
 #endif
@@ -617,21 +709,20 @@ class Compiler : public BaseCompiler
     void jsop_binary_double(FrameEntry *lhs, FrameEntry *rhs, JSOp op, VoidStub stub,
                             JSValueType type);
     void slowLoadConstantDouble(Assembler &masm, FrameEntry *fe,
                                 FPRegisterID fpreg);
     void maybeJumpIfNotInt32(Assembler &masm, MaybeJump &mj, FrameEntry *fe,
                              MaybeRegisterID &mreg);
     void maybeJumpIfNotDouble(Assembler &masm, MaybeJump &mj, FrameEntry *fe,
                               MaybeRegisterID &mreg);
-    bool jsop_relational(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
-    bool jsop_relational_self(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
-    bool jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
-    bool jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
-    bool jsop_relational_int(JSOp op, jsbytecode *target, JSOp fused);
+    bool jsop_relational(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
+    bool jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
+    bool jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
+    bool jsop_relational_int(JSOp op, AutoRejoinSite &rejoin, jsbytecode *target, JSOp fused);
 
     void emitLeftDoublePath(FrameEntry *lhs, FrameEntry *rhs, FrameState::BinaryAlloc &regs,
                             MaybeJump &lhsNotDouble, MaybeJump &rhsNotNumber,
                             MaybeJump &lhsUnknownDone);
     void emitRightDoublePath(FrameEntry *lhs, FrameEntry *rhs, FrameState::BinaryAlloc &regs,
                              MaybeJump &rhsNotNumber2);
     bool tryBinaryConstantFold(JSContext *cx, FrameState &frame, JSOp op,
                                FrameEntry *lhs, FrameEntry *rhs, Value *vp);
@@ -653,18 +744,18 @@ class Compiler : public BaseCompiler
     void jsop_initprop();
     void jsop_initelem();
     void jsop_setelem_dense();
     bool jsop_setelem(bool popGuaranteed);
     bool jsop_getelem(bool isCall);
     void jsop_getelem_dense(bool isPacked);
     bool isCacheableBaseAndIndex(FrameEntry *obj, FrameEntry *id);
     void jsop_stricteq(JSOp op);
-    bool jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
-    bool jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused);
+    bool jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused);
+    bool jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused);
     void jsop_pos();
 
     static inline Assembler::Condition
     GetCompareCondition(JSOp op, JSOp fused)
     {
         bool ifeq = fused == JSOP_IFEQ;
         switch (op) {
           case JSOP_GT:
@@ -680,16 +771,22 @@ class Compiler : public BaseCompiler
           case JSOP_NE:
             return ifeq ? Assembler::Equal : Assembler::NotEqual;
           default:
             JS_NOT_REACHED("unrecognized op");
             return Assembler::Equal;
         }
     }
 
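+    /*
+     * Condition for testing a comparison stub's boolean result when fused
+     * with JSOP_IFEQ/IFNE.
+     */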
+    static inline Assembler::Condition
+    GetStubCompareCondition(JSOp fused)
+    {
+        return (fused == JSOP_IFEQ) ? Assembler::Zero : Assembler::NonZero;
+    }
+
     /* Fast builtins. */
     JSObject *pushedSingleton(unsigned pushed);
     CompileStatus callArrayBuiltin(uint32 argc, bool callingNew);
     CompileStatus inlineNativeFunction(uint32 argc, bool callingNew);
     CompileStatus inlineScriptedFunction(uint32 argc, bool callingNew);
     CompileStatus compileMathAbsInt(FrameEntry *arg);
     CompileStatus compileMathAbsDouble(FrameEntry *arg);
     CompileStatus compileMathSqrt(FrameEntry *arg);
@@ -703,33 +800,54 @@ class Compiler : public BaseCompiler
 
     void prepareStubCall(Uses uses);
     Call emitStubCall(void *ptr, DataLabelPtr *pinline);
 };
 
 // Given a stub call, emits the call into the inline assembly path. If
 // debug mode is on, adds the appropriate instrumentation for recompilation.
 #define INLINE_STUBCALL(stub)                                               \
-    do {                                                                    \
-        void *nstub = JS_FUNC_TO_DATA_PTR(void *, (stub));                  \
-        DataLabelPtr inlinePatch;                                           \
-        Call cl = emitStubCall(nstub, &inlinePatch);                        \
-        InternalCallSite site(masm.callReturnOffset(cl), a->inlineIndex, PC, (size_t)nstub, \
-                              true, false);                                 \
-        site.inlinePatch = inlinePatch;                                     \
-        addCallSite(site);                                                  \
-    } while (0)                                                             \
+    inlineStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true)
+
+// Same as INLINE_STUBCALL, but cannot trigger recompilation.
+#define INLINE_STUBCALL_NO_REJOIN(stub)                                     \
+    inlineStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), false)
 
 // Given a stub call, emits the call into the out-of-line assembly path. If
 // debug mode is on, adds the appropriate instrumentation for recompilation.
 // Unlike the INLINE_STUBCALL variant, this returns the Call offset.
 #define OOL_STUBCALL(stub)                                                  \
-    stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)))
+    stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true)
 
 // Same as OOL_STUBCALL, but specifies a slot depth.
 #define OOL_STUBCALL_LOCAL_SLOTS(stub, slots)                               \
-    stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), (slots))       \
+    stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true, (slots))
+
+// Same as OOL_STUBCALL, but cannot trigger recompilation.
+#define OOL_STUBCALL_NO_REJOIN(stub)                                        \
+    stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), false)
+
+// Define rejoin sites at a PC. For every stub or scripted call emitted, there
+// must be a rejoin site which captures it. These are scope-based: the rejoin
+// site must be declared before the stub call, and its scope must close after
+// the call has been emitted. When rejoins are needed at this PC, the rejoin
+// site will rejoin the inline code once its scope closes.
+
+#define REJOIN_SITE(stub)                                                   \
+    AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub)))
+
+#define REJOIN_SITE_2(stub1, stub2)                                         \
+    AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub1)),   \
+                              JS_FUNC_TO_DATA_PTR(void *, (stub2)))
+
+#define REJOIN_SITE_3(stub1, stub2, stub3)                                  \
+    AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub1)),   \
+                              JS_FUNC_TO_DATA_PTR(void *, (stub2)),         \
+                              JS_FUNC_TO_DATA_PTR(void *, (stub3)))
+
+#define REJOIN_SITE_ANY()                                                   \
+    AutoRejoinSite autoRejoin(this, (void *) RejoinSite::VARIADIC_ID)
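+
+// Illustrative usage, with a hypothetical op handler and stub: the rejoin
+// site is declared before the stub call it captures and closes its scope
+// after the call has been emitted.
+//
+//     void
+//     mjit::Compiler::jsop_example()       // hypothetical
+//     {
+//         REJOIN_SITE(stubs::Example);     // hypothetical stub
+//         prepareStubCall(Uses(1));
+//         INLINE_STUBCALL(stubs::Example);
+//         frame.pop();
+//         pushSyncedEntry(0);
+//     }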
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif
 
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -186,16 +186,18 @@ mjit::Compiler::maybeJumpIfNotDouble(Ass
     } else if (fe->getKnownType() != JSVAL_TYPE_DOUBLE) {
         mj.setJump(masm.jump());
     }
 }
 
 bool
 mjit::Compiler::jsop_binary(JSOp op, VoidStub stub, JSValueType type, types::TypeSet *typeSet)
 {
+    REJOIN_SITE(stub);
+
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     Value v;
     if (tryBinaryConstantFold(cx, frame, op, lhs, rhs, &v)) {
         if (!v.isInt32() && typeSet && !typeSet->hasType(types::TYPE_DOUBLE)) {
             /*
              * OK to ignore failure here, we aren't performing the operation
@@ -796,21 +798,16 @@ mjit::Compiler::jsop_neg()
         masm.negateDouble(res);
 
         if (!fe->isType(JSVAL_TYPE_DOUBLE))
             frame.freeReg(fpreg);
 
         frame.pop();
         frame.pushDouble(res);
 
-        if (recompiling) {
-            OOL_STUBCALL(stubs::Neg);
-            stubcc.rejoin(Changes(1));
-        }
-
         return;
     }
 
     /* Inline integer path for known integers. */
     if (fe->isType(JSVAL_TYPE_INT32) && type == JSVAL_TYPE_INT32) {
         RegisterID reg = frame.copyDataIntoReg(fe);
 
         /* Test for 0 and -2147483648 (both result in a double). */
@@ -903,16 +900,18 @@ mjit::Compiler::jsop_neg()
         stubcc.crossJump(jmpIntRejoin.getJump(), masm.label());
 
     stubcc.rejoin(Changes(1));
 }
 
 bool
 mjit::Compiler::jsop_mod()
 {
+    REJOIN_SITE_ANY();
+
 #if defined(JS_CPU_X86)
     JSValueType type = knownPushedType(0);
 
     FrameEntry *lhs = frame.peek(-2);
     FrameEntry *rhs = frame.peek(-1);
 
     Value v;
     if (tryBinaryConstantFold(cx, frame, JSOP_MOD, lhs, rhs, &v)) {
@@ -931,21 +930,16 @@ mjit::Compiler::jsop_mod()
         (rhs->isTypeKnown() && rhs->getKnownType() != JSVAL_TYPE_INT32) ||
         (type != JSVAL_TYPE_INT32 && type != JSVAL_TYPE_UNKNOWN))
 #endif
     {
         prepareStubCall(Uses(2));
         INLINE_STUBCALL(stubs::Mod);
         frame.popn(2);
         frame.pushSynced(knownPushedType(0));
-
-        if (recompiling) {
-            OOL_STUBCALL(stubs::NegZeroHelper);
-            stubcc.rejoin(Changes(1));
-        }
         return true;
     }
 
 #if defined(JS_CPU_X86)
     if (!lhs->isTypeKnown()) {
         Jump j = frame.testInt32(Assembler::NotEqual, lhs);
         stubcc.linkExit(j, Uses(2));
     }
@@ -1064,17 +1058,18 @@ mjit::Compiler::jsop_mod()
         stubcc.rejoin(Changes(1));
     }
 #endif
 
     return true;
 }
 
 bool
-mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin,
+                                         jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     /* Swap the LHS and RHS if it makes register allocation better... or possible. */
     if (lhs->isConstant() ||
         (frame.shouldAvoidDataRemat(lhs) && !rhs->isConstant())) {
         FrameEntry *temp = rhs;
@@ -1113,17 +1108,17 @@ mjit::Compiler::jsop_equality_int_string
         ValueRemat lvr, rvr;
         frame.pinEntry(lhs, lvr);
         frame.pinEntry(rhs, rvr);
 
         /*
          * Sync everything except the top two entries.
          * We will handle the lhs/rhs in the stub call path.
          */
-        fixDoubleTypes(Uses(2));
+        fixDoubleTypes();
         frame.syncAndKill(Registers(Registers::AvailRegs), Uses(frame.frameSlots()), Uses(2));
 
         RegisterID tempReg = frame.allocReg();
 
         JaegerSpew(JSpew_Insns, " ---- BEGIN STUB CALL CODE ---- \n");
 
         RESERVE_OOL_SPACE(stubcc.masm);
 
@@ -1166,21 +1161,19 @@ mjit::Compiler::jsop_equality_int_string
 
         if (needStub)
             OOL_STUBCALL_LOCAL_SLOTS(stub, frame.totalDepth() + 2);
 
         /*
          * The stub call has no need to rejoin, since state is synced.
          * Instead, we can just test the return value.
          */
-        Assembler::Condition ncond = (fused == JSOP_IFEQ)
-                                   ? Assembler::Zero
-                                   : Assembler::NonZero;
-        Jump stubBranch =
-            stubcc.masm.branchTest32(ncond, Registers::ReturnReg, Registers::ReturnReg);
+        autoRejoin.oolRejoin(stubcc.masm.label());
+        Jump stubBranch = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                                   Registers::ReturnReg, Registers::ReturnReg);
         Jump stubFallthrough = stubcc.masm.jump();
 
         JaegerSpew(JSpew_Insns, " ---- END STUB CALL CODE ---- \n");
         CHECK_OOL_SPACE();
 
         Jump fast;
         MaybeJump firstStubJump;
 
@@ -1384,23 +1377,23 @@ DoubleCondForOp(JSOp op, JSOp fused)
                : Assembler::DoubleLessThanOrEqual;
       default:
         JS_NOT_REACHED("unrecognized op");
         return Assembler::DoubleLessThan;
     }
 }
 
 bool
-mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     if (target)
-        fixDoubleTypes(Uses(2));
+        fixDoubleTypes();
 
     JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
 
     FPRegisterID fpLeft, fpRight;
     bool allocateLeft, allocateRight;
 
     MaybeJump lhsNotNumber = loadDouble(lhs, &fpLeft, &allocateLeft);
     if (!allocateLeft)
@@ -1415,25 +1408,29 @@ mjit::Compiler::jsop_relational_double(J
     if (target) {
         if (lhsNotNumber.isSet())
             stubcc.linkExitForBranch(lhsNotNumber.get());
         if (rhsNotNumber.isSet())
             stubcc.linkExitForBranch(rhsNotNumber.get());
         stubcc.leave();
         OOL_STUBCALL(stub);
 
-        frame.syncAndForgetEverything();
+        frame.syncAndKillEverything();
         Jump j = masm.branchDouble(dblCond, fpLeft, fpRight);
 
+        if (allocateLeft)
+            frame.freeReg(fpLeft);
+        if (allocateRight)
+            frame.freeReg(fpRight);
+
         frame.popn(2);
 
-        Assembler::Condition cond = (fused == JSOP_IFEQ)
-                                    ? Assembler::Zero
-                                    : Assembler::NonZero;
-        Jump sj = stubcc.masm.branchTest32(cond, Registers::ReturnReg, Registers::ReturnReg);
+        autoRejoin.oolRejoin(stubcc.masm.label());
+        Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                           Registers::ReturnReg, Registers::ReturnReg);
 
         /* Rejoin from the slow path. */
         stubcc.rejoin(Changes(0));
 
         /*
          * NB: jumpAndTrace emits to the OOL path, so make sure not to use it
          * in the middle of an in-progress slow path.
          */
@@ -1466,17 +1463,17 @@ mjit::Compiler::jsop_relational_double(J
         if (allocateRight)
             frame.freeReg(fpRight);
     }
 
     return true;
 }
 
 bool
-mjit::Compiler::jsop_relational_int(JSOp op, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_relational_int(JSOp op, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     /* Reverse N cmp A comparisons.  The left side must be in a register. */
     if (lhs->isConstant()) {
         JS_ASSERT(!rhs->isConstant());
         FrameEntry *tmp = lhs;
@@ -1484,32 +1481,37 @@ mjit::Compiler::jsop_relational_int(JSOp
         rhs = tmp;
         op = analyze::ReverseCompareOp(op);
     }
 
     JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
     Assembler::Condition cond = GetCompareCondition(op, fused);
 
     if (target) {
-        fixDoubleTypes(Uses(2));
+        fixDoubleTypes();
         if (!frame.syncForBranch(target, Uses(2)))
             return false;
 
         RegisterID lreg = frame.tempRegForData(lhs);
         Jump fast;
         if (rhs->isConstant()) {
             fast = masm.branch32(cond, lreg, Imm32(rhs->getValue().toInt32()));
         } else {
             frame.pinReg(lreg);
             RegisterID rreg = frame.tempRegForData(rhs);
             frame.unpinReg(lreg);
             fast = masm.branch32(cond, lreg, rreg);
         }
         frame.popn(2);
-        return jumpAndTrace(fast, target);
+
+        autoRejoin.oolRejoin(stubcc.masm.label());
+        Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                           Registers::ReturnReg, Registers::ReturnReg);
+
+        return jumpAndTrace(fast, target, &sj);
     } else {
         RegisterID result = frame.allocReg();
         RegisterID lreg = frame.tempRegForData(lhs);
 
         if (rhs->isConstant()) {
             masm.branchValue(cond, lreg, rhs->getValue().toInt32(), result);
         } else {
             frame.pinReg(lreg);
@@ -1520,39 +1522,27 @@ mjit::Compiler::jsop_relational_int(JSOp
 
         frame.popn(2);
         frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, result);
     }
 
     return true;
 }
 
-bool
-mjit::Compiler::jsop_relational_self(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
-{
-#ifdef DEBUG
-    FrameEntry *rhs = frame.peek(-1);
-    FrameEntry *lhs = frame.peek(-2);
-
-    JS_ASSERT(frame.haveSameBacking(lhs, rhs));
-#endif
-
-    /* :TODO: optimize this?  */
-    return emitStubCmpOp(stub, target, fused);
-}
-
 /* See jsop_binary_full() for more information on how this works. */
 bool
-mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     if (target)
-        fixDoubleTypes(Uses(2));
+        fixDoubleTypes();
 
     /* Allocate all registers up-front. */
     FrameState::BinaryAlloc regs;
     frame.allocForBinary(lhs, rhs, op, regs, !target);
 
     MaybeJump lhsNotDouble, rhsNotNumber, lhsUnknownDone;
     if (!lhs->isTypeKnown())
         emitLeftDoublePath(lhs, rhs, regs, lhsNotDouble, rhsNotNumber, lhsUnknownDone);
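
Note the new third argument threaded through jumpAndTrace (the &sj jumps above): the slow-path comparison branch is now handed to jumpAndTrace so both the fast and slow paths can be routed through the single TRACE IC at the branch target. The declaration is not in this section; presumably it gained a defaulted out-parameter along these lines (hypothetical sketch; existing two-argument callers are unchanged):

    /* Presumed updated declaration in Compiler.h; 'slow' is an optional
       slow-path jump to bind to the same target and TRACE IC. */
    bool jumpAndTrace(Jump j, jsbytecode *target, Jump *slow = NULL);
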
@@ -1617,49 +1607,50 @@ mjit::Compiler::jsop_relational_full(JSO
             /*
              * For fusions, spill the tracker state. xmm* remain intact. Note
              * that frame.sync() must be used directly, to avoid syncExit()'s
              * jumping logic.
              */
             frame.sync(stubcc.masm, Uses(frame.frameSlots()));
             stubcc.leave();
             OOL_STUBCALL(stub);
+            autoRejoin.oolRejoin(stubcc.masm.label());
         }
 
         /* Forget the world, preserving data. */
         frame.pinReg(cmpReg);
         if (reg.isSet())
             frame.pinReg(reg.reg());
         
         frame.popn(2);
 
         frame.syncAndKillEverything();
         frame.unpinKilledReg(cmpReg);
         if (reg.isSet())
             frame.unpinKilledReg(reg.reg());
-        frame.syncAndForgetEverything();
-        
+        frame.freeReg(regs.lhsFP);
+        frame.freeReg(regs.rhsFP);
+
         /* Operands could have been reordered, so use cmpOp. */
         Assembler::Condition i32Cond = GetCompareCondition(cmpOp, fused);
 
         /* Emit the i32 path. */
         Jump fast;
         if (reg.isSet())
             fast = masm.branch32(i32Cond, cmpReg, reg.reg());
         else
             fast = masm.branch32(i32Cond, cmpReg, Imm32(value));
 
         /*
          * The stub call has no need to rejoin since state is synced. Instead,
          * we can just test the return value.
          */
-        Assembler::Condition cond = (fused == JSOP_IFEQ)
-                                    ? Assembler::Zero
-                                    : Assembler::NonZero;
-        Jump j = stubcc.masm.branchTest32(cond, Registers::ReturnReg, Registers::ReturnReg);
+        autoRejoin.oolRejoin(stubcc.masm.label());
+        Jump j = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                          Registers::ReturnReg, Registers::ReturnReg);
 
         /* Rejoin from the slow path. */
         Jump j2 = stubcc.masm.jump();
         stubcc.crossJump(j2, masm.label());
 
         /* :TODO: make double path invoke tracer. */
         if (hasDoublePath) {
             j.linkTo(stubcc.masm.label(), &stubcc.masm);
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -120,16 +120,18 @@ mjit::Compiler::ensureInteger(FrameEntry
 
         frame.learnType(fe, JSVAL_TYPE_INT32, dataReg);
     }
 }
 
 void
 mjit::Compiler::jsop_bitnot()
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *top = frame.peek(-1);
 
     /* We only want to handle integers here. */
     if (top->isNotType(JSVAL_TYPE_INT32) && top->isNotType(JSVAL_TYPE_DOUBLE)) {
         prepareStubCall(Uses(1));
         INLINE_STUBCALL(stubs::BitNot);
         frame.pop();
         frame.pushSynced(JSVAL_TYPE_INT32);
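
The REJOIN_SITE_ANY(), REJOIN_SITE(stub) and REJOIN_SITE_2(stub1, stub2) macros now open ops that can make stub calls. Their definitions are in Compiler.h, outside this section; presumably each declares a scoped AutoRejoinSite (the same class threaded by reference through the relational ops) registering rejoin points for the named stubs, with the _ANY form producing a variadic site matching any call site id at the current PC. A hedged sketch of the shape they might take:

    /* Hypothetical expansions; the real definitions are not in this diff. */
    #define REJOIN_SITE(stub)                                                 \
        AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub)))
    #define REJOIN_SITE_2(stub1, stub2)                                       \
        AutoRejoinSite autoRejoin(this, JS_FUNC_TO_DATA_PTR(void *, (stub1)), \
                                  JS_FUNC_TO_DATA_PTR(void *, (stub2)))
    #define REJOIN_SITE_ANY()                                                 \
        AutoRejoinSite autoRejoin(this, (void *) RejoinSite::VARIADIC_ID)
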
@@ -147,16 +149,18 @@ mjit::Compiler::jsop_bitnot()
     frame.pushTypedPayload(JSVAL_TYPE_INT32, reg);
 
     stubcc.rejoin(Changes(1));
 }
 
 void
 mjit::Compiler::jsop_bitop(JSOp op)
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     /* The operands we ensure are integers cannot be copies of each other. */
     frame.separateBinaryEntries(lhs, rhs);
 
     VoidStub stub;
     switch (op) {
@@ -394,67 +398,73 @@ CheckNullOrUndefined(FrameEntry *fe)
 {
     if (!fe->isTypeKnown())
         return false;
     JSValueType type = fe->getKnownType();
     return type == JSVAL_TYPE_NULL || type == JSVAL_TYPE_UNDEFINED;
 }
 
 bool
-mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin, jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     /* The compiler should have handled constant folding. */
     JS_ASSERT(!(rhs->isConstant() && lhs->isConstant()));
 
     bool lhsTest;
     if ((lhsTest = CheckNullOrUndefined(lhs)) || CheckNullOrUndefined(rhs)) {
         /* What's the other mask? */
         FrameEntry *test = lhsTest ? rhs : lhs;
 
         if (test->isTypeKnown())
-            return emitStubCmpOp(stub, target, fused);
+            return emitStubCmpOp(stub, autoRejoin, target, fused);
 
         /* The other side must be null or undefined. */
         RegisterID reg = frame.ownRegForType(test);
         frame.pop();
         frame.pop();
 
         /*
          * :FIXME: Easier test for undefined || null?
          * Maybe put them next to each other, subtract, do a single compare?
          */
 
         if (target) {
-            frame.syncAndForgetEverything();
+            fixDoubleTypes();
+            frame.syncAndKillEverything();
+            frame.freeReg(reg);
+
+            autoRejoin.oolRejoin(stubcc.masm.label());
+            Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                               Registers::ReturnReg, Registers::ReturnReg);
 
             if ((op == JSOP_EQ && fused == JSOP_IFNE) ||
                 (op == JSOP_NE && fused == JSOP_IFEQ)) {
                 /*
                  * It would be easier to just have two jumpAndTrace calls here, but since
                  * each jumpAndTrace creates a TRACE IC, and since we want the bytecode
                  * to have a reference to the TRACE IC at the top of the loop, it's much
                  * better to have only one TRACE IC per loop, and hence at most one
                  * jumpAndTrace.
                  */
                 Jump b1 = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
                 Jump b2 = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_NULL));
                 Jump j1 = masm.jump();
                 b1.linkTo(masm.label(), &masm);
                 b2.linkTo(masm.label(), &masm);
                 Jump j2 = masm.jump();
-                if (!jumpAndTrace(j2, target))
+                if (!jumpAndTrace(j2, target, &sj))
                     return false;
                 j1.linkTo(masm.label(), &masm);
             } else {
                 Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
                 Jump j2 = masm.branchPtr(Assembler::NotEqual, reg, ImmType(JSVAL_TYPE_NULL));
-                if (!jumpAndTrace(j2, target))
+                if (!jumpAndTrace(j2, target, &sj))
                     return false;
                 j.linkTo(masm.label(), &masm);
             }
         } else {
             Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
             Jump j2 = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_NULL));
             masm.move(Imm32(op == JSOP_NE), reg);
             Jump j3 = masm.jump();
@@ -483,93 +493,99 @@ mjit::Compiler::jsop_equality(JSOp op, B
 
         if (lhsKind != types::OBJECT_UNKNOWN && rhsKind != types::OBJECT_UNKNOWN) {
             /* :TODO: Merge with jsop_relational_int? */
             JS_ASSERT_IF(!target, fused != JSOP_IFEQ);
             frame.forgetConstantData(lhs);
             frame.forgetConstantData(rhs);
             Assembler::Condition cond = GetCompareCondition(op, fused);
             if (target) {
-                fixDoubleTypes(Uses(2));
+                fixDoubleTypes();
+                autoRejoin.oolRejoin(stubcc.masm.label());
+                Jump sj = stubcc.masm.branchTest32(GetStubCompareCondition(fused),
+                                                   Registers::ReturnReg, Registers::ReturnReg);
                 if (!frame.syncForBranch(target, Uses(2)))
                     return false;
                 RegisterID lreg = frame.tempRegForData(lhs);
                 frame.pinReg(lreg);
                 RegisterID rreg = frame.tempRegForData(rhs);
                 frame.unpinReg(lreg);
                 Jump fast = masm.branchPtr(cond, lreg, rreg);
                 frame.popn(2);
-                return jumpAndTrace(fast, target);
+                return jumpAndTrace(fast, target, &sj);
             } else {
                 RegisterID result = frame.allocReg();
                 RegisterID lreg = frame.tempRegForData(lhs);
                 frame.pinReg(lreg);
                 RegisterID rreg = frame.tempRegForData(rhs);
                 frame.unpinReg(lreg);
                 masm.branchValue(cond, lreg, rreg, result);
 
                 frame.popn(2);
                 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, result);
                 return true;
             }
         }
     }
 
-    return emitStubCmpOp(stub, target, fused);
+    return emitStubCmpOp(stub, autoRejoin, target, fused);
 }
 
 bool
-mjit::Compiler::jsop_relational(JSOp op, BoolStub stub, jsbytecode *target, JSOp fused)
+mjit::Compiler::jsop_relational(JSOp op, BoolStub stub, AutoRejoinSite &autoRejoin,
+                                jsbytecode *target, JSOp fused)
 {
     FrameEntry *rhs = frame.peek(-1);
     FrameEntry *lhs = frame.peek(-2);
 
     /* The compiler should have handled constant folding. */
     JS_ASSERT(!(rhs->isConstant() && lhs->isConstant()));
 
     /* Always slow path... */
     if ((lhs->isNotType(JSVAL_TYPE_INT32) && lhs->isNotType(JSVAL_TYPE_DOUBLE) &&
          lhs->isNotType(JSVAL_TYPE_STRING)) ||
         (rhs->isNotType(JSVAL_TYPE_INT32) && rhs->isNotType(JSVAL_TYPE_DOUBLE) &&
          rhs->isNotType(JSVAL_TYPE_STRING))) {
         if (op == JSOP_EQ || op == JSOP_NE)
-            return jsop_equality(op, stub, target, fused);
-        return emitStubCmpOp(stub, target, fused);
+            return jsop_equality(op, stub, autoRejoin, target, fused);
+        return emitStubCmpOp(stub, autoRejoin, target, fused);
     }
 
     if (op == JSOP_EQ || op == JSOP_NE) {
         if ((lhs->isNotType(JSVAL_TYPE_INT32) && lhs->isNotType(JSVAL_TYPE_STRING)) ||
             (rhs->isNotType(JSVAL_TYPE_INT32) && rhs->isNotType(JSVAL_TYPE_STRING))) {
-            return emitStubCmpOp(stub, target, fused);
+            return emitStubCmpOp(stub, autoRejoin, target, fused);
         } else if (!target && (lhs->isType(JSVAL_TYPE_STRING) || rhs->isType(JSVAL_TYPE_STRING))) {
-            return emitStubCmpOp(stub, target, fused);
+            return emitStubCmpOp(stub, autoRejoin, target, fused);
         } else if (frame.haveSameBacking(lhs, rhs)) {
-            return emitStubCmpOp(stub, target, fused);
+            return emitStubCmpOp(stub, autoRejoin, target, fused);
         } else {
-            return jsop_equality_int_string(op, stub, target, fused);
+            return jsop_equality_int_string(op, stub, autoRejoin, target, fused);
         }
     }
 
     if (frame.haveSameBacking(lhs, rhs)) {
-        return jsop_relational_self(op, stub, target, fused);
+        return emitStubCmpOp(stub, autoRejoin, target, fused);
     } else if (lhs->isType(JSVAL_TYPE_STRING) || rhs->isType(JSVAL_TYPE_STRING)) {
-        return emitStubCmpOp(stub, target, fused);
+        return emitStubCmpOp(stub, autoRejoin, target, fused);
     } else if (lhs->isType(JSVAL_TYPE_DOUBLE) || rhs->isType(JSVAL_TYPE_DOUBLE)) {
-        return jsop_relational_double(op, stub, target, fused);
+        return jsop_relational_double(op, stub, autoRejoin, target, fused);
     } else if (cx->typeInferenceEnabled() &&
                lhs->isType(JSVAL_TYPE_INT32) && rhs->isType(JSVAL_TYPE_INT32)) {
-        return jsop_relational_int(op, target, fused);
+        return jsop_relational_int(op, autoRejoin, target, fused);
     } else {
-        return jsop_relational_full(op, stub, target, fused);
+        return jsop_relational_full(op, stub, autoRejoin, target, fused);
     }
 }
 
 void
 mjit::Compiler::jsop_not()
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *top = frame.peek(-1);
 
     if (top->isConstant()) {
         const Value &v = top->getValue();
         frame.pop();
         frame.push(BooleanValue(!js_ValueToBoolean(v)));
         return;
     }
@@ -763,17 +779,17 @@ mjit::Compiler::jsop_typeof()
                 frame.pop();
                 frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, result);
                 return;
             }
         }
     }
 
     prepareStubCall(Uses(1));
-    INLINE_STUBCALL(stubs::TypeOf);
+    INLINE_STUBCALL_NO_REJOIN(stubs::TypeOf);
     frame.pop();
     frame.takeReg(Registers::ReturnReg);
     frame.pushTypedPayload(JSVAL_TYPE_STRING, Registers::ReturnReg);
 }
 
 bool
 mjit::Compiler::booleanJumpScript(JSOp op, jsbytecode *target)
 {
@@ -864,17 +880,16 @@ mjit::Compiler::jsop_ifneq(JSOp op, jsby
     if (fe->isConstant()) {
         JSBool b = js_ValueToBoolean(fe->getValue());
 
         frame.pop();
 
         if (op == JSOP_IFEQ)
             b = !b;
         if (b) {
-            fixDoubleTypes(Uses(0));
             if (!frame.syncForBranch(target, Uses(0)))
                 return false;
             if (!jumpAndTrace(masm.jump(), target))
                 return false;
         } else {
             if (target < PC && !finishLoop(target))
                 return Compile_Error;
         }
@@ -890,17 +905,17 @@ mjit::Compiler::jsop_andor(JSOp op, jsby
     FrameEntry *fe = frame.peek(-1);
 
     if (fe->isConstant()) {
         JSBool b = js_ValueToBoolean(fe->getValue());
         
         /* Short-circuit. */
         if ((op == JSOP_OR && b == JS_TRUE) ||
             (op == JSOP_AND && b == JS_FALSE)) {
-            fixDoubleTypes(Uses(0));
+            fixDoubleTypes();
             if (!frame.syncForBranch(target, Uses(0)))
                 return false;
             if (!jumpAndTrace(masm.jump(), target))
                 return false;
         }
 
         frame.pop();
         return true;
@@ -1173,26 +1188,23 @@ mjit::Compiler::jsop_setelem_dense()
 
     stubcc.leave();
     OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem));
 
     if (!hoisted)
         frame.freeReg(slotsReg);
     frame.shimmy(2);
     stubcc.rejoin(Changes(2));
-
-    if (recompiling) {
-        OOL_STUBCALL(STRICT_VARIANT(ic::SetElement));
-        stubcc.rejoin(Changes(2));
-    }
 }
 
 bool
 mjit::Compiler::jsop_setelem(bool popGuaranteed)
 {
+    REJOIN_SITE_2(STRICT_VARIANT(ic::SetElement), STRICT_VARIANT(stubs::SetElem));
+
     FrameEntry *obj = frame.peek(-3);
     FrameEntry *id = frame.peek(-2);
     FrameEntry *value = frame.peek(-1);
 
     if (!IsCacheableSetElem(obj, id, value) || monitored(PC)) {
         jsop_setelem_slow();
         return true;
     }
@@ -1359,21 +1371,16 @@ mjit::Compiler::jsop_setelem(bool popGua
 
     frame.freeReg(ic.objReg);
     frame.shimmy(2);
     stubcc.rejoin(Changes(2));
 
 #if defined JS_POLYIC
     if (!setElemICs.append(ic))
         return false;
-
-    if (recompiling) {
-        OOL_STUBCALL(STRICT_VARIANT(stubs::SetElem));
-        stubcc.rejoin(Changes(2));
-    }
 #endif
 
     return true;
 }
 
 static inline bool
 IsCacheableGetElem(FrameEntry *obj, FrameEntry *id)
 {
@@ -1512,26 +1519,24 @@ mjit::Compiler::jsop_getelem_dense(bool 
             stubcc.linkExitDirect(holeCheck, stubcc.masm.label());
         JS_ASSERT(type == JSVAL_TYPE_UNKNOWN || type == JSVAL_TYPE_UNDEFINED);
         if (type == JSVAL_TYPE_UNDEFINED)
             stubcc.masm.loadValuePayload(UndefinedValue(), dataReg);
         else
             stubcc.masm.loadValueAsComponents(UndefinedValue(), typeReg.reg(), dataReg);
         stubcc.linkRejoin(stubcc.masm.jump());
     }
-
-    if (recompiling) {
-        OOL_STUBCALL(ic::GetElement);
-        stubcc.rejoin(Changes(1));
-    }
 }
 
 bool
 mjit::Compiler::jsop_getelem(bool isCall)
 {
+    REJOIN_SITE_2(isCall ? ic::CallElement : ic::GetElement,
+                  isCall ? stubs::CallElem : stubs::GetElem);
+
     FrameEntry *obj = frame.peek(-2);
     FrameEntry *id = frame.peek(-1);
 
     if (!IsCacheableGetElem(obj, id)) {
         if (isCall)
             jsop_callelem_slow();
         else
             jsop_getelem_slow();
@@ -1674,21 +1679,16 @@ mjit::Compiler::jsop_getelem(bool isCall
     if (isCall)
         frame.pushSynced(knownPushedType(1));
 
     stubcc.rejoin(Changes(isCall ? 2 : 1));
 
 #ifdef JS_POLYIC
     if (!getElemICs.append(ic))
         return false;
-
-    if (recompiling) {
-        OOL_STUBCALL(isCall ? stubs::CallElem : stubs::GetElem);
-        stubcc.rejoin(Changes(isCall ? 2 : 1));
-    }
 #endif
 
     return true;
 }
 
 static inline bool
 ReallySimpleStrictTest(FrameEntry *fe)
 {
@@ -1858,19 +1858,19 @@ mjit::Compiler::jsop_stricteq(JSOp op)
     }
 
     /* Is it impossible that both Values are ints? */
     if ((lhs->isTypeKnown() && lhs->isNotType(JSVAL_TYPE_INT32)) ||
         (rhs->isTypeKnown() && rhs->isNotType(JSVAL_TYPE_INT32))) {
         prepareStubCall(Uses(2));
 
         if (op == JSOP_STRICTEQ)
-            INLINE_STUBCALL(stubs::StrictEq);
+            INLINE_STUBCALL_NO_REJOIN(stubs::StrictEq);
         else
-            INLINE_STUBCALL(stubs::StrictNe);
+            INLINE_STUBCALL_NO_REJOIN(stubs::StrictNe);
 
         frame.popn(2);
         frame.pushSynced(JSVAL_TYPE_BOOLEAN);
         return;
     }
 
 #ifndef JS_CPU_ARM
     /* Try an integer fast-path. */
@@ -1912,44 +1912,46 @@ mjit::Compiler::jsop_stricteq(JSOp op)
         masm.set32(cond, testReg, otherReg, resultReg);
     }
 
     frame.unpinReg(testReg);
 
     if (needStub) {
         stubcc.leave();
         if (op == JSOP_STRICTEQ)
-            OOL_STUBCALL(stubs::StrictEq);
+            OOL_STUBCALL_NO_REJOIN(stubs::StrictEq);
         else
-            OOL_STUBCALL(stubs::StrictNe);
+            OOL_STUBCALL_NO_REJOIN(stubs::StrictNe);
     }
 
     frame.popn(2);
     frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, resultReg);
 
     if (needStub)
         stubcc.rejoin(Changes(1));
 #else
     /* TODO: Port set32() logic to ARM. */
     prepareStubCall(Uses(2));
 
     if (op == JSOP_STRICTEQ)
-        INLINE_STUBCALL(stubs::StrictEq);
+        INLINE_STUBCALL_NO_REJOIN(stubs::StrictEq);
     else
-        INLINE_STUBCALL(stubs::StrictNe);
+        INLINE_STUBCALL_NO_REJOIN(stubs::StrictNe);
 
     frame.popn(2);
     frame.pushSyncedType(JSVAL_TYPE_BOOLEAN);
     return;
 #endif
 }
 
 void
 mjit::Compiler::jsop_pos()
 {
+    REJOIN_SITE(stubs::Pos);
+
     FrameEntry *top = frame.peek(-1);
 
     if (top->isTypeKnown()) {
         if (top->getKnownType() <= JSVAL_TYPE_INT32)
             return;
         prepareStubCall(Uses(1));
         INLINE_STUBCALL(stubs::Pos);
         frame.pop();
@@ -1970,32 +1972,36 @@ mjit::Compiler::jsop_pos()
     OOL_STUBCALL(stubs::Pos);
 
     stubcc.rejoin(Changes(1));
 }
 
 void
 mjit::Compiler::jsop_initmethod()
 {
+    REJOIN_SITE_ANY();
+
 #ifdef DEBUG
     FrameEntry *obj = frame.peek(-2);
 #endif
     JSAtom *atom = script->getAtom(fullAtomIndex(PC));
 
     /* Initializers with INITMETHOD are not fast yet. */
     JS_ASSERT(!frame.extra(obj).initObject);
 
     prepareStubCall(Uses(2));
     masm.move(ImmPtr(atom), Registers::ArgReg1);
     INLINE_STUBCALL(stubs::InitMethod);
 }
 
 void
 mjit::Compiler::jsop_initprop()
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *obj = frame.peek(-2);
     FrameEntry *fe = frame.peek(-1);
     JSAtom *atom = script->getAtom(fullAtomIndex(PC));
 
     JSObject *baseobj = frame.extra(obj).initObject;
 
     if (!baseobj || monitored(PC)) {
         prepareStubCall(Uses(2));
@@ -2020,16 +2026,18 @@ mjit::Compiler::jsop_initprop()
     Address address = masm.objPropAddress(baseobj, objReg, shape->slot);
     frame.storeTo(fe, address);
     frame.freeReg(objReg);
 }
 
 void
 mjit::Compiler::jsop_initelem()
 {
+    REJOIN_SITE_ANY();
+
     FrameEntry *obj = frame.peek(-3);
     FrameEntry *id = frame.peek(-2);
     FrameEntry *fe = frame.peek(-1);
 
     /*
      * The initialized index is always a constant, but we won't remember which
      * constant if there are branches inside the code computing the initializer
      * expression (e.g. the expression uses the '?' operator).  Slow path those
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -1714,17 +1714,17 @@ FrameState::merge(Assembler &masm, Chang
     /*
      * For any changed values we are merging back which we consider to be doubles,
      * ensure they actually are doubles.  They must be doubles or ints, but we
      * do not require stub paths to always generate a double when needed.
      * :FIXME: we check this on OOL stub calls, but not inline stub calls.
      */
     for (unsigned i = 0; i < changes.nchanges; i++) {
         FrameEntry *fe = sp - 1 - i;
-        if (fe->isType(JSVAL_TYPE_DOUBLE))
+        if (fe->isTracked() && fe->isType(JSVAL_TYPE_DOUBLE))
             masm.ensureInMemoryDouble(addressOf(fe));
     }
 
     uint32 mask = Registers::AvailAnyRegs & ~a->freeRegs.freeMask;
     Registers search(mask);
 
     while (!search.empty(mask)) {
         AnyRegisterID reg = search.peekReg(mask);
@@ -2052,16 +2052,29 @@ FrameState::ensureDouble(FrameEntry *fe)
     fe->data.setFPRegister(fpreg);
     regstate(fpreg).associate(fe, RematInfo::DATA);
 
     fe->data.unsync();
     fe->type.unsync();
 }
 
 void
+FrameState::ensureInMemoryDoubles(Assembler &masm)
+{
+    JS_ASSERT(!a->parent);
+    for (uint32 i = 0; i < a->tracker.nentries; i++) {
+        FrameEntry *fe = a->tracker[i];
+        if (!deadEntry(fe) && fe->isType(JSVAL_TYPE_DOUBLE) &&
+            !fe->isCopy() && !fe->isInvariant() && !fe->isConstant()) {
+            masm.ensureInMemoryDouble(addressOf(fe));
+        }
+    }
+}
+
+void
 FrameState::pushCopyOf(uint32 index)
 {
     FrameEntry *backing = entryFor(index);
     FrameEntry *fe = rawPush();
     fe->resetUnsynced();
     if (backing->isConstant()) {
         fe->setConstant(Jsvalify(backing->getValue()));
     } else {
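
ensureInMemoryDoubles walks the tracker and emits Assembler::ensureInMemoryDouble for every live, non-copy, non-constant entry typed as a double. That assembler helper is outside this diff; per the comment in merge() above, stub paths are allowed to leave an int32 where a double is expected, so the emitted code presumably tests the slot's tag and converts in place. A minimal sketch, with hypothetical scratch-register names:

    /* Hypothetical sketch of Assembler::ensureInMemoryDouble. */
    void
    Assembler::ensureInMemoryDouble(Address address)
    {
        /* Slots already holding a double need no work. */
        Jump isDouble = testDouble(Assembler::Equal, address);

        /* Otherwise the slot holds an int32: convert it and store it back. */
        loadPayload(address, scratchRegister);            /* hypothetical scratch */
        convertInt32ToDouble(scratchRegister, fpScratch); /* hypothetical FP scratch */
        storeDouble(fpScratch, address);

        isDouble.linkTo(label(), this);
    }
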
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -280,16 +280,22 @@ class FrameState
 
     /* Push a value which is definitely a double. */
     void pushDouble(FPRegisterID fpreg);
     void pushDouble(Address address);
 
     /* Ensure that fe is definitely a double.  It must already be either int or double. */
     void ensureDouble(FrameEntry *fe);
 
+    /*
+     * Emit code to masm ensuring that all in-memory slots thought to be
+     * doubles are in fact doubles.
+     */
+    void ensureInMemoryDoubles(Assembler &masm);
+
     /* Forget that fe is definitely a double. */
     void forgetKnownDouble(FrameEntry *fe);
 
     /*
      * Pushes a known type and allocated payload onto the operation stack.
      * This must be used when the type is known, but cannot be propagated
      * because it is not known to be correct at a slow-path merge point.
      *
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -769,39 +769,45 @@ js::mjit::JaegerShotAtSafePoint(JSContex
 }
 
 NativeMapEntry *
 JITScript::nmap() const
 {
     return (NativeMapEntry *)((char*)this + sizeof(JITScript));
 }
 
-char *
-JITScript::nmapSectionLimit() const
-{
-    return (char *)nmap() + sizeof(NativeMapEntry) * nNmapPairs;
-}
-
 js::mjit::InlineFrame *
 JITScript::inlineFrames() const
 {
-    return (js::mjit::InlineFrame *)nmapSectionLimit();
+    return (js::mjit::InlineFrame *)((char *)nmap() + sizeof(NativeMapEntry) * nNmapPairs);
 }
 
 js::mjit::CallSite *
 JITScript::callSites() const
 {
     return (js::mjit::CallSite *)((char *)inlineFrames() + sizeof(js::mjit::InlineFrame) * nInlineFrames);
 }
 
+js::mjit::RejoinSite *
+JITScript::rejoinSites() const
+{
+    return (js::mjit::RejoinSite *)((char *)callSites() + sizeof(js::mjit::CallSite) * nCallSites);
+}
+
+char *
+JITScript::commonSectionLimit() const
+{
+    return (char *)rejoinSites() + sizeof(js::mjit::RejoinSite) * nRejoinSites;
+}
+
 #ifdef JS_MONOIC
 ic::GetGlobalNameIC *
 JITScript::getGlobalNames() const
 {
-    return (ic::GetGlobalNameIC *)((char *)callSites() + sizeof(js::mjit::CallSite) * nCallSites);
+    return (ic::GetGlobalNameIC *) commonSectionLimit();
 }
 
 ic::SetGlobalNameIC *
 JITScript::setGlobalNames() const
 {
     return (ic::SetGlobalNameIC *)((char *)getGlobalNames() +
             sizeof(ic::GetGlobalNameIC) * nGetGlobalNames);
 }
@@ -829,17 +835,17 @@ char *
 JITScript::monoICSectionsLimit() const
 {
     return (char *)traceICs() + sizeof(ic::TraceICInfo) * nTraceICs;
 }
 #else   // JS_MONOIC
 char *
 JITScript::monoICSectionsLimit() const
 {
-    return nmapSectionsLimit();
+    return commonSectionLimit();
 }
 #endif  // JS_MONOIC
 
 #ifdef JS_POLYIC
 ic::GetElementIC *
 JITScript::getElems() const
 {
     return (ic::GetElementIC *)monoICSectionsLimit();
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -333,16 +333,17 @@ typedef void (JS_FASTCALL *VoidStubPIC)(
 typedef void (JS_FASTCALL *VoidStubGetElemIC)(VMFrame &, js::mjit::ic::GetElementIC *);
 typedef void (JS_FASTCALL *VoidStubSetElemIC)(VMFrame &f, js::mjit::ic::SetElementIC *);
 #endif
 
 namespace mjit {
 
 struct InlineFrame;
 struct CallSite;
+struct RejoinSite;
 
 struct NativeMapEntry {
     size_t          bcOff;  /* bytecode offset in script */
     void            *ncode; /* pointer to native code */
 };
 
 struct JITScript {
     typedef JSC::MacroAssemblerCodeRef CodeRef;
@@ -360,19 +361,21 @@ struct JITScript {
      * because JITScripts are common -- we only record their lengths.  We can
      * find any of the sections from the lengths because we know their order.
      * Therefore, do not change the section ordering in finishThisUp() without
      * changing nMICs() et al as well.
      */
     uint32          nNmapPairs:30;      /* The NativeMapEntrys are sorted by .bcOff.
                                            .ncode values may not be NULL. */
     bool            singleStepMode:1;   /* compiled in "single step mode" */
-    bool            rejoinPoints:1;     /* compiled with recompilation rejoin points */
+    bool            rejoinPoints:1;     /* compiled with all rejoin points for
+                                           inline frame expansions */
     uint32          nInlineFrames;
     uint32          nCallSites;
+    uint32          nRejoinSites;
 #ifdef JS_MONOIC
     uint32          nGetGlobalNames;
     uint32          nSetGlobalNames;
     uint32          nCallICs;
     uint32          nEqualityICs;
     uint32          nTraceICs;
 #endif
 #ifdef JS_POLYIC
@@ -397,16 +400,17 @@ struct JITScript {
     // Additional ExecutablePools that IC stubs were generated into.
     typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
     ExecPoolVector execPools;
 #endif
 
     NativeMapEntry *nmap() const;
     js::mjit::InlineFrame *inlineFrames() const;
     js::mjit::CallSite *callSites() const;
+    js::mjit::RejoinSite *rejoinSites() const;
 #ifdef JS_MONOIC
     ic::GetGlobalNameIC *getGlobalNames() const;
     ic::SetGlobalNameIC *setGlobalNames() const;
     ic::CallICInfo *callICs() const;
     ic::EqualityICInfo *equalityICs() const;
     ic::TraceICInfo *traceICs() const;
 #endif
 #ifdef JS_POLYIC
@@ -433,17 +437,17 @@ struct JITScript {
     size_t mainCodeSize() { return code.m_size; } /* doesn't account for fragmentation */
 
     jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
 
     void trace(JSTracer *trc);
 
   private:
     /* Helpers used to navigate the variable-length sections. */
-    char *nmapSectionLimit() const;
+    char *commonSectionLimit() const;
     char *monoICSectionsLimit() const;
     char *polyICSectionsLimit() const;
 };
 
 /*
  * Execute the given mjit code. This is a low-level call and callers must
  * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
  */
@@ -542,16 +546,42 @@ struct CallSite
         this->id = id;
     }
 
     bool isTrap() const {
         return id == MAGIC_TRAP_ID;
     }
 };
 
+struct RejoinSite
+{
+    // When doing on-stack recompilation, we take a frame that made a call at
+    // some CallSite in the original JIT and redirect it to a corresponding
+    // RejoinSite in the new JIT. Rejoin sites are similar to call sites,
+    // except that they perform additional checking and int-to-double
+    // coercions to ensure the stack types are consistent with what the new
+    // JIT expects.
+
+    // Note: we don't rejoin at sites within inline calls; such inline frames
+    // are expanded first.
+    uint32 codeOffset;
+    uint32 pcOffset;
+    size_t id;
+
+    // Identifier which can match any call site ID in the original script for
+    // this PC. This should appear after all other rejoin sites at the PC.
+    static const size_t VARIADIC_ID = 2;
+
+    void initialize(uint32 codeOffset, uint32 pcOffset, size_t id) {
+        this->codeOffset = codeOffset;
+        this->pcOffset = pcOffset;
+        this->id = id;
+    }
+};
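
The matching rule this struct implies (and which Recompiler::findRejoin in Retcon.cpp below implements) is: the bytecode offsets must agree, and the id must either equal the call site's id or be the variadic catch-all. Written as a standalone predicate for illustration:

    /* Illustration of the rejoin-site matching rule used by findRejoin. */
    static inline bool
    RejoinSiteMatches(const RejoinSite &rs, const CallSite &cs)
    {
        return rs.pcOffset == cs.pcOffset &&
               (rs.id == cs.id || rs.id == RejoinSite::VARIADIC_ID);
    }
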
+
 /*
  * Re-enables a tracepoint in the method JIT. When full is true, we
  * also reset the iteration counter.
  */
 void
 ResetTraceHint(JSScript *script, jsbytecode *pc, uint16_t index, bool full);
 
 uintN
--- a/js/src/methodjit/Retcon.cpp
+++ b/js/src/methodjit/Retcon.cpp
@@ -72,56 +72,75 @@ AutoScriptRetrapper::untrap(jsbytecode *
     if (!traps.append(pc))
         return false;
     *pc = JS_GetTrapOpcode(cx, script, pc);
     return true;
 }
 
 Recompiler::PatchableAddress
 Recompiler::findPatch(JITScript *jit, void **location)
-{ 
+{
     uint8* codeStart = (uint8 *)jit->code.m_code.executableAddress();
+
     CallSite *callSites_ = jit->callSites();
     for (uint32 i = 0; i < jit->nCallSites; i++) {
         if (callSites_[i].codeOffset + codeStart == *location) {
+            JS_ASSERT(callSites_[i].inlineIndex == uint32(-1));
             PatchableAddress result;
             result.location = location;
             result.callSite = callSites_[i];
             return result;
         }
     }
 
+    RejoinSite *rejoinSites_ = jit->rejoinSites();
+    for (uint32 i = 0; i < jit->nRejoinSites; i++) {
+        const RejoinSite &rs = rejoinSites_[i];
+        if (rs.codeOffset + codeStart == *location) {
+            PatchableAddress result;
+            result.location = location;
+            result.callSite.initialize(rs.codeOffset, uint32(-1), rs.pcOffset, rs.id);
+            return result;
+        }
+    }
+
     JS_NOT_REACHED("failed to find call site");
     return PatchableAddress();
 }
 
 void *
-Recompiler::findCallSite(JITScript *jit, const CallSite &callSite)
+Recompiler::findRejoin(JITScript *jit, const CallSite &callSite)
 {
     JS_ASSERT(callSite.inlineIndex == uint32(-1));
 
-    CallSite *callSites_ = jit->callSites();
-    for (uint32 i = 0; i < jit->nCallSites; i++) {
-        CallSite &cs = callSites_[i];
-        if (cs.inlineIndex == uint32(-1) &&
-            cs.pcOffset == callSite.pcOffset && cs.id == callSite.id) {
+    RejoinSite *rejoinSites_ = jit->rejoinSites();
+    for (uint32 i = 0; i < jit->nRejoinSites; i++) {
+        RejoinSite &rs = rejoinSites_[i];
+        if (rs.pcOffset == callSite.pcOffset &&
+            (rs.id == callSite.id || rs.id == RejoinSite::VARIADIC_ID)) {
+            /*
+             * We should not catch rejoin sites for scripted calls with a
+             * variadic id; the rejoin code for these is different.
+             */
+            JS_ASSERT_IF(rs.id == RejoinSite::VARIADIC_ID,
+                         callSite.id != CallSite::NCODE_RETURN_ID);
             uint8* codeStart = (uint8 *)jit->code.m_code.executableAddress();
-            return codeStart + cs.codeOffset;
+            return codeStart + rs.codeOffset;
         }
     }
 
     /* We have no idea where to patch up to. */
     JS_NOT_REACHED("Call site vanished.");
     return NULL;
 }
 
 void
 Recompiler::applyPatch(JITScript *jit, PatchableAddress& toPatch)
 {
-    void *result = findCallSite(jit, toPatch.callSite);
+    void *result = findRejoin(jit, toPatch.callSite);
     JS_ASSERT(result);
     *toPatch.location = result;
 }
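
Taken together, findPatch and applyPatch implement the frame redirection described in RejoinSite: locate the old-JIT call site a stored return address points into, then, after recompilation, rewrite that address to the matching rejoin site in the new JIT. A condensed view of the flow (sketch only, ignoring access control; the real driver walks the frame and patch vectors in Recompiler::recompile):

    /* Hypothetical condensation of the patching flow. */
    static void
    RedirectReturnAddress(JITScript *oldJIT, JITScript *newJIT, void **location)
    {
        /* Identify which old-JIT call site 'location' currently points into. */
        Recompiler::PatchableAddress addr = Recompiler::findPatch(oldJIT, location);

        /* After recompiling, point it at the corresponding rejoin site. */
        Recompiler::applyPatch(newJIT, addr);
    }
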
 
 Recompiler::PatchableNative
 Recompiler::stealNative(JITScript *jit, jsbytecode *pc)
 {
     /*
@@ -580,17 +599,17 @@ Recompiler::recompile(JSScript *script, 
                       Vector<PatchableAddress> &patches, Vector<CallSite> &sites,
                       Vector<PatchableNative> &natives)
 {
     JaegerSpew(JSpew_Recompile, "On stack recompilation, %u frames, %u patches, %u natives\n",
                frames.length(), patches.length(), natives.length());
 
     CompileStatus status = Compile_Retry;
     while (status == Compile_Retry) {
-        Compiler cc(cx, script, isConstructing, &frames, true);
+        Compiler cc(cx, script, isConstructing, &frames);
         if (!cc.loadOldTraps(sites))
             return false;
         status = cc.compile();
     }
     if (status != Compile_Okay)
         return false;
 
     JITScript *jit = script->getJIT(isConstructing);
--- a/js/src/methodjit/Retcon.h
+++ b/js/src/methodjit/Retcon.h
@@ -106,17 +106,17 @@ public:
     expandInlineFrames(JSContext *cx, JSStackFrame *fp, mjit::CallSite *inlined,
                        JSStackFrame *next, VMFrame *f);
 
 private:
     JSContext *cx;
     JSScript *script;
 
     static PatchableAddress findPatch(JITScript *jit, void **location);
-    static void * findCallSite(JITScript *jit, const CallSite &callSite);
+    static void * findRejoin(JITScript *jit, const CallSite &callSite);
 
     static void applyPatch(JITScript *jit, PatchableAddress& toPatch);
     PatchableNative stealNative(JITScript *jit, jsbytecode *pc);
     void patchNative(JITScript *jit, PatchableNative &native);
     bool recompile(JSScript *script, bool isConstructing,
                    Vector<PatchableFrame> &frames,
                    Vector<PatchableAddress> &patches, Vector<CallSite> &sites,
                    Vector<PatchableNative> &natives);
--- a/js/src/methodjit/StubCalls.cpp
+++ b/js/src/methodjit/StubCalls.cpp
@@ -1,9 +1,9 @@
-/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=4 sw=4 et tw=99:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
@@ -590,22 +590,28 @@ stubs::SetElem(VMFrame &f)
             jsuint length = obj->getDenseArrayInitializedLength();
             jsint i = JSID_TO_INT(id);
             if ((jsuint)i < length) {
                 if (obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
                     if (js_PrototypeHasIndexedProperties(cx, obj))
                         break;
                     if ((jsuint)i >= obj->getArrayLength() && !obj->setArrayLength(cx, i + 1))
                         THROW();
-                    *f.pc() = JSOP_SETHOLE;
+                    /*
+                     * Note: this stub is also used for ENUMELEM, so check
+                     * the op before overwriting it.
+                     */
+                    if (JSOp(*f.pc()) == JSOP_SETELEM)
+                        *f.pc() = JSOP_SETHOLE;
                 }
                 obj->setDenseArrayElement(i, rval);
                 goto end_setelem;
             } else {
-                *f.pc() = JSOP_SETHOLE;
+                if (JSOp(*f.pc()) == JSOP_SETELEM)
+                    *f.pc() = JSOP_SETHOLE;
             }
         }
     } while (0);
     if (!obj->setProperty(cx, id, &rval, strict))
         THROW();
   end_setelem:
     /* :FIXME: Moving the assigned object into the lowest stack slot
      * is a temporary hack. What we actually want is an implementation
@@ -1296,16 +1302,17 @@ stubs::Interrupt(VMFrame &f, jsbytecode 
 {
     if (!js_HandleExecutionInterrupt(f.cx))
         THROW();
 }
 
 void JS_FASTCALL
 stubs::RecompileForInline(VMFrame &f)
 {
+    ExpandInlineFrames(f.cx, true);
     Recompiler recompiler(f.cx, f.script());
     if (!recompiler.recompile())
         THROW();
 }
 
 void JS_FASTCALL
 stubs::Trap(VMFrame &f, uint32 trapTypes)
 {
@@ -2796,16 +2803,35 @@ void JS_FASTCALL
 stubs::NegZeroHelper(VMFrame &f)
 {
     if (!f.script()->typeMonitorOverflow(f.cx, f.pc()))
         THROW();
     f.regs.sp[-1].setDouble(-0.0);
 }
 
 void JS_FASTCALL
+stubs::CallPropSwap(VMFrame &f)
+{
+    /*
+     * CALLPROP operations on strings are implemented in terms of GETPROP.
+     * If we rejoin from such a GETPROP, we come here at the end of the
+     * CALLPROP to fix up the stack. Right now the stack looks like:
+     *
+     * STRING PROP
+     *
+     * We need it to be:
+     *
+     * PROP STRING
+     */
+    Value v = f.regs.sp[-1];
+    f.regs.sp[-1] = f.regs.sp[-2];
+    f.regs.sp[-2] = v;
+}
+
+void JS_FASTCALL
 stubs::CheckArgumentTypes(VMFrame &f)
 {
     JSStackFrame *fp = f.fp();
     JSFunction *fun = fp->fun();
     JSScript *script = fun->script();
     RecompilationMonitor monitor(f.cx);
 
     /* Postpone recompilations until all args have been updated. */
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -215,16 +215,17 @@ JSBool JS_FASTCALL InstanceOf(VMFrame &f
 void JS_FASTCALL FastInstanceOf(VMFrame &f);
 void JS_FASTCALL ArgCnt(VMFrame &f);
 void JS_FASTCALL Unbrand(VMFrame &f);
 
 /* Helper for triggering recompilation should a name read produce an undefined value or -0. */
 void JS_FASTCALL UndefinedHelper(VMFrame &f);
 void JS_FASTCALL NegZeroHelper(VMFrame &f);
 
+void JS_FASTCALL CallPropSwap(VMFrame &f);
 void JS_FASTCALL CheckArgumentTypes(VMFrame &f);
 
 #ifdef DEBUG
 void JS_FASTCALL AssertArgumentTypes(VMFrame &f);
 #endif
 
 void JS_FASTCALL MissedBoundsCheckEntry(VMFrame &f);
 void JS_FASTCALL MissedBoundsCheckHead(VMFrame &f);
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -165,23 +165,23 @@ StubCompiler::linkRejoin(Jump j)
 }
 
 typedef JSC::MacroAssembler::RegisterID RegisterID;
 typedef JSC::MacroAssembler::ImmPtr ImmPtr;
 typedef JSC::MacroAssembler::Imm32 Imm32;
 typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
 
 JSC::MacroAssembler::Call
-StubCompiler::emitStubCall(void *ptr)
+StubCompiler::emitStubCall(void *ptr, bool needsRejoin)
 {
-    return emitStubCall(ptr, frame.totalDepth());
+    return emitStubCall(ptr, needsRejoin, frame.totalDepth());
 }
 
 JSC::MacroAssembler::Call
-StubCompiler::emitStubCall(void *ptr, int32 slots)
+StubCompiler::emitStubCall(void *ptr, bool needsRejoin, int32 slots)
 {
     JaegerSpew(JSpew_Insns, " ---- BEGIN SLOW CALL CODE ---- \n");
     DataLabelPtr inlinePatch;
     Call cl = masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                   ptr, cc.outerPC(), &inlinePatch, slots);
     JaegerSpew(JSpew_Insns, " ---- END SLOW CALL CODE ---- \n");
 
     /* Add a hook for restoring loop invariants if necessary. */
@@ -189,17 +189,17 @@ StubCompiler::emitStubCall(void *ptr, in
         Jump j = masm.jump();
         Label l = masm.label();
         cc.loop->addInvariantCall(j, l, true);
     }
 
     /* Add the call site for debugging and recompilation. */
     Compiler::InternalCallSite site(masm.callReturnOffset(cl),
                                     cc.inlineIndex(), cc.inlinePC(),
-                                    (size_t)ptr, true, true);
+                                    (size_t)ptr, true, needsRejoin);
     site.inlinePatch = inlinePatch;
     cc.addCallSite(site);
     return cl;
 }
 
 void
 StubCompiler::fixCrossJumps(uint8 *ncode, size_t offset, size_t total)
 {
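
emitStubCall now records whether the call needs a rejoin site via the needsRejoin flag on the InternalCallSite. That flag is what the INLINE_STUBCALL/OOL_STUBCALL versus *_NO_REJOIN macro pairs used throughout presumably control; a hedged sketch of how the OOL variants might expand (the real definitions are not in this section):

    /* Hypothetical expansions: the only difference is whether the recorded
       call site will get a rejoin site. */
    #define OOL_STUBCALL(stub)                                               \
        stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), true)
    #define OOL_STUBCALL_NO_REJOIN(stub)                                     \
        stubcc.emitStubCall(JS_FUNC_TO_DATA_PTR(void *, (stub)), false)
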
--- a/js/src/methodjit/StubCompiler.h
+++ b/js/src/methodjit/StubCompiler.h
@@ -132,18 +132,18 @@ class StubCompiler
     void rejoin(Changes changes);
     void linkRejoin(Jump j);
 
     /* Finish all native code patching. */
     void fixCrossJumps(uint8 *ncode, size_t offset, size_t total);
     bool jumpInScript(Jump j, jsbytecode *target);
     unsigned crossJump(Jump j, Label l);
 
-    Call emitStubCall(void *ptr);
-    Call emitStubCall(void *ptr, int32 slots);
+    Call emitStubCall(void *ptr, bool needsRejoin);
+    Call emitStubCall(void *ptr, bool needsRejoin, int32 slots);
 
     void patchJoin(unsigned i, bool script, Assembler::Address address, AnyRegisterID reg);
 };
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* jsstub_compiler_h__ */