Backed out changeset ac46097e312c
author David Anderson <danderson@mozilla.com>
Wed, 06 Oct 2010 19:25:49 -0700
changeset 74596 66356ff98dc2d6a1d7a52049dd7125720460f13d
parent 74595 ac46097e312cf854d997e3bdc566c7c35583f940
child 74597 2bb1ec3b12a46e520748c11478424757815c1b5c
push id 2
push user bsmedberg@mozilla.com
push date Fri, 19 Aug 2011 14:38:13 +0000
milestone 2.0b7pre
backs out ac46097e312cf854d997e3bdc566c7c35583f940
Backed out changeset ac46097e312c
js/src/methodjit/Compiler.cpp
js/src/methodjit/FastArithmetic.cpp
js/src/methodjit/FastOps.cpp
js/src/methodjit/FrameEntry.h
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.cpp
js/src/methodjit/FrameState.h
js/src/methodjit/ImmutableSync.cpp
js/src/methodjit/ImmutableSync.h
js/src/methodjit/MachineRegs.h
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -597,17 +597,17 @@ mjit::Compiler::generateMethod()
     mjit::AutoScriptRetrapper trapper(cx, script);
 
     for (;;) {
         JSOp op = JSOp(*PC);
 
         OpcodeStatus &opinfo = analysis[PC];
         frame.setInTryBlock(opinfo.inTryBlock);
         if (opinfo.nincoming || opinfo.trap) {
-            frame.syncAndForgetEverything(opinfo.stackDepth);
+            frame.forgetEverything(opinfo.stackDepth);
             opinfo.safePoint = true;
         }
         jumpMap[uint32(PC - script->code)] = masm.label();
 
         if (opinfo.trap) {
             if (!trapper.untrap(PC))
                 return Compile_Error;
             op = JSOp(*PC);
@@ -678,17 +678,17 @@ mjit::Compiler::generateMethod()
             frame.pop();
             emitReturn();
           }
           END_CASE(JSOP_RETURN)
 
           BEGIN_CASE(JSOP_GOTO)
           {
             /* :XXX: this isn't really necessary if we follow the branch. */
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             Jump j = masm.jump();
             jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC));
           }
           END_CASE(JSOP_GOTO)
 
           BEGIN_CASE(JSOP_IFEQ)
           BEGIN_CASE(JSOP_IFNE)
             jsop_ifneq(op, PC + GET_JUMP_OFFSET(PC));
@@ -781,17 +781,17 @@ mjit::Compiler::generateMethod()
                     if (!target) {
                         frame.push(Value(BooleanValue(result)));
                     } else {
                         if (fused == JSOP_IFEQ)
                             result = !result;
 
                         /* Branch is never taken, don't bother doing anything. */
                         if (result) {
-                            frame.syncAndForgetEverything();
+                            frame.forgetEverything();
                             Jump j = masm.jump();
                             jumpAndTrace(j, target);
                         }
                     }
                 } else {
                     emitStubCmpOp(stub, target, fused);
                 }
             } else {
@@ -1100,33 +1100,33 @@ mjit::Compiler::generateMethod()
           END_CASE(JSOP_TRUE)
 
           BEGIN_CASE(JSOP_OR)
           BEGIN_CASE(JSOP_AND)
             jsop_andor(op, PC + GET_JUMP_OFFSET(PC));
           END_CASE(JSOP_AND)
 
           BEGIN_CASE(JSOP_TABLESWITCH)
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             masm.move(ImmPtr(PC), Registers::ArgReg1);
 
-            /* prepareStubCall() is not needed due to syncAndForgetEverything() */
+            /* prepareStubCall() is not needed due to forgetEverything() */
             stubCall(stubs::TableSwitch);
             frame.pop();
 
             masm.jump(Registers::ReturnReg);
             PC += js_GetVariableBytecodeLength(PC);
             break;
           END_CASE(JSOP_TABLESWITCH)
 
           BEGIN_CASE(JSOP_LOOKUPSWITCH)
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             masm.move(ImmPtr(PC), Registers::ArgReg1);
 
-            /* prepareStubCall() is not needed due to syncAndForgetEverything() */
+            /* prepareStubCall() is not needed due to forgetEverything() */
             stubCall(stubs::LookupSwitch);
             frame.pop();
 
             masm.jump(Registers::ReturnReg);
             PC += js_GetVariableBytecodeLength(PC);
             break;
           END_CASE(JSOP_LOOKUPSWITCH)
 
@@ -1208,33 +1208,22 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_GETLOCAL)
           {
             uint32 slot = GET_SLOTNO(PC);
             frame.pushLocal(slot);
           }
           END_CASE(JSOP_GETLOCAL)
 
           BEGIN_CASE(JSOP_SETLOCAL)
-          {
-            jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
-            bool pop = JSOp(*next) == JSOP_POP && !analysis[next].nincoming;
-            frame.storeLocal(GET_SLOTNO(PC), pop);
-            if (pop) {
+          BEGIN_CASE(JSOP_SETLOCALPOP)
+            frame.storeLocal(GET_SLOTNO(PC));
+            if (op == JSOP_SETLOCALPOP)
                 frame.pop();
-                PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
-                break;
-            }
-          }
           END_CASE(JSOP_SETLOCAL)
 
-          BEGIN_CASE(JSOP_SETLOCALPOP)
-            frame.storeLocal(GET_SLOTNO(PC), true);
-            frame.pop();
-          END_CASE(JSOP_SETLOCALPOP)
-
           BEGIN_CASE(JSOP_UINT16)
             frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
           END_CASE(JSOP_UINT16)
 
           BEGIN_CASE(JSOP_NEWINIT)
           {
             jsint i = GET_INT8(PC);
             JS_ASSERT(i == JSProto_Array || i == JSProto_Object);
@@ -1367,17 +1356,17 @@ mjit::Compiler::generateMethod()
           BEGIN_CASE(JSOP_DEFFUN)
           {
             uint32 index = fullAtomIndex(PC);
             JSFunction *inner = script->getFunction(index);
 
             if (fun) {
                 JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL);
                 if (localKind != JSLOCAL_NONE)
-                    frame.syncAndForgetEverything();
+                    frame.forgetEverything();
             }
 
             prepareStubCall(Uses(0));
             masm.move(ImmPtr(inner), Registers::ArgReg1);
             stubCall(STRICT_VARIANT(stubs::DefFun));
           }
           END_CASE(JSOP_DEFFUN)
 
@@ -1434,17 +1423,16 @@ mjit::Compiler::generateMethod()
             stubCall(stub);
 
             frame.takeReg(Registers::ReturnReg);
             frame.pushTypedPayload(JSVAL_TYPE_OBJECT, Registers::ReturnReg);
           }
           END_CASE(JSOP_LAMBDA)
 
           BEGIN_CASE(JSOP_TRY)
-            frame.syncAndForgetEverything();
           END_CASE(JSOP_TRY)
 
           BEGIN_CASE(JSOP_GETFCSLOT)
           BEGIN_CASE(JSOP_CALLFCSLOT)
           {
             uintN index = GET_UINT16(PC);
             // JSObject *obj = &fp->argv[-2].toObject();
             RegisterID reg = frame.allocReg();
@@ -1559,17 +1547,17 @@ mjit::Compiler::generateMethod()
             // VMFrame::fp to the correct fp for the entry point. We need to copy
             // that value here to FpReg so that FpReg also has the correct sp.
             // Otherwise, we would simply be using a stale FpReg value.
             if (analysis[PC].exceptionEntry)
                 restoreFrameRegs(masm);
 
             /* For now, don't bother doing anything for this opcode. */
             JSObject *obj = script->getObject(fullAtomIndex(PC));
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             masm.move(ImmPtr(obj), Registers::ArgReg1);
             uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
             stubCall(stubs::EnterBlock);
             frame.enterBlock(n);
           }
           END_CASE(JSOP_ENTERBLOCK)
 
           BEGIN_CASE(JSOP_LEAVEBLOCK)
@@ -1828,24 +1816,24 @@ mjit::Compiler::emitReturn()
      * However, it's an optimization to throw it away early - the tracker
      * won't be spilled on further exits or join points.
      */
     if (fun) {
         if (fun->isHeavyweight()) {
             /* There will always be a call object. */
             prepareStubCall(Uses(0));
             stubCall(stubs::PutCallObject);
-            frame.discardFrame();
+            frame.throwaway();
         } else {
             /* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
             Jump putObjs = masm.branchTest32(Assembler::NonZero,
                                              Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
                                              Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
             stubcc.linkExit(putObjs, Uses(frame.frameDepth()));
-            frame.discardFrame();
+            frame.throwaway();
 
             stubcc.leave();
             stubcc.call(stubs::PutActivationObjects);
             stubcc.rejoin(Changes(0));
         }
     }
 
     /*
@@ -1921,27 +1909,27 @@ mjit::Compiler::interruptCheckHelper()
      * interrupt is on another thread.
      */
     stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
     stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
     Address flag(reg, offsetof(JSThread, data.interruptFlags));
     Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
 #endif
 
-    frame.freeReg(reg);
-
     frame.sync(stubcc.masm, Uses(0));
     stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
     stubcc.call(stubs::Interrupt);
     ADD_CALLSITE(true);
     stubcc.rejoin(Changes(0));
 
 #ifdef JS_THREADSAFE
     stubcc.linkRejoin(noInterrupt);
 #endif
+
+    frame.freeReg(reg);
 }
 
 void
 mjit::Compiler::emitPrimitiveTestForNew(uint32 argc)
 {
     Jump primitive = masm.testPrimitive(Assembler::Equal, JSReturnReg_Type);
     stubcc.linkExitDirect(primitive, stubcc.masm.label());
     FrameEntry *fe = frame.peek(-int(argc + 1));
@@ -2023,19 +2011,17 @@ mjit::Compiler::inlineCallHelper(uint32 
     RegisterID dataReg = frame.tempRegForData(fe);
     frame.pinReg(dataReg);
 
     /*
      * We rely on the fact that syncAndKill() is not allowed to touch the
      * registers we've preserved.
      */
     frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
-    frame.unpinKilledReg(dataReg);
-    if (typeReg.isSet())
-        frame.unpinKilledReg(typeReg.reg());
+    frame.resetRegState();
 
     Registers tempRegs;
 
     /* Test the type if necessary. Failing this always takes a really slow path. */
     MaybeJump notObjectJump;
     if (typeReg.isSet())
         notObjectJump = masm.testObject(Assembler::NotEqual, typeReg.reg());
 
@@ -2289,17 +2275,17 @@ mjit::Compiler::emitStubCmpOp(BoolStub s
     frame.pop();
 
     if (!target) {
         frame.takeReg(Registers::ReturnReg);
         frame.pushTypedPayload(JSVAL_TYPE_BOOLEAN, Registers::ReturnReg);
     } else {
         JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
 
-        frame.syncAndForgetEverything();
+        frame.forgetEverything();
         Assembler::Condition cond = (fused == JSOP_IFEQ)
                                     ? Assembler::Zero
                                     : Assembler::NonZero;
         Jump j = masm.branchTest32(cond, Registers::ReturnReg,
                                    Registers::ReturnReg);
         jumpAndTrace(j, target);
     }
 }
@@ -3689,17 +3675,17 @@ mjit::Compiler::iterMore()
     Jump notFast = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
     stubcc.linkExitForBranch(notFast);
 
     /* Get private from iter obj. */
     masm.loadFunctionPrivate(reg, T1);
 
     /* Get props_cursor, test */
     RegisterID T2 = frame.allocReg();
-    frame.syncAndForgetEverything();
+    frame.forgetEverything();
     masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
     masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
     Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
 
     jsbytecode *target = &PC[JSOP_MOREITER_LENGTH];
     JSOp next = JSOp(*target);
     JS_ASSERT(next == JSOP_IFNE || next == JSOP_IFNEX);
 
@@ -3926,17 +3912,18 @@ mjit::Compiler::jsop_setgname(uint32 ind
         objReg = frame.allocReg();
 
         masm.load32FromImm(&obj->objShape, objReg);
         shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, objReg,
                                             Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                             mic.shape);
         masm.move(ImmPtr(obj), objReg);
     } else {
-        objReg = frame.copyDataIntoReg(objFe);
+        objReg = frame.tempRegForData(objFe);
+        frame.pinReg(objReg);
         RegisterID reg = frame.allocReg();
 
         masm.loadShape(objReg, reg);
         shapeGuard = masm.branch32WithPatch(Assembler::NotEqual, reg,
                                             Imm32(int32(JSObjectMap::INVALID_SHAPE)),
                                             mic.shape);
         frame.freeReg(reg);
     }
@@ -4020,17 +4007,18 @@ mjit::Compiler::jsop_setgname(uint32 ind
      * used. Since we only need to patch the last instruction in
      * both paths above, remember the distance between the
      * load label and after the instruction to be patched.
      */
     mic.patchValueOffset = masm.differenceBetween(mic.load, masm.label());
     JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label()));
 #endif
 
-    frame.freeReg(objReg);
+    if (objFe->isConstant())
+        frame.freeReg(objReg);
     frame.popn(2);
     if (mic.u.name.dataConst) {
         frame.push(v);
     } else {
         if (mic.u.name.typeConst)
             frame.pushTypedPayload(typeTag, dataReg);
         else
             frame.pushRegs(typeReg, dataReg);
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -995,17 +995,17 @@ mjit::Compiler::jsop_equality_int_string
          */
         frame.syncAndKill(Registers(Registers::AvailRegs), Uses(frame.frameDepth()), Uses(2));
 
         /* Temporary for OOL string path. */
         RegisterID T1 = frame.allocReg();
 
         frame.pop();
         frame.pop();
-        frame.discardFrame();
+        frame.throwaway();
 
         /* Start of the slow path for equality stub call. */
         Label stubCall = stubcc.masm.label();
 
         JaegerSpew(JSpew_Insns, " ---- BEGIN STUB CALL CODE ---- \n");
 
         /* The lhs/rhs need to be synced in the stub call path. */
         frame.syncEntry(stubcc.masm, lhs, lvr);
@@ -1281,17 +1281,17 @@ mjit::Compiler::jsop_relational_double(J
         if (lhsNotNumber.isSet())
             stubcc.linkExitForBranch(lhsNotNumber.get());
         if (rhsNotNumber.isSet())
             stubcc.linkExitForBranch(rhsNotNumber.get());
         stubcc.leave();
         stubcc.call(stub);
 
         frame.popn(2);
-        frame.syncAndForgetEverything();
+        frame.forgetEverything();
 
         Jump j = masm.branchDouble(dblCond, fpLeft, fpRight);
 
         /*
          * The stub call has no need to rejoin since the state is synced.
          * Instead, we can just test the return value.
          */
         Assembler::Condition cond = (fused == JSOP_IFEQ)
@@ -1448,22 +1448,17 @@ mjit::Compiler::jsop_relational_full(JSO
         }
 
         /* Forget the world, preserving data. */
         frame.pinReg(cmpReg);
         if (reg.isSet())
             frame.pinReg(reg.reg());
         
         frame.popn(2);
-
-        frame.syncAndKillEverything();
-        frame.unpinKilledReg(cmpReg);
-        if (reg.isSet())
-            frame.unpinKilledReg(reg.reg());
-        frame.syncAndForgetEverything();
+        frame.forgetEverything();
         
         /* Operands could have been reordered, so use cmpOp. */
         Assembler::Condition i32Cond;
         bool ifeq = fused == JSOP_IFEQ;
         switch (cmpOp) {
           case JSOP_GT:
             i32Cond = ifeq ? Assembler::LessThanOrEqual : Assembler::GreaterThan;
             break;
--- a/js/src/methodjit/FastOps.cpp
+++ b/js/src/methodjit/FastOps.cpp
@@ -655,17 +655,17 @@ mjit::Compiler::jsop_equality(JSOp op, B
         frame.pop();
 
         /*
          * :FIXME: Easier test for undefined || null?
          * Maybe put them next to each other, subtract, do a single compare?
          */
 
         if (target) {
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
 
             if ((op == JSOP_EQ && fused == JSOP_IFNE) ||
                 (op == JSOP_NE && fused == JSOP_IFEQ)) {
                 Jump j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_UNDEFINED));
                 jumpAndTrace(j, target);
                 j = masm.branchPtr(Assembler::Equal, reg, ImmType(JSVAL_TYPE_NULL));
                 jumpAndTrace(j, target);
             } else {
@@ -902,17 +902,18 @@ mjit::Compiler::booleanJumpScript(JSOp o
 
     MaybeRegisterID type;
     MaybeRegisterID data;
 
     if (!fe->isTypeKnown() && !frame.shouldAvoidTypeRemat(fe))
         type.setReg(frame.copyTypeIntoReg(fe));
     data.setReg(frame.copyDataIntoReg(fe));
 
-    frame.syncAndForgetEverything();
+    /* :FIXME: Can something more lightweight be used? */
+    frame.forgetEverything();
 
     Assembler::Condition cond = (op == JSOP_IFNE || op == JSOP_OR)
                                 ? Assembler::NonZero
                                 : Assembler::Zero;
     Assembler::Condition ncond = (op == JSOP_IFNE || op == JSOP_OR)
                                  ? Assembler::Zero
                                  : Assembler::NonZero;
 
@@ -989,17 +990,17 @@ mjit::Compiler::jsop_ifneq(JSOp op, jsby
     if (fe->isConstant()) {
         JSBool b = js_ValueToBoolean(fe->getValue());
 
         frame.pop();
 
         if (op == JSOP_IFEQ)
             b = !b;
         if (b) {
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             jumpAndTrace(masm.jump(), target);
         }
         return;
     }
 
     booleanJumpScript(op, target);
 }
 
@@ -1009,17 +1010,17 @@ mjit::Compiler::jsop_andor(JSOp op, jsby
     FrameEntry *fe = frame.peek(-1);
 
     if (fe->isConstant()) {
         JSBool b = js_ValueToBoolean(fe->getValue());
         
         /* Short-circuit. */
         if ((op == JSOP_OR && b == JS_TRUE) ||
             (op == JSOP_AND && b == JS_FALSE)) {
-            frame.syncAndForgetEverything();
+            frame.forgetEverything();
             jumpAndTrace(masm.jump(), target);
         }
 
         frame.pop();
         return;
     }
 
     booleanJumpScript(op, target);
--- a/js/src/methodjit/FrameEntry.h
+++ b/js/src/methodjit/FrameEntry.h
@@ -123,17 +123,16 @@ class FrameEntry
 #endif
         knownType = type_;
         JS_ASSERT(!isNumber);
     }
 
     void track(uint32 index) {
         clear();
         index_ = index;
-        tracked = true;
     }
 
     void clear() {
         copied = false;
         copy = NULL;
         isNumber = false;
     }
 
@@ -206,34 +205,25 @@ class FrameEntry
      * Set copy index.
      */
     void setCopyOf(FrameEntry *fe) {
         JS_ASSERT_IF(fe, !fe->isConstant());
         JS_ASSERT(!isCopied());
         copy = fe;
     }
 
-    inline bool isTracked() const {
-        return tracked;
-    }
-
-    inline void untrack() {
-        tracked = false;
-    }
-
   private:
     JSValueType knownType;
     jsval_layout v_;
     RematInfo  type;
     RematInfo  data;
     uint32     index_;
     FrameEntry *copy;
     bool       copied;
     bool       isNumber;
-    bool       tracked;
-    char       padding[1];
+    char       padding[2];
 };
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* jsjaeger_valueinfo_h__ */
 
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -38,33 +38,36 @@
  * ***** END LICENSE BLOCK ***** */
 
 #if !defined jsjaeger_framestate_inl_h__ && defined JS_METHODJIT
 #define jsjaeger_framestate_inl_h__
 
 namespace js {
 namespace mjit {
 
-inline void
-FrameState::addToTracker(FrameEntry *fe)
+inline FrameEntry *
+FrameState::addToTracker(uint32 index)
 {
-    JS_ASSERT(!fe->isTracked());
+    JS_ASSERT(!base[index]);
+    FrameEntry *fe = &entries[index];
+    base[index] = fe;
     fe->track(tracker.nentries);
     tracker.add(fe);
     JS_ASSERT(tracker.nentries <= script->nslots);
+    return fe;
 }
 
 inline FrameEntry *
 FrameState::peek(int32 depth)
 {
     JS_ASSERT(depth < 0);
     JS_ASSERT(sp + depth >= spBase);
-    FrameEntry *fe = &sp[depth];
-    if (!fe->isTracked()) {
-        addToTracker(fe);
+    FrameEntry *fe = sp[depth];
+    if (!fe) {
+        fe = addToTracker(indexOf(depth));
         fe->resetSynced();
     }
     return fe;
 }
 
 inline void
 FrameState::popn(uint32 n)
 {
@@ -81,53 +84,45 @@ FrameState::haveSameBacking(FrameEntry *
         rhs = rhs->copyOf();
     return lhs == rhs;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::allocReg()
 {
     RegisterID reg;
-    if (!freeRegs.empty()) {
+    if (!freeRegs.empty())
         reg = freeRegs.takeAnyReg();
-    } else {
+    else
         reg = evictSomeReg();
-        regstate[reg].forget();
-    }
-
+    regstate[reg].fe = NULL;
     return reg;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::allocReg(uint32 mask)
 {
     RegisterID reg;
-    if (freeRegs.hasRegInMask(mask)) {
+    if (freeRegs.hasRegInMask(mask))
         reg = freeRegs.takeRegInMask(mask);
-    } else {
+    else
         reg = evictSomeReg(mask);
-        regstate[reg].forget();
-    }
-
+    regstate[reg].fe = NULL;
     return reg;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::allocReg(FrameEntry *fe, RematInfo::RematType type)
 {
     RegisterID reg;
-    if (!freeRegs.empty()) {
+    if (!freeRegs.empty())
         reg = freeRegs.takeAnyReg();
-    } else {
+    else
         reg = evictSomeReg();
-        regstate[reg].forget();
-    }
-
-    regstate[reg].associate(fe, type);
-
+    regstate[reg] = RegisterState(fe, type);
     return reg;
 }
 
 inline void
 FrameState::emitLoadTypeTag(FrameEntry *fe, RegisterID reg) const
 {
     emitLoadTypeTag(this->masm, fe, reg);
 }
@@ -162,77 +157,75 @@ FrameState::peekTypeInRegister(FrameEntr
     return fe->type.inRegister();
 }
 
 inline void
 FrameState::pop()
 {
     JS_ASSERT(sp > spBase);
 
-    FrameEntry *fe = --sp;
-    if (!fe->isTracked())
+    FrameEntry *fe = *--sp;
+    if (!fe)
         return;
 
     forgetAllRegs(fe);
 }
 
 inline void
 FrameState::freeReg(RegisterID reg)
 {
-    JS_ASSERT(!regstate[reg].usedBy());
-
+    JS_ASSERT(regstate[reg].fe == NULL);
     freeRegs.putReg(reg);
 }
 
 inline void
 FrameState::forgetReg(RegisterID reg)
 {
     /*
      * Important: Do not touch the fe here. We can peephole optimize away
      * loads and stores by re-using the contents of old FEs.
      */
-    JS_ASSERT_IF(regstate[reg].fe(), !regstate[reg].fe()->isCopy());
-
-    if (!regstate[reg].isPinned()) {
-        regstate[reg].forget();
-        freeRegs.putReg(reg);
-    }
+    JS_ASSERT_IF(regstate[reg].fe, !regstate[reg].fe->isCopy());
+    freeRegs.putReg(reg);
 }
 
 inline void
-FrameState::syncAndForgetEverything(uint32 newStackDepth)
+FrameState::forgetEverything(uint32 newStackDepth)
 {
-    syncAndForgetEverything();
+    forgetEverything();
     sp = spBase + newStackDepth;
 }
 
 inline FrameEntry *
 FrameState::rawPush()
 {
-    JS_ASSERT(unsigned(sp - entries) < nargs + script->nslots);
+    JS_ASSERT(unsigned(sp - base) < nargs + script->nslots);
+
+    sp++;
 
-    if (!sp->isTracked())
-        addToTracker(sp);
+    if (FrameEntry *fe = sp[-1])
+        return fe;
 
-    return sp++;
+    return addToTracker(&sp[-1] - base);
 }
 
 inline void
 FrameState::push(const Value &v)
 {
     FrameEntry *fe = rawPush();
     fe->setConstant(Jsvalify(v));
 }
 
 inline void
 FrameState::pushSynced()
 {
-    if (sp->isTracked())
-        sp->resetSynced();
     sp++;
+
+    if (FrameEntry *fe = sp[-1])
+        fe->resetSynced();
 }
 
 inline void
 FrameState::pushSyncedType(JSValueType type)
 {
     FrameEntry *fe = rawPush();
 
     fe->resetSynced();
@@ -244,17 +237,17 @@ FrameState::pushSynced(JSValueType type,
 {
     FrameEntry *fe = rawPush();
 
     fe->resetUnsynced();
     fe->type.sync();
     fe->data.sync();
     fe->setType(type);
     fe->data.setRegister(reg);
-    regstate[reg].associate(fe, RematInfo::DATA);
+    regstate[reg] = RegisterState(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::push(Address address)
 {
     FrameEntry *fe = rawPush();
 
     /* :XXX: X64 */
@@ -283,31 +276,31 @@ FrameState::pushRegs(RegisterID type, Re
 {
     JS_ASSERT(!freeRegs.hasReg(type) && !freeRegs.hasReg(data));
 
     FrameEntry *fe = rawPush();
 
     fe->resetUnsynced();
     fe->type.setRegister(type);
     fe->data.setRegister(data);
-    regstate[type].associate(fe, RematInfo::TYPE);
-    regstate[data].associate(fe, RematInfo::DATA);
+    regstate[type] = RegisterState(fe, RematInfo::TYPE);
+    regstate[data] = RegisterState(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::pushTypedPayload(JSValueType type, RegisterID payload)
 {
     JS_ASSERT(!freeRegs.hasReg(payload));
 
     FrameEntry *fe = rawPush();
 
     fe->resetUnsynced();
     fe->setType(type);
     fe->data.setRegister(payload);
-    regstate[payload].associate(fe, RematInfo::DATA);
+    regstate[payload] = RegisterState(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::pushNumber(MaybeRegisterID payload, bool asInt32)
 {
     JS_ASSERT_IF(payload.isSet(), !freeRegs.hasReg(payload.reg()));
 
     FrameEntry *fe = rawPush();
@@ -322,17 +315,17 @@ FrameState::pushNumber(MaybeRegisterID p
     } else {
         fe->type.setMemory();
     }
 
     fe->isNumber = true;
     if (payload.isSet()) {
         fe->data.unsync();
         fe->data.setRegister(payload.reg());
-        regstate[payload.reg()].associate(fe, RematInfo::DATA);
+        regstate[payload.reg()] = RegisterState(fe, RematInfo::DATA);
     } else {
         fe->data.setMemory();
     }
 }
 
 inline void
 FrameState::pushInt32(RegisterID payload)
 {
@@ -341,17 +334,17 @@ FrameState::pushInt32(RegisterID payload
     JS_ASSERT(!fe->isNumber);
 
     masm.storeTypeTag(ImmType(JSVAL_TYPE_INT32), addressOf(fe));
     fe->type.setMemory();
 
     fe->isNumber = true;
     fe->data.unsync();
     fe->data.setRegister(payload);
-    regstate[payload].associate(fe, RematInfo::DATA);
+    regstate[payload] = RegisterState(fe, RematInfo::DATA);
 }
 
 inline void
 FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
 {
     JS_ASSERT(!freeRegs.hasReg(payload));
 
     FrameEntry *fe = rawPush();
@@ -364,23 +357,23 @@ FrameState::pushUntypedPayload(JSValueTy
 #ifdef DEBUG
     fe->type.unsync();
 #endif
     fe->type.setMemory();
     fe->data.unsync();
     fe->setNotCopied();
     fe->setCopyOf(NULL);
     fe->data.setRegister(payload);
-    regstate[payload].associate(fe, RematInfo::DATA);
+    regstate[payload] = RegisterState(fe, RematInfo::DATA);
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::tempRegForType(FrameEntry *fe, RegisterID fallback)
 {
-    JS_ASSERT(!regstate[fallback].fe());
+    JS_ASSERT(regstate[fallback].fe == NULL);
     if (fe->isCopy())
         fe = fe->copyOf();
 
     JS_ASSERT(!fe->type.isConstant());
 
     if (fe->type.inRegister())
         return fe->type.reg();
 
@@ -437,25 +430,25 @@ FrameState::tempRegInMaskForData(FrameEn
 
     RegisterID reg;
     if (fe->data.inRegister()) {
         RegisterID old = fe->data.reg();
         if (Registers::maskReg(old) & mask)
             return old;
 
         /* Keep the old register pinned. */
-        regstate[old].forget();
+        regstate[old].fe = NULL;
         reg = allocReg(mask);
         masm.move(old, reg);
         freeReg(old);
     } else {
         reg = allocReg(mask);
         masm.loadPayload(addressOf(fe), reg);
     }
-    regstate[reg].associate(fe, RematInfo::DATA);
+    regstate[reg] = RegisterState(fe, RematInfo::DATA);
     fe->data.setRegister(reg);
     return reg;
 }
 
 inline JSC::MacroAssembler::RegisterID
 FrameState::tempRegForData(FrameEntry *fe, RegisterID reg, Assembler &masm) const
 {
     JS_ASSERT(!fe->data.isConstant());
@@ -620,52 +613,55 @@ FrameState::testString(Assembler::Condit
         return masm.testString(cond, addressOf(fe));
     return masm.testString(cond, tempRegForType(fe));
 }
 
 inline FrameEntry *
 FrameState::getLocal(uint32 slot)
 {
     uint32 index = nargs + slot;
-    FrameEntry *fe = &entries[index];
-    if (!fe->isTracked()) {
-        addToTracker(fe);
-        fe->resetSynced();
-    }
+    if (FrameEntry *fe = base[index])
+        return fe;
+    FrameEntry *fe = addToTracker(index);
+    fe->resetSynced();
     return fe;
 }
 
 inline void
 FrameState::pinReg(RegisterID reg)
 {
-    regstate[reg].pin();
+    JS_ASSERT(!freeRegs.hasReg(reg));
+    JS_ASSERT(regstate[reg].fe);
+    regstate[reg].save = regstate[reg].fe;
+    regstate[reg].fe = NULL;
 }
 
 inline void
 FrameState::unpinReg(RegisterID reg)
 {
-    regstate[reg].unpin();
-}
-
-inline void
-FrameState::unpinKilledReg(RegisterID reg)
-{
-    regstate[reg].unpinUnsafe();
-    freeRegs.putReg(reg);
+    JS_ASSERT(!freeRegs.hasReg(reg));
+    JS_ASSERT(!regstate[reg].fe);
+    regstate[reg].fe = regstate[reg].save;
 }
 
 inline void
 FrameState::forgetAllRegs(FrameEntry *fe)
 {
     if (fe->type.inRegister())
         forgetReg(fe->type.reg());
     if (fe->data.inRegister())
         forgetReg(fe->data.reg());
 }
 
+inline FrameEntry *
+FrameState::tosFe() const
+{
+    return &entries[uint32(sp - base)];
+}
+
 inline void
 FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs)
 {
     uint32 li = lhs->trackerIndex();
     uint32 ri = rhs->trackerIndex();
     JS_ASSERT(tracker[li] == lhs);
     JS_ASSERT(tracker[ri] == rhs);
     tracker.entries[ri] = lhs;
@@ -710,18 +706,18 @@ FrameState::pushLocal(uint32 n)
         pushCopyOf(indexOfFe(getLocal(n)));
     } else {
 #ifdef DEBUG
         /*
          * We really want to assert on local variables, but in the presence of
          * SETLOCAL equivocation of stack slots, and let expressions, just
          * weakly assert on the fixed local vars.
          */
-        FrameEntry *fe = &locals[n];
-        if (fe->isTracked() && n < script->nfixed) {
+        FrameEntry *fe = base[localIndex(n)];
+        if (fe && n < script->nfixed) {
             JS_ASSERT(fe->type.inMemory());
             JS_ASSERT(fe->data.inMemory());
         }
 #endif
         push(Address(JSFrameReg, sizeof(JSStackFrame) + n * sizeof(Value)));
     }
 }
 
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -59,73 +59,75 @@ FrameState::~FrameState()
 
 bool
 FrameState::init(uint32 nargs)
 {
     this->nargs = nargs;
 
     uint32 nslots = script->nslots + nargs;
     if (!nslots) {
-        sp = spBase = locals = args = NULL;
+        sp = spBase = locals = args = base = NULL;
         return true;
     }
 
-    eval = script->usesEval || cx->compartment->debugMode;
+    uint32 nlocals = script->nslots;
+    if ((eval = script->usesEval || cx->compartment->debugMode))
+        nlocals = 0;
 
-    size_t totalBytes = sizeof(FrameEntry) * nslots +         // entries[]
-                        sizeof(FrameEntry *) * nslots +       // tracker.entries
-                        (eval ? 0 : sizeof(uint32) * nslots); // closedVars[]
-
-    uint8 *cursor = (uint8 *)cx->calloc(totalBytes);
+    uint8 *cursor = (uint8 *)cx->malloc(sizeof(FrameEntry) * nslots +       // entries[]
+                                        sizeof(FrameEntry *) * nslots +     // base[]
+                                        sizeof(FrameEntry *) * nslots +     // tracker.entries[]
+                                        sizeof(uint32) * nlocals            // escaping[]
+                                        );
     if (!cursor)
         return false;
 
     if (!reifier.init(nslots))
         return false;
 
     entries = (FrameEntry *)cursor;
     cursor += sizeof(FrameEntry) * nslots;
 
-    args = entries;
-    locals = args + nargs;
+    base = (FrameEntry **)cursor;
+    args = base;
+    locals = base + nargs;
     spBase = locals + script->nfixed;
     sp = spBase;
+    memset(base, 0, sizeof(FrameEntry *) * nslots);
+    cursor += sizeof(FrameEntry *) * nslots;
 
     tracker.entries = (FrameEntry **)cursor;
     cursor += sizeof(FrameEntry *) * nslots;
 
-    if (!eval && nslots) {
+    if (nlocals) {
         escaping = (uint32 *)cursor;
-        cursor += sizeof(uint32) * nslots;
+        memset(escaping, 0, sizeof(uint32) * nlocals);
     }
 
-    JS_ASSERT(reinterpret_cast<uint8 *>(entries) + totalBytes == cursor);
-
     return true;
 }
 
 void
 FrameState::takeReg(RegisterID reg)
 {
     if (freeRegs.hasReg(reg)) {
         freeRegs.takeReg(reg);
-        JS_ASSERT(!regstate[reg].usedBy());
     } else {
-        JS_ASSERT(regstate[reg].fe());
+        JS_ASSERT(regstate[reg].fe);
         evictReg(reg);
-        regstate[reg].forget();
     }
+    regstate[reg].fe = NULL;
 }
 
 void
 FrameState::evictReg(RegisterID reg)
 {
-    FrameEntry *fe = regstate[reg].fe();
+    FrameEntry *fe = regstate[reg].fe;
 
-    if (regstate[reg].type() == RematInfo::TYPE) {
+    if (regstate[reg].type == RematInfo::TYPE) {
         if (!fe->type.synced()) {
             syncType(fe, addressOf(fe), masm);
             fe->type.sync();
         }
         fe->type.setMemory();
     } else {
         if (!fe->data.synced()) {
             syncData(fe, addressOf(fe), masm);
@@ -146,81 +148,63 @@ FrameState::evictSomeReg(uint32 mask)
     for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
         RegisterID reg = RegisterID(i);
 
         /* Register is not allocatable, don't bother.  */
         if (!(Registers::maskReg(reg) & mask))
             continue;
 
         /* Register is not owned by the FrameState. */
-        FrameEntry *fe = regstate[i].fe();
+        FrameEntry *fe = regstate[i].fe;
         if (!fe)
             continue;
 
         /* Try to find a candidate... that doesn't need spilling. */
 #ifdef DEBUG
         fallbackSet = true;
 #endif
         fallback = reg;
 
-        if (regstate[i].type() == RematInfo::TYPE && fe->type.synced()) {
+        if (regstate[i].type == RematInfo::TYPE && fe->type.synced()) {
             fe->type.setMemory();
             return fallback;
         }
-        if (regstate[i].type() == RematInfo::DATA && fe->data.synced()) {
+        if (regstate[i].type == RematInfo::DATA && fe->data.synced()) {
             fe->data.setMemory();
             return fallback;
         }
     }
 
     JS_ASSERT(fallbackSet);
 
     evictReg(fallback);
     return fallback;
 }
 
 
 void
-FrameState::syncAndForgetEverything()
+FrameState::forgetEverything()
 {
     syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
-    forgetEverything();
+
+    throwaway();
 }
 
+
 void
-FrameState::resetInternalState()
+FrameState::throwaway()
 {
     for (uint32 i = 0; i < tracker.nentries; i++)
-        tracker[i]->untrack();
+        base[indexOfFe(tracker[i])] = NULL;
 
     tracker.reset();
     freeRegs.reset();
 }
 
 void
-FrameState::discardFrame()
-{
-    resetInternalState();
-
-    memset(regstate, 0, sizeof(regstate));
-}
-
-void
-FrameState::forgetEverything()
-{
-    resetInternalState();
-
-#ifdef DEBUG
-    for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
-        JS_ASSERT(!regstate[i].usedBy());
-    }
-#endif
-}
-
-void
 FrameState::storeTo(FrameEntry *fe, Address address, bool popped)
 {
     if (fe->isConstant()) {
         masm.storeValue(fe->getValue(), address);
         return;
     }
 
     if (fe->isCopy())
@@ -264,136 +248,112 @@ FrameState::storeTo(FrameEntry *fe, Addr
 }
 
 #ifdef DEBUG
 void
 FrameState::assertValidRegisterState() const
 {
     Registers checkedFreeRegs;
 
+    FrameEntry *tos = tosFe();
     for (uint32 i = 0; i < tracker.nentries; i++) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
 
         JS_ASSERT(i == fe->trackerIndex());
         JS_ASSERT_IF(fe->isCopy(),
                      fe->trackerIndex() > fe->copyOf()->trackerIndex());
-        JS_ASSERT_IF(fe->isCopy(), fe > fe->copyOf());
         JS_ASSERT_IF(fe->isCopy(), !fe->type.inRegister() && !fe->data.inRegister());
-        JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < sp);
+        JS_ASSERT_IF(fe->isCopy(), fe->copyOf() < tos);
         JS_ASSERT_IF(fe->isCopy(), fe->copyOf()->isCopied());
 
         if (fe->isCopy())
             continue;
         if (fe->type.inRegister()) {
             checkedFreeRegs.takeReg(fe->type.reg());
-            JS_ASSERT(regstate[fe->type.reg()].fe() == fe);
+            JS_ASSERT(regstate[fe->type.reg()].fe == fe);
         }
         if (fe->data.inRegister()) {
             checkedFreeRegs.takeReg(fe->data.reg());
-            JS_ASSERT(regstate[fe->data.reg()].fe() == fe);
+            JS_ASSERT(regstate[fe->data.reg()].fe == fe);
         }
     }
 
     JS_ASSERT(checkedFreeRegs == freeRegs);
-
-    for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
-        JS_ASSERT(!regstate[i].isPinned());
-        JS_ASSERT_IF(regstate[i].fe(), !freeRegs.hasReg(RegisterID(i)));
-        JS_ASSERT_IF(regstate[i].fe(), regstate[i].fe()->isTracked());
-    }
 }
 #endif
 
 void
-FrameState::syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
+FrameState::syncFancy(Assembler &masm, Registers avail, uint32 resumeAt,
                       FrameEntry *bottom) const
 {
-    reifier.reset(&masm, avail, resumeAt, bottom);
+    /* :TODO: can be resumeAt? */
+    reifier.reset(&masm, avail, tracker.nentries, bottom);
 
-    for (FrameEntry *fe = resumeAt; fe >= bottom; fe--) {
-        if (!fe->isTracked())
+    FrameEntry *tos = tosFe();
+    for (uint32 i = resumeAt; i < tracker.nentries; i--) {
+        FrameEntry *fe = tracker[i];
+        if (fe >= tos)
             continue;
 
         reifier.sync(fe);
     }
 }
 
 void
 FrameState::sync(Assembler &masm, Uses uses) const
 {
-    if (!entries)
-        return;
-
-    /* Sync all registers up-front. */
-    for (uint32 i = 0; i < JSC::MacroAssembler::TotalRegisters; i++) {
-        RegisterID reg = RegisterID(i);
-        FrameEntry *fe = regstate[reg].usedBy();
-        if (!fe)
-            continue;
-
-        JS_ASSERT(fe->isTracked());
-
-        if (regstate[reg].type() == RematInfo::DATA) {
-            JS_ASSERT(fe->data.reg() == reg);
-            if (!fe->data.synced())
-                syncData(fe, addressOf(fe), masm);
-        } else {
-            JS_ASSERT(fe->type.reg() == reg);
-            if (!fe->type.synced())
-                syncType(fe, addressOf(fe), masm);
-        }
-    }
-
     /*
      * Keep track of free registers using a bitmask. If we have to drop into
      * syncFancy(), then this mask will help avoid eviction.
      */
     Registers avail(freeRegs);
     Registers temp(Registers::TempRegs);
 
-    FrameEntry *bottom = sp - uses.nuses;
+    FrameEntry *tos = tosFe();
+    FrameEntry *bottom = tos - uses.nuses;
 
-    for (FrameEntry *fe = sp - 1; fe >= bottom; fe--) {
-        if (!fe->isTracked())
+    if (inTryBlock)
+        bottom = NULL;
+
+    for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
+        FrameEntry *fe = tracker[i];
+        if (fe >= tos)
             continue;
 
         Address address = addressOf(fe);
 
         if (!fe->isCopy()) {
-            /*
-             * If this |fe| has registers, track them as available. They've
-             * already been synced. Otherwise, see if a constant needs to be
-             * synced.
-             */
-            if (fe->data.inRegister()) {
+            /* Keep track of registers that can be clobbered. */
+            if (fe->data.inRegister())
                 avail.putReg(fe->data.reg());
-            } else if (!fe->data.synced()) {
+            if (fe->type.inRegister())
+                avail.putReg(fe->type.reg());
+
+            /* Sync. */
+            if (!fe->data.synced() && (fe->data.inRegister() || fe >= bottom)) {
                 syncData(fe, address, masm);
                 if (fe->isConstant())
                     continue;
             }
-
-            if (fe->type.inRegister())
-                avail.putReg(fe->type.reg());
-            else if (!fe->type.synced())
-                syncType(fe, address, masm);
-        } else {
+            if (!fe->type.synced() && (fe->type.inRegister() || fe >= bottom))
+                syncType(fe, addressOf(fe), masm);
+        } else if (fe >= bottom) {
             FrameEntry *backing = fe->copyOf();
             JS_ASSERT(backing != fe);
             JS_ASSERT(!backing->isConstant() && !fe->isConstant());
 
             /*
              * If the copy is backed by something not in a register, fall back
              * to a slower sync algorithm.
              */
             if ((!fe->type.synced() && !backing->type.inRegister()) ||
                 (!fe->data.synced() && !backing->data.inRegister())) {
-                syncFancy(masm, avail, fe, bottom);
+                syncFancy(masm, avail, i, bottom);
                 return;
             }
 
             if (!fe->type.synced()) {
                 /* :TODO: we can do better, the type is learned for all copies. */
                 if (fe->isTypeKnown()) {
                     //JS_ASSERT(fe->getTypeTag() == backing->getTypeTag());
                     masm.storeTypeTag(ImmType(fe->getKnownType()), address);
@@ -406,140 +366,103 @@ FrameState::sync(Assembler &masm, Uses u
                 masm.storePayload(backing->data.reg(), address);
         }
     }
 }
 
 void
 FrameState::syncAndKill(Registers kill, Uses uses, Uses ignore)
 {
-    FrameEntry *spStop = sp - ignore.nuses;
+    /* Backwards, so we can allocate registers to backing slots better. */
+    FrameEntry *tos = tosFe();
+    FrameEntry *bottom = tos - uses.nuses;
 
-    /* Sync all kill-registers up-front. */
-    Registers search(kill.freeMask & ~freeRegs.freeMask);
-    while (!search.empty()) {
-        RegisterID reg = search.takeAnyReg();
-        FrameEntry *fe = regstate[reg].usedBy();
-        if (!fe || fe >= spStop)
-            continue;
-
-        JS_ASSERT(fe->isTracked());
+    tos -= ignore.nuses;
 
-        if (regstate[reg].type() == RematInfo::DATA) {
-            JS_ASSERT(fe->data.reg() == reg);
-            if (!fe->data.synced()) {
-                syncData(fe, addressOf(fe), masm);
-                fe->data.sync();
-            }
-        } else {
-            JS_ASSERT(fe->type.reg() == reg);
-            if (!fe->type.synced()) {
-                syncType(fe, addressOf(fe), masm);
-                fe->type.sync();
-            }
-        }
-    }
+    if (inTryBlock)
+        bottom = NULL;
 
-    uint32 maxvisits = tracker.nentries;
-    FrameEntry *bottom = sp - uses.nuses;
-
-    for (FrameEntry *fe = sp - 1; fe >= bottom && maxvisits; fe--) {
-        if (!fe->isTracked())
-            continue;
-
-        maxvisits--;
-
-        if (fe >= spStop)
+    for (uint32 i = tracker.nentries - 1; i < tracker.nentries; i--) {
+        FrameEntry *fe = tracker[i];
+        if (fe >= tos)
             continue;
 
         Address address = addressOf(fe);
         FrameEntry *backing = fe;
-
-        if (fe->isCopy())
+        if (fe->isCopy()) {
+            if (!inTryBlock && fe < bottom)
+                continue;
             backing = fe->copyOf();
+        }
 
-        if (!fe->data.synced()) {
+        JS_ASSERT_IF(i == 0, !fe->isCopy());
+
+        bool killData = fe->data.inRegister() && kill.hasReg(fe->data.reg());
+        if (!fe->data.synced() && (killData || fe >= bottom)) {
             if (backing != fe && backing->data.inMemory())
                 tempRegForData(backing);
             syncData(backing, address, masm);
             fe->data.sync();
-            if (fe->isConstant() && !fe->type.synced()) {
+            if (fe->isConstant() && !fe->type.synced())
                 fe->type.sync();
-            } else if (fe->data.inRegister() && kill.hasReg(fe->data.reg())) {
+        }
+        if (killData) {
+            JS_ASSERT(backing == fe);
+            JS_ASSERT(fe->data.synced());
+            if (regstate[fe->data.reg()].fe)
                 forgetReg(fe->data.reg());
-                fe->data.setMemory();
-            }
+            fe->data.setMemory();
         }
-        if (!fe->type.synced()) {
+        bool killType = fe->type.inRegister() && kill.hasReg(fe->type.reg());
+        if (!fe->type.synced() && (killType || fe >= bottom)) {
             if (backing != fe && backing->type.inMemory())
                 tempRegForType(backing);
             syncType(backing, address, masm);
             fe->type.sync();
-            if (fe->type.inRegister() && kill.hasReg(fe->type.reg())) {
+        }
+        if (killType) {
+            JS_ASSERT(backing == fe);
+            JS_ASSERT(fe->type.synced());
+            if (regstate[fe->type.reg()].fe)
                 forgetReg(fe->type.reg());
-                fe->type.setMemory();
-            }
+            fe->type.setMemory();
         }
     }
-
-    /*
-     * Anything still alive at this point is guaranteed to be synced. However,
-     * it is necessary to evict temporary registers.
-     */
-    search = Registers(kill.freeMask & ~freeRegs.freeMask);
-    while (!search.empty()) {
-        RegisterID reg = search.takeAnyReg();
-        FrameEntry *fe = regstate[reg].usedBy();
-        if (!fe || fe >= spStop)
-            continue;
+}
 
-        JS_ASSERT(fe->isTracked());
-
-        if (regstate[reg].type() == RematInfo::DATA) {
-            JS_ASSERT(fe->data.reg() == reg);
-            JS_ASSERT(fe->data.synced());
-            fe->data.setMemory();
-        } else {
-            JS_ASSERT(fe->type.reg() == reg);
-            JS_ASSERT(fe->type.synced());
-            fe->type.setMemory();
-        }
-
-        forgetReg(reg);
-    }
+void
+FrameState::resetRegState()
+{
+    freeRegs = Registers();
 }
 
 void
 FrameState::merge(Assembler &masm, Changes changes) const
 {
-    Registers search(Registers::AvailRegs & ~freeRegs.freeMask);
+    FrameEntry *tos = tosFe();
+    Registers temp(Registers::TempRegs);
 
-    while (!search.empty()) {
-        RegisterID reg = search.peekReg();
-        FrameEntry *fe = regstate[reg].usedBy();
+    for (uint32 i = 0; i < tracker.nentries; i++) {
+        FrameEntry *fe = tracker[i];
+        if (fe >= tos)
+            continue;
 
-        if (!fe) {
-            search.takeReg(reg);
+        /* Copies do not have registers. */
+        if (fe->isCopy()) {
+            JS_ASSERT(!fe->data.inRegister());
+            JS_ASSERT(!fe->type.inRegister());
             continue;
         }
 
-        if (fe->data.inRegister() && fe->type.inRegister()) {
-            search.takeReg(fe->data.reg());
-            search.takeReg(fe->type.reg());
+        if (fe->data.inRegister() && fe->type.inRegister())
             masm.loadValueAsComponents(addressOf(fe), fe->type.reg(), fe->data.reg());
-        } else {
-            if (fe->data.inRegister()) {
-                search.takeReg(fe->data.reg());
-                masm.loadPayload(addressOf(fe), fe->data.reg());
-            }
-            if (fe->type.inRegister()) {
-                search.takeReg(fe->type.reg());
-                masm.loadTypeTag(addressOf(fe), fe->type.reg());
-            }
-        }
+        else if (fe->data.inRegister())
+            masm.loadPayload(addressOf(fe), fe->data.reg());
+        else if (fe->type.inRegister())
+            masm.loadTypeTag(addressOf(fe), fe->type.reg());
     }
 }
 
 JSC::MacroAssembler::RegisterID
 FrameState::copyDataIntoReg(FrameEntry *fe)
 {
     return copyDataIntoReg(this->masm, fe);
 }
@@ -560,19 +483,19 @@ FrameState::copyDataIntoReg(FrameEntry *
         if (freeRegs.empty()) {
             if (!fe->data.synced())
                 syncData(fe, addressOf(fe), masm);
             fe->data.setMemory();
         } else {
             reg = allocReg();
             masm.move(hint, reg);
             fe->data.setRegister(reg);
-            regstate[reg].associate(regstate[hint].fe(), RematInfo::DATA);
+            regstate[reg] = regstate[hint];
         }
-        regstate[hint].forget();
+        regstate[hint].fe = NULL;
     } else {
         pinReg(reg);
         takeReg(hint);
         unpinReg(reg);
         masm.move(reg, hint);
     }
 }
 
@@ -585,17 +508,17 @@ FrameState::copyDataIntoReg(Assembler &m
         fe = fe->copyOf();
 
     if (fe->data.inRegister()) {
         RegisterID reg = fe->data.reg();
         if (freeRegs.empty()) {
             if (!fe->data.synced())
                 syncData(fe, addressOf(fe), masm);
             fe->data.setMemory();
-            regstate[reg].forget();
+            regstate[reg].fe = NULL;
         } else {
             RegisterID newReg = allocReg();
             masm.move(reg, newReg);
             reg = newReg;
         }
         return reg;
     }
 
@@ -618,17 +541,17 @@ FrameState::copyTypeIntoReg(FrameEntry *
         fe = fe->copyOf();
 
     if (fe->type.inRegister()) {
         RegisterID reg = fe->type.reg();
         if (freeRegs.empty()) {
             if (!fe->type.synced())
                 syncType(fe, addressOf(fe), masm);
             fe->type.setMemory();
-            regstate[reg].forget();
+            regstate[reg].fe = NULL;
         } else {
             RegisterID newReg = allocReg();
             masm.move(reg, newReg);
             reg = newReg;
         }
         return reg;
     }
 
@@ -705,31 +628,30 @@ FrameState::ownRegForType(FrameEntry *fe
         }
 
         if (freeRegs.empty()) {
             /* For now... just steal the register that already exists. */
             if (!backing->type.synced())
                 syncType(backing, addressOf(backing), masm);
             reg = backing->type.reg();
             backing->type.setMemory();
-            regstate[reg].forget();
+            moveOwnership(reg, NULL);
         } else {
             reg = allocReg();
             masm.move(backing->type.reg(), reg);
         }
         return reg;
     }
 
     if (fe->type.inRegister()) {
         reg = fe->type.reg();
-
         /* Remove ownership of this register. */
-        JS_ASSERT(regstate[reg].fe() == fe);
-        JS_ASSERT(regstate[reg].type() == RematInfo::TYPE);
-        regstate[reg].forget();
+        JS_ASSERT(regstate[reg].fe == fe);
+        JS_ASSERT(regstate[reg].type == RematInfo::TYPE);
+        regstate[reg].fe = NULL;
         fe->type.invalidate();
     } else {
         JS_ASSERT(fe->type.inMemory());
         reg = allocReg();
         masm.loadTypeTag(addressOf(fe), reg);
     }
     return reg;
 }
@@ -749,17 +671,17 @@ FrameState::ownRegForData(FrameEntry *fe
         }
 
         if (freeRegs.empty()) {
             /* For now... just steal the register that already exists. */
             if (!backing->data.synced())
                 syncData(backing, addressOf(backing), masm);
             reg = backing->data.reg();
             backing->data.setMemory();
-            regstate[reg].forget();
+            moveOwnership(reg, NULL);
         } else {
             reg = allocReg();
             masm.move(backing->data.reg(), reg);
         }
         return reg;
     }
 
     if (fe->isCopied()) {
@@ -769,19 +691,19 @@ FrameState::ownRegForData(FrameEntry *fe
             fe->data.invalidate();
             return copyDataIntoReg(copy);
         }
     }
     
     if (fe->data.inRegister()) {
         reg = fe->data.reg();
         /* Remove ownership of this register. */
-        JS_ASSERT(regstate[reg].fe() == fe);
-        JS_ASSERT(regstate[reg].type() == RematInfo::DATA);
-        regstate[reg].forget();
+        JS_ASSERT(regstate[reg].fe == fe);
+        JS_ASSERT(regstate[reg].type == RematInfo::DATA);
+        regstate[reg].fe = NULL;
         fe->data.invalidate();
     } else {
         JS_ASSERT(fe->data.inMemory());
         reg = allocReg();
         masm.loadPayload(addressOf(fe), reg);
     }
     return reg;
 }
@@ -812,53 +734,73 @@ FrameState::pushCopyOf(uint32 index)
         /* Maintain tracker ordering guarantees for copies. */
         JS_ASSERT(backing->isCopied());
         if (fe->trackerIndex() < backing->trackerIndex())
             swapInTracker(fe, backing);
     }
 }
 
 FrameEntry *
-FrameState::walkTrackerForUncopy(FrameEntry *original)
+FrameState::uncopy(FrameEntry *original)
 {
+    JS_ASSERT(original->isCopied());
+
+    /*
+     * Copies have two critical invariants:
+     *  1) The backing store precedes all copies in the tracker.
+     *  2) The backing store of a copy cannot be popped from the stack
+     *     while the copy is still live.
+     *
+     * Maintaining this invariant iteratively is kind of hard, so we choose
+     * the "lowest" copy in the frame up-front.
+     *
+     * For example, if the stack is:
+     *    [A, B, C, D]
+     * And the tracker has:
+     *    [A, D, C, B]
+     *
+     * If B, C, and D are copies of A - we will walk the tracker to the end
+     * and select D, not B (see bug 583684).
+     */
     uint32 firstCopy = InvalidIndex;
+    FrameEntry *tos = tosFe();
     FrameEntry *bestFe = NULL;
     uint32 ncopies = 0;
-    for (uint32 i = original->trackerIndex() + 1; i < tracker.nentries; i++) {
+    for (uint32 i = 0; i < tracker.nentries; i++) {
         FrameEntry *fe = tracker[i];
-        if (fe >= sp)
+        if (fe >= tos)
             continue;
         if (fe->isCopy() && fe->copyOf() == original) {
             if (firstCopy == InvalidIndex) {
                 firstCopy = i;
                 bestFe = fe;
             } else if (fe < bestFe) {
                 bestFe = fe;
             }
             ncopies++;
         }
     }
 
     if (!ncopies) {
         JS_ASSERT(firstCopy == InvalidIndex);
         JS_ASSERT(!bestFe);
+        original->copied = false;
         return NULL;
     }
 
     JS_ASSERT(firstCopy != InvalidIndex);
     JS_ASSERT(bestFe);
-    JS_ASSERT(bestFe > original);
 
     /* Mark all extra copies as copies of the new backing index. */
     bestFe->setCopyOf(NULL);
     if (ncopies > 1) {
         bestFe->setCopied();
         for (uint32 i = firstCopy; i < tracker.nentries; i++) {
             FrameEntry *other = tracker[i];
-            if (other >= sp || other == bestFe)
+            if (other >= tos || other == bestFe)
                 continue;
 
             /* The original must be tracked before copies. */
             JS_ASSERT(other != original);
 
             if (!other->isCopy() || other->copyOf() != original)
                 continue;
 
@@ -873,229 +815,123 @@ FrameState::walkTrackerForUncopy(FrameEn
              */
             if (other->trackerIndex() < bestFe->trackerIndex())
                 swapInTracker(bestFe, other);
         }
     } else {
         bestFe->setNotCopied();
     }
 
-    return bestFe;
-}
-
-FrameEntry *
-FrameState::walkFrameForUncopy(FrameEntry *original)
-{
-    FrameEntry *bestFe = NULL;
-    uint32 ncopies = 0;
-
-    /* It's only necessary to visit as many FEs are being tracked. */
-    uint32 maxvisits = tracker.nentries;
-
-    for (FrameEntry *fe = original + 1; fe < sp && maxvisits; fe++) {
-        if (!fe->isTracked())
-            continue;
-
-        maxvisits--;
-
-        if (fe->isCopy() && fe->copyOf() == original) {
-            if (!bestFe) {
-                bestFe = fe;
-                bestFe->setCopyOf(NULL);
-            } else {
-                fe->setCopyOf(bestFe);
-                if (fe->trackerIndex() < bestFe->trackerIndex())
-                    swapInTracker(bestFe, fe);
-            }
-            ncopies++;
-        }
-    }
-
-    if (ncopies)
-        bestFe->setCopied();
-
-    return bestFe;
-}
-
-FrameEntry *
-FrameState::uncopy(FrameEntry *original)
-{
-    JS_ASSERT(original->isCopied());
-
-    /*
-     * Copies have three critical invariants:
-     *  1) The backing store precedes all copies in the tracker.
-     *  2) The backing store precedes all copies in the FrameState.
-     *  3) The backing store of a copy cannot be popped from the stack
-     *     while the copy is still live.
-     *
-     * Maintaining this invariant iteratively is kind of hard, so we choose
-     * the "lowest" copy in the frame up-front.
-     *
-     * For example, if the stack is:
-     *    [A, B, C, D]
-     * And the tracker has:
-     *    [A, D, C, B]
-     *
-     * If B, C, and D are copies of A - we will walk the tracker to the end
-     * and select B, not D (see bug 583684).
-     *
-     * Note: |tracker.nentries <= (nslots + nargs)|. However, this walk is
-     * sub-optimal if |tracker.nentries - original->trackerIndex() > sp - original|.
-     * With large scripts this may be a problem worth investigating. Note that
-     * the tracker is walked twice, so we multiply by 2 for pessimism.
-     */
-    FrameEntry *fe;
-    if ((tracker.nentries - original->trackerIndex()) * 2 > uint32(sp - original))
-        fe = walkFrameForUncopy(original);
-    else
-        fe = walkTrackerForUncopy(original);
-    if (!fe) {
-        original->setNotCopied();
-        return NULL;
-    }
+    FrameEntry *fe = bestFe;
 
     /*
      * Switch the new backing store to the old backing store. During
      * this process we also necessarily make sure the copy can be
      * synced.
      */
     if (!original->isTypeKnown()) {
         /*
          * If the copy is unsynced, and the original is in memory,
          * give the original a register. We do this below too; it's
          * okay if it's spilled.
          */
         if (original->type.inMemory() && !fe->type.synced())
             tempRegForType(original);
         fe->type.inherit(original->type);
         if (fe->type.inRegister())
-            regstate[fe->type.reg()].reassociate(fe);
+            moveOwnership(fe->type.reg(), fe);
     } else {
         JS_ASSERT(fe->isTypeKnown());
         JS_ASSERT(fe->getKnownType() == original->getKnownType());
     }
     if (original->data.inMemory() && !fe->data.synced())
         tempRegForData(original);
     fe->data.inherit(original->data);
     if (fe->data.inRegister())
-        regstate[fe->data.reg()].reassociate(fe);
+        moveOwnership(fe->data.reg(), fe);
 
     return fe;
 }
 
 void
 FrameState::storeLocal(uint32 n, bool popGuaranteed, bool typeChange)
 {
-    FrameEntry *local = getLocal(n);
-
-    storeTop(local, popGuaranteed, typeChange);
+    FrameEntry *localFe = getLocal(n);
+    bool cacheable = !eval && !escaping[n];
 
-    bool closed = eval || escaping[n];
-    if (closed || inTryBlock) {
-        /* Ensure that the local variable remains synced. */
-        if (local->isCopy()) {
-            FrameEntry *backing = local->copyOf();
-            if (!local->data.synced()) {
-                if (backing->data.inMemory())
-                    tempRegForData(backing);
-                syncData(backing, addressOf(local), masm);
-            }
-            if (!local->type.synced()) {
-                if (backing->type.inMemory())
-                    tempRegForType(backing);
-                syncType(backing, addressOf(local), masm);
-            }
-        } else if (local->isConstant()) {
-            if (!local->data.synced())
-                syncData(local, addressOf(local), masm);
-        } else {
-            if (!local->data.synced()) {
-                syncData(local, addressOf(local), masm);
-                local->data.sync();
-            }
-            if (!local->type.synced()) {
-                syncType(local, addressOf(local), masm);
-                local->type.sync();
-            }
-            if (closed)
-                forgetEntry(local);
-        }
+    if (!popGuaranteed && !cacheable) {
+        JS_ASSERT_IF(base[localIndex(n)] && (!eval || n < script->nfixed),
+                     entries[localIndex(n)].type.inMemory() &&
+                     entries[localIndex(n)].data.inMemory());
+        Address local(JSFrameReg, sizeof(JSStackFrame) + n * sizeof(Value));
+        storeTo(peek(-1), local, false);
+        forgetAllRegs(getLocal(n));
+        localFe->resetSynced();
+        return;
+    }
 
-        if (closed)
-            local->resetSynced();
-    }
-}
+    bool wasSynced = localFe->type.synced();
 
-void
-FrameState::forgetEntry(FrameEntry *fe)
-{
-    if (fe->isCopied()) {
-        uncopy(fe);
-        if (!fe->isCopied())
-            forgetAllRegs(fe);
-    } else {
-        forgetAllRegs(fe);
-    }
-}
-
-void
-FrameState::storeTop(FrameEntry *target, bool popGuaranteed, bool typeChange)
-{
-    bool wasSynced = target->type.synced();
     /* Detect something like (x = x) which is a no-op. */
     FrameEntry *top = peek(-1);
-    if (top->isCopy() && top->copyOf() == target) {
-        JS_ASSERT(target->isCopied());
+    if (top->isCopy() && top->copyOf() == localFe) {
+        JS_ASSERT(localFe->isCopied());
         return;
     }
 
     /* Completely invalidate the local variable. */
-    forgetEntry(target);
-    target->resetUnsynced();
+    if (localFe->isCopied()) {
+        uncopy(localFe);
+        if (!localFe->isCopied())
+            forgetAllRegs(localFe);
+    } else {
+        forgetAllRegs(localFe);
+    }
+
+    localFe->resetUnsynced();
 
     /* Constants are easy to propagate. */
     if (top->isConstant()) {
-        target->setCopyOf(NULL);
-        target->setNotCopied();
-        target->setConstant(Jsvalify(top->getValue()));
+        localFe->setCopyOf(NULL);
+        localFe->setNotCopied();
+        localFe->setConstant(Jsvalify(top->getValue()));
         return;
     }
 
     /*
-     * When dealing with copies, there are three important invariants:
+     * When dealing with copies, there are two important invariants:
      *
      * 1) The backing store precedes all copies in the tracker.
-     * 2) The backing store precedes all copies in the FrameState.
      * 2) The backing store of a local is never a stack slot, UNLESS the local
      *    variable itself is a stack slot (blocks) that precedes the stack
      *    slot.
      *
      * If the top is a copy, and the second condition holds true, the local
      * can be rewritten as a copy of the original backing slot. If the first
      * condition does not hold, force it to hold by swapping in-place.
      */
     FrameEntry *backing = top;
-    bool copied = false;
     if (top->isCopy()) {
         backing = top->copyOf();
         JS_ASSERT(backing->trackerIndex() < top->trackerIndex());
 
-        if (backing < target) {
+        uint32 backingIndex = indexOfFe(backing);
+        uint32 tol = uint32(spBase - base);
+        if (backingIndex < tol || backingIndex < localIndex(n)) {
             /* local.idx < backing.idx means local cannot be a copy yet */
-            if (target->trackerIndex() < backing->trackerIndex())
-                swapInTracker(backing, target);
-            target->setNotCopied();
-            target->setCopyOf(backing);
+            if (localFe->trackerIndex() < backing->trackerIndex())
+                swapInTracker(backing, localFe);
+            localFe->setNotCopied();
+            localFe->setCopyOf(backing);
             if (backing->isTypeKnown())
-                target->setType(backing->getKnownType());
+                localFe->setType(backing->getKnownType());
             else
-                target->type.invalidate();
-            target->data.invalidate();
-            target->isNumber = backing->isNumber;
+                localFe->type.invalidate();
+            localFe->data.invalidate();
+            localFe->isNumber = backing->isNumber;
             return;
         }
 
         /*
          * If control flow lands here, then there was a bytecode sequence like
          *
          *  ENTERBLOCK 2
          *  GETLOCAL 1
@@ -1107,94 +943,93 @@ FrameState::storeTop(FrameEntry *target,
          * 
          * Because of |let| expressions, it's kind of hard to really know
          * whether a region on the stack will be popped all at once. Bleh!
          *
          * This should be rare except in browser code (and maybe even then),
          * but even so there's a quick workaround. We take all copies of the
          * backing fe, and redirect them to be copies of the destination.
          */
+        FrameEntry *tos = tosFe();
         for (uint32 i = backing->trackerIndex() + 1; i < tracker.nentries; i++) {
             FrameEntry *fe = tracker[i];
-            if (fe >= sp)
+            if (fe >= tos)
                 continue;
-            if (fe->isCopy() && fe->copyOf() == backing) {
-                fe->setCopyOf(target);
-                copied = true;
-            }
+            if (fe->isCopy() && fe->copyOf() == backing)
+                fe->setCopyOf(localFe);
         }
     }
     backing->setNotCopied();
     
     /*
      * This is valid from the top->isCopy() path because we're guaranteed a
      * consistent ordering - all copies of |backing| are tracked after 
      * |backing|. Transitively, only one swap is needed.
      */
-    if (backing->trackerIndex() < target->trackerIndex())
-        swapInTracker(backing, target);
+    if (backing->trackerIndex() < localFe->trackerIndex())
+        swapInTracker(backing, localFe);
 
     /*
      * Move the backing store down - we spill registers here, but we could be
      * smarter and re-use the type reg.
      */
     RegisterID reg = tempRegForData(backing);
-    target->data.setRegister(reg);
-    regstate[reg].reassociate(target);
+    localFe->data.setRegister(reg);
+    moveOwnership(reg, localFe);
 
     if (typeChange) {
         if (backing->isTypeKnown()) {
-            target->setType(backing->getKnownType());
+            localFe->setType(backing->getKnownType());
         } else {
             RegisterID reg = tempRegForType(backing);
-            target->type.setRegister(reg);
-            regstate[reg].reassociate(target);
+            localFe->type.setRegister(reg);
+            moveOwnership(reg, localFe);
         }
     } else {
         if (!wasSynced)
-            masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(target));
-        target->type.setMemory();
+            masm.storeTypeTag(ImmType(backing->getKnownType()), addressOf(localFe));
+        localFe->type.setMemory();
     }
 
     if (!backing->isTypeKnown())
         backing->type.invalidate();
     backing->data.invalidate();
-    backing->setCopyOf(target);
-    backing->isNumber = target->isNumber;
-
-    JS_ASSERT(top->copyOf() == target);
+    backing->setCopyOf(localFe);
+    backing->isNumber = localFe->isNumber;
+    localFe->setCopied();
 
-    /*
-     * Right now, |backing| is a copy of |target| (note the reversal), but
-     * |target| is not marked as copied. This is an optimization so uncopy()
-     * may avoid frame traversal.
-     *
-     * There are two cases where we must set the copy bit, however:
-     *  - The fixup phase redirected more copies to |target|.
-     *  - An immediate pop is not guaranteed.
-     */
-    if (copied || !popGuaranteed)
-        target->setCopied();
+    if (!cacheable) {
+        /* TODO: x64 optimization */
+        if (!localFe->type.synced())
+            syncType(localFe, addressOf(localFe), masm);
+        if (!localFe->data.synced())
+            syncData(localFe, addressOf(localFe), masm);
+        forgetAllRegs(localFe);
+        localFe->type.setMemory();
+        localFe->data.setMemory();
+    }
+
+    JS_ASSERT(top->copyOf() == localFe);
 }
 
 void
 FrameState::shimmy(uint32 n)
 {
     JS_ASSERT(sp - n >= spBase);
     int32 depth = 0 - int32(n);
-    storeTop(&sp[depth - 1], true);
+    storeLocal(uint32(&sp[depth - 1] - locals), true);
     popn(n);
 }
 
 void
 FrameState::shift(int32 n)
 {
     JS_ASSERT(n < 0);
     JS_ASSERT(sp + n - 1 >= spBase);
-    storeTop(&sp[n - 1], true);
+    storeLocal(uint32(&sp[n - 1] - locals), true);
     pop();
 }
 
 void
 FrameState::pinEntry(FrameEntry *fe, ValueRemat &vr)
 {
     vr.isDataSynced = fe->data.synced();
     vr.isTypeSynced = fe->type.synced();
--- a/js/src/methodjit/FrameState.h
+++ b/js/src/methodjit/FrameState.h
@@ -170,113 +170,32 @@ class FrameState
             JS_ASSERT(n < nentries);
             return entries[n];
         }
 
         FrameEntry **entries;
         uint32 nentries;
     };
 
-    /*
-     * Some RegisterState invariants.
-     *
-     *  If |fe| is non-NULL, |save| is NULL.
-     *  If |save| is non-NULL, |fe| is NULL.
-     *  That is, both |fe| and |save| cannot be non-NULL.
-     *
-     *  If either |fe| or |save| is non-NULL, the register is not in freeRegs.
-     *  If both |fe| and |save| are NULL, the register is either in freeRegs,
-     *  or owned by the compiler.
-     */
     struct RegisterState {
-        RegisterState() : fe_(NULL), save_(NULL)
+        RegisterState()
         { }
 
         RegisterState(FrameEntry *fe, RematInfo::RematType type)
-          : fe_(fe), save_(NULL), type_(type)
-        {
-            JS_ASSERT(!save_);
-        }
-
-        bool isPinned() const {
-            assertConsistency();
-            return !!save_;
-        }
-
-        void assertConsistency() const {
-            JS_ASSERT_IF(fe_, !save_);
-            JS_ASSERT_IF(save_, !fe_);
-        }
-
-        FrameEntry *fe() const {
-            assertConsistency();
-            return fe_;
-        }
-
-        RematInfo::RematType type() const {
-            assertConsistency();
-            return type_;
-        }
-
-        FrameEntry *usedBy() const {
-            if (fe_)
-                return fe_;
-            return save_;
-        }
-
-        void associate(FrameEntry *fe, RematInfo::RematType type) {
-            JS_ASSERT(!fe_);
-            JS_ASSERT(!save_);
+          : fe(fe), type(type)
+        { }
 
-            fe_ = fe;
-            type_ = type;
-            JS_ASSERT(!save_);
-        }
-
-        /* Change ownership. */
-        void reassociate(FrameEntry *fe) {
-            assertConsistency();
-            JS_ASSERT(fe);
-
-            fe_ = fe;
-        }
-
-        /* Unassociate this register from the FE. */
-        void forget() {
-            JS_ASSERT(fe_);
-            fe_ = NULL;
-            JS_ASSERT(!save_);
-        }
-
-        void pin() {
-            assertConsistency();
-            save_ = fe_;
-            fe_ = NULL;
-        }
-
-        void unpin() {
-            assertConsistency();
-            fe_ = save_;
-            save_ = NULL;
-        }
-
-        void unpinUnsafe() {
-            assertConsistency();
-            save_ = NULL;
-        }
-
-      private:
         /* FrameEntry owning this register, or NULL if not owned by a frame. */
-        FrameEntry *fe_;
+        FrameEntry *fe;
 
         /* Hack - simplifies register allocation for pairs. */
-        FrameEntry *save_;
+        FrameEntry *save;
         
         /* Part of the FrameEntry that owns the FE. */
-        RematInfo::RematType type_;
+        RematInfo::RematType type;
     };
 
   public:
     FrameState(JSContext *cx, JSScript *script, Assembler &masm);
     ~FrameState();
     bool init(uint32 nargs);
 
     /*
@@ -581,20 +500,19 @@ class FrameState
 
     /*
      * Fully stores a FrameEntry at an arbitrary address. popHint specifies
      * how hard the register allocator should try to keep the FE in registers.
      */
     void storeTo(FrameEntry *fe, Address address, bool popHint);
 
     /*
-     * Stores the top stack slot back to a slot.
+     * Stores the top stack slot back to a local variable.
      */
     void storeLocal(uint32 n, bool popGuaranteed = false, bool typeChange = true);
-    void storeTop(FrameEntry *target, bool popGuaranteed = false, bool typeChange = true);
 
     /*
      * Restores state from a slow path.
      */
     void merge(Assembler &masm, Changes changes) const;
 
     /*
      * Writes unsynced stores to an arbitrary buffer.
@@ -603,44 +521,38 @@ class FrameState
 
     /*
      * Syncs all outstanding stores to memory and possibly kills regs in the
      * process.  The top [ignored..uses-1] frame entries will be synced.
      */
     void syncAndKill(Registers kill, Uses uses, Uses ignored);
     void syncAndKill(Registers kill, Uses uses) { syncAndKill(kill, uses, Uses(0)); }
 
-    /* Syncs and kills everything. */
-    void syncAndKillEverything() {
-        syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
-    }
+    /*
+     * Reset the register state.
+     */
+    void resetRegState();
 
     /*
      * Clear all tracker entries, syncing all outstanding stores in the process.
      * The stack depth is in case some merge points' edges did not immediately
      * precede the current instruction.
      */
-    inline void syncAndForgetEverything(uint32 newStackDepth);
+    inline void forgetEverything(uint32 newStackDepth);
 
     /*
      * Same as above, except the stack depth is not changed. This is used for
      * branching opcodes.
      */
-    void syncAndForgetEverything();
+    void forgetEverything();
 
     /*
      * Throw away the entire frame state, without syncing anything.
-     * This can only be called after a syncAndKill() against all registers.
      */
-    void forgetEverything();
-
-    /*
-     * Discard the entire framestate forcefully.
-     */
-    void discardFrame();
+    void throwaway();
 
     /*
      * Mark an existing slot with a type.
      */
     inline void learnType(FrameEntry *fe, JSValueType type);
 
     /*
      * Forget a type, syncing in the process.
@@ -686,33 +598,27 @@ class FrameState
     /*
      * Helper function. Tests if a slot's type is primitve. Condition should
      * be Equal or NotEqual.
      */
     inline Jump testPrimitive(Assembler::Condition cond, FrameEntry *fe);
 
     /*
      * Marks a register such that it cannot be spilled by the register
-     * allocator. Any pinned registers must be unpinned at the end of the op,
-     * no matter what. In addition, pinReg() can only be used on registers
-     * which are associated with FrameEntries.
+     * allocator. Any pinned registers must be unpinned at the end of the op.
+     * Note: This function should only be used on registers tied to FEs.
      */
     inline void pinReg(RegisterID reg);
 
     /*
      * Unpins a previously pinned register.
      */
     inline void unpinReg(RegisterID reg);
 
     /*
-     * Same as unpinReg(), but does not restore the FrameEntry.
-     */
-    inline void unpinKilledReg(RegisterID reg);
-
-    /*
      * Dups the top item on the stack.
      */
     inline void dup();
 
     /*
      * Dups the top 2 items on the stack.
      */
     inline void dup2();
@@ -728,16 +634,17 @@ class FrameState
      */
     inline void giveOwnRegs(FrameEntry *fe);
 
     /*
      * Returns the current stack depth of the frame.
      */
     uint32 stackDepth() const { return sp - spBase; }
     uint32 frameDepth() const { return stackDepth() + script->nfixed; }
+    inline FrameEntry *tosFe() const;
 
 #ifdef DEBUG
     void assertValidRegisterState() const;
 #endif
 
     Address addressOf(const FrameEntry *fe) const;
     Address addressForDataRemat(const FrameEntry *fe) const;
 
@@ -775,87 +682,85 @@ class FrameState
     }
 
   private:
     inline RegisterID allocReg(FrameEntry *fe, RematInfo::RematType type);
     inline void forgetReg(RegisterID reg);
     RegisterID evictSomeReg(uint32 mask);
     void evictReg(RegisterID reg);
     inline FrameEntry *rawPush();
-    inline void addToTracker(FrameEntry *fe);
+    inline FrameEntry *addToTracker(uint32 index);
     inline void syncType(const FrameEntry *fe, Address to, Assembler &masm) const;
     inline void syncData(const FrameEntry *fe, Address to, Assembler &masm) const;
     inline FrameEntry *getLocal(uint32 slot);
     inline void forgetAllRegs(FrameEntry *fe);
     inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs);
     inline uint32 localIndex(uint32 n);
     void pushCopyOf(uint32 index);
-    void syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
+    void syncFancy(Assembler &masm, Registers avail, uint32 resumeAt,
                    FrameEntry *bottom) const;
     inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const;
-    void resetInternalState();
 
     /*
      * "Uncopies" the backing store of a FrameEntry that has been copied. The
      * original FrameEntry is not invalidated; this is the responsibility of
      * the caller. The caller can check isCopied() to see if the registers
      * were moved to a copy.
      *
      * Later addition: uncopy() returns the first copy found.
      */
     FrameEntry *uncopy(FrameEntry *original);
-    FrameEntry *walkTrackerForUncopy(FrameEntry *original);
-    FrameEntry *walkFrameForUncopy(FrameEntry *original);
-
-    /*
-     * All registers in the FE are forgotten. If it is copied, it is uncopied
-     * beforehand.
-     */
-    void forgetEntry(FrameEntry *fe);
 
     FrameEntry *entryFor(uint32 index) const {
-        JS_ASSERT(entries[index].isTracked());
+        JS_ASSERT(base[index]);
         return &entries[index];
     }
 
+    void moveOwnership(RegisterID reg, FrameEntry *newFe) {
+        regstate[reg].fe = newFe;
+    }
+
     RegisterID evictSomeReg() {
         return evictSomeReg(Registers::AvailRegs);
     }
 
     uint32 indexOf(int32 depth) {
-        return uint32((sp + depth) - entries);
+        return uint32((sp + depth) - base);
     }
 
-    uint32 indexOfFe(FrameEntry *fe) const {
+    uint32 indexOfFe(FrameEntry *fe) {
         return uint32(fe - entries);
     }
 
   private:
     JSContext *cx;
     JSScript *script;
     uint32 nargs;
     Assembler &masm;
 
     /* All allocated registers. */
     Registers freeRegs;
 
     /* Cache of FrameEntry objects. */
     FrameEntry *entries;
 
+    /* Base pointer of the FrameEntry vector. */
+    FrameEntry **base;
+
     /* Base pointer for arguments. */
-    FrameEntry *args;
+    FrameEntry **args;
 
     /* Base pointer for local variables. */
-    FrameEntry *locals;
+    FrameEntry **locals;
 
     /* Base pointer for the stack. */
-    FrameEntry *spBase;
+    FrameEntry **spBase;
 
     /* Dynamic stack pointer. */
-    FrameEntry *sp;
+    FrameEntry **sp;
 
     /* Vector of tracked slot indexes. */
     Tracker tracker;
 
     /*
      * Register ownership state. This can't be used alone; to find whether an
      * entry is active, you must check the allocated registers.
      */
--- a/js/src/methodjit/ImmutableSync.cpp
+++ b/js/src/methodjit/ImmutableSync.cpp
@@ -40,40 +40,41 @@
 #include "FrameState.h"
 #include "FrameState-inl.h"
 #include "ImmutableSync.h"
 
 using namespace js;
 using namespace js::mjit;
 
 ImmutableSync::ImmutableSync(JSContext *cx, const FrameState &frame)
-  : cx(cx), entries(NULL), frame(frame), generation(0)
+  : cx(cx), entries(NULL), frame(frame)
 {
 }
 
 ImmutableSync::~ImmutableSync()
 {
     cx->free(entries);
 }
 
 bool
 ImmutableSync::init(uint32 nentries)
 {
-    entries = (SyncEntry *)cx->calloc(sizeof(SyncEntry) * nentries);
+    entries = (SyncEntry *)cx->malloc(sizeof(SyncEntry) * nentries);
     return !!entries;
 }
 
 void
-ImmutableSync::reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom)
+ImmutableSync::reset(Assembler *masm, Registers avail, uint32 n,
+                     FrameEntry *bottom)
 {
     this->avail = avail;
+    this->nentries = n;
     this->masm = masm;
-    this->top = top;
     this->bottom = bottom;
-    this->generation++;
+    memset(entries, 0, sizeof(SyncEntry) * nentries);
     memset(regs, 0, sizeof(regs));
 }
 
 JSC::MacroAssembler::RegisterID
 ImmutableSync::allocReg()
 {
     if (!avail.empty())
         return avail.takeAnyReg();
@@ -86,40 +87,51 @@ ImmutableSync::allocReg()
         RegisterID reg = RegisterID(i);
         if (!(Registers::maskReg(reg) & Registers::AvailRegs))
             continue;
 
         lastResort = 0;
 
         if (!regs[i]) {
             /* If the frame does not own this register, take it! */
-            FrameEntry *fe = frame.regstate[i].fe();
+            FrameEntry *fe = frame.regstate[i].fe;
             if (!fe)
                 return reg;
 
+            /*
+             * The Reifier does not own this register, but the frame does.
+             * This must mean that we've not yet processed this entry, and
+             * that it's data has not been clobbered.
+             */
+            JS_ASSERT(fe->trackerIndex() < nentries);
+
             evictFromFrame = i;
 
             /*
              * If not copied, we can sync and not have to load again later.
              * That's about as good as it gets, so just break out now.
              */
             if (!fe->isCopied())
                 break;
         }
     }
 
     if (evictFromFrame != FrameState::InvalidIndex) {
-        FrameEntry *fe = frame.regstate[evictFromFrame].fe();
+        FrameEntry *fe = frame.regstate[evictFromFrame].fe;
         SyncEntry &e = entryFor(fe);
-        if (frame.regstate[evictFromFrame].type() == RematInfo::TYPE) {
+        if (frame.regstate[evictFromFrame].type == RematInfo::TYPE) {
             JS_ASSERT(!e.typeClobbered);
+            e.typeSynced = true;
             e.typeClobbered = true;
+            masm->storeTypeTag(fe->type.reg(), frame.addressOf(fe));
         } else {
             JS_ASSERT(!e.dataClobbered);
+            e.dataSynced = true;
             e.dataClobbered = true;
+            masm->storePayload(fe->data.reg(), frame.addressOf(fe));
         }
         return RegisterID(evictFromFrame);
     }
 
     JS_ASSERT(lastResort != FrameState::InvalidIndex);
     JS_ASSERT(regs[lastResort]);
 
     SyncEntry *e = regs[lastResort];
@@ -133,48 +145,49 @@ ImmutableSync::allocReg()
     }
 
     return reg;
 }
 
 inline ImmutableSync::SyncEntry &
 ImmutableSync::entryFor(FrameEntry *fe)
 {
-    JS_ASSERT(fe <= top);
-    SyncEntry &e = entries[frame.indexOfFe(fe)];
-    if (e.generation != generation)
-        e.reset(generation);
-    return e;
+    JS_ASSERT(fe->trackerIndex() < nentries);
+    return entries[fe->trackerIndex()];
 }
 
 void
 ImmutableSync::sync(FrameEntry *fe)
 {
-#ifdef DEBUG
-    top = fe;
-#endif
-
+    JS_ASSERT(nentries);
     if (fe->isCopy())
         syncCopy(fe);
     else
         syncNormal(fe);
+    nentries--;
 }
 
 bool
 ImmutableSync::shouldSyncType(FrameEntry *fe, SyncEntry &e)
 {
-    /* Registers are synced up-front. */
-    return !fe->type.synced() && !fe->type.inRegister();
+    if (fe->type.inRegister() && !e.typeClobbered)
+        return true;
+    if (e.hasTypeReg)
+        return true;
+    return frame.inTryBlock || fe >= bottom;
 }
 
 bool
 ImmutableSync::shouldSyncData(FrameEntry *fe, SyncEntry &e)
 {
-    /* Registers are synced up-front. */
-    return !fe->data.synced() && !fe->data.inRegister();
+    if (fe->data.inRegister() && !e.dataClobbered)
+        return true;
+    if (e.hasDataReg)
+        return true;
+    return frame.inTryBlock || fe >= bottom;
 }
 
 JSC::MacroAssembler::RegisterID
 ImmutableSync::ensureTypeReg(FrameEntry *fe, SyncEntry &e)
 {
     if (fe->type.inRegister() && !e.typeClobbered)
         return fe->type.reg();
     if (e.hasTypeReg)
@@ -198,17 +211,18 @@ ImmutableSync::ensureDataReg(FrameEntry 
     regs[e.dataReg] = &e;
     masm->loadPayload(frame.addressOf(fe), e.dataReg);
     return e.dataReg;
 }
 
 void
 ImmutableSync::syncCopy(FrameEntry *fe)
 {
-    JS_ASSERT(fe >= bottom);
+    if (!frame.inTryBlock && fe < bottom)
+        return;
 
     FrameEntry *backing = fe->copyOf();
     SyncEntry &e = entryFor(backing);
 
     JS_ASSERT(!backing->isConstant());
 
     Address addr = frame.addressOf(fe);
 
@@ -235,38 +249,38 @@ ImmutableSync::syncNormal(FrameEntry *fe
 
     Address addr = frame.addressOf(fe);
 
     if (fe->isTypeKnown()) {
         e.learnedType = true;
         e.type = fe->getKnownType();
     }
 
-    if (shouldSyncData(fe, e)) {
+    if (!fe->data.synced() && !e.dataSynced && shouldSyncData(fe, e)) {
         if (fe->isConstant()) {
             masm->storeValue(fe->getValue(), addr);
             return;
         }
         masm->storePayload(ensureDataReg(fe, e), addr);
     }
 
-    if (shouldSyncType(fe, e)) {
+    if (!fe->type.synced() && !e.typeSynced && shouldSyncType(fe, e)) {
         if (e.learnedType)
             masm->storeTypeTag(ImmType(e.type), addr);
         else
             masm->storeTypeTag(ensureTypeReg(fe, e), addr);
     }
 
     if (e.hasDataReg) {
         avail.putReg(e.dataReg);
         regs[e.dataReg] = NULL;
-    } else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe()) {
+    } else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe) {
         avail.putReg(fe->data.reg());
     }
 
     if (e.hasTypeReg) {
         avail.putReg(e.typeReg);
         regs[e.typeReg] = NULL;
-    } else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe()) {
+    } else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe) {
         avail.putReg(fe->type.reg());
     }
 }
 
--- a/js/src/methodjit/ImmutableSync.h
+++ b/js/src/methodjit/ImmutableSync.h
@@ -65,42 +65,35 @@ class ImmutableSync
 
     struct SyncEntry {
         /*
          * NB: clobbered and sync mean the same thing: the register associated
          * in the FrameEntry is no longer valid, and has been written back.
          *
          * They are separated for readability.
          */
-        uint32 generation;
+        bool dataSynced;
+        bool typeSynced;
         bool dataClobbered;
         bool typeClobbered;
+        RegisterID dataReg;
+        RegisterID typeReg;
         bool hasDataReg;
         bool hasTypeReg;
         bool learnedType;
-        RegisterID dataReg;
-        RegisterID typeReg;
         JSValueType type;
-
-        void reset(uint32 gen) {
-            dataClobbered = false;
-            typeClobbered = false;
-            hasDataReg = false;
-            hasTypeReg = false;
-            learnedType = false;
-            generation = gen;
-        }
     };
 
   public:
     ImmutableSync(JSContext *cx, const FrameState &frame);
     ~ImmutableSync();
     bool init(uint32 nentries);
 
-    void reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom);
+    void reset(Assembler *masm, Registers avail, uint32 n,
+               FrameEntry *bottom);
     void sync(FrameEntry *fe);
 
   private:
     void syncCopy(FrameEntry *fe);
     void syncNormal(FrameEntry *fe);
     RegisterID ensureDataReg(FrameEntry *fe, SyncEntry &e);
     RegisterID ensureTypeReg(FrameEntry *fe, SyncEntry &e);
     RegisterID allocReg();
@@ -113,18 +106,16 @@ class ImmutableSync
   private:
     JSContext *cx;
     SyncEntry *entries;
     const FrameState &frame;
     uint32 nentries;
     Registers avail;
     Assembler *masm;
     SyncEntry *regs[Assembler::TotalRegisters];
-    FrameEntry *top;
     FrameEntry *bottom;
-    uint32 generation;
 };
 
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* jsjaeger_imm_sync_h__ */
 
--- a/js/src/methodjit/MachineRegs.h
+++ b/js/src/methodjit/MachineRegs.h
@@ -185,26 +185,21 @@ struct Registers {
     bool empty() const {
         return !freeMask;
     }
 
     bool empty(uint32 mask) const {
         return !(freeMask & mask);
     }
 
-    RegisterID peekReg() {
+    RegisterID takeAnyReg() {
         JS_ASSERT(!empty());
         int ireg;
         JS_FLOOR_LOG2(ireg, freeMask);
         RegisterID reg = (RegisterID)ireg;
-        return reg;
-    }
-
-    RegisterID takeAnyReg() {
-        RegisterID reg = peekReg();
         takeReg(reg);
         return reg;
     }
 
     bool hasRegInMask(uint32 mask) const {
         Registers temp(freeMask & mask);
         return !temp.empty();
     }