Merge.
author David Anderson <danderson@mozilla.com>
Wed, 13 Aug 2008 17:11:22 -0700
changeset 18145 3794c4f36b2737c631631b75beb5647a7fbc6420
parent 18144 8f3c8fb8c7162c8eac0c088c8be1e49cd894e2db
parent 18142 d96e0ab82bb857240580ce26f88172477c075eb5
child 18146 9379f9e865f7ff5edaed99180ee2bd885d8689c2
milestone 1.9.1a2pre
js/src/jstracer.cpp
--- a/js/src/builtins.tbl
+++ b/js/src/builtins.tbl
@@ -63,17 +63,18 @@ BUILTIN4(String_p_replace_str,  LO, LO, 
 BUILTIN5(String_p_replace_str3, LO, LO, LO, LO, LO, LO, JSString*, JSContext*, JSString*, JSString*, JSString*, JSString*, 1, 1)
 BUILTIN1(Math_random,           LO,     F,      jsdouble,  JSRuntime*, 1, 1)
 BUILTIN2(EqualStrings,          LO,     LO, LO, bool,      JSString*, JSString*, 1, 1)
 BUILTIN2(CompareStrings,        LO,     LO, LO, bool,      JSString*, JSString*, 1, 1)
 BUILTIN2(StringToNumber,        LO,     LO, F,  jsdouble,  JSContext*, JSString*, 1, 1)
 BUILTIN2(StringToInt32,         LO,     LO, LO, jsint,     JSContext*, JSString*, 1, 1)
 BUILTIN3(Any_getelem,           LO, LO, LO, LO, jsval,     JSContext*, JSObject*, JSString*, 1, 1)
 BUILTIN4(Any_setelem,           LO, LO, LO, LO, LO, bool,  JSContext*, JSObject*, JSString*, jsval, 1, 1)
-BUILTIN2(ValueToEnumerator,     LO,     LO, LO, JSObject*, JSContext*, jsval, 1, 1)
+BUILTIN3(FastValueToIterator,   LO, LO, LO, LO, JSObject*, JSContext*, jsuint, jsval, 1, 1)
+BUILTIN2(FastCallIteratorNext,  LO,     LO, LO, JSObject*, JSContext*, JSObject*, 1, 1)
 BUILTIN2(CloseIterator,         LO,     LO, LO, bool,      JSContext*, jsval, 1, 1)
 BUILTIN2(CallTree,              LO, LO, LO,     nanojit::GuardRecord*, avmplus::InterpState*, nanojit::Fragment*, 0, 0)
 BUILTIN2(FastNewObject,         LO,     LO, LO, JSObject*, JSContext*, JSObject*, 1, 1)
 BUILTIN3(AddProperty,           LO, LO, LO, LO, bool,      JSContext*, JSObject*, JSScopeProperty*, 1, 1)
 BUILTIN3(CallGetter,            LO, LO, LO, LO, jsval,     JSContext*, JSObject*, JSScopeProperty*, 1, 1)
 BUILTIN2(TypeOfObject,          LO,     LO, LO, JSString*, JSContext*, JSObject*, 1, 1)
 BUILTIN2(TypeOfBoolean,         LO,     LO, LO, JSString*, JSContext*, jsint, 1, 1)
 BUILTIN2(NumberToString,        LO,     F,  LO, JSString*, JSContext*, jsdouble, 1, 1)
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -362,23 +362,32 @@ bool FASTCALL
 js_Any_setelem(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
 {
     if (!JSSTRING_IS_FLAT(idstr) && !js_UndependString(cx, idstr))
         return false;
     return OBJ_SET_PROPERTY(cx, obj, ATOM_TO_JSID(STRING_TO_JSVAL(idstr)), &v);
 }
 
 JSObject* FASTCALL
-js_ValueToEnumerator(JSContext* cx, jsval v)
+js_FastValueToIterator(JSContext* cx, jsuint flags, jsval v)
 {
-    if (!js_ValueToIterator(cx, JSITER_ENUMERATE, &v))
+    if (!js_ValueToIterator(cx, flags, &v))
         return NULL;
     return JSVAL_TO_OBJECT(v);
 }
 
+jsval FASTCALL
+js_FastCallIteratorNext(JSContext* cx, JSObject* iterobj)
+{
+    jsval v;
+    if (!js_CallIteratorNext(cx, iterobj, &v))
+        return JSVAL_ERROR_COOKIE;
+    return v;
+}
+
 GuardRecord* FASTCALL
 js_CallTree(InterpState* state, Fragment* f)
 {
     /* currently we can't deal with inner trees that have globals, so report an error */
     JS_ASSERT(!((TreeInfo*)f->vmprivate)->globalSlots.length());
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
     u.code = f->code();
     return u.func(state, NULL);
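
Note on the two new builtins above: like the other fast-path helpers in this file, they signal failure in-band instead of through a bool out-parameter, so the recorded trace only needs a single equality guard. The helper below is an illustrative sketch of that calling convention, not code from the patch; it assumes JSVAL_ERROR_COOKIE is a reserved jsval bit pattern that a successful js_CallIteratorNext can never return.

    static bool
    ConsumeIteratorNext(JSContext* cx, JSObject* iterobj, jsval* vp)
    {
        jsval v = js_FastCallIteratorNext(cx, iterobj);
        if (v == JSVAL_ERROR_COOKIE)   /* slow path failed; the trace must side-exit */
            return false;
        *vp = v;                       /* may be JSVAL_HOLE once the iterator is exhausted */
        return true;
    }
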
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -77,16 +77,19 @@
 #define MAX_CALLDEPTH 5
 
 /* Max number of type mismatches before we trash the tree. */
 #define MAX_MISMATCH 5
 
 /* Max native stack size. */
 #define MAX_NATIVE_STACK_SLOTS 1024
 
+/* Max call stack size. */
+#define MAX_CALL_STACK_ENTRIES 64
+
 #ifdef DEBUG
 #define ABORT_TRACE(msg)   do { fprintf(stdout, "abort: %d: %s\n", __LINE__, msg); return false; } while(0)
 #else
 #define ABORT_TRACE(msg)   return false
 #endif
 
 #ifdef DEBUG
 static struct {
@@ -646,19 +649,20 @@ TraceRecorder::TraceRecorder(JSContext* 
         lirbuf->state = addName(lir->insParam(0), "state");
         lirbuf->param1 = addName(lir->insParam(1), "param1");
     }
     lirbuf->sp = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, sp)), "sp");
     lirbuf->rp = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, cx)), "cx");
     gp_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, gp)), "gp");
     eos_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, eos)), "eos");
+    eor_ins = addName(lir->insLoadi(lirbuf->state, offsetof(InterpState, eor)), "eor");
 
     /* read into registers all values on the stack and all globals we know so far */
-    import(ngslots, callDepth, globalTypeMap, stackTypeMap); 
+    import(treeInfo, lirbuf->sp, ngslots, callDepth, globalTypeMap, stackTypeMap); 
 }
 
 TraceRecorder::~TraceRecorder()
 {
 #ifdef DEBUG
     delete verbose_filter;
 #endif
     delete cse_filter;
@@ -1031,30 +1035,30 @@ TraceRecorder::import(LIns* base, ptrdif
     static const char* typestr[] = {
         "object", "int", "double", "3", "string", "5", "boolean", "any"
     };
     printf("import vp=%p name=%s type=%s flags=%d\n", p, name, typestr[t & 7], t >> 3);
 #endif
 }
 
 void
-TraceRecorder::import(unsigned ngslots, unsigned callDepth, 
+TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned callDepth, 
                       uint8* globalTypeMap, uint8* stackTypeMap)
 {
     /* the first time we compile a tree this will be empty as we add entries lazily */
     uint16* gslots = treeInfo->globalSlots.data();
     uint8* m = globalTypeMap;
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
         m++;
     );
     ptrdiff_t offset = -treeInfo->nativeStackBase;
     m = stackTypeMap;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        import(lirbuf->sp, offset, vp, *m, vpname, vpnum, fp);
+        import(sp, offset, vp, *m, vpname, vpnum, fp);
         m++; offset += sizeof(double);
     );
 }
 
 /* Lazily import a global slot if we don't already have it in the tracker. */
 bool
 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
 {
@@ -1332,45 +1336,56 @@ TraceRecorder::closeLoop(Fragmento* frag
 #endif
 }
 
 /* Record a call to an inner tree. */
 void
 TraceRecorder::emitTreeCall(Fragment* inner, GuardRecord* lr)
 {
     TreeInfo* ti = (TreeInfo*)inner->vmprivate;
+    LIns* inner_sp = lirbuf->sp;
     /* The inner tree expects to be called from the current scope. If the outer tree (the
        trace we are currently recording) is inside inlined function code (callDepth > 0),
        we have to advance the native stack pointer so that it matches what the inner trace
        expects to see. We move it back when we come out of the inner tree call. */
     if (callDepth > 0) {
         /* Calculate the amount we have to lift the native stack pointer by to compensate for
            any outer frames that the inner tree doesn't expect but the outer tree has. */
-        unsigned sp_adj = nativeStackSlots(cx, callDepth - 1, cx->fp->down) * sizeof(double);
+        ptrdiff_t sp_adj = nativeStackOffset(&cx->fp->argv[-1]);
+        /* Calculate the amount we have to lift the call stack pointer by. */
+        ptrdiff_t rp_adj = callDepth * sizeof(FrameInfo);
         /* Guard that we have enough stack space for the tree we are trying to call on top
            of the new value for sp. */
         LIns* sp_top = lir->ins2i(LIR_add, lirbuf->sp, sp_adj + 
                 ti->maxNativeStackSlots * sizeof(double));
         guard(true, lir->ins2(LIR_lt, sp_top, eos_ins), OOM_EXIT);
-        /* We have enough space, so adjust sp to its new level. */
-        lir->insStorei(lir->ins2i(LIR_add, lirbuf->sp, sp_adj), 
+        /* Guard that we have enough call stack space. */
+        LIns* rp_top = lir->ins2i(LIR_add, lirbuf->rp, rp_adj + 
+                ti->maxCallDepth * sizeof(FrameInfo));
+        guard(true, lir->ins2(LIR_lt, rp_top, eor_ins), OOM_EXIT);
+        /* We have enough space, so adjust sp and rp to their new level. */
+        lir->insStorei(inner_sp = lir->ins2i(LIR_add, lirbuf->sp, sp_adj), 
                 lirbuf->state, offsetof(InterpState, sp));
+        lir->insStorei(lir->ins2i(LIR_add, lirbuf->rp, rp_adj),
+                lirbuf->state, offsetof(InterpState, rp));
     }
     /* Invoke the inner tree. */
     LIns* args[] = { lir->insImmPtr(inner), lirbuf->state }; /* reverse order */
     LIns* ret = lir->insCall(F_CallTree, args);
     /* Make a note that we now depend on that tree. */
     ti->dependentTrees.addUnique(fragment);
     /* Read back all registers, in case the called tree changed any of them. */
     SideExit* exit = lr->exit;
-    import(exit->numGlobalSlots, exit->calldepth, 
+    import(ti, inner_sp, exit->numGlobalSlots, exit->calldepth, 
            exit->typeMap, exit->typeMap + exit->numGlobalSlots);
-    /* Restore state->sp to its original value (we still have it in a register). */
-    if (callDepth > 0)
+    /* Restore sp and rp to their original values (we still have them in a register). */
+    if (callDepth > 0) {
         lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
+        lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
+    }
     /* Guard that we come out of the inner tree along the same side exit we came out when
        we called the inner tree at recording time. */
     guard(true, lir->ins2(LIR_eq, ret, lir->insImmPtr(lr)), NESTED_EXIT);
 }
 
 int
 nanojit::StackFilter::getTop(LInsp guard)
 {
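
For reference, the LIR emitted by the hunk above amounts to the following bookkeeping, written here as plain C++. This helper is a sketch only (the field names mirror the patch, but the function itself does not exist in the tree): sp and rp are lifted past the frames the inner tree does not expect, and both are bounds-checked against the eos/eor watermarks before F_CallTree is invoked.

    static bool
    EnterInnerTree(avmplus::InterpState& state, TreeInfo* inner,
                   ptrdiff_t sp_adj, ptrdiff_t rp_adj)
    {
        /* Lift the native stack pointer past the outer frames and the call stack
           pointer past callDepth FrameInfo records. */
        char* new_sp = (char*)state.sp + sp_adj;
        char* new_rp = (char*)state.rp + rp_adj;
        /* The two OOM_EXIT guards: the inner tree's maximum stack use must still
           fit below the native-stack limit (eos) and the call-stack limit (eor). */
        if (new_sp + inner->maxNativeStackSlots * sizeof(double) >= (char*)state.eos)
            return false;
        if (new_rp + inner->maxCallDepth * sizeof(FrameInfo) >= (char*)state.eor)
            return false;
        state.sp = new_sp;
        state.rp = new_rp;
        return true;
    }
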
@@ -1651,17 +1666,17 @@ js_ContinueRecording(JSContext* cx, Trac
         }
         AUDIT(traceCompleted);
         r->closeLoop(fragmento);
         js_DeleteRecorder(cx);
         return false; /* done recording */
     }
     /* does this branch go to an inner loop? */
     Fragment* f = fragmento->getLoop(cx->fp->regs->pc);
-    if (nesting_enabled && f->code()) {
+    if (nesting_enabled && f->code() && !((TreeInfo*)f->vmprivate)->globalSlots.length()) {
         JS_ASSERT(f->vmprivate);
         /* call the inner tree */
         GuardRecord* lr = js_ExecuteTree(cx, f, inlineCallCount);
         if (!lr) {
             js_AbortRecording(cx, oldpc, "Couldn't call inner tree");
             return false;
         }
         switch (lr->exit->exitType) {
@@ -1706,41 +1721,45 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     }
 
     unsigned ngslots = ti->globalSlots.length();
     uint16* gslots = ti->globalSlots.data();
     unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
     double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
     debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
     double* stack = (double*)alloca(MAX_NATIVE_STACK_SLOTS * sizeof(double));
+    
 #ifdef DEBUG
     printf("entering trace at %s:%u@%u, native stack slots: %u\n",
            cx->fp->script->filename, js_PCToLineNumber(cx, cx->fp->script, cx->fp->regs->pc),
            cx->fp->regs->pc - cx->fp->script->code, ti->maxNativeStackSlots);
 #endif
     if (!BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap.data(), global) ||
         !BuildNativeStackFrame(cx, 0/*callDepth*/, ti->stackTypeMap.data(), stack)) {
         AUDIT(typeMapMismatchAtEntry);
         debug_only(printf("type-map mismatch.\n");)
         if (++ti->mismatchCount > MAX_MISMATCH) {
             debug_only(printf("excessive mismatches, flushing cache.\n"));
             f->blacklist();
             js_TrashTree(cx, f);
         }
         return NULL;
     }
+    
     ti->mismatchCount = 0;
-    
+
     double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
-
-    FrameInfo* callstack = (FrameInfo*) alloca(ti->maxCallDepth * sizeof(FrameInfo));
+    //FrameInfo* callstack = (FrameInfo*) alloca(ti->maxCallDepth * sizeof(FrameInfo));
+    FrameInfo* callstack = (FrameInfo*) alloca(MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo));
+    
     InterpState state;
     state.sp = (void*)entry_sp;
     state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
     state.rp = callstack;
+    state.eor = callstack + MAX_CALL_STACK_ENTRIES;
     state.gp = global;
     state.cx = cx;
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
     u.code = f->code();
 
 #if defined(DEBUG) && defined(NANOJIT_IA32)
     uint64 start = rdtsc();
 #endif
@@ -1765,16 +1784,18 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     printf("leaving trace at %s:%u@%u, exitType=%d, sp=%p, ip=%p, cycles=%llu\n",
            fp->script->filename, js_PCToLineNumber(cx, fp->script, fp->regs->pc),
            fp->regs->pc - fp->script->code,
            lr->exit->exitType,
            state.sp, lr->jmp,
            (rdtsc() - start));
 #endif
 
+    JS_ASSERT(lr->exit->exitType != NESTED_EXIT);
+    
     FlushNativeGlobalFrame(cx, e->numGlobalSlots, ti->globalSlots.data(), e->typeMap, global);
     FlushNativeStackFrame(cx, e->calldepth, e->typeMap + e->numGlobalSlots, stack);
     JS_ASSERT(ti->globalSlots.length() >= e->numGlobalSlots);
     JS_ASSERT(globalFrameSize == STOBJ_NSLOTS(globalObj));
     JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
 
     AUDIT(sideExitIntoInterpreter);
 
@@ -1815,31 +1836,32 @@ js_LoopEdge(JSContext* cx, jsbytecode* o
         if (++f->hits() >= HOTLOOP)
             return js_RecordTree(cx, tm, f);
         return false;
     }
     JS_ASSERT(!tm->recorder);
 
     /* if this is a local branch in the same loop, grow the tree */
     GuardRecord* lr = js_ExecuteTree(cx, f, inlineCallCount);
-    JS_ASSERT(!(lr && (lr->exit->exitType == NESTED_EXIT)));
     if (lr && (lr->from->root == f) && (lr->exit->exitType == BRANCH_EXIT))
         return js_AttemptToExtendTree(cx, lr, f);
     /* if this exits the loop, resume interpretation */
     return false;
 }
 
 void
 js_AbortRecording(JSContext* cx, jsbytecode* abortpc, const char* reason)
 {
     AUDIT(recorderAborted);
-    debug_only(if (!abortpc) abortpc = cx->fp->regs->pc;
-               printf("Abort recording (line %d, pc %d): %s.\n",
-                      js_PCToLineNumber(cx, cx->fp->script, abortpc),
-                      abortpc - cx->fp->script->code, reason);)
+    if (cx->fp) {
+        debug_only(if (!abortpc) abortpc = cx->fp->regs->pc;
+                   printf("Abort recording (line %d, pc %d): %s.\n",
+                          js_PCToLineNumber(cx, cx->fp->script, abortpc),
+                          abortpc - cx->fp->script->code, reason);)
+    }
     JS_ASSERT(JS_TRACE_MONITOR(cx).recorder != NULL);
     Fragment* f = JS_TRACE_MONITOR(cx).recorder->getFragment();
     f->blacklist();
     js_DeleteRecorder(cx);
     if (f->root == f)
         js_TrashTree(cx, f);
 }
 
@@ -1904,17 +1926,17 @@ TraceRecorder::argval(unsigned n) const
 {
     JS_ASSERT(n < cx->fp->fun->nargs);
     return cx->fp->argv[n];
 }
 
 jsval&
 TraceRecorder::varval(unsigned n) const
 {
-    JS_ASSERT(n < cx->fp->script->nfixed);
+    JS_ASSERT(n < cx->fp->script->nslots);
     return cx->fp->slots[n];
 }
 
 jsval&
 TraceRecorder::stackval(int n) const
 {
     jsval* sp = cx->fp->regs->sp;
     JS_ASSERT(size_t((sp + n) - StackBase(cx->fp)) < StackDepth(cx->fp->script));
@@ -2511,48 +2533,48 @@ TraceRecorder::box_jsval(jsval v, LIns*&
 
 bool
 TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins)
 {
     if (isNumber(v)) {
         // JSVAL_IS_NUMBER(v)
         guard(false,
               lir->ins_eq0(lir->ins2(LIR_or,
-                                     lir->ins2(LIR_and, v_ins, lir->insImmPtr((void*)JSVAL_INT)),
+                                     lir->ins2(LIR_and, v_ins, INS_CONSTPTR(JSVAL_INT)),
                                      lir->ins2i(LIR_eq,
                                                 lir->ins2(LIR_and, v_ins,
-                                                          lir->insImmPtr((void*)JSVAL_TAGMASK)),
+                                                          INS_CONSTPTR(JSVAL_TAGMASK)),
                                                 JSVAL_DOUBLE))),
               MISMATCH_EXIT);
         v_ins = lir->insCall(F_UnboxDouble, &v_ins);
         return true;
     }
     switch (JSVAL_TAG(v)) {
       case JSVAL_BOOLEAN:
         guard(true,
               lir->ins2i(LIR_eq,
-                         lir->ins2(LIR_and, v_ins, lir->insImmPtr((void*)JSVAL_TAGMASK)),
+                         lir->ins2(LIR_and, v_ins, INS_CONSTPTR(JSVAL_TAGMASK)),
                          JSVAL_BOOLEAN),
               MISMATCH_EXIT);
          v_ins = lir->ins2i(LIR_ush, v_ins, JSVAL_TAGBITS);
          return true;
        case JSVAL_OBJECT:
         guard(true,
               lir->ins2i(LIR_eq,
-                         lir->ins2(LIR_and, v_ins, lir->insImmPtr((void*)JSVAL_TAGMASK)),
+                         lir->ins2(LIR_and, v_ins, INS_CONSTPTR(JSVAL_TAGMASK)),
                          JSVAL_OBJECT),
               MISMATCH_EXIT);
         return true;
       case JSVAL_STRING:
         guard(true,
               lir->ins2i(LIR_eq,
-                        lir->ins2(LIR_and, v_ins, lir->insImmPtr((void*)JSVAL_TAGMASK)),
+                        lir->ins2(LIR_and, v_ins, INS_CONSTPTR(JSVAL_TAGMASK)),
                         JSVAL_STRING),
               MISMATCH_EXIT);
-        v_ins = lir->ins2(LIR_and, v_ins, lir->insImmPtr((void*)~JSVAL_TAGMASK));
+        v_ins = lir->ins2(LIR_and, v_ins, INS_CONSTPTR(~JSVAL_TAGMASK));
         return true;
     }
     return false;
 }
 
 bool
 TraceRecorder::getThis(LIns*& this_ins)
 {
@@ -2620,27 +2642,38 @@ TraceRecorder::clearFrameSlotsFromCache(
 {
     /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
        VM stack might map to different locations on the native stack depending on the
        number of arguments of the next call, so we have to make sure we map them into
        the cache with the right offsets. */
     JSStackFrame* fp = cx->fp;
     jsval* vp;
     jsval* vpstop;
-    for (vp = &fp->argv[-1], vpstop = &fp->argv[JS_MAX(fp->fun->nargs,fp->argc)]; vp < vpstop; ++vp)
-        nativeFrameTracker.set(vp, (LIns*)0);
-    for (vp = &fp->slots[0], vpstop = &fp->slots[fp->script->nslots]; vp < vpstop; ++vp)
-        nativeFrameTracker.set(vp, (LIns*)0);
+    if (fp->callee) {
+        vp = &fp->argv[-1];
+        vpstop = &fp->argv[JS_MAX(fp->fun->nargs,fp->argc)];
+        while (vp < vpstop)
+            nativeFrameTracker.set(vp++, (LIns*)0);
+    }
+    vp = &fp->slots[0];
+    vpstop = &fp->slots[fp->script->nslots];
+    while (vp < vpstop)
+        nativeFrameTracker.set(vp++, (LIns*)0);
 }
 
 bool
 TraceRecorder::record_EnterFrame()
 {
     if (++callDepth >= MAX_CALLDEPTH)
         ABORT_TRACE("exceeded maximum call depth");
+#ifdef DEBUG
+    printf("EnterFrame %s, callDepth=%d\n", 
+           js_AtomToPrintableString(cx, cx->fp->fun->atom),
+           callDepth);
+#endif    
     JSStackFrame* fp = cx->fp;
     LIns* void_ins = lir->insImm(JSVAL_TO_BOOLEAN(JSVAL_VOID));
 
     jsval* vp = &fp->argv[fp->argc];
     jsval* vpstop = vp + (fp->fun->nargs - fp->argc);
     while (vp < vpstop) {
         if (vp >= fp->down->regs->sp)
             nativeFrameTracker.set(vp, (LIns*)0);
@@ -2652,16 +2685,22 @@ TraceRecorder::record_EnterFrame()
     while (vp < vpstop)
         set(vp++, void_ins, true);
     return true;
 }
 
 bool
 TraceRecorder::record_LeaveFrame()
 {
+#ifdef DEBUG
+    if (cx->fp->fun)
+        printf("LeaveFrame (back to %s), callDept=%d\n", 
+               js_AtomToPrintableString(cx, cx->fp->fun->atom),
+               callDepth);
+#endif    
     if (callDepth-- <= 0)
         return false;
 
     // LeaveFrame gets called after the interpreter popped the frame and
     // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
     atoms = cx->fp->script->atomMap.vector;
     stack(-1, rval_ins);
     return true;
@@ -3330,17 +3369,17 @@ TraceRecorder::record_JSOP_CALLNAME()
 
 bool
 TraceRecorder::guardShapelessCallee(jsval& callee)
 {
     if (!VALUE_IS_FUNCTION(cx, callee))
         ABORT_TRACE("shapeless callee is not a function");
 
     guard(true,
-          addName(lir->ins2(LIR_eq, get(&callee), lir->insImmPtr((void*) JSVAL_TO_OBJECT(callee))),
+          addName(lir->ins2(LIR_eq, get(&callee), INS_CONSTPTR(JSVAL_TO_OBJECT(callee))),
                   "guard(shapeless callee)"),
           MISMATCH_EXIT);
     return true;
 }
 
 bool
 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc)
 {
@@ -3860,17 +3899,17 @@ TraceRecorder::record_JSOP_LOOKUPSWITCH(
         jsdouble d = asNumber(v);
         jsdpun u;
         u.d = d;
         guard(true,
               addName(lir->ins2(LIR_feq, get(&v), lir->insImmq(u.u64)),
                       "guard(lookupswitch numeric)"),
               BRANCH_EXIT);
     } else if (JSVAL_IS_STRING(v)) {
-        LIns* args[] = { get(&v), lir->insImmPtr((void*) JSVAL_TO_STRING(v)) };
+        LIns* args[] = { get(&v), INS_CONSTPTR(JSVAL_TO_STRING(v)) };
         guard(true,
               addName(lir->ins_eq0(lir->ins_eq0(lir->insCall(F_EqualStrings, args))),
                       "guard(lookupswitch string)"),
               BRANCH_EXIT);
     } else if (JSVAL_IS_BOOLEAN(v)) {
         guard(true,
               addName(lir->ins2(LIR_eq, get(&v), lir->insImm(JSVAL_TO_BOOLEAN(v))),
                       "guard(lookupswitch boolean)"),
@@ -4065,161 +4104,67 @@ bool
 TraceRecorder::record_JSOP_LOCALDEC()
 {
     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
 }
 
 bool
 TraceRecorder::record_JSOP_ITER()
 {
-    uintN flags = cx->fp->regs->pc[1];
-    if (flags & ~JSITER_ENUMERATE)
-        ABORT_TRACE("for-each-in or destructuring JSOP_ITER not traced");
-
     jsval& v = stackval(-1);
     if (!JSVAL_IS_PRIMITIVE(v)) {
-        LIns* args[] = { get(&v), cx_ins };
-        LIns* v_ins = lir->insCall(F_ValueToEnumerator, args);
+        jsuint flags = cx->fp->regs->pc[1];
+        LIns* args[] = { get(&v), INS_CONST(flags), cx_ins };
+        LIns* v_ins = lir->insCall(F_FastValueToIterator, args);
         guard(false, lir->ins_eq0(v_ins), MISMATCH_EXIT);
         set(&v, v_ins);
         return true;
     }
 
     ABORT_TRACE("for-in on a primitive value");
 }
 
 bool
-TraceRecorder::forInProlog(JSObject*& iterobj, LIns*& iterobj_ins)
-{
-    jsval& iterval = stackval(-1);
-    JS_ASSERT(!JSVAL_IS_PRIMITIVE(iterval));
-    iterobj = JSVAL_TO_OBJECT(iterval);
-
-    iterobj_ins = get(&iterval);
-    if (guardClass(iterobj, iterobj_ins, &js_IteratorClass)) {
-        // Check flags in case we did not record the JSOP_ITER (it comes before the for-in loop).
-        uintN flags = JSVAL_TO_INT(iterobj->fslots[JSSLOT_ITER_FLAGS]);
-        if (flags & ~JSITER_ENUMERATE)
-            ABORT_TRACE("for-each-in or destructuring JSOP_ITER not traced");
-
-        guard(true,
-              addName(lir->ins_eq0(lir->ins2(LIR_and,
-                                             lir->insLoadi(iterobj_ins,
-                                                           offsetof(JSObject, fslots) +
-                                                           JSSLOT_ITER_FLAGS * sizeof(jsval)),
-                                             INS_CONST(~JSITER_ENUMERATE))),
-                      "guard(iter flags is JSITER_ENUMERATE)"),
-              MISMATCH_EXIT);
-
-        JSObject* obj = STOBJ_GET_PARENT(iterobj);
-        LIns* obj_ins = stobj_get_fslot(iterobj_ins, JSSLOT_PARENT);
-        LIns* map_ins = lir->insLoadi(obj_ins, offsetof(JSObject, map));
-        LIns* ops_ins;
-        if (!map_is_native(obj->map, map_ins, ops_ins))
+TraceRecorder::forInLoop(jsval* vp)
+{
+    jsval& iterobj_val = stackval(-1);
+    if (!JSVAL_IS_PRIMITIVE(iterobj_val)) {
+        LIns* args[] = { get(&iterobj_val), cx_ins };
+        LIns* v_ins = lir->insCall(F_FastCallIteratorNext, args);
+        guard(false, lir->ins2(LIR_eq, v_ins, INS_CONSTPTR(JSVAL_ERROR_COOKIE)), OOM_EXIT);
+
+        LIns* flag_ins = lir->ins_eq0(lir->ins2(LIR_eq, v_ins, INS_CONSTPTR(JSVAL_HOLE)));
+        LIns* iter_ins = get(vp); 
+        if (!box_jsval(JSVAL_STRING, iter_ins))
             return false;
-
-        LIns* n = lir->insLoadi(ops_ins, offsetof(JSObjectOps, enumerate));
-        if (obj->map->ops->enumerate == js_ObjectOps.enumerate) {
-            guard(true,
-                  addName(lir->ins2(LIR_eq, n, lir->insImmPtr((void*)js_ObjectOps.enumerate)),
-                          "guard(native-enumerate)"),
-                  MISMATCH_EXIT);
-            return true;
-        }
+        iter_ins = lir->ins_choose(flag_ins, v_ins, iter_ins, true);
+        if (!unbox_jsval(JSVAL_STRING, iter_ins))
+            return false;
+        set(vp, iter_ins);
+        stack(0, flag_ins);
+        return true;
     }
-    return false;
-}
-
-bool
-TraceRecorder::forInLoop(LIns*& id_ins)
-{
-    JSObject* iterobj;
-    LIns* iterobj_ins;
-    if (!forInProlog(iterobj, iterobj_ins))
-        return false;
-
-    jsval stateval = iterobj->fslots[JSSLOT_ITER_STATE];
-    LIns* stateval_ins = stobj_get_fslot(iterobj_ins, JSSLOT_ITER_STATE);
-
-    // If a guarded loop termination condition is false while recording, stack
-    // unboxed false and return so the immediately subsequent JSOP_IFEQ exits
-    // the loop.
-    int flag = 0;
-    id_ins = NULL;
-
-    guard(false, addName(lir->ins_eq0(stateval_ins), "guard(non-null iter state"), MISMATCH_EXIT);
-    if (stateval == JSVAL_NULL)
-        goto done;
-    guard(false,
-          addName(lir->ins2(LIR_eq, stateval_ins, lir->insImmPtr((void*) JSVAL_ZERO)),
-                  "guard(non-empty iter state)"),
-          MISMATCH_EXIT);
-    if (stateval == JSVAL_ZERO)
-        goto done;
-
-    // Don't initialize to avoid goto over init warnings/errors.
-    LIns* state_ins; 
-    LIns* cursor_ins;
-
-    state_ins = lir->ins2(LIR_and, stateval_ins, lir->insImmPtr((void*) ~jsval(3)));
-    cursor_ins = lir->insLoadi(state_ins, offsetof(JSNativeEnumerator, cursor));
-    guard(false, addName(lir->ins_eq0(cursor_ins), "guard(ne->cursor != 0)"), MISMATCH_EXIT);
-
-    JSNativeEnumerator* ne;
-
-    // Stack an unboxed true to make JSOP_IFEQ loop continue, even if ne is
-    // exhausted. Either we'll end up in the interpreter finding no enumerable
-    // prototype properties, or we will re-enter this trace having gone up the
-    // prototype chain one level.
-    flag = 1;
-    ne = (JSNativeEnumerator*) (stateval & ~jsval(3));
-    if (ne->cursor == 0)
-        goto done;
-
-    cursor_ins = lir->ins2i(LIR_sub, cursor_ins, 1);
-    lir->insStorei(cursor_ins, state_ins, offsetof(JSNativeEnumerator, cursor));
-
-    // Don't initialize to avoid goto over init warnings/errors.
-    LIns* ids_ins;
-    LIns* id_addr_ins;
-
-    ids_ins = lir->ins2i(LIR_add, state_ins, offsetof(JSNativeEnumerator, ids));
-    id_addr_ins = lir->ins2(LIR_add, ids_ins,
-                            lir->ins2i(LIR_lsh, cursor_ins, (sizeof(jsid) == 4) ? 2 : 3));
-
-
-    id_ins = lir->insLoadi(id_addr_ins, 0);
-done:
-    stack(0, lir->insImm(flag));
-    return true;
+
+    ABORT_TRACE("for-in on a primitive value");
 }
 
 bool
 TraceRecorder::record_JSOP_ENDITER()
 {
     LIns* args[] = { stack(-1), cx_ins };
     LIns* ok_ins = lir->insCall(F_CloseIterator, args);
     guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
     return true;
 }
 
 bool
 TraceRecorder::record_JSOP_FORNAME()
 {
-    LIns* id_ins; 
-    if (!forInLoop(id_ins))
-        return false;
-    if (!id_ins)
-        return true;
-
     jsval* vp;
-    if (!name(vp))
-        return false;
-    set(vp, id_ins);
-    return true;
+    return name(vp) && forInLoop(vp);
 }
 
 bool
 TraceRecorder::record_JSOP_FORPROP()
 {
     return false;
 }
 
@@ -4227,33 +4172,23 @@ bool
 TraceRecorder::record_JSOP_FORELEM()
 {
     return false;
 }
 
 bool
 TraceRecorder::record_JSOP_FORARG()
 {
-    LIns* id_ins; 
-    if (!forInLoop(id_ins))
-        return false;
-    if (id_ins)
-        arg(GET_ARGNO(cx->fp->regs->pc), id_ins);
-    return true;
+    return forInLoop(&argval(GET_ARGNO(cx->fp->regs->pc)));
 }
 
 bool
 TraceRecorder::record_JSOP_FORLOCAL()
 {
-    LIns* id_ins; 
-    if (!forInLoop(id_ins))
-        return false;
-    if (id_ins)
-        var(GET_SLOTNO(cx->fp->regs->pc), id_ins);
-    return true;
+    return forInLoop(&varval(GET_SLOTNO(cx->fp->regs->pc)));
 }
 
 bool
 TraceRecorder::record_JSOP_FORCONST()
 {
     return false;
 }
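
The LIR recorded by the new forInLoop corresponds, step for step, to the logic below. This is a sketch of the intended runtime behaviour under the patch's conventions (JSVAL_HOLE marks an exhausted iterator, JSVAL_ERROR_COOKIE marks a failed js_CallIteratorNext); the helper itself does not exist in the tree.

    static bool
    ForInStep(JSContext* cx, JSObject* iterobj, jsval* vp, bool* morep)
    {
        jsval v = js_FastCallIteratorNext(cx, iterobj);
        if (v == JSVAL_ERROR_COOKIE)     /* slow path failed: side-exit (OOM_EXIT guard) */
            return false;
        *morep = (v != JSVAL_HOLE);      /* JSVAL_HOLE: nothing left to iterate */
        if (*morep)
            *vp = v;                     /* only overwrite the loop variable with a real id */
        /* *morep is what the trace leaves on the stack for the following JSOP_IFEQ. */
        return true;
    }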
 
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -211,25 +211,27 @@ class TraceRecorder {
     nanojit::LirBufWriter*  lir_buf_writer;
     nanojit::LirWriter*     verbose_filter;
     nanojit::LirWriter*     cse_filter;
     nanojit::LirWriter*     expr_filter;
     nanojit::LirWriter*     func_filter;
     nanojit::LIns*          cx_ins;
     nanojit::LIns*          gp_ins;
     nanojit::LIns*          eos_ins;
+    nanojit::LIns*          eor_ins;
     nanojit::LIns*          rval_ins;
     nanojit::SideExit       exit;
 
     bool isGlobal(jsval* p) const;
     ptrdiff_t nativeStackOffset(jsval* p) const;
     ptrdiff_t nativeGlobalOffset(jsval* p) const;
     void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t, 
                 const char *prefix, uintN index, JSStackFrame *fp);
-    void import(unsigned ngslots, unsigned callDepth, uint8* globalTypeMap, uint8* stackTypeMap);
+    void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots, unsigned callDepth, 
+                uint8* globalTypeMap, uint8* stackTypeMap);
     void trackNativeStackUse(unsigned slots);
 
     bool lazilyImportGlobalSlot(unsigned slot);
     
     nanojit::LIns* guard(bool expected, nanojit::LIns* cond, nanojit::ExitType exitType);
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
 
     nanojit::LIns* get(jsval* p);
@@ -294,18 +296,17 @@ class TraceRecorder {
     bool unbox_jsval(jsval v, nanojit::LIns*& v_ins);
     bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp);
     bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins);
     bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
                               nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins);
     void clearFrameSlotsFromCache();
     bool guardShapelessCallee(jsval& callee);
     bool interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc);
-    bool forInProlog(JSObject*& iterobj, nanojit::LIns*& iterobj_ins);
-    bool forInLoop(nanojit::LIns*& id_ins);
+    bool forInLoop(jsval* vp);
 
 public:
     TraceRecorder(JSContext* cx, nanojit::GuardRecord*, nanojit::Fragment*, 
             unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap);
     ~TraceRecorder();
 
     nanojit::SideExit* snapshot(nanojit::ExitType exitType);
     nanojit::Fragment* getFragment() const { return fragment; }
--- a/js/src/nanojit/avmplus.h
+++ b/js/src/nanojit/avmplus.h
@@ -289,16 +289,17 @@ namespace avmplus
 {
     struct InterpState
     {
         void* sp;
         void* rp;
         void* gp;
         JSContext *cx;
         void* eos;
+        void* eor;
     };
 
     class String
     {
     };
 
     typedef class String AvmString;