[JAEGER] Fast paths for start/end iterators and GC hazard removal, bug 578756. r=dmandelin, gal
☠☠ backed out by 64f4b27d2435 ☠ ☠
author: Brian Hackett
Tue, 10 Aug 2010 11:12:54 -0700
changeset 53363 3a6f645100eba84c5fba4aa1b99c597b07c2e001
parent 53362 1fa8cdc8b328c8407bac7dc421f106b5878c6008
child 53364 a584696862949136f73a7f721235ea1c63eb09b9
child 53368 64f4b27d243587f42143c8299228384a1398c293
push id: 1
push user: root
push date: Tue, 26 Apr 2011 22:38:44 +0000
treeherder: mozilla-beta@bfdb6e623a36 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: dmandelin, gal
bugs: 578756
milestone: 2.0b4pre
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
[JAEGER] Fast paths for start/end iterators and GC hazard removal, bug 578756. r=dmandelin, gal
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsiter.cpp
js/src/jsiter.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -594,16 +594,17 @@ JSThreadData::purge(JSContext *cx)
     jmData.purge(cx);
 #endif
 
     /* Destroy eval'ed scripts. */
     js_DestroyScriptsToGC(cx, this);
 
     /* Purge cached native iterators. */
     memset(cachedNativeIterators, 0, sizeof(cachedNativeIterators));
+    lastNativeIterator = NULL;
 
     dtoaCache.s = NULL;
 }
 
 #ifdef JS_THREADSAFE
 
 static JSThread *
 NewThread(void *id)
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1060,16 +1060,19 @@ struct JSThreadData {
         jsdouble d;
         jsint    base;
         JSString *s;        // if s==NULL, d and base are not valid
     } dtoaCache;
 
     /* Cached native iterators. */
     JSObject            *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
 
+    /* Native iterator most recently started. */
+    JSObject            *lastNativeIterator;
+
     /* Base address of the native stack for the current thread. */
     jsuword             *nativeStackBase;
 
     /* List of currently pending operations on proxies. */
     JSPendingProxyOperation *pendingProxyOperation;
 
     js::ConservativeGCThreadData conservativeGC;
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -479,110 +479,121 @@ NativeIterator::allocateKeyIterator(JSCo
     ni->props_array = ni->props_cursor = (jsid *) (ni + 1);
     ni->props_end = (jsid *)ni->props_array + plength;
     if (plength)
         memcpy(ni->props_array, props.begin(), plength * sizeof(jsid));
     return ni;
 }
 
 NativeIterator *
-NativeIterator::allocateValueIterator(JSContext *cx, uint32 slength, const AutoValueVector &props)
+NativeIterator::allocateValueIterator(JSContext *cx, const AutoValueVector &props)
 {
     size_t plength = props.length();
     NativeIterator *ni = (NativeIterator *)
-        cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value) + slength * sizeof(uint32));
+        cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value));
     if (!ni)
         return NULL;
     ni->props_array = ni->props_cursor = (Value *) (ni + 1);
     ni->props_end = (Value *)ni->props_array + plength;
     if (plength)
         memcpy(ni->props_array, props.begin(), plength * sizeof(Value));
     return ni;
 }
 
 inline void
-NativeIterator::init(JSObject *obj, uintN flags, const uint32 *sarray, uint32 slength, uint32 key)
+NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
 {
     this->obj = obj;
     this->flags = flags;
     this->shapes_array = (uint32 *) this->props_end;
     this->shapes_length = slength;
     this->shapes_key = key;
-    if (slength)
-        memcpy(this->shapes_array, sarray, slength * sizeof(uint32));
 }
 
 static inline void
 RegisterEnumerator(JSContext *cx, JSObject *iterobj, NativeIterator *ni)
 {
+    JS_ASSERT(!(ni->flags & JSITER_ACTIVE));
+    ni->flags |= JSITER_ACTIVE;
+
     /* Register non-escaping native enumerators (for-in) with the current context. */
     if (ni->flags & JSITER_ENUMERATE) {
         ni->next = cx->enumerators;
         cx->enumerators = iterobj;
     }
 }
 
 static inline bool
 VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &keys,
-                    const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
+                    uint32 slength, uint32 key, Value *vp)
 {
     JS_ASSERT(!(flags & JSITER_FOREACH));
 
     JSObject *iterobj = NewIteratorObject(cx, flags);
     if (!iterobj)
         return false;
 
-
     NativeIterator *ni = NativeIterator::allocateKeyIterator(cx, slength, keys);
     if (!ni)
-        return NULL;
-    ni->init(obj, flags, sarray, slength, key);
+        return false;
+    ni->init(obj, flags, slength, key);
+
+    if (slength) {
+        /*
+         * Fill in the shape array from scratch.  We can't use the array that was
+         * computed for the cache lookup earlier, as constructing iterobj could
+         * have triggered a shape-regenerating GC.  Don't bother with regenerating
+         * the shape key; if such a GC *does* occur, we can only get hits through
+         * the one-slot lastNativeIterator cache.
+         */
+        JSObject *pobj = obj;
+        size_t ind = 0;
+        do {
+            ni->shapes_array[ind++] = pobj->shape();
+            pobj = pobj->getProto();
+        } while (pobj);
+        JS_ASSERT(ind == slength);
+    }
 
     iterobj->setNativeIterator(ni);
     vp->setObject(*iterobj);
 
     RegisterEnumerator(cx, iterobj, ni);
     return true;
 }
 
 bool
 VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
 {
-    return VectorToKeyIterator(cx, obj, flags, props, NULL, 0, 0, vp);
+    return VectorToKeyIterator(cx, obj, flags, props, 0, 0, vp);
 }
 
-static inline bool
+bool
 VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &vals,
-                      const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
+                      Value *vp)
 {
     JS_ASSERT(flags & JSITER_FOREACH);
 
     JSObject *iterobj = NewIteratorObject(cx, flags);
     if (!iterobj)
         return false;
 
-    NativeIterator *ni = NativeIterator::allocateValueIterator(cx, slength, vals);
+    NativeIterator *ni = NativeIterator::allocateValueIterator(cx, vals);
     if (!ni)
-        return NULL;
-    ni->init(obj, flags, sarray, slength, key);
+        return false;
+    ni->init(obj, flags, 0, 0);
 
     iterobj->setNativeIterator(ni);
     vp->setObject(*iterobj);
 
     RegisterEnumerator(cx, iterobj, ni);
     return true;
 }
 
 bool
-VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &props, Value *vp)
-{
-    return VectorToValueIterator(cx, obj, flags, props, NULL, 0, 0, vp);
-}
-
-bool
 EnumeratedIdVectorToIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
 {
     if (!(flags & JSITER_FOREACH))
         return VectorToKeyIterator(cx, obj, flags, props, vp);
 
     /* For for-each iteration, we need to look up the value of each id. */
 
     size_t plength = props.length();
@@ -609,20 +620,38 @@ GetIterator(JSContext *cx, JSObject *obj
     Vector<uint32, 8> shapes(cx);
     uint32 key = 0;
 
     bool keysOnly = (flags == JSITER_ENUMERATE);
 
     if (obj) {
         if (keysOnly) {
             /*
+             * Quick check to see if this is the same as the most recent
+             * object which was iterated over.
+             */
+            JSObject *last = JS_THREAD_DATA(cx)->lastNativeIterator;
+            JSObject *proto = obj->getProto();
+            if (last) {
+                NativeIterator *lastni = last->getNativeIterator();
+                if (!(lastni->flags & JSITER_ACTIVE) &&
+                    obj->shape() == lastni->shapes_array[0] &&
+                    proto && proto->shape() == lastni->shapes_array[1] &&
+                    !proto->getProto()) {
+                    vp->setObject(*last);
+                    RegisterEnumerator(cx, last, lastni);
+                    return true;
+                }
+            }
+
+            /*
              * The iterator object for JSITER_ENUMERATE never escapes, so we
              * don't care for the proper parent/proto to be set. This also
-             * allows us to re-use a previous iterator object that was freed
-             * by JSOP_ENDITER.
+             * allows us to re-use a previous iterator object that is not
+             * currently active.
              */
             JSObject *pobj = obj;
             do {
                 if (!pobj->isNative() ||
                     obj->getOps()->enumerate ||
                     pobj->getClass()->enumerate != JS_EnumerateStub) {
                     shapes.clear();
                     goto miss;
@@ -634,23 +663,25 @@ GetIterator(JSContext *cx, JSObject *obj
                 pobj = pobj->getProto();
             } while (pobj);
 
             hash = key % JS_ARRAY_LENGTH(JS_THREAD_DATA(cx)->cachedNativeIterators);
             hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
             JSObject *iterobj = *hp;
             if (iterobj) {
                 NativeIterator *ni = iterobj->getNativeIterator();
-                if (ni->shapes_key == key &&
+                if (!(ni->flags & JSITER_ACTIVE) &&
+                    ni->shapes_key == key &&
                     ni->shapes_length == shapes.length() &&
                     Compare(ni->shapes_array, shapes.begin(), ni->shapes_length)) {
                     vp->setObject(*iterobj);
-                    *hp = ni->next;
 
                     RegisterEnumerator(cx, iterobj, ni);
+                    if (shapes.length() == 2)
+                        JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
                     return true;
                 }
             }
         }
 
       miss:
         if (obj->isProxy())
             return JSProxy::iterate(cx, obj, flags, vp);
@@ -661,23 +692,39 @@ GetIterator(JSContext *cx, JSObject *obj
     }
 
     /* NB: for (var p in null) succeeds by iterating over no properties. */
 
     if (flags & JSITER_FOREACH) {
         AutoValueVector vals(cx);
         if (JS_LIKELY(obj != NULL) && !Snapshot<ValueEnumeration>(cx, obj, flags, vals))
             return false;
-        return VectorToValueIterator(cx, obj, flags, vals, shapes.begin(), shapes.length(), key, vp);
+        JS_ASSERT(shapes.empty());
+        if (!VectorToValueIterator(cx, obj, flags, vals, vp))
+            return false;
+    } else {
+        AutoIdVector keys(cx);
+        if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
+            return false;
+        if (!VectorToKeyIterator(cx, obj, flags, keys, shapes.length(), key, vp))
+            return false;
     }
 
-    AutoIdVector keys(cx);
-    if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
-        return false;
-    return VectorToKeyIterator(cx, obj, flags, keys, shapes.begin(), shapes.length(), key, vp);
+    JSObject *iterobj = &vp->toObject();
+
+    /* Cache the iterator object if possible. */
+    if (shapes.length()) {
+        uint32 hash = key % NATIVE_ITER_CACHE_SIZE;
+        JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
+        *hp = iterobj;
+    }
+
+    if (shapes.length() == 2)
+        JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
+    return true;
 }
 
 static JSObject *
 iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly)
 {
     return obj;
 }
 
@@ -794,31 +841,28 @@ JS_FRIEND_API(JSBool)
 js_CloseIterator(JSContext *cx, JSObject *obj)
 {
     cx->iterValue.setMagic(JS_NO_ITER_VALUE);
 
     Class *clasp = obj->getClass();
     if (clasp == &js_IteratorClass) {
         /* Remove enumerators from the active list, which is a stack. */
         NativeIterator *ni = obj->getNativeIterator();
+
+        JS_ASSERT(ni->flags & JSITER_ACTIVE);
+        ni->flags &= ~JSITER_ACTIVE;
+
         if (ni->flags & JSITER_ENUMERATE) {
             JS_ASSERT(cx->enumerators == obj);
             cx->enumerators = ni->next;
         }
 
-        /* Cache the iterator object if possible. */
-        if (ni->shapes_length) {
-            uint32 hash = ni->shapes_key % NATIVE_ITER_CACHE_SIZE;
-            JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
-            ni->props_cursor = ni->props_array;
-            ni->next = *hp;
-            *hp = obj;
-        } else {
-            iterator_finalize(cx, obj);
-        }
+        /* Reset the enumerator; it may still be in the cached iterators
+         * for this thread, and can be reused. */
+        ni->props_cursor = ni->props_array;
     }
 #if JS_HAS_GENERATORS
     else if (clasp == &js_GeneratorClass) {
         return CloseGenerator(cx, obj);
     }
 #endif
     return JS_TRUE;
 }
--- a/js/src/jsiter.h
+++ b/js/src/jsiter.h
@@ -53,26 +53,29 @@
  * JSXDR_BYTECODE_VERSION.
  */
 #define JSITER_ENUMERATE  0x1   /* for-in compatible hidden default iterator */
 #define JSITER_FOREACH    0x2   /* return [key, value] pair rather than key */
 #define JSITER_KEYVALUE   0x4   /* destructuring for-in wants [key, value] */
 #define JSITER_OWNONLY    0x8   /* iterate over obj's own properties only */
 #define JSITER_HIDDEN     0x10  /* also enumerate non-enumerable properties */
 
+/* Whether the iterator is currently active. Not serialized by XDR. */
+#define JSITER_ACTIVE     0x1000
+
 struct NativeIterator {
     JSObject  *obj;
     void      *props_array;
     void      *props_cursor;
     void      *props_end;
     uint32    *shapes_array;
     uint32    shapes_length;
     uint32    shapes_key;
-    uintN     flags;
-    JSObject  *next;
+    uint32    flags;
+    JSObject  *next;  /* Forms cx->enumerators list, garbage otherwise. */
 
     bool isKeyIter() const { return (flags & JSITER_FOREACH) == 0; }
 
     inline jsid *beginKey() const {
         JS_ASSERT(isKeyIter());
         return (jsid *)props_array;
     }
 
@@ -116,19 +119,19 @@ struct NativeIterator {
 
     void incValueCursor() {
         JS_ASSERT(!isKeyIter());
         props_cursor = reinterpret_cast<js::Value *>(props_cursor) + 1;
     }
 
     static NativeIterator *allocateKeyIterator(JSContext *cx, uint32 slength,
                                                const js::AutoIdVector &props);
-    static NativeIterator *allocateValueIterator(JSContext *cx, uint32 slength,
+    static NativeIterator *allocateValueIterator(JSContext *cx,
                                                  const js::AutoValueVector &props);
-    void init(JSObject *obj, uintN flags, const uint32 *sarray, uint32 slength, uint32 key);
+    void init(JSObject *obj, uintN flags, uint32 slength, uint32 key);
 
     void mark(JSTracer *trc);
 };
 
 bool
 VectorToIdArray(JSContext *cx, js::AutoIdVector &props, JSIdArray **idap);
 
 bool
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -940,35 +940,27 @@ mjit::Compiler::generateMethod()
             jsop_stricteq(op);
           END_CASE(JSOP_STRICTEQ)
 
           BEGIN_CASE(JSOP_STRICTNE)
             jsop_stricteq(op);
           END_CASE(JSOP_STRICTNE)
 
           BEGIN_CASE(JSOP_ITER)
-          {
-            prepareStubCall(Uses(1));
-            masm.move(Imm32(PC[1]), Registers::ArgReg1);
-            stubCall(stubs::Iter);
-            frame.pop();
-            frame.pushSynced();
-          }
+            iter(PC[1]);
           END_CASE(JSOP_ITER)
 
           BEGIN_CASE(JSOP_MOREITER)
             /* This MUST be fused with IFNE or IFNEX. */
             iterMore();
             break;
           END_CASE(JSOP_MOREITER)
 
           BEGIN_CASE(JSOP_ENDITER)
-            prepareStubCall(Uses(1));
-            stubCall(stubs::EndIter);
-            frame.pop();
+            iterEnd();
           END_CASE(JSOP_ENDITER)
 
           BEGIN_CASE(JSOP_POP)
             frame.pop();
           END_CASE(JSOP_POP)
 
           BEGIN_CASE(JSOP_NEW)
           {
@@ -3083,16 +3075,130 @@ mjit::Compiler::jsop_propinc(JSOp op, Vo
         stubCall(stub);
         frame.pop();
         frame.pushSynced();
     }
 
     PC += JSOP_PROPINC_LENGTH;
 }
 
+void
+mjit::Compiler::iter(uintN flags)
+{
+    FrameEntry *fe = frame.peek(-1);
+
+    /*
+     * Stub the call if this is not a simple 'for in' loop or if the iterated
+     * value is known to not be an object.
+     */
+    if ((flags != JSITER_ENUMERATE) || fe->isNotType(JSVAL_TYPE_OBJECT)) {
+        prepareStubCall(Uses(1));
+        masm.move(Imm32(flags), Registers::ArgReg1);
+        stubCall(stubs::Iter);
+        frame.pop();
+        frame.pushSynced();
+        return;
+    }
+
+    if (!fe->isTypeKnown()) {
+        Jump notObject = frame.testObject(Assembler::NotEqual, fe);
+        stubcc.linkExit(notObject, Uses(1));
+    }
+
+    RegisterID reg = frame.tempRegForData(fe);
+
+    frame.pinReg(reg);
+    RegisterID ioreg = frame.allocReg();  /* Will hold iterator JSObject */
+    RegisterID nireg = frame.allocReg();  /* Will hold NativeIterator */
+    RegisterID T1 = frame.allocReg();
+    RegisterID T2 = frame.allocReg();
+    frame.unpinReg(reg);
+
+    /*
+     * Fetch the most recent iterator. TODO: bake this pointer in when
+     * iterator caches become per-compartment.
+     */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
+#ifdef JS_THREADSAFE
+    masm.loadPtr(Address(T1, offsetof(JSContext, thread)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSThread, data.lastNativeIterator)), ioreg);
+#else
+    masm.loadPtr(Address(T1, offsetof(JSContext, runtime)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSRuntime, threadData.lastNativeIterator)), ioreg);
+#endif
+
+    /* Test for NULL. */
+    Jump nullIterator = masm.branchTest32(Assembler::Zero, ioreg, ioreg);
+    stubcc.linkExit(nullIterator, Uses(1));
+
+    /* Get NativeIterator from iter obj. :FIXME: X64, also most of this function */
+    Address privSlot(ioreg, offsetof(JSObject, fslots) + sizeof(Value) * JSSLOT_PRIVATE);
+    masm.loadPayload(privSlot, nireg);
+
+    /* Test for active iterator. */
+    Address flagsAddr(nireg, offsetof(NativeIterator, flags));
+    masm.load32(flagsAddr, T1);
+    masm.and32(Imm32(JSITER_ACTIVE), T1);
+    Jump activeIterator = masm.branchTest32(Assembler::NonZero, T1, T1);
+    stubcc.linkExit(activeIterator, Uses(1));
+
+    /* Compare shape of object with iterator. */
+    masm.loadShape(reg, T1);
+    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
+    masm.load32(Address(T2, 0), T2);
+    Jump mismatchedObject = masm.branch32(Assembler::NotEqual, T1, T2);
+    stubcc.linkExit(mismatchedObject, Uses(1));
+
+    /* Compare shape of object's prototype with iterator. */
+    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
+    masm.loadShape(T1, T1);
+    masm.loadPtr(Address(nireg, offsetof(NativeIterator, shapes_array)), T2);
+    masm.load32(Address(T2, sizeof(uint32)), T2);
+    Jump mismatchedProto = masm.branch32(Assembler::NotEqual, T1, T2);
+    stubcc.linkExit(mismatchedProto, Uses(1));
+
+    /*
+     * Compare object's prototype's prototype with NULL. The last native
+     * iterator will always have a prototype chain length of one
+     * (i.e. it must be a plain object), so we do not need to generate
+     * a loop here.
+     */
+    masm.loadPtr(Address(reg, offsetof(JSObject, proto)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
+    Jump overlongChain = masm.branchPtr(Assembler::NonZero, T1, T1);
+    stubcc.linkExit(overlongChain, Uses(1));
+
+    /* Found a match with the most recent iterator. Hooray! */
+
+    /* Mark iterator as active. */
+    masm.load32(flagsAddr, T1);
+    masm.or32(Imm32(JSITER_ACTIVE), T1);
+    masm.store32(T1, flagsAddr);
+
+    /* Chain onto the active iterator stack. */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T1);
+    masm.loadPtr(Address(T1, offsetof(JSContext, enumerators)), T2);
+    masm.storePtr(T2, Address(nireg, offsetof(NativeIterator, next)));
+    masm.storePtr(ioreg, Address(T1, offsetof(JSContext, enumerators)));
+
+    frame.freeReg(nireg);
+    frame.freeReg(T1);
+    frame.freeReg(T2);
+
+    stubcc.leave();
+    stubcc.masm.move(Imm32(flags), Registers::ArgReg1);
+    stubcc.call(stubs::Iter);
+
+    /* Push the iterator object. */
+    frame.pop();
+    frame.pushTypedPayload(JSVAL_TYPE_OBJECT, ioreg);
+
+    stubcc.rejoin(Changes(1));
+}
+
 /*
  * This big nasty function emits a fast-path for native iterators, producing
  * a temporary value on the stack for FORLOCAL,ARG,GLOBAL,etc ops to use.
  */
 void
 mjit::Compiler::iterNext()
 {
     FrameEntry *fe = frame.peek(-1);
@@ -3191,16 +3297,70 @@ mjit::Compiler::iterMore()
     PC += js_CodeSpec[next].length;
 
     stubcc.rejoin(Changes(1));
 
     jumpAndTrace(jFast, target, &j);
 }
 
 void
+mjit::Compiler::iterEnd()
+{
+    FrameEntry *fe= frame.peek(-1);
+    RegisterID reg = frame.tempRegForData(fe);
+
+    frame.pinReg(reg);
+    RegisterID T1 = frame.allocReg();
+    frame.unpinReg(reg);
+
+    /* Test clasp */
+    masm.loadPtr(Address(reg, offsetof(JSObject, clasp)), T1);
+    Jump notIterator = masm.branchPtr(Assembler::NotEqual, T1, ImmPtr(&js_IteratorClass));
+    stubcc.linkExit(notIterator, Uses(1));
+
+    /* Get private from iter obj. :FIXME: X64 */
+    Address privSlot(reg, offsetof(JSObject, fslots) + sizeof(Value) * JSSLOT_PRIVATE);
+    masm.loadPayload(privSlot, T1);
+
+    RegisterID T2 = frame.allocReg();
+
+    /* Load flags. */
+    Address flagAddr(T1, offsetof(NativeIterator, flags));
+    masm.loadPtr(flagAddr, T2);
+
+    /* Test for (flags == ENUMERATE | ACTIVE). */
+    Jump notEnumerate = masm.branch32(Assembler::NotEqual, T2,
+                                      Imm32(JSITER_ENUMERATE | JSITER_ACTIVE));
+    stubcc.linkExit(notEnumerate, Uses(1));
+
+    /* Clear active bit. */
+    masm.and32(Imm32(~JSITER_ACTIVE), T2);
+    masm.storePtr(T2, flagAddr);
+
+    /* Reset property cursor. */
+    masm.loadPtr(Address(T1, offsetof(NativeIterator, props_array)), T2);
+    masm.storePtr(T2, Address(T1, offsetof(NativeIterator, props_cursor)));
+
+    /* Advance enumerators list. */
+    masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), T2);
+    masm.loadPtr(Address(T1, offsetof(NativeIterator, next)), T1);
+    masm.storePtr(T1, Address(T2, offsetof(JSContext, enumerators)));
+
+    frame.freeReg(T1);
+    frame.freeReg(T2);
+
+    stubcc.leave();
+    stubcc.call(stubs::EndIter);
+
+    frame.pop();
+
+    stubcc.rejoin(Changes(1));
+}
+
+void
 mjit::Compiler::jsop_eleminc(JSOp op, VoidStub stub)
 {
     prepareStubCall(Uses(2));
     stubCall(stub);
     frame.popn(2);
     frame.pushSynced();
 }
 
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -221,18 +221,20 @@ class Compiler
     void jumpInScript(Jump j, jsbytecode *pc);
     JSC::ExecutablePool *getExecPool(size_t size);
     bool compareTwoValues(JSContext *cx, JSOp op, const Value &lhs, const Value &rhs);
     void addCallSite(uint32 id, bool stub);
 
     /* Emitting helpers. */
     void restoreFrameRegs(Assembler &masm);
     void emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused);
+    void iter(uintN flags);
     void iterNext();
     void iterMore();
+    void iterEnd();
     MaybeJump loadDouble(FrameEntry *fe, FPRegisterID fpReg);
 
     /* Opcode handlers. */
     void jumpAndTrace(Jump j, jsbytecode *target, Jump *slow = NULL);
     void jsop_bindname(uint32 index);
     void jsop_setglobal(uint32 index);
     void jsop_getglobal(uint32 index);
     void jsop_getprop_slow();