Bug 496674 - Replace FORALL macros with template functions and visitors, take 2, r=gal.
author     Graydon Hoare <graydon@mozilla.com>
date       Fri, 19 Jun 2009 18:48:05 -0700
changeset  29880 6d41b2eaaae16af67a9190a5e70f767792727b68
parent     29879 8f1fe0257cc83f95fc79c8ddbe10aa521d77d26b
child      29881 8ff867fdc1f4b8e4dfa1fe7e598bedd5ec7e1bf1
push id    7810
push user  rsayre@mozilla.com
push date  Tue, 30 Jun 2009 19:21:13 +0000
reviewers  gal
bugs       496674
milestone  1.9.2a1pre
js/src/jstracer.cpp
js/src/jstracer.h
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1140,203 +1140,266 @@ public:
             }
             if (s0->isCall() && s0->callInfo() == &js_UnboxDouble_ci)
                 return callArgN(s0, 0);
         }
         return out->insCall(ci, args);
     }
 };
 
-/* In debug mode vpname contains a textual description of the type of the
-   slot during the forall iteration over all slots. If JS_JIT_SPEW is not
-   defined, vpnum is set to a very large integer to catch invalid uses of
-   it. Non-debug code should never use vpnum. */
+template <typename Visitor>
+JS_REQUIRES_STACK static bool
+visitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp,
+                JSStackFrame *up) {
+
+    if (depth > 0 &&
+        !visitFrameSlots(visitor, depth-1, fp->down, fp))
+        return false;
+
+    if (fp->callee) {
+        if (depth == 0) {
+#ifdef JS_JIT_SPEW
+            visitor.setStackSlotKind("args");
+#endif
+            if (!visitor.visitStackSlots(&fp->argv[-2], argSlots(fp) + 2, fp))
+                return false;
+        }
+#ifdef JS_JIT_SPEW
+        visitor.setStackSlotKind("var");
+#endif
+        if (!visitor.visitStackSlots(fp->slots, fp->script->nfixed, fp))
+            return false;
+    }
+#ifdef JS_JIT_SPEW
+    visitor.setStackSlotKind("stack");
+#endif
+    JS_ASSERT(fp->regs->sp >= StackBase(fp));
+    if (!visitor.visitStackSlots(StackBase(fp),
+                                 size_t(fp->regs->sp - StackBase(fp)),
+                                 fp))
+        return false;
+    if (up) {
+        int missing = up->fun->nargs - up->argc;
+        if (missing > 0) {
 #ifdef JS_JIT_SPEW
-#define DEF_VPNAME          const char* vpname; unsigned vpnum
-#define SET_VPNAME(name)    do { vpname = name; vpnum = 0; } while(0)
-#define INC_VPNUM()         do { ++vpnum; } while(0)
-#else
-#define DEF_VPNAME          do {} while (0)
-#define vpname ""
-#define vpnum 0x40000000
-#define SET_VPNAME(name)    ((void)0)
-#define INC_VPNUM()         ((void)0)
-#endif
-
-/* Iterate over all interned global variables. */
-#define FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code)                        \
-    JS_BEGIN_MACRO                                                            \
-        DEF_VPNAME;                                                           \
-        JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);  \
-        unsigned n;                                                           \
-        jsval* vp;                                                            \
-        SET_VPNAME("global");                                                 \
-        for (n = 0; n < ngslots; ++n) {                                       \
-            vp = &STOBJ_GET_SLOT(globalObj, gslots[n]);                       \
-            { code; }                                                         \
-            INC_VPNUM();                                                      \
-        }                                                                     \
-    JS_END_MACRO
-
-/* Iterate over all slots in the frame, consisting of args, vars, and stack
-   (except for the top-level frame which does not have args or vars. */
-#define FORALL_FRAME_SLOTS(fp, depth, code)                                   \
-    JS_BEGIN_MACRO                                                            \
-        jsval* vp;                                                            \
-        jsval* vpstop;                                                        \
-        if (fp->callee) {                                                     \
-            if (depth == 0) {                                                 \
-                SET_VPNAME("callee");                                         \
-                vp = &fp->argv[-2];                                           \
-                { code; }                                                     \
-                SET_VPNAME("this");                                           \
-                vp = &fp->argv[-1];                                           \
-                { code; }                                                     \
-                SET_VPNAME("argv");                                           \
-                vp = &fp->argv[0]; vpstop = &fp->argv[argSlots(fp)];          \
-                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
-            }                                                                 \
-            SET_VPNAME("vars");                                               \
-            vp = fp->slots; vpstop = &fp->slots[fp->script->nfixed];          \
-            while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                  \
-        }                                                                     \
-        SET_VPNAME("stack");                                                  \
-        vp = StackBase(fp); vpstop = fp->regs->sp;                            \
-        while (vp < vpstop) { code; ++vp; INC_VPNUM(); }                      \
-        if (fsp < fspstop - 1) {                                              \
-            JSStackFrame* fp2 = fsp[1];                                       \
-            int missing = fp2->fun->nargs - fp2->argc;                        \
-            if (missing > 0) {                                                \
-                SET_VPNAME("missing");                                        \
-                vp = fp->regs->sp;                                            \
-                vpstop = vp + missing;                                        \
-                while (vp < vpstop) { code; ++vp; INC_VPNUM(); }              \
-            }                                                                 \
-        }                                                                     \
-    JS_END_MACRO
-
-/* Iterate over all slots in each pending frame. */
-#define FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code)                   \
-    JS_BEGIN_MACRO                                                            \
-        DEF_VPNAME;                                                           \
-        JSStackFrame* currentFrame = cx->fp;                                  \
-        JSStackFrame* entryFrame;                                             \
-        JSStackFrame* fp = currentFrame;                                      \
-        for (unsigned n = 0; n != callDepth; ++n) { fp = fp->down; }          \
-        entryFrame = fp;                                                      \
-        unsigned frames = callDepth+1;                                        \
-        JSStackFrame** fstack =                                               \
-            (JSStackFrame**) alloca(frames * sizeof (JSStackFrame*));         \
-        JSStackFrame** fspstop = &fstack[frames];                             \
-        JSStackFrame** fsp = fspstop-1;                                       \
-        fp = currentFrame;                                                    \
-        for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }  \
-        unsigned depth;                                                       \
-        for (depth = 0, fsp = fstack; fsp < fspstop; ++fsp, ++depth) {        \
-            fp = *fsp;                                                        \
-            FORALL_FRAME_SLOTS(fp, depth, code);                              \
-        }                                                                     \
-    JS_END_MACRO
-
-#define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
-    JS_BEGIN_MACRO                                                            \
-        FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
-        FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
-    JS_END_MACRO
+            visitor.setStackSlotKind("missing");
+#endif
+            if (!visitor.visitStackSlots(fp->regs->sp, size_t(missing), fp))
+                return false;
+        }
+    }
+    return true;
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE bool
+visitStackSlots(Visitor &visitor, JSContext *cx, unsigned callDepth) {
+    return visitFrameSlots(visitor, callDepth, cx->fp, NULL);
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitGlobalSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
+                 unsigned ngslots, uint16 *gslots) {
+    for (unsigned n = 0; n < ngslots; ++n) {
+        unsigned slot = gslots[n];
+        visitor.visitGlobalSlot(&STOBJ_GET_SLOT(globalObj, slot), n, slot);
+    }
+}
+
+class AdjustCallerTypeVisitor;
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitGlobalSlots(Visitor &visitor, JSContext *cx, SlotList &gslots) {
+    visitGlobalSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
+                     gslots.length(), gslots.data());
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitSlots(Visitor& visitor, JSContext* cx, JSObject* globalObj,
+           unsigned callDepth, unsigned ngslots, uint16* gslots) {
+    if (visitStackSlots(visitor, cx, callDepth))
+        visitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitSlots(Visitor& visitor, JSContext* cx, unsigned callDepth, 
+           unsigned ngslots, uint16* gslots) {
+    visitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
+               callDepth, ngslots, gslots);
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitSlots(Visitor &visitor, JSContext *cx, JSObject *globalObj,
+           unsigned callDepth, const SlotList& slots) {
+    visitSlots(visitor, cx, globalObj, callDepth, slots.length(),
+               slots.data());
+}
+
+template <typename Visitor>
+JS_REQUIRES_STACK static JS_ALWAYS_INLINE void
+visitSlots(Visitor &visitor, JSContext *cx, unsigned callDepth,
+           const SlotList& slots) {
+    visitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
+               callDepth, slots.length(), slots.data());
+}
+
+class SlotVisitorBase {
+protected:
+    char const *mStackSlotKind;
+public:
+    SlotVisitorBase() : 
+        mStackSlotKind(NULL)
+    {}
+#ifdef JS_JIT_SPEW
+    JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
+        mStackSlotKind = k;
+    }
+#endif
+};
+
+struct CountSlotsVisitor :
+    public SlotVisitorBase {
+    unsigned mCount;
+    bool mDone;
+    jsval* mStop;
+public:
+    JS_ALWAYS_INLINE CountSlotsVisitor(jsval* stop = NULL) :
+        mCount(0),
+        mDone(false),
+        mStop(stop)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        if (mDone)
+            return false;
+        if (mStop && size_t(mStop - vp) < count) {
+            mCount += size_t(mStop - vp);
+            mDone = true;
+            return false;
+        }
+        mCount += count;
+        return true;
+    }
+
+    JS_ALWAYS_INLINE unsigned count() {
+        return mCount;
+    }
+
+    JS_ALWAYS_INLINE bool stopped() {
+        return mDone;
+    }
+};
 
 /* Calculate the total number of native frame slots we need from this frame
    all the way back to the entry frame, including the current stack usage. */
 JS_REQUIRES_STACK unsigned
 js_NativeStackSlots(JSContext *cx, unsigned callDepth)
 {
     JSStackFrame* fp = cx->fp;
     unsigned slots = 0;
-#if defined _DEBUG
-    unsigned int origCallDepth = callDepth;
-#endif
+    unsigned depth = callDepth;
     for (;;) {
         unsigned operands = fp->regs->sp - StackBase(fp);
         slots += operands;
         if (fp->callee)
             slots += fp->script->nfixed;
-        if (callDepth-- == 0) {
+        if (depth-- == 0) {
             if (fp->callee)
                 slots += 2/*callee,this*/ + argSlots(fp);
-#if defined _DEBUG
-            unsigned int m = 0;
-            FORALL_SLOTS_IN_PENDING_FRAMES(cx, origCallDepth, m++);
-            JS_ASSERT(m == slots);
+#ifdef DEBUG
+            CountSlotsVisitor visitor;
+            visitStackSlots(visitor, cx, callDepth);
+            JS_ASSERT(visitor.count() == slots && !visitor.stopped());
 #endif
             return slots;
         }
         JSStackFrame* fp2 = fp;
         fp = fp->down;
         int missing = fp2->fun->nargs - fp2->argc;
         if (missing > 0)
             slots += missing;
     }
     JS_NOT_REACHED("js_NativeStackSlots");
 }
 
+class CaptureTypesVisitor :
+    public SlotVisitorBase {
+    JSContext* mCx;
+    uint8* mTypeMap;
+    uint8* mPtr;
+
+public:
+    JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, uint8* typeMap) :
+        mCx(cx),
+        mTypeMap(typeMap),
+        mPtr(typeMap)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        uint8 type = getCoercedType(*vp);
+        if ((type == JSVAL_INT) &&
+            oracle.isGlobalSlotUndemotable(mCx, slot))
+            type = JSVAL_DOUBLE;
+        JS_ASSERT(type != JSVAL_BOXED);
+        debug_only_v(nj_dprintf("capture type global%d: %d=%c\n",
+                                n, type, typeChar[type]);)
+        *mPtr++ = type;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
+        for (int i = 0; i < count; ++i) {
+            uint8 type = getCoercedType(vp[i]);
+            if ((type == JSVAL_INT) &&
+                oracle.isStackSlotUndemotable(mCx, length()))
+                type = JSVAL_DOUBLE;
+            JS_ASSERT(type != JSVAL_BOXED);
+            debug_only_v(nj_dprintf("capture type %s%d: %d=%c\n",
+                                    mStackSlotKind, i, type, typeChar[type]);)
+            *mPtr++ = type;
+        }
+        return true;
+    }
+
+    JS_ALWAYS_INLINE uintptr_t length() {
+        return mPtr - mTypeMap;
+    }
+};
+
 /*
  * Capture the type map for the selected slots of the global object and currently pending
  * stack frames.
  */
 JS_REQUIRES_STACK void
-TypeMap::captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth)
-{
-    unsigned ngslots = slots.length();
-    uint16* gslots = slots.data();
-    setLength(js_NativeStackSlots(cx, callDepth) + ngslots);
-    uint8* map = data();
-    uint8* m = map;
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        uint8 type = getCoercedType(*vp);
-        if ((type == JSVAL_INT) && oracle.isStackSlotUndemotable(cx, unsigned(m - map)))
-            type = JSVAL_DOUBLE;
-        JS_ASSERT(type != JSVAL_BOXED);
-        debug_only_v(nj_dprintf("capture stack type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
-        JS_ASSERT(uintptr_t(m - map) < length());
-        *m++ = type;
-    );
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        uint8 type = getCoercedType(*vp);
-        if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
-            type = JSVAL_DOUBLE;
-        JS_ASSERT(type != JSVAL_BOXED);
-        debug_only_v(nj_dprintf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
-        JS_ASSERT(uintptr_t(m - map) < length());
-        *m++ = type;
-    );
-    JS_ASSERT(uintptr_t(m - map) == length());
+TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth)
+{
+    setLength(js_NativeStackSlots(cx, callDepth) + slots.length());
+    CaptureTypesVisitor visitor(cx, data());
+    visitSlots(visitor, cx, globalObj, callDepth, slots);
+    JS_ASSERT(visitor.length() == length());
 }
 
 JS_REQUIRES_STACK void
-TypeMap::captureMissingGlobalTypes(JSContext* cx, SlotList& slots, unsigned stackSlots)
+TypeMap::captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned stackSlots)
 {
     unsigned oldSlots = length() - stackSlots;
     int diff = slots.length() - oldSlots;
     JS_ASSERT(diff >= 0);
-    unsigned ngslots = slots.length();
-    uint16* gslots = slots.data();
     setLength(length() + diff);
-    uint8* map = data() + stackSlots;
-    uint8* m = map;
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        if (n >= oldSlots) {
-            uint8 type = getCoercedType(*vp);
-            if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, gslots[n]))
-                type = JSVAL_DOUBLE;
-            JS_ASSERT(type != JSVAL_BOXED);
-            debug_only_v(nj_dprintf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
-            *m = type;
-            JS_ASSERT((m > map + oldSlots) || (*m == type));
-        }
-        m++;
-    );
+    CaptureTypesVisitor visitor(cx, data() + stackSlots + oldSlots);
+    visitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
 }
 
 /* Compare this type map to another one and see whether they match. */
 bool
 TypeMap::matches(TypeMap& other) const
 {
     if (length() != other.length())
         return false;
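
The templates and visitor classes in the hunk above keep the shape of the old FORALL macros: a driver walks the stack slots and interned global slots, and a visitor object supplies the per-slot body that the macros used to splice in textually, with a false return from visitStackSlots cutting the walk short the way CountSlotsVisitor's mDone flag does. A minimal, self-contained sketch of the pattern, with plain doubles and arrays standing in for the SpiderMonkey types (all names here are illustrative, not the real API):

    #include <cstddef>
    #include <cstdio>

    typedef double Slot;

    template <typename Visitor>
    void visitSlots(Visitor &visitor,
                    Slot *stack, size_t nstack,
                    Slot *globals, const unsigned *gslots, unsigned ngslots)
    {
        // Stack slots first; globals only if the stack walk was not cut
        // short, mirroring the visitSlots() driver in the patch.
        if (!visitor.visitStackSlots(stack, nstack))
            return;
        for (unsigned n = 0; n < ngslots; ++n)
            visitor.visitGlobalSlot(&globals[gslots[n]], n, gslots[n]);
    }

    // In the style of CountSlotsVisitor: tally slots, optionally stopping
    // at a sentinel address.
    struct CountVisitor {
        unsigned count;
        Slot *stop;
        explicit CountVisitor(Slot *stopAt = NULL) : count(0), stop(stopAt) {}

        bool visitStackSlots(Slot *vp, size_t n) {
            if (stop && size_t(stop - vp) < n) {
                count += unsigned(stop - vp);
                return false;               // walk cut short, like mDone
            }
            count += unsigned(n);
            return true;
        }
        void visitGlobalSlot(Slot *, unsigned, unsigned) { ++count; }
    };

    int main()
    {
        Slot stack[4] = {1, 2, 3, 4};
        Slot globals[8] = {0};
        unsigned gslots[2] = {3, 5};        // interned global slot numbers

        CountVisitor all;
        visitSlots(all, stack, 4, globals, gslots, 2);
        printf("all slots: %u\n", all.count);           // 6

        CountVisitor upTo(&stack[2]);
        visitSlots(upTo, stack, 4, globals, gslots, 2);
        printf("slots before stop: %u\n", upTo.count);  // 2
        return 0;
    }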
@@ -1353,33 +1416,33 @@ mergeTypeMaps(uint8** partial, unsigned*
     memcpy(mem, *partial, l * sizeof(uint8));
     memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
     *partial = mem;
     *plength = clength;
 }
 
 /* Specializes a tree to any missing globals, including any dependent trees. */
 static JS_REQUIRES_STACK void
-specializeTreesToMissingGlobals(JSContext* cx, TreeInfo* root)
+specializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeInfo* root)
 {
     TreeInfo* ti = root;
 
-    ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
+    ti->typeMap.captureMissingGlobalTypes(cx, globalObj, *ti->globalSlots, ti->nStackTypes);
     JS_ASSERT(ti->globalSlots->length() == ti->typeMap.length() - ti->nStackTypes);
 
     for (unsigned i = 0; i < root->dependentTrees.length(); i++) {
         ti = (TreeInfo*)root->dependentTrees.data()[i]->vmprivate;
         /* ti can be NULL if we hit the recording tree in emitTreeCall; this is harmless. */
         if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
-            specializeTreesToMissingGlobals(cx, ti);
+            specializeTreesToMissingGlobals(cx, globalObj, ti);
     }
     for (unsigned i = 0; i < root->linkedTrees.length(); i++) {
         ti = (TreeInfo*)root->linkedTrees.data()[i]->vmprivate;
         if (ti && ti->nGlobalTypes() < ti->globalSlots->length())
-            specializeTreesToMissingGlobals(cx, ti);
+            specializeTreesToMissingGlobals(cx, globalObj, ti);
     }
 }
 
 static void
 js_TrashTree(JSContext* cx, Fragment* f);
 
 JS_REQUIRES_STACK
 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
@@ -1434,24 +1497,24 @@ TraceRecorder::TraceRecorder(JSContext* 
     lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
     lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
     eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
     eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
 
     /* If we came from exit, we might not have enough global types. */
     if (ti->globalSlots->length() > ti->nGlobalTypes())
-        specializeTreesToMissingGlobals(cx, ti);
+        specializeTreesToMissingGlobals(cx, globalObj, ti);
 
     /* read into registers all values on the stack and all globals we know so far */
     import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
 
     if (fragment == fragment->root) {
         /*
-         * We poll the operation callback request flag. It is updated asynchronously whenever 
+         * We poll the operation callback request flag. It is updated asynchronously whenever
          * the callback is to be invoked.
          */
         LIns* x = lir->insLoadi(cx_ins, offsetof(JSContext, operationCallbackFlag));
         guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
     }
 
     /* If we are attached to a tree call guard, make sure the guard the inner tree exited from
        is what we expect it to be. */
@@ -1557,82 +1620,28 @@ TraceRecorder::isGlobal(jsval* p) const
     return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
             (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
 }
 
 /* Determine the offset in the native stack for a jsval we track */
 JS_REQUIRES_STACK ptrdiff_t
 TraceRecorder::nativeStackOffset(jsval* p) const
 {
-#ifdef DEBUG
-    size_t slow_offset = 0;
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        if (vp == p) goto done;
-        slow_offset += sizeof(double)
-    );
-
+    CountSlotsVisitor visitor(p);
+    visitStackSlots(visitor, cx, callDepth);
+    size_t offset = visitor.count() * sizeof(double);
     /*
      * If it's not in a pending frame, it must be on the stack of the current frame above
      * sp but below fp->slots + script->nslots.
      */
-    JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
-    slow_offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
-
-done:
-#define RETURN(offset) { JS_ASSERT((offset) == slow_offset); return offset; }
-#else
-#define RETURN(offset) { return offset; }
-#endif
-    size_t offset = 0;
-    JSStackFrame* currentFrame = cx->fp;
-    JSStackFrame* entryFrame;
-    JSStackFrame* fp = currentFrame;
-    for (unsigned n = 0; n < callDepth; ++n) { fp = fp->down; }
-    entryFrame = fp;
-    unsigned frames = callDepth+1;
-    JSStackFrame** fstack = (JSStackFrame **)alloca(frames * sizeof (JSStackFrame *));
-    JSStackFrame** fspstop = &fstack[frames];
-    JSStackFrame** fsp = fspstop-1;
-    fp = currentFrame;
-    for (;; fp = fp->down) { *fsp-- = fp; if (fp == entryFrame) break; }
-    for (fsp = fstack; fsp < fspstop; ++fsp) {
-        fp = *fsp;
-        if (fp->callee) {
-            if (fsp == fstack) {
-                if (size_t(p - &fp->argv[-2]) < size_t(2/*callee,this*/ + argSlots(fp)))
-                    RETURN(offset + size_t(p - &fp->argv[-2]) * sizeof(double));
-                offset += (2/*callee,this*/ + argSlots(fp)) * sizeof(double);
-            }
-            if (size_t(p - &fp->slots[0]) < fp->script->nfixed)
-                RETURN(offset + size_t(p - &fp->slots[0]) * sizeof(double));
-            offset += fp->script->nfixed * sizeof(double);
-        }
-        jsval* spbase = StackBase(fp);
-        if (size_t(p - spbase) < size_t(fp->regs->sp - spbase))
-            RETURN(offset + size_t(p - spbase) * sizeof(double));
-        offset += size_t(fp->regs->sp - spbase) * sizeof(double);
-        if (fsp < fspstop - 1) {
-            JSStackFrame* fp2 = fsp[1];
-            int missing = fp2->fun->nargs - fp2->argc;
-            if (missing > 0) {
-                if (size_t(p - fp->regs->sp) < size_t(missing))
-                    RETURN(offset + size_t(p - fp->regs->sp) * sizeof(double));
-                offset += size_t(missing) * sizeof(double);
-            }
-        }
-    }
-
-    /*
-     * If it's not in a pending frame, it must be on the stack of the current frame above
-     * sp but below fp->slots + script->nslots.
-     */
-    JS_ASSERT(size_t(p - currentFrame->slots) < currentFrame->script->nslots);
-    offset += size_t(p - currentFrame->regs->sp) * sizeof(double);
-    RETURN(offset);
-#undef RETURN
+    if (!visitor.stopped()) {
+        JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
+        offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
+    }
+    return offset;
 }
 
 /* Track the maximum number of native frame slots we need during
    execution. */
 void
 TraceRecorder::trackNativeStackUse(unsigned slots)
 {
     if (slots > treeInfo->maxNativeStackSlots)
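
nativeStackOffset, rewritten in the hunk above, reduces the old hand-unrolled frame walk to one CountSlotsVisitor pass: the byte offset of a tracked value is the number of slots visited before it, times sizeof(double), since every slot is unboxed to a double on the native stack. A toy check of that arithmetic, under the same stand-in assumptions as the sketch above:

    #include <cstdio>

    int main()
    {
        double stack[6] = {0};
        double *p = &stack[4];                       // the tracked value being located
        unsigned slotsBefore = unsigned(p - stack);  // what CountSlotsVisitor::count() yields
        unsigned long offset =
            (unsigned long)(slotsBefore * sizeof(double));
        printf("offset = %lu bytes\n", offset);      // 32 where doubles are 8 bytes
        return 0;
    }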
@@ -1849,53 +1858,125 @@ NativeToValue(JSContext* cx, jsval& v, u
                                 ? JS_GetStringBytes(ATOM_TO_STRING(fun->atom))
                                 : "unnamed");)
 #endif
         break;
       }
     }
 }
 
-/* Attempt to unbox the given list of interned globals onto the native global frame. */
+class BuildNativeFrameVisitor :
+    public SlotVisitorBase {
+    JSContext *mCx;
+    uint8 *mTypeMap;
+    double *mGlobal;
+    double *mStack;
+public:
+    BuildNativeFrameVisitor(JSContext *cx,
+                            uint8 *typemap,
+                            double *global,
+                            double *stack) :
+        mCx(cx),
+        mTypeMap(typemap),
+        mGlobal(global),
+        mStack(stack)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        debug_only_v(nj_dprintf("global%d: ", n);)
+        ValueToNative(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, int count, JSStackFrame* fp) {
+        for (int i = 0; i < count; ++i) {
+            debug_only_v(nj_dprintf("%s%d: ", mStackSlotKind, i);)
+            ValueToNative(mCx, *vp++, *mTypeMap++, mStack++);
+        }
+        return true;
+    }
+};
+
 static JS_REQUIRES_STACK void
-BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
-{
-    debug_only_v(nj_dprintf("global: ");)
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        ValueToNative(cx, *vp, *mp, np + gslots[n]);
-        ++mp;
-    );
+BuildNativeFrame(JSContext *cx, JSObject *globalObj, unsigned callDepth,
+                 unsigned ngslots, uint16 *gslots,
+                 uint8 *typeMap, double *global, double *stack)
+{
+    BuildNativeFrameVisitor visitor(cx, typeMap, global, stack);
+    visitSlots(visitor, cx, globalObj, callDepth, ngslots, gslots);
     debug_only_v(nj_dprintf("\n");)
 }
 
-/* Attempt to unbox the given JS frame onto a native frame. */
-static JS_REQUIRES_STACK void
-BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
-{
-    debug_only_v(nj_dprintf("stack: ");)
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        debug_only_v(nj_dprintf("%s%u=", vpname, vpnum);)
-        ValueToNative(cx, *vp, *mp, np);
-        ++mp; ++np;
-    );
-    debug_only_v(nj_dprintf("\n");)
-}
+
+class FlushNativeGlobalFrameVisitor :
+    public SlotVisitorBase {
+    JSContext *mCx;
+    uint8 *mTypeMap;
+    double *mGlobal;
+public:
+    FlushNativeGlobalFrameVisitor(JSContext *cx,
+                                  uint8 *typeMap,
+                                  double *global) :
+        mCx(cx),
+        mTypeMap(typeMap),
+        mGlobal(global)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        debug_only_v(nj_dprintf("global%d=", n);)
+        NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
+    }
+};
+
+class FlushNativeStackFrameVisitor :
+    public SlotVisitorBase {
+    JSContext *mCx;
+    uint8 *mTypeMap;
+    double *mStack;
+    jsval *mStop;
+public:
+    FlushNativeStackFrameVisitor(JSContext *cx,
+                                 uint8 *typeMap,
+                                 double *stack,
+                                 jsval *stop) :
+        mCx(cx),
+        mTypeMap(typeMap),
+        mStack(stack),
+        mStop(stop)
+    {}
+
+    uint8* getTypeMap()
+    {
+        return mTypeMap;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            if (vp == mStop)
+                return false;
+            debug_only_v(nj_dprintf("%s%u=", mStackSlotKind, i);)
+            NativeToValue(mCx, *vp++, *mTypeMap++, mStack++);
+        }
+        return true;
+    }
+};
 
 /* Box the given native frame into a JS frame. This is infallible. */
-static JS_REQUIRES_STACK int
-FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
-{
-    uint8* mp_base = mp;
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        debug_only_v(nj_dprintf("%s%u=", vpname, vpnum);)
-        NativeToValue(cx, *vp, *mp, np + gslots[n]);
-        ++mp;
-    );
+static JS_REQUIRES_STACK void
+FlushNativeGlobalFrame(JSContext *cx, double *global, unsigned ngslots,
+                       uint16 *gslots, uint8 *typemap)
+{
+    FlushNativeGlobalFrameVisitor visitor(cx, typemap, global);
+    JSObject *globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
+    visitGlobalSlots(visitor, cx, globalObj, ngslots, gslots);
     debug_only_v(nj_dprintf("\n");)
-    return mp - mp_base;
 }
 
 /*
  * Generic function to read upvars on trace.
  *     T   Traits type parameter. Must provide static functions:
  *             interp_get(fp, slot)     Read the value out of an interpreter frame.
  *             native_slot(argc, slot)  Return the position of the desired value in the on-trace
  *                                      stack frame (with position 0 being callee).
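
A small stand-alone illustration of the traits contract described in the comment above; Frame and InterpTraits here are hypothetical stand-ins, not the real JSStackFrame or the on-trace stack layout:

    #include <cstdio>

    struct Frame { int vals[4]; };        // hypothetical interpreter frame

    // Hypothetical traits type satisfying the contract above.
    struct InterpTraits {
        static int interp_get(Frame *fp, unsigned slot) { return fp->vals[slot]; }
        static unsigned native_slot(unsigned /*argc*/, unsigned slot) { return slot; }
    };

    // Generic reader: pick the interpreter frame or the native stack
    // depending on where the value currently lives.
    template <typename T>
    int getUpvar(Frame *fp, const int *nativeStack, bool onTrace,
                 unsigned argc, unsigned slot)
    {
        if (onTrace)
            return nativeStack[T::native_slot(argc, slot)];
        return T::interp_get(fp, slot);
    }

    int main()
    {
        Frame f = {{10, 20, 30, 40}};
        int native[4] = {100, 200, 300, 400};
        printf("%d %d\n",
               getUpvar<InterpTraits>(&f, native, false, 0, 2),  // 30
               getUpvar<InterpTraits>(&f, native, true, 0, 2));  // 300
        return 0;
    }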
@@ -2031,25 +2112,21 @@ js_GetUpvarStackOnTrace(JSContext* cx, u
  *                  be restored.
  * @return the number of things we popped off of np.
  */
 static JS_REQUIRES_STACK int
 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
                       JSStackFrame* stopFrame)
 {
     jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
-    uint8* mp_base = mp;
+
     /* Root all string and object references first (we don't need to call the GC for this). */
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        if (vp == stopAt) goto skip;
-        debug_only_v(nj_dprintf("%s%u=", vpname, vpnum);)
-        NativeToValue(cx, *vp, *mp, np);
-        ++mp; ++np
-    );
-skip:
+    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt);
+    visitStackSlots(visitor, cx, callDepth);
+
     // Restore thisp from the now-restored argv[-1] in each pending frame.
     // Keep in mind that we didn't restore frames at stopFrame and above!
     // Scope to keep |fp| from leaking into the macros we're using.
     {
         unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
         JSStackFrame* fp = cx->fp;
         if (stopFrame) {
             for (; fp != stopFrame; fp = fp->down) {
@@ -2105,17 +2182,17 @@ skip:
                 }
                 fp->thisp = JSVAL_TO_OBJECT(fp->argv[-1]);
                 if (fp->flags & JSFRAME_CONSTRUCTING) // constructors always compute 'this'
                     fp->flags |= JSFRAME_COMPUTED_THIS;
             }
         }
     }
     debug_only_v(nj_dprintf("\n");)
-    return mp - mp_base;
+    return visitor.getTypeMap() - mp;
 }
 
 /* Emit load instructions onto the trace that read the initial stack state. */
 JS_REQUIRES_STACK void
 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8 t,
                       const char *prefix, uintN index, JSStackFrame *fp)
 {
     LIns* ins;
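
One structural point in the flush visitors above: the old macros tracked progress with a local mp_base cursor, whereas FlushNativeStackFrameVisitor carries its cursor internally and the caller reads it back through getTypeMap(). A minimal sketch of that idiom, again with stand-in types:

    #include <cstddef>
    #include <cstdio>

    // Stand-in flush visitor: advance a cursor per slot written; the
    // caller derives the processed-slot count from the cursor afterwards.
    struct FlushVisitor {
        unsigned char *typeMap;           // cursor into the type map
        double *stop;                     // optional early-out sentinel
        FlushVisitor(unsigned char *tm, double *stopAt)
          : typeMap(tm), stop(stopAt) {}

        bool visitStackSlots(double *vp, size_t count) {
            for (size_t i = 0; i < count; ++i) {
                if (vp == stop)
                    return false;         // stopped early; cursor shows progress
                ++vp;
                ++typeMap;                // one type byte consumed per slot
            }
            return true;
        }
        unsigned char *getTypeMap() { return typeMap; }
    };

    int main()
    {
        unsigned char types[8] = {0};
        double slots[8] = {0};
        FlushVisitor v(types, &slots[5]);
        v.visitStackSlots(slots, 8);
        printf("flushed %ld slots\n", long(v.getTypeMap() - types));  // 5
        return 0;
    }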
@@ -2173,16 +2250,111 @@ TraceRecorder::import(LIns* base, ptrdif
     static const char* typestr[] = {
         "object", "int", "double", "boxed", "string", "null", "boolean", "function"
     };
     debug_only_v(nj_dprintf("import vp=%p name=%s type=%s flags=%d\n",
                             (void*)p, name, typestr[t & 7], t >> 3);)
 #endif
 }
 
+class ImportGlobalSlotVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    LIns *mBase;
+    uint8 *mTypemap;
+public:
+    ImportGlobalSlotVisitor(TraceRecorder &recorder,
+                            LIns *base,
+                            uint8 *typemap) :
+        mRecorder(recorder),
+        mBase(base),
+        mTypemap(typemap)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        JS_ASSERT(*mTypemap != JSVAL_BOXED);
+        mRecorder.import(mBase, mRecorder.nativeGlobalOffset(vp),
+                         vp, *mTypemap++, "global", n, NULL);
+    }
+};
+
+class ImportBoxedStackSlotVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    LIns *mBase;
+    ptrdiff_t mStackOffset;
+    uint8 *mTypemap;
+    JSStackFrame *mFp;
+public:
+    ImportBoxedStackSlotVisitor(TraceRecorder &recorder,
+                                LIns *base,
+                                ptrdiff_t stackOffset,
+                                uint8 *typemap) :
+        mRecorder(recorder),
+        mBase(base),
+        mStackOffset(stackOffset),
+        mTypemap(typemap)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            if (*mTypemap == JSVAL_BOXED) {
+                mRecorder.import(mBase, mStackOffset, vp, JSVAL_BOXED,
+                                 "boxed", i, fp);
+                LIns *vp_ins = mRecorder.get(vp);
+                mRecorder.unbox_jsval(*vp, vp_ins,
+                                      mRecorder.copy(mRecorder.anchor));
+                mRecorder.set(vp, vp_ins);
+            }
+            vp++;
+            mTypemap++;
+            mStackOffset += sizeof(double);
+        }
+        return true;
+    }
+};
+
+class ImportUnboxedStackSlotVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    LIns *mBase;
+    ptrdiff_t mStackOffset;
+    uint8 *mTypemap;
+    JSStackFrame *mFp;
+public:
+    ImportUnboxedStackSlotVisitor(TraceRecorder &recorder,
+                                  LIns *base,
+                                  ptrdiff_t stackOffset,
+                                  uint8 *typemap) :
+        mRecorder(recorder),
+        mBase(base),
+        mStackOffset(stackOffset),
+        mTypemap(typemap)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            if (*mTypemap != JSVAL_BOXED) {
+                mRecorder.import(mBase, mStackOffset, vp++, *mTypemap,
+                                 mStackSlotKind, i, fp);
+            }
+            mTypemap++;
+            mStackOffset += sizeof(double);
+        }
+        return true;
+    }
+};
+
 JS_REQUIRES_STACK void
 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned stackSlots, unsigned ngslots,
                       unsigned callDepth, uint8* typeMap)
 {
     /* If we get a partial list that doesn't have all the types (i.e. recording from a side
        exit that was recorded but we added more global slots later), merge the missing types
        from the entry type map. This is safe because at the loop edge we verify that we
        have compatible types for all globals (entry type and loop edge type match). While
@@ -2201,50 +2373,33 @@ TraceRecorder::import(TreeInfo* treeInfo
      * global type map.
      */
     if (ngslots < length) {
         mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/,
                       treeInfo->globalTypeMap(), length,
                       (uint8*)alloca(sizeof(uint8) * length));
     }
     JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
+    ptrdiff_t offset = -treeInfo->nativeStackBase;
 
     /*
-     * Check whether there are any values on the stack we have to unbox and do that first
-     * before we waste any time fetching the state from the stack.
+     * Check whether there are any values on the stack we have to unbox and
+     * do that first before we waste any time fetching the state from the
+     * stack.
      */
-    ptrdiff_t offset = -treeInfo->nativeStackBase;
-    uint8* m = typeMap;
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        if (*m == JSVAL_BOXED) {
-            import(sp, offset, vp, JSVAL_BOXED, "boxed", vpnum, cx->fp);
-            LIns* vp_ins = get(vp);
-            unbox_jsval(*vp, vp_ins, copy(anchor));
-            set(vp, vp_ins);
-        }
-        m++; offset += sizeof(double);
-    );
-
-    /*
-     * The first time we compile a tree this will be empty as we add entries lazily.
-     */
-    uint16* gslots = treeInfo->globalSlots->data();
-    m = globalTypeMap;
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        JS_ASSERT(*m != JSVAL_BOXED);
-        import(lirbuf->state, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
-        m++;
-    );
-    offset = -treeInfo->nativeStackBase;
-    m = typeMap;
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
-        if (*m != JSVAL_BOXED)
-            import(sp, offset, vp, *m, vpname, vpnum, fp);
-        m++; offset += sizeof(double);
-    );
+    ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, offset, typeMap);
+    visitStackSlots(boxedStackVisitor, cx, callDepth);
+
+    ImportGlobalSlotVisitor globalVisitor(*this, lirbuf->state, globalTypeMap);
+    visitGlobalSlots(globalVisitor, cx, globalObj, ngslots,
+                     treeInfo->globalSlots->data());
+
+    ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset,
+                                                      typeMap);
+    visitStackSlots(unboxedStackVisitor, cx, callDepth);
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
 {
     uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
 
     if (setflags) {
@@ -2283,17 +2438,17 @@ TraceRecorder::lazilyImportGlobalSlot(un
     JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
     treeInfo->globalSlots->add(slot);
     uint8 type = getCoercedType(*vp);
     if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot))
         type = JSVAL_DOUBLE;
     treeInfo->typeMap.add(type);
     import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
            vp, type, "global", index, NULL);
-    specializeTreesToMissingGlobals(cx, treeInfo);
+    specializeTreesToMissingGlobals(cx, globalObj, treeInfo);
     return true;
 }
 
 /* Write back a value onto the stack or global frames. */
 LIns*
 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset)
 {
     /* Sink all type casts targeting the stack into the side exit by simply storing the original
@@ -2387,57 +2542,116 @@ js_IsLoopEdge(jsbytecode* pc, jsbytecode
         return ((pc + GET_JUMPX_OFFSET(pc)) == header);
       default:
         JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) ||
                   (*pc == JSOP_OR) || (*pc == JSOP_ORX));
     }
     return false;
 }
 
+class AdjustCallerGlobalTypesVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    JSContext *mCx;
+    nanojit::LirBuffer *mLirbuf;
+    nanojit::LirWriter *mLir;
+    uint8 *mTypeMap;
+public:
+    AdjustCallerGlobalTypesVisitor(TraceRecorder &recorder,
+                                   uint8 *typeMap) :
+        mRecorder(recorder),
+        mCx(mRecorder.cx),
+        mLirbuf(mRecorder.lirbuf),
+        mLir(mRecorder.lir),
+        mTypeMap(typeMap)
+    {}
+
+    uint8* getTypeMap()
+    {
+        return mTypeMap;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        LIns *ins = mRecorder.get(vp);
+        bool isPromote = isPromoteInt(ins);
+        if (isPromote && *mTypeMap == JSVAL_DOUBLE) {
+            mLir->insStorei(mRecorder.get(vp), mLirbuf->state,
+                            mRecorder.nativeGlobalOffset(vp));
+            /* Aggressively undo speculation so the inner tree will compile
+               if this fails. */
+            oracle.markGlobalSlotUndemotable(mCx, slot);
+        }
+        JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_INT));
+        ++mTypeMap;
+    }
+};
+
+class AdjustCallerStackTypesVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    JSContext *mCx;
+    nanojit::LirBuffer *mLirbuf;
+    nanojit::LirWriter *mLir;
+    unsigned mSlotnum;
+    uint8 *mTypeMap;
+public:
+    AdjustCallerStackTypesVisitor(TraceRecorder &recorder,
+                                  uint8 *typeMap) :
+        mRecorder(recorder),
+        mCx(mRecorder.cx),
+        mLirbuf(mRecorder.lirbuf),
+        mLir(mRecorder.lir),
+        mSlotnum(0),
+        mTypeMap(typeMap)
+    {}
+
+    uint8* getTypeMap()
+    {
+        return mTypeMap;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            LIns *ins = mRecorder.get(vp);
+            bool isPromote = isPromoteInt(ins);
+            if (isPromote && *mTypeMap == JSVAL_DOUBLE) {
+                mLir->insStorei(mRecorder.get(vp), mLirbuf->sp,
+                                -mRecorder.treeInfo->nativeStackBase + 
+                                mRecorder.nativeStackOffset(vp));
+                /* Aggressively undo speculation so the inner tree will
+                   compile if this fails. */
+                oracle.markStackSlotUndemotable(mCx, mSlotnum);
+            }
+            JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_INT));
+            ++vp;
+            ++mTypeMap;
+            ++mSlotnum;
+        }
+        return true;
+    }
+};
+
 /*
  * Promote slots if necessary to match the called tree's type map. This function is
  * infallible and must only be called if we are certain that it is possible to
  * reconcile the types for each slot in the inner and outer trees.
  */
 JS_REQUIRES_STACK void
 TraceRecorder::adjustCallerTypes(Fragment* f)
 {
-    uint16* gslots = treeInfo->globalSlots->data();
-    unsigned ngslots = treeInfo->globalSlots->length();
-    JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
-    uint8* map = ti->globalTypeMap();
-    uint8* m = map;
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        LIns* i = get(vp);
-        bool isPromote = isPromoteInt(i);
-        if (isPromote && *m == JSVAL_DOUBLE) {
-            lir->insStorei(get(vp), lirbuf->state, nativeGlobalOffset(vp));
-            /* Aggressively undo speculation so the inner tree will compile if this fails. */
-            oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-        }
-        JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
-        ++m;
-    );
-    JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
-    map = ti->stackTypeMap();
-    m = map;
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
-        LIns* i = get(vp);
-        bool isPromote = isPromoteInt(i);
-        if (isPromote && *m == JSVAL_DOUBLE) {
-            lir->insStorei(get(vp), lirbuf->sp,
-                           -treeInfo->nativeStackBase + nativeStackOffset(vp));
-            /* Aggressively undo speculation so the inner tree will compile if this fails. */
-            oracle.markStackSlotUndemotable(cx, unsigned(m - map));
-        }
-        JS_ASSERT(!(!isPromote && *m == JSVAL_INT));
-        ++m;
-    );
-    JS_ASSERT(unsigned(m - map) == ti->nStackTypes);
+
+    AdjustCallerGlobalTypesVisitor globalVisitor(*this, ti->globalTypeMap());
+    visitGlobalSlots(globalVisitor, cx, *treeInfo->globalSlots);
+
+    AdjustCallerStackTypesVisitor stackVisitor(*this, ti->stackTypeMap());
+    visitStackSlots(stackVisitor, cx, 0);
+
     JS_ASSERT(f == f->root);
 }
 
 JS_REQUIRES_STACK uint8
 TraceRecorder::determineSlotType(jsval* vp)
 {
     uint8 m;
     LIns* i = get(vp);
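
The two AdjustCaller*TypesVisitor classes above apply the same speculation-undo rule in both walks: if the outer tree still holds a slot as a promoted int where the inner tree's type map says double, store the widened value back and mark the slot undemotable in the oracle. A conceptual sketch of that rule with stand-in types (not the recorder or LIR API):

    #include <cstdio>
    #include <set>

    enum SlotType { TYPE_INT, TYPE_DOUBLE };

    static std::set<unsigned> undemotable;   // stand-in for the oracle

    // If the caller holds the slot as an int but the callee tree expects
    // a double, write back the widened value and mark the slot so later
    // recordings stop narrowing it.
    void adjustSlot(double *nativeSlot, int currentInt, SlotType expected,
                    bool heldAsInt, unsigned slotIndex)
    {
        if (heldAsInt && expected == TYPE_DOUBLE) {
            *nativeSlot = double(currentInt);
            undemotable.insert(slotIndex);
        }
    }

    int main()
    {
        double native = 0;
        adjustSlot(&native, 42, TYPE_DOUBLE, true, 7);
        printf("%g, slot 7 undemotable: %d\n",
               native, int(undemotable.count(7)));   // 42, 1
        return 0;
    }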
@@ -2452,16 +2666,46 @@ TraceRecorder::determineSlotType(jsval* 
             m = JSVAL_OBJECT;
     } else {
         m = JSVAL_TAG(*vp);
     }
     JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
     return m;
 }
 
+class DetermineTypesVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    uint8 *mTypeMap;
+public:
+    DetermineTypesVisitor(TraceRecorder &recorder,
+                          uint8 *typeMap) :
+        mRecorder(recorder),
+        mTypeMap(typeMap)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        *mTypeMap++ = mRecorder.determineSlotType(vp);
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i)
+            *mTypeMap++ = mRecorder.determineSlotType(vp++);
+        return true;
+    }
+
+    uint8* getTypeMap()
+    {
+        return mTypeMap;
+    }
+};
+
 JS_REQUIRES_STACK VMSideExit*
 TraceRecorder::snapshot(ExitType exitType)
 {
     JSStackFrame* fp = cx->fp;
     JSFrameRegs* regs = fp->regs;
     jsbytecode* pc = regs->pc;
 
     /* Check for a return-value opcode that needs to restart at the next instruction. */
@@ -2487,25 +2731,28 @@ TraceRecorder::snapshot(ExitType exitTyp
     /* It's sufficient to track the native stack use here since all stores above the
        stack watermark defined by guards are killed. */
     trackNativeStackUse(stackSlots + 1);
 
     /* Capture the type map into a temporary location. */
     unsigned ngslots = treeInfo->globalSlots->length();
     unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);
     uint8* typemap = (uint8*)alloca(typemap_size);
-    uint8* m = typemap;
-
-    /* Determine the type of a store by looking at the current type of the actual value the
-       interpreter is using. For numbers we have to check what kind of store we used last
-       (integer or double) to figure out what the side exit show reflect in its typemap. */
-    FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
-        *m++ = determineSlotType(vp);
-    );
-    JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
+
+    /* 
+     * Determine the type of a store by looking at the current type of the
+     * actual value the interpreter is using. For numbers we have to check
+     * what kind of store we used last (integer or double) to figure out
+     * what the side exit should reflect in its typemap.
+     */
+    DetermineTypesVisitor detVisitor(*this, typemap);
+    visitSlots(detVisitor, cx, callDepth, ngslots,
+               treeInfo->globalSlots->data());
+    JS_ASSERT(unsigned(detVisitor.getTypeMap() - typemap) == 
+              ngslots + stackSlots);
 
     /*
      * If we are currently executing a traceable native or we are attaching a second trace
      * to it, the value on top of the stack is boxed. Make a note of this in the typemap.
      */
     if (pendingTraceableNative && (pendingTraceableNative->flags & JSTN_UNBOX_AFTER))
         typemap[stackSlots - 1] = JSVAL_BOXED;
 
@@ -2725,33 +2972,225 @@ TraceRecorder::checkType(jsval& v, uint8
                                 typeChar[t]);)
     }
 #endif
     debug_only_v(nj_dprintf("checkType(vt=%d, t=%d) stage_count=%d\n",
                             (int) vt, t, stage_count);)
     return vt == t;
 }
 
+class SelfTypeStabilityVisitor :
+    public SlotVisitorBase {
+
+    TraceRecorder &mRecorder;
+    uint8 *mTypeMap;
+    JSContext *mCx;
+    bool &mDemote;
+    jsval **&mStageVals;
+    LIns **&mStageIns;
+    unsigned &mStageCount;
+    unsigned mStackSlotNum;
+    bool mOk;
+
+public:
+
+    SelfTypeStabilityVisitor(TraceRecorder &recorder,
+                             uint8 *typeMap,
+                             bool &demote,
+                             jsval **&stageVals,
+                             LIns **&stageIns,
+                             unsigned &stageCount) :
+        mRecorder(recorder),
+        mTypeMap(typeMap),
+        mCx(mRecorder.cx),
+        mDemote(demote),
+        mStageVals(stageVals),
+        mStageIns(stageIns),
+        mStageCount(stageCount),
+        mStackSlotNum(0),
+        mOk(true)        
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        if (mOk) {
+            debug_only_v(nj_dprintf("global%d ", n);)
+            if (!mRecorder.checkType(*vp, *mTypeMap,
+                                     mStageVals[mStageCount], 
+                                     mStageIns[mStageCount], 
+                                     mStageCount)) {
+                /* If the failure was an int->double, tell the oracle. */
+                if (*mTypeMap == JSVAL_INT && isNumber(*vp) &&
+                    !isPromoteInt(mRecorder.get(vp))) {
+                    oracle.markGlobalSlotUndemotable(mCx, slot);
+                    mDemote = true;
+                } else {
+                    mOk = false;
+                }
+            }
+            mTypeMap++;
+        }
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            debug_only_v(nj_dprintf("%s%d ", mStackSlotKind, i);)
+            if (!mRecorder.checkType(*vp, *mTypeMap,
+                                     mStageVals[mStageCount], 
+                                     mStageIns[mStageCount],
+                                     mStageCount)) {
+                if (*mTypeMap == JSVAL_INT && isNumber(*vp) &&
+                    !isPromoteInt(mRecorder.get(vp))) {
+                    oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
+                    mDemote = true;
+                } else {
+                    mOk = false;
+                    break;
+                }
+            }
+            vp++;
+            mTypeMap++;
+            mStackSlotNum++;
+        }
+        return mOk;
+    }
+
+    bool isOk() {
+        return mOk;
+    }
+};
+
+class PeerTypeStabilityVisitor :
+    public SlotVisitorBase {
+
+    TraceRecorder &mRecorder;
+    uint8 *mTypeMap;
+    jsval **&mStageVals;
+    LIns **&mStageIns;
+    unsigned &mStageCount;
+    bool mOk;
+
+public:
+
+    PeerTypeStabilityVisitor(TraceRecorder &recorder,
+                              uint8 *typeMap,
+                              jsval **&stageVals,
+                              LIns **&stageIns,
+                              unsigned &stageCount) :
+        mRecorder(recorder),
+        mTypeMap(typeMap),
+        mStageVals(stageVals),
+        mStageIns(stageIns),
+        mStageCount(stageCount),
+        mOk(true)        
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void check(jsval *vp) {
+        if (!mRecorder.checkType(*vp, *mTypeMap++,
+                                 mStageVals[mStageCount],
+                                 mStageIns[mStageCount],
+                                 mStageCount))
+            mOk = false;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        if (mOk)
+            check(vp);
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            check(vp++);
+            if (!mOk)
+                break;
+        }
+        return mOk;
+    }
+
+    bool isOk() {
+        return mOk;
+    }
+};
+
+class UndemoteVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    JSContext *mCx;
+    uint8 *mTypeMap;
+    unsigned mStackSlotNum;
+public:
+    UndemoteVisitor(TraceRecorder &recorder,
+                    uint8 *typeMap) :
+        mRecorder(recorder),
+        mCx(mRecorder.cx),
+        mTypeMap(typeMap),
+        mStackSlotNum(0)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        if (*mTypeMap == JSVAL_INT) {
+            JS_ASSERT(isNumber(*vp));
+            if (!isPromoteInt(mRecorder.get(vp)))
+                oracle.markGlobalSlotUndemotable(mCx, slot);
+        } else if (*mTypeMap == JSVAL_DOUBLE) {
+            JS_ASSERT(isNumber(*vp));
+            oracle.markGlobalSlotUndemotable(mCx, slot);
+        } else {
+            JS_ASSERT(*mTypeMap == JSVAL_TAG(*vp));
+        }
+        mTypeMap++;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            if (*mTypeMap == JSVAL_INT) {
+                JS_ASSERT(isNumber(*vp));
+                if (!isPromoteInt(mRecorder.get(vp)))
+                    oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
+            } else if (*mTypeMap == JSVAL_DOUBLE) {
+                JS_ASSERT(isNumber(*vp));
+                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
+            } else {
+                JS_ASSERT((*mTypeMap == JSVAL_TNULL)
+                          ? JSVAL_IS_NULL(*vp)
+                          : *mTypeMap == JSVAL_TFUN
+                          ? (!JSVAL_IS_PRIMITIVE(*vp) && 
+                             HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp)))
+                          : *mTypeMap == JSVAL_TAG(*vp));
+            }
+            mStackSlotNum++;
+            mTypeMap++;
+            vp++;
+        }
+        return true;
+    }
+};
+
 /**
  * Make sure that the current values in the given stack frame and all stack frames
  * up and including entryFrame are type-compatible with the entry map.
  *
  * @param root_peer         First fragment in peer list.
  * @param stable_peer       Outparam for first type stable peer.
  * @param demote            True if stability was achieved through demotion.
  * @return                  True if type stable, false otherwise.
  */
 JS_REQUIRES_STACK bool
 TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote)
 {
-    uint8* m;
-    uint8* typemap;
-    unsigned ngslots = treeInfo->globalSlots->length();
-    uint16* gslots = treeInfo->globalSlots->data();
-    JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
+    JS_ASSERT(treeInfo->globalSlots->length() == 
+              treeInfo->nGlobalTypes());
 
     if (stable_peer)
         *stable_peer = NULL;
 
     /*
      * Rather than calculate all of this stuff twice, it gets cached locally.  The "stage" buffers
      * are for calls to set() that will change the exit types.
      */
@@ -2760,48 +3199,21 @@ TraceRecorder::deduceTypeStability(Fragm
     jsval** stage_vals = (jsval**)alloca(sizeof(jsval*) * (treeInfo->typeMap.length()));
     LIns** stage_ins = (LIns**)alloca(sizeof(LIns*) * (treeInfo->typeMap.length()));
 
     /* First run through and see if we can close ourselves - best case! */
     stage_count = 0;
     success = false;
 
     debug_only_v(nj_dprintf("Checking type stability against self=%p\n", (void*)fragment);)
-
-    m = typemap = treeInfo->globalTypeMap();
-    FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-        debug_only_v(nj_dprintf("%s%d ", vpname, vpnum);)
-        if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
-            /* If the failure was an int->double, tell the oracle. */
-            if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
-                oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-                demote = true;
-            } else {
-                goto checktype_fail_1;
-            }
-        }
-        ++m;
-    );
-    m = typemap = treeInfo->stackTypeMap();
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
-        debug_only_v(nj_dprintf("%s%d ", vpname, vpnum);)
-        if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count)) {
-            if (*m == JSVAL_INT && isNumber(*vp) && !isPromoteInt(get(vp))) {
-                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
-                demote = true;
-            } else {
-                goto checktype_fail_1;
-            }
-        }
-        ++m;
-    );
-
-    success = true;
-
-checktype_fail_1:
+    SelfTypeStabilityVisitor selfVisitor(*this, treeInfo->stackTypeMap(), demote,
+                                         stage_vals, stage_ins, stage_count);
+    visitSlots(selfVisitor, cx, 0, *treeInfo->globalSlots);
+    success = selfVisitor.isOk();
+
     /* If we succeeded and don't need to recompile, just close here. */
     if (success && !demote) {
         for (unsigned i = 0; i < stage_count; i++)
             set(stage_vals[i], stage_ins[i]);
         return true;
     /* If we need to trash, don't bother checking peers. */
     } else if (trashSelf) {
         return false;
@@ -2822,33 +3234,21 @@ checktype_fail_1:
         /* Don't allow varying stack depths. */
         if ((ti->nStackTypes != treeInfo->nStackTypes) ||
             (ti->typeMap.length() != treeInfo->typeMap.length()) ||
             (ti->globalSlots->length() != treeInfo->globalSlots->length()))
             continue;
         stage_count = 0;
         success = false;
 
-        m = ti->globalTypeMap();
-        FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
-                if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
-                    goto checktype_fail_2;
-                ++m;
-            );
-
-        m = ti->stackTypeMap();
-        FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
-                if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
-                    goto checktype_fail_2;
-                ++m;
-            );
-
-        success = true;
-
-checktype_fail_2:
+        PeerTypeStabilityVisitor peerVisitor(*this, ti->stackTypeMap(),
+                                             stage_vals, stage_ins, stage_count);
+        visitSlots(peerVisitor, cx, 0, *treeInfo->globalSlots);
+        success = peerVisitor.isOk();
+
         if (success) {
             /*
              * There was a successful match.  We don't care about restoring the saved staging, but
              * we do need to clear the original undemote list.
              */
             for (unsigned i = 0; i < stage_count; i++)
                 set(stage_vals[i], stage_ins[i]);
             if (stable_peer)
@@ -2859,49 +3259,18 @@ checktype_fail_2:
     }
 
     /*
      * If this is a loop trace and it would be stable with demotions, build an undemote list
      * and return true.  Our caller should sniff this and trash the tree, recording a new one
     * that will presumably stabilize.
      */
     if (demote && fragment->kind == LoopTrace) {
-        typemap = m = treeInfo->globalTypeMap();
-        FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
-            if (*m == JSVAL_INT) {
-                JS_ASSERT(isNumber(*vp));
-                if (!isPromoteInt(get(vp)))
-                    oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-            } else if (*m == JSVAL_DOUBLE) {
-                JS_ASSERT(isNumber(*vp));
-                oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-            } else {
-                JS_ASSERT(*m == JSVAL_TAG(*vp));
-            }
-            m++;
-        );
-
-        typemap = m = treeInfo->stackTypeMap();
-        FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
-            if (*m == JSVAL_INT) {
-                JS_ASSERT(isNumber(*vp));
-                if (!isPromoteInt(get(vp)))
-                    oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
-            } else if (*m == JSVAL_DOUBLE) {
-                JS_ASSERT(isNumber(*vp));
-                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
-            } else {
-                JS_ASSERT((*m == JSVAL_TNULL)
-                          ? JSVAL_IS_NULL(*vp)
-                          : *m == JSVAL_TFUN
-                          ? !JSVAL_IS_PRIMITIVE(*vp) && HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))
-                          : *m == JSVAL_TAG(*vp));
-            }
-            m++;
-        );
+        UndemoteVisitor visitor(*this, treeInfo->stackTypeMap());
+        visitSlots(visitor, cx, 0, *treeInfo->globalSlots);
         return true;
     } else {
         demote = false;
     }
 
     return false;
 }
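A caller sees three useful outcomes here: stable as-is (close the loop or link to the stable peer), stable only after demotion (trash this tree and re-record), or unstable. A hypothetical driver, assuming a recorder r and peer list peer_root:

/* Hypothetical caller-side sketch, not part of this patch. */
Fragment* stable = NULL;
bool demote = false;
if (r->deduceTypeStability(peer_root, &stable, demote)) {
    if (demote) {
        /* Stable only with demotions: the oracle has already been told
           which slots must stay doubles; trash and record afresh. */
    } else if (stable) {
        /* A type-stable peer exists: link this trace to it. */
    } else {
        /* Stable against ourselves: close the loop here. */
    }
}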
 
@@ -3113,25 +3482,25 @@ TraceRecorder::closeLoop(JSTraceMonitor*
 
     if (fragmento->assm()->error() != nanojit::None)
         return;
 
     joinEdgesToEntry(fragmento, peer_root);
 
     debug_only_v(nj_dprintf("updating specializations on dependent and linked trees\n"))
     if (fragment->root->vmprivate)
-        specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
-
-    /* 
+        specializeTreesToMissingGlobals(cx, globalObj, (TreeInfo*)fragment->root->vmprivate);
+
+    /*
      * If this is a newly formed tree, and the outer tree has not been compiled yet, we
      * should try to compile the outer tree again.
      */
     if (outer)
         js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
-    
+
     debug_only_v(nj_dprintf("recording completed at %s:%u@%u via closeLoop\n",
                             cx->fp->script->filename,
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp));)
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
@@ -3229,29 +3598,29 @@ TraceRecorder::endLoop(JSTraceMonitor* t
     compile(tm);
 
     if (tm->fragmento->assm()->error() != nanojit::None)
         return;
 
     VMFragment* root = (VMFragment*)fragment->root;
     joinEdgesToEntry(tm->fragmento, getLoop(tm, root->ip, root->globalObj, root->globalShape, root->argc));
 
-    /* Note: this must always be done, in case we added new globals on trace and haven't yet 
+    /* Note: this must always be done, in case we added new globals on trace and haven't yet
        propagated those to linked and dependent trees. */
     debug_only_v(nj_dprintf("updating specializations on dependent and linked trees\n"))
     if (fragment->root->vmprivate)
-        specializeTreesToMissingGlobals(cx, (TreeInfo*)fragment->root->vmprivate);
-
-    /* 
+        specializeTreesToMissingGlobals(cx, globalObj, (TreeInfo*)fragment->root->vmprivate);
+
+    /*
      * If this is a newly formed tree, and the outer tree has not been compiled yet, we
      * should try to compile the outer tree again.
      */
     if (outer)
         js_AttemptCompilation(cx, tm, globalObj, outer, outerArgc);
-    
+
     debug_only_v(nj_dprintf("recording completed at %s:%u@%u via endLoop\n",
                             cx->fp->script->filename,
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp));)
 }
 
 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
 JS_REQUIRES_STACK void
@@ -3838,18 +4207,18 @@ SynthesizeSlowNativeFrame(JSContext *cx,
     fp->xmlNamespace = NULL;
     fp->displaySave = NULL;
 
     ifp->mark = mark;
     cx->fp = fp;
 }
 
 JS_REQUIRES_STACK bool
-js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer, 
-              uint32 outerArgc, JSObject* globalObj, uint32 globalShape, 
+js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
+              uint32 outerArgc, JSObject* globalObj, uint32 globalShape,
               SlotList* globalSlots, uint32 argc)
 {
     JS_ASSERT(f->root == f);
 
     /* Make sure the global type map didn't change on us. */
     if (!CheckGlobalObjectShape(cx, tm, globalObj)) {
         js_Backoff(cx, (jsbytecode*) f->root->ip);
         return false;
@@ -3879,17 +4248,17 @@ js_RecordTree(JSContext* cx, JSTraceMoni
     }
 
     JS_ASSERT(!f->code() && !f->vmprivate);
 
     /* set up the VM-private treeInfo structure for this fragment */
     TreeInfo* ti = new (&gc) TreeInfo(f, globalSlots);
 
     /* capture the coerced type of each active slot in the type map */
-    ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
+    ti->typeMap.captureTypes(cx, globalObj, *globalSlots, 0/*callDepth*/);
     ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
 
 #ifdef DEBUG
     ensureTreeIsUnique(tm, (VMFragment*)f, ti);
     ti->treeFileName = cx->fp->script->filename;
     ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
     ti->treePCOffset = FramePCOffset(cx->fp);
 #endif
@@ -3932,17 +4301,18 @@ isSlotUndemotable(JSContext* cx, TreeInf
     if (slot < ti->nStackTypes)
         return oracle.isStackSlotUndemotable(cx, slot);
 
     uint16* gslots = ti->globalSlots->data();
     return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
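The slot argument indexes the combined type map: stack types first, then one entry per interned global. A worked example with illustrative numbers:

/* Illustrative only: with ti->nStackTypes == 5 and interned globals
   { 12, 40, 41 }, slot 3 is a stack slot, while slot 6 is the second
   interned global, i.e. object slot 40: */
JS_ASSERT(isSlotUndemotable(cx, ti, 6) ==
          oracle.isGlobalSlotUndemotable(cx, 40));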
 
 JS_REQUIRES_STACK static bool
-js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer, uint32 outerArgc)
+js_AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit,
+                          jsbytecode* outer, uint32 outerArgc)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     if (tm->needFlush) {
         FlushJITCache(cx);
         return false;
     }
 
     VMFragment* from = (VMFragment*)exit->from->root;
@@ -4013,17 +4383,17 @@ js_AttemptToStabilizeTree(JSContext* cx,
             JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
             JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
             /* Capture missing globals on both trees and link the fragments together. */
             if (from != f) {
                 ti->dependentTrees.addUnique(from);
                 from_ti->linkedTrees.addUnique(f);
             }
             if (ti->nGlobalTypes() < ti->globalSlots->length())
-                specializeTreesToMissingGlobals(cx, ti);
+                specializeTreesToMissingGlobals(cx, globalObj, ti);
             exit->target = f;
             tm->fragmento->assm()->patch(exit);
             /* Now erase this exit from the unstable exit list. */
             UnstableExit** tail = &from_ti->unstableExits;
             for (UnstableExit* uexit = from_ti->unstableExits; uexit != NULL; uexit = uexit->next) {
                 if (uexit->exit == exit) {
                     *tail = uexit->next;
                     delete uexit;
@@ -4253,17 +4623,17 @@ js_RecordLoopEdge(JSContext* cx, TraceRe
             return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, outer);
         }
         /* emit a call to the inner tree and continue recording the outer tree trace */
         r->emitTreeCall(f, lr);
         return true;
       case UNSTABLE_LOOP_EXIT:
         /* abort recording so the inner loop can become type stable. */
         js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
-        return js_AttemptToStabilizeTree(cx, lr, outer, outerFragment->argc);
+        return js_AttemptToStabilizeTree(cx, globalObj, lr, outer, outerFragment->argc);
       case OVERFLOW_EXIT:
         oracle.markInstructionUndemotable(cx->fp->regs->pc);
         /* fall through */
       case BRANCH_EXIT:
       case CASE_EXIT:
         /* abort recording the outer tree, extend the inner tree */
         js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
         return js_AttemptToExtendTree(cx, lr, NULL, outer);
@@ -4326,141 +4696,197 @@ js_IsEntryTypeCompatible(jsval* vp, uint
             HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*vp))) {
             return true;
         }
         debug_only_v(nj_dprintf("fun != tag%u ", tag);)
         return false;
     }
 }
 
+class TypeCompatibilityVisitor :
+    public SlotVisitorBase {
+    TraceRecorder &mRecorder;
+    JSContext *mCx;
+    uint8 *mTypeMap;
+    unsigned mStackSlotNum;
+    bool mOk;
+public:
+    TypeCompatibilityVisitor(TraceRecorder &recorder,
+                              uint8 *typeMap) :
+        mRecorder(recorder),
+        mCx(mRecorder.cx),
+        mTypeMap(typeMap),
+        mStackSlotNum(0),
+        mOk(true)
+    {}
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        debug_only_v(nj_dprintf("global%d=", n);)
+        if (!js_IsEntryTypeCompatible(vp, mTypeMap)) {
+            mOk = false;
+        } else if (!isPromoteInt(mRecorder.get(vp)) &&
+                   *mTypeMap == JSVAL_INT) {
+            oracle.markGlobalSlotUndemotable(mCx, slot);
+            mOk = false;
+        } else if (JSVAL_IS_INT(*vp) && *mTypeMap == JSVAL_DOUBLE) {
+            oracle.markGlobalSlotUndemotable(mCx, slot);
+        }
+        mTypeMap++;
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            debug_only_v(nj_dprintf("%s%d=", mStackSlotKind, unsigned(i));)
+            if (!js_IsEntryTypeCompatible(vp, mTypeMap)) {
+                mOk = false;
+            } else if (!isPromoteInt(mRecorder.get(vp)) &&
+                       *mTypeMap == JSVAL_INT) {
+                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
+                mOk = false;
+            } else if (JSVAL_IS_INT(*vp) && *mTypeMap == JSVAL_DOUBLE) {
+                oracle.markStackSlotUndemotable(mCx, mStackSlotNum);
+            }
+            vp++;
+            mTypeMap++;
+            mStackSlotNum++;
+        }
+        return true;
+    }
+
+    bool isOk() {
+        return mOk;
+    }
+};
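Per slot, the checks above amount to a four-way policy. Restated as a standalone sketch (SlotVerdict and classifySlot are hypothetical names, not part of this patch):

/* Hypothetical restatement of the per-slot policy above. */
enum SlotVerdict {
    SLOT_OK,                /* tags agree; nothing to do */
    SLOT_OK_UNDEMOTE,       /* compatible, but teach the oracle anyway */
    SLOT_MISMATCH,          /* incompatible tag; no oracle update */
    SLOT_MISMATCH_UNDEMOTE  /* tree wants int, value is a genuine double */
};

static SlotVerdict
classifySlot(bool entryCompatible, bool promotableInt, uint8 entryType, jsval v)
{
    if (!entryCompatible)
        return SLOT_MISMATCH;
    if (entryType == JSVAL_INT && !promotableInt)
        return SLOT_MISMATCH_UNDEMOTE;
    if (entryType == JSVAL_DOUBLE && JSVAL_IS_INT(v))
        return SLOT_OK_UNDEMOTE;
    return SLOT_OK;
}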
+
 JS_REQUIRES_STACK Fragment*
 TraceRecorder::findNestedCompatiblePeer(Fragment* f)
 {
     JSTraceMonitor* tm;
 
     tm = &JS_TRACE_MONITOR(cx);
     unsigned int ngslots = treeInfo->globalSlots->length();
-    uint16* gslots = treeInfo->globalSlots->data();
 
     TreeInfo* ti;
     for (; f != NULL; f = f->peer) {
         if (!f->code())
             continue;
 
         ti = (TreeInfo*)f->vmprivate;
 
         debug_only_v(nj_dprintf("checking nested types %p: ", (void*)f);)
 
         if (ngslots > ti->nGlobalTypes())
-            specializeTreesToMissingGlobals(cx, ti);
-
-        uint8* typemap = ti->typeMap.data();
+            specializeTreesToMissingGlobals(cx, globalObj, ti);
 
         /*
          * Determine whether the typemap of the inner tree matches the outer tree's
          * current state. If the inner tree expects an integer, but the outer tree
          * doesn't guarantee an integer for that slot, we mark the slot undemotable
          * and mismatch here. This will force a new tree to be compiled that accepts
          * a double for the slot. If the inner tree expects a double, but the outer
          * tree has an integer, we can proceed, but we mark the location undemotable.
          */
-        bool ok = true;
-        uint8* m = typemap;
-        FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
-            debug_only_v(nj_dprintf("%s%d=", vpname, vpnum);)
-            if (!js_IsEntryTypeCompatible(vp, m)) {
-                ok = false;
-            } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
-                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
-                ok = false;
-            } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
-                oracle.markStackSlotUndemotable(cx, unsigned(m - typemap));
-            }
-            m++;
-        );
-        FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
-            debug_only_v(nj_dprintf("%s%d=", vpname, vpnum);)
-            if (!js_IsEntryTypeCompatible(vp, m)) {
-                ok = false;
-            } else if (!isPromoteInt(get(vp)) && *m == JSVAL_INT) {
-                oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-                ok = false;
-            } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
-                oracle.markGlobalSlotUndemotable(cx, gslots[n]);
-            }
-            m++;
-        );
-        JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
-
-        debug_only_v(nj_dprintf(" %s\n", ok ? "match" : "");)
-
-        if (ok)
+
+        TypeCompatibilityVisitor visitor(*this, ti->typeMap.data());
+        visitSlots(visitor, cx, 0, *treeInfo->globalSlots);
+
+        debug_only_v(nj_dprintf(" %s\n", visitor.isOk() ? "match" : "");)
+        if (visitor.isOk())
             return f;
     }
 
     return NULL;
 }
 
+class CheckEntryTypeVisitor :
+    public SlotVisitorBase {
+    bool mOk;
+    uint8 *mTypeMap;
+public:
+    CheckEntryTypeVisitor(uint8 *typeMap) :
+        mOk(true),
+        mTypeMap(typeMap)
+    {}
+
+    JS_ALWAYS_INLINE void checkSlot(jsval *vp, const char *name, int i) {
+        debug_only_v(nj_dprintf("%s%d=", name, i);)
+        JS_ASSERT(*mTypeMap != 0xCD);
+        mOk = js_IsEntryTypeCompatible(vp, mTypeMap++);
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
+        if (mOk)
+            checkSlot(vp, "global", n);
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
+    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
+        for (size_t i = 0; i < count; ++i) {
+            if (!mOk)
+                break;
+            checkSlot(vp++, mStackSlotKind, int(i));
+        }
+        return mOk;
+    }
+
+    bool isOk() {
+        return mOk;
+    }
+};
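Note the asymmetry in how this visitor stops early: visitStackSlots ends the walk by returning mOk, but visitGlobalSlot returns void, so the global half guards each check with if (mOk) instead. The stack half is equivalent in spirit to this plain loop (hypothetical sketch):

/* Hypothetical loop-form equivalent of the early exit above. */
for (size_t i = 0; i < count; ++i) {
    if (!js_IsEntryTypeCompatible(&vp[i], &typeMap[i]))
        return false;   /* first mismatch ends the entry-type check */
}
return true;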
+
 /**
  * Check if types are usable for trace execution.
  *
  * @param cx            Context.
  * @param ti            Tree info of peer we're testing.
  * @return              True if compatible (with or without demotions), false otherwise.
  */
 static JS_REQUIRES_STACK bool
-js_CheckEntryTypes(JSContext* cx, TreeInfo* ti)
+js_CheckEntryTypes(JSContext* cx, JSObject* globalObj, TreeInfo* ti)
 {
     unsigned int ngslots = ti->globalSlots->length();
-    uint16* gslots = ti->globalSlots->data();
 
     JS_ASSERT(ti->nStackTypes == js_NativeStackSlots(cx, 0));
 
     if (ngslots > ti->nGlobalTypes())
-        specializeTreesToMissingGlobals(cx, ti);
-
-    uint8* m = ti->typeMap.data();
+        specializeTreesToMissingGlobals(cx, globalObj, ti);
 
     JS_ASSERT(ti->typeMap.length() == js_NativeStackSlots(cx, 0) + ngslots);
     JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
     JS_ASSERT(ti->nGlobalTypes() == ngslots);
-    FORALL_SLOTS(cx, ngslots, gslots, 0,
-        debug_only_v(nj_dprintf("%s%d=", vpname, vpnum);)
-        JS_ASSERT(*m != 0xCD);
-        if (!js_IsEntryTypeCompatible(vp, m))
-            goto check_fail;
-        m++;
-    );
-    JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
+
+    CheckEntryTypeVisitor visitor(ti->typeMap.data());
+    visitSlots(visitor, cx, 0, *ti->globalSlots);
 
     debug_only_v(nj_dprintf("\n");)
-    return true;
-
-check_fail:
-    debug_only_v(nj_dprintf("\n");)
-    return false;
+    return visitor.isOk();
 }
 
 /**
  * Find an acceptable entry tree given a PC.
  *
  * @param cx            Context.
+ * @param globalObj     Global object.
  * @param f             First peer fragment.
  * @param nodemote      If true, will try to find a peer that does not require demotion.
  * @out   count         Number of fragments consulted.
  */
 static JS_REQUIRES_STACK Fragment*
-js_FindVMCompatiblePeer(JSContext* cx, Fragment* f, uintN& count)
+js_FindVMCompatiblePeer(JSContext* cx, JSObject* globalObj, Fragment* f, uintN& count)
 {
     count = 0;
     for (; f != NULL; f = f->peer) {
         if (f->vmprivate == NULL)
             continue;
         debug_only_v(nj_dprintf("checking vm types %p (ip: %p): ", (void*)f, f->ip);)
-        if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
+        if (js_CheckEntryTypes(cx, globalObj, (TreeInfo*)f->vmprivate))
             return f;
         ++count;
     }
     return NULL;
 }
 
 static void
 LeaveTree(InterpState&, VMSideExit* lr);
@@ -4541,20 +4967,18 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     debug_only_v(nj_dprintf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                             cx->fp->script->filename,
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp),
                             ti->maxNativeStackSlots,
                             f->code());)
 
     JS_ASSERT(ti->nGlobalTypes() == ngslots);
-
-    if (ngslots)
-        BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
-    BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
+    BuildNativeFrame(cx, globalObj, 0/*callDepth*/, ngslots, gslots,
+                     ti->typeMap.data(), global, stack_buffer);
 
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
     u.code = f->code();
 
 #ifdef EXECUTE_TREE_TIMER
     state->startTime = rdtsc();
 #endif
 
@@ -4806,17 +5230,18 @@ LeaveTree(InterpState& state, VMSideExit
                               stack, NULL);
     JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
 
     if (innermost->nativeCalleeWord)
         SynthesizeSlowNativeFrame(cx, innermost);
 
     /* write back interned globals */
     double* global = (double*)(&state + 1);
-    FlushNativeGlobalFrame(cx, ngslots, gslots, globalTypeMap, global);
+    FlushNativeGlobalFrame(cx, global,
+                           ngslots, gslots, globalTypeMap);
     JS_ASSERT(*(uint64*)&global[STOBJ_NSLOTS(JS_GetGlobalForObject(cx, cx->fp->scopeChain))] ==
               0xdeadbeefdeadbeefLL);
 
     cx->nativeVp = NULL;
 
 #ifdef DEBUG
     // Verify that our state restoration worked.
     for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
@@ -4931,17 +5356,17 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
         return rv;
     }
 
     debug_only_v(nj_dprintf("Looking for compat peer %d@%d, from %p (ip: %p)\n",
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp), (void*)f, f->ip);)
 
     uintN count;
-    Fragment* match = js_FindVMCompatiblePeer(cx, f, count);
+    Fragment* match = js_FindVMCompatiblePeer(cx, globalObj, f, count);
     if (!match) {
         if (count < MAXPEERS)
             goto record;
        /* If we hit the max peers ceiling, don't try to look up fragments all the time. That's
           expensive. This must be a rather type-unstable loop. */
         debug_only_v(nj_dprintf("Blacklisted: too many peer trees.\n");)
         js_Blacklist((jsbytecode*) f->root->ip);
 #ifdef MOZ_TRACEVIS
@@ -4962,17 +5387,17 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
     }
 
     /* If we exit on a branch, or on a tree call guard, try to grow the inner tree (in case
        of a branch exit), or the tree nested around the tree we exited from (in case of the
        tree call guard). */
     bool rv;
     switch (lr->exitType) {
       case UNSTABLE_LOOP_EXIT:
-          rv = js_AttemptToStabilizeTree(cx, lr, NULL, NULL);
+          rv = js_AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL);
 #ifdef MOZ_TRACEVIS
           if (!rv)
               tvso.r = R_FAIL_STABILIZE;
 #endif
           return rv;
       case OVERFLOW_EXIT:
         oracle.markInstructionUndemotable(cx->fp->regs->pc);
         /* fall through */
@@ -7891,17 +8316,17 @@ TraceRecorder::callTraceableNative(JSFun
         if (argc != knownargc)
             continue;
 
         intN prefixc = strlen(known->prefix);
         JS_ASSERT(prefixc <= 3);
         LIns** argp = &args[argc + prefixc - 1];
         char argtype;
 
-#if defined _DEBUG
+#if defined DEBUG
         memset(args, 0xCD, sizeof(args));
 #endif
 
         uintN i;
         for (i = prefixc; i--; ) {
             argtype = known->prefix[i];
             if (argtype == 'C') {
                 *argp = cx_ins;
@@ -7960,17 +8385,17 @@ TraceRecorder::callTraceableNative(JSFun
                     goto next_specialization;
             } else if (argtype == 'v') {
                 box_jsval(arg, *argp);
             } else {
                 goto next_specialization;
             }
             argp--;
         }
-#if defined _DEBUG
+#if defined DEBUG
         JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
 #endif
         return emitNativeCall(known, argc, args);
 
 next_specialization:;
     } while ((known++)->flags & JSTN_MORE);
 
     return JSRS_STOP;
@@ -8936,23 +9361,19 @@ TraceRecorder::interpretedFunctionCall(j
 
     // Generate a type map for the outgoing frame and stash it in the LIR
     unsigned stackSlots = js_NativeStackSlots(cx, 0/*callDepth*/);
     if (sizeof(FrameInfo) + stackSlots * sizeof(uint8) > NJ_MAX_SKIP_PAYLOAD_SZB)
         ABORT_TRACE("interpreted function call requires saving too much stack");
     LIns* data = lir->insSkip(sizeof(FrameInfo) + stackSlots * sizeof(uint8));
     FrameInfo* fi = (FrameInfo*)data->payload();
     uint8* typemap = (uint8 *)(fi + 1);
-    uint8* m = typemap;
-    /* Determine the type of a store by looking at the current type of the actual value the
-       interpreter is using. For numbers we have to check what kind of store we used last
-       (integer or double) to figure out what the side exit show reflect in its typemap. */
-    FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0/*callDepth*/,
-        *m++ = determineSlotType(vp);
-    );
+
+    DetermineTypesVisitor detVisitor(*this, typemap);
+    visitStackSlots(detVisitor, cx, 0);
 
     if (argc >= 0x8000)
         ABORT_TRACE("too many arguments");
 
     fi->callee = JSVAL_TO_OBJECT(fval);
     fi->block = fp->blockChain;
     fi->pc = fp->regs->pc;
     fi->imacpc = fp->imacpc;
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -188,19 +188,18 @@ public:
         clearDemotability();
     }
 };
 
 typedef Queue<uint16> SlotList;
 
 class TypeMap : public Queue<uint8> {
 public:
-    JS_REQUIRES_STACK void captureTypes(JSContext* cx, SlotList& slots, unsigned callDepth);
-    JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx,
-                                                     SlotList& slots,
+    JS_REQUIRES_STACK void captureTypes(JSContext* cx, JSObject* globalObj,
+                                        SlotList& slots, unsigned callDepth);
+    JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots,
                                                      unsigned stackSlots);
     bool matches(TypeMap& other) const;
 };
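Both capture methods now take the global object explicitly instead of rederiving it inside the walk. A caller that does not already hold the global can derive it first (sketch; js_RecordTree itself receives globalObj as a parameter):

/* Sketch of the new calling convention. */
JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
ti->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */);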
 
 enum ExitType {
     /*
      * An exit at a possible branch-point in the trace at which to attach a
      * future secondary trace. Therefore the recorder must generate different
@@ -715,16 +714,26 @@ public:
 
     bool wasDeepAborted() { return deepAborted; }
     TreeInfo* getTreeInfo() { return treeInfo; }
 
 #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
     JS_REQUIRES_STACK JSRecordingStatus record_##op();
 # include "jsopcode.tbl"
 #undef OPDEF
+
+    friend class ImportBoxedStackSlotVisitor;
+    friend class ImportUnboxedStackSlotVisitor;
+    friend class ImportGlobalSlotVisitor;
+    friend class AdjustCallerGlobalTypesVisitor;
+    friend class AdjustCallerStackTypesVisitor;
+    friend class TypeCompatibilityVisitor;
+    friend class SelfTypeStabilityVisitor;
+    friend class PeerTypeStabilityVisitor;
+    friend class UndemoteVisitor;
 };
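The friend list is what lets the visitors, which live outside TraceRecorder, reach its private state: TypeCompatibilityVisitor, for instance, caches mRecorder.cx and calls mRecorder.get(vp). A new visitor needing the same access would follow the pattern (hypothetical sketch):

/* Hypothetical: a visitor that needs recorder internals holds a
   reference and must also be added to the friend list above. */
class MyRecorderVisitor : public SlotVisitorBase {
    TraceRecorder &mRecorder;
public:
    MyRecorderVisitor(TraceRecorder &recorder) : mRecorder(recorder) {}

    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
    visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
        mRecorder.get(vp);   /* private access requires friendship */
    }

    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
    visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
        for (size_t i = 0; i < count; ++i)
            mRecorder.get(vp++);
        return true;
    }
};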
 #define TRACING_ENABLED(cx)       JS_HAS_OPTION(cx, JSOPTION_JIT)
 #define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
 #define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))
 
 #define JSOP_IN_RANGE(op,lo,hi)   (uintN((op) - (lo)) <= uintN((hi) - (lo)))
 #define JSOP_IS_BINARY(op)        JSOP_IN_RANGE(op, JSOP_BITOR, JSOP_MOD)
 #define JSOP_IS_UNARY(op)         JSOP_IN_RANGE(op, JSOP_NEG, JSOP_POS)