Merge tracemonkey to mozilla-central.
author      Robert Sayre <sayrer@gmail.com>
date        Sat, 31 Jan 2009 11:45:24 -0800
changeset   24505 f1cade532f6fca3578d5589eb7f5f5f46ec975ff
parent      24485 ee1c6881037509e655199f613b351b14cff68547 (current diff)
parent      24504 d146645d2e7258b50e9147869624304ea2a74c75 (diff)
child       24506 f46ab3393b32fe877a81c761181443f7e0a9114c
push id     5074
push user   rsayre@mozilla.com
push date   Sat, 31 Jan 2009 19:45:42 +0000
milestone   1.9.2a1pre
js/src/trace-test.js
--- a/js/src/builtins.tbl
+++ b/js/src/builtins.tbl
@@ -72,20 +72,16 @@ BUILTIN1(extern, DOUBLE,    js_UnboxDoub
 BUILTIN1(extern, INT32,     js_UnboxInt32, JSVAL,                               1, 1)
 BUILTIN2(extern, DOUBLE,    js_dmod, DOUBLE, DOUBLE,                            1, 1)
 BUILTIN2(extern, INT32,     js_imod, INT32, INT32,                              1, 1)
 BUILTIN1(extern, INT32,     js_DoubleToInt32, DOUBLE,                           1, 1)
 BUILTIN1(extern, UINT32,    js_DoubleToUint32, DOUBLE,                          1, 1)
 
 BUILTIN2(extern, DOUBLE,    js_StringToNumber, CONTEXT, STRING,                 1, 1)
 BUILTIN2(extern, INT32,     js_StringToInt32, CONTEXT, STRING,                  1, 1)
-BUILTIN3(extern, JSVAL,     js_Any_getprop, CONTEXT, OBJECT, STRING,            0, 0)
-BUILTIN4(extern, BOOL,      js_Any_setprop, CONTEXT, OBJECT, STRING, JSVAL,     0, 0)
-BUILTIN3(extern, JSVAL,     js_Any_getelem, CONTEXT, OBJECT, INT32,             0, 0)
-BUILTIN4(extern, BOOL,      js_Any_setelem, CONTEXT, OBJECT, INT32, JSVAL,      0, 0)
 BUILTIN2(FRIEND, BOOL,      js_CloseIterator, CONTEXT, JSVAL,                   0, 0)
 BUILTIN2(extern, SIDEEXIT,  js_CallTree, INTERPSTATE, FRAGMENT,                 0, 0)
 BUILTIN2(extern, OBJECT,    js_FastNewObject, CONTEXT, OBJECT,                  0, 0)
 BUILTIN3(extern, BOOL,      js_AddProperty, CONTEXT, OBJECT, SCOPEPROP,         0, 0)
 BUILTIN3(extern, BOOL,      js_HasNamedProperty, CONTEXT, OBJECT, STRING,       0, 0)
 BUILTIN3(extern, BOOL,      js_HasNamedPropertyInt32, CONTEXT, OBJECT, INT32,   0, 0)
 BUILTIN3(extern, JSVAL,     js_CallGetter, CONTEXT, OBJECT, SCOPEPROP,          0, 0)
 BUILTIN2(extern, STRING,    js_TypeOfObject, CONTEXT, OBJECT,                   1, 1)
--- a/js/src/imacros.c.out
+++ b/js/src/imacros.c.out
@@ -589,16 +589,74 @@ static struct {
 /* 2*/  JSOP_CALLBUILTIN, ((JSBUILTIN_CallIteratorNext) & 0xff00) >> 8, ((JSBUILTIN_CallIteratorNext) & 0xff),
 /* 5*/  JSOP_CALL, 0, 0,
 /* 8*/  JSOP_DUP,
 /* 9*/  JSOP_HOLE,
 /*10*/  JSOP_STRICTNE,
 /*11*/  JSOP_STOP,
     },
 };
+static struct {
+    jsbytecode getprop[10];
+    jsbytecode getelem[10];
+} getelem_imacros = {
+    {
+/* 0*/  JSOP_SWAP,
+/* 1*/  JSOP_CALLBUILTIN, ((JSBUILTIN_GetProperty) & 0xff00) >> 8, ((JSBUILTIN_GetProperty) & 0xff),
+/* 4*/  JSOP_PICK, 2,
+/* 6*/  JSOP_CALL, 0, 1,
+/* 9*/  JSOP_STOP,
+    },
+    {
+/* 0*/  JSOP_SWAP,
+/* 1*/  JSOP_CALLBUILTIN, ((JSBUILTIN_GetElement) & 0xff00) >> 8, ((JSBUILTIN_GetElement) & 0xff),
+/* 4*/  JSOP_PICK, 2,
+/* 6*/  JSOP_CALL, 0, 1,
+/* 9*/  JSOP_STOP,
+    },
+};
+static struct {
+    jsbytecode setprop[15];
+    jsbytecode setelem[15];
+} setelem_imacros = {
+    {
+/* 0*/  JSOP_DUP,
+/* 1*/  JSOP_PICK, 3,
+/* 3*/  JSOP_CALLBUILTIN, ((JSBUILTIN_SetProperty) & 0xff00) >> 8, ((JSBUILTIN_SetProperty) & 0xff),
+/* 6*/  JSOP_PICK, 4,
+/* 8*/  JSOP_PICK, 4,
+/*10*/  JSOP_CALL, 0, 2,
+/*13*/  JSOP_POP,
+/*14*/  JSOP_STOP,
+    },
+    {
+/* 0*/  JSOP_DUP,
+/* 1*/  JSOP_PICK, 3,
+/* 3*/  JSOP_CALLBUILTIN, ((JSBUILTIN_SetElement) & 0xff00) >> 8, ((JSBUILTIN_SetElement) & 0xff),
+/* 6*/  JSOP_PICK, 4,
+/* 8*/  JSOP_PICK, 4,
+/*10*/  JSOP_CALL, 0, 2,
+/*13*/  JSOP_POP,
+/*14*/  JSOP_STOP,
+    },
+};
+static struct {
+    jsbytecode initelem[15];
+} initelem_imacros = {
+    {
+/* 0*/  JSOP_PICK, 2,
+/* 2*/  JSOP_DUP,
+/* 3*/  JSOP_CALLBUILTIN, ((JSBUILTIN_SetElement) & 0xff00) >> 8, ((JSBUILTIN_SetElement) & 0xff),
+/* 6*/  JSOP_PICK, 4,
+/* 8*/  JSOP_PICK, 4,
+/*10*/  JSOP_CALL, 0, 2,
+/*13*/  JSOP_POP,
+/*14*/  JSOP_STOP,
+    },
+};
 uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_NOP */
     0,  /* JSOP_PUSH */
     0,  /* JSOP_POPV */
     0,  /* JSOP_ENTERWITH */
     0,  /* JSOP_LEAVEWITH */
     0,  /* JSOP_RETURN */
     0,  /* JSOP_GOTO */
@@ -645,18 +703,18 @@ uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_NAMEINC */
     0,  /* JSOP_PROPINC */
     0,  /* JSOP_ELEMINC */
     0,  /* JSOP_NAMEDEC */
     0,  /* JSOP_PROPDEC */
     0,  /* JSOP_ELEMDEC */
     0,  /* JSOP_GETPROP */
     0,  /* JSOP_SETPROP */
-    0,  /* JSOP_GETELEM */
-    0,  /* JSOP_SETELEM */
+    2,  /* JSOP_GETELEM */
+    2,  /* JSOP_SETELEM */
     0,  /* JSOP_CALLNAME */
     0,  /* JSOP_CALL */
     0,  /* JSOP_NAME */
     0,  /* JSOP_DOUBLE */
     0,  /* JSOP_STRING */
     0,  /* JSOP_ZERO */
     0,  /* JSOP_ONE */
     0,  /* JSOP_NULL */
@@ -682,17 +740,17 @@ uint8 js_opcode2extra[JSOP_LIMIT] = {
     0,  /* JSOP_GETARG */
     0,  /* JSOP_SETARG */
     0,  /* JSOP_GETLOCAL */
     0,  /* JSOP_SETLOCAL */
     0,  /* JSOP_UINT16 */
     0,  /* JSOP_NEWINIT */
     0,  /* JSOP_ENDINIT */
     0,  /* JSOP_INITPROP */
-    0,  /* JSOP_INITELEM */
+    2,  /* JSOP_INITELEM */
     0,  /* JSOP_DEFSHARP */
     0,  /* JSOP_USESHARP */
     0,  /* JSOP_INCARG */
     0,  /* JSOP_DECARG */
     0,  /* JSOP_ARGINC */
     0,  /* JSOP_ARGDEC */
     0,  /* JSOP_INCLOCAL */
     0,  /* JSOP_DECLOCAL */
--- a/js/src/imacros.jsasm
+++ b/js/src/imacros.jsasm
@@ -1,8 +1,9 @@
+# -*- indent-tabs-mode: nil; -*-
 # vim: set sw=4 ts=8 et tw=78 ft=asm:
 # ***** BEGIN LICENSE BLOCK *****
 # Version: MPL 1.1/GPL 2.0/LGPL 2.1
 #
 # The contents of this file are subject to the Mozilla Public License Version
 # 1.1 (the "License"); you may not use this file except in compliance with
 # the License. You may obtain a copy of the License at
 # http://www.mozilla.org/MPL/
@@ -630,8 +631,69 @@ 4:      imacop                          
         call 0                                      # iterobj nextval?
         dup                                         # iterobj nextval? nextval?
         hole                                        # iterobj nextval? nextval? hole
         strictne                                    # iterobj nextval? boolean
         stop
     .end
 
 .end
+
+.igroup getelem JSOP_GETELEM
+
+    .imacro getprop                                 # obj name
+        swap                                        # name obj
+        callbuiltin (JSBUILTIN_GetProperty)         # name fun obj
+        pick 2                                      # fun obj name
+        call 1                                      # propval
+        stop
+    .end
+
+    .imacro getelem                                 # obj i
+        swap                                        # i obj
+        callbuiltin (JSBUILTIN_GetElement)          # i fun obj
+        pick 2                                      # fun obj i
+        call 1                                      # propval
+        stop
+    .end
+
+.end
+
+.igroup setelem JSOP_SETELEM
+
+    .imacro setprop                                 # obj name val
+        dup                                         # obj name val val
+        pick 3                                      # name val val obj
+        callbuiltin (JSBUILTIN_SetProperty)         # name val val fun obj
+        pick 4                                      # val val fun obj name
+        pick 4                                      # val fun obj name val
+        call 2                                      # val junk
+        pop                                         # val
+        stop
+    .end
+
+    .imacro setelem                                 # obj i val
+        dup                                         # obj i val val
+        pick 3                                      # i val val obj
+        callbuiltin (JSBUILTIN_SetElement)          # i val val fun obj
+        pick 4                                      # val val fun obj i
+        pick 4                                      # val fun obj i val
+        call 2                                      # val junk
+        pop                                         # val
+        stop
+    .end
+
+.end
+
+.igroup initelem JSOP_INITELEM
+
+    .imacro initelem                                # obj i val
+        pick 2                                      # i val obj
+        dup                                         # i val obj obj
+        callbuiltin (JSBUILTIN_SetElement)          # i val obj fun obj
+        pick 4                                      # val obj fun obj i
+        pick 4                                      # obj fun obj i val
+        call 2                                      # obj junk
+        pop                                         # obj
+        stop
+    .end
+
+.end
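
# Note: the four builtins these imacros reach through JSOP_CALLBUILTIN perform
# the same property access the deleted js_Any_* helpers did (removed from
# builtins.tbl above and from jsbuiltins.cpp below). A minimal sketch of the
# element-get path, assuming the helpers named in this patch (js_Int32ToId is
# made inline in jsbuiltins.h below); function name is illustrative only:
#
#     /* Sketch only: mirrors the removed js_Any_getelem. An int32 index is
#      * mapped to a jsid, then the generic property lookup runs. */
#     jsval FASTCALL
#     GetElementSketch(JSContext* cx, JSObject* obj, int32 index)
#     {
#         jsval v;
#         jsid id;
#         if (!js_Int32ToId(cx, index, &id))
#             return JSVAL_ERROR_COOKIE;
#         if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
#             return JSVAL_ERROR_COOKIE;
#         return v;
#     }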
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -74,24 +74,21 @@
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsparse.h"
 #include "jsregexp.h"
 #include "jsscan.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
+#include "jstracer.h"
 #include "jsdbgapi.h"
 #include "prmjtime.h"
 #include "jsstaticcheck.h"
 
-#if !defined JS_THREADSAFE && defined JS_TRACER
-#include "jstracer.h"
-#endif
-
 #if JS_HAS_FILE_OBJECT
 #include "jsfile.h"
 #endif
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
 
@@ -318,16 +315,17 @@ JS_PushArgumentsVA(JSContext *cx, void *
         /*
          * Count non-space non-star characters as individual jsval arguments.
          * This may over-allocate stack, but we'll fix below.
          */
         if (isspace(c) || c == '*')
             continue;
         argc++;
     }
+    js_LeaveTrace(cx);
     sp = js_AllocStack(cx, argc, markp);
     if (!sp)
         return NULL;
     argv = sp;
     while ((c = *format++) != '\0') {
         if (isspace(c) || c == '*')
             continue;
         switch (c) {
@@ -414,16 +412,17 @@ bad:
     js_FreeStack(cx, *markp);
     return NULL;
 }
 
 JS_PUBLIC_API(void)
 JS_PopArguments(JSContext *cx, void *mark)
 {
     CHECK_REQUEST(cx);
+    JS_ASSERT_NOT_ON_TRACE(cx);
     js_FreeStack(cx, mark);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_AddArgumentFormatter(JSContext *cx, const char *format,
                         JSArgumentFormatter formatter)
 {
     size_t length;
@@ -2062,16 +2061,17 @@ JS_SetExtraGCRoots(JSRuntime *rt, JSTrac
     rt->gcExtraRootsData = data;
 }
 
 JS_PUBLIC_API(void)
 JS_TraceRuntime(JSTracer *trc)
 {
     JSBool allAtoms = trc->context->runtime->gcKeepAtoms != 0;
 
+    js_LeaveTrace(trc->context);
     js_TraceRuntime(trc, allAtoms);
 }
 
 #ifdef DEBUG
 
 #ifdef HAVE_XPCONNECT
 #include "dump_xpc.h"
 #endif
@@ -2478,16 +2478,18 @@ extern JS_PUBLIC_API(JSBool)
 JS_IsGCMarkingTracer(JSTracer *trc)
 {
     return IS_GC_MARKING_TRACER(trc);
 }
 
 JS_PUBLIC_API(void)
 JS_GC(JSContext *cx)
 {
+    js_LeaveTrace(cx);
+
     /* Don't nuke active arenas if executing or compiling. */
     if (cx->stackPool.current == &cx->stackPool.first)
         JS_FinishArenaPool(&cx->stackPool);
     if (cx->tempPool.current == &cx->tempPool.first)
         JS_FinishArenaPool(&cx->tempPool);
     js_GC(cx, GC_NORMAL);
 }
 
@@ -5441,18 +5443,17 @@ JS_SaveFrameChain(JSContext *cx)
     cx->dormantFrameChain = fp;
     cx->fp = NULL;
     return fp;
 }
 
 JS_PUBLIC_API(void)
 JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp)
 {
-    JS_ASSERT(!JS_ON_TRACE(cx));
-    VOUCH_DOES_NOT_REQUIRE_STACK();
+    JS_ASSERT_NOT_ON_TRACE(cx);
     JS_ASSERT(!cx->fp);
     if (!fp)
         return;
 
     JS_ASSERT(fp == cx->dormantFrameChain);
     cx->fp = fp;
     cx->dormantFrameChain = fp->dormantNext;
     fp->dormantNext = NULL;
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -1615,17 +1615,17 @@ array_reverse(JSContext *cx, uintN argc,
 typedef struct MSortArgs {
     size_t       elsize;
     JSComparator cmp;
     void         *arg;
     JSBool       fastcopy;
 } MSortArgs;
 
 /* Helper function for js_MergeSort. */
-static JSBool
+static JS_REQUIRES_STACK JSBool
 MergeArrays(MSortArgs *msa, void *src, void *dest, size_t run1, size_t run2)
 {
     void *arg, *a, *b, *c;
     size_t elsize, runtotal;
     int cmp_result;
     JSComparator cmp;
     JSBool fastcopy;
 
@@ -1676,17 +1676,17 @@ MergeArrays(MSortArgs *msa, void *src, v
 
     return JS_TRUE;
 }
 
 /*
  * This sort is stable, i.e. sequence of equal elements is preserved.
  * See also bug #224128.
  */
-JSBool
+JS_REQUIRES_STACK JSBool
 js_MergeSort(void *src, size_t nel, size_t elsize,
              JSComparator cmp, void *arg, void *tmp)
 {
     void *swap, *vec1, *vec2;
     MSortArgs msa;
     size_t i, j, lo, hi, run;
     JSBool fastcopy;
     int cmp_result;
@@ -1762,17 +1762,17 @@ js_MergeSort(void *src, size_t nel, size
 }
 
 typedef struct CompareArgs {
     JSContext   *context;
     jsval       fval;
     jsval       *elemroot;      /* stack needed for js_Invoke */
 } CompareArgs;
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 sort_compare(void *arg, const void *a, const void *b, int *result)
 {
     jsval av = *(const jsval *)a, bv = *(const jsval *)b;
     CompareArgs *ca = (CompareArgs *) arg;
     JSContext *cx = ca->context;
     jsval *invokevp, *sp;
     jsdouble cmp;
 
@@ -1830,17 +1830,17 @@ sort_compare_strings(void *arg, const vo
 
 /*
  * The array_sort function below assumes JSVAL_NULL is zero in order to
  * perform initialization using memset.  Other parts of SpiderMonkey likewise
  * "know" that JSVAL_NULL is zero; this static assertion covers all cases.
  */
 JS_STATIC_ASSERT(JSVAL_NULL == 0);
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_sort(JSContext *cx, uintN argc, jsval *vp)
 {
     jsval *argv, fval, *vec, *mergesort_tmp, v;
     JSObject *obj;
     CompareArgs ca;
     jsuint len, newlen, i, undefs;
     JSTempValueRooter tvr;
     JSBool hole;
@@ -2741,17 +2741,17 @@ typedef enum ArrayExtraMode {
     MAP,
     FILTER,
     SOME,
     EVERY
 } ArrayExtraMode;
 
 #define REDUCE_MODE(mode) ((mode) == REDUCE || (mode) == REDUCE_RIGHT)
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
 {
     JSObject *obj;
     jsuint length, newlen;
     jsval *argv, *elemroot, *invokevp, *sp;
     JSBool ok, cond, hole;
     JSObject *callable, *thisp, *newarr;
     jsint start, end, step, i;
@@ -2925,53 +2925,53 @@ array_extra(JSContext *cx, ArrayExtraMod
 
   out:
     js_FreeStack(cx, mark);
     if (ok && mode == FILTER)
         ok = js_SetLengthProperty(cx, newarr, newlen);
     return ok;
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_forEach(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, FOREACH, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_map(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, MAP, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_reduce(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, REDUCE, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_reduceRight(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, REDUCE_RIGHT, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_filter(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, FILTER, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_some(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, SOME, argc, vp);
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 array_every(JSContext *cx, uintN argc, jsval *vp)
 {
     return array_extra(cx, EVERY, argc, vp);
 }
 #endif
 
 static JSPropertySpec array_props[] = {
     {js_length_str,   -1,   JSPROP_SHARED | JSPROP_PERMANENT,
--- a/js/src/jsarray.h
+++ b/js/src/jsarray.h
@@ -117,17 +117,17 @@ typedef JSBool (*JSComparator)(void *arg
 /*
  * NB: vec is the array to be sorted, tmp is temporary space at least as big
  * as vec. Both should be GC-rooted if appropriate.
  *
  * The sorted result is in vec. vec may be in an inconsistent state if the
  * comparator function cmp returns an error inside a comparison, so remember
  * to check the return value of this function.
  */
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_MergeSort(void *vec, size_t nel, size_t elsize, JSComparator cmp,
              void *arg, void *tmp);
 
 #ifdef DEBUG_ARRAYS
 extern JSBool
 js_ArrayInfo(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval);
 #endif
 
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -182,72 +182,16 @@ js_StringToInt32(JSContext* cx, JSString
     jsdouble d;
 
     JSSTRING_CHARS_AND_END(str, bp, end);
     if (!js_strtod(cx, bp, end, &ep, &d) || js_SkipWhiteSpace(ep, end) != end)
         return 0;
     return js_DoubleToECMAInt32(d);
 }
 
-static inline JSBool
-js_Int32ToId(JSContext* cx, int32 index, jsid* id)
-{
-    if (index <= JSVAL_INT_MAX) {
-        *id = INT_TO_JSID(index);
-        return JS_TRUE;
-    }
-    JSString* str = js_NumberToString(cx, index);
-    if (!str)
-        return JS_FALSE;
-    return js_ValueToStringId(cx, STRING_TO_JSVAL(str), id);
-}
-
-jsval FASTCALL
-js_Any_getprop(JSContext* cx, JSObject* obj, JSString* idstr)
-{
-    jsval v;
-    jsid id;
-
-    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id))
-        return JSVAL_ERROR_COOKIE;
-    if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
-        return JSVAL_ERROR_COOKIE;
-    return v;
-}
-
-JSBool FASTCALL
-js_Any_setprop(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
-{
-    jsid id;
-    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id))
-        return JS_FALSE;
-    return OBJ_SET_PROPERTY(cx, obj, id, &v);
-}
-
-jsval FASTCALL
-js_Any_getelem(JSContext* cx, JSObject* obj, int32 index)
-{
-    jsval v;
-    jsid id;
-    if (!js_Int32ToId(cx, index, &id))
-        return JSVAL_ERROR_COOKIE;
-    if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
-        return JSVAL_ERROR_COOKIE;
-    return v;
-}
-
-JSBool FASTCALL
-js_Any_setelem(JSContext* cx, JSObject* obj, int32 index, jsval v)
-{
-    jsid id;
-    if (!js_Int32ToId(cx, index, &id))
-        return JSVAL_ERROR_COOKIE;
-    return OBJ_SET_PROPERTY(cx, obj, id, &v);
-}
-
 SideExit* FASTCALL
 js_CallTree(InterpState* state, Fragment* f)
 {
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
 
     u.code = f->code();
     JS_ASSERT(u.code);
 
--- a/js/src/jsbuiltins.h
+++ b/js/src/jsbuiltins.h
@@ -317,16 +317,29 @@ struct JSTraceableNative {
 #define _JS_DEFINE_CALLINFO_n(n, args)  JS_DEFINE_CALLINFO_##n args
 
 jsdouble FASTCALL
 js_StringToNumber(JSContext* cx, JSString* str);
 
 jsdouble FASTCALL
 js_BooleanOrUndefinedToNumber(JSContext* cx, int32 unboxed);
 
+static JS_INLINE JSBool
+js_Int32ToId(JSContext* cx, int32 index, jsid* id)
+{
+    if (index <= JSVAL_INT_MAX) {
+        *id = INT_TO_JSID(index);
+        return JS_TRUE;
+    }
+    JSString* str = js_NumberToString(cx, index);
+    if (!str)
+        return JS_FALSE;
+    return js_ValueToStringId(cx, STRING_TO_JSVAL(str), id);
+}
+
 #else
 
 #define JS_DEFINE_CALLINFO_1(linkage, rt, op, at0, cse, fold)
 #define JS_DEFINE_CALLINFO_2(linkage, rt, op, at0, at1, cse, fold)
 #define JS_DEFINE_CALLINFO_3(linkage, rt, op, at0, at1, at2, cse, fold)
 #define JS_DEFINE_CALLINFO_4(linkage, rt, op, at0, at1, at2, at3, cse, fold)
 #define JS_DEFINE_CALLINFO_5(linkage, rt, op, at0, at1, at2, at3, at4, cse, fold)
 #define JS_DECLARE_CALLINFO(name)
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -59,16 +59,17 @@
 #include "jsgc.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsscan.h"
 #include "jsscope.h"
 #include "jsscript.h"
+#include "jsstaticcheck.h"
 #include "jsstr.h"
 #include "jstracer.h"
 
 #ifdef JS_THREADSAFE
 #include "prtypes.h"
 
 /*
  * The index for JSThread info, returned by PR_NewThreadPrivateIndex.  The
@@ -289,16 +290,17 @@ js_NewContext(JSRuntime *rt, size_t stac
     /*
      * First we do the infallible, every-time per-context initializations.
      * Should a later, fallible initialization (js_InitRegExpStatics, e.g.,
      * or the stuff under 'if (first)' below) fail, at least the version
      * and arena-pools will be valid and safe to use (say, from the last GC
      * done by js_DestroyContext).
      */
     cx->version = JSVERSION_DEFAULT;
+    VOUCH_DOES_NOT_REQUIRE_STACK();
     JS_INIT_ARENA_POOL(&cx->stackPool, "stack", stackChunkSize, sizeof(jsval),
                        &cx->scriptStackQuota);
 
     JS_INIT_ARENA_POOL(&cx->tempPool, "temp",
                        1024,  /* FIXME: bug 421435 */
                        sizeof(jsdouble), &cx->scriptStackQuota);
 
     js_InitRegExpStatics(cx);
@@ -502,16 +504,17 @@ js_DestroyContext(JSContext *cx, JSDestr
         if (mode == JSDCM_FORCE_GC)
             js_GC(cx, GC_NORMAL);
         else if (mode == JSDCM_MAYBE_GC)
             JS_MaybeGC(cx);
     }
 
     /* Free the stuff hanging off of cx. */
     js_FreeRegExpStatics(cx);
+    VOUCH_DOES_NOT_REQUIRE_STACK();
     JS_FinishArenaPool(&cx->stackPool);
     JS_FinishArenaPool(&cx->tempPool);
 
     if (cx->lastMessage)
         free(cx->lastMessage);
 
     /* Remove any argument formatters. */
     map = cx->argumentFormatMap;
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -106,38 +106,44 @@ typedef Queue<uint16> SlotList;
 # define CLS(T)  T*
 #else
 # define CLS(T)  void*
 #endif
 
 #define FRAGMENT_TABLE_SIZE 512
 struct VMFragment;
 
+#define MONITOR_N_GLOBAL_STATES 4
+struct GlobalState {
+    uint32                  globalShape;
+    CLS(SlotList)           globalSlots;
+};
+
 /*
  * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
  * JS_THREADSAFE) has an associated trace monitor that keeps track of loop
  * frequencies for all JavaScript code loaded into that runtime.
  */
 typedef struct JSTraceMonitor {
     /*
      * Flag set when running (or recording) JIT-compiled code. This prevents
      * both interpreter activation and last-ditch garbage collection when up
      * against our runtime's memory limits. This flag also suppresses calls to
      * JS_ReportOutOfMemory when failing due to runtime limits.
      */
     JSBool                  onTrace;
     CLS(nanojit::LirBuffer) lirbuf;
     CLS(nanojit::Fragmento) fragmento;
     CLS(TraceRecorder)      recorder;
-    uint32                  globalShape;
-    CLS(SlotList)           globalSlots;
     jsval                   *reservedDoublePool;
     jsval                   *reservedDoublePoolPtr;
 
+    struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
     struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE];
+    JSBool needFlush;
 
     /*
      * reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects.
      * The JIT uses this to ensure that leaving a trace tree can't fail.
      */
     JSObject                *reservedObjects;
     JSBool                  useReservedObjects;
 
@@ -252,16 +258,20 @@ typedef enum JSRuntimeState {
     JSRTS_LAUNCHING,
     JSRTS_UP,
     JSRTS_LANDING
 } JSRuntimeState;
 
 typedef enum JSBuiltinFunctionId {
     JSBUILTIN_ObjectToIterator,
     JSBUILTIN_CallIteratorNext,
+    JSBUILTIN_GetProperty,
+    JSBUILTIN_GetElement,
+    JSBUILTIN_SetProperty,
+    JSBUILTIN_SetElement,
     JSBUILTIN_LIMIT
 } JSBuiltinFunctionId;
 
 typedef struct JSPropertyTreeEntry {
     JSDHashEntryHdr     hdr;
     JSScopeProperty     *child;
 } JSPropertyTreeEntry;
 
@@ -874,16 +884,17 @@ struct JSContext {
 
     /* Quota on the size of arenas used to compile and execute scripts. */
     size_t              scriptStackQuota;
 
     /* Data shared by threads in an address space. */
     JSRuntime           *runtime;
 
     /* Stack arena pool and frame pointer register. */
+    JS_REQUIRES_STACK
     JSArenaPool         stackPool;
 
     JS_REQUIRES_STACK
     JSStackFrame        *fp;
 
     /* Temporary arena pool used while compiling and decompiling. */
     JSArenaPool         tempPool;
 
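Note: with globalShape and globalSlots moved out of JSTraceMonitor, compiled
fragments are keyed by (ip, global shape) pairs, and the monitor keeps up to
MONITOR_N_GLOBAL_STATES distinct global states. The getVMFragment code in the
jstracer.cpp hunks below walks the fragment chain on both keys; a minimal
sketch of that lookup, assuming the VMFragment fields used in this patch:

    /* Sketch only: fragment lookup keyed by ip and global shape, as in
     * getVMFragment below. A fragment for the same loop header but a
     * different global-object shape no longer matches. */
    static VMFragment*
    LookupSketch(VMFragment* vf, const void* ip, uint32 globalShape)
    {
        while (vf && !(vf->globalShape == globalShape && vf->ip == ip))
            vf = vf->next;
        return vf;
    }
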
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -669,17 +669,17 @@ js_watch_set(JSContext *cx, JSObject *ob
             DBG_LOCK(rt);
             return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok;
         }
     }
     DBG_UNLOCK(rt);
     return JS_TRUE;
 }
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                      jsval *rval)
 {
     JSObject *funobj;
     JSFunction *wrapper;
     jsval userid;
 
     funobj = JSVAL_TO_OBJECT(argv[-2]);
--- a/js/src/jsdbgapi.h
+++ b/js/src/jsdbgapi.h
@@ -115,20 +115,20 @@ js_FindWatchPoint(JSRuntime *rt, JSScope
 
 /*
  * NB: callers outside of jsdbgapi.c must pass non-null scope.
  */
 extern JSPropertyOp
 js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
                     const JSScopeProperty *sprop);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_watch_set(JSContext *cx, JSObject *obj, jsval id, jsval *vp);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                      jsval *rval);
 
 extern JSPropertyOp
 js_WrapWatchedSetter(JSContext *cx, jsid id, uintN attrs, JSPropertyOp setter);
 
 #endif /* JS_HAS_OBJ_WATCHPOINT */
 
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1582,17 +1582,17 @@ fun_toString(JSContext *cx, uintN argc, 
 #if JS_HAS_TOSOURCE
 static JSBool
 fun_toSource(JSContext *cx, uintN argc, jsval *vp)
 {
     return fun_toStringHelper(cx, JS_DONT_PRETTY_PRINT, argc, vp);
 }
 #endif
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_fun_call(JSContext *cx, uintN argc, jsval *vp)
 {
     JSObject *obj;
     jsval fval, *argv, *invokevp;
     JSString *str;
     void *mark;
     JSBool ok;
 
@@ -1641,17 +1641,17 @@ js_fun_call(JSContext *cx, uintN argc, j
     memcpy(invokevp + 2, argv, argc * sizeof *argv);
 
     ok = js_Invoke(cx, argc, invokevp, 0);
     *vp = *invokevp;
     js_FreeStack(cx, mark);
     return ok;
 }
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
 {
     JSObject *obj, *aobj;
     jsval fval, *invokevp, *sp;
     JSString *str;
     jsuint length;
     JSBool arraylike, ok;
     void *mark;
@@ -1732,17 +1732,17 @@ js_fun_apply(JSContext *cx, uintN argc, 
     ok = js_Invoke(cx, argc, invokevp, 0);
     *vp = *invokevp;
 out:
     js_FreeStack(cx, mark);
     return ok;
 }
 
 #ifdef NARCISSUS
-static JSBool
+static JS_REQUIRES_STACK JSBool
 fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
 {
     JSObject *aobj;
     uintN length, i;
     void *mark;
     jsval *invokevp, *sp;
     JSBool ok;
 
--- a/js/src/jsfun.h
+++ b/js/src/jsfun.h
@@ -279,18 +279,18 @@ js_GetLocalNameArray(JSContext *cx, JSFu
     ((JSAtom *) ((nameWord) & ~(jsuword) 1))
 
 #define JS_LOCAL_NAME_IS_CONST(nameWord)                                      \
     ((((nameWord) & (jsuword) 1)) != 0)
 
 extern void
 js_FreezeLocalNames(JSContext *cx, JSFunction *fun);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_fun_apply(JSContext *cx, uintN argc, jsval *vp);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_fun_call(JSContext *cx, uintN argc, jsval *vp);
 
 
 JS_END_EXTERN_C
 
 #endif /* jsfun_h___ */
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -69,16 +69,17 @@
 #include "jsinterp.h"
 #include "jsiter.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsparse.h"
 #include "jsscope.h"
 #include "jsscript.h"
+#include "jsstaticcheck.h"
 #include "jsstr.h"
 #include "jstracer.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
 
 /*
@@ -2952,17 +2953,17 @@ TraceWeakRoots(JSTracer *trc, JSWeakRoot
         }
     }
 
     JS_CALL_VALUE_TRACER(trc, wr->lastAtom, "lastAtom");
     JS_SET_TRACING_NAME(trc, "lastInternalResult");
     js_CallValueTracerIfGCThing(trc, wr->lastInternalResult);
 }
 
-JS_FRIEND_API(void)
+JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_TraceContext(JSTracer *trc, JSContext *acx)
 {
     JSStackFrame *fp, *nextChain;
     JSStackHeader *sh;
     JSTempValueRooter *tvr;
 
     if (IS_GC_MARKING_TRACER(trc)) {
 
@@ -3092,30 +3093,29 @@ js_TraceContext(JSTracer *trc, JSContext
 }
 
 void
 js_TraceTraceMonitor(JSTracer *trc, JSTraceMonitor *tm)
 {
     if (IS_GC_MARKING_TRACER(trc)) {
         tm->reservedDoublePoolPtr = tm->reservedDoublePool;
 
-        /* Make sure the global shape changes and will force a flush
-           of the code cache. */
-        tm->globalShape = -1; 
+        tm->needFlush = JS_TRUE;
+
         /* Keep the reserved objects. */
         for (JSObject *obj = tm->reservedObjects; obj; obj = JSVAL_TO_OBJECT(obj->fslots[0])) {
             uint8 *flagp = GetGCThingFlags(obj);
             JS_ASSERT((*flagp & GCF_TYPEMASK) == GCX_OBJECT);
             JS_ASSERT(*flagp != GCF_FINAL);
             *flagp |= GCF_MARK;
         }
     }
 }
 
-void
+JS_REQUIRES_STACK void
 js_TraceRuntime(JSTracer *trc, JSBool allAtoms)
 {
     JSRuntime *rt = trc->context->runtime;
     JSContext *iter, *acx;
 
     JS_DHashTableEnumerate(&rt->gcRootsHash, gc_root_traversal, trc);
     if (rt->gcLocksHash)
         JS_DHashTableEnumerate(rt->gcLocksHash, gc_lock_traversal, trc);
@@ -3452,16 +3452,17 @@ js_GC(JSContext *cx, JSGCInvocationKind 
         goto restart_at_beginning;
     }
 
     JS_UNLOCK_GC(rt);
 
 #ifdef JS_TRACER
     if (JS_ON_TRACE(cx))
         goto out;
+    VOUCH_HAVE_STACK();
 #endif
 
     /* Reset malloc counter. */
     rt->gcMallocBytes = 0;
 
 #ifdef JS_DUMP_SCOPE_METERS
   { extern void js_DumpScopeMeters(JSRuntime *rt);
     js_DumpScopeMeters(rt);
@@ -3774,17 +3775,18 @@ js_GC(JSContext *cx, JSGCInvocationKind 
 out:
 #endif
     JS_LOCK_GC(rt);
 
     /*
      * We want to restart GC if js_GC was called recursively or if any of the
      * finalizers called js_RemoveRoot or js_UnlockGCThingRT.
      */
-    if (rt->gcLevel > 1 || rt->gcPoke) {
+    if (!JS_ON_TRACE(cx) && (rt->gcLevel > 1 || rt->gcPoke)) {
+        VOUCH_HAVE_STACK();
         rt->gcLevel = 1;
         rt->gcPoke = JS_FALSE;
         JS_UNLOCK_GC(rt);
         goto restart;
     }
 
     if (rt->shapeGen >= SHAPE_OVERFLOW_BIT - 1) {
         /*
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -249,20 +249,20 @@ JS_STATIC_ASSERT(JSTRACE_STRING + 1 == J
  * type behind v.
  */
 extern void
 js_CallValueTracerIfGCThing(JSTracer *trc, jsval v);
 
 extern void
 js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp);
 
-extern void
+extern JS_REQUIRES_STACK void
 js_TraceRuntime(JSTracer *trc, JSBool allAtoms);
 
-extern JS_FRIEND_API(void)
+extern JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_TraceContext(JSTracer *trc, JSContext *acx);
 
 /*
  * Kinds of js_GC invocation.
  */
 typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -528,17 +528,17 @@ js_EnablePropertyCache(JSContext *cx)
     --JS_PROPERTY_CACHE(cx).disabled;
     JS_ASSERT(JS_PROPERTY_CACHE(cx).disabled >= 0);
 }
 
 /*
  * Check if the current arena has enough space to fit nslots after sp and, if
  * so, reserve the necessary space.
  */
-static JSBool
+static JS_REQUIRES_STACK JSBool
 AllocateAfterSP(JSContext *cx, jsval *sp, uintN nslots)
 {
     uintN surplus;
     jsval *sp2;
 
     JS_ASSERT((jsval *) cx->stackPool.current->base <= sp);
     JS_ASSERT(sp <= (jsval *) cx->stackPool.current->avail);
     surplus = (jsval *) cx->stackPool.current->avail - sp;
@@ -553,17 +553,17 @@ AllocateAfterSP(JSContext *cx, jsval *sp
         return JS_FALSE;
 
     JS_ARENA_ALLOCATE_CAST(sp2, jsval *, &cx->stackPool,
                            (nslots - surplus) * sizeof(jsval));
     JS_ASSERT(sp2 == sp + surplus);
     return JS_TRUE;
 }
 
-JS_STATIC_INTERPRET jsval *
+JS_STATIC_INTERPRET JS_REQUIRES_STACK jsval *
 js_AllocRawStack(JSContext *cx, uintN nslots, void **markp)
 {
     jsval *sp;
 
     if (!cx->stackPool.first.next) {
         int64 *timestamp;
 
         JS_ARENA_ALLOCATE_CAST(timestamp, int64 *,
@@ -578,23 +578,23 @@ js_AllocRawStack(JSContext *cx, uintN ns
     if (markp)
         *markp = JS_ARENA_MARK(&cx->stackPool);
     JS_ARENA_ALLOCATE_CAST(sp, jsval *, &cx->stackPool, nslots * sizeof(jsval));
     if (!sp)
         js_ReportOutOfScriptQuota(cx);
     return sp;
 }
 
-JS_STATIC_INTERPRET void
+JS_STATIC_INTERPRET JS_REQUIRES_STACK void
 js_FreeRawStack(JSContext *cx, void *mark)
 {
     JS_ARENA_RELEASE(&cx->stackPool, mark);
 }
 
-JS_FRIEND_API(jsval *)
+JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
 js_AllocStack(JSContext *cx, uintN nslots, void **markp)
 {
     jsval *sp;
     JSArena *a;
     JSStackHeader *sh;
 
     /* Callers don't check for zero nslots: we do to avoid empty segments. */
     if (nslots == 0) {
@@ -630,17 +630,17 @@ js_AllocStack(JSContext *cx, uintN nslot
      * Store JSVAL_NULL using memset, to let compilers optimize as they see
      * fit, in case a caller allocates and pushes GC-things one by one, which
      * could nest a last-ditch GC that will scan this segment.
      */
     memset(sp, 0, nslots * sizeof(jsval));
     return sp;
 }
 
-JS_FRIEND_API(void)
+JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_FreeStack(JSContext *cx, void *mark)
 {
     JSStackHeader *sh;
     jsuword slotdiff;
 
     /* Check for zero nslots allocation special case. */
     if (!mark)
         return;
@@ -983,17 +983,17 @@ js_OnUnknownMethod(JSContext *cx, jsval 
     }
     ok = JS_TRUE;
 
   out:
     JS_POP_TEMP_ROOT(cx, &tvr);
     return ok;
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 NoSuchMethod(JSContext *cx, uintN argc, jsval *vp, uint32 flags)
 {
     jsval *invokevp;
     void *mark;
     JSBool ok;
     JSObject *obj, *argsobj;
 
     invokevp = js_AllocStack(cx, 2 + 2, &mark);
@@ -1044,17 +1044,17 @@ const uint16 js_PrimitiveTestFlags[] = {
 };
 
 /*
  * Find a function reference and its 'this' object implicit first parameter
  * under argc arguments on cx's stack, and call the function.  Push missing
  * required arguments, allocate declared local variables, and pop everything
  * when done.  Then push the return value.
  */
-JS_FRIEND_API(JSBool)
+JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
 js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags)
 {
     void *mark;
     JSStackFrame frame;
     jsval *sp, *argv, *newvp;
     jsval v;
     JSObject *funobj, *parent;
     JSBool ok;
@@ -1260,17 +1260,17 @@ have_fun:
     frame.script = script;
     frame.callee = funobj;
     frame.fun = fun;
     frame.argc = argc;
     frame.argv = argv;
 
     /* Default return value for a constructor is the new object. */
     frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
-    frame.down = js_GetTopStackFrame(cx);
+    frame.down = cx->fp;
     frame.annotation = NULL;
     frame.scopeChain = NULL;    /* set below for real, after cx->fp is set */
     frame.regs = NULL;
     frame.imacpc = NULL;
     frame.slots = NULL;
     frame.sharpDepth = 0;
     frame.sharpArray = NULL;
     frame.flags = flags | rootedArgsFlag;
@@ -1367,17 +1367,17 @@ out2:
     return ok;
 
 bad:
     js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
     ok = JS_FALSE;
     goto out2;
 }
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
                   uintN argc, jsval *argv, jsval *rval)
 {
     jsval *invokevp;
     void *mark;
     JSBool ok;
 
     invokevp = js_AllocStack(cx, 2 + argc, &mark);
@@ -1413,16 +1413,18 @@ js_InternalInvoke(JSContext *cx, JSObjec
 }
 
 JSBool
 js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
                     JSAccessMode mode, uintN argc, jsval *argv, jsval *rval)
 {
     JSSecurityCallbacks *callbacks;
 
+    js_LeaveTrace(cx);
+
     /*
      * js_InternalInvoke could result in another try to get or set the same id
      * again, see bug 355497.
      */
     JS_CHECK_RECURSION(cx, return JS_FALSE);
 
     /*
      * Check general (not object-ops/class-specific) access from the running
@@ -1726,17 +1728,17 @@ js_StrictlyEqual(JSContext *cx, jsval lv
     if (JSVAL_IS_INT(lval) && rtag == JSVAL_DOUBLE) {
         ld = JSVAL_TO_INT(lval);
         rd = *JSVAL_TO_DOUBLE(rval);
         return JSDOUBLE_COMPARE(ld, ==, rd, JS_FALSE);
     }
     return lval == rval;
 }
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp)
 {
     JSFunction *fun, *fun2;
     JSObject *obj, *obj2, *proto, *parent;
     jsval lval, rval;
     JSClass *clasp;
 
     fun = NULL;
@@ -2517,17 +2519,26 @@ js_Interpret(JSContext *cx)
                                 JS_EXTENSION_(goto *jumpTable[op]);           \
                             JS_END_MACRO
 # define DO_NEXT_OP(n)      JS_BEGIN_MACRO                                    \
                                 METER_OP_PAIR(op, regs.pc[n]);                \
                                 op = (JSOp) *(regs.pc += (n));                \
                                 DO_OP();                                      \
                             JS_END_MACRO
 
-# define BEGIN_CASE(OP)     L_##OP: CHECK_RECORDER();
+# ifdef DEBUG
+#  define TRACE_OPCODE(OP)  JS_BEGIN_MACRO                                    \
+                                if (cx->tracefp)                              \
+                                    js_TraceOpcode(cx, len);                  \
+                            JS_END_MACRO
+# else
+#  define TRACE_OPCODE(OP)  (void)0
+# endif
+
+# define BEGIN_CASE(OP)     L_##OP: TRACE_OPCODE(OP); CHECK_RECORDER();
 # define END_CASE(OP)       DO_NEXT_OP(OP##_LENGTH);
 # define END_VARLEN_CASE    DO_NEXT_OP(len);
 # define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)                                    \
                                 JS_ASSERT(js_CodeSpec[OP].length == 1);       \
                                 op = (JSOp) *++regs.pc;                       \
                                 DO_OP();
 
 # define END_EMPTY_CASES
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -363,20 +363,20 @@ extern void
 js_DisablePropertyCache(JSContext *cx);
 
 extern void
 js_EnablePropertyCache(JSContext *cx);
 
 /*
  * Interpreter stack arena-pool alloc and free functions.
  */
-extern JS_FRIEND_API(jsval *)
+extern JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
 js_AllocStack(JSContext *cx, uintN nslots, void **markp);
 
-extern JS_FRIEND_API(void)
+extern JS_REQUIRES_STACK JS_FRIEND_API(void)
 js_FreeStack(JSContext *cx, void *mark);
 
 /*
  * Refresh and return fp->scopeChain.  It may be stale if block scopes are
  * active but not yet reflected by objects in the scope chain.  If a block
  * scope contains a with, eval, XML filtering predicate, or similar such
  * dynamically scoped construct, then compile-time block scope at fp->blocks
  * must reflect at runtime.
@@ -417,17 +417,17 @@ extern const uint16 js_PrimitiveTestFlag
  * NB: js_Invoke requires that cx is currently running JS (i.e., that cx->fp
  * is non-null), and that vp points to the callee, |this| parameter, and
  * actual arguments of the call. [vp .. vp + 2 + argc) must belong to the last
  * JS stack segment that js_AllocStack allocated. The function may use the
  * space available after vp + 2 + argc in the stack segment for temporaries,
  * so the caller should not use that space for values that must be preserved
  * across the call.
  */
-extern JS_FRIEND_API(JSBool)
+extern JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
 js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags);
 
 /*
  * Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that
  * we can share bits stored in JSStackFrame.flags and passed to:
  *
  *   js_Invoke
  *   js_InternalInvoke
@@ -463,17 +463,17 @@ js_InternalInvoke(JSContext *cx, JSObjec
 extern JSBool
 js_InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, jsval fval,
                     JSAccessMode mode, uintN argc, jsval *argv, jsval *rval);
 
 extern JSBool
 js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
            JSStackFrame *down, uintN flags, jsval *result);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp);
 
 extern JS_REQUIRES_STACK JSBool
 js_Interpret(JSContext *cx);
 
 #define JSPROP_INITIALIZER 0x100   /* NB: Not a valid property attribute. */
 
 extern JSBool
@@ -502,20 +502,20 @@ js_StrictlyEqual(JSContext *cx, jsval lv
 # endif
 #endif
 
 #if !JS_LONE_INTERPRET
 # define JS_STATIC_INTERPRET    static
 #else
 # define JS_STATIC_INTERPRET
 
-extern jsval *
+extern JS_REQUIRES_STACK jsval *
 js_AllocRawStack(JSContext *cx, uintN nslots, void **markp);
 
-extern void
+extern JS_REQUIRES_STACK void
 js_FreeRawStack(JSContext *cx, void *mark);
 
 /*
  * ECMA requires "the global object", but in embeddings such as the browser,
  * which have multiple top-level objects (windows, frames, etc. in the DOM),
  * we prefer fun's parent.  An example that causes this code to run:
  *
  *   // in window w1
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -59,28 +59,30 @@
 #include "jsiter.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsscan.h"
 #include "jsscope.h"
 #include "jsscript.h"
+#include "jsstaticcheck.h"
+#include "jstracer.h"
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
 
 #if JSSLOT_ITER_FLAGS >= JS_INITIAL_NSLOTS
 #error JS_INITIAL_NSLOTS must be greater than JSSLOT_ITER_FLAGS.
 #endif
 
 #if JS_HAS_GENERATORS
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 CloseGenerator(JSContext *cx, JSObject *genobj);
 
 #endif
 
 /*
  * Shared code to close iterator's state either through an explicit call or
  * when GC detects that the iterator is no longer reachable.
  */
@@ -402,16 +404,17 @@ js_ValueToIterator(JSContext *cx, uintN 
                 goto bad;
 
             /* Store in *vp to protect it from GC (callers must root vp). */
             *vp = OBJECT_TO_JSVAL(iterobj);
 
             if (!InitNativeIterator(cx, iterobj, obj, flags))
                 goto bad;
         } else {
+            js_LeaveTrace(cx);
             arg = BOOLEAN_TO_JSVAL((flags & JSITER_FOREACH) == 0);
             if (!js_InternalInvoke(cx, obj, *vp, JSINVOKE_ITERATOR, 1, &arg,
                                    vp)) {
                 goto bad;
             }
             if (JSVAL_IS_PRIMITIVE(*vp)) {
                 const char *printable = js_AtomToPrintableString(cx, atom);
                 if (printable) {
@@ -444,16 +447,17 @@ js_CloseIterator(JSContext *cx, jsval v)
     obj = JSVAL_TO_OBJECT(v);
     clasp = OBJ_GET_CLASS(cx, obj);
 
     if (clasp == &js_IteratorClass) {
         js_CloseNativeIterator(cx, obj);
     }
 #if JS_HAS_GENERATORS
     else if (clasp == &js_GeneratorClass) {
+        JS_ASSERT_NOT_ON_TRACE(cx);
         if (!CloseGenerator(cx, obj))
             return JS_FALSE;
     }
 #endif
     return JS_TRUE;
 }
 
 static JSBool
@@ -809,17 +813,17 @@ typedef enum JSGeneratorOp {
     JSGENOP_THROW,
     JSGENOP_CLOSE
 } JSGeneratorOp;
 
 /*
  * Start newborn or restart yielding generator and perform the requested
  * operation inside its frame.
  */
-static JSBool
+static JS_REQUIRES_STACK JSBool
 SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
                 JSGenerator *gen, jsval arg)
 {
     JSStackFrame *fp;
     JSArena *arena;
     JSBool ok;
 
     if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
@@ -899,17 +903,17 @@ SendToGenerator(JSContext *cx, JSGenerat
 
     /*
      * An error, silent termination by operation callback or an exception.
      * Propagate the condition to the caller.
      */
     return JS_FALSE;
 }
 
-static JSBool
+static JS_REQUIRES_STACK JSBool
 CloseGenerator(JSContext *cx, JSObject *obj)
 {
     JSGenerator *gen;
 
     JS_ASSERT(STOBJ_GET_CLASS(obj) == &js_GeneratorClass);
     gen = (JSGenerator *) JS_GetPrivate(cx, obj);
     if (!gen) {
         /* Generator prototype object. */
@@ -927,16 +931,18 @@ CloseGenerator(JSContext *cx, JSObject *
  */
 static JSBool
 generator_op(JSContext *cx, JSGeneratorOp op, jsval *vp, uintN argc)
 {
     JSObject *obj;
     JSGenerator *gen;
     jsval arg;
 
+    js_LeaveTrace(cx);
+
     obj = JS_THIS_OBJECT(cx, vp);
     if (!JS_InstanceOf(cx, obj, &js_GeneratorClass, vp + 2))
         return JS_FALSE;
 
     gen = (JSGenerator *) JS_GetPrivate(cx, obj);
     if (gen == NULL) {
         /* This happens when obj is the generator prototype. See bug 352885. */
         goto closed_generator;
@@ -971,16 +977,18 @@ generator_op(JSContext *cx, JSGeneratorO
             JS_SetPendingException(cx, argc >= 1 ? vp[2] : JSVAL_VOID);
             return JS_FALSE;
           default:
             JS_ASSERT(op == JSGENOP_CLOSE);
             return JS_TRUE;
         }
     }
 
+    js_LeaveTrace(cx);
+
     arg = ((op == JSGENOP_SEND || op == JSGENOP_THROW) && argc != 0)
           ? vp[2]
           : JSVAL_VOID;
     if (!SendToGenerator(cx, op, obj, gen, arg))
         return JS_FALSE;
     *vp = gen->frame.rval;
     return JS_TRUE;
 }
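Note: generator and iterator protocol calls reenter the interpreter, so these
paths now leave JIT code first. The idiom recurring through this patch,
sketched with the helpers named above (function name is illustrative only):

    /* Sketch only: deoptimize before touching frames or the stack arena,
     * as generator_op and js_ValueToIterator now do. */
    static JSBool
    ReenterInterpreterSketch(JSContext *cx)
    {
        js_LeaveTrace(cx);              /* exit native trace code, if any */
        JS_ASSERT_NOT_ON_TRACE(cx);     /* checked form, as in JS_PopArguments */
        return JS_TRUE;
    }
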
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -64,19 +64,20 @@
 #include "jsinterp.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsparse.h"
 #include "jsscope.h"
 #include "jsscript.h"
+#include "jsstaticcheck.h"
 #include "jsstr.h"
+#include "jstracer.h"
 #include "jsdbgapi.h"   /* whether or not JS_HAS_OBJ_WATCHPOINT */
-#include "jsstaticcheck.h"
 
 #if JS_HAS_GENERATORS
 #include "jsiter.h"
 #endif
 
 #if JS_HAS_XML_SUPPORT
 #include "jsxml.h"
 #endif
@@ -3978,17 +3979,17 @@ js_GetPropertyHelper(JSContext *cx, JSOb
         return OBJ_GET_PROPERTY(cx, obj2, id, vp);
     }
 
     sprop = (JSScopeProperty *) prop;
     if (!js_NativeGet(cx, obj, obj2, sprop, vp))
         return JS_FALSE;
 
     if (entryp) {
-        JS_ASSERT_NOT_EXECUTING_TRACE(cx);
+        JS_ASSERT_NOT_ON_TRACE(cx);
         js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp);
     }
     JS_UNLOCK_OBJ(cx, obj2);
     return JS_TRUE;
 }
 
 JSBool
 js_GetProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
@@ -4184,17 +4185,17 @@ js_SetPropertyHelper(JSContext *cx, JSOb
                             JS_UNLOCK_SCOPE(cx, scope);
                             return JS_FALSE);
     }
 
     if (!js_NativeSet(cx, obj, sprop, vp))
         return JS_FALSE;
 
     if (entryp) {
-        JS_ASSERT_NOT_EXECUTING_TRACE(cx);
+        JS_ASSERT_NOT_ON_TRACE(cx);
         if (!(attrs & JSPROP_SHARED))
             js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp);
         else
             PCMETER(JS_PROPERTY_CACHE(cx).nofills++);
     }
     JS_UNLOCK_SCOPE(cx, scope);
     return JS_TRUE;
 
@@ -5125,18 +5126,19 @@ js_TryMethod(JSContext *cx, JSObject *ob
 #endif
     {
         ok = OBJ_GET_PROPERTY(cx, obj, id, &fval);
     }
     if (!ok)
         JS_ClearPendingException(cx);
     JS_SetErrorReporter(cx, older);
 
-    return JSVAL_IS_PRIMITIVE(fval) ||
-           js_InternalCall(cx, obj, fval, argc, argv, rval);
+    if (JSVAL_IS_PRIMITIVE(fval))
+        return JS_TRUE;
+    return js_InternalCall(cx, obj, fval, argc, argv, rval);
 }
 
 #if JS_HAS_XDR
 
 JSBool
 js_XDRObject(JSXDRState *xdr, JSObject **objp)
 {
     JSContext *cx;
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -4105,16 +4105,17 @@ Decompile(SprintStack *ss, jsbytecode *p
                             table[j].order = j;
                             j++;
                         }
                         pc2 += jmplen;
                     }
                     tmp = (TableEntry *)
                           JS_malloc(cx, (size_t)j * sizeof *table);
                     if (tmp) {
+                        VOUCH_DOES_NOT_REQUIRE_STACK();
                         ok = js_MergeSort(table, (size_t)j, sizeof(TableEntry),
                                           CompareOffsets, NULL, tmp);
                         JS_free(cx, tmp);
                     } else {
                         ok = JS_FALSE;
                     }
                 }
 
--- a/js/src/jsopcode.tbl
+++ b/js/src/jsopcode.tbl
@@ -484,18 +484,17 @@ OPDEF(JSOP_TYPEOFEXPR,    198,"typeofexp
  * Block-local scope support.
  */
 OPDEF(JSOP_ENTERBLOCK,    199,"enterblock",  NULL,    3,  0, -1,  0,  JOF_OBJECT)
 OPDEF(JSOP_LEAVEBLOCK,    200,"leaveblock",  NULL,    3, -1,  0,  0,  JOF_UINT16)
 
 /*
  * Pick an element from the stack.
  */
-OPDEF(JSOP_PICK,          201,"pick",        NULL,    2,  1,  0,  0,  JOF_UINT8)
-
+OPDEF(JSOP_PICK,          201,"pick",        NULL,    2,  0,  0,  0,  JOF_UINT8)
 
 /* Throws a TypeError if the value at the top of the stack is not primitive. */
 OPDEF(JSOP_PRIMTOP,       202, "primtop",    NULL,    1,  1,  1,  0,  JOF_BYTE)
 
 OPDEF(JSOP_UNUSED203,     203,"unused203",   NULL,    1,  0,  0,  0,  JOF_BYTE)
 OPDEF(JSOP_UNUSED204,     204,"unused204",   NULL,    1,  0,  0,  0,  JOF_BYTE)
 OPDEF(JSOP_UNUSED205,     205,"unused205",   NULL,    1,  0,  0,  0,  JOF_BYTE)
 OPDEF(JSOP_UNUSED206,     206,"unused206",   NULL,    1,  0,  0,  0,  JOF_BYTE)
--- a/js/src/jsscan.cpp
+++ b/js/src/jsscan.cpp
@@ -1797,25 +1797,31 @@ skipline:
                     continue;
             }
             UngetChar(ts, c);
             ts->cursor = (ts->cursor - 1) & NTOKENS_MASK;
             goto retry;
         }
 
         if (MatchChar(ts, '*')) {
+            uintN lineno = ts->lineno;
             while ((c = GetChar(ts)) != EOF &&
                    !(c == '*' && MatchChar(ts, '/'))) {
                 /* Ignore all characters until comment close. */
             }
             if (c == EOF) {
                 js_ReportCompileErrorNumber(cx, ts, NULL, JSREPORT_ERROR,
                                             JSMSG_UNTERMINATED_COMMENT);
                 goto error;
             }
+            if ((ts->flags & TSF_NEWLINES) && lineno != ts->lineno) {
+                ts->flags &= ~TSF_DIRTYLINE;
+                tt = TOK_EOL;
+                goto eol_out;
+            }
             ts->cursor = (ts->cursor - 1) & NTOKENS_MASK;
             goto retry;
         }
 
         if (ts->flags & TSF_OPERAND) {
             uintN flags;
             JSBool inCharClass = JS_FALSE;
 
--- a/js/src/jsstaticcheck.h
+++ b/js/src/jsstaticcheck.h
@@ -48,21 +48,22 @@ inline __attribute__ ((unused)) void MUS
 }
 
 /* avoid unused goto-label warnings */
 #define MUST_FLOW_LABEL(label) goto label; label:
 
 inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {}
 
 inline JS_FORCES_STACK void
-JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx)
+JS_ASSERT_NOT_ON_TRACE(JSContext *cx)
 {
     JS_ASSERT(!JS_ON_TRACE(cx));
 }
 
 #else
 #define MUST_FLOW_THROUGH(label)            ((void) 0)
 #define MUST_FLOW_LABEL(label)
 #define VOUCH_DOES_NOT_REQUIRE_STACK()      ((void) 0)
-#define JS_ASSERT_NOT_EXECUTING_TRACE(cx)   JS_ASSERT(!JS_ON_TRACE(cx))
+#define JS_ASSERT_NOT_ON_TRACE(cx)          JS_ASSERT(!JS_ON_TRACE(cx))
 #endif
+#define VOUCH_HAVE_STACK                    VOUCH_DOES_NOT_REQUIRE_STACK
 
 #endif /* jsstaticcheck_h___ */
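
Note: the renamed assertion and the new VOUCH_HAVE_STACK alias feed the same
static analysis as JS_REQUIRES_STACK, which this patch adds to many
stack-touching functions: annotated code may only be reached from callers that
provably hold a synthesized interpreter stack. A minimal sketch of how the
pieces compose, assuming the macros defined above (name is illustrative only):

    /* Sketch only: an annotated function in this patch's idiom. The checker
     * flags any path from trace into it; VOUCH_HAVE_STACK() asserts the
     * invariant manually where control flow already proves it. */
    static JS_REQUIRES_STACK void
    TouchStackPoolSketch(JSContext *cx)
    {
        JS_ASSERT_NOT_ON_TRACE(cx);
        VOUCH_HAVE_STACK();
    }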
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -65,16 +65,17 @@
 #include "jsgc.h"
 #include "jsinterp.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsregexp.h"
 #include "jsscope.h"
+#include "jsstaticcheck.h"
 #include "jsstr.h"
 #include "jsbit.h"
 
 #define JSSTRDEP_RECURSION_LIMIT        100
 
 size_t
 js_MinimizeDependentStrings(JSString *str, int level, JSString **basep)
 {
@@ -1575,16 +1576,18 @@ find_replen(JSContext *cx, ReplaceData *
 
     lambda = rdata->lambda;
     if (lambda) {
         uintN argc, i, j, m, n, p;
         jsval *invokevp, *sp;
         void *mark;
         JSBool ok;
 
+        JS_ASSERT_NOT_ON_TRACE(cx);
+
         /*
          * Save the regExpStatics from the current regexp, since they may be
          * clobbered by a RegExp usage in the lambda function.  Note that all
          * members of JSRegExpStatics are JSSubStrings, so not GC roots, save
          * input, which is rooted otherwise via vp[1] in str_replace.
          */
         JSRegExpStatics save = cx->regExpStatics;
         JSBool freeMoreParens = JS_FALSE;
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -218,17 +218,17 @@ using namespace avmplus;
 using namespace nanojit;
 
 static GC gc = GC();
 static avmplus::AvmCore s_core = avmplus::AvmCore();
 static avmplus::AvmCore* core = &s_core;
 
 #ifdef JS_JIT_SPEW
 void
-js_DumpPeerStability(JSTraceMonitor* tm, const void* ip);
+js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, uint32 globalShape);
 #endif
 
 /* We really need a better way to configure the JIT. Shaver, where is my fancy JIT object? */
 static bool nesting_enabled = true;
 #if defined(NANOJIT_IA32)
 static bool did_we_check_sse2 = false;
 #endif
 
@@ -238,17 +238,17 @@ bool js_verboseDebug = getenv("TRACEMONK
 
 /* The entire VM shares one oracle. Collisions and concurrent updates are tolerated and worst
    case cause performance regressions. */
 static Oracle oracle;
 
 /* Blacklists the root peer fragment at a fragment's PC.  This is so blacklisting stays at the 
    top of the peer list and not scattered around. */
 void
-js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag);
+js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag, uint32 globalShape);
 
 Tracker::Tracker()
 {
     pagelist = 0;
 }
 
 Tracker::~Tracker()
 {
@@ -535,27 +535,24 @@ getVMFragment(JSTraceMonitor* tm, const 
     while (vf && 
            ! (vf->globalShape == globalShape &&
               vf->ip == ip)) {
         vf = vf->next;
     }
     return vf;
 }
 
-// FIXME: remove the default parameters for globalShape when we're
-// actually keying by it.
-
 static Fragment*
-getLoop(JSTraceMonitor* tm, const void *ip, uint32 globalShape = 0)
+getLoop(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
 {
     return getVMFragment(tm, ip, globalShape);
 }
 
 static Fragment*
-getAnchor(JSTraceMonitor* tm, const void *ip, uint32 globalShape = 0)
+getAnchor(JSTraceMonitor* tm, const void *ip, uint32 globalShape)
 {
     LirBufWriter writer(tm->lirbuf);
     char *fragmem = (char*) writer.skip(sizeof(VMFragment))->payload();
     if (!fragmem)
         return NULL;
     VMFragment *f = new (fragmem) VMFragment(ip, globalShape);
     JS_ASSERT(f);
 
@@ -1222,20 +1219,20 @@ TraceRecorder::TraceRecorder(JSContext* 
     lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
     gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
     eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
     eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
     globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj");
 
     /* If we came from exit, we might not have enough global types. */
-    if (JS_TRACE_MONITOR(cx).globalSlots->length() > ti->globalSlots()) {
+    if (ti->globalSlots->length() > ti->nGlobalTypes()) {
         ti->typeMap.captureMissingGlobalTypes(cx,
-                                              *JS_TRACE_MONITOR(cx).globalSlots,
-                                              ti->stackSlots);
+                                              *(ti->globalSlots),
+                                              ti->nStackTypes);
     }
 
     /* read into registers all values on the stack and all globals we know so far */
     import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
 
     if (fragment == fragment->root) {
         LIns* counter = lir->insLoadi(cx_ins,
                                       offsetof(JSContext, operationCount));
@@ -1773,28 +1770,28 @@ TraceRecorder::import(TreeInfo* treeInfo
        a different trace of the tree might have had a guard with a different type map for
        these slots we just filled in here (the guard we continue from didn't know about them),
        since we didn't take that particular guard the only way we could have ended up here
        is if that other trace had at its end a compatible type distribution with the entry
        map. Since thats exactly what we used to fill in the types our current side exit
        didn't provide, this is always safe to do. */
 
     uint8* globalTypeMap = typeMap + stackSlots;
-    unsigned length = treeInfo->globalSlots();
+    unsigned length = treeInfo->nGlobalTypes();
 
     /* This is potentially the typemap of the side exit and thus shorter than the tree's
        global type map. */
     if (ngslots < length)
         mergeTypeMaps(&globalTypeMap/*out param*/, &ngslots/*out param*/,
                       treeInfo->globalTypeMap(), length,
                       (uint8*)alloca(sizeof(uint8) * length));
-    JS_ASSERT(ngslots == treeInfo->globalSlots());
+    JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
 
     /* the first time we compile a tree this will be empty as we add entries lazily */
-    uint16* gslots = traceMonitor->globalSlots->data();
+    uint16* gslots = treeInfo->globalSlots->data();
     uint8* m = globalTypeMap;
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         import(gp_ins, nativeGlobalOffset(vp), vp, *m, vpname, vpnum, NULL);
         m++;
     );
     ptrdiff_t offset = -treeInfo->nativeStackBase;
     m = typeMap;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
@@ -1807,22 +1804,19 @@ TraceRecorder::import(TreeInfo* treeInfo
 JS_REQUIRES_STACK bool
 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
 {
     if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
         return false;
     jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
     if (known(vp))
         return true; /* we already have it */
-    unsigned index = traceMonitor->globalSlots->length();
-    /* If this the first global we are adding, remember the shape of the global object. */
-    if (index == 0)
-        traceMonitor->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain));
+    unsigned index = treeInfo->globalSlots->length();
     /* Add the slot to the list of interned global slots. */
-    traceMonitor->globalSlots->add(slot);
+    treeInfo->globalSlots->add(slot);
     uint8 type = getCoercedType(*vp);
     if ((type == JSVAL_INT) && oracle.isGlobalSlotUndemotable(cx, slot))
         type = JSVAL_DOUBLE;
     treeInfo->typeMap.add(type);
     import(gp_ins, slot*sizeof(double), vp, type, "global", index, NULL);
     return true;
 }
 
@@ -1979,37 +1973,36 @@ js_IsLoopEdge(jsbytecode* pc, jsbytecode
     return false;
 }
 
 /* Promote slots if necessary to match the called tree's type map and report error if that's
    impossible. */
 JS_REQUIRES_STACK bool
 TraceRecorder::adjustCallerTypes(Fragment* f)
 {
-    JSTraceMonitor* tm = traceMonitor;
-    uint16* gslots = tm->globalSlots->data();
-    unsigned ngslots = tm->globalSlots->length();
-    JS_ASSERT(ngslots == treeInfo->globalSlots());
+    uint16* gslots = treeInfo->globalSlots->data();
+    unsigned ngslots = treeInfo->globalSlots->length();
+    JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
     bool ok = true;
     uint8* map = ti->globalTypeMap();
     uint8* m = map;
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, 
         LIns* i = get(vp);
         bool isPromote = isPromoteInt(i);
         if (isPromote && *m == JSVAL_DOUBLE) 
             lir->insStorei(get(vp), gp_ins, nativeGlobalOffset(vp));
         else if (!isPromote && *m == JSVAL_INT) {
             debug_only_v(printf("adjusting will fail, %s%d, slot %d\n", vpname, vpnum, m - map);)
             oracle.markGlobalSlotUndemotable(cx, gslots[n]);
             ok = false;
         }
         ++m;
     );
-    JS_ASSERT(unsigned(m - map) == ti->globalSlots());
+    JS_ASSERT(unsigned(m - map) == ti->nGlobalTypes());
     map = ti->stackTypeMap();
     m = map;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
         LIns* i = get(vp);
         bool isPromote = isPromoteInt(i);
         if (isPromote && *m == JSVAL_DOUBLE) {
             lir->insStorei(get(vp), lirbuf->sp, 
                            -treeInfo->nativeStackBase + nativeStackOffset(vp));
@@ -2020,17 +2013,17 @@ TraceRecorder::adjustCallerTypes(Fragmen
             ok = false;
             oracle.markStackSlotUndemotable(cx, unsigned(m - map));
         } else if (JSVAL_IS_INT(*vp) && *m == JSVAL_DOUBLE) {
             /* Aggressively undo speculation so the inner tree will compile if this fails. */
             oracle.markStackSlotUndemotable(cx, unsigned(m - map));
         }
         ++m;
     );
-    JS_ASSERT(unsigned(m - map) == ti->stackSlots);
+    JS_ASSERT(unsigned(m - map) == ti->nStackTypes);
     JS_ASSERT(f == f->root);
     return ok;
 }
 
 JS_REQUIRES_STACK uint8
 TraceRecorder::determineSlotType(jsval* vp)
 {
     uint8 m;
@@ -2115,25 +2108,25 @@ TraceRecorder::snapshot(ExitType exitTyp
     /* Generate the entry map for the (possibly advanced) pc and stash it in the trace. */
     unsigned stackSlots = js_NativeStackSlots(cx, callDepth);
 
     /* It's sufficient to track the native stack use here since all stores above the
        stack watermark defined by guards are killed. */
     trackNativeStackUse(stackSlots + 1);
 
     /* Capture the type map into a temporary location. */
-    unsigned ngslots = traceMonitor->globalSlots->length();
+    unsigned ngslots = treeInfo->globalSlots->length();
     unsigned typemap_size = (stackSlots + ngslots) * sizeof(uint8);
     uint8* typemap = (uint8*)alloca(typemap_size);
     uint8* m = typemap;
 
     /* Determine the type of a store by looking at the current type of the actual value the
        interpreter is using. For numbers we have to check what kind of store we used last
       (integer or double) to figure out what the side exit should reflect in its typemap. */
-    FORALL_SLOTS(cx, ngslots, traceMonitor->globalSlots->data(), callDepth,
+    FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
         *m++ = determineSlotType(vp);
     );
     JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
 
     /* If we are capturing the stack state on a specific instruction, the value on
        the top of the stack is a boxed value. */
     if (resumeAfter) {
         typemap[stackSlots - 1] = JSVAL_BOXED;
@@ -2319,19 +2312,19 @@ TraceRecorder::checkType(jsval& v, uint8
  * @param demote            True if stability was achieved through demotion.
  * @return                  True if type stable, false otherwise.
  */
 JS_REQUIRES_STACK bool
 TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, bool& demote)
 {
     uint8* m;
     uint8* typemap;
-    unsigned ngslots = traceMonitor->globalSlots->length();
-    uint16* gslots = traceMonitor->globalSlots->data();
-    JS_ASSERT(ngslots == treeInfo->globalSlots());
+    unsigned ngslots = treeInfo->globalSlots->length();
+    uint16* gslots = treeInfo->globalSlots->data();
+    JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
 
     if (stable_peer)
         *stable_peer = NULL;
 
     /*
     * Rather than calculating all of this twice, we cache it locally. The "stage" buffers
      * are for calls to set() that will change the exit types.
      */
@@ -2395,24 +2388,25 @@ checktype_fail_1:
     Fragment* f;
     TreeInfo* ti;
     for (f = root_peer; f != NULL; f = f->peer) {
         debug_only_v(printf("Checking type stability against peer=%p (code=%p)\n", f, f->code());)
         if (!f->code())
             continue;
         ti = (TreeInfo*)f->vmprivate;
         /* Don't allow varying stack depths */
-        if ((ti->stackSlots != treeInfo->stackSlots) ||
-            (ti->typeMap.length() != treeInfo->typeMap.length()))
+        if ((ti->nStackTypes != treeInfo->nStackTypes) ||
+            (ti->typeMap.length() != treeInfo->typeMap.length()) || 
+            (ti->globalSlots->length() != treeInfo->globalSlots->length()))
             continue;
         stage_count = 0;
         success = false;
 
         m = ti->globalTypeMap();
-        FORALL_GLOBAL_SLOTS(cx, traceMonitor->globalSlots->length(), traceMonitor->globalSlots->data(),
+        FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
                 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
                     goto checktype_fail_2;
                 ++m;
             );
         
         m = ti->stackTypeMap();
         FORALL_SLOTS_IN_PENDING_FRAMES(cx, 0,
                 if (!checkType(*vp, *m, stage_vals[stage_count], stage_ins[stage_count], stage_count))
@@ -2439,17 +2433,17 @@ checktype_fail_2:
 
     /*
      * If this is a loop trace and it would be stable with demotions, build an undemote list 
      * and return true.  Our caller should sniff this and trash the tree, recording a new one 
     * that will presumably stabilize.
      */
     if (demote && fragment->kind == LoopTrace) {
         typemap = m = treeInfo->globalTypeMap();
-        FORALL_GLOBAL_SLOTS(cx, traceMonitor->globalSlots->length(), traceMonitor->globalSlots->data(),
+        FORALL_GLOBAL_SLOTS(cx, treeInfo->globalSlots->length(), treeInfo->globalSlots->data(),
             if (*m == JSVAL_INT) {
                 JS_ASSERT(isNumber(*vp));
                 if (!isPromoteInt(get(vp)))
                     oracle.markGlobalSlotUndemotable(cx, gslots[n]);
             } else if (*m == JSVAL_DOUBLE) {
                 JS_ASSERT(isNumber(*vp));
                 oracle.markGlobalSlotUndemotable(cx, gslots[n]);
             } else {
@@ -2489,29 +2483,29 @@ TraceRecorder::isLoopHeader(JSContext* c
 
 /* Compile the current fragment. */
 JS_REQUIRES_STACK void
 TraceRecorder::compile(JSTraceMonitor* tm)
 {
     Fragmento* fragmento = tm->fragmento;
     if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
         debug_only_v(printf("Trace rejected: excessive stack use.\n"));
-        js_BlacklistPC(tm, fragment);
+        js_BlacklistPC(tm, fragment, treeInfo->globalShape);
         return;
     }
     ++treeInfo->branchCount;
     if (lirbuf->outOMem()) {
         fragmento->assm()->setError(nanojit::OutOMem);
         return;
     }
     ::compile(fragmento->assm(), fragment);
     if (fragmento->assm()->error() == nanojit::OutOMem)
         return;
     if (fragmento->assm()->error() != nanojit::None) {
-        js_BlacklistPC(tm, fragment);
+        js_BlacklistPC(tm, fragment, treeInfo->globalShape);
         return;
     }
     if (anchor) 
         fragmento->assm()->patch(anchor);
     JS_ASSERT(fragment->code());
     JS_ASSERT(!fragment->vmprivate);
     if (fragment == fragment->root)
         fragment->vmprivate = treeInfo;
@@ -2526,17 +2520,17 @@ TraceRecorder::compile(JSTraceMonitor* t
 #endif
     AUDIT(traceCompleted);
 }
 
 static bool
 js_JoinPeersIfCompatible(Fragmento* frago, Fragment* stableFrag, TreeInfo* stableTree, 
                          VMSideExit* exit)
 {
-    JS_ASSERT(exit->numStackSlots == stableTree->stackSlots);
+    JS_ASSERT(exit->numStackSlots == stableTree->nStackTypes);
 
     /* Must have a matching type-unstable exit. */
     if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
         memcmp(getFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
        return false; 
     }
 
     exit->target = stableFrag;
@@ -2558,24 +2552,24 @@ TraceRecorder::closeLoop(JSTraceMonitor*
     Fragment* peer_root;
     Fragmento* fragmento = tm->fragmento;
 
     exitIns = snapshot(UNSTABLE_LOOP_EXIT);
     exit = (VMSideExit*)((GuardRecord*)exitIns->payload())->exit;
 
     if (callDepth != 0) {
         debug_only_v(printf("Stack depth mismatch, possible recursion\n");)
-        js_BlacklistPC(tm, fragment);
+        js_BlacklistPC(tm, fragment, treeInfo->globalShape);
         trashSelf = true;
         return false;
     }
 
-    JS_ASSERT(exit->numStackSlots == treeInfo->stackSlots);
-
-    peer_root = getLoop(traceMonitor, fragment->root->ip);
+    JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
+
+    peer_root = getLoop(traceMonitor, fragment->root->ip, treeInfo->globalShape);
     JS_ASSERT(peer_root != NULL);
     stable = deduceTypeStability(peer_root, &peer, demote);
 
     #if DEBUG
     if (!stable)
         AUDIT(unstableLoopVariable);
     #endif
 
@@ -2642,23 +2636,22 @@ TraceRecorder::closeLoop(JSTraceMonitor*
                         FramePCOffset(cx->fp));)
     return true;
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root)
 {
     if (fragment->kind == LoopTrace) {
-        JSTraceMonitor* tm = traceMonitor;
         TreeInfo* ti;
         Fragment* peer;
         uint8* t1, *t2;
         UnstableExit* uexit, **unext;
-        uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->stackSlots);
-        uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->globalSlots());
+        uint32* stackDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nStackTypes);
+        uint32* globalDemotes = (uint32*)alloca(sizeof(uint32) * treeInfo->nGlobalTypes());
 
         for (peer = peer_root; peer != NULL; peer = peer->peer) {
             if (!peer->code())
                 continue;
             ti = (TreeInfo*)peer->vmprivate;
             uexit = ti->unstableExits;
             unext = &ti->unstableExits;
             while (uexit != NULL) {
@@ -2694,17 +2687,17 @@ TraceRecorder::joinEdgesToEntry(Fragment
                             stackCount = 0;
                             break;
                         }
                     }
                     if (stackCount || globalCount) {
                         for (unsigned i = 0; i < stackCount; i++)
                             oracle.markStackSlotUndemotable(cx, stackDemotes[i]);
                         for (unsigned i = 0; i < globalCount; i++)
-                            oracle.markGlobalSlotUndemotable(cx, tm->globalSlots->data()[globalDemotes[i]]);
+                            oracle.markGlobalSlotUndemotable(cx, ti->globalSlots->data()[globalDemotes[i]]);
                         JS_ASSERT(peer == uexit->fragment->root);
                         if (fragment == peer)
                             trashSelf = true;
                         else
                             whichTreesToTrash.addUnique(uexit->fragment->root);
                         break;
                     }
                 }
@@ -2715,39 +2708,39 @@ TraceRecorder::joinEdgesToEntry(Fragment
                 } else {
                     unext = &uexit->next;
                     uexit = uexit->next;
                 }
             } 
         } 
     }
 
-    debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip);)
+    debug_only_v(js_DumpPeerStability(traceMonitor, peer_root->ip, treeInfo->globalShape);)
 }
 
 /* Emit an always-exit guard and compile the tree (used for break statements). */
 JS_REQUIRES_STACK void
 TraceRecorder::endLoop(JSTraceMonitor* tm)
 {
     LIns* exitIns = snapshot(LOOP_EXIT);
 
     if (callDepth != 0) {
         debug_only_v(printf("Stack depth mismatch, possible recursion\n");)
-        js_BlacklistPC(tm, fragment);
+        js_BlacklistPC(tm, fragment, treeInfo->globalShape);
         trashSelf = true;
         return;
     }
 
     fragment->lastIns = lir->insGuard(LIR_x, lir->insImm(1), exitIns);
     compile(tm);
 
     if (tm->fragmento->assm()->error() != nanojit::None)
         return;
 
-    joinEdgesToEntry(tm->fragmento, getLoop(tm, fragment->root->ip));
+    joinEdgesToEntry(tm->fragmento, getLoop(tm, fragment->root->ip, treeInfo->globalShape));
 
     debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",
                         cx->fp->script->filename,
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp));)
 }
 
 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
@@ -2955,48 +2948,100 @@ nanojit::LirNameMap::formatGuard(LIns *i
 #endif
 
 void
 nanojit::Fragment::onDestroy()
 {
     delete (TreeInfo *)vmprivate;
 }
 
-void
+static JS_REQUIRES_STACK bool
 js_DeleteRecorder(JSContext* cx)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Aborting and completing a trace end up here. */
     delete tm->recorder;
     tm->recorder = NULL;
+
+    /* 
+     * If we ran out of memory, flush the code cache.
+     */
+    if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem) {
+        js_FlushJITCache(cx);
+        return false;
+    }
+
+    return true;
 }
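
js_DeleteRecorder now reports whether the code cache survived, so any caller that keeps using a Fragment afterwards must check the result. A sketch of the caller pattern this patch adopts in js_CloseLoop and js_AbortRecording below (function name hypothetical):

static JS_REQUIRES_STACK bool
finishRecordingSketch(JSContext* cx, TraceRecorder* r)
{
    /* Cache anything needed from the recorder before deleting it. */
    Fragment* f = r->getFragment();
    if (!js_DeleteRecorder(cx))
        return false;           /* OutOMem flushed the cache; f is dead */
    return f->code() != NULL;   /* f may only be touched on this path */
}
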
 
 /**
  * Checks whether the shape of the global object has changed.
  */
 static inline bool
-js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj)
-{
-    /* Check the global shape. */
-    if (OBJ_SHAPE(globalObj) != tm->globalShape) {
-        AUDIT(globalShapeMismatchAtEntry);
-        debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
-                            OBJ_SHAPE(globalObj), tm->globalShape);)
+js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj, 
+                          uint32 *shape=NULL, SlotList** slots=NULL)
+{
+    if (tm->needFlush) {
+        tm->needFlush = JS_FALSE;
         return false;
     }
-    return true;
+
+    uint32 globalShape = OBJ_SHAPE(globalObj);
+
+    if (tm->recorder) {
+        TreeInfo* ti = tm->recorder->getTreeInfo();
+        /* Check that the global shape matches the shape in the recorder's treeinfo. */
+        if (globalShape != ti->globalShape) {
+            AUDIT(globalShapeMismatchAtEntry);
+            debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
+                                globalShape, ti->globalShape);)
+            return false;
+        }
+        if (shape)
+            *shape = globalShape;
+        if (slots)
+            *slots = ti->globalSlots;
+        return true;
+    }
+
+    /* No recorder; search for a tracked global state (or allocate one). */
+    for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
+
+        GlobalState &state = tm->globalStates[i];
+
+        if (state.globalShape == -1) {
+            state.globalShape = globalShape;
+            JS_ASSERT(state.globalSlots);
+            JS_ASSERT(state.globalSlots->length() == 0);
+        }
+
+        if (state.globalShape == globalShape) {
+            if (shape)
+                *shape = globalShape;
+            if (slots)
+                *slots = state.globalSlots;
+            return true;
+        }
+    }
+
+    /* No tracked global state found and no room to allocate one; abort. */
+    AUDIT(globalShapeMismatchAtEntry);
+    debug_only_v(printf("No global slotlist for global shape %u, flushing cache.\n",
+                        globalShape));
+    return false;
 }
 
 static JS_REQUIRES_STACK bool
 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
                  unsigned stackSlots, unsigned ngslots, uint8* typeMap, 
                  VMSideExit* expectedInnerExit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+    JS_ASSERT(f->root != f || !cx->fp->imacpc);
 
     /* start recording if no exception during construction */
     tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
                                            stackSlots, ngslots, typeMap,
                                            expectedInnerExit, outer);
     if (cx->throwing) {
         js_AbortRecording(cx, "setting up recorder failed");
         return false;
@@ -3218,17 +3263,18 @@ js_dumpMap(TypeMap const & tm) {
     uint8 *data = tm.data();
     for (unsigned i = 0; i < tm.length(); ++i) {
         printf("typemap[%d] = %c\n", i, typeChar[data[i]]);
     }
 }
 #endif
 
 JS_REQUIRES_STACK bool
-js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer)
+js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, 
+              uint32 globalShape, SlotList* globalSlots)
 {
     JS_ASSERT(f->root == f);
     
     /* Avoid recording loops in overlarge scripts. */
     if (cx->fp->script->length >= SCRIPT_PC_ADJ_LIMIT) {
         js_AbortRecording(cx, "script too large");
         return false;
     }
@@ -3241,17 +3287,17 @@ js_RecordTree(JSContext* cx, JSTraceMoni
     }
 
     AUDIT(recorderStarted);
 
     /* Try to find an unused peer fragment, or allocate a new one. */
     while (f->code() && f->peer)
         f = f->peer;
     if (f->code())
-        f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip);
+        f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalShape);
 
     if (!f) {
         js_FlushJITCache(cx);
         return false;
     }
 
     f->recordAttempts++;
     f->root = f;
@@ -3261,164 +3307,159 @@ js_RecordTree(JSContext* cx, JSTraceMoni
         js_FlushJITCache(cx);
         debug_only_v(printf("Out of memory recording new tree, flushing cache.\n");)
         return false;
     }
 
     JS_ASSERT(!f->code() && !f->vmprivate);
 
     /* set up the VM-private treeInfo structure for this fragment */
-    TreeInfo* ti = new (&gc) TreeInfo(f);
+    TreeInfo* ti = new (&gc) TreeInfo(f, globalShape, globalSlots);
 
     /* capture the coerced type of each active slot in the type map */
-    SlotList& globalSlots = *tm->globalSlots;
-    ti->typeMap.captureTypes(cx, globalSlots, 0/*callDepth*/);
-    ti->stackSlots = ti->typeMap.length() - globalSlots.length();
+    ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
+    ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
 
     /* Check for duplicate entry type maps.  This is always wrong and hints at trace explosion 
        since we are trying to stabilize something without properly connecting peer edges. */
     #ifdef DEBUG
     TreeInfo* ti_other;
-    for (Fragment* peer = getLoop(tm, f->root->ip); peer != NULL; peer = peer->peer) {
+    for (Fragment* peer = getLoop(tm, f->root->ip, globalShape); peer != NULL; peer = peer->peer) {
         if (!peer->code() || peer == f)
             continue;
         ti_other = (TreeInfo*)peer->vmprivate;
         JS_ASSERT(ti_other);
         JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
     }
     #endif
 
     /* determine the native frame layout at the entry point */
-    unsigned entryNativeStackSlots = ti->stackSlots;
+    unsigned entryNativeStackSlots = ti->nStackTypes;
     JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/));
     ti->nativeStackBase = (entryNativeStackSlots -
             (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
     ti->maxNativeStackSlots = entryNativeStackSlots;
     ti->maxCallDepth = 0;
     ti->script = cx->fp->script;
 
     /* recording primary trace */
     if (!js_StartRecorder(cx, NULL, f, ti,
-                          ti->stackSlots,
-                          tm->globalSlots->length(), 
+                          ti->nStackTypes,
+                          ti->globalSlots->length(), 
                           ti->typeMap.data(), NULL, outer)) {
         return false;
     }
 
     return true;
 }
 
-JS_REQUIRES_STACK static inline bool isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
-{
-    if (slot < ti->stackSlots)
+JS_REQUIRES_STACK static inline bool 
+isSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
+{
+    if (slot < ti->nStackTypes)
         return oracle.isStackSlotUndemotable(cx, slot);
-
-    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-    uint16* gslots = tm->globalSlots->data();
-    return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->stackSlots]);
+
+    uint16* gslots = ti->globalSlots->data();
+    return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
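
The slot index consumed above spans a single combined typemap: stack types first, then global types. A small helper restating that mapping (a sketch; jstracer expresses it inline):

/* Map a combined typemap index at or beyond nStackTypes to its global slot. */
static uint16
globalSlotForTypemapIndexSketch(TreeInfo* ti, unsigned slot)
{
    JS_ASSERT(slot >= ti->nStackTypes);     /* lower indices are stack slots */
    return ti->globalSlots->data()[slot - ti->nStackTypes];
}
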
 
 JS_REQUIRES_STACK static bool
 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     Fragment* from = exit->from->root;
+    TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
 
     JS_ASSERT(exit->from->root->code());
 
     /* Make sure any doubles are not accidentally undemoted */
     uint8* m = getStackTypeMap(exit);
     for (unsigned i = 0; i < exit->numStackSlots; i++) {
         if (m[i] == JSVAL_DOUBLE)
             oracle.markStackSlotUndemotable(cx, i);
     }
     m = getGlobalTypeMap(exit);
     for (unsigned i = 0; i < exit->numGlobalSlots; i++) {
         if (m[i] == JSVAL_DOUBLE)
-            oracle.markGlobalSlotUndemotable(cx, tm->globalSlots->data()[i]);
+            oracle.markGlobalSlotUndemotable(cx, from_ti->globalSlots->data()[i]);
     }
 
     /* If this exit does not have enough globals, there might exist a peer with more globals that we 
      * can join to.
      */
-    uint8* m2;
-    Fragment* f;
-    TreeInfo* ti;
-    bool matched;
-    bool undemote;
     bool bound = false;
-    unsigned int checkSlots;
-    for (f = from->first; f != NULL; f = f->peer) {
+    for (Fragment* f = from->first; f != NULL; f = f->peer) {
         if (!f->code())
             continue;
-        ti = (TreeInfo*)f->vmprivate;
-        JS_ASSERT(exit->numStackSlots == ti->stackSlots);
+        TreeInfo* ti = (TreeInfo*)f->vmprivate;
+        JS_ASSERT(exit->numStackSlots == ti->nStackTypes);
         /* Check the minimum number of slots that need to be compared. */
-        checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length());
+        unsigned checkSlots = JS_MIN(exit->numStackSlots + exit->numGlobalSlots, ti->typeMap.length());
         m = getFullTypeMap(exit);
-        m2 = ti->typeMap.data();
+        uint8* m2 = ti->typeMap.data();
         /* Analyze the exit typemap against the peer typemap. 
          * Two conditions are important:
          * 1) Typemaps are identical: these peers can be attached.
          * 2) Typemaps do not match, but only contain I->D mismatches.
          *    In this case, the original tree must be trashed because it 
          *    will never connect to any peer.
          */
-        matched = true;
-        undemote = false;
+        bool matched = true;
+        bool undemote = false;
         for (uint32 i = 0; i < checkSlots; i++) {
             /* If the types are equal we're okay. */
             if (m[i] == m2[i])
                 continue;
             matched = false;
             /* If there's an I->D that cannot be resolved, flag it.
              * Otherwise, break and go to the next peer.
              */
             if (m[i] == JSVAL_INT && m2[i] == JSVAL_DOUBLE && isSlotUndemotable(cx, ti, i)) {
                 undemote = true;
             } else {
                 undemote = false;
                 break;
             }
         }
-        if (matched) {
+        if (matched) {
+            JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
+            JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
             /* Capture missing globals on both trees and link the fragments together. */
             if (from != f) {
                 ti->dependentTrees.addUnique(from);
-                ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots);
+                ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
             }
-            ti = (TreeInfo*)from->vmprivate;
-            ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots);
+            from_ti->typeMap.captureMissingGlobalTypes(cx, *from_ti->globalSlots, from_ti->nStackTypes);
             exit->target = f;
             tm->fragmento->assm()->patch(exit);
             /* Now erase this exit from the unstable exit list. */
-            UnstableExit** tail = &ti->unstableExits;
-            for (UnstableExit* uexit = ti->unstableExits; uexit != NULL; uexit = uexit->next) {
+            UnstableExit** tail = &from_ti->unstableExits;
+            for (UnstableExit* uexit = from_ti->unstableExits; uexit != NULL; uexit = uexit->next) {
                 if (uexit->exit == exit) {
                     *tail = uexit->next;
                     delete uexit;
                     bound = true;
                     break;
                 }
                 tail = &uexit->next;
             }
             JS_ASSERT(bound);
-            debug_only_v(js_DumpPeerStability(tm, f->ip);)
+            debug_only_v(js_DumpPeerStability(tm, f->ip, from_ti->globalShape);)
             break;
         } else if (undemote) {
             /* The original tree is unconnectable, so trash it. */
             js_TrashTree(cx, f);
             /* We shouldn't attempt to record now, since we'll hit a duplicate. */
             return false;
         }
     }
     if (bound)
         return false;
 
-    return js_RecordTree(cx, tm, from->first, outer);
+    return js_RecordTree(cx, tm, from->first, outer, from_ti->globalShape, from_ti->globalSlots);
 }
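
Restating the outcomes of the peer scan above:

/*
 * js_AttemptToStabilizeTree outcomes:
 *   typemaps identical                 -> patch the exit to that peer, done
 *   only unresolvable I->D mismatches  -> trash the exiting tree, give up
 *   any other mismatch                 -> try the next peer
 *   no peer bound                      -> record a fresh tree, reusing
 *                                         from_ti's globalShape/globalSlots
 */
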
 
 static JS_REQUIRES_STACK bool
 js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, Fragment* outer)
 {
     Fragment* f = anchor->from->root;
     JS_ASSERT(f->vmprivate);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
@@ -3484,71 +3525,78 @@ js_CloseLoop(JSContext* cx)
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     Fragmento* fragmento = tm->fragmento;
     TraceRecorder* r = tm->recorder;
     JS_ASSERT(fragmento && r);
     bool walkedOutOfLoop = r->walkedOutOfLoop();
     
     if (fragmento->assm()->error()) {
         js_AbortRecording(cx, "Error during recording");
-
-        /* If we ran out of memory, flush the code cache and abort. */
-        if (fragmento->assm()->error() == OutOMem)
-            js_FlushJITCache(cx);
         return false;
     }
 
     bool demote = false;
     Fragment* f = r->getFragment();
+    TreeInfo* ti = r->getTreeInfo();
+    uint32 globalShape = ti->globalShape;
+    SlotList* globalSlots = ti->globalSlots;
     r->closeLoop(tm, demote);
-    js_DeleteRecorder(cx);
-    
+
+    /* 
+     * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
+     */
+    if (!js_DeleteRecorder(cx))
+        return false;
+
     /*
      * If we just walked out of a thin loop, we can't immediately start the 
      * compiler again here since we didn't return to the loop header.
      */
     if (demote && !walkedOutOfLoop)
-        return js_RecordTree(cx, tm, f, NULL);
+        return js_RecordTree(cx, tm, f, NULL, globalShape, globalSlots);
     return false;
 }
 
 JS_REQUIRES_STACK bool
 js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
 {
 #ifdef JS_THREADSAFE
     if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
         js_AbortRecording(cx, "Global object not owned by this context");
         return false; /* we stay away from shared global objects */
     }
 #endif
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+    TreeInfo* ti = r->getTreeInfo();
     /* Process deep abort requests. */
     if (r->wasDeepAborted()) {
         js_AbortRecording(cx, "deep abort requested");
         return false;
     }
     /* If we hit our own loop header, close the loop and compile the trace. */
     if (r->isLoopHeader(cx))
         return js_CloseLoop(cx);
     /* does this branch go to an inner loop? */
-    Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc);
+    Fragment* f = getLoop(&JS_TRACE_MONITOR(cx), cx->fp->regs->pc, ti->globalShape);
     Fragment* peer_root = f;
     if (nesting_enabled && f) {
         
         /* Make sure the inner tree call will not run into an out-of-memory condition. */
         if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
             !js_ReplenishReservedPool(cx, tm)) {
             js_AbortRecording(cx, "Couldn't call inner tree (out of memory)");
             return false; 
         }
         
         /* Make sure the shape of the global object still matches (this might flush 
            the JIT cache). */
         JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
-        if (!js_CheckGlobalObjectShape(cx, tm, globalObj)) {
+        uint32 globalShape = -1;
+        SlotList* globalSlots = NULL;
+        if (!js_CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
             js_AbortRecording(cx, "Couldn't call inner tree (prep failed)");
             return false;
         }
         
         debug_only_v(printf("Looking for type-compatible peer (%s:%d@%d)\n",
                             cx->fp->script->filename,
                             js_FramePCToLineNumber(cx, cx->fp),
                             FramePCOffset(cx->fp));)
@@ -3560,33 +3608,33 @@ js_RecordLoopEdge(JSContext* cx, TraceRe
         f = r->findNestedCompatiblePeer(f, &empty);
         if (f && f->code()) 
             success = r->adjustCallerTypes(f);
 
         if (!success) {
             AUDIT(noCompatInnerTrees);
             debug_only_v(printf("No compatible inner tree (%p).\n", f);)
 
-            Fragment* old = getLoop(tm, tm->recorder->getFragment()->root->ip);
+            Fragment* old = getLoop(tm, tm->recorder->getFragment()->root->ip, ti->globalShape);
             if (old == NULL)
                 old = tm->recorder->getFragment();
             js_AbortRecording(cx, "No compatible inner tree");
             if (!f && oracle.hit(peer_root->ip) < MAX_INNER_RECORD_BLACKLIST)
                 return false;
             if (old->recordAttempts < MAX_MISMATCH)
                 oracle.resetHits(old->ip);
             f = empty;
             if (!f) {
-                f = getAnchor(tm, cx->fp->regs->pc);
+                f = getAnchor(tm, cx->fp->regs->pc, globalShape);
                 if (!f) {
                     js_FlushJITCache(cx);
                     return false;
                 }
             }
-            return js_RecordTree(cx, tm, f, old);
+            return js_RecordTree(cx, tm, f, old, globalShape, globalSlots);
         }
 
         r->prepareTreeCall(f);
         VMSideExit* innermostNestedGuard = NULL;
         VMSideExit* lr = js_ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
         if (!lr) {
             js_AbortRecording(cx, "Couldn't call inner tree");
             return false;
@@ -3599,23 +3647,23 @@ js_RecordLoopEdge(JSContext* cx, TraceRe
                 js_AbortRecording(cx, "Inner tree took different side exit, abort recording");
                 return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL);
             }
             /* emit a call to the inner tree and continue recording the outer tree trace */
             r->emitTreeCall(f, lr);
             return true;
         case UNSTABLE_LOOP_EXIT:
             /* abort recording so the inner loop can become type stable. */
-            old = getLoop(tm, tm->recorder->getFragment()->root->ip);
+            old = getLoop(tm, tm->recorder->getFragment()->root->ip, ti->globalShape);
             js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
             oracle.resetHits(old->ip);
             return js_AttemptToStabilizeTree(cx, lr, old);
         case BRANCH_EXIT:
             /* abort recording the outer tree, extend the inner tree */
-            old = getLoop(tm, tm->recorder->getFragment()->root->ip);
+            old = getLoop(tm, tm->recorder->getFragment()->root->ip, ti->globalShape);
             js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
             oracle.resetHits(old->ip);
             return js_AttemptToExtendTree(cx, lr, NULL, old);
         default:
             debug_only_v(printf("exit_type=%d\n", lr->exitType);)
             js_AbortRecording(cx, "Inner tree not suitable for calling");
             return false;
         }
@@ -3680,18 +3728,18 @@ TraceRecorder::findNestedCompatiblePeer(
     JSTraceMonitor* tm;
     unsigned max_demotes;
 
     if (empty)
         *empty = NULL;
     demote = NULL;
 
     tm = &JS_TRACE_MONITOR(cx);
-    unsigned int ngslots = tm->globalSlots->length();
-    uint16* gslots = tm->globalSlots->data();
+    unsigned int ngslots = treeInfo->globalSlots->length();
+    uint16* gslots = treeInfo->globalSlots->data();
 
     /* We keep a maximum tally; we want to select the peer most likely to work so we don't keep
      * recording.
      */
     max_demotes = 0;
 
     TreeInfo* ti;
     for (; f != NULL; f = f->peer) {
@@ -3701,18 +3749,18 @@ TraceRecorder::findNestedCompatiblePeer(
             continue;
         }
 
         unsigned demotes = 0;
         ti = (TreeInfo*)f->vmprivate;
 
         debug_only_v(printf("checking nested types %p: ", f);)
 
-        if (ngslots > ti->globalSlots())
-            ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots);
+        if (ngslots > ti->nGlobalTypes())
+            ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
 
         uint8* m = ti->typeMap.data();
 
         FORALL_SLOTS(cx, ngslots, gslots, 0,
             debug_only_v(printf("%s%d=", vpname, vpnum);)
             if (!js_IsEntryTypeCompatible(vp, m))
                 goto check_fail;
             if (*m == JSVAL_STRING && *vp == JSVAL_VOID)
@@ -3751,32 +3799,29 @@ check_fail:
  *
  * @param cx            Context.
  * @param ti            Tree info of peer we're testing.
  * @return              True if compatible (with or without demotions), false otherwise.
  */
 static JS_REQUIRES_STACK bool
 js_CheckEntryTypes(JSContext* cx, TreeInfo* ti)
 {
-    JSTraceMonitor* tm;
-
-    tm = &JS_TRACE_MONITOR(cx);
-    unsigned int ngslots = tm->globalSlots->length();
-    uint16* gslots = tm->globalSlots->data();
-
-    JS_ASSERT(ti->stackSlots == js_NativeStackSlots(cx, 0));
-
-    if (ngslots > ti->globalSlots())
-        ti->typeMap.captureMissingGlobalTypes(cx, *tm->globalSlots, ti->stackSlots);
+    unsigned int ngslots = ti->globalSlots->length();
+    uint16* gslots = ti->globalSlots->data();
+
+    JS_ASSERT(ti->nStackTypes == js_NativeStackSlots(cx, 0));
+
+    if (ngslots > ti->nGlobalTypes())
+        ti->typeMap.captureMissingGlobalTypes(cx, *ti->globalSlots, ti->nStackTypes);
 
     uint8* m = ti->typeMap.data();
 
     JS_ASSERT(ti->typeMap.length() == js_NativeStackSlots(cx, 0) + ngslots);
-    JS_ASSERT(ti->typeMap.length() == ti->stackSlots + ngslots);
-    JS_ASSERT(ti->globalSlots() == ngslots);
+    JS_ASSERT(ti->typeMap.length() == ti->nStackTypes + ngslots);
+    JS_ASSERT(ti->nGlobalTypes() == ngslots);
     FORALL_SLOTS(cx, ngslots, gslots, 0,
         debug_only_v(printf("%s%d=", vpname, vpnum);)
         JS_ASSERT(*m != 0xCD);
         if (!js_IsEntryTypeCompatible(vp, m))
             goto check_fail;
         m++;
     );
     JS_ASSERT(unsigned(m - ti->typeMap.data()) == ti->typeMap.length());
@@ -3816,25 +3861,25 @@ static VMSideExit*
 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, 
                VMSideExit** innermostNestedGuardp)
 {
     JS_ASSERT(f->code() && f->vmprivate);
 
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
-    unsigned ngslots = tm->globalSlots->length();
-    uint16* gslots = tm->globalSlots->data();
+    unsigned ngslots = ti->globalSlots->length();
+    uint16* gslots = ti->globalSlots->data();
     unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
     double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
     double stack_buffer[MAX_NATIVE_STACK_SLOTS];
     double* stack = stack_buffer;
 
     /* Make sure the global object is sane. */
-    JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == tm->globalShape)); 
+    JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape)); 
     /* Make sure our caller replenished the double pool. */
     JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
 
     /* Reserve objects and stack space now, to make leaving the tree infallible. */
     void *reserve;
     void *stackMark = JS_ARENA_MARK(&cx->stackPool);
     if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
         return NULL;
@@ -3851,17 +3896,17 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
     debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                         cx->fp->script->filename,
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp),
                         ti->maxNativeStackSlots,
                         f->code());)
     
-    JS_ASSERT(ti->globalSlots() == ngslots);
+    JS_ASSERT(ti->nGlobalTypes() == ngslots);
     
     if (ngslots) 
         BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
     BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack);
 
     double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
     FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
     FrameInfo** callstack = callstack_buffer;
@@ -4020,23 +4065,23 @@ js_ExecuteTree(JSContext* cx, Fragment* 
                         calldepth,
                         cycles));
 
     /* If this trace is part of a tree, later branches might have added additional globals for
        which we don't have any type information available in the side exit. We merge in this
       information from the entry type-map. See also the comment in the TraceRecorder
       constructor on why this is always safe to do. */
     unsigned exit_gslots = innermost->numGlobalSlots;
-    JS_ASSERT(ngslots == ti->globalSlots());
+    JS_ASSERT(ngslots == ti->nGlobalTypes());
     JS_ASSERT(ngslots >= exit_gslots);
     uint8* globalTypeMap = getGlobalTypeMap(innermost);
     if (exit_gslots < ngslots)
         mergeTypeMaps(&globalTypeMap, &exit_gslots, ti->globalTypeMap(), ngslots,
                       (uint8*)alloca(sizeof(uint8) * ngslots));
-    JS_ASSERT(exit_gslots == ti->globalSlots());
+    JS_ASSERT(exit_gslots == ti->nGlobalTypes());
 
     /* write back interned globals */
     FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global);
     JS_ASSERT_IF(ngslots != 0, globalFrameSize == STOBJ_NSLOTS(globalObj));
     JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
 
     /* write back native stack frame */
 #ifdef DEBUG
@@ -4066,58 +4111,75 @@ js_ExecuteTree(JSContext* cx, Fragment* 
 
 JS_REQUIRES_STACK bool
 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Is the recorder currently active? */
     if (tm->recorder) {
+        jsbytecode* innerLoopHeaderPC = cx->fp->regs->pc;
+
         if (js_RecordLoopEdge(cx, tm->recorder, inlineCallCount))
             return true;
-        /* recording was aborted, treat like a regular loop edge hit */
+
+        /*
+         * js_RecordLoopEdge will invoke an inner tree if we have a matching one. If we
+         * arrive here, the inner tree did not run to completion: either no compatible
+         * tree matched, or the inner tree took a side exit other than the loop exit.
+         * We are thus no longer guaranteed to be parked on the same loop header
+         * js_MonitorLoopEdge was called for. In fact, this might not be a loop header
+         * at all. Hence, if the program counter no longer points at the inner loop
+         * header, return to the interpreter and do not attempt to trigger or record a
+         * new tree at this location.
+         */
+        if (innerLoopHeaderPC != cx->fp->regs->pc)
+            return false;
     }
     JS_ASSERT(!tm->recorder);
 
     /* Check the pool of reserved doubles (this might trigger a GC). */
     if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
         !js_ReplenishReservedPool(cx, tm)) {
         return false; /* Out of memory, don't try to record now. */
     }
     
     /* Make sure the shape of the global object still matches (this might flush the JIT cache). */
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
-    if (!js_CheckGlobalObjectShape(cx, tm, globalObj))
+    uint32 globalShape = -1;
+    SlotList* globalSlots = NULL;
+
+    if (!js_CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
         js_FlushJITCache(cx);
     
     jsbytecode* pc = cx->fp->regs->pc;
 
     if (oracle.getHits(pc) >= 0 && 
         oracle.getHits(pc)+1 < HOTLOOP) {
         oracle.hit(pc);
         return false;
     }
 
-    Fragment* f = getLoop(tm, pc);
+    Fragment* f = getLoop(tm, pc, globalShape);
     if (!f)
-        f = getAnchor(tm, pc);
+        f = getAnchor(tm, pc, globalShape);
 
     if (!f) {
         js_FlushJITCache(cx);
         return false;
     }
 
     /* If we have no code in the anchor and no peers, we definitely won't be able to
        activate any trees, so start compiling. */
     if (!f->code() && !f->peer) {
 monitor_loop:
         if (oracle.hit(pc) >= HOTLOOP) {
             /* We can give RecordTree the root peer. If that peer is already taken, it will
                walk the peer list and find us a free slot or allocate a new tree if needed. */
-            return js_RecordTree(cx, tm, f->first, NULL);
+            return js_RecordTree(cx, tm, f->first, NULL, globalShape, globalSlots);
         }
         /* Threshold not reached yet. */
         return false;
     }
     
     debug_only_v(printf("Looking for compat peer %d@%d, from %p (ip: %p, hits=%d)\n",
                         js_FramePCToLineNumber(cx, cx->fp), 
                         FramePCOffset(cx->fp),
@@ -4212,16 +4274,17 @@ TraceRecorder::monitorRecording(JSContex
                             (cx->fp->imacpc)                                  \
                             ? 0                                               \
                             : PTRDIFF(cx->fp->regs->pc,                       \
                                       cx->fp->script->code,                   \
                                       jsbytecode),                            \
                             !cx->fp->imacpc, stdout);)                        \
         flag = tr->record_##x();                                              \
         if (x == JSOP_ITER || x == JSOP_NEXTITER || x == JSOP_APPLY ||        \
+            x == JSOP_GETELEM || x == JSOP_SETELEM || x == JSOP_INITELEM ||   \
             JSOP_IS_BINARY(x) || JSOP_IS_UNARY(x) ||                          \
             JSOP_IS_EQUALITY(x)) {                                            \
             goto imacro;                                                      \
         }                                                                     \
         break;
 # include "jsopcode.tbl"
 # undef OPDEF
     }
@@ -4245,20 +4308,20 @@ TraceRecorder::monitorRecording(JSContex
 
   abort_recording:
     js_AbortRecording(cx, js_CodeName[op]);
     return JSMRS_STOP;
 }
 
 /* If used on a loop trace, blacklists the root peer instead of the given fragment. */
 void
-js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag)
+js_BlacklistPC(JSTraceMonitor* tm, Fragment* frag, uint32 globalShape)
 {
     if (frag->kind == LoopTrace)
-        frag = getLoop(tm, frag->ip);
+        frag = getLoop(tm, frag->ip, globalShape);
     oracle.blacklist(frag->ip);
 }
 
 JS_REQUIRES_STACK void
 js_AbortRecording(JSContext* cx, const char* reason)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(tm->recorder != NULL);
@@ -4273,23 +4336,33 @@ js_AbortRecording(JSContext* cx, const c
                             reason);)
     }
     Fragment* f = tm->recorder->getFragment();
     if (!f) {
         js_DeleteRecorder(cx);
         return;
     }
     JS_ASSERT(!f->vmprivate);
-    js_BlacklistPC(tm, f);
+    uint32 globalShape = tm->recorder->getTreeInfo()->globalShape;
+    js_BlacklistPC(tm, f, globalShape);
     Fragment* outer = tm->recorder->getOuterToBlacklist();
     /* Give outer two chances to stabilize before we start blacklisting. */
     if (outer != NULL && outer->recordAttempts >= 2)
-        js_BlacklistPC(tm, outer);
-    js_DeleteRecorder(cx);
-    /* If this is the primary trace and we didn't succeed compiling, trash the TreeInfo object. */
+        js_BlacklistPC(tm, outer, globalShape);
+
+    /* 
+     * If js_DeleteRecorder flushed the code cache, we can't rely on f any more.
+     */
+    if (!js_DeleteRecorder(cx))
+        return;
+
+    /*
+     * If this is the primary trace and we didn't succeed compiling, trash the
+     * TreeInfo object.
+     */
     if (!f->code() && (f->root == f)) 
         js_TrashTree(cx, f);
 }
 
 #if defined NANOJIT_IA32
 static bool
 js_CheckForSSE2()
 {
@@ -4335,25 +4408,29 @@ js_InitJIT(JSTraceMonitor *tm)
 #if defined NANOJIT_IA32
     if (!did_we_check_sse2) {
         avmplus::AvmCore::config.use_cmov =
         avmplus::AvmCore::config.sse2 = js_CheckForSSE2();
         did_we_check_sse2 = true;
     }
 #endif
     if (!tm->fragmento) {
-        JS_ASSERT(!tm->globalSlots && !tm->reservedDoublePool);
+        JS_ASSERT(!tm->reservedDoublePool);
         Fragmento* fragmento = new (&gc) Fragmento(core, 24);
         verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
         tm->fragmento = fragmento;
         tm->lirbuf = new (&gc) LirBuffer(fragmento, NULL);
 #ifdef DEBUG
         tm->lirbuf->names = new (&gc) LirNameMap(&gc, NULL, tm->fragmento->labels);
 #endif
-        tm->globalSlots = new (&gc) SlotList();
+        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
+            tm->globalStates[i].globalShape = -1;
+            JS_ASSERT(!tm->globalStates[i].globalSlots);
+            tm->globalStates[i].globalSlots = new (&gc) SlotList();
+        }
         tm->reservedDoublePoolPtr = tm->reservedDoublePool = new jsval[MAX_NATIVE_STACK_SLOTS];
         memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
     }
     if (!tm->reFragmento) {
         Fragmento* fragmento = new (&gc) Fragmento(core, 20);
         verbose_only(fragmento->labels = new (&gc) LabelMap(core, NULL);)
         tm->reFragmento = fragmento;
         tm->reLirBuf = new (&gc) LirBuffer(fragmento, NULL);
@@ -4377,28 +4454,30 @@ js_FinishJIT(JSTraceMonitor *tm)
                jitstats.unstableLoopVariable, jitstats.breakLoopExits, jitstats.returnLoopExits,
                jitstats.noCompatInnerTrees);
         printf("monitor: triggered(%llu), exits(%llu), type mismatch(%llu), "
                "global mismatch(%llu)\n", jitstats.traceTriggered, jitstats.sideExitIntoInterpreter,
                jitstats.typeMapMismatchAtEntry, jitstats.globalShapeMismatchAtEntry);
     }
 #endif
     if (tm->fragmento != NULL) {
-        JS_ASSERT(tm->globalSlots && tm->reservedDoublePool);
+        JS_ASSERT(tm->reservedDoublePool);
         verbose_only(delete tm->fragmento->labels;)
 #ifdef DEBUG
         delete tm->lirbuf->names;
         tm->lirbuf->names = NULL;
 #endif
         delete tm->lirbuf;
         tm->lirbuf = NULL;
         delete tm->fragmento;
         tm->fragmento = NULL;
-        delete tm->globalSlots;
-        tm->globalSlots = NULL;
+        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
+            JS_ASSERT(tm->globalStates[i].globalSlots);
+            delete tm->globalStates[i].globalSlots;
+        }
         delete[] tm->reservedDoublePool;
         tm->reservedDoublePool = tm->reservedDoublePoolPtr = NULL;
     }
     if (tm->reFragmento != NULL) {
         delete tm->reLirBuf;
         verbose_only(delete tm->reFragmento->labels;)
         delete tm->reFragmento;
     }
@@ -4454,20 +4533,20 @@ js_FlushJITCache(JSContext* cx)
         fragmento->clearFrags();
 #ifdef DEBUG
         JS_ASSERT(fragmento->labels);
         delete fragmento->labels;
         fragmento->labels = new (&gc) LabelMap(core, NULL);
 #endif
         tm->lirbuf->rewind();
         memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
-    }
-    if (cx->fp) {
-        tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain));
-        tm->globalSlots->clear();
+        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
+            tm->globalStates[i].globalShape = -1;
+            tm->globalStates[i].globalSlots->clear();
+        }
     }
     oracle.clearHitCounts();
 }
 
 JS_FORCES_STACK JSStackFrame *
 js_GetTopStackFrame(JSContext *cx)
 {
     if (JS_ON_TRACE(cx)) {
@@ -5761,17 +5840,17 @@ JS_REQUIRES_STACK bool
 TraceRecorder::guardElemOp(JSObject* obj, LIns* obj_ins, jsid id, size_t op_offset, jsval* vp)
 {
     LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
     LIns* ops_ins;
     if (!map_is_native(obj->map, map_ins, ops_ins, op_offset))
         return false;
 
     uint32 shape = OBJ_SHAPE(obj);
-    if (JSID_IS_ATOM(id) && shape == traceMonitor->globalShape)
+    if (JSID_IS_ATOM(id) && shape == treeInfo->globalShape)
         ABORT_TRACE("elem op probably aliases global");
 
     JSObject* pobj;
     JSProperty* prop;
     if (!js_LookupProperty(cx, obj, id, &pobj, &prop))
         return false;
 
     if (vp)
@@ -6784,16 +6863,79 @@ TraceRecorder::record_SetPropMiss(JSProp
     JS_ASSERT(scope->object == obj);
     JS_ASSERT(scope->shape == PCVCAP_SHAPE(entry->vcap));
     JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
 #endif
 
     return record_SetPropHit(entry, sprop);
 }
 
+/* Functions used by JSOP_GETELEM. */
+
+static JSBool
+GetProperty(JSContext *cx, uintN argc, jsval *vp)
+{
+    jsval *argv;
+    jsid id;
+
+    JS_ASSERT(argc == 1);
+    argv = JS_ARGV(cx, vp);
+    JS_ASSERT(JSVAL_IS_STRING(argv[0]));
+    if (!js_ValueToStringId(cx, argv[0], &id))
+        return JS_FALSE;
+    argv[0] = ID_TO_VALUE(id);
+    return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
+}
+
+static jsval FASTCALL
+GetProperty_tn(JSContext *cx, JSObject *obj, JSString *name)
+{
+    jsid id;
+    jsval v;
+
+    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), &id) ||
+        !OBJ_GET_PROPERTY(cx, obj, id, &v)) {
+        return JSVAL_ERROR_COOKIE;
+    }
+    return v;
+}
+
+static JSBool
+GetElement(JSContext *cx, uintN argc, jsval *vp)
+{
+    jsval *argv;
+    jsid id;
+
+    JS_ASSERT(argc == 1);
+    argv = JS_ARGV(cx, vp);
+    JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
+    if (!JS_ValueToId(cx, argv[0], &id))
+        return JS_FALSE;
+    argv[0] = ID_TO_VALUE(id);
+    return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
+}
+
+static jsval FASTCALL
+GetElement_tn(JSContext* cx, JSObject* obj, int32 index)
+{
+    jsval v;
+    jsid id;
+
+    if (!js_Int32ToId(cx, index, &id))
+        return JSVAL_ERROR_COOKIE;
+    if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
+        return JSVAL_ERROR_COOKIE;
+    return v;
+}
+
+JS_DEFINE_TRCINFO_1(GetProperty,
+    (3, (static, JSVAL_FAIL,    GetProperty_tn, CONTEXT, THIS, STRING,          0, 0)))
+JS_DEFINE_TRCINFO_1(GetElement,
+    (3, (extern, JSVAL_FAIL,    GetElement_tn,  CONTEXT, THIS, INT32,           0, 0)))
+
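
These traceable natives signal failure in-band: the _tn variants return JSVAL_ERROR_COOKIE rather than a separate JSBool, which lets a trace guard directly on the call result. A hedged sketch of that guard, mirroring the code this patch removes from record_JSOP_GETELEM below (the _ci call-info name is assumed from the JS_DEFINE_TRCINFO_1 naming convention):

/* Sketch: trace-side failure check for a JSVAL_FAIL builtin. */
LIns* args[] = { idx_ins, obj_ins, cx_ins };
LIns* v_ins = lir->insCall(&GetProperty_tn_ci, args);
guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), MISMATCH_EXIT);
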
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETELEM()
 {
     jsval& idx = stackval(-1);
     jsval& lval = stackval(-2);
 
     LIns* obj_ins = get(&lval);
     LIns* idx_ins = get(&idx);
@@ -6828,54 +6970,108 @@ TraceRecorder::record_JSOP_GETELEM()
         if (js_IdIsIndex(idx, &index) && guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
             v = (index >= ARRAY_DENSE_LENGTH(obj)) ? JSVAL_HOLE : obj->dslots[index];
             if (v == JSVAL_HOLE)
                 ABORT_TRACE("can't see through hole in dense array");
         } else {
             if (!guardElemOp(obj, obj_ins, id, offsetof(JSObjectOps, getProperty), &v))
                 return false;
         }
-        LIns* args[] = { idx_ins, obj_ins, cx_ins };
-        v_ins = lir->insCall(&js_Any_getprop_ci, args);
-        guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), MISMATCH_EXIT);
-        unbox_jsval(v, v_ins);
-        set(&lval, v_ins);
-        return true;
+        return call_imacro(getelem_imacros.getprop);
     }
 
     /* At this point we expect a whole number or we bail. */
     if (!JSVAL_IS_INT(idx))
         ABORT_TRACE("non-string, non-int JSOP_GETELEM index");
     if (JSVAL_TO_INT(idx) < 0)
         ABORT_TRACE("negative JSOP_GETELEM index");
 
     /* Accessing an object using integer index but not a dense array. */
     if (!OBJ_IS_DENSE_ARRAY(cx, obj)) {
         idx_ins = makeNumberInt32(idx_ins);
-        LIns* args[] = { idx_ins, obj_ins, cx_ins };
         if (!js_IndexToId(cx, JSVAL_TO_INT(idx), &id))
             return false;
-        idx = ID_TO_VALUE(id);
         if (!guardElemOp(obj, obj_ins, id, offsetof(JSObjectOps, getProperty), &v))
             return false;
-        LIns* v_ins = lir->insCall(&js_Any_getelem_ci, args);
-        guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)), MISMATCH_EXIT);
-        unbox_jsval(v, v_ins);
-        set(&lval, v_ins);
-        return true;
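+        /* Not a dense array: record the get through the getelem imacro. */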
+        return call_imacro(getelem_imacros.getelem);
     }
 
     jsval* vp;
     LIns* addr_ins;
     if (!elem(lval, idx, vp, v_ins, addr_ins))
         return false;
     set(&lval, v_ins);
     return true;
 }
 
+/* Functions used by JSOP_SETELEM. */
+
+static JSBool
+SetProperty(JSContext *cx, uintN argc, jsval *vp)
+{
+    jsval *argv;
+    jsid id;
+
+    JS_ASSERT(argc == 2);
+    argv = JS_ARGV(cx, vp);
+    JS_ASSERT(JSVAL_IS_STRING(argv[0]));
+    if (!js_ValueToStringId(cx, argv[0], &id))
+        return JS_FALSE;
+    argv[0] = ID_TO_VALUE(id);
+    if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
+        return JS_FALSE;
+    JS_SET_RVAL(cx, vp, JSVAL_VOID);
+    return JS_TRUE;
+}
+
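+/*
+ * Traceable fast path for SetProperty. BOOL_FAIL builtins signal failure by
+ * returning JSVAL_TO_BOOLEAN(JSVAL_VOID), a pseudo-boolean value distinct
+ * from JSVAL_TRUE and JSVAL_FALSE.
+ */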
+static int32 FASTCALL
+SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
+{
+    jsid id;
+
+    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id) ||
+        !OBJ_SET_PROPERTY(cx, obj, id, &v)) {
+        return JSVAL_TO_BOOLEAN(JSVAL_VOID);
+    }
+    return JSVAL_TRUE;
+}
+
+static JSBool
+SetElement(JSContext *cx, uintN argc, jsval *vp)
+{
+    jsval *argv;
+    jsid id;
+
+    JS_ASSERT(argc == 2);
+    argv = JS_ARGV(cx, vp);
+    JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
+    if (!JS_ValueToId(cx, argv[0], &id))
+        return JS_FALSE;
+    argv[0] = ID_TO_VALUE(id);
+    if (!OBJ_SET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &argv[1]))
+        return JS_FALSE;
+    JS_SET_RVAL(cx, vp, JSVAL_VOID);
+    return JS_TRUE;
+}
+
+static int32 FASTCALL
+SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
+{
+    jsid id;
+
+    if (!js_Int32ToId(cx, index, &id) || !OBJ_SET_PROPERTY(cx, obj, id, &v))
+        return JSVAL_TO_BOOLEAN(JSVAL_VOID);
+    return JSVAL_TRUE;
+}
+
+JS_DEFINE_TRCINFO_1(SetProperty,
+    (4, (extern, BOOL_FAIL,     SetProperty_tn, CONTEXT, THIS, STRING, JSVAL,   0, 0)))
+JS_DEFINE_TRCINFO_1(SetElement,
+    (4, (extern, BOOL_FAIL,     SetElement_tn,  CONTEXT, THIS, INT32, JSVAL,    0, 0)))
+
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETELEM()
 {
     jsval& v = stackval(-1);
     jsval& idx = stackval(-2);
     jsval& lval = stackval(-3);
 
     /* No type guards needed here; the trace is already specialized to these types. */
@@ -6893,45 +7089,46 @@ TraceRecorder::record_JSOP_SETELEM()
 
     if (JSVAL_IS_STRING(idx)) {
         if (!js_ValueToStringId(cx, idx, &id))
             return false;
         // Store the interned string to the stack to save the interpreter from redoing this work.
         idx = ID_TO_VALUE(id);
         if (!guardElemOp(obj, obj_ins, id, offsetof(JSObjectOps, setProperty), NULL))
             return false;
-        LIns* args[] = { boxed_v_ins, idx_ins, obj_ins, cx_ins };
-        LIns* ok_ins = lir->insCall(&js_Any_setprop_ci, args);
-        guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);    
-    } else if (JSVAL_IS_INT(idx)) {
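+        /* String index: record the set through the setprop imacro. */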
+        return call_imacro(setelem_imacros.setprop);
+    }
+    if (JSVAL_IS_INT(idx)) {
         if (JSVAL_TO_INT(idx) < 0)
             ABORT_TRACE("negative JSOP_SETELEM index");
         idx_ins = makeNumberInt32(idx_ins);
-        LIns* args[] = { boxed_v_ins, idx_ins, obj_ins, cx_ins };
-        LIns* res_ins;
-        if (guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
-            res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
-        } else {
+
+        if (!guardDenseArray(obj, obj_ins, BRANCH_EXIT)) {
             if (!js_IndexToId(cx, JSVAL_TO_INT(idx), &id))
                 return false;
             idx = ID_TO_VALUE(id);
             if (!guardElemOp(obj, obj_ins, id, offsetof(JSObjectOps, setProperty), NULL))
                 return false;
-            res_ins = lir->insCall(&js_Any_setelem_ci, args);
+            jsbytecode* pc = cx->fp->regs->pc;
+            return call_imacro((*pc == JSOP_INITELEM)
+                               ? initelem_imacros.initelem
+                               : setelem_imacros.setelem);
         }
+
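+        /* Dense array: call the specialized dense setelem builtin directly. */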
+        LIns* args[] = { boxed_v_ins, idx_ins, obj_ins, cx_ins };
+        LIns* res_ins = lir->insCall(&js_Array_dense_setelem_ci, args);
         guard(false, lir->ins_eq0(res_ins), MISMATCH_EXIT);
-    } else {
-        ABORT_TRACE("non-string, non-int JSOP_SETELEM index");
-    }
-
-    jsbytecode* pc = cx->fp->regs->pc;
-    if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
-        set(&lval, v_ins);
-
-    return true;
+
+        jsbytecode* pc = cx->fp->regs->pc;
+        if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
+            set(&lval, v_ins);
+
+        return true;
+    }
+    ABORT_TRACE("non-string, non-int JSOP_SETELEM index");
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLNAME()
 {
     JSObject* obj = cx->fp->scopeChain;
     if (obj != globalObj) {
         jsval* vp;
@@ -8076,17 +8273,16 @@ JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GOTOX()
 {
     return true;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFEQX()
 {
-    trackCfgMerges(cx->fp->regs->pc);
     return record_JSOP_IFEQ();
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFNEX()
 {
     return record_JSOP_IFNE();
 }
@@ -8786,17 +8982,21 @@ JS_DEFINE_TRCINFO_1(ObjectToIterator,
 JS_DEFINE_TRCINFO_1(CallIteratorNext,
     (2, (static, JSVAL_FAIL,       CallIteratorNext_tn, CONTEXT, THIS,          0, 0)))
 
 static const struct BuiltinFunctionInfo {
     JSTraceableNative *tn;
     int nargs;
 } builtinFunctionInfo[JSBUILTIN_LIMIT] = {
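+    /* Entries must be in the same order as the JSBUILTIN_* enumeration. */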
     {ObjectToIterator_trcinfo,   1},
-    {CallIteratorNext_trcinfo,   0}
+    {CallIteratorNext_trcinfo,   0},
+    {GetProperty_trcinfo,        1},
+    {GetElement_trcinfo,         1},
+    {SetProperty_trcinfo,        2},
+    {SetElement_trcinfo,         2}
 };
 
 JSObject *
 js_GetBuiltinFunction(JSContext *cx, uintN index)
 {
     JSRuntime *rt = cx->runtime;
     JSObject *funobj = rt->builtinFunctions[index];
 
@@ -8940,47 +9140,47 @@ TraceRecorder::record_JSOP_HOLE()
 {
     stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_HOLE)));
     return true;
 }
 
 #ifdef JS_JIT_SPEW
 /* Prints information about entry typemaps and unstable exits for all peers at a PC. */
 void
-js_DumpPeerStability(JSTraceMonitor* tm, const void* ip)
+js_DumpPeerStability(JSTraceMonitor* tm, const void* ip, uint32 globalShape)
 {
     Fragment* f;
     TreeInfo* ti;
     bool looped = false;
     unsigned length = 0;
 
-    for (f = getLoop(tm, ip); f != NULL; f = f->peer) {
+    for (f = getLoop(tm, ip, globalShape); f != NULL; f = f->peer) {
         if (!f->vmprivate)
             continue;
         printf("fragment %p:\nENTRY: ", f);
         ti = (TreeInfo*)f->vmprivate;
         if (looped)
-            JS_ASSERT(ti->stackSlots == length);
-        for (unsigned i = 0; i < ti->stackSlots; i++)
+            JS_ASSERT(ti->nStackTypes == length);
+        for (unsigned i = 0; i < ti->nStackTypes; i++)
             printf("S%d ", ti->stackTypeMap()[i]);
-        for (unsigned i = 0; i < ti->globalSlots(); i++)
+        for (unsigned i = 0; i < ti->nGlobalTypes(); i++)
             printf("G%d ", ti->globalTypeMap()[i]);
         printf("\n");
         UnstableExit* uexit = ti->unstableExits;
         while (uexit != NULL) {
             printf("EXIT:  ");
             uint8* m = getFullTypeMap(uexit->exit);
             for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                 printf("S%d ", m[i]);
             for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++)
                 printf("G%d ", m[uexit->exit->numStackSlots + i]);
             printf("\n");
             uexit = uexit->next;
         }
-        length = ti->stackSlots;
+        length = ti->nStackTypes;
         looped = true;
     }
 }
 #endif
 
 /*
  * 17 potentially-converting binary operators:
  *  | ^ & == != < <= > >= << >> >>> + - * / %
@@ -9002,16 +9202,19 @@ InitIMacroCode()
     for (uintN op = JSOP_BITOR; op <= JSOP_MOD; op++)
         imacro_code[op] = (jsbytecode*)&binary_imacros - 1;
 
     // NB: above loop mis-set JSOP_ADD's entry, so order here is crucial.
     imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1;
 
     imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1;
     imacro_code[JSOP_NEXTITER] = (jsbytecode*)&nextiter_imacros - 1;
+    imacro_code[JSOP_GETELEM] = (jsbytecode*)&getelem_imacros - 1;
+    imacro_code[JSOP_SETELEM] = (jsbytecode*)&setelem_imacros - 1;
+    imacro_code[JSOP_INITELEM] = (jsbytecode*)&initelem_imacros - 1;
     imacro_code[JSOP_APPLY] = (jsbytecode*)&apply_imacros - 1;
 
     imacro_code[JSOP_NEG] = (jsbytecode*)&unary_imacros - 1;
     imacro_code[JSOP_POS] = (jsbytecode*)&unary_imacros - 1;
 
     imacro_code[JSOP_EQ] = (jsbytecode*)&equality_imacros - 1;
     imacro_code[JSOP_NE] = (jsbytecode*)&equality_imacros - 1;
 }
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -269,32 +269,45 @@ struct UnstableExit
 class TreeInfo MMGC_SUBCLASS_DECL {
     nanojit::Fragment*      fragment;
 public:
     JSScript*               script;
     unsigned                maxNativeStackSlots;
     ptrdiff_t               nativeStackBase;
     unsigned                maxCallDepth;
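+    /*
+     * typeMap lists the stack type tags first, followed by the global type
+     * tags; nStackTypes marks the boundary.
+     */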
     TypeMap                 typeMap;
-    unsigned                stackSlots;
+    unsigned                nStackTypes;
+    uint32                  globalShape;
+    SlotList*               globalSlots;
     Queue<nanojit::Fragment*> dependentTrees;
     unsigned                branchCount;
     Queue<VMSideExit*>      sideExits;
     UnstableExit*           unstableExits;
 
-    TreeInfo(nanojit::Fragment* _fragment) : unstableExits(NULL) {
-        fragment = _fragment;
-    }
+    TreeInfo(nanojit::Fragment* _fragment,
+             uint32 _globalShape,
+             SlotList* _globalSlots)
+      : fragment(_fragment),
+        script(NULL),
+        maxNativeStackSlots(0),
+        nativeStackBase(0),
+        maxCallDepth(0),
+        nStackTypes(0),
+        globalShape(_globalShape),
+        globalSlots(_globalSlots),
+        branchCount(0),
+        unstableExits(NULL)
+    {}
     ~TreeInfo();
 
-    inline unsigned globalSlots() {
-        return typeMap.length() - stackSlots;
+    inline unsigned nGlobalTypes() {
+        return typeMap.length() - nStackTypes;
     }
     inline uint8* globalTypeMap() {
-        return typeMap.data() + stackSlots;
+        return typeMap.data() + nStackTypes;
     }
     inline uint8* stackTypeMap() {
         return typeMap.data();
     }
 };
 
 struct FrameInfo {
     JSObject*       callee;     // callee function object
@@ -582,9 +595,16 @@ js_GetBuiltinFunction(JSContext *cx, uin
 #else  /* !JS_TRACER */
 
 #define TRACE_0(x)              ((void)0)
 #define TRACE_1(x,a)            ((void)0)
 #define TRACE_2(x,a,b)          ((void)0)
 
 #endif /* !JS_TRACER */
 
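+/*
+ * Deep-bail off trace if we are executing one: js_GetTopStackFrame forces the
+ * VM to leave trace and reconstruct the interpreter stack before returning.
+ */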
+static JS_INLINE JS_FORCES_STACK void
+js_LeaveTrace(JSContext *cx)
+{
+    if (JS_ON_TRACE(cx))
+        js_GetTopStackFrame(cx);
+}
+
 #endif /* jstracer_h___ */
--- a/js/src/liveconnect/nsCLiveconnect.cpp
+++ b/js/src/liveconnect/nsCLiveconnect.cpp
@@ -158,17 +158,17 @@ AutoPushJSContext::AutoPushJSContext(nsI
 
     memset(&mFrame, 0, sizeof(mFrame));
 
     if (NS_SUCCEEDED(mPushResult))
     {
         // See if there are any scripts on the stack.
         // If not, we need to add a dummy frame with a principal.
         JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL);
-        JS_ASSERT_NOT_EXECUTING_TRACE(cx);
+        JS_ASSERT_NOT_ON_TRACE(cx);
 
         if (!tempFP)
         {
             JSPrincipals* jsprinc;
             principal->GetJSPrincipals(cx, &jsprinc);
 
             JSFunction *fun = JS_CompileFunctionForPrincipals(cx, JS_GetGlobalObject(cx),
                                                               jsprinc, "anonymous", 0, nsnull,
--- a/js/src/nanojit/Assembler.cpp
+++ b/js/src/nanojit/Assembler.cpp
@@ -849,16 +849,21 @@ namespace nanojit
         }
 		else {
 			_nIns = _startingIns;  // in case of failure reset nIns ready for the next assembly run
 		}
 	}
 
 	void Assembler::endAssembly(Fragment* frag, NInsList& loopJumps)
 	{
+		// Don't try to patch code if we are in an error state, since we might
+		// have partially overwritten the code cache already.
+		if (error())
+			return;
+
 	    NIns* SOT = 0;
 	    if (frag->isRoot()) {
 	        SOT = frag->loopEntry;
             verbose_only( verbose_outputf("        %p:",_nIns); )
 	    } else {
 	        SOT = frag->root->fragEntry;
 	    }
         AvmAssert(SOT);
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -3162,17 +3162,16 @@ CheckCallbackTime(JSContext *cx, JSShell
         JS_TriggerOperationCallback(cx);
     }
 }
 
 static void
 WatchdogMain(void *arg)
 {
     JSRuntime *rt = (JSRuntime *) arg;
-    PRBool isRunning = JS_TRUE;
 
     JS_LOCK_GC(rt);
     while (gWatchdogThread) {
         PRIntervalTime now = PR_IntervalNow();
         PRIntervalTime sleepDuration = PR_INTERVAL_NO_TIMEOUT;
         JSContext *iter = NULL;
         JSContext *acx;
 
--- a/js/src/trace-test.js
+++ b/js/src/trace-test.js
@@ -2482,17 +2482,17 @@ function testWeirdDateParse() {
 }
 testWeirdDateParse.expected = "11,17,2008,11,17,2008,11,17,2008,11,17,2008,11,17,2008";
 testWeirdDateParse.jitstats = {
     recorderStarted: 7,
     recorderAborted: 1,
     traceCompleted: 6,
     traceTriggered: 14,
     unstableLoopVariable: 3,
-    noCompatInnerTrees: 1
+    noCompatInnerTrees: 0
 };
 test(testWeirdDateParse);
 
 function testUndemotableBinaryOp() {
     var out = [];
     for (let j = 0; j < 5; ++j) { out.push(6 - ((void 0) ^ 0x80000005)); }
     return out.join(",");
 }
@@ -4122,16 +4122,57 @@ function testInterpreterReentry3() {
     for (let i=0;i<5;++i) this["y" + i] = function(){};
     this.__defineGetter__('e', function (x2) { yield; });
     [1 for each (a in this) for (b in {})];
     return 1;
 }
 testInterpreterReentry3.expected = 1;
 test(testInterpreterReentry3);
 
+function testInterpreterReentry4() {
+    var obj = {a:1, b:1, c:1, d:1, get e() 1000 };
+    for (var p in obj)
+        obj[p];
+}
+test(testInterpreterReentry4);
+
+function testInterpreterReentry5() {
+    var arr = [0, 1, 2, 3, 4];
+    arr.__defineGetter__("4", function() 1000);
+    for (var i = 0; i < 5; i++)
+        arr[i];
+    for (var p in arr)
+        arr[p];
+}
+test(testInterpreterReentry5);
+
+/* // These tests should pass but currently crash, pending bug 462027.
+function testInterpreterReentry6() {
+    var obj = {a:1, b:1, c:1, d:1, set e(x) { this._e = x; }};
+    for (var p in obj)
+        obj[p] = "grue";
+    return obj._e;
+}
+testInterpreterReentry6.expected = "grue";
+test(testInterpreterReentry6);
+
+function testInterpreterReentry7() {
+    var arr = [0, 1, 2, 3, 4];
+    arr.__defineSetter__("4", function(x) { this._4 = x; });
+    for (var i = 0; i < 5; i++)
+        arr[i] = "grue";
+    var tmp = arr._4;
+    for (var p in arr)
+        arr[p] = "bleen";
+    return tmp + " " + arr._4;
+}
+testInterpreterReentry7.expected = "grue bleen";
+test(testInterpreterReentry7);
+*/
+
 /*****************************************************************************
  *                                                                           *
  *  _____ _   _  _____ ______ _____ _______                                  *
  * |_   _| \ | |/ ____|  ____|  __ \__   __|                                 *
  *   | | |  \| | (___ | |__  | |__) | | |                                    *
  *   | | | . ` |\___ \|  __| |  _  /  | |                                    *
  *  _| |_| |\  |____) | |____| | \ \  | |                                    *
  * |_____|_| \_|_____/|______|_|  \_\ |_|                                    *