Merge tm to m-c
author      Robert Sayre <sayrer@gmail.com>
date        Wed, 04 Feb 2009 12:47:50 -0800
changeset   24626  76ca30e94e5c70d17b3068786e2e7694de5c30a1
parent      24589  d3d68bd09144608f4d04ec47ec3ff3a294d56405 (current diff)
parent      24625  569acf636d508785bd5a126d5341eb4f79f903db (diff)
child       24628  e2a26d16bf06577d5fa235e91d0ed3afd888e188
child       24637  ad04b0283f8a9ef648c79e8e9fe448e6d4378895
child       24837  8a2b9464d6c5a4e7c165687ccf6f81896e167d51
push id     5132
push user   rsayre@mozilla.com
push date   Wed, 04 Feb 2009 20:48:09 +0000
milestone   1.9.2a1pre
js/src/trace-test.js
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -828,18 +828,16 @@ array_defineProperty(JSContext *cx, JSOb
 
     if (id == ATOM_TO_JSID(cx->runtime->atomState.lengthAtom))
         return JS_TRUE;
 
     isIndex = js_IdIsIndex(ID_TO_VALUE(id), &i);
     if (!isIndex || attrs != JSPROP_ENUMERATE) {
         if (!ENSURE_SLOW_ARRAY(cx, obj))
             return JS_FALSE;
-        if (isIndex && STOBJ_IS_DELEGATE(obj))
-            cx->runtime->anyArrayProtoHasElement = JS_TRUE;
         return js_DefineProperty(cx, obj, id, value, getter, setter, attrs, propp);
     }
 
     return array_setProperty(cx, obj, id, &value);
 }
 
 static JSBool
 array_getAttributes(JSContext *cx, JSObject *obj, jsid id, JSProperty *prop,
@@ -1539,18 +1537,20 @@ InitArrayObject(JSContext *cx, JSObject 
     return JS_TRUE;
 }
 
 #ifdef JS_TRACER
 static JSString* FASTCALL
 Array_p_join(JSContext* cx, JSObject* obj, JSString *str)
 {
     jsval v;
-    if (!array_join_sub(cx, obj, TO_STRING, str, &v))
+    if (!array_join_sub(cx, obj, TO_STRING, str, &v)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return NULL;
+    }
     JS_ASSERT(JSVAL_IS_STRING(v));
     return JSVAL_TO_STRING(v);
 }
 
 static JSString* FASTCALL
 Array_p_toString(JSContext* cx, JSObject* obj)
 {
     jsval v;
@@ -2141,17 +2141,18 @@ array_push1_dense(JSContext* cx, JSObjec
 static jsval FASTCALL
 Array_p_push1(JSContext* cx, JSObject* obj, jsval v)
 {
     if (OBJ_IS_DENSE_ARRAY(cx, obj) 
         ? array_push1_dense(cx, obj, v, &v)
         : array_push_slowly(cx, obj, 1, &v, &v)) {
         return v;
     }
-    return JSVAL_ERROR_COOKIE;
+    cx->builtinStatus |= JSBUILTIN_ERROR;
+    return JSVAL_VOID;
 }
 #endif
 
 static JSBool
 array_push(JSContext *cx, uintN argc, jsval *vp)
 {
     JSObject *obj;
 
@@ -2208,22 +2209,23 @@ array_pop_dense(JSContext *cx, JSObject*
     
 }
 
 #ifdef JS_TRACER
 static jsval FASTCALL
 Array_p_pop(JSContext* cx, JSObject* obj)
 {
     jsval v;
-    if (OBJ_IS_DENSE_ARRAY(cx, obj) 
+    if (OBJ_IS_DENSE_ARRAY(cx, obj)
         ? array_pop_dense(cx, obj, &v)
         : array_pop_slowly(cx, obj, &v)) {
         return v;
     }
-    return JSVAL_ERROR_COOKIE;
+    cx->builtinStatus |= JSBUILTIN_ERROR;
+    return JSVAL_VOID;
 }
 #endif
 
 static JSBool
 array_pop(JSContext *cx, uintN argc, jsval *vp)
 {
     JSObject *obj;
 
--- a/js/src/jsbuiltins.h
+++ b/js/src/jsbuiltins.h
@@ -44,18 +44,18 @@
 
 #include "nanojit/nanojit.h"
 #include "jstracer.h"
 
 #ifdef THIS
 #undef THIS
 #endif
 
-enum JSTNErrType { INFALLIBLE, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_JSVAL };
-enum { JSTN_ERRTYPE_MASK = 7, JSTN_MORE = 8 };
+enum JSTNErrType { INFALLIBLE, FAIL_STATUS, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_COOKIE };
+enum { JSTN_ERRTYPE_MASK = 0x07, JSTN_UNBOX_AFTER = 0x08, JSTN_MORE = 0x10 };
 
 #define JSTN_ERRTYPE(jstn)  ((jstn)->flags & JSTN_ERRTYPE_MASK)
 
 /*
  * |prefix| and |argtypes| declare what arguments should be passed to the
  * native function.  |prefix| can contain the following characters:
  *
  * 'C': a JSContext* argument
@@ -80,17 +80,17 @@ enum { JSTN_ERRTYPE_MASK = 7, JSTN_MORE 
  * 'f': a JSObject* argument that is of class js_FunctionClass
  * 'v': a jsval argument (boxing whatever value is actually being passed in)
  */
 struct JSTraceableNative {
     JSFastNative            native;
     const nanojit::CallInfo *builtin;
     const char              *prefix;
     const char              *argtypes;
-    uintN                   flags;  /* JSTN_MORE | JSTNErrType */
+    uintN                   flags;  /* JSTNErrType | JSTN_UNBOX_AFTER | JSTN_MORE */
 };
 
 /*
  * We use a magic boxed pointer value to represent error conditions that
  * trigger a side exit. The address is so low that it should never be actually
  * in use. If it is, a performance regression occurs, not an actual runtime
  * error.
  */
@@ -115,61 +115,95 @@ struct JSTraceableNative {
 #endif
 
 /*
  * Supported types for builtin functions. 
  *
  * Types with -- for the two string fields are not permitted as argument types
  * in JS_DEFINE_TRCINFO.
  *
- * If a traceable native can fail, the values that indicate failure are part of
- * the return type:
- *     JSVAL_FAIL:       JSVAL_ERROR_COOKIE
- *     BOOL_FAIL:        JSVAL_TO_BOOLEAN(JSVAL_VOID)
- *     INT32_FAIL:       any negative value
- *     STRING_FAIL:      NULL
- *     OBJECT_FAIL_NULL: NULL
+ * There are three kinds of traceable-native error handling.
+ *
+ *   - If a traceable native's return type ends with _FAIL, it always runs to
+ *     completion.  It can either succeed or fail with an error or exception;
+ *     on success, it may or may not stay on trace.  There may be side effects
+ *     in any case.  If the call succeeds but bails off trace, we resume in the
+ *     interpreter at the next opcode.
+ *
+ *     _FAIL builtins indicate failure or bailing off trace by setting bits in
+ *     cx->builtinStatus.
+ *
+ *   - If a traceable native's return type contains _RETRY, it can either
+ *     succeed, fail with a JS exception, or tell the caller to bail off trace
+ *     and retry the call from the interpreter.  The last case happens if the
+ *     builtin discovers that it can't do its job without examining the JS
+ *     stack, reentering the interpreter, accessing properties of the global
+ *     object, etc.
+ *
+ *     The builtin must detect the need to retry before committing any side
+ *     effects.  If a builtin can't do this, it must use a _FAIL return type
+ *     instead of _RETRY.
+ *
+ *     _RETRY builtins indicate failure with a special return value that
+ *     depends on the return type:
+ *
+ *         BOOL_RETRY: JSVAL_TO_BOOLEAN(JSVAL_VOID)
+ *         INT32_RETRY: any negative value
+ *         STRING_RETRY: NULL
+ *         OBJECT_RETRY_NULL: NULL
+ *         JSVAL_RETRY: JSVAL_ERROR_COOKIE
+ *
+ *     _RETRY function calls are faster than _FAIL calls.  Each _RETRY call
+ *     saves a write to cx->bailExit and a read from cx->builtinStatus.
+ *
+ *   - All other traceable natives are infallible (e.g. Date.now, Math.log).
  *
  * Special builtins known to the tracer can have their own idiosyncratic
  * error codes.
  *
  * When a traceable native returns a value indicating failure, we fall off
  * trace.  If an exception is pending, it is thrown; otherwise, we assume the
  * builtin had no side effects and retry the current bytecode in the
  * interpreter.
  * 
  * So a builtin must not return a value indicating failure after causing side
  * effects (such as reporting an error), without setting an exception pending.
  * The operation would be retried, despite the first attempt's observable
  * effects.
  */
 #define _JS_CTYPE(ctype, size, pch, ach, flags)     (ctype, size, pch, ach, flags)
-#define _JS_CTYPE_CONTEXT          _JS_CTYPE(JSContext *,            _JS_PTR,"C", "", INFALLIBLE)
-#define _JS_CTYPE_RUNTIME          _JS_CTYPE(JSRuntime *,            _JS_PTR,"R", "", INFALLIBLE)
-#define _JS_CTYPE_THIS             _JS_CTYPE(JSObject *,             _JS_PTR,"T", "", INFALLIBLE)
-#define _JS_CTYPE_THIS_DOUBLE      _JS_CTYPE(jsdouble,               _JS_F64,"D", "", INFALLIBLE)
-#define _JS_CTYPE_THIS_STRING      _JS_CTYPE(JSString *,             _JS_PTR,"S", "", INFALLIBLE)
-#define _JS_CTYPE_PC               _JS_CTYPE(jsbytecode *,           _JS_PTR,"P", "", INFALLIBLE)
-#define _JS_CTYPE_JSVAL            _JS_CTYPE(jsval,                  _JS_PTR, "","v", INFALLIBLE)
-#define _JS_CTYPE_JSVAL_FAIL       _JS_CTYPE(jsval,                  _JS_PTR, --, --, FAIL_JSVAL)
-#define _JS_CTYPE_BOOL             _JS_CTYPE(JSBool,                 _JS_I32, "","i", INFALLIBLE)
-#define _JS_CTYPE_BOOL_FAIL        _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_VOID)
-#define _JS_CTYPE_INT32            _JS_CTYPE(int32,                  _JS_I32, "","i", INFALLIBLE)
-#define _JS_CTYPE_INT32_FAIL       _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_NEG)
-#define _JS_CTYPE_UINT32           _JS_CTYPE(uint32,                 _JS_I32, --, --, INFALLIBLE)
-#define _JS_CTYPE_DOUBLE           _JS_CTYPE(jsdouble,               _JS_F64, "","d", INFALLIBLE)
-#define _JS_CTYPE_STRING           _JS_CTYPE(JSString *,             _JS_PTR, "","s", INFALLIBLE)
-#define _JS_CTYPE_STRING_FAIL      _JS_CTYPE(JSString *,             _JS_PTR, --, --, FAIL_NULL)
-#define _JS_CTYPE_OBJECT           _JS_CTYPE(JSObject *,             _JS_PTR, "","o", INFALLIBLE)
-#define _JS_CTYPE_OBJECT_FAIL_NULL _JS_CTYPE(JSObject *,             _JS_PTR, --, --, FAIL_NULL)
-#define _JS_CTYPE_REGEXP           _JS_CTYPE(JSObject *,             _JS_PTR, "","r", INFALLIBLE)
-#define _JS_CTYPE_SCOPEPROP        _JS_CTYPE(JSScopeProperty *,      _JS_PTR, --, --, INFALLIBLE)
-#define _JS_CTYPE_SIDEEXIT         _JS_CTYPE(SideExit *,             _JS_PTR, --, --, INFALLIBLE)
-#define _JS_CTYPE_INTERPSTATE      _JS_CTYPE(InterpState *,          _JS_PTR, --, --, INFALLIBLE)
-#define _JS_CTYPE_FRAGMENT         _JS_CTYPE(nanojit::Fragment *,    _JS_PTR, --, --, INFALLIBLE)
+#define _JS_JSVAL_CTYPE(size, pch, ach, flags)  (jsval, size, pch, ach, (flags | JSTN_UNBOX_AFTER))
+
+#define _JS_CTYPE_CONTEXT           _JS_CTYPE(JSContext *,            _JS_PTR,"C", "", INFALLIBLE)
+#define _JS_CTYPE_RUNTIME           _JS_CTYPE(JSRuntime *,            _JS_PTR,"R", "", INFALLIBLE)
+#define _JS_CTYPE_THIS              _JS_CTYPE(JSObject *,             _JS_PTR,"T", "", INFALLIBLE)
+#define _JS_CTYPE_THIS_DOUBLE       _JS_CTYPE(jsdouble,               _JS_F64,"D", "", INFALLIBLE)
+#define _JS_CTYPE_THIS_STRING       _JS_CTYPE(JSString *,             _JS_PTR,"S", "", INFALLIBLE)
+#define _JS_CTYPE_PC                _JS_CTYPE(jsbytecode *,           _JS_PTR,"P", "", INFALLIBLE)
+#define _JS_CTYPE_JSVAL             _JS_JSVAL_CTYPE(                  _JS_PTR, "","v", INFALLIBLE)
+#define _JS_CTYPE_JSVAL_RETRY       _JS_JSVAL_CTYPE(                  _JS_PTR, --, --, FAIL_COOKIE)
+#define _JS_CTYPE_JSVAL_FAIL        _JS_JSVAL_CTYPE(                  _JS_PTR, --, --, FAIL_STATUS)
+#define _JS_CTYPE_BOOL              _JS_CTYPE(JSBool,                 _JS_I32, "","i", INFALLIBLE)
+#define _JS_CTYPE_BOOL_RETRY        _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_VOID)
+#define _JS_CTYPE_BOOL_FAIL         _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_STATUS)
+#define _JS_CTYPE_INT32             _JS_CTYPE(int32,                  _JS_I32, "","i", INFALLIBLE)
+#define _JS_CTYPE_INT32_RETRY       _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_NEG)
+#define _JS_CTYPE_UINT32            _JS_CTYPE(uint32,                 _JS_I32, --, --, INFALLIBLE)
+#define _JS_CTYPE_DOUBLE            _JS_CTYPE(jsdouble,               _JS_F64, "","d", INFALLIBLE)
+#define _JS_CTYPE_STRING            _JS_CTYPE(JSString *,             _JS_PTR, "","s", INFALLIBLE)
+#define _JS_CTYPE_STRING_RETRY      _JS_CTYPE(JSString *,             _JS_PTR, --, --, FAIL_NULL)
+#define _JS_CTYPE_STRING_FAIL       _JS_CTYPE(JSString *,             _JS_PTR, --, --, FAIL_STATUS)
+#define _JS_CTYPE_OBJECT            _JS_CTYPE(JSObject *,             _JS_PTR, "","o", INFALLIBLE)
+#define _JS_CTYPE_OBJECT_RETRY_NULL _JS_CTYPE(JSObject *,             _JS_PTR, --, --, FAIL_NULL)
+#define _JS_CTYPE_OBJECT_FAIL       _JS_CTYPE(JSObject *,             _JS_PTR, --, --, FAIL_STATUS)
+#define _JS_CTYPE_REGEXP            _JS_CTYPE(JSObject *,             _JS_PTR, "","r", INFALLIBLE)
+#define _JS_CTYPE_SCOPEPROP         _JS_CTYPE(JSScopeProperty *,      _JS_PTR, --, --, INFALLIBLE)
+#define _JS_CTYPE_SIDEEXIT          _JS_CTYPE(SideExit *,             _JS_PTR, --, --, INFALLIBLE)
+#define _JS_CTYPE_INTERPSTATE       _JS_CTYPE(InterpState *,          _JS_PTR, --, --, INFALLIBLE)
+#define _JS_CTYPE_FRAGMENT          _JS_CTYPE(nanojit::Fragment *,    _JS_PTR, --, --, INFALLIBLE)
 
 #define _JS_EXPAND(tokens)  tokens
 
 #define _JS_CTYPE_TYPE2(t,s,p,a,f)      t
 #define _JS_CTYPE_TYPE(tyname)          _JS_EXPAND(_JS_CTYPE_TYPE2    _JS_CTYPE_##tyname)
 #define _JS_CTYPE_RETSIZE2(t,s,p,a,f)   s##_RETSIZE
 #define _JS_CTYPE_RETSIZE(tyname)       _JS_EXPAND(_JS_CTYPE_RETSIZE2 _JS_CTYPE_##tyname)
 #define _JS_CTYPE_ARGSIZE2(t,s,p,a,f)   s##_ARGSIZE
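
The long comment in this file distinguishes _FAIL builtins (which run to completion and report through cx->builtinStatus) from _RETRY builtins (which signal "redo this in the interpreter" with a sentinel return value and must commit no side effects first), and the flag enum now packs the error type into the low three bits alongside JSTN_UNBOX_AFTER and JSTN_MORE. The sketch below only exercises that bit layout; the enum values are copied from the hunk above, everything else is illustrative.

    // Exercise the widened JSTraceableNative flag layout.
    #include <cassert>

    enum JSTNErrType { INFALLIBLE, FAIL_STATUS, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_COOKIE };
    enum { JSTN_ERRTYPE_MASK = 0x07, JSTN_UNBOX_AFTER = 0x08, JSTN_MORE = 0x10 };

    static unsigned errtype(unsigned flags) { return flags & JSTN_ERRTYPE_MASK; }

    int main() {
        // A jsval-returning _FAIL builtin: FAIL_STATUS plus the unbox-after bit,
        // which _JS_JSVAL_CTYPE now ORs in automatically.
        unsigned fail = FAIL_STATUS | JSTN_UNBOX_AFTER;
        assert(errtype(fail) == FAIL_STATUS);
        assert(fail & JSTN_UNBOX_AFTER);
        assert(!(fail & JSTN_MORE));            // last (only) overload in its table

        // A _RETRY builtin keeps the cheaper sentinel-value protocol and needs
        // no builtinStatus write at all.
        unsigned retry = FAIL_COOKIE;
        assert(errtype(retry) == FAIL_COOKIE);
        return 0;
    }
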
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -88,16 +88,19 @@ typedef struct JSGSNCache {
         }                                                                     \
         GSN_CACHE_METER(cache, clears);                                       \
     JS_END_MACRO
 
 /* These helper macros take a cx as parameter and operate on its GSN cache. */
 #define JS_CLEAR_GSN_CACHE(cx)      GSN_CACHE_CLEAR(&JS_GSN_CACHE(cx))
 #define JS_METER_GSN_CACHE(cx,cnt)  GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
 
+typedef struct InterpState InterpState;
+typedef struct VMSideExit VMSideExit;
+
 #ifdef __cplusplus
 namespace nanojit {
     class Fragment;
     class Fragmento;
     class LirBuffer;
 }
 class TraceRecorder;
 extern "C++" { template<typename T> class Queue; }
@@ -123,45 +126,65 @@ struct GlobalState {
  * frequencies for all JavaScript code loaded into that runtime.
  */
 typedef struct JSTraceMonitor {
     /*
      * Flag set when running (or recording) JIT-compiled code. This prevents
      * both interpreter activation and last-ditch garbage collection when up
      * against our runtime's memory limits. This flag also suppresses calls to
      * JS_ReportOutOfMemory when failing due to runtime limits.
+     *
+     * !onTrace && !recorder: not on trace.
+     * onTrace && recorder: recording a trace.
+     * onTrace && !recorder: executing a trace.
+     * !onTrace && recorder && !prohibitRecording:
+     *      not on trace; deep-aborted while recording.
+     * !onTrace && recorder && prohibitRecording:
+     *      not on trace; deep-bailed in SpiderMonkey code called from a
+     *      trace. JITted code is on the stack.
      */
-    JSBool                  onTrace;
+    JSPackedBool            onTrace;
+
+    /*
+     * Do not start recording after a deep bail.  That would free JITted code
+     * pages that we will later return to.
+     */
+    JSPackedBool            prohibitRecording;
+
+    /* See reservedObjects below. */
+    JSPackedBool            useReservedObjects;
+
     CLS(nanojit::LirBuffer) lirbuf;
     CLS(nanojit::Fragmento) fragmento;
     CLS(TraceRecorder)      recorder;
     jsval                   *reservedDoublePool;
     jsval                   *reservedDoublePoolPtr;
 
     struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
     struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE];
     JSBool needFlush;
 
     /*
      * reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects.
      * The JIT uses this to ensure that leaving a trace tree can't fail.
      */
     JSObject                *reservedObjects;
-    JSBool                  useReservedObjects;
 
     /* Fragmento for the regular expression compiler. This is logically
      * a distinct compiler but needs to be managed in exactly the same
      * way as the real tracing Fragmento. */
     CLS(nanojit::LirBuffer) reLirBuf;
     CLS(nanojit::Fragmento) reFragmento;
 
     /* Keep a list of recorders we need to abort on cache flush. */
     CLS(TraceRecorder)      abortStack;
 } JSTraceMonitor;
 
+typedef struct InterpStruct InterpStruct;
+
 #ifdef JS_TRACER
 # define JS_ON_TRACE(cx)            (JS_TRACE_MONITOR(cx).onTrace)
 #else
 # define JS_ON_TRACE(cx)            JS_FALSE
 #endif
 
 #ifdef DEBUG
 # define JS_EVAL_CACHE_METERING 1
@@ -255,16 +278,24 @@ typedef enum JSDestroyContextMode {
 
 typedef enum JSRuntimeState {
     JSRTS_DOWN,
     JSRTS_LAUNCHING,
     JSRTS_UP,
     JSRTS_LANDING
 } JSRuntimeState;
 
+#ifdef JS_TRACER
+typedef enum JSBuiltinStatus {
+    JSBUILTIN_OK = 0,
+    JSBUILTIN_BAILED = 1,
+    JSBUILTIN_ERROR = 2
+} JSBuiltinStatus;
+#endif
+
 typedef enum JSBuiltinFunctionId {
     JSBUILTIN_ObjectToIterator,
     JSBUILTIN_CallIteratorNext,
     JSBUILTIN_GetProperty,
     JSBUILTIN_GetElement,
     JSBUILTIN_SetProperty,
     JSBUILTIN_SetElement,
     JSBUILTIN_LIMIT
@@ -975,18 +1006,41 @@ struct JSContext {
     /* Security callbacks that override any defined on the runtime. */
     JSSecurityCallbacks *securityCallbacks;
 
     /* Pinned regexp pool used for regular expressions. */
     JSArenaPool         regexpPool;
 
     /* Stored here to avoid passing it around as a parameter. */
     uintN               resolveFlags;
+    
+    /* Current bytecode location (or NULL if no hint was supplied). */
+    jsbytecode         *pcHint;
+
+#ifdef JS_TRACER
+    /*
+     * State for the current tree execution.  bailExit is valid if the tree has
+     * called back into native code via a _FAIL builtin and has not yet bailed,
+     * else garbage (NULL in debug builds).
+     */
+    InterpState         *interpState;
+    VMSideExit          *bailExit;
+
+    /*
+     * Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
+     * JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
+     * if an error or exception occurred. Cleared on side exit.
+     */
+    uint32              builtinStatus;
+#endif
 };
 
+#define BEGIN_PC_HINT(pc)       (cx->pcHint = (pc))
+#define END_PC_HINT()           (cx->pcHint = NULL)
+
 #ifdef JS_THREADSAFE
 # define JS_THREAD_ID(cx)       ((cx)->thread ? (cx)->thread->id : 0)
 #endif
 
 #ifdef __cplusplus
 
 static inline JSAtom **
 FrameAtomBase(JSContext *cx, JSStackFrame *fp)
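
The new comment on JSTraceMonitor enumerates the legal onTrace/recorder/prohibitRecording combinations, and JSContext gains interpState, bailExit and builtinStatus in support of the _FAIL protocol. The classifier below merely restates that state table as code; MonitorModel is a simplified stand-in for JSTraceMonitor, used only to make the combinations explicit.

    // Restate the JSTraceMonitor state table from the comment above.
    #include <cstdio>

    struct MonitorModel {
        bool onTrace;
        bool recorder;            // "a TraceRecorder exists"
        bool prohibitRecording;
    };

    static const char* describe(const MonitorModel& tm) {
        if (!tm.onTrace && !tm.recorder)  return "not on trace";
        if (tm.onTrace && tm.recorder)    return "recording a trace";
        if (tm.onTrace)                   return "executing a trace";
        return tm.prohibitRecording
               ? "deep-bailed from a trace; JITted code still on the stack"
               : "deep-aborted while recording";
    }

    int main() {
        MonitorModel states[] = {
            {false, false, false}, {true, true, false}, {true, false, false},
            {false, true, false},  {false, true, true},
        };
        for (const MonitorModel& tm : states)
            std::printf("onTrace=%d recorder=%d prohibit=%d -> %s\n",
                        tm.onTrace, tm.recorder, tm.prohibitRecording, describe(tm));
    }
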
--- a/js/src/jsdate.cpp
+++ b/js/src/jsdate.cpp
@@ -2026,17 +2026,17 @@ JS_DEFINE_TRCINFO_1(date_now,
 static JSFunctionSpec date_static_methods[] = {
     JS_FN("UTC",                 date_UTC,                MAXARGS,0),
     JS_FN("parse",               date_parse,              1,0),
     JS_TN("now",                 date_now,                0,0, date_now_trcinfo),
     JS_FS_END
 };
 
 JS_DEFINE_TRCINFO_1(date_valueOf,
-    (3, (static, JSVAL_FAIL, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
+    (3, (static, JSVAL_RETRY, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
 
 static JSFunctionSpec date_methods[] = {
     JS_FN("getTime",             date_getTime,            0,0),
     JS_FN("getTimezoneOffset",   date_getTimezoneOffset,  0,0),
     JS_FN("getYear",             date_getYear,            0,0),
     JS_FN("getFullYear",         date_getFullYear,        0,0),
     JS_FN("getUTCFullYear",      date_getUTCFullYear,     0,0),
     JS_FN("getMonth",            date_getMonth,           0,0),
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -1237,46 +1237,39 @@ JS_SetDestroyScriptHook(JSRuntime *rt, J
 
 JS_PUBLIC_API(JSBool)
 JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
                           const jschar *chars, uintN length,
                           const char *filename, uintN lineno,
                           jsval *rval)
 {
     JSObject *scobj;
-    JSScript *script, *oldscript;
-    JSStackFrame **disp, *displaySave;
+    JSScript *script;
     JSBool ok;
 
     scobj = JS_GetFrameScopeChain(cx, fp);
     if (!scobj)
         return JS_FALSE;
 
-    oldscript = fp->script;
+    /*
+     * NB: This function breaks the assumption that the compiler can see all
+     * calls and properly compute a static depth. In order to get around this,
+     * we use a static depth that will cause us not to attempt to optimize
+     * variable references made by this frame.
+     */
     script = js_CompileScript(cx, scobj, fp, JS_StackFramePrincipals(cx, fp),
                               TCF_COMPILE_N_GO |
-                              TCF_PUT_STATIC_DEPTH(oldscript->staticDepth + 1),
+                              TCF_PUT_STATIC_DEPTH(JS_DISPLAY_SIZE),
                               chars, length, NULL,
                               filename, lineno);
     if (!script)
         return JS_FALSE;
 
-    /* Ensure that the display is up to date for this particular stack frame. */
-    if (oldscript->staticDepth < JS_DISPLAY_SIZE) {
-        disp = &cx->display[oldscript->staticDepth];
-        displaySave = *disp;
-        *disp = fp;
-    } else {
-        disp = NULL;
-        displaySave = NULL;
-    }
     ok = js_Execute(cx, scobj, script, fp, JSFRAME_DEBUGGER | JSFRAME_EVAL,
                     rval);
-    if (disp)
-        *disp = displaySave;
     js_DestroyScript(cx, script);
     return ok;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_EvaluateInStackFrame(JSContext *cx, JSStackFrame *fp,
                         const char *bytes, uintN length,
                         const char *filename, uintN lineno,
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -1700,17 +1700,16 @@ EmitIndexOp(JSContext *cx, JSOp op, uint
  * caller's lexical environment, and embedding a false return on error.
  */
 #define EMIT_INDEX_OP(op, index)                                              \
     JS_BEGIN_MACRO                                                            \
         if (!EmitIndexOp(cx, op, index, cg))                                  \
             return JS_FALSE;                                                  \
     JS_END_MACRO
 
-
 static JSBool
 EmitAtomOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
 {
     JSAtomListElement *ale;
 
     JS_ASSERT(JOF_OPTYPE(op) == JOF_ATOM);
     if (op == JSOP_GETPROP &&
         pn->pn_atom == cx->runtime->atomState.lengthAtom) {
@@ -1887,17 +1886,17 @@ BindNameToSlot(JSContext *cx, JSCodeGene
                 return JS_TRUE;
 
             /*
              * We are compiling eval or debug script inside a function frame
              * and the scope chain matches function's variable object.
              * Optimize access to function's arguments and variable and the
              * arguments object.
              */
-            if (PN_OP(pn) != JSOP_NAME || cg->staticDepth > JS_DISPLAY_SIZE)
+            if (PN_OP(pn) != JSOP_NAME || cg->staticDepth >= JS_DISPLAY_SIZE)
                 goto arguments_check;
             localKind = js_LookupLocal(cx, caller->fun, atom, &index);
             if (localKind == JSLOCAL_NONE)
                 goto arguments_check;
 
             /*
              * Don't generate upvars on the left side of a for loop. See
              * bug 470758.
@@ -2335,21 +2334,50 @@ EmitXMLName(JSContext *cx, JSParseNode *
         return JS_FALSE;
     }
 
     return js_Emit1(cx, cg, op) >= 0;
 }
 #endif
 
 static JSBool
+EmitSpecialPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg)
+{
+    /*
+     * Special case for obj.__proto__, obj.__parent__, obj.__count__ to
+     * deoptimize away from fast paths in the interpreter and trace recorder,
+     * which skip dense array instances by going up to Array.prototype before
+     * looking up the property name.
+     */
+    JSAtomListElement *ale = js_IndexAtom(cx, pn->pn_atom, &cg->atomList);
+    if (!ale)
+        return JS_FALSE;
+    if (!EmitIndexOp(cx, JSOP_QNAMEPART, ALE_INDEX(ale), cg))
+        return JS_FALSE;
+    if (js_Emit1(cx, cg, op) < 0)
+        return JS_FALSE;
+    return JS_TRUE;
+}
+
+static JSBool
 EmitPropOp(JSContext *cx, JSParseNode *pn, JSOp op, JSCodeGenerator *cg,
            JSBool callContext)
 {
     JSParseNode *pn2, *pndot, *pnup, *pndown;
     ptrdiff_t top;
+    
+    /* Special case deoptimization on __proto__, __count__ and __parent__. */
+    if (pn->pn_arity == PN_NAME && 
+        (pn->pn_atom == cx->runtime->atomState.protoAtom || 
+         pn->pn_atom == cx->runtime->atomState.countAtom ||
+         pn->pn_atom == cx->runtime->atomState.parentAtom)) {
+        if (pn->pn_expr && !js_EmitTree(cx, cg, pn->pn_expr))
+            return JS_FALSE;
+        return EmitSpecialPropOp(cx, pn, callContext ? JSOP_CALLELEM : JSOP_GETELEM, cg);
+    }
 
     pn2 = pn->pn_expr;
     if (callContext) {
         JS_ASSERT(pn->pn_type == TOK_DOT);
         JS_ASSERT(op == JSOP_GETPROP);
         op = JSOP_CALLPROP;
     } else if (op == JSOP_GETPROP && pn->pn_type == TOK_DOT) {
         if (pn2->pn_op == JSOP_THIS) {
@@ -2420,18 +2448,30 @@ EmitPropOp(JSContext *cx, JSParseNode *p
             return JS_FALSE;
 
         do {
             /* Walk back up the list, emitting annotated name ops. */
             if (js_NewSrcNote2(cx, cg, SRC_PCBASE,
                                CG_OFFSET(cg) - pndown->pn_offset) < 0) {
                 return JS_FALSE;
             }
-            if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg))
-                return JS_FALSE;
+
+            /* 
+             * Special case deoptimization on __proto__, __count__ and
+             * __parent__, as above. 
+             */
+            if (pndot->pn_arity == PN_NAME && 
+                (pndot->pn_atom == cx->runtime->atomState.protoAtom || 
+                 pndot->pn_atom == cx->runtime->atomState.countAtom ||
+                 pndot->pn_atom == cx->runtime->atomState.parentAtom)) {                
+                if (!EmitSpecialPropOp(cx, pndot, JSOP_GETELEM, cg))
+                    return JS_FALSE;
+            } else if (!EmitAtomOp(cx, pndot, PN_OP(pndot), cg)) {
+                return JS_FALSE;
+            }
 
             /* Reverse the pn_expr link again. */
             pnup = pndot->pn_expr;
             pndot->pn_expr = pndown;
             pndown = pndot;
         } while ((pndot = pnup) != NULL);
     } else {
         if (!js_EmitTree(cx, cg, pn2))
@@ -5333,16 +5373,21 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
                 EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
                 break;
               case TOK_DOT:
                 if (js_Emit1(cx, cg, JSOP_DUP) < 0)
                     return JS_FALSE;
                 if (pn2->pn_atom == cx->runtime->atomState.lengthAtom) {
                     if (js_Emit1(cx, cg, JSOP_LENGTH) < 0)
                         return JS_FALSE;
+                } else if (pn2->pn_atom == cx->runtime->atomState.protoAtom) {
+                    if (!EmitIndexOp(cx, JSOP_QNAMEPART, atomIndex, cg))
+                        return JS_FALSE;
+                    if (js_Emit1(cx, cg, JSOP_GETELEM) < 0)
+                        return JS_FALSE;
                 } else {
                     EMIT_INDEX_OP(JSOP_GETPROP, atomIndex);
                 }
                 break;
               case TOK_LB:
 #if JS_HAS_LVALUE_RETURN
               case TOK_LP:
 #endif
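
The jsemit.cpp changes compile dotted access to __proto__, __parent__ and __count__ as "push the property name, then JSOP_GETELEM" instead of JSOP_GETPROP, so the interpreter and trace-recorder fast paths for ordinary named properties are bypassed. The opcode names below are real; chooseOp and the string comparison are an illustrative simplification of the logic added to EmitPropOp/EmitSpecialPropOp.

    // Which shape of bytecode the emitter now chooses for obj.<name>.
    #include <cstdio>
    #include <cstring>

    enum EmitChoice { EMIT_GETPROP, EMIT_QNAMEPART_THEN_GETELEM };

    static EmitChoice chooseOp(const char* propName) {
        if (!std::strcmp(propName, "__proto__") ||
            !std::strcmp(propName, "__parent__") ||
            !std::strcmp(propName, "__count__"))
            return EMIT_QNAMEPART_THEN_GETELEM;   // deoptimized element access
        return EMIT_GETPROP;                      // normal fast property access
    }

    int main() {
        std::printf("obj.foo       -> %s\n",
                    chooseOp("foo") == EMIT_GETPROP ? "JSOP_GETPROP"
                                                    : "JSOP_QNAMEPART + JSOP_GETELEM");
        std::printf("obj.__proto__ -> %s\n",
                    chooseOp("__proto__") == EMIT_GETPROP ? "JSOP_GETPROP"
                                                          : "JSOP_QNAMEPART + JSOP_GETELEM");
    }
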
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1367,24 +1367,25 @@ out2:
     return ok;
 
 bad:
     js_ReportIsNotFunction(cx, vp, flags & JSINVOKE_FUNFLAGS);
     ok = JS_FALSE;
     goto out2;
 }
 
-JS_REQUIRES_STACK JSBool
+JSBool
 js_InternalInvoke(JSContext *cx, JSObject *obj, jsval fval, uintN flags,
                   uintN argc, jsval *argv, jsval *rval)
 {
     jsval *invokevp;
     void *mark;
     JSBool ok;
 
+    js_LeaveTrace(cx);
     invokevp = js_AllocStack(cx, 2 + argc, &mark);
     if (!invokevp)
         return JS_FALSE;
 
     invokevp[0] = fval;
     invokevp[1] = OBJECT_TO_JSVAL(obj);
     memcpy(invokevp + 2, argv, argc * sizeof *argv);
 
@@ -1596,73 +1597,94 @@ js_CheckRedeclaration(JSContext *cx, JSO
 {
     JSObject *obj2;
     JSProperty *prop;
     uintN oldAttrs, report;
     JSBool isFunction;
     jsval value;
     const char *type, *name;
 
+    /*
+     * Both objp and propp must be either null or given. When given, *propp
+     * must be null. This way we avoid an extra "if (propp) *propp = NULL" for
+     * the common case of a non-existing property.
+     */
+    JS_ASSERT(!objp == !propp);
+    JS_ASSERT_IF(propp, !*propp);
+
+    /* The JSPROP_INITIALIZER case below may generate a warning. Since we must
+     * drop the property before reporting it, we insist on !propp to avoid
+     * looking up the property again after the reporting is done.
+     */
+    JS_ASSERT_IF(attrs & JSPROP_INITIALIZER, attrs == JSPROP_INITIALIZER);
+    JS_ASSERT_IF(attrs == JSPROP_INITIALIZER, !propp);
+
     if (!OBJ_LOOKUP_PROPERTY(cx, obj, id, &obj2, &prop))
         return JS_FALSE;
-    if (propp) {
-        *objp = obj2;
-        *propp = prop;
-    }
     if (!prop)
         return JS_TRUE;
 
-    /*
-     * Use prop as a speedup hint to OBJ_GET_ATTRIBUTES, but drop it on error.
-     * An assertion at label bad: will insist that it is null.
-     */
+    /* Use prop as a speedup hint to OBJ_GET_ATTRIBUTES. */
     if (!OBJ_GET_ATTRIBUTES(cx, obj2, id, prop, &oldAttrs)) {
         OBJ_DROP_PROPERTY(cx, obj2, prop);
-#ifdef DEBUG
-        prop = NULL;
-#endif
-        goto bad;
+        return JS_FALSE;
     }
 
     /*
-     * From here, return true, or else goto bad on failure to null out params.
      * If our caller doesn't want prop, drop it (we don't need it any longer).
      */
     if (!propp) {
         OBJ_DROP_PROPERTY(cx, obj2, prop);
         prop = NULL;
+    } else {
+        *objp = obj2;
+        *propp = prop;
     }
 
     if (attrs == JSPROP_INITIALIZER) {
         /* Allow the new object to override properties. */
         if (obj2 != obj)
             return JS_TRUE;
+
+        /* The property must be dropped already. */
+        JS_ASSERT(!prop);
         report = JSREPORT_WARNING | JSREPORT_STRICT;
     } else {
         /* We allow redeclaring some non-readonly properties. */
         if (((oldAttrs | attrs) & JSPROP_READONLY) == 0) {
-            /*
-             * Allow redeclaration of variables and functions, but insist that
-             * the new value is not a getter if the old value was, ditto for
-             * setters -- unless prop is impermanent (in which case anyone
-             * could delete it and redefine it, willy-nilly).
-             */
+            /* Allow redeclaration of variables and functions. */
             if (!(attrs & (JSPROP_GETTER | JSPROP_SETTER)))
                 return JS_TRUE;
+
+            /*
+             * Allow adding a getter only if a property already has a setter
+             * but no getter and similarly for adding a setter. That is, we
+             * allow only the following transitions:
+             *
+             *   no-property --> getter --> getter + setter
+             *   no-property --> setter --> getter + setter
+             */
             if ((~(oldAttrs ^ attrs) & (JSPROP_GETTER | JSPROP_SETTER)) == 0)
                 return JS_TRUE;
+
+            /*
+             * Allow redeclaration of an impermanent property (in which case
+             * anyone could delete it and redefine it, willy-nilly).
+             */
             if (!(oldAttrs & JSPROP_PERMANENT))
                 return JS_TRUE;
         }
+        if (prop)
+            OBJ_DROP_PROPERTY(cx, obj2, prop);
 
         report = JSREPORT_ERROR;
         isFunction = (oldAttrs & (JSPROP_GETTER | JSPROP_SETTER)) != 0;
         if (!isFunction) {
             if (!OBJ_GET_PROPERTY(cx, obj, id, &value))
-                goto bad;
+                return JS_FALSE;
             isFunction = VALUE_IS_FUNCTION(cx, value);
         }
     }
 
     type = (attrs == JSPROP_INITIALIZER)
            ? "property"
            : (oldAttrs & attrs & JSPROP_GETTER)
            ? js_getter_str
@@ -1670,29 +1692,21 @@ js_CheckRedeclaration(JSContext *cx, JSO
            ? js_setter_str
            : (oldAttrs & JSPROP_READONLY)
            ? js_const_str
            : isFunction
            ? js_function_str
            : js_var_str;
     name = js_ValueToPrintableString(cx, ID_TO_VALUE(id));
     if (!name)
-        goto bad;
+        return JS_FALSE;
     return JS_ReportErrorFlagsAndNumber(cx, report,
                                         js_GetErrorMessage, NULL,
                                         JSMSG_REDECLARED_VAR,
                                         type, name);
-
-bad:
-    if (propp) {
-        *objp = NULL;
-        *propp = NULL;
-    }
-    JS_ASSERT(!prop);
-    return JS_FALSE;
 }
 
 JSBool
 js_StrictlyEqual(JSContext *cx, jsval lval, jsval rval)
 {
     jsval ltag = JSVAL_TAG(lval), rtag = JSVAL_TAG(rval);
     jsdouble ld, rd;
 
@@ -5650,16 +5664,17 @@ js_Interpret(JSContext *cx)
             attrs = JSPROP_ENUMERATE;
             if (!(fp->flags & JSFRAME_EVAL))
                 attrs |= JSPROP_PERMANENT;
             if (op == JSOP_DEFCONST)
                 attrs |= JSPROP_READONLY;
 
             /* Lookup id in order to check for redeclaration problems. */
             id = ATOM_TO_JSID(atom);
+            prop = NULL;
             if (!js_CheckRedeclaration(cx, obj, id, attrs, &obj2, &prop))
                 goto error;
 
             /* Bind a variable only if it's not yet defined. */
             if (!prop) {
                 if (!OBJ_DEFINE_PROPERTY(cx, obj, id, JSVAL_VOID,
                                          JS_PropertyStub, JS_PropertyStub,
                                          attrs, &prop)) {
@@ -5672,21 +5687,21 @@ js_Interpret(JSContext *cx)
             /*
              * Try to optimize a property we either just created, or found
              * directly in the global object, that is permanent, has a slot,
              * and has stub getter and setter, into a "fast global" accessed
              * by the JSOP_*GVAR opcodes.
              */
             if (!fp->fun &&
                 index < GlobalVarCount(fp) &&
-                (attrs & JSPROP_PERMANENT) &&
                 obj2 == obj &&
                 OBJ_IS_NATIVE(obj)) {
                 sprop = (JSScopeProperty *) prop;
-                if (SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)) &&
+                if ((sprop->attrs & JSPROP_PERMANENT) &&
+                    SPROP_HAS_VALID_SLOT(sprop, OBJ_SCOPE(obj)) &&
                     SPROP_HAS_STUB_GETTER(sprop) &&
                     SPROP_HAS_STUB_SETTER(sprop)) {
                     /*
                      * Fast globals use frame variables to map the global
                      * name's atom index to the permanent fp->varobj slot
                      * number, tagged as a jsval. The atom index for the
                      * global's name literal is identical to its variable
                      * index.
@@ -6872,16 +6887,19 @@ js_Interpret(JSContext *cx)
           }
 
 #if !JS_THREADED_INTERP
         } /* switch (op) */
     } /* for (;;) */
 #endif /* !JS_THREADED_INTERP */
 
   error:
+    // Reset current pc location hinting.
+    cx->pcHint = NULL;
+
     if (fp->imacpc && cx->throwing) {
         // To keep things simple, we hard-code imacro exception handlers here.
         if (*fp->imacpc == JSOP_NEXTITER) {
             // pc may point to JSOP_DUP here due to bug 474854.
             JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP);
             if (js_ValueIsStopIteration(cx->exception)) {
                 cx->throwing = JS_FALSE;
                 cx->exception = JSVAL_VOID;
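
js_CheckRedeclaration now requires callers to pass *propp pre-nulled (hence the added "prop = NULL;" at the JSOP_DEFVAR site) and spells out exactly which getter/setter transitions are tolerated. The sketch below checks only that transition rule in isolation; GETTER and SETTER are illustrative flag values, not the real JSPROP_* constants.

    // The "no-property --> getter --> getter + setter" rule as a predicate.
    #include <cassert>

    enum { GETTER = 1, SETTER = 2 };

    // True if redeclaring with newAttrs is permitted for an existing property
    // with oldAttrs (both restricted to the GETTER/SETTER bits).
    static bool allowedTransition(unsigned oldAttrs, unsigned newAttrs) {
        // "(~(old ^ new) & (GETTER|SETTER)) == 0": the two must differ in both
        // bits, i.e. a lone getter may gain a setter and vice versa.
        return (~(oldAttrs ^ newAttrs) & (GETTER | SETTER)) == 0;
    }

    int main() {
        assert(allowedTransition(GETTER, SETTER));    // getter, then add setter
        assert(allowedTransition(SETTER, GETTER));    // setter, then add getter
        assert(!allowedTransition(GETTER, GETTER));   // re-adding a getter: report
        assert(!allowedTransition(GETTER | SETTER, GETTER));
        return 0;
    }
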
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -322,20 +322,30 @@ js_SetProtoOrParent(JSContext *cx, JSObj
                                  (slot == JSSLOT_PROTO) ? js_proto_str
                                                         : js_parent_str
 #endif
                                  );
         }
         return JS_FALSE;
     }
 
-    // Maintain the "any Array prototype has indexed properties hazard" flag.
+    /*
+     * Maintain the "any Array prototype has indexed properties hazard" flag by
+     * conservatively setting it. We simply don't know what pobj has in the way
+     * of indexed properties, either directly or along its prototype chain, and
+     * we won't expend effort here to find out. We do know that if obj is not
+     * an array or a prototype (delegate), then we're ok. And, of course, pobj
+     * must be non-null.
+     *
+     * This pessimistic approach could be improved, but setting __proto__ is
+     * quite rare and arguably deserving of deoptimization.
+     */
     if (slot == JSSLOT_PROTO &&
-        OBJ_IS_ARRAY(cx, pobj) &&
-        pobj->fslots[JSSLOT_ARRAY_LENGTH] != 0) {
+        pobj &&
+        (OBJ_IS_ARRAY(cx, obj) || OBJ_IS_DELEGATE(cx, obj))) {
         rt->anyArrayProtoHasElement = JS_TRUE;
     }
     return JS_TRUE;
 }
 
 static JSHashNumber
 js_hash_object(const void *key)
 {
@@ -1677,20 +1687,22 @@ js_HasOwnProperty(JSContext *cx, JSLooku
 
 #ifdef JS_TRACER
 static int32 FASTCALL
 Object_p_hasOwnProperty(JSContext* cx, JSObject* obj, JSString *str)
 {
     jsid id;
     jsval v;
 
-    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(str), &id))
+    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(str), &id) ||
+        !js_HasOwnProperty(cx, obj->map->ops->lookupProperty, obj, id, &v)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return JSVAL_TO_BOOLEAN(JSVAL_VOID);
-    if (!js_HasOwnProperty(cx, obj->map->ops->lookupProperty, obj, id, &v))
-        return JSVAL_TO_BOOLEAN(JSVAL_VOID);
+    }
+
     JS_ASSERT(JSVAL_IS_BOOLEAN(v));
     return JSVAL_TO_BOOLEAN(v);
 }
 #endif
 
 /* Proposed ECMA 15.2.4.6. */
 static JSBool
 obj_isPrototypeOf(JSContext *cx, uintN argc, jsval *vp)
@@ -1720,18 +1732,22 @@ obj_propertyIsEnumerable(JSContext *cx, 
 }
 
 #ifdef JS_TRACER
 static int32 FASTCALL
 Object_p_propertyIsEnumerable(JSContext* cx, JSObject* obj, JSString *str)
 {
     jsid id = ATOM_TO_JSID(STRING_TO_JSVAL(str));
     jsval v;
-    if (!js_PropertyIsEnumerable(cx, obj, id, &v))
+
+    if (!js_PropertyIsEnumerable(cx, obj, id, &v)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return JSVAL_TO_BOOLEAN(JSVAL_VOID);
+    }
+
     JS_ASSERT(JSVAL_IS_BOOLEAN(v));
     return JSVAL_TO_BOOLEAN(v);
 }
 #endif
 
 JSBool
 js_PropertyIsEnumerable(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
 {
@@ -1927,21 +1943,21 @@ const char js_propertyIsEnumerable_str[]
 #if JS_HAS_GETTER_SETTER
 const char js_defineGetter_str[] = "__defineGetter__";
 const char js_defineSetter_str[] = "__defineSetter__";
 const char js_lookupGetter_str[] = "__lookupGetter__";
 const char js_lookupSetter_str[] = "__lookupSetter__";
 #endif
 
 JS_DEFINE_TRCINFO_1(obj_valueOf,
-    (3, (static, JSVAL, Object_p_valueOf, CONTEXT, THIS, STRING,                  0, 0)))
+    (3, (static, JSVAL,      Object_p_valueOf,              CONTEXT, THIS, STRING,  0, 0)))
 JS_DEFINE_TRCINFO_1(obj_hasOwnProperty,
-    (3, (static, BOOL_FAIL, Object_p_hasOwnProperty, CONTEXT, THIS, STRING,       0, 0)))
+    (3, (static, BOOL_FAIL, Object_p_hasOwnProperty,        CONTEXT, THIS, STRING,  0, 0)))
 JS_DEFINE_TRCINFO_1(obj_propertyIsEnumerable,
-    (3, (static, BOOL_FAIL, Object_p_propertyIsEnumerable, CONTEXT, THIS, STRING, 0, 0)))
+    (3, (static, BOOL_FAIL, Object_p_propertyIsEnumerable,  CONTEXT, THIS, STRING,  0, 0)))
 
 static JSFunctionSpec object_methods[] = {
 #if JS_HAS_TOSOURCE
     JS_FN(js_toSource_str,             obj_toSource,                0,0),
 #endif
     JS_FN(js_toString_str,             obj_toString,                0,0),
     JS_FN(js_toLocaleString_str,       obj_toLocaleString,          0,0),
     JS_TN(js_valueOf_str,              obj_valueOf,                 0,0,
@@ -3384,29 +3400,34 @@ js_DefineProperty(JSContext *cx, JSObjec
                                    0, 0, propp);
 }
 
 /*
  * Backward compatibility requires allowing addProperty hooks to mutate the
  * nominal initial value of a slot-full property, while GC safety wants that
  * value to be stored before the call-out through the hook.  Optimize to do
  * both while saving cycles for classes that stub their addProperty hook.
+ *
+ * As in js_SetProtoOrParent (see above), we maintain the "any Array prototype
+ * has indexed properties hazard" flag by conservatively setting it.
  */
 #define ADD_PROPERTY_HELPER(cx,clasp,obj,scope,sprop,vp,cleanup)              \
     JS_BEGIN_MACRO                                                            \
         if ((clasp)->addProperty != JS_PropertyStub) {                        \
             jsval nominal_ = *(vp);                                           \
             if (!(clasp)->addProperty(cx, obj, SPROP_USERID(sprop), vp)) {    \
                 cleanup;                                                      \
             }                                                                 \
             if (*(vp) != nominal_) {                                          \
                 if (SPROP_HAS_VALID_SLOT(sprop, scope))                       \
                     LOCKED_OBJ_WRITE_BARRIER(cx, obj, (sprop)->slot, *(vp));  \
             }                                                                 \
         }                                                                     \
+        if (STOBJ_IS_DELEGATE(obj) && JSID_IS_INT(sprop->id))                 \
+            cx->runtime->anyArrayProtoHasElement = JS_TRUE;                   \
     JS_END_MACRO
 
 JSBool
 js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, jsval value,
                         JSPropertyOp getter, JSPropertyOp setter, uintN attrs,
                         uintN flags, intN shortid, JSProperty **propp)
 {
     JSClass *clasp;
@@ -3891,59 +3912,79 @@ js_NativeSet(JSContext *cx, JSObject *ob
          SCOPE_GET_PROPERTY(scope, sprop->id) == sprop)) {
   set_slot:
         LOCKED_OBJ_WRITE_BARRIER(cx, obj, slot, *vp);
     }
 
     return JS_TRUE;
 }
 
+/*
+ * Find out where we currently are in the code. If no hint was supplied,
+ * de-optimize and consult the stack frame.
+ */
+static jsbytecode*
+js_GetCurrentBytecodePC(JSContext* cx)
+{
+    jsbytecode *pc = cx->pcHint;
+    if (!pc || !JS_ON_TRACE(cx)) {
+        JSStackFrame* fp = js_GetTopStackFrame(cx);
+        if (fp && fp->regs) {
+            pc = fp->regs->pc;
+            // FIXME: Set pc to imacpc when recording JSOP_CALL inside the 
+            //        JSOP_GETELEM imacro (bug 476559).
+            if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
+                pc = fp->imacpc;
+        } else {
+            pc = NULL;
+        }
+    }
+    return pc;
+}
+
 JSBool
 js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
                      JSPropCacheEntry **entryp)
 {
     uint32 shape;
     int protoIndex;
     JSObject *obj2;
     JSProperty *prop;
-    JSStackFrame *fp;
     JSScopeProperty *sprop;
 
     JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx));
     /* Convert string indices to integers if appropriate. */
     CHECK_FOR_STRING_INDEX(id);
     JS_COUNT_OPERATION(cx, JSOW_GET_PROPERTY);
 
     shape = OBJ_SHAPE(obj);
     protoIndex = js_LookupPropertyWithFlags(cx, obj, id, cx->resolveFlags,
                                             &obj2, &prop);
     if (protoIndex < 0)
         return JS_FALSE;
     if (!prop) {
-        jsbytecode *pc;
-
         *vp = JSVAL_VOID;
 
         if (!OBJ_GET_CLASS(cx, obj)->getProperty(cx, obj, ID_TO_VALUE(id), vp))
             return JS_FALSE;
 
         if (entryp) {
             PCMETER(JS_PROPERTY_CACHE(cx).nofills++);
             *entryp = NULL;
         }
 
         /*
          * Give a strict warning if foo.bar is evaluated by a script for an
          * object foo with no property named 'bar'.
          */
-        if (JSVAL_IS_VOID(*vp) && (fp = js_GetTopStackFrame(cx)) && fp->regs) {
+        jsbytecode *pc;
+        if (JSVAL_IS_VOID(*vp) && ((pc = js_GetCurrentBytecodePC(cx)) != NULL)) {
             JSOp op;
             uintN flags;
 
-            pc = fp->regs->pc;
             op = (JSOp) *pc;
             if (op == JSOP_GETXPROP) {
                 flags = JSREPORT_ERROR;
             } else {
                 if (!JS_HAS_STRICT_OPTION(cx) ||
                     (op != JSOP_GETPROP && op != JSOP_GETELEM)) {
                     return JS_TRUE;
                 }
@@ -3951,17 +3992,16 @@ js_GetPropertyHelper(JSContext *cx, JSOb
                 /*
                  * XXX do not warn about missing __iterator__ as the function
                  * may be called from JS_GetMethodById. See bug 355145.
                  */
                 if (id == ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom))
                     return JS_TRUE;
 
                 /* Kludge to allow (typeof foo == "undefined") tests. */
-                JS_ASSERT(fp->script);
                 pc += js_CodeSpec[op].length;
                 if (Detecting(cx, pc))
                     return JS_TRUE;
 
                 flags = JSREPORT_WARNING | JSREPORT_STRICT;
             }
 
             /* Ok, bad undefined property reference: whine about it. */
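
The new js_GetCurrentBytecodePC trusts cx->pcHint only while on trace and otherwise de-optimizes by reading the topmost frame's regs->pc (the imacro FIXME aside). A simplified model of that fallback, with stand-in types rather than the real JSContext/JSStackFrame:

    // Prefer the on-trace pc hint, fall back to the interpreter frame.
    #include <cstdio>

    using bytecode = unsigned char;

    struct FakeFrameRegs { const bytecode* pc; };
    struct FakeFrame     { FakeFrameRegs* regs; };
    struct FakeCx {
        const bytecode* pcHint;   // set via BEGIN_PC_HINT/END_PC_HINT
        bool onTrace;
        FakeFrame* topFrame;
    };

    static const bytecode* currentPC(const FakeCx* cx) {
        if (cx->pcHint && cx->onTrace)           // trust the hint only on trace
            return cx->pcHint;
        if (cx->topFrame && cx->topFrame->regs)  // de-optimize: read the frame
            return cx->topFrame->regs->pc;
        return nullptr;
    }

    int main() {
        bytecode script[4] = {};
        FakeFrameRegs regs = { script + 2 };
        FakeFrame frame = { &regs };
        FakeCx onTrace  = { script + 1, true,  nullptr };
        FakeCx inInterp = { nullptr,    false, &frame  };
        std::printf("on trace: pc offset %td\n", currentPC(&onTrace) - script);
        std::printf("interp:   pc offset %td\n", currentPC(&inInterp) - script);
    }
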
--- a/js/src/jsregexp.cpp
+++ b/js/src/jsregexp.cpp
@@ -59,16 +59,17 @@
 #include "jsinterp.h"
 #include "jslock.h"
 #include "jsnum.h"
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsregexp.h"
 #include "jsscan.h"
 #include "jsscope.h"
+#include "jsstaticcheck.h"
 #include "jsstr.h"
 
 #ifdef JS_TRACER
 #include "jstracer.h"
 using namespace avmplus;
 using namespace nanojit;
 #endif
 
@@ -3827,24 +3828,27 @@ MatchRegExp(REGlobalData *gData, REMatch
     NativeRegExp native;
 
     /* Run with native regexp if possible. */
     if (TRACING_ENABLED(gData->cx) && 
         (native = GetNativeRegExp(gData->cx, gData->regexp))) {
         gData->skipped = (ptrdiff_t) x->cp;
 
 #ifdef JS_JIT_SPEW
-        {
-            JSStackFrame *caller = js_GetScriptedCaller(gData->cx, NULL);
-            debug_only_v(printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
-                                caller ? caller->script->filename : "<unknown>",
-                                caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
-                                caller ? FramePCOffset(caller) : 0,
-                                (void *) native););
-        }
+        debug_only_v({
+            VOUCH_DOES_NOT_REQUIRE_STACK();
+            JSStackFrame *caller = (JS_ON_TRACE(gData->cx))
+                                   ? NULL
+                                   : js_GetScriptedCaller(gData->cx, NULL);
+            printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
+                   caller ? caller->script->filename : "<unknown>",
+                   caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
+                   caller ? FramePCOffset(caller) : 0,
+                   (void *) native);
+        })
 #endif
 
 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
         SIMULATE_FASTCALL(result, x, gData, native);
 #else
         result = native(x, gData);
 #endif
 
@@ -4814,17 +4818,17 @@ Regexp_p_test(JSContext* cx, JSObject* r
 {
     jsval vp[3] = { JSVAL_NULL, OBJECT_TO_JSVAL(regexp), STRING_TO_JSVAL(str) };
     if (!regexp_exec_sub(cx, regexp, 1, vp + 2, JS_TRUE, vp))
         return JSVAL_TO_BOOLEAN(JSVAL_VOID);
     return *vp == JSVAL_TRUE;
 }
 
 JS_DEFINE_TRCINFO_1(regexp_test,
-    (3, (static, BOOL_FAIL, Regexp_p_test, CONTEXT, THIS, STRING,  1, 1)))
+    (3, (static, BOOL_RETRY, Regexp_p_test, CONTEXT, THIS, STRING,  1, 1)))
 
 #endif
 
 static JSFunctionSpec regexp_methods[] = {
 #if JS_HAS_TOSOURCE
     JS_FN(js_toSource_str,  regexp_toString,    0,0),
 #endif
     JS_FN(js_toString_str,  regexp_toString,    0,0),
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -1339,52 +1339,43 @@ match_or_replace(JSContext *cx,
                         break;
                     index++;
                 }
             }
             if (!ok && destroy)
                 destroy(cx, data);
         }
     } else {
-        jsval savedObject = JSVAL_NULL;
-
         if (GET_MODE(data->flags) == MODE_REPLACE) {
             test = JS_TRUE;
         } else {
             /*
              * MODE_MATCH implies str_match is being called from a script or a
              * scripted function.  If the caller cares only about testing null
              * vs. non-null return value, optimize away the array object that
-             * would normally be returned in *vp.  Instead return an arbitrary
-             * object (not JSVAL_TRUE, for type map integrity; see bug 453564).
-             * The caller provides the object in *vp and is responsible for
-             * rooting it elsewhere.
+             * would normally be returned in *vp.
              *
              * Assume a full array result is required, then prove otherwise.
              */
             test = JS_FALSE;
             if (data->pc && (*data->pc == JSOP_CALL || *data->pc == JSOP_NEW)) {
                 JS_ASSERT(js_CodeSpec[*data->pc].length == 3);
                 switch (data->pc[3]) {
                   case JSOP_POP:
                   case JSOP_IFEQ:
                   case JSOP_IFNE:
                   case JSOP_IFEQX:
                   case JSOP_IFNEX:
                     test = JS_TRUE;
-                    savedObject = *vp;
-                    JS_ASSERT(!JSVAL_IS_PRIMITIVE(savedObject));
                     break;
                   default:;
                 }
             }
         }
         ok = js_ExecuteRegExp(cx, re, str, &index, test, vp);
-        if (ok && !JSVAL_IS_NULL(savedObject) && *vp == JSVAL_TRUE)
-            *vp = savedObject;
     }
 
     DROP_REGEXP(cx, re);
     if (reobj) {
         /* Tell our caller that it doesn't need to destroy data->regexp. */
         data->flags &= ~KEEP_REGEXP;
     } else if (!ok || !(data->flags & KEEP_REGEXP)) {
         /* Caller didn't want to keep data->regexp, so null and destroy it.  */
@@ -1449,42 +1440,39 @@ StringMatchHelper(JSContext *cx, uintN a
 
 static JSBool
 str_match(JSContext *cx, uintN argc, jsval *vp)
 {
     JSStackFrame *fp;
 
     for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
         JS_ASSERT(!fp->script);
-
-    /* Root the object in vp[0].  See comment in match_or_replace. */
-    JSAutoTempValueRooter tvr(cx, vp[0]);
     return StringMatchHelper(cx, argc, vp, fp ? fp->regs->pc : NULL);
 }
 
 #ifdef JS_TRACER
 static jsval FASTCALL
 String_p_match(JSContext* cx, JSString* str, jsbytecode *pc, JSObject* regexp)
 {
-    /* arbitrary object in vp[0] */
-    jsval vp[3] = { OBJECT_TO_JSVAL(regexp), STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
-    if (!StringMatchHelper(cx, 1, vp, pc))
-        return JSVAL_ERROR_COOKIE;
-    JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
+    jsval vp[3] = { JSVAL_NULL, STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
+    if (!StringMatchHelper(cx, 1, vp, pc)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
+        return JSVAL_VOID;
+    }
     return vp[0];
 }
 
 static jsval FASTCALL
 String_p_match_obj(JSContext* cx, JSObject* str, jsbytecode *pc, JSObject* regexp)
 {
-    /* arbitrary object in vp[0] */
-    jsval vp[3] = { OBJECT_TO_JSVAL(regexp), OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
-    if (!StringMatchHelper(cx, 1, vp, pc))
-        return JSVAL_ERROR_COOKIE;
-    JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
+    jsval vp[3] = { JSVAL_NULL, OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
+    if (!StringMatchHelper(cx, 1, vp, pc)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
+        return JSVAL_VOID;
+    }
     return vp[0];
 }
 #endif
 
 static JSBool
 str_search(JSContext *cx, uintN argc, jsval *vp)
 {
     GlobData data;
@@ -2495,42 +2483,42 @@ js_String_getelem(JSContext* cx, JSStrin
     return js_GetUnitString(cx, str, (size_t)i);
 }
 #endif
 
 JS_DEFINE_CALLINFO_2(extern, BOOL,   js_EqualStrings, STRING, STRING,                       1, 1)
 JS_DEFINE_CALLINFO_2(extern, INT32,  js_CompareStrings, STRING, STRING,                     1, 1)
 
 JS_DEFINE_TRCINFO_1(str_toString,
-    (2, (extern, STRING_FAIL,      String_p_toString, CONTEXT, THIS,                        1, 1)))
+    (2, (extern, STRING_FAIL,       String_p_toString, CONTEXT, THIS,                        1, 1)))
 JS_DEFINE_TRCINFO_2(str_substring,
-    (4, (static, STRING_FAIL,      String_p_substring, CONTEXT, THIS_STRING, INT32, INT32,   1, 1)),
-    (3, (static, STRING_FAIL,      String_p_substring_1, CONTEXT, THIS_STRING, INT32,        1, 1)))
+    (4, (static, STRING_RETRY,      String_p_substring, CONTEXT, THIS_STRING, INT32, INT32,   1, 1)),
+    (3, (static, STRING_RETRY,      String_p_substring_1, CONTEXT, THIS_STRING, INT32,        1, 1)))
 JS_DEFINE_TRCINFO_1(str_charAt,
-    (3, (extern, STRING_FAIL,      js_String_getelem, CONTEXT, THIS_STRING, INT32,           1, 1)))
+    (3, (extern, STRING_RETRY,      js_String_getelem, CONTEXT, THIS_STRING, INT32,           1, 1)))
 JS_DEFINE_TRCINFO_1(str_charCodeAt,
-    (2, (extern, INT32_FAIL,       js_String_p_charCodeAt, THIS_STRING, INT32,               1, 1)))
+    (2, (extern, INT32_RETRY,       js_String_p_charCodeAt, THIS_STRING, INT32,               1, 1)))
 JS_DEFINE_TRCINFO_4(str_concat,
-    (3, (static, STRING_FAIL,      String_p_concat_1int, CONTEXT, THIS_STRING, INT32,        1, 1)),
-    (3, (extern, STRING_FAIL,      js_ConcatStrings, CONTEXT, THIS_STRING, STRING,           1, 1)),
-    (4, (static, STRING_FAIL,      String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
-    (5, (static, STRING_FAIL,      String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
+    (3, (static, STRING_RETRY,      String_p_concat_1int, CONTEXT, THIS_STRING, INT32,        1, 1)),
+    (3, (extern, STRING_RETRY,      js_ConcatStrings, CONTEXT, THIS_STRING, STRING,           1, 1)),
+    (4, (static, STRING_RETRY,      String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
+    (5, (static, STRING_RETRY,      String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
 JS_DEFINE_TRCINFO_2(str_match,
-    (4, (static, JSVAL_FAIL,       String_p_match, CONTEXT, THIS_STRING, PC, REGEXP,         1, 1)),
-    (4, (static, JSVAL_FAIL,       String_p_match_obj, CONTEXT, THIS, PC, REGEXP,            1, 1)))
+    (4, (static, JSVAL_FAIL,        String_p_match, CONTEXT, THIS_STRING, PC, REGEXP,         1, 1)),
+    (4, (static, JSVAL_FAIL,        String_p_match_obj, CONTEXT, THIS, PC, REGEXP,            1, 1)))
 JS_DEFINE_TRCINFO_3(str_replace,
-    (4, (static, STRING_FAIL,      String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
-    (4, (static, STRING_FAIL,      String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
-    (5, (static, STRING_FAIL,      String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
+    (4, (static, STRING_RETRY,      String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
+    (4, (static, STRING_RETRY,      String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
+    (5, (static, STRING_RETRY,      String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
 JS_DEFINE_TRCINFO_1(str_split,
-    (3, (static, OBJECT_FAIL_NULL, String_p_split, CONTEXT, THIS_STRING, STRING,             0, 0)))
+    (3, (static, OBJECT_RETRY_NULL, String_p_split, CONTEXT, THIS_STRING, STRING,             0, 0)))
 JS_DEFINE_TRCINFO_1(str_toLowerCase,
-    (2, (extern, STRING_FAIL,      js_toLowerCase, CONTEXT, THIS_STRING,                     1, 1)))
+    (2, (extern, STRING_RETRY,      js_toLowerCase, CONTEXT, THIS_STRING,                     1, 1)))
 JS_DEFINE_TRCINFO_1(str_toUpperCase,
-    (2, (extern, STRING_FAIL,      js_toUpperCase, CONTEXT, THIS_STRING,                     1, 1)))
+    (2, (extern, STRING_RETRY,      js_toUpperCase, CONTEXT, THIS_STRING,                     1, 1)))
 
 #define GENERIC           JSFUN_GENERIC_NATIVE
 #define PRIMITIVE         JSFUN_THISP_PRIMITIVE
 #define GENERIC_PRIMITIVE (GENERIC | PRIMITIVE)
 
 static JSFunctionSpec string_methods[] = {
 #if JS_HAS_TOSOURCE
     JS_FN("quote",             str_quote,             0,GENERIC_PRIMITIVE),
@@ -2657,17 +2645,17 @@ String_fromCharCode(JSContext* cx, int32
     jschar c = (jschar)i;
     if (c < UNIT_STRING_LIMIT)
         return js_GetUnitStringForChar(cx, c);
     return js_NewStringCopyN(cx, &c, 1);
 }
 #endif
 
 JS_DEFINE_TRCINFO_1(str_fromCharCode,
-    (2, (static, STRING_FAIL, String_fromCharCode, CONTEXT, INT32, 1, 1)))
+    (2, (static, STRING_RETRY, String_fromCharCode, CONTEXT, INT32, 1, 1)))
 
 static JSFunctionSpec string_static_methods[] = {
     JS_TN("fromCharCode", str_fromCharCode, 1, 0, str_fromCharCode_trcinfo),
     JS_FS_END
 };
 
 static JSHashNumber
 js_hash_string_pointer(const void *key)
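
Most of the jsstr.cpp table changes are recategorizations from _FAIL to _RETRY: these string builtins can signal "redo this operation in the interpreter" purely through a sentinel return value (NULL for STRING_RETRY, a negative value for INT32_RETRY) because they have no side effects to undo. A hedged sketch of that protocol; FakeStr, toUpperOrRetry and needsSlowPath are illustrative only.

    // A STRING_RETRY-style builtin: NULL does not mean "error", it means
    // "bail off trace and retry the same operation in the interpreter".
    #include <cstdio>

    struct FakeStr { const char* chars; };

    static const FakeStr* toUpperOrRetry(const FakeStr* s, bool needsSlowPath) {
        (void)s;
        if (needsSlowPath)                 // e.g. would have to reenter the interpreter
            return nullptr;                // sentinel: caller leaves the trace
        static FakeStr result = { "HI" };  // stand-in for the real conversion
        return &result;
    }

    int main() {
        FakeStr s = { "hi" };
        std::printf("%s\n", toUpperOrRetry(&s, false) ? "stayed on trace" : "retry in interpreter");
        std::printf("%s\n", toUpperOrRetry(&s, true)  ? "stayed on trace" : "retry in interpreter");
    }
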
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1213,17 +1213,17 @@ TraceRecorder::TraceRecorder(JSContext* 
     lir->ins0(LIR_start);
 
     if (!nanojit::AvmCore::config.tree_opt || fragment->root == fragment) 
         lirbuf->state = addName(lir->insParam(0, 0), "state");
 
     lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
     lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
     cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
-    gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
+    gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, global)), "gp");
     eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
     eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
     globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj");
 
     /* If we came from exit, we might not have enough global types. */
     if (ti->globalSlots->length() > ti->nGlobalTypes()) {
         ti->typeMap.captureMissingGlobalTypes(cx,
                                               *(ti->globalSlots),
@@ -1587,16 +1587,17 @@ NativeToValue(JSContext* cx, jsval& v, u
         JS_ASSERT(*(JSObject**)slot == NULL);
         v = JSVAL_NULL;
         debug_only_v(printf("null<%p> ", *(JSObject**)slot));
         break;
       default:
         JS_ASSERT(type == JSVAL_OBJECT);
         v = OBJECT_TO_JSVAL(*(JSObject**)slot);
         JS_ASSERT(JSVAL_TAG(v) == JSVAL_OBJECT); /* if this fails the pointer was not aligned */
+        JS_ASSERT(v != JSVAL_ERROR_COOKIE); /* don't leak JSVAL_ERROR_COOKIE */
         debug_only_v(printf("object<%p:%s> ", JSVAL_TO_OBJECT(v),
                             JSVAL_IS_NULL(v)
                             ? "null"
                             : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
         break;
     }
 }
 
@@ -2092,17 +2093,17 @@ TraceRecorder::snapshot(ExitType exitTyp
     if (exitType == BRANCH_EXIT && js_IsLoopExit(pc, (jsbytecode*)fragment->root->ip))
         exitType = LOOP_EXIT;
 
     /* Check for a return-value opcode that needs to restart at the next instruction. */
     const JSCodeSpec& cs = js_CodeSpec[*pc];
 
     /* WARNING: don't return before restoring the original pc if (resumeAfter). */
     bool resumeAfter = (pendingTraceableNative &&
-                        JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL);
+                        JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
     if (resumeAfter) {
         JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY);
         pc += cs.length;
         regs->pc = pc;
         MUST_FLOW_THROUGH("restore_pc");
     }
 
     /* Generate the entry map for the (possibly advanced) pc and stash it in the trace. */
@@ -2124,17 +2125,18 @@ TraceRecorder::snapshot(ExitType exitTyp
     FORALL_SLOTS(cx, ngslots, treeInfo->globalSlots->data(), callDepth,
         *m++ = determineSlotType(vp);
     );
     JS_ASSERT(unsigned(m - typemap) == ngslots + stackSlots);
 
     /* If we are capturing the stack state on a specific instruction, the value on
        the top of the stack is a boxed value. */
     if (resumeAfter) {
-        typemap[stackSlots - 1] = JSVAL_BOXED;
+        if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER)
+            typemap[stackSlots - 1] = JSVAL_BOXED;
 
         /* Now restore the original pc (after which early returns are ok). */
         MUST_FLOW_LABEL(restore_pc);
         regs->pc = pc - cs.length;
     } else {
         /* If we take a snapshot on a goto, advance to the target address. This avoids inner
            trees returning on a break goto, which the outer recorder then would confuse with
            a break in the outer tree. */
@@ -3003,17 +3005,17 @@ js_CheckGlobalObjectShape(JSContext* cx,
         return true;
     }
 
     /* No recorder; search for a tracked global state (or allocate one). */
     for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
 
         GlobalState &state = tm->globalStates[i];
 
-        if (state.globalShape == -1) {
+        if (state.globalShape == (uint32) -1) {
             state.globalShape = globalShape;
             JS_ASSERT(state.globalSlots);
             JS_ASSERT(state.globalSlots->length() == 0);
         }
 
         if (tm->globalStates[i].globalShape == globalShape) {
             if (shape)
                 *shape = globalShape;
@@ -3033,16 +3035,19 @@ js_CheckGlobalObjectShape(JSContext* cx,
 static JS_REQUIRES_STACK bool
 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
                  unsigned stackSlots, unsigned ngslots, uint8* typeMap, 
                  VMSideExit* expectedInnerExit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(f->root != f || !cx->fp->imacpc);
 
+    if (JS_TRACE_MONITOR(cx).prohibitRecording)
+        return false;
+
     /* start recording if no exception during construction */
     tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
                                            stackSlots, ngslots, typeMap,
                                            expectedInnerExit, outer);
     if (cx->throwing) {
         js_AbortRecording(cx, "setting up recorder failed");
         return false;
     }
@@ -3849,114 +3854,138 @@ js_FindVMCompatiblePeer(JSContext* cx, F
             continue;
         debug_only_v(printf("checking vm types %p (ip: %p): ", f, f->ip);)
         if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
             return f;
     }
     return NULL;
 }
 
+static void
+LeaveTree(InterpState&, VMSideExit* lr);
+
 /**
  * Executes a tree.
  */
-static VMSideExit*
-js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, 
+static JS_REQUIRES_STACK VMSideExit*
+js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
                VMSideExit** innermostNestedGuardp)
 {
     JS_ASSERT(f->code() && f->vmprivate);
+    JS_ASSERT(cx->builtinStatus == 0);
 
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
     unsigned ngslots = ti->globalSlots->length();
     uint16* gslots = ti->globalSlots->data();
-    unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
-    double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
-    double stack_buffer[MAX_NATIVE_STACK_SLOTS];
-    double* stack = stack_buffer;
+
+    InterpState state;
+
+    state.cx = cx;
+    state.globalObj = globalObj;
+    state.inlineCallCountp = &inlineCallCount;
+    state.innermostNestedGuardp = innermostNestedGuardp;
+    state.outermostTree = ti;
+    state.lastTreeExitGuard = NULL;
+    state.lastTreeCallGuard = NULL;
+    state.rpAtLastTreeCall = NULL;
 
     /* Make sure the global object is sane. */
-    JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape)); 
+    JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
     /* Make sure our caller replenished the double pool. */
     JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
 
     /* Reserve objects and stack space now, to make leaving the tree infallible. */
-    void *reserve;
-    void *stackMark = JS_ARENA_MARK(&cx->stackPool);
     if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
         return NULL;
+    
+    /* Set up the native global frame. */
+    unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
+    state.global = (double*)alloca((globalFrameSize+1) * sizeof(double));
+
+    /* Set up the native stack frame. */
+    double stack_buffer[MAX_NATIVE_STACK_SLOTS];
+    state.stackBase = stack_buffer;
+    double* entry_sp = &stack_buffer[ti->nativeStackBase/sizeof(double)];
+    state.sp = entry_sp;
+    state.eos = state.sp + MAX_NATIVE_STACK_SLOTS;
+
+    /* Set up the native call stack frame. */
+    FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
+    state.callstackBase = callstack_buffer;
+    state.rp = callstack_buffer;
+    state.eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
+
+    void *reserve;
+    state.stackMark = JS_ARENA_MARK(&cx->stackPool);
     JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
     if (!reserve)
         return NULL;
 
 #ifdef DEBUG
-    bool jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
+    state.jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
     memset(stack_buffer, 0xCD, sizeof(stack_buffer));
-    memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
-#endif    
-
-    debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
+    memset(state.global, 0xCD, (globalFrameSize+1)*sizeof(double));
+#endif
+
+    debug_only(*(uint64*)&state.global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
     debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                         cx->fp->script->filename,
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp),
                         ti->maxNativeStackSlots,
                         f->code());)
-    
+
     JS_ASSERT(ti->nGlobalTypes() == ngslots);
-    
-    if (ngslots) 
-        BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
-    BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack);
-
-    double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
-    FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
-    FrameInfo** callstack = callstack_buffer;
-
-    InterpState state;
-    state.sp = (void*)entry_sp;
-    state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
-    state.rp = callstack;
-    state.eor = callstack + MAX_CALL_STACK_ENTRIES;
-    state.gp = global;
-    state.cx = cx;
-    state.globalObj = globalObj;
-    state.lastTreeExitGuard = NULL;
-    state.lastTreeCallGuard = NULL;
-    state.rpAtLastTreeCall = NULL;
+
+    if (ngslots)
+        BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), state.global);
+    BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
+
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
     u.code = f->code();
 
-#ifdef JS_JIT_SPEW
-#if defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__))
-    uint64 start = rdtsc();
-#endif
+#ifdef EXECUTE_TREE_TIMER
+    state.startTime = rdtsc();
 #endif
 
     /* Set a flag that indicates to the runtime system that we are running in native code
        now and we don't want automatic GC to happen. Instead we will get a silent failure,
        which will cause a trace exit at which point the interpreter re-tries the operation
        and eventually triggers the GC. */
     JS_ASSERT(!tm->onTrace);
     tm->onTrace = true;
-    
+    cx->interpState = &state;
+
     debug_only(fflush(NULL);)
     GuardRecord* rec;
 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
     SIMULATE_FASTCALL(rec, &state, NULL, u.func);
 #else
     rec = u.func(&state, NULL);
 #endif
     VMSideExit* lr = (VMSideExit*)rec->exit;
 
     AUDIT(traceTriggered);
 
     JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
-
     tm->onTrace = false;
+    LeaveTree(state, lr);
+    return state.innermost;
+}
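
/*
 * A minimal usage sketch of the js_ExecuteTree/LeaveTree split above; the
 * function and variable names here are hypothetical. Callers such as
 * js_MonitorLoopEdge run the tree and then dispatch on the side exit that
 * LeaveTree stores in state.innermost.
 */
static JS_REQUIRES_STACK VMSideExit*
ExampleRunTree(JSContext* cx, Fragment* f, uintN& inlineCallCount)
{
    VMSideExit* innermostNestedGuard = NULL;
    VMSideExit* lr = js_ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
    if (!lr)
        return NULL;  /* could not reserve objects/stack space; interpret instead */
    /* The caller inspects lr->exitType (BRANCH_EXIT, STATUS_EXIT, ...) here. */
    return lr;
}
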
+
+static JS_FORCES_STACK void
+LeaveTree(InterpState& state, VMSideExit* lr)
+{
+    VOUCH_DOES_NOT_REQUIRE_STACK();
+
+    JSContext* cx = state.cx;
+    FrameInfo** callstack = state.callstackBase;
+    double* stack = state.stackBase;
 
     /* Except if we find that this is a nested bailout, the guard the call returned is the
        one we have to use to adjust pc and sp. */
     VMSideExit* innermost = lr;
 
     /* While executing a tree we do not update state.sp and state.rp even if they grow. Instead,
        guards tell us by how much sp and rp should be incremented in case of a side exit. When
        calling a nested tree, however, we actively adjust sp and rp. If we have such frames
@@ -3979,81 +4008,122 @@ js_ExecuteTree(JSContext* cx, Fragment* 
             rp += lr->calldepth;
         } else {
             /* During unwinding state.rp gets overwritten at every step and we restore
                it here to its state at the innermost nested guard. The builtin already
                added the calldepth of that innermost guard to rpAtLastTreeCall. */
             rp = (FrameInfo**)state.rpAtLastTreeCall;
         }
         innermost = state.lastTreeExitGuard;
-        if (innermostNestedGuardp)
-            *innermostNestedGuardp = nested;
+        if (state.innermostNestedGuardp)
+            *state.innermostNestedGuardp = nested;
         JS_ASSERT(nested);
         JS_ASSERT(nested->exitType == NESTED_EXIT);
         JS_ASSERT(state.lastTreeExitGuard);
         JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
     }
 
-    JS_ARENA_RELEASE(&cx->stackPool, stackMark);
+    int32_t bs = cx->builtinStatus;
+    cx->builtinStatus = 0;
+    bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
+    if (bailed)
+        JS_TRACE_MONITOR(cx).prohibitRecording = false;
+    if (bailed && !(bs & JSBUILTIN_ERROR)) {
+        /*
+         * Deep-bail case.
+         *
+         * A _FAIL native already called LeaveTree. We already reconstructed
+         * the interpreter stack, in pre-call state, with pc pointing to the
+         * CALL/APPLY op, for correctness. Then we continued in native code.
+         * The native succeeded (no exception or error). After it returned, the
+         * trace stored the return value (at the top of the native stack) and
+         * then immediately flunked the guard on cx->builtinStatus.
+         *
+         * Now LeaveTree has been called again from the tail of
+         * js_ExecuteTree. We are about to return to the interpreter. Adjust
+         * the top stack frame to resume on the next op.
+         */
+        JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || *cx->fp->regs->pc == JSOP_APPLY);
+        uintN argc = GET_ARGC(cx->fp->regs->pc);
+        cx->fp->regs->pc += JSOP_CALL_LENGTH;
+        cx->fp->regs->sp -= argc + 1;
+        JS_ASSERT_IF(!cx->fp->imacpc,
+                     cx->fp->slots + cx->fp->script->nfixed +
+                     js_ReconstructStackDepth(cx, cx->fp->script, cx->fp->regs->pc) ==
+                     cx->fp->regs->sp);
+
+        /*
+         * The return value was not available when we reconstructed the stack,
+         * but we have it now. Box it.
+         */
+        uint8* typeMap = getStackTypeMap(innermost);
+        NativeToValue(cx,
+                      cx->fp->regs->sp[-1],
+                      typeMap[innermost->numStackSlots - 1],
+                      (jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
+        return;
+    }
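
/*
 * A minimal sketch of the FAIL_STATUS (_FAIL) native protocol handled above;
 * Example_p_fallible and ExampleFallibleOperation are hypothetical names, not
 * part of this patch. On failure the native records JSBUILTIN_ERROR in
 * cx->builtinStatus rather than returning an error cookie; the guard on
 * cx->builtinStatus emitted in record_FastNativeCallComplete then exits the
 * trace (STATUS_EXIT), and the branch above distinguishes a real error from a
 * successful deep bail.
 */
static jsval FASTCALL
Example_p_fallible(JSContext* cx, JSObject* obj)
{
    jsval v;
    if (!ExampleFallibleOperation(cx, obj, &v)) {  /* hypothetical fallible helper */
        cx->builtinStatus |= JSBUILTIN_ERROR;      /* reported once back in the interpreter */
        return JSVAL_VOID;
    }
    return v;
}
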
+
+    JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
     while (callstack < rp) {
         /* Synthesize a stack frame and write out the values in it using the type map pointer
            on the native call stack. */
         js_SynthesizeFrame(cx, **callstack);
         int slots = FlushNativeStackFrame(cx, 1/*callDepth*/, (uint8*)(*callstack+1), stack, cx->fp);
 #ifdef DEBUG
         JSStackFrame* fp = cx->fp;
         debug_only_v(printf("synthesized deep frame for %s:%u@%u, slots=%d\n",
                             fp->script->filename,
                             js_FramePCToLineNumber(cx, fp),
                             FramePCOffset(fp),
                             slots);)
 #endif
         /* Keep track of the additional frames we put on the interpreter stack and the native
            stack slots we consumed. */
-        ++inlineCallCount;
+        ++*state.inlineCallCountp;
         ++callstack;
         stack += slots;
     }
 
     /* We already synthesized the frames around the innermost guard. Here we just deal
        with additional frames inside the tree we are bailing out from. */
     JS_ASSERT(rp == callstack);
     unsigned calldepth = innermost->calldepth;
     unsigned calldepth_slots = 0;
     for (unsigned n = 0; n < calldepth; ++n) {
         calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
-        ++inlineCallCount;
-#ifdef DEBUG
+        ++*state.inlineCallCountp;
+#ifdef DEBUG        
         JSStackFrame* fp = cx->fp;
         debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
                             fp->script->filename, js_FramePCToLineNumber(cx, fp),
                             FramePCOffset(fp));)
 #endif
     }
 
     /* Adjust sp and pc relative to the tree we exited from (not the tree we entered into).
        These are our final values for sp and pc since js_SynthesizeFrame has already taken
        care of all frames in between. But first we recover fp->blockChain, which comes from
        the side exit struct. */
     JSStackFrame* fp = cx->fp;
 
     JS_ASSERT_IF(fp->flags & JSFRAME_POP_BLOCKS,
-                 calldepth == 0 && jsframe_pop_blocks_set_on_entry);
+                 calldepth == 0 && state.jsframe_pop_blocks_set_on_entry);
     fp->blockChain = innermost->block;
 
     /* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
        is whatever slots frames around us consume. */
     DECODE_IP_ADJ(innermost->ip_adj, fp);
     fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
     JS_ASSERT_IF(!fp->imacpc,
                  fp->slots + fp->script->nfixed +
                  js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
 
-#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
-    uint64 cycles = rdtsc() - start;
+#ifdef EXECUTE_TREE_TIMER
+    uint64 cycles = rdtsc() - state.startTime;
 #elif defined(JS_JIT_SPEW)
     uint64 cycles = 0;
 #endif
 
     debug_only_v(printf("leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%d, sp=%d, "
                         "calldepth=%d, cycles=%llu\n",
                         fp->script->filename,
                         js_FramePCToLineNumber(cx, fp),
@@ -4064,29 +4134,31 @@ js_ExecuteTree(JSContext* cx, Fragment* 
                         fp->regs->sp - StackBase(fp), 
                         calldepth,
                         cycles));
 
     /* If this trace is part of a tree, later branches might have added additional globals for
        which we don't have any type information available in the side exit. We merge in this
        information from the entry type-map. See also comment in the constructor of TraceRecorder
        why this is always safe to do. */
+    TreeInfo* outermostTree = state.outermostTree;
+    uint16* gslots = outermostTree->globalSlots->data();
+    unsigned ngslots = outermostTree->globalSlots->length();
+    JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
     unsigned exit_gslots = innermost->numGlobalSlots;
-    JS_ASSERT(ngslots == ti->nGlobalTypes());
-    JS_ASSERT(ngslots >= exit_gslots);
+    JS_ASSERT(exit_gslots <= ngslots);
     uint8* globalTypeMap = getGlobalTypeMap(innermost);
     if (exit_gslots < ngslots)
-        mergeTypeMaps(&globalTypeMap, &exit_gslots, ti->globalTypeMap(), ngslots,
+        mergeTypeMaps(&globalTypeMap, &exit_gslots, outermostTree->globalTypeMap(), ngslots,
                       (uint8*)alloca(sizeof(uint8) * ngslots));
-    JS_ASSERT(exit_gslots == ti->nGlobalTypes());
+    JS_ASSERT(exit_gslots == outermostTree->globalSlots->length());
 
     /* write back interned globals */
-    FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global);
-    JS_ASSERT_IF(ngslots != 0, globalFrameSize == STOBJ_NSLOTS(globalObj));
-    JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
+    FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, state.global);
+    JS_ASSERT(*(uint64*)&state.global[STOBJ_NSLOTS(state.globalObj)] == 0xdeadbeefdeadbeefLL);
 
     /* write back native stack frame */
 #ifdef DEBUG
     int slots =
 #endif
         FlushNativeStackFrame(cx, innermost->calldepth,
                               getStackTypeMap(innermost),
                               stack, NULL);
@@ -4101,17 +4173,17 @@ js_ExecuteTree(JSContext* cx, Fragment* 
 #endif
 #ifdef JS_JIT_SPEW
     if (innermost->exitType != TIMEOUT_EXIT)
         AUDIT(sideExitIntoInterpreter);
     else
         AUDIT(timeoutIntoInterpreter);
 #endif
 
-    return innermost;
+    state.innermost = innermost;
 }
 
 JS_REQUIRES_STACK bool
 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Is the recorder currently active? */
@@ -4528,41 +4600,42 @@ js_FlushJITCache(JSContext* cx)
         tr->deepAbort();
         tr->popAbortStack();
     }
     Fragmento* fragmento = tm->fragmento;
     if (fragmento) {
         fragmento->clearFrags();
 #ifdef DEBUG
         JS_ASSERT(fragmento->labels);
-        delete fragmento->labels;
-        fragmento->labels = new (&gc) LabelMap(core, NULL);
+        fragmento->labels->clear();
 #endif
         tm->lirbuf->rewind();
         memset(tm->vmfragments, 0, sizeof(tm->vmfragments));
         for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
             tm->globalStates[i].globalShape = -1;
             tm->globalStates[i].globalSlots->clear();
         }
     }
     oracle.clearHitCounts();
 }
 
 JS_FORCES_STACK JSStackFrame *
 js_GetTopStackFrame(JSContext *cx)
 {
     if (JS_ON_TRACE(cx)) {
-        /*
-         * TODO: If executing a tree, synthesize stack frames and bail off
-         * trace. See bug 462027.
-         */
-        debug_only_v(printf("Internal error: getting top stack frame on trace.\n"));
-#ifdef DEBUG_jason
-        JS_ASSERT(0);
+        /* It's a bug if a non-FAIL_STATUS builtin gets here. */
+        JS_ASSERT(cx->bailExit);
+
+        JS_TRACE_MONITOR(cx).onTrace = false;
+        JS_TRACE_MONITOR(cx).prohibitRecording = true;
+        LeaveTree(*cx->interpState, cx->bailExit);
+#ifdef DEBUG
+        cx->bailExit = NULL;
 #endif
+        cx->builtinStatus |= JSBUILTIN_BAILED;
     }
     return cx->fp;
 }
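
/*
 * A minimal sketch of the caller side of the bail-out above; ExampleCallerScript
 * is a hypothetical name. Any helper that may run while a FAIL_STATUS builtin is
 * on trace obtains a real frame by calling js_GetTopStackFrame, which synthesizes
 * the interpreter frames via LeaveTree and records the bail in cx->builtinStatus
 * before returning cx->fp.
 */
static JSScript *
ExampleCallerScript(JSContext *cx)
{
    /* Deep-bails off trace, if necessary, before cx->fp is touched. */
    JSStackFrame *fp = js_GetTopStackFrame(cx);
    return fp ? fp->script : NULL;
}
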
 
 JS_REQUIRES_STACK jsval&
 TraceRecorder::argval(unsigned n) const
 {
     JS_ASSERT(n < cx->fp->fun->nargs);
@@ -6508,17 +6581,22 @@ TraceRecorder::functionCall(bool constru
             } else if (argtype == 'f') {
                 *argp = INS_CONSTPTR(JSVAL_TO_OBJECT(fval));
             } else if (argtype == 'p') {
                 if (!getClassPrototype(JSVAL_TO_OBJECT(fval), *argp))
                     return false;
             } else if (argtype == 'R') {
                 *argp = INS_CONSTPTR(cx->runtime);
             } else if (argtype == 'P') {
-                *argp = INS_CONSTPTR(pc);
+                // FIXME: Set pc to imacpc when recording JSOP_CALL inside the 
+                //        JSOP_GETELEM imacro (bug 476559).
+                if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
+                    *argp = INS_CONSTPTR(fp->imacpc);
+                else
+                    *argp = INS_CONSTPTR(pc);
             } else if (argtype == 'D') {  /* this, as a number */
                 if (!isNumber(tval))
                     goto next_specialization;
                 *argp = this_ins;
             } else {
                 JS_NOT_REACHED("unknown prefix arg type");
             }
             argp--;
@@ -6588,16 +6666,31 @@ next_specialization:;
         ABORT_TRACE("can't trace native constructor");
     ABORT_TRACE("can't trace unknown constructor");
 
 success:
 #if defined _DEBUG
     JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
 #endif
 
+    if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
+        // This needs to capture the pre-call state of the stack. So do not set
+        // pendingTraceableNative before taking this snapshot.
+        JS_ASSERT(!pendingTraceableNative);
+
+        // Take snapshot for deep LeaveTree and store it in cx->bailExit.
+        LIns* rec_ins = snapshot(DEEP_BAIL_EXIT);
+        GuardRecord* rec = (GuardRecord *) rec_ins->payload();
+        JS_ASSERT(rec->exit);
+        lir->insStorei(INS_CONSTPTR(rec->exit), cx_ins, offsetof(JSContext, bailExit));
+
+        // Tell nanojit not to discard or defer stack writes before this call.
+        lir->insGuard(LIR_xbarrier, rec_ins, rec_ins);
+    }
+
     LIns* res_ins = lir->insCall(known->builtin, args);
     if (!constructing)
         rval_ins = res_ins;
     switch (JSTN_ERRTYPE(known)) {
       case FAIL_NULL:
         guard(false, lir->ins_eq0(res_ins), OOM_EXIT);
         break;
       case FAIL_NEG:
@@ -6606,16 +6699,19 @@ success:
         jsdpun u;
         u.d = 0.0;
         guard(false, lir->ins2(LIR_flt, res_ins, lir->insImmq(u.u64)), OOM_EXIT);
         break;
       }
       case FAIL_VOID:
         guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
         break;
+      case FAIL_COOKIE:
+        guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
+        break;
       default:;
     }
     set(&fval, res_ins);
 
     if (!constructing) {
         /*
          * The return value will be processed by FastNativeCallComplete since
          * we have to know the actual return value type for calls that return
@@ -6871,70 +6967,79 @@ TraceRecorder::record_SetPropMiss(JSProp
 /* Functions used by JSOP_GETELEM. */
 
 static JSBool
 GetProperty(JSContext *cx, uintN argc, jsval *vp)
 {
     jsval *argv;
     jsid id;
 
-    JS_ASSERT(argc == 1);
+    JS_ASSERT(!JS_ON_TRACE(cx) && cx->fp->imacpc && argc == 1);
     argv = JS_ARGV(cx, vp);
     JS_ASSERT(JSVAL_IS_STRING(argv[0]));
     if (!js_ValueToStringId(cx, argv[0], &id))
         return JS_FALSE;
     argv[0] = ID_TO_VALUE(id);
     return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
 }
 
 static jsval FASTCALL
-GetProperty_tn(JSContext *cx, JSObject *obj, JSString *name)
+GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
 {
     jsid id;
     jsval v;
 
-    if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), &id) ||
-        !OBJ_GET_PROPERTY(cx, obj, id, &v)) {
-        return JSVAL_ERROR_COOKIE;
-    }
+    BEGIN_PC_HINT(pc);
+        if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), &id) ||
+            !OBJ_GET_PROPERTY(cx, obj, id, &v)) {
+            cx->builtinStatus |= JSBUILTIN_ERROR;
+            v = JSVAL_ERROR_COOKIE;
+        }
+    END_PC_HINT();
     return v;
 }
 
 static JSBool
 GetElement(JSContext *cx, uintN argc, jsval *vp)
 {
     jsval *argv;
     jsid id;
 
-    JS_ASSERT(argc == 1);
+    JS_ASSERT(!JS_ON_TRACE(cx) && cx->fp->imacpc && argc == 1);
     argv = JS_ARGV(cx, vp);
     JS_ASSERT(JSVAL_IS_NUMBER(argv[0]));
     if (!JS_ValueToId(cx, argv[0], &id))
         return JS_FALSE;
     argv[0] = ID_TO_VALUE(id);
     return OBJ_GET_PROPERTY(cx, JS_THIS_OBJECT(cx, vp), id, &JS_RVAL(cx, vp));
 }
 
 static jsval FASTCALL
-GetElement_tn(JSContext* cx, JSObject* obj, int32 index)
+GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
 {
     jsval v;
     jsid id;
 
-    if (!js_Int32ToId(cx, index, &id))
+    if (!js_Int32ToId(cx, index, &id)) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return JSVAL_ERROR_COOKIE;
-    if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
-        return JSVAL_ERROR_COOKIE;
+    }
+    BEGIN_PC_HINT(pc);
+        if (!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
+            cx->builtinStatus |= JSBUILTIN_ERROR;
+            v = JSVAL_ERROR_COOKIE;
+        }
+    END_PC_HINT();
     return v;
 }
 
 JS_DEFINE_TRCINFO_1(GetProperty,
-    (3, (static, JSVAL_FAIL,    GetProperty_tn, CONTEXT, THIS, STRING,          0, 0)))
+    (4, (static, JSVAL_FAIL,    GetProperty_tn, CONTEXT, PC, THIS, STRING,      0, 0)))
 JS_DEFINE_TRCINFO_1(GetElement,
-    (3, (extern, JSVAL_FAIL,    GetElement_tn,  CONTEXT, THIS, INT32,           0, 0)))
+    (4, (extern, JSVAL_FAIL,    GetElement_tn,  CONTEXT, PC, THIS, INT32,       0, 0)))
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETELEM()
 {
     jsval& idx = stackval(-1);
     jsval& lval = stackval(-2);
 
     LIns* obj_ins = get(&lval);
@@ -7024,19 +7129,19 @@ SetProperty(JSContext *cx, uintN argc, j
 
 static int32 FASTCALL
 SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
 {
     jsid id;
 
     if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id) ||
         !OBJ_SET_PROPERTY(cx, obj, id, &v)) {
-        return JSVAL_TO_BOOLEAN(JSVAL_VOID);
-    }
-    return JSVAL_TRUE;
+        cx->builtinStatus |= JSBUILTIN_ERROR;
+    }
+    return JSVAL_TO_BOOLEAN(JSVAL_VOID);
 }
 
 static JSBool
 SetElement(JSContext *cx, uintN argc, jsval *vp)
 {
     jsval *argv;
     jsid id;
 
@@ -7053,18 +7158,18 @@ SetElement(JSContext *cx, uintN argc, js
 }
 
 static int32 FASTCALL
 SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
 {
     jsid id;
 
     if (!js_Int32ToId(cx, index, &id) || !OBJ_SET_PROPERTY(cx, obj, id, &v))
-        return JSVAL_TO_BOOLEAN(JSVAL_VOID);
-    return JSVAL_TRUE;
+        cx->builtinStatus |= JSBUILTIN_ERROR;
+    return JSVAL_TO_BOOLEAN(JSVAL_VOID);
 }
 
 JS_DEFINE_TRCINFO_1(SetProperty,
     (4, (extern, BOOL_FAIL,     SetProperty_tn, CONTEXT, THIS, STRING, JSVAL,   0, 0)))
 JS_DEFINE_TRCINFO_1(SetElement,
     (4, (extern, BOOL_FAIL,     SetElement_tn,  CONTEXT, THIS, INT32, JSVAL,    0, 0)))
 
 JS_REQUIRES_STACK bool
@@ -7354,45 +7459,55 @@ TraceRecorder::record_JSOP_APPLY()
     
     return call_imacro(call_imacro_table[argc]);
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_FastNativeCallComplete()
 {
     JS_ASSERT(pendingTraceableNative);
-    
+
     /* At this point the generated code has already called the native function
        and we can no longer fail back to the original pc location (JSOP_CALL)
        because that would cause the interpreter to re-execute the native 
        function, which might have side effects.
 
-       Instead, snapshot(), which is invoked from unbox_jsval(), will see that
-       we are currently parked on a traceable native's JSOP_CALL instruction,
-       and it will advance the pc to restore by the length of the current
-       opcode, and indicate in the type map that the element on top of the
-       stack is a boxed value which doesn't need to be boxed if the type guard
-       generated by unbox_jsval() fails. */
+       Instead, snapshot(), which is invoked from unbox_jsval() below, will see
+       that we are currently parked on a traceable native's JSOP_CALL
+       instruction, and it will advance the pc to restore by the length of the
+       current opcode.  If the native's return type is jsval, snapshot() will
+       also indicate in the type map that the element on top of the stack is a
+       boxed value which doesn't need to be boxed if the type guard generated
+       by unbox_jsval() fails. */
+
+    if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
+#ifdef DEBUG
+        // Keep cx->bailExit null when it's invalid.
+        lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
+#endif
+        guard(true,
+              lir->ins_eq0(
+                  lir->insLoad(LIR_ld, cx_ins, (int) offsetof(JSContext, builtinStatus))),
+              STATUS_EXIT);
+    }
+
     JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || 
               *cx->fp->regs->pc == JSOP_APPLY);
 
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
     
     bool ok = true;
-    switch (JSTN_ERRTYPE(pendingTraceableNative)) {
-      case FAIL_JSVAL:
+    if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
         unbox_jsval(v, v_ins);
         set(&v, v_ins);
-        break;
-      case FAIL_NEG:
+    } else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
         /* Already added i2f in functionCall. */
         JS_ASSERT(JSVAL_IS_NUMBER(v));
-        break;
-      default:
+    } else {
         /* Convert the result to double if the builtin returns int32. */
         if (JSVAL_IS_NUMBER(v) &&
             (pendingTraceableNative->builtin->_argtypes & 3) == nanojit::ARGSIZE_LO) {
             set(&v, lir->ins1(LIR_i2f, v_ins));
         }
     }
 
     // We'll null pendingTraceableNative in monitorRecording, on the next op cycle.
@@ -8471,17 +8586,17 @@ JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ANYNAME()
 {
     return false;
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_QNAMEPART()
 {
-    return false;
+    return record_JSOP_STRING();
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_QNAMECONST()
 {
     return false;
 }
 
@@ -8949,43 +9064,61 @@ ObjectToIterator(JSContext *cx, uintN ar
 {
     jsval *argv = JS_ARGV(cx, vp);
     JS_ASSERT(JSVAL_IS_INT(argv[0]));
     JS_SET_RVAL(cx, vp, JS_THIS(cx, vp));
     return js_ValueToIterator(cx, JSVAL_TO_INT(argv[0]), &JS_RVAL(cx, vp));
 }
 
 static JSObject* FASTCALL
-ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags)
+ObjectToIterator_tn(JSContext* cx, jsbytecode* pc, JSObject *obj, int32 flags)
 {
     jsval v = OBJECT_TO_JSVAL(obj);
-    if (!js_ValueToIterator(cx, flags, &v))
+
+    BEGIN_PC_HINT(pc);
+        bool ok = js_ValueToIterator(cx, flags, &v);
+    END_PC_HINT();
+
+    if (!ok) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return NULL;
+    }
+    if (OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(v)) == &js_GeneratorClass) {
+        js_LeaveTrace(cx);
+        return NULL;
+    }
     return JSVAL_TO_OBJECT(v);
 }
 
 static JSBool
 CallIteratorNext(JSContext *cx, uintN argc, jsval *vp)
 {
     return js_CallIteratorNext(cx, JS_THIS_OBJECT(cx, vp), &JS_RVAL(cx, vp));
 }
 
 static jsval FASTCALL
-CallIteratorNext_tn(JSContext* cx, JSObject* iterobj)
+CallIteratorNext_tn(JSContext* cx, jsbytecode* pc, JSObject* iterobj)
 {
     jsval v;
-    if (!js_CallIteratorNext(cx, iterobj, &v))
+
+    BEGIN_PC_HINT(pc);
+        bool ok = js_CallIteratorNext(cx, iterobj, &v);
+    END_PC_HINT();
+
+    if (!ok) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
         return JSVAL_ERROR_COOKIE;
+    }
     return v;
 }
 
 JS_DEFINE_TRCINFO_1(ObjectToIterator,
-    (3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32,   0, 0)))
+    (4, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, PC, THIS, INT32, 0, 0)))
 JS_DEFINE_TRCINFO_1(CallIteratorNext,
-    (2, (static, JSVAL_FAIL,       CallIteratorNext_tn, CONTEXT, THIS,          0, 0)))
+    (3, (static, JSVAL_FAIL,  CallIteratorNext_tn, CONTEXT, PC, THIS,        0, 0)))
 
 static const struct BuiltinFunctionInfo {
     JSTraceableNative *tn;
     int nargs;
 } builtinFunctionInfo[JSBUILTIN_LIMIT] = {
     {ObjectToIterator_trcinfo,   1},
     {CallIteratorNext_trcinfo,   0},
     {GetProperty_trcinfo,        1},
@@ -9063,17 +9196,17 @@ TraceRecorder::record_JSOP_INT32()
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LENGTH()
 {
     jsval& l = stackval(-1);
     if (JSVAL_IS_PRIMITIVE(l)) {
         if (!JSVAL_IS_STRING(l))
-            ABORT_TRACE("non-string primitives unsupported");
+            ABORT_TRACE("non-string primitive JSOP_LENGTH unsupported");
         LIns* str_ins = get(&l);
         LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, length));
 
         LIns* masked_len_ins = lir->ins2(LIR_piand,
                                          len_ins,
                                          INS_CONSTPTR(JSSTRING_LENGTH_MASK));
 
         LIns *choose_len_ins =
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -208,17 +208,19 @@ public:
 enum ExitType {
     BRANCH_EXIT, 
     LOOP_EXIT, 
     NESTED_EXIT,
     MISMATCH_EXIT,
     OOM_EXIT,
     OVERFLOW_EXIT,
     UNSTABLE_LOOP_EXIT,
-    TIMEOUT_EXIT
+    TIMEOUT_EXIT,
+    DEEP_BAIL_EXIT,
+    STATUS_EXIT
 };
 
 struct VMSideExit : public nanojit::SideExit
 {
     JSObject* block;
     intptr_t ip_adj;
     intptr_t sp_adj;
     intptr_t rp_adj;
@@ -239,30 +241,28 @@ static inline uint8* getGlobalTypeMap(na
     return getStackTypeMap(exit) + ((VMSideExit*)exit)->numStackSlots;
 }
 
 static inline uint8* getFullTypeMap(nanojit::SideExit* exit)
 {
     return getStackTypeMap(exit);
 }
 
-struct InterpState
-{
-    void* sp; /* native stack pointer, stack[0] is spbase[0] */
-    void* rp; /* call stack pointer */
-    void* gp; /* global frame pointer */
-    JSContext *cx; /* current VM context handle */
-    void* eos; /* first unusable word after the native stack */
-    void* eor; /* first unusable word after the call stack */
-    VMSideExit* lastTreeExitGuard; /* guard we exited on during a tree call */
-    VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree
-                                      call exit guard mismatched */
-    void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */
-    JSObject* globalObj; /* pointer to the global object */
-}; 
+struct FrameInfo {
+    JSObject*       callee;     // callee function object
+    JSObject*       block;      // caller block chain head
+    intptr_t        ip_adj;     // caller script-based pc index and imacro pc
+    union {
+        struct {
+            uint16  spdist;     // distance from fp->slots to fp->regs->sp at JSOP_CALL
+            uint16  argc;       // actual argument count, may be < fun->nargs
+        } s;
+        uint32      word;       // for spdist/argc LIR store in record_JSOP_CALL
+    };
+};
 
 struct UnstableExit
 {
     nanojit::Fragment* fragment;
     VMSideExit* exit;
     UnstableExit* next;
 };
 
@@ -304,28 +304,47 @@ public:
     inline uint8* globalTypeMap() {
         return typeMap.data() + nStackTypes;
     }
     inline uint8* stackTypeMap() {
         return typeMap.data();
     }
 };
 
-struct FrameInfo {
-    JSObject*       callee;     // callee function object
-    JSObject*       block;      // caller block chain head
-    intptr_t        ip_adj;     // caller script-based pc index and imacro pc
-    union {
-        struct {
-            uint16  spdist;     // distance from fp->slots to fp->regs->sp at JSOP_CALL
-            uint16  argc;       // actual argument count, may be < fun->nargs
-        } s;
-        uint32      word;       // for spdist/argc LIR store in record_JSOP_CALL
-    };
-};
+#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
+# define EXECUTE_TREE_TIMER
+#endif
+
+struct InterpState
+{
+    double        *sp;                  // native stack pointer, stack[0] is spbase[0]
+    void          *rp;                  // call stack pointer
+    double        *global;              // global frame pointer
+    JSContext     *cx;                  // current VM context handle
+    double        *eos;                 // first unusable word after the native stack
+    void          *eor;                 // first unusable word after the call stack
+    VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
+    VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
+                                        // call exit guard mismatched
+    void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
+    TreeInfo*      outermostTree;       // the outermost tree we initially invoked
+    JSObject*      globalObj;           // pointer to the global object
+    double*        stackBase;           // native stack base
+    FrameInfo**    callstackBase;       // call stack base
+    uintN*         inlineCallCountp;    // inline call count counter
+    VMSideExit**   innermostNestedGuardp; // caller's outparam for the innermost nested guard
+    void*          stackMark;            // cx->stackPool mark, released when leaving the tree
+    VMSideExit*    innermost;            // innermost side exit, set by LeaveTree
+#ifdef EXECUTE_TREE_TIMER
+    uint64         startTime;
+#endif
+#ifdef DEBUG
+    bool           jsframe_pop_blocks_set_on_entry;
+#endif
+}; 
 
 enum JSMonitorRecordingStatus {
     JSMRS_CONTINUE,
     JSMRS_STOP,
     JSMRS_IMACRO
 };
 
 class TraceRecorder : public avmplus::GCObject {
--- a/js/src/nanojit/LIR.cpp
+++ b/js/src/nanojit/LIR.cpp
@@ -2202,18 +2202,22 @@ namespace nanojit
 
 #if defined(NJ_VERBOSE)
     LabelMap::LabelMap(AvmCore *core, LabelMap* parent)
         : parent(parent), names(core->gc), addrs(core->config.verbose_addrs), end(buf), core(core)
 	{}
 
     LabelMap::~LabelMap()
     {
+        clear();
+    }
+
+    void LabelMap::clear()
+    {
         Entry *e;
-        
         while ((e = names.removeLast()) != NULL) {
             core->freeString(e->name);
             NJ_DELETE(e);
         } 
     }
 
     void LabelMap::add(const void *p, size_t size, size_t align, const char *name)
 	{
--- a/js/src/nanojit/LIR.h
+++ b/js/src/nanojit/LIR.h
@@ -485,16 +485,17 @@ namespace nanojit
         avmplus::AvmCore *core;
         LabelMap(avmplus::AvmCore *, LabelMap* parent);
         ~LabelMap();
         void add(const void *p, size_t size, size_t align, const char *name);
 		void add(const void *p, size_t size, size_t align, avmplus::String*);
 		const char *dup(const char *);
 		const char *format(const void *p);
 		void promoteAll(const void *newbase);
+		void clear();
     };
 
 	class LirNameMap MMGC_SUBCLASS_DECL
 	{
 		template <class Key>
 		class CountMap: public avmplus::SortedMap<Key, int, avmplus::LIST_NonGCObjects> {
 		public:
 			CountMap(avmplus::GC*gc) : avmplus::SortedMap<Key, int, avmplus::LIST_NonGCObjects>(gc) {}
--- a/js/src/shell/js.cpp
+++ b/js/src/shell/js.cpp
@@ -971,18 +971,20 @@ ReadLine(JSContext *cx, uintN argc, jsva
     return JS_TRUE;
 }
 
 #ifdef JS_TRACER
 static jsval JS_FASTCALL
 Print_tn(JSContext *cx, JSString *str)
 {
     char *bytes = JS_EncodeString(cx, str);
-    if (!bytes)
-        return JSVAL_ERROR_COOKIE;
+    if (!bytes) {
+        cx->builtinStatus |= JSBUILTIN_ERROR;
+        return JSVAL_VOID;
+    }
     fprintf(gOutFile, "%s\n", bytes);
     JS_free(cx, bytes);
     fflush(gOutFile);
     return JSVAL_VOID;
 }
 #endif
 
 static JSBool
--- a/js/src/trace-test.js
+++ b/js/src/trace-test.js
@@ -8,16 +8,22 @@
 // object in debug builds.
 const haveTracemonkey = !!(this.tracemonkey)
 const HOTLOOP = haveTracemonkey ? tracemonkey.HOTLOOP : 2;
 // The loop count at which we trace
 const RECORDLOOP = HOTLOOP;
 // The loop count at which we run the trace
 const RUNLOOP = HOTLOOP + 1;
 
+var gDoMandelbrotTest = true;
+if ("gSkipSlowTests" in this && gSkipSlowTests) {
+    print("** Skipping slow tests");
+    gDoMandelbrotTest = false;
+}
+
 var testName = null;
 if ("arguments" in this && arguments.length > 0)
   testName = arguments[0];
 var fails = [], passes=[];
 
 function jitstatHandler(f)
 {
     if (!haveTracemonkey)
@@ -2089,16 +2095,94 @@ function testArrayPushPop() {
         sum2 += a.pop();
     a.push(sum1);
     a.push(sum2);
     return a.join(",");
 }
 testArrayPushPop.expected = "55,45";
 test(testArrayPushPop);
 
+function testSlowArrayPop() {
+    var a = [];
+    for (var i = 0; i < RUNLOOP; i++)
+        a[i] = [0];
+    a[RUNLOOP-1].__defineGetter__("0", function () { return 'xyzzy'; });
+
+    var last;
+    for (var i = 0; i < RUNLOOP; i++)
+        last = a[i].pop();  // reenters interpreter in getter
+    return last;
+}
+testSlowArrayPop.expected = 'xyzzy';
+test(testSlowArrayPop);
+
+// Same thing but it needs to reconstruct multiple stack frames (so,
+// multiple functions called inside the loop)
+function testSlowArrayPopMultiFrame() {    
+    var a = [];
+    for (var i = 0; i < RUNLOOP; i++)
+        a[i] = [0];
+    a[RUNLOOP-1].__defineGetter__("0", function () { return 23; });
+
+    function child(a, i) {
+        return a[i].pop();  // reenters interpreter in getter
+    }
+    function parent(a, i) {
+        return child(a, i);
+    }
+    function gramps(a, i) { 
+        return parent(a, i);
+    }
+
+    var last;
+    for (var i = 0; i < RUNLOOP; i++)
+        last = gramps(a, i);
+    return last;
+}
+testSlowArrayPopMultiFrame.expected = 23;
+test(testSlowArrayPopMultiFrame);
+
+// Same thing but nested trees, each reconstructing one or more stack frames 
+// (so, several functions with loops, such that the loops end up being
+// nested though they are not lexically nested)
+
+function testSlowArrayPopNestedTrees() {    
+    var a = [];
+    for (var i = 0; i < RUNLOOP; i++)
+        a[i] = [0];
+    a[RUNLOOP-1].__defineGetter__("0", function () { return 3.14159 });
+
+    function child(a, i, j, k) {
+        var last = 2.71828;
+        for (var l = 0; l < RUNLOOP; l++)
+            if (i == RUNLOOP-1 && j == RUNLOOP-1 && k == RUNLOOP-1)
+                last = a[l].pop();  // reenters interpreter in getter
+        return last;
+    }
+    function parent(a, i, j) {
+        var last;
+        for (var k = 0; k < RUNLOOP; k++)
+            last = child(a, i, j, k);
+        return last;
+    }
+    function gramps(a, i) { 
+        var last;
+        for (var j = 0; j < RUNLOOP; j++)
+            last = parent(a, i, j);
+        return last;
+    }
+
+    var last;
+    for (var i = 0; i < RUNLOOP; i++)
+        last = gramps(a, i);
+    return last;
+}
+testSlowArrayPopNestedTrees.expected = 3.14159;
+test(testSlowArrayPopNestedTrees);
+
 function testResumeOp() {
     var a = [1,"2",3,"4",5,"6",7,"8",9,"10",11,"12",13,"14",15,"16"];
     var x = "";
     while (a.length > 0)
         x += a.pop();
     return x;
 }
 testResumeOp.expected = "16151413121110987654321";
@@ -4139,17 +4223,16 @@ function testInterpreterReentry5() {
     arr.__defineGetter__("4", function() 1000);
     for (var i = 0; i < 5; i++)
         arr[i];
     for (var p in arr)
         arr[p];
 }
 test(testInterpreterReentry5);
 
-/* // These tests should pass but currently crash, pending bug 462027.
 function testInterpreterReentry6() {
     var obj = {a:1, b:1, c:1, d:1, set e(x) { this._e = x; }};
     for (var p in obj)
         obj[p] = "grue";
     return obj._e;
 }
 testInterpreterReentry6.expected = "grue";
 test(testInterpreterReentry6);
@@ -4161,17 +4244,23 @@ function testInterpreterReentry7() {
         arr[i] = "grue";
     var tmp = arr._4;
     for (var p in arr)
         arr[p] = "bleen";
     return tmp + " " + arr._4;
 }
 testInterpreterReentry7.expected = "grue bleen";
 test(testInterpreterReentry7);
-*/
+
+// Bug 462027 comment 54.
+function testInterpreterReentry8() {
+    var e = <x><y/></x>;
+    for (var j = 0; j < 4; ++j) { +[e]; }
+}
+test(testInterpreterReentry8);
 
 /*****************************************************************************
  *                                                                           *
  *  _____ _   _  _____ ______ _____ _______                                  *
  * |_   _| \ | |/ ____|  ____|  __ \__   __|                                 *
  *   | | |  \| | (___ | |__  | |__) | | |                                    *
  *   | | | . ` |\___ \|  __| |  _  /  | |                                    *
  *  _| |_| |\  |____) | |____| | \ \  | |                                    *
@@ -4196,16 +4285,17 @@ test(testInterpreterReentry7);
  *****************************************************************************/
 
 load("math-trace-tests.js");
 
 // BEGIN MANDELBROT STUFF
 // XXXbz I would dearly like to wrap it up into a function to avoid polluting
 // the global scope, but the function ends up heavyweight, and then we lose on
 // the jit.
+if (gDoMandelbrotTest) {
 load("mandelbrot-results.js");
 //function testMandelbrotAll() {
   // Configuration options that affect which codepaths we follow.
   var doImageData = true;
   var avoidSparseArray = true;
 
   // Control of iteration numbers and sizing.  We'll do
   // scaler * colorNames.length iterations or so before deciding that we
@@ -4439,16 +4529,17 @@ load("mandelbrot-results.js");
   avoidSparseArray = false;
   test(createMandelSet);
 
   escape = escapeNorm2;
   doImageData = false;  // avoidSparseArray doesn't matter here
   test(createMandelSet);
 //}
 //testMandelbrotAll();
+} /* if (gDoMandelbrotTest) */
 // END MANDELBROT STUFF
 
 /*****************************************************************************
  *  _   _  ____     _   __  ____  _____  ______                              *
  * | \ | |/ __ \   |  \/  |/ __ \|  __ \|  ____|                             *
  * |  \| | |  | |  | \  / | |  | | |__) | |__                                *
  * | . ` | |  | |  | |\/| | |  | |  _  /|  __|                               *
  * | |\  | |__| |  | |  | | |__| | | \ \| |____                              *