Trace recursive function calls (bug 459301, r=gal).
author David Anderson <danderson@mozilla.com>
Wed, 30 Sep 2009 15:28:00 -0700
changeset 33564 910f0c1ca2e5aff2814171749896d7b7fbf7e7f4
parent 33563 89e665eb99446fed79e564034c539bfb08ec2414
child 33565 ca411010382c81ca761ecb565a94606ec3189430
push id unknown
push user unknown
push date unknown
reviewers gal
bugs 459301
milestone 1.9.3a1pre
Trace recursive function calls (bug 459301, r=gal).
js/src/jsbuiltins.cpp
js/src/jsbuiltins.h
js/src/jsemit.cpp
js/src/jsinterp.cpp
js/src/jsops.cpp
js/src/jsrecursion.cpp
js/src/jstracer.cpp
js/src/jstracer.h
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -1,10 +1,10 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4; -*-
- * vim: set ts=8 sw=4 et tw=99:
+ * vim: set ts=4 sw=4 et tw=99:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
  * http://www.mozilla.org/MPL/
@@ -146,16 +146,34 @@ int32 FASTCALL
 js_UnboxInt32(jsval v)
 {
     if (JS_LIKELY(JSVAL_IS_INT(v)))
         return JSVAL_TO_INT(v);
     return js_DoubleToECMAInt32(*JSVAL_TO_DOUBLE(v));
 }
 JS_DEFINE_CALLINFO_1(extern, INT32, js_UnboxInt32, JSVAL, 1, 1)
 
+JSBool FASTCALL
+js_TryUnboxInt32(jsval v, int32* i32p)
+{
+    if (JS_LIKELY(JSVAL_IS_INT(v))) {
+        *i32p = JSVAL_TO_INT(v);
+        return JS_TRUE;
+    }
+    if (!JSVAL_IS_DOUBLE(v))
+        return JS_FALSE;
+    int32 i;
+    jsdouble d = *JSVAL_TO_DOUBLE(v);
+    if (!JSDOUBLE_IS_INT(d, i))
+        return JS_FALSE;
+    *i32p = i;
+    return JS_TRUE;
+}
+JS_DEFINE_CALLINFO_2(extern, BOOL, js_TryUnboxInt32, JSVAL, INT32PTR, 1, 1)
+
 int32 FASTCALL
 js_DoubleToInt32(jsdouble d)
 {
     return js_DoubleToECMAInt32(d);
 }
 JS_DEFINE_CALLINFO_1(extern, INT32, js_DoubleToInt32, DOUBLE, 1, 1)
 
 uint32 FASTCALL
@@ -401,16 +419,52 @@ js_NewNullClosure(JSContext* cx, JSObjec
 
     closure->map = scope;
     closure->init(&js_FunctionClass, proto, parent,
                   reinterpret_cast<jsval>(fun));
     return closure;
 }
 JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT, 0, 0)
 
+JS_REQUIRES_STACK JSBool FASTCALL
+js_PopInterpFrame(JSContext* cx, InterpState* state)
+{
+    JS_ASSERT(cx->fp && cx->fp->down);
+    JSInlineFrame* ifp = (JSInlineFrame*)cx->fp;
+
+    /*
+     * Mirror the frame-popping code from inline_return in js_Interpret. Some
+     * cases we simply don't handle; returning JS_FALSE there makes the trace
+     * take a MISMATCH_EXIT.
+     */
+    if (ifp->hookData)
+        return JS_FALSE;
+    if (cx->version != ifp->callerVersion)
+        return JS_FALSE;
+    if (cx->fp->flags & JSFRAME_CONSTRUCTING)
+        return JS_FALSE;
+    if (cx->fp->imacpc)
+        return JS_FALSE;
+    
+    /* Update display table. */
+    if (cx->fp->script->staticLevel < JS_DISPLAY_SIZE)
+        cx->display[cx->fp->script->staticLevel] = cx->fp->displaySave;
+    
+    /* Pop the frame and release its memory. */
+    cx->fp = cx->fp->down;
+    JS_ASSERT(cx->fp->regs == &ifp->callerRegs);
+    cx->fp->regs = ifp->frame.regs;
+    JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
+
+    /* Update the inline call count. */
+    *state->inlineCallCountp = *state->inlineCallCountp - 1;
+    return JS_TRUE;
+}
+JS_DEFINE_CALLINFO_2(extern, BOOL, js_PopInterpFrame, CONTEXT, INTERPSTATE, 0, 0)
+
 JSString* FASTCALL
 js_ConcatN(JSContext *cx, JSString **strArray, uint32 size)
 {
     /* Calculate total size. */
     size_t numChar = 1;
     for (uint32 i = 0; i < size; ++i) {
         size_t before = numChar;
         numChar += strArray[i]->length();
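
The two new builtins above are consumed from recorded LIR rather than called directly. A minimal sketch of the int32 case, mirroring the slurpInt32Slot code added to jsrecursion.cpp later in this changeset (names and helpers are the tracer's own, shown here only for orientation):

    // Reserve space for the out-parameter, call the fallible builtin, and
    // side-exit if it reports failure; otherwise load the unboxed result.
    LIns* space = lir->insAlloc(sizeof(int32));
    LIns* args[] = { space, val_ins };          // LIR call args are listed in reverse order
    LIns* ok = lir->insCall(&js_TryUnboxInt32_ci, args);
    guard(false, lir->ins_eq0(ok), exit);
    LIns* int32_ins = lir->insLoad(LIR_ld, space, 0);
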
--- a/js/src/jsbuiltins.h
+++ b/js/src/jsbuiltins.h
@@ -198,16 +198,17 @@ struct ClosureVarInfo;
 #define _JS_CTYPE_JSVAL_FAIL        _JS_JSVAL_CTYPE(                  _JS_PTR, --, --, FAIL_STATUS)
 #define _JS_CTYPE_JSID              _JS_CTYPE(jsid,                   _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_BOOL              _JS_CTYPE(JSBool,                 _JS_I32, "","i", INFALLIBLE)
 #define _JS_CTYPE_BOOL_RETRY        _JS_CTYPE(JSBool,                 _JS_I32, --, --, FAIL_VOID)
 #define _JS_CTYPE_BOOL_FAIL         _JS_CTYPE(JSBool,                 _JS_I32, --, --, FAIL_STATUS)
 #define _JS_CTYPE_INT32             _JS_CTYPE(int32,                  _JS_I32, "","i", INFALLIBLE)
 #define _JS_CTYPE_INT32_RETRY       _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_NEG)
 #define _JS_CTYPE_INT32_FAIL        _JS_CTYPE(int32,                  _JS_I32, --, --, FAIL_STATUS)
+#define _JS_CTYPE_INT32PTR          _JS_CTYPE(int32 *,                _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_UINT32            _JS_CTYPE(uint32,                 _JS_I32, "","i", INFALLIBLE)
 #define _JS_CTYPE_UINT32_RETRY      _JS_CTYPE(uint32,                 _JS_I32, --, --, FAIL_NEG)
 #define _JS_CTYPE_UINT32_FAIL       _JS_CTYPE(uint32,                 _JS_I32, --, --, FAIL_STATUS)
 #define _JS_CTYPE_DOUBLE            _JS_CTYPE(jsdouble,               _JS_F64, "","d", INFALLIBLE)
 #define _JS_CTYPE_DOUBLE_FAIL       _JS_CTYPE(jsdouble,               _JS_F64, --, --, FAIL_STATUS)
 #define _JS_CTYPE_STRING            _JS_CTYPE(JSString *,             _JS_PTR, "","s", INFALLIBLE)
 #define _JS_CTYPE_STRING_RETRY      _JS_CTYPE(JSString *,             _JS_PTR, --, --, FAIL_NULL)
 #define _JS_CTYPE_STRING_FAIL       _JS_CTYPE(JSString *,             _JS_PTR, --, --, FAIL_STATUS)
@@ -222,16 +223,17 @@ struct ClosureVarInfo;
 #define _JS_CTYPE_SIDEEXIT          _JS_CTYPE(SideExit *,             _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_INTERPSTATE       _JS_CTYPE(InterpState *,          _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_FRAGMENT          _JS_CTYPE(nanojit::Fragment *,    _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_CLASS             _JS_CTYPE(JSClass *,              _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_DOUBLEPTR         _JS_CTYPE(double *,               _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_CHARPTR           _JS_CTYPE(char *,                 _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_APNPTR            _JS_CTYPE(js_ArgsPrivateNative *, _JS_PTR, --, --, INFALLIBLE)
 #define _JS_CTYPE_CVIPTR            _JS_CTYPE(const ClosureVarInfo *, _JS_PTR, --, --, INFALLIBLE)
+#define _JS_CTYPE_FRAMEINFO         _JS_CTYPE(FrameInfo *,            _JS_PTR, --, --, INFALLIBLE)
 
 #define _JS_EXPAND(tokens)  tokens
 
 #define _JS_CTYPE_TYPE2(t,s,p,a,f)      t
 #define _JS_CTYPE_TYPE(tyname)          _JS_EXPAND(_JS_CTYPE_TYPE2    _JS_CTYPE_##tyname)
 #define _JS_CTYPE_RETSIZE2(t,s,p,a,f)   s##_RETSIZE
 #define _JS_CTYPE_RETSIZE(tyname)       _JS_EXPAND(_JS_CTYPE_RETSIZE2 _JS_CTYPE_##tyname)
 #define _JS_CTYPE_ARGSIZE2(t,s,p,a,f)   s##_ARGSIZE
@@ -497,16 +499,17 @@ JS_DECLARE_CALLINFO(js_String_p_charCode
 JS_DECLARE_CALLINFO(js_String_p_charCodeAt0_int)
 JS_DECLARE_CALLINFO(js_String_p_charCodeAt_int)
 
 /* Defined in jsbuiltins.cpp. */
 JS_DECLARE_CALLINFO(js_BoxDouble)
 JS_DECLARE_CALLINFO(js_BoxInt32)
 JS_DECLARE_CALLINFO(js_UnboxDouble)
 JS_DECLARE_CALLINFO(js_UnboxInt32)
+JS_DECLARE_CALLINFO(js_TryUnboxInt32)
 JS_DECLARE_CALLINFO(js_dmod)
 JS_DECLARE_CALLINFO(js_imod)
 JS_DECLARE_CALLINFO(js_DoubleToInt32)
 JS_DECLARE_CALLINFO(js_DoubleToUint32)
 
 JS_DECLARE_CALLINFO(js_StringToNumber)
 JS_DECLARE_CALLINFO(js_StringToInt32)
 JS_DECLARE_CALLINFO(js_CloseIterator)
@@ -516,10 +519,11 @@ JS_DECLARE_CALLINFO(js_HasNamedProperty)
 JS_DECLARE_CALLINFO(js_HasNamedPropertyInt32)
 JS_DECLARE_CALLINFO(js_TypeOfObject)
 JS_DECLARE_CALLINFO(js_TypeOfBoolean)
 JS_DECLARE_CALLINFO(js_BooleanOrUndefinedToNumber)
 JS_DECLARE_CALLINFO(js_BooleanOrUndefinedToString)
 JS_DECLARE_CALLINFO(js_Arguments)
 JS_DECLARE_CALLINFO(js_NewNullClosure)
 JS_DECLARE_CALLINFO(js_ConcatN)
+JS_DECLARE_CALLINFO(js_PopInterpFrame)
 
 #endif /* jsbuiltins_h___ */
--- a/js/src/jsemit.cpp
+++ b/js/src/jsemit.cpp
@@ -6327,16 +6327,21 @@ js_EmitTree(JSContext *cx, JSCodeGenerat
         }
         cg->flags |= oldflags & TCF_IN_FOR_INIT;
         if (js_NewSrcNote2(cx, cg, SRC_PCBASE, CG_OFFSET(cg) - off) < 0)
             return JS_FALSE;
 
         argc = pn->pn_count - 1;
         if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
             return JS_FALSE;
+        if (PN_OP(pn) == JSOP_CALL) {
+            /* Add a trace hint opcode for recursion. */
+            if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
+                return JS_FALSE;
+        }
         if (PN_OP(pn) == JSOP_EVAL)
             EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
         break;
       }
 
       case TOK_LEXICALSCOPE:
       {
         JSObjectBox *objbox;
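
The emitter change means every JSOP_CALL is now followed by a JSOP_TRACE hint, giving recursive returns a loop edge to land on. A sketch of the resulting bytecode shape for a plain call (opcode names only, operands elided):

    /*
     *   ...push callee, this, args...
     *   JSOP_CALL argc       ; the callee frame's return pc points here
     *   JSOP_TRACE           ; new hint; blacklisting rewrites it to JSOP_NOP
     *   ...consume the return value...
     */
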
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -2843,37 +2843,42 @@ js_Interpret(JSContext *cx)
     JS_BEGIN_MACRO                                                            \
         js_EnterTraceVisState(cx, S_RECORD, R_NONE);                          \
     JS_END_MACRO
 #endif
 #else
 #define MONITOR_BRANCH_TRACEVIS
 #endif
 
-#define MONITOR_BRANCH()                                                      \
+#define RESTORE_INTERP_VARS()                                                 \
+    JS_BEGIN_MACRO                                                            \
+        fp = cx->fp;                                                          \
+        script = fp->script;                                                  \
+        atoms = FrameAtomBase(cx, fp);                                        \
+        currentVersion = (JSVersion) script->version;                         \
+        JS_ASSERT(fp->regs == &regs);                                         \
+        if (cx->throwing)                                                     \
+            goto error;                                                       \
+    JS_END_MACRO
+
+#define MONITOR_BRANCH(reason)                                                \
     JS_BEGIN_MACRO                                                            \
         if (TRACING_ENABLED(cx)) {                                            \
-            if (js_MonitorLoopEdge(cx, inlineCallCount)) {                    \
+            if (js_MonitorLoopEdge(cx, inlineCallCount, reason)) {            \
                 JS_ASSERT(TRACE_RECORDER(cx));                                \
                 MONITOR_BRANCH_TRACEVIS;                                      \
                 ENABLE_INTERRUPTS();                                          \
             }                                                                 \
-            fp = cx->fp;                                                      \
-            script = fp->script;                                              \
-            atoms = FrameAtomBase(cx, fp);                                    \
-            currentVersion = (JSVersion) script->version;                     \
-            JS_ASSERT(fp->regs == &regs);                                     \
-            if (cx->throwing)                                                 \
-                goto error;                                                   \
+            RESTORE_INTERP_VARS();                                            \
         }                                                                     \
     JS_END_MACRO
 
 #else /* !JS_TRACER */
 
-#define MONITOR_BRANCH() ((void) 0)
+#define MONITOR_BRANCH(reason) ((void) 0)
 
 #endif /* !JS_TRACER */
 
     /*
      * Prepare to call a user-supplied branch handler, and abort the script
      * if it returns false.
      */
 #define CHECK_BRANCH()                                                        \
@@ -2889,23 +2894,23 @@ js_Interpret(JSContext *cx)
 #define BRANCH(n)                                                             \
     JS_BEGIN_MACRO                                                            \
         regs.pc += (n);                                                       \
         op = (JSOp) *regs.pc;                                                 \
         if ((n) <= 0) {                                                       \
             CHECK_BRANCH();                                                   \
             if (op == JSOP_NOP) {                                             \
                 if (TRACE_RECORDER(cx)) {                                     \
-                    MONITOR_BRANCH();                                         \
+                    MONITOR_BRANCH(Monitor_Branch);                           \
                     op = (JSOp) *regs.pc;                                     \
                 } else {                                                      \
                     op = (JSOp) *++regs.pc;                                   \
                 }                                                             \
             } else if (op == JSOP_TRACE) {                                    \
-                MONITOR_BRANCH();                                             \
+                MONITOR_BRANCH(Monitor_Branch);                               \
                 op = (JSOp) *regs.pc;                                         \
             }                                                                 \
         }                                                                     \
         DO_OP();                                                              \
     JS_END_MACRO
 
     MUST_FLOW_THROUGH("exit");
     ++cx->interpLevel;
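
MONITOR_BRANCH and js_MonitorLoopEdge now take a reason so the tracer can distinguish ordinary loop edges from recursive frame transitions. The enum is presumably declared in jstracer.h (which this patch also touches but isn't excerpted here); a sketch inferred from the call sites in this patch, listing only the values those call sites use:

    // Assumed shape of the reason passed to js_MonitorLoopEdge.
    enum MonitorReason {
        Monitor_Branch,      // backward branch or JSOP_TRACE hit inside a loop
        Monitor_EnterFrame,  // a self-recursive call just pushed a new frame
        Monitor_LeaveFrame   // a self-recursive call just returned to its caller
    };
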
--- a/js/src/jsops.cpp
+++ b/js/src/jsops.cpp
@@ -293,33 +293,56 @@
 
                 /* Restore caller's registers. */
                 regs = ifp->callerRegs;
 
                 /* Store the return value in the caller's operand frame. */
                 regs.sp -= 1 + (size_t) ifp->frame.argc;
                 regs.sp[-1] = fp->rval;
 
+                bool recursive = fp->script == fp->down->script;
+
                 /* Restore cx->fp and release the inline frame's space. */
                 cx->fp = fp = fp->down;
                 JS_ASSERT(fp->regs == &ifp->callerRegs);
                 fp->regs = &regs;
                 JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
 
                 /* Restore the calling script's interpreter registers. */
                 script = fp->script;
                 atoms = FrameAtomBase(cx, fp);
 
                 /* Resume execution in the calling frame. */
                 inlineCallCount--;
                 if (JS_LIKELY(ok)) {
-                    TRACE_0(LeaveFrame);
                     JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length
                               == JSOP_CALL_LENGTH);
-                    len = JSOP_CALL_LENGTH;
+#ifdef DEBUG
+                    JSOp traceOp = js_GetOpcode(cx, script, regs.pc +
+                                                JSOP_CALL_LENGTH);
+                    JS_ASSERT_IF(*regs.pc == JSOP_CALL && !fp->imacpc,
+                                 traceOp == JSOP_TRACE || traceOp == JSOP_NOP);
+#endif
+                    TRACE_0(LeaveFrame);
+                    if (!TRACE_RECORDER(cx) && recursive) {
+                        if (*(regs.pc + JSOP_CALL_LENGTH) == JSOP_TRACE) {
+                            regs.pc += JSOP_CALL_LENGTH;
+                            MONITOR_BRANCH(Monitor_LeaveFrame);
+                            op = (JSOp)*regs.pc;
+                            DO_OP();
+                        }
+                    }
+                    if (*(regs.pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
+                        *(regs.pc + JSOP_CALL_LENGTH) == JSOP_NOP) {
+                        JS_STATIC_ASSERT(JSOP_TRACE_LENGTH == JSOP_NOP_LENGTH);
+                        regs.pc += JSOP_CALL_LENGTH;
+                        len = JSOP_TRACE_LENGTH;
+                    } else {
+                        len = JSOP_CALL_LENGTH;
+                    }
                     DO_NEXT_OP(len);
                 }
                 goto error;
             }
             goto exit;
 
           BEGIN_CASE(JSOP_DEFAULT)
             (void) POP();
@@ -2186,31 +2209,46 @@
                     if (hook) {
                         newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0,
                                                 cx->debugHooks->callHookData);
                         CHECK_INTERRUPT_HANDLER();
                     } else {
                         newifp->hookData = NULL;
                     }
 
-                    TRACE_0(EnterFrame);
-
                     inlineCallCount++;
                     JS_RUNTIME_METER(rt, inlineCalls);
 
 #ifdef INCLUDE_MOZILLA_DTRACE
                     /* DTrace function entry, inlines */
                     if (JAVASCRIPT_FUNCTION_ENTRY_ENABLED())
                         jsdtrace_function_entry(cx, fp, fun);
                     if (JAVASCRIPT_FUNCTION_INFO_ENABLED())
                         jsdtrace_function_info(cx, fp, fp->down, fun);
                     if (JAVASCRIPT_FUNCTION_ARGS_ENABLED())
                         jsdtrace_function_args(cx, fp, fun, fp->argc, fp->argv);
 #endif
 
+#ifdef JS_TRACER
+                    if (TRACE_RECORDER(cx)) {
+                        TRACE_1(EnterFrame, inlineCallCount);
+                        RESTORE_INTERP_VARS();
+                    } else if (fp->script == fp->down->script &&
+                               *fp->down->regs->pc == JSOP_CALL) {
+#ifdef DEBUG
+                        JSOp traceOp = js_GetOpcode(cx, fp->script,
+                                                    fp->regs->pc);
+                        JS_ASSERT_IF(!fp->imacpc, traceOp == JSOP_TRACE ||
+                                     traceOp == JSOP_NOP);
+#endif
+                        if (*fp->regs->pc == JSOP_TRACE)
+                            MONITOR_BRANCH(Monitor_EnterFrame);
+                    }
+#endif
+
                     /* Load first op and dispatch it (safe since JSOP_STOP). */
                     op = (JSOp) *regs.pc;
                     DO_OP();
 
                   bad_inline_call:
                     JS_ASSERT(fp->regs == &regs);
                     script = fp->script;
                     atoms = script->atomMap.vector;
@@ -2248,17 +2286,18 @@
                         /*
                          * If we are executing the JSOP_NEXTITER imacro and a Stopiteration
                          * exception is raised, transform it into a JSVAL_HOLE return value.
                          * The tracer generates equivalent code by calling CatchStopIteration_tn.
                          */
                         if (fp->imacpc && *fp->imacpc == JSOP_NEXTITER &&
                             cx->throwing && js_ValueIsStopIteration(cx->exception)) {
                             // pc may point to JSOP_DUP here due to bug 474854.
-                            JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP);
+                            JS_ASSERT(*regs.pc == JSOP_CALL ||
+                                      *regs.pc == JSOP_DUP);
                             cx->throwing = JS_FALSE;
                             cx->exception = JSVAL_VOID;
                             regs.sp[-1] = JSVAL_HOLE;
                         } else {
                             goto error;
                         }
                     }
                     TRACE_0(NativeCallComplete);
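
Both interpreter paths above only fire for self-recursion, i.e. when the callee's script is the caller's script. An illustrative (hedged) summary of the target case:

    /*
     * Example (illustrative only): a directly self-recursive function such as
     *
     *   function fact(n) { return n <= 1 ? 1 : n * fact(n - 1); }
     *
     * On entry, fp->script == fp->down->script, so the interpreter may monitor
     * at the callee's leading trace hint (Monitor_EnterFrame). On return, pc is
     * advanced past JSOP_CALL to the trailing JSOP_TRACE and monitored with
     * Monitor_LeaveFrame, letting the tracer record or execute an up-recursive
     * loop instead of re-entering the interpreter for every popped frame.
     */
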
new file mode 100644
--- /dev/null
+++ b/js/src/jsrecursion.cpp
@@ -0,0 +1,716 @@
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=4 sw=4 et tw=99 ft=cpp:
+ *
+ * ***** BEGIN LICENSE BLOCK *****
+ * Version: MPL 1.1/GPL 2.0/LGPL 2.1
+ *
+ * The contents of this file are subject to the Mozilla Public License Version
+ * 1.1 (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ * http://www.mozilla.org/MPL/
+ *
+ * Software distributed under the License is distributed on an "AS IS" basis,
+ * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
+ * for the specific language governing rights and limitations under the
+ * License.
+ *
+ * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
+ * June 12, 2009.
+ *
+ * The Initial Developer of the Original Code is
+ *   the Mozilla Corporation.
+ *
+ * Contributor(s):
+ *   David Anderson <danderson@mozilla.com>
+ *   Andreas Gal <gal@mozilla.com>
+ *
+ * Alternatively, the contents of this file may be used under the terms of
+ * either of the GNU General Public License Version 2 or later (the "GPL"),
+ * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
+ * in which case the provisions of the GPL or the LGPL are applicable instead
+ * of those above. If you wish to allow use of your version of this file only
+ * under the terms of either the GPL or the LGPL, and not to allow others to
+ * use your version of this file under the terms of the MPL, indicate your
+ * decision by deleting the provisions above and replace them with the notice
+ * and other provisions required by the GPL or the LGPL. If you do not delete
+ * the provisions above, a recipient may use your version of this file under
+ * the terms of any one of the MPL, the GPL or the LGPL.
+ *
+ * ***** END LICENSE BLOCK ***** */
+
+class RecursiveSlotMap : public SlotMap
+{
+  public:
+    RecursiveSlotMap(TraceRecorder& rec)
+      : SlotMap(rec)
+    {
+    }
+
+    JS_REQUIRES_STACK void
+    adjustTypes()
+    {
+    }
+};
+
+#if defined DEBUG
+static JS_REQUIRES_STACK void
+AssertDownFrameIsConsistent(JSContext* cx, VMSideExit* anchor, FrameInfo* fi)
+{
+    JS_ASSERT(anchor->recursive_down);
+    JS_ASSERT(anchor->recursive_down->callerHeight == fi->callerHeight);
+
+    unsigned downPostSlots = fi->callerHeight;
+    JSTraceType* typeMap = fi->get_typemap();
+
+    js_CaptureStackTypes(cx, 1, typeMap);
+    const JSTraceType* m1 = anchor->recursive_down->get_typemap();
+    for (unsigned i = 0; i < downPostSlots; i++) {
+        if (m1[i] == typeMap[i])
+            continue;
+        if (typeMap[i] == TT_INT32 && m1[i] == TT_DOUBLE)
+            continue;
+        JS_NOT_REACHED("invalid RECURSIVE_MISMATCH exit");
+    }
+    JS_ASSERT(memcmp(anchor->recursive_down, fi, sizeof(FrameInfo)) == 0);
+}
+#endif
+
+JS_REQUIRES_STACK VMSideExit*
+TraceRecorder::downSnapshot(FrameInfo* downFrame)
+{
+    JS_ASSERT(!pendingSpecializedNative);
+
+    /* Build the typemap the exit will have. Note extra stack slot for return value. */
+    unsigned downPostSlots = downFrame->callerHeight;
+    unsigned ngslots = treeInfo->globalSlots->length();
+    unsigned exitTypeMapLen = downPostSlots + 1 + ngslots;
+    JSTraceType* exitTypeMap = (JSTraceType*)alloca(sizeof(JSTraceType) * exitTypeMapLen);
+    JSTraceType* typeMap = downFrame->get_typemap();
+    for (unsigned i = 0; i < downPostSlots; i++)
+        exitTypeMap[i] = typeMap[i];
+    exitTypeMap[downPostSlots] = determineSlotType(&stackval(-1));
+    determineGlobalTypes(&exitTypeMap[downPostSlots + 1]);
+
+    /* Construct a new SideExit, unless the typemap is too large to fit in a skip payload. */
+    if (sizeof(VMSideExit) + sizeof(JSTraceType) * exitTypeMapLen >=
+        LirBuffer::MAX_SKIP_PAYLOAD_SZB) {
+        return NULL;
+    }
+
+    VMSideExit* exit = (VMSideExit*)
+        traceMonitor->traceAlloc->alloc(sizeof(VMSideExit) + sizeof(JSTraceType) * exitTypeMapLen);
+
+    memset(exit, 0, sizeof(VMSideExit));
+    exit->from = fragment;
+    exit->calldepth = 0;
+    JS_ASSERT(unsigned(exit->calldepth) == getCallDepth());
+    exit->numGlobalSlots = ngslots;
+    exit->numStackSlots = downPostSlots + 1;
+    exit->numStackSlotsBelowCurrentFrame = cx->fp->down->argv ?
+        nativeStackOffset(&cx->fp->argv[-2]) / sizeof(double) : 0;
+    exit->exitType = UNSTABLE_LOOP_EXIT;
+    exit->block = cx->fp->down->blockChain;
+    exit->pc = downFrame->pc + JSOP_CALL_LENGTH;
+    exit->imacpc = NULL;
+    exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
+    exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
+    exit->nativeCalleeWord = 0;
+    exit->lookupFlags = js_InferFlags(cx, 0);
+    memcpy(exit->fullTypeMap(), exitTypeMap, sizeof(JSTraceType) * exitTypeMapLen);
+#if defined JS_JIT_SPEW
+    TreevisLogExit(cx, exit);
+#endif
+    return exit;
+}
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::upRecursion()
+{
+    JS_ASSERT((JSOp)*cx->fp->down->regs->pc == JSOP_CALL);
+    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->down->script,
+              cx->fp->down->regs->pc)].length == JSOP_CALL_LENGTH);
+
+    JS_ASSERT(callDepth == 0);
+
+    /*
+     * If some operation involving interpreter frame slurping failed, go to
+     * that code right away, and don't bother with emitting the up-recursive
+     * guards again.
+     */
+    if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT ||
+        anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT ||
+        anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) {
+        return InjectStatus(slurpDownFrames(cx->fp->down->regs->pc));
+    }
+
+    jsbytecode* return_pc = cx->fp->down->regs->pc;
+    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
+
+    /*
+     * It is possible that the down frame isn't the same at runtime. It's not
+     * enough to guard on the PC, since the typemap could be different as well.
+     * To deal with this, guard that the FrameInfo on the callstack is 100%
+     * identical.
+     *
+     * Note that although the slot count is called "downPostSlots", it is the
+     * number of slots after the CALL instruction has theoretically popped
+     * callee/this/argv, but before the return value is pushed. This is
+     * intentional, since the FrameInfo pushed by down recursion would not have
+     * the return value yet. Instead, when closing the loop, the return value
+     * becomes the sole stack slot whose type determines type stability.
+     */
+    unsigned totalSlots = NativeStackSlots(cx, 1);
+    unsigned downPostSlots = totalSlots - NativeStackSlots(cx, 0);
+    FrameInfo* fi = (FrameInfo*)alloca(sizeof(FrameInfo) + totalSlots * sizeof(JSTraceType));
+    fi->block = cx->fp->blockChain;
+    fi->pc = (jsbytecode*)return_pc;
+    fi->imacpc = NULL;
+
+    /*
+     * Need to compute this from the down frame, since the stack could have
+     * moved on this one.
+     */
+    fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots;
+    JS_ASSERT(cx->fp->argc == cx->fp->down->argc);
+    fi->set_argc(cx->fp->argc, false);
+    fi->callerHeight = downPostSlots;
+    fi->callerArgc = cx->fp->down->argc;
+
+    if (anchor && anchor->exitType == RECURSIVE_MISMATCH_EXIT) {
+        /*
+         * Case 0: Anchoring off a RECURSIVE_MISMATCH guard. Guard on this FrameInfo.
+         * This is always safe because this point is only reached on simple "call myself"
+         * recursive functions.
+         */
+        #if defined DEBUG
+        AssertDownFrameIsConsistent(cx, anchor, fi);
+        #endif
+        fi = anchor->recursive_down;
+    } else if (recursive_pc != fragment->root->ip) {
+        /*
+         * Case 1: Guess that down-recursion has started to back out; infer
+         * types from the down frame.
+         */
+        js_CaptureStackTypes(cx, 1, fi->get_typemap());
+    } else {
+        /* Case 2: Guess that up-recursion is backing out; infer types from our TreeInfo. */
+        JS_ASSERT(treeInfo->nStackTypes == downPostSlots + 1);
+        JSTraceType* typeMap = fi->get_typemap();
+        for (unsigned i = 0; i < downPostSlots; i++)
+            typeMap[i] = treeInfo->typeMap[i];
+    }
+
+    fi = traceMonitor->frameCache->memoize(fi);
+
+    /*
+     * Guard that there are more recursive frames. If coming from an anchor
+     * where this was already computed, don't bother doing it again.
+     */
+    if (!anchor || anchor->exitType != RECURSIVE_MISMATCH_EXIT) {
+        VMSideExit* exit = snapshot(RECURSIVE_EMPTY_RP_EXIT);
+
+        /* Guard that rp >= sr + 1 */
+        guard(true,
+              lir->ins2(LIR_pge, lirbuf->rp,
+                        lir->ins2(LIR_piadd,
+                                  lir->insLoad(LIR_ldp, lirbuf->state,
+                                               offsetof(InterpState, sor)),
+                                  INS_CONSTWORD(sizeof(FrameInfo*)))),
+              exit);
+    }
+
+    debug_only_printf(LC_TMRecorder, "guardUpRecursive fragment->root=%p fi=%p\n", fragment->root, fi);
+
+    /* Guard that the FrameInfo above is the same FrameInfo pointer. */
+    VMSideExit* exit = snapshot(RECURSIVE_MISMATCH_EXIT);
+    LIns* prev_rp = lir->insLoad(LIR_ldp, lirbuf->rp, -int32_t(sizeof(FrameInfo*)));
+    guard(true, lir->ins2(LIR_peq, prev_rp, INS_CONSTPTR(fi)), exit);
+
+    /*
+     * Now it's time to try and close the loop. Get a special exit that points
+     * at the down frame, after the return has been propagated up.
+     */
+    exit = downSnapshot(fi);
+
+    /* Move the return value down from this frame to the one below it. */
+    rval_ins = get(&stackval(-1));
+    if (isPromoteInt(rval_ins))
+        rval_ins = demoteIns(rval_ins);
+
+    /*
+     * The native stack offset of the return value once this frame has returned, is:
+     *      -treeInfo->nativeStackBase + downPostSlots * sizeof(double)
+     *
+     * Note, not +1, since the offset is 0-based.
+     *
+     * This needs to be adjusted down one frame. The amount to adjust must be
+     * the amount down recursion added, which was just guarded as |downPostSlots|.
+     *
+     * So the offset is:
+     *      -treeInfo->nativeStackBase + downPostSlots * sizeof(double) -
+     *                                   downPostSlots * sizeof(double)
+     * Or:
+     *      -treeInfo->nativeStackBase
+     *
+     * This makes sense because this slot is just above the highest sp for the
+     * down frame.
+     */
+    lir->insStorei(rval_ins, lirbuf->sp, -treeInfo->nativeStackBase);
+
+    /* Adjust stacks. See above for |downPostSlots| reasoning. */
+    lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp,
+                           lir->insImmWord(-int(downPostSlots) * sizeof(double)));
+    lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
+    lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp,
+                           lir->insImmWord(-int(sizeof(FrameInfo*))));
+    lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
+
+    RecursiveSlotMap slotMap(*this);
+    for (unsigned i = 0; i < downPostSlots; i++)
+        slotMap.addSlot(exit->stackType(i));
+    slotMap.addSlot(&stackval(-1));
+    VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
+    if (recursive_pc == (jsbytecode*)fragment->root->ip) {
+        debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n");
+    } else {
+        debug_only_print0(LC_TMTracer, "Compiling up-recursive branch...\n");
+        exit->exitType = RECURSIVE_UNLINKED_EXIT;
+        exit->recursive_pc = recursive_pc;
+    }
+    JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
+    if (treeInfo->recursion != Recursion_Detected)
+        treeInfo->recursion = Recursion_Unwinds;
+    return closeLoop(slotMap, exit);
+}
+
+struct SlurpInfo
+{
+    unsigned curSlot;
+    JSTraceType* typeMap;
+    VMSideExit* exit;
+    unsigned slurpFailSlot;
+};
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
+{
+    /* Bail if the actual argument count doesn't match the function's declared arity. */
+    if (cx->fp->argc != cx->fp->fun->nargs)
+        RETURN_STOP_A("argc != nargs");
+
+    LIns* argv_ins;
+    unsigned frameDepth;
+    unsigned downPostSlots;
+
+    JSStackFrame* fp = cx->fp;
+    LIns* fp_ins = addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)), "fp");
+
+    /*
+     * When first emitting slurp code, do so against the down frame. After
+     * popping the interpreter frame, it is illegal to resume here, as the
+     * down frame has been moved up. So all this code should be skipped if
+     * anchoring off such an exit.
+     */
+    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
+        fp_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, down)), "downFp");
+        fp = fp->down;
+
+        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");
+
+        /* If recovering from a SLURP_MISMATCH, all of this is unnecessary. */
+        if (!anchor || anchor->exitType != RECURSIVE_SLURP_MISMATCH_EXIT) {
+            /* fp->down should not be NULL. */
+            guard(false, lir->ins_peq0(fp_ins), RECURSIVE_LOOP_EXIT);
+
+            /* fp->down->argv should not be NULL. */
+            guard(false, lir->ins_peq0(argv_ins), RECURSIVE_LOOP_EXIT);
+
+            /*
+             * Guard on the script being the same. This might seem unnecessary,
+             * but it lets the recursive loop end cleanly if it doesn't match.
+             * With only the pc check, it is harder to differentiate between
+             * end-of-recursion and recursion-returns-to-different-pc.
+             */
+            guard(true,
+                  lir->ins2(LIR_peq,
+                            addName(lir->insLoad(LIR_ldp,
+                                                 fp_ins,
+                                                 offsetof(JSStackFrame, script)),
+                                    "script"),
+                            INS_CONSTPTR(cx->fp->down->script)),
+                  RECURSIVE_LOOP_EXIT);
+        }
+
+        /* fp->down->regs->pc should be == pc. */
+        guard(true,
+              lir->ins2(LIR_peq,
+                        lir->insLoad(LIR_ldp,
+                                     addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, regs)),
+                                             "regs"),
+                                     offsetof(JSFrameRegs, pc)),
+                        INS_CONSTPTR(return_pc)),
+              RECURSIVE_SLURP_MISMATCH_EXIT);
+
+        /* fp->down->argc should be == argc. */
+        guard(true,
+              lir->ins2(LIR_eq,
+                        addName(lir->insLoad(LIR_ld, fp_ins, offsetof(JSStackFrame, argc)),
+                                "argc"),
+                        INS_CONST(cx->fp->argc)),
+              MISMATCH_EXIT);
+
+        /* Pop the interpreter frame. */
+        LIns* args[] = { lirbuf->state, cx_ins };
+        guard(false, lir->ins_eq0(lir->insCall(&js_PopInterpFrame_ci, args)), MISMATCH_EXIT);
+
+        /* Compute slots for the down frame. */
+        downPostSlots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
+        frameDepth = 1;
+    } else {
+        /* Note: loading argv from fp, not fp->down. */
+        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");
+
+        /* Slots for this frame, minus the return value. */
+        downPostSlots = NativeStackSlots(cx, 0) - 1;
+        frameDepth = 0;
+    }
+
+    /*
+     * This is a special exit used as a template for the stack-slurping code.
+     * LeaveTree will ignore all but the final slot, which contains the return
+     * value. The slurpSlot variable keeps track of the last slot that has been
+     * unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit.
+     */
+    unsigned numGlobalSlots = treeInfo->globalSlots->length();
+    unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots;
+    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
+    LIns* data = lir->insSkip(sizeof(VMSideExit) + sizeof(JSTraceType) * safeSlots);
+    VMSideExit* exit = (VMSideExit*)data->payload();
+    memset(exit, 0, sizeof(VMSideExit));
+    exit->pc = (jsbytecode*)recursive_pc;
+    exit->from = fragment;
+    exit->exitType = RECURSIVE_SLURP_FAIL_EXIT;
+    exit->numStackSlots = downPostSlots + 1;
+    exit->numGlobalSlots = numGlobalSlots;
+    exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
+    exit->recursive_pc = recursive_pc;
+
+    /*
+     * Build the exit typemap. This may capture extra types, but they are
+     * thrown away.
+     */
+    JSTraceType* typeMap = exit->stackTypeMap();
+    jsbytecode* oldpc = cx->fp->regs->pc;
+    cx->fp->regs->pc = exit->pc;
+    js_CaptureStackTypes(cx, frameDepth, typeMap);
+    cx->fp->regs->pc = oldpc;
+    typeMap[downPostSlots] = determineSlotType(&stackval(-1));
+    if (typeMap[downPostSlots] == TT_INT32 &&
+        oracle.isStackSlotUndemotable(cx, downPostSlots, recursive_pc)) {
+        typeMap[downPostSlots] = TT_DOUBLE;
+    }
+    determineGlobalTypes(&typeMap[exit->numStackSlots]);
+#if defined JS_JIT_SPEW
+    TreevisLogExit(cx, exit);
+#endif
+
+    /*
+     * Move return value to the right place, if necessary. The previous store
+     * could have been killed so it is necessary to write it again.
+     */
+    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
+        JS_ASSERT(exit->sp_adj >= int(sizeof(double)));
+        ptrdiff_t actRetOffset = exit->sp_adj - sizeof(double);
+        LIns* rval = get(&stackval(-1));
+        if (typeMap[downPostSlots] == TT_INT32)
+            rval = demoteIns(rval);
+        lir->insStorei(addName(rval, "rval"), lirbuf->sp, actRetOffset);
+    }
+
+    /* Slurp */
+    SlurpInfo info;
+    info.curSlot = 0;
+    info.exit = exit;
+    info.typeMap = typeMap;
+    info.slurpFailSlot = (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) ?
+                         anchor->slurpFailSlot : 0;
+
+    /* callee */
+    slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -2 * ptrdiff_t(sizeof(jsval))),
+              &fp->argv[-2],
+              &info);
+    /* this */
+    slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -1 * ptrdiff_t(sizeof(jsval))),
+              &fp->argv[-1],
+              &info);
+    /* args[0..n] */
+    for (unsigned i = 0; i < JS_MAX(fp->argc, fp->fun->nargs); i++)
+        slurpSlot(lir->insLoad(LIR_ldp, argv_ins, i * sizeof(jsval)), &fp->argv[i], &info);
+    /* argsobj */
+    slurpSlot(addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argsobj)), "argsobj"),
+              &fp->argsobj,
+              &info);
+    /* vars */
+    LIns* slots_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, slots)),
+                              "slots");
+    for (unsigned i = 0; i < fp->script->nfixed; i++)
+        slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval)), &fp->slots[i], &info);
+    /* stack vals */
+    unsigned nfixed = fp->script->nfixed;
+    jsval* stack = StackBase(fp);
+    LIns* stack_ins = addName(lir->ins2(LIR_piadd,
+                                        slots_ins,
+                                        INS_CONSTWORD(nfixed * sizeof(jsval))),
+                              "stackBase");
+    size_t limit = size_t(fp->regs->sp - StackBase(fp));
+    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)
+        limit--;
+    else
+        limit -= fp->fun->nargs + 2;
+    for (size_t i = 0; i < limit; i++)
+        slurpSlot(lir->insLoad(LIR_ldp, stack_ins, i * sizeof(jsval)), &stack[i], &info);
+
+    JS_ASSERT(info.curSlot == downPostSlots);
+
+    /* Jump back to the start */
+    exit = copy(exit);
+    exit->exitType = UNSTABLE_LOOP_EXIT;
+#if defined JS_JIT_SPEW
+    TreevisLogExit(cx, exit);
+#endif
+
+    /* Finally, close the loop. */
+    RecursiveSlotMap slotMap(*this);
+    for (unsigned i = 0; i < downPostSlots; i++)
+        slotMap.addSlot(typeMap[i]);
+    slotMap.addSlot(&stackval(-1));
+    VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
+    debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n");
+    exit = copy(exit);
+    if (exit->recursive_pc == fragment->root->ip)
+        exit->exitType = UNSTABLE_LOOP_EXIT;
+    else
+        exit->exitType = RECURSIVE_UNLINKED_EXIT;
+    debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", exit,
+                      getExitName(exit->exitType));
+    JS_ASSERT(treeInfo->recursion >= Recursion_Unwinds);
+    return closeLoop(slotMap, exit);
+}
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::downRecursion()
+{
+    JSStackFrame* fp = cx->fp;
+    if ((jsbytecode*)fragment->ip < fp->script->code ||
+        (jsbytecode*)fragment->ip >= fp->script->code + fp->script->length) {
+        RETURN_STOP_A("inner recursive call must compile first");
+    }
+
+    /* Adjust the stack by the budget the down-frame needs. */
+    int slots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
+    JS_ASSERT(unsigned(slots) == NativeStackSlots(cx, 1) - fp->argc - 2 - fp->script->nfixed - 1);
+
+    /* Guard that there is enough stack space. */
+    JS_ASSERT(treeInfo->maxNativeStackSlots >= treeInfo->nativeStackBase / sizeof(double));
+    int guardSlots = slots + treeInfo->maxNativeStackSlots -
+                     treeInfo->nativeStackBase / sizeof(double);
+    LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double)));
+    guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), OOM_EXIT);
+
+    /* Guard that there is enough call stack space. */
+    LIns* rp_top = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
+    guard(true, lir->ins2(LIR_plt, rp_top, eor_ins), OOM_EXIT);
+
+    /* Add space for a new JIT frame. */
+    lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(slots * sizeof(double)));
+    lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
+    lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
+    lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
+    --callDepth;
+    clearFrameSlotsFromCache();
+
+    /*
+     * If the callee and caller have identical call sites, this is a down-
+     * recursive loop. Otherwise something special happened. For example, a
+     * recursive call that is unwinding could nest back down recursively again.
+     * In this case, we build a fragment that ideally we'll never invoke
+     * directly, but link from a down-recursive branch. The UNLINKED_EXIT tells
+     * closeLoop() that the peer trees should match the recursive pc, not the
+     * tree pc.
+     */
+    VMSideExit* exit;
+    if ((jsbytecode*)fragment->root->ip == fp->script->code)
+        exit = snapshot(UNSTABLE_LOOP_EXIT);
+    else
+        exit = snapshot(RECURSIVE_UNLINKED_EXIT);
+    exit->recursive_pc = fp->script->code;
+    debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n");
+    JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
+    treeInfo->recursion = Recursion_Detected;
+    return closeLoop(exit);
+}
+
+LIns*
+TraceRecorder::slurpInt32Slot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    guard(true,
+          lir->ins2(LIR_or,
+                    lir->ins2(LIR_peq,
+                              lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
+                              INS_CONSTWORD(JSVAL_DOUBLE)),
+                    lir->ins2(LIR_peq,
+                              lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(1)),
+                              INS_CONSTWORD(1))),
+          exit);
+    LIns* space = lir->insAlloc(sizeof(int32));
+    LIns* args[] = { space, val_ins };
+    LIns* result = lir->insCall(&js_TryUnboxInt32_ci, args);
+    guard(false, lir->ins_eq0(result), exit);
+    LIns* int32_ins = lir->insLoad(LIR_ld, space, 0);
+    return int32_ins;
+}
+
+LIns*
+TraceRecorder::slurpDoubleSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    guard(true,
+          lir->ins2(LIR_or,
+                    lir->ins2(LIR_peq,
+                              lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
+                              INS_CONSTWORD(JSVAL_DOUBLE)),
+                    lir->ins2(LIR_peq,
+                              lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(1)),
+                              INS_CONSTWORD(1))),
+          exit);
+    LIns* args[] = { val_ins };
+    LIns* dbl_ins = lir->insCall(&js_UnboxDouble_ci, args);
+    return dbl_ins;
+}
+
+LIns*
+TraceRecorder::slurpBoolSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    guard(true,
+          lir->ins2(LIR_eq,
+                    lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
+                    INS_CONSTWORD(JSVAL_SPECIAL)),
+          exit);
+    LIns* bool_ins = lir->ins2(LIR_pilsh, val_ins, INS_CONSTWORD(JSVAL_TAGBITS));
+    bool_ins = p2i(bool_ins);
+    return bool_ins;
+}
+
+LIns*
+TraceRecorder::slurpStringSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    guard(true,
+          lir->ins2(LIR_eq,
+                    lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
+                    INS_CONSTWORD(JSVAL_STRING)),
+          exit);
+    LIns* str_ins = lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(~JSVAL_TAGMASK));
+    return str_ins;
+}
+
+LIns*
+TraceRecorder::slurpNullSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    guard(true, lir->ins_peq0(val_ins), exit);
+    return val_ins;
+}
+
+LIns*
+TraceRecorder::slurpObjectSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    /* Must not be NULL */
+    guard(false, lir->ins_peq0(val_ins), exit);
+
+    /* Must be an object */
+    guard(true,
+          lir->ins_peq0(lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK))),
+          exit);
+
+    /* Must NOT have a function class */
+    guard(false,
+          lir->ins2(LIR_peq,
+                    lir->ins2(LIR_piand,
+                              lir->insLoad(LIR_ldp, val_ins, offsetof(JSObject, classword)),
+                              INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
+                    INS_CONSTPTR(&js_FunctionClass)),
+          exit);
+    return val_ins;
+}
+
+LIns*
+TraceRecorder::slurpFunctionSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    /* Must not be NULL */
+    guard(false, lir->ins_peq0(val_ins), exit);
+
+    /* Must be an object */
+    guard(true,
+          lir->ins_peq0(lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK))),
+          exit);
+
+    /* Must have a function class */
+    guard(true,
+          lir->ins2(LIR_peq,
+                    lir->ins2(LIR_piand,
+                              lir->insLoad(LIR_ldp, val_ins, offsetof(JSObject, classword)),
+                              INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
+                    INS_CONSTPTR(&js_FunctionClass)),
+          exit);
+    return val_ins;
+}
+
+JS_REQUIRES_STACK LIns*
+TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
+{
+    switch (exit->slurpType)
+    {
+    case TT_PSEUDOBOOLEAN:
+        return slurpBoolSlot(val_ins, vp, exit);
+    case TT_INT32:
+        return slurpInt32Slot(val_ins, vp, exit);
+    case TT_DOUBLE:
+        return slurpDoubleSlot(val_ins, vp, exit);
+    case TT_STRING:
+        return slurpStringSlot(val_ins, vp, exit);
+    case TT_NULL:
+        return slurpNullSlot(val_ins, vp, exit);
+    case TT_OBJECT:
+        return slurpObjectSlot(val_ins, vp, exit);
+    case TT_FUNCTION:
+        return slurpFunctionSlot(val_ins, vp, exit);
+    default:
+        JS_NOT_REACHED("invalid type in typemap");
+        return NULL;
+    }
+}
+
+JS_REQUIRES_STACK void
+TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, SlurpInfo* info)
+{
+    /* Don't re-read slots that aren't needed. */
+    if (info->curSlot < info->slurpFailSlot) {
+        info->curSlot++;
+        return;
+    }
+    VMSideExit* exit = copy(info->exit);
+    exit->slurpFailSlot = info->curSlot;
+    exit->slurpType = info->typeMap[info->curSlot];
+
+#if defined DEBUG
+    /* Make sure that we don't try to record infinite branches. */
+    JS_ASSERT_IF(anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT &&
+                 info->curSlot == info->slurpFailSlot,
+                 anchor->slurpType != exit->slurpType);
+#endif
+
+    LIns* val = slurpSlot(val_ins, vp, exit);
+    lir->insStorei(val,
+                   lirbuf->sp,
+                   -treeInfo->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double));
+    info->curSlot++;
+}
+
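
jsrecursion.cpp leans on several recursion-specific exit types, presumably declared alongside the other exit types in jstracer.h. A rough guide to how this file uses them, inferred from the guards above:

    /*
     * RECURSIVE_MISMATCH_EXIT       - the FrameInfo atop the call stack isn't the one
     *                                 guarded on; re-enter upRecursion from the anchor.
     * RECURSIVE_EMPTY_RP_EXIT       - no more FrameInfos to pop; fall into the
     *                                 frame-slurping path (slurpDownFrames).
     * RECURSIVE_SLURP_MISMATCH_EXIT - the interpreter frame below doesn't return to the
     *                                 expected pc, so the recursion ends here.
     * RECURSIVE_SLURP_FAIL_EXIT     - a slurped slot failed to unbox as the expected
     *                                 type; resume slurping from that slot.
     * RECURSIVE_UNLINKED_EXIT       - the loop closes against a peer tree keyed by
     *                                 recursive_pc rather than the tree's own ip.
     * RECURSIVE_LOOP_EXIT           - the down frame is missing or belongs to a
     *                                 different script; leave the recursive loop cleanly.
     */
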
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -131,24 +131,27 @@ static const char tagChar[]  = "OIDISIBI
 #define HOTEXIT 1
 
 /* Number of times we try to extend the tree along a side exit. */
 #define MAXEXIT 3
 
 /* Maximum number of peer trees allowed. */
 #define MAXPEERS 9
 
+/* Max number of hits to a RECURSIVE_UNLINKED exit before we trash the tree. */
+#define MAX_RECURSIVE_UNLINK_HITS 64
+
 /* Max call depths for inlining. */
 #define MAX_CALLDEPTH 10
 
 /* Max native stack size. */
-#define MAX_NATIVE_STACK_SLOTS 1024
+#define MAX_NATIVE_STACK_SLOTS 4096
 
 /* Max call stack size. */
-#define MAX_CALL_STACK_ENTRIES 64
+#define MAX_CALL_STACK_ENTRIES 500
 
 /* Max global object size. */
 #define MAX_GLOBAL_SLOTS 4096
 
 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
 #define MAX_INTERP_STACK_BYTES                                                \
     (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
      MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
@@ -1027,21 +1030,21 @@ const uintptr_t HASH_SEED = 5381;
 
 static inline void
 HashAccum(uintptr_t& h, uintptr_t i, uintptr_t mask)
 {
     h = ((h << 5) + h + (mask & i)) & mask;
 }
 
 static JS_REQUIRES_STACK inline int
-StackSlotHash(JSContext* cx, unsigned slot)
+StackSlotHash(JSContext* cx, unsigned slot, const void* pc)
 {
     uintptr_t h = HASH_SEED;
     HashAccum(h, uintptr_t(cx->fp->script), ORACLE_MASK);
-    HashAccum(h, uintptr_t(cx->fp->regs->pc), ORACLE_MASK);
+    HashAccum(h, uintptr_t(pc), ORACLE_MASK);
     HashAccum(h, uintptr_t(slot), ORACLE_MASK);
     return int(h);
 }
 
 static JS_REQUIRES_STACK inline int
 GlobalSlotHash(JSContext* cx, unsigned slot)
 {
     uintptr_t h = HASH_SEED;
@@ -1069,38 +1072,66 @@ Oracle::Oracle()
     _globalDontDemote.set(ORACLE_SIZE-1);
     clear();
 }
 
 /* Tell the oracle that a certain global variable should not be demoted. */
 JS_REQUIRES_STACK void
 Oracle::markGlobalSlotUndemotable(JSContext* cx, unsigned slot)
 {
+    #ifdef DEBUG_dvander
+    printf("MGSU: %d [%08x]: %d\n", slot, GlobalSlotHash(cx, slot),
+           _globalDontDemote.get(GlobalSlotHash(cx, slot)));
+    #endif
     _globalDontDemote.set(GlobalSlotHash(cx, slot));
 }
 
 /* Consult with the oracle whether we shouldn't demote a certain global variable. */
 JS_REQUIRES_STACK bool
 Oracle::isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const
 {
+    #ifdef DEBUG_dvander
+    printf("IGSU: %d [%08x]: %d\n", slot, GlobalSlotHash(cx, slot),
+           _globalDontDemote.get(GlobalSlotHash(cx, slot)));
+    #endif
     return _globalDontDemote.get(GlobalSlotHash(cx, slot));
 }
 
 /* Tell the oracle that a certain slot at a certain stack slot should not be demoted. */
 JS_REQUIRES_STACK void
+Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc)
+{
+    #ifdef DEBUG_dvander
+    printf("MSSU: %p:%d [%08x]: %d\n", pc, slot, StackSlotHash(cx, slot, pc),
+           _stackDontDemote.get(StackSlotHash(cx, slot, pc)));
+    #endif
+    _stackDontDemote.set(StackSlotHash(cx, slot, pc));
+}
+
+JS_REQUIRES_STACK void
 Oracle::markStackSlotUndemotable(JSContext* cx, unsigned slot)
 {
-    _stackDontDemote.set(StackSlotHash(cx, slot));
+    markStackSlotUndemotable(cx, slot, cx->fp->regs->pc);
 }
 
 /* Consult with the oracle whether we shouldn't demote a certain slot. */
 JS_REQUIRES_STACK bool
+Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const
+{
+    #ifdef DEBUG_dvander
+    printf("ISSU: %p:%d [%08x]: %d\n", pc, slot, StackSlotHash(cx, slot, pc),
+           _stackDontDemote.get(StackSlotHash(cx, slot, pc)));
+    #endif
+    return _stackDontDemote.get(StackSlotHash(cx, slot, pc));
+}
+
+JS_REQUIRES_STACK bool
 Oracle::isStackSlotUndemotable(JSContext* cx, unsigned slot) const
 {
-    return _stackDontDemote.get(StackSlotHash(cx, slot));
+    return isStackSlotUndemotable(cx, slot, cx->fp->regs->pc);
 }
 
 /* Tell the oracle that a certain slot at a certain bytecode location should not be demoted. */
 void
 Oracle::markInstructionUndemotable(jsbytecode* pc)
 {
     _pcDontDemote.set(PCHash(pc));
 }
@@ -1127,26 +1158,44 @@ MarkSlotUndemotable(JSContext* cx, TreeI
         oracle.markStackSlotUndemotable(cx, slot);
         return;
     }
 
     uint16* gslots = ti->globalSlots->data();
     oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
 
+JS_REQUIRES_STACK static JS_INLINE void
+MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* pc)
+{
+    if (slot < ti->nStackTypes) {
+        oracle.markStackSlotUndemotable(cx, slot, pc);
+        return;
+    }
+
+    uint16* gslots = ti->globalSlots->data();
+    oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
+}
+
 static JS_REQUIRES_STACK inline bool
-IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
+IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot, const void* ip)
 {
     if (slot < ti->nStackTypes)
-        return oracle.isStackSlotUndemotable(cx, slot);
+        return oracle.isStackSlotUndemotable(cx, slot, ip);
 
     uint16* gslots = ti->globalSlots->data();
     return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
 
+static JS_REQUIRES_STACK inline bool
+IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
+{
+    return IsSlotUndemotable(cx, ti, slot, cx->fp->regs->pc);
+}
+
 class FrameInfoCache
 {
     struct Entry : public JSDHashEntryHdr
     {
         FrameInfo *fi;
     };
 
     static JSBool
@@ -1235,18 +1284,34 @@ struct PCHashEntry : public JSDHashEntry
 };
 
 #define PC_HASH_COUNT 1024
 
 static void
 Blacklist(jsbytecode* pc)
 {
     AUDIT(blacklisted);
-    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP);
-    *pc = JSOP_NOP;
+    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP || *pc == JSOP_CALL);
+    if (*pc == JSOP_CALL) {
+        JS_ASSERT(*(pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
+                  *(pc + JSOP_CALL_LENGTH) == JSOP_NOP);
+        *(pc + JSOP_CALL_LENGTH) = JSOP_NOP;
+    } else if (*pc == JSOP_TRACE) {
+        *pc = JSOP_NOP;
+    }
+}
+
+static bool
+IsBlacklisted(jsbytecode* pc)
+{
+    if (*pc == JSOP_NOP)
+        return true;
+    if (*pc == JSOP_CALL)
+        return *(pc + JSOP_CALL_LENGTH) == JSOP_NOP;
+    return false;
 }
 
 static void
 Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree = NULL)
 {
     /* N.B. This code path cannot assume the recorder is/is not alive. */
     JSDHashTable *table = &JS_TRACE_MONITOR(cx).recordAttempts;
 
@@ -1419,18 +1484,19 @@ AssertTreeIsUnique(JSTraceMonitor* tm, V
 }
 #endif
 
 static void
 AttemptCompilation(JSContext *cx, JSTraceMonitor* tm, JSObject* globalObj, jsbytecode* pc,
                    uint32 argc)
 {
     /* If we already permanently blacklisted the location, undo that. */
-    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE);
-    *pc = JSOP_TRACE;
+    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE || *pc == JSOP_CALL);
+    if (*pc == JSOP_NOP)
+        *pc = JSOP_TRACE;
     ResetRecordingAttempts(cx, pc);
 
     /* Breathe new life into all peer fragments at the designated loop header. */
     VMFragment* f = (VMFragment*)getLoop(tm, pc, globalObj, OBJ_SHAPE(globalObj), argc);
     if (!f) {
         /*
          * If the global object's shape changed, we can't easily find the
          * corresponding loop header via a hash table lookup. In this
@@ -1991,17 +2057,17 @@ VisitSlots(Visitor &visitor, JSContext *
            const SlotList& slots)
 {
     VisitSlots(visitor, cx, JS_GetGlobalForObject(cx, cx->fp->scopeChain),
                callDepth, slots.length(), slots.data());
 }
 
 
 class SlotVisitorBase {
-#ifdef JS_JIT_SPEW
+#if defined JS_JIT_SPEW
 protected:
     char const *mStackSlotKind;
 public:
     SlotVisitorBase() : mStackSlotKind(NULL) {}
     JS_ALWAYS_INLINE const char *stackSlotKind() { return mStackSlotKind; }
     JS_ALWAYS_INLINE void setStackSlotKind(char const *k) {
         mStackSlotKind = k;
     }
@@ -2213,21 +2279,23 @@ SpecializeTreesToMissingGlobals(JSContex
 }
 
 static void
 TrashTree(JSContext* cx, Fragment* f);
 
 JS_REQUIRES_STACK
 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
         TreeInfo* ti, unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
-        VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc)
+        VMSideExit* innermostNestedGuard, jsbytecode* outer, uint32 outerArgc,
+        MonitorReason reason)
     : tempAlloc(*JS_TRACE_MONITOR(cx).tempAlloc),
       mark(*JS_TRACE_MONITOR(cx).traceAlloc),
       whichTreesToTrash(&tempAlloc),
-      cfgMerges(&tempAlloc)
+      cfgMerges(&tempAlloc),
+      monitorReason(reason)
 {
     JS_ASSERT(!_fragment->vmprivate && ti && cx->fp->regs->pc == (jsbytecode*)_fragment->ip);
     /* Reset the fragment state we care about in case we got a recycled fragment.
        This includes resetting any profiling data we might have accumulated. */
     _fragment->lastIns = NULL;
     verbose_only( _fragment->profCount = 0; )
     verbose_only( _fragment->nStaticExits = 0; )
     verbose_only( _fragment->nCodeBytes = 0; )
@@ -2327,16 +2395,25 @@ TraceRecorder::TraceRecorder(JSContext* 
 
     /* If we came from exit, we might not have enough global types. */
     if (ti->globalSlots->length() > ti->nGlobalTypes())
         SpecializeTreesToMissingGlobals(cx, globalObj, ti);
 
     /* read into registers all values on the stack and all globals we know so far */
     import(treeInfo, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
 
+    /*
+     * If slurping failed, there's no reason to start recording again. Emit LIR
+     * to capture the rest of the slots, then immediately compile. 
+     */
+    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) {
+        slurpDownFrames((jsbytecode*)anchor->recursive_pc - JSOP_CALL_LENGTH);
+        return;
+    }
+
     if (fragment == fragment->root) {
         /*
          * We poll the operation callback request flag. It is updated asynchronously whenever
          * the callback is to be invoked.
          */
         LIns* x = lir->insLoad(LIR_ld, cx_ins, offsetof(JSContext, operationCallbackFlag));
         guard(true, lir->ins_eq0(x), snapshot(TIMEOUT_EXIT));
     }
@@ -2901,42 +2978,51 @@ public:
         debug_only_printf(LC_TMTracer, "global%d=", n);
         NativeToValue(mCx, *vp, *mTypeMap++, &mGlobal[slot]);
     }
 };
 
 class FlushNativeStackFrameVisitor : public SlotVisitorBase
 {
     JSContext *mCx;
+    const JSTraceType *mInitTypeMap;
     const JSTraceType *mTypeMap;
     double *mStack;
     jsval *mStop;
+    unsigned mIgnoreSlots;
 public:
     FlushNativeStackFrameVisitor(JSContext *cx,
                                  const JSTraceType *typeMap,
                                  double *stack,
-                                 jsval *stop) :
+                                 jsval *stop,
+                                 unsigned ignoreSlots) :
         mCx(cx),
+        mInitTypeMap(typeMap),
         mTypeMap(typeMap),
         mStack(stack),
-        mStop(stop)
+        mStop(stop),
+        mIgnoreSlots(ignoreSlots)
     {}
 
     const JSTraceType* getTypeMap()
     {
         return mTypeMap;
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp) {
         for (size_t i = 0; i < count; ++i) {
             if (vp == mStop)
                 return false;
             debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
-            NativeToValue(mCx, *vp++, *mTypeMap++, mStack++);
+            if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots)
+                NativeToValue(mCx, *vp, *mTypeMap, mStack);
+            vp++;
+            mTypeMap++;
+            mStack++;
         }
         return true;
     }
 };
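The new ignoreSlots parameter exists so that, on a RECURSIVE_SLURP_FAIL_EXIT, slots that were already slurped are not written back a second time: the visitor still advances all three cursors in step but only stores values once it is past the ignored prefix. A simplified standalone model of that skip logic; the types and the WriteBack helper are placeholders, not the real NativeToValue:

    #include <cstdio>
    #include <vector>

    // Placeholder for NativeToValue(): convert one native slot back into a value.
    static void WriteBack(double native, double* slot) { *slot = native; }

    // Write |stack| back into |vals|, skipping the first |ignoreSlots| entries
    // but keeping every cursor in step, as the visitor does.
    static void FlushFrame(const std::vector<double>& stack,
                           std::vector<double>& vals, unsigned ignoreSlots) {
        const double* nat = stack.data();
        double* vp = vals.data();
        for (unsigned i = 0; i < stack.size(); ++i) {
            if (i >= ignoreSlots)
                WriteBack(*nat, vp);
            ++nat;
            ++vp;
        }
    }

    int main() {
        std::vector<double> native = { 1, 2, 3, 4 };
        std::vector<double> vals   = { 0, 0, 0, 0 };
        FlushFrame(native, vals, 2);             // first two slots left untouched
        for (double v : vals) printf("%g ", v);  // prints: 0 0 3 4
        printf("\n");
    }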
 
 /* Box the given native frame into a JS frame. This is infallible. */
 static JS_REQUIRES_STACK void
 FlushNativeGlobalFrame(JSContext *cx, double *global, unsigned ngslots,
@@ -3206,22 +3292,22 @@ GetClosureVar(JSContext* cx, JSObject* c
  * @param np pointer to the native stack.  We want to copy values from here to
  *           the JS stack as needed.
  * @param stopFrame if non-null, this frame and everything above it should not
  *                  be restored.
  * @return the number of things we popped off of np.
  */
 static JS_REQUIRES_STACK int
 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSTraceType* mp, double* np,
-                      JSStackFrame* stopFrame)
+                      JSStackFrame* stopFrame, unsigned ignoreSlots)
 {
     jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
 
     /* Root all string and object references first (we don't need to call the GC for this). */
-    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt);
+    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt, ignoreSlots);
     VisitStackSlots(visitor, cx, callDepth);
 
     // Restore thisv from the now-restored argv[-1] in each pending frame.
     // Keep in mind that we didn't restore frames at stopFrame and above!
     // Scope to keep |fp| from leaking into the macros we're using.
     {
         unsigned n = callDepth+1; // +1 to make sure we restore the entry frame
         JSStackFrame* fp = cx->fp;
@@ -3483,26 +3569,33 @@ TraceRecorder::import(TreeInfo* treeInfo
     }
     JS_ASSERT(ngslots == treeInfo->nGlobalTypes());
     ptrdiff_t offset = -treeInfo->nativeStackBase;
 
     /*
      * Check whether there are any values on the stack we have to unbox and do
      * that first before we waste any time fetching the state from the stack.
      */
-    ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, offset, typeMap);
-    VisitStackSlots(boxedStackVisitor, cx, callDepth);
+    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
+        ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, offset, typeMap);
+        VisitStackSlots(boxedStackVisitor, cx, callDepth);
+    }
 
     ImportGlobalSlotVisitor globalVisitor(*this, lirbuf->state, globalTypeMap);
     VisitGlobalSlots(globalVisitor, cx, globalObj, ngslots,
                      treeInfo->globalSlots->data());
 
-    ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset,
-                                                      typeMap);
-    VisitStackSlots(unboxedStackVisitor, cx, callDepth);
+    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
+        ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset,
+                                                          typeMap);
+        VisitStackSlots(unboxedStackVisitor, cx, callDepth);
+    } else {
+        import(sp, nativeStackOffset(&stackval(-1)), &stackval(-1),
+               typeMap[treeInfo->nStackTypes - 1], "retval", 0, cx->fp);
+    }
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::isValidSlot(JSScope* scope, JSScopeProperty* sprop)
 {
     uint32 setflags = (js_CodeSpec[*cx->fp->regs->pc].format & (JOF_SET | JOF_INCDEC | JOF_FOR));
 
     if (setflags) {
@@ -3869,17 +3962,17 @@ TraceRecorder::snapshot(ExitType exitTyp
      * When calling a _FAIL native, make the snapshot's pc point to the next
      * instruction after the CALL or APPLY. Even on failure, a _FAIL native
      * must not be called again from the interpreter.
      */
     bool resumeAfter = (pendingSpecializedNative &&
                         JSTN_ERRTYPE(pendingSpecializedNative) == FAIL_STATUS);
     if (resumeAfter) {
         JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY || *pc == JSOP_NEW ||
-                  *pc == JSOP_SETPROP || *pc == JSOP_SETNAME || *pc == JSOP_SETMETHOD);
+                  *pc == JSOP_SETPROP || *pc == JSOP_SETNAME);
         pc += cs.length;
         regs->pc = pc;
         MUST_FLOW_THROUGH("restore_pc");
     }
 
     /*
      * Generate the entry map for the (possibly advanced) pc and stash it in
      * the trace.
@@ -4237,31 +4330,34 @@ enum TypeCheckResult
 };
 
 class SlotMap : public SlotVisitorBase
 {
   public:
     struct SlotInfo
     {
         SlotInfo()
-            : v(0), promoteInt(false), lastCheck(TypeCheck_Bad)
+          : v(NULL), promoteInt(false), lastCheck(TypeCheck_Bad)
         {}
         SlotInfo(jsval* v, bool promoteInt)
-          : v(v), promoteInt(promoteInt), lastCheck(TypeCheck_Bad)
+          : v(v), promoteInt(promoteInt), lastCheck(TypeCheck_Bad), type(getCoercedType(*v))
+        {}
+        SlotInfo(JSTraceType t)
+          : v(NULL), promoteInt(t == TT_INT32), lastCheck(TypeCheck_Bad), type(t)
         {}
         jsval           *v;
         bool            promoteInt;
         TypeCheckResult lastCheck;
+        JSTraceType     type;
     };
 
-    SlotMap(TraceRecorder& rec, unsigned slotOffset)
+    SlotMap(TraceRecorder& rec)
         : mRecorder(rec),
           mCx(rec.cx),
-          slots(NULL),
-          slotOffset(slotOffset)
+          slots(NULL)
     {
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     visitGlobalSlot(jsval *vp, unsigned n, unsigned slot)
     {
         addSlot(vp);
     }
@@ -4292,119 +4388,114 @@ class SlotMap : public SlotVisitorBase
      * TypeConsensus_Bad:       Types are not compatible. Individual type check results are undefined.
      * TypeConsensus_Undemotes: Types would be compatible if slots were marked as undemotable
      *                          before recording began. Caller can go through slot list and mark
      *                          such slots as undemotable.
      */
     JS_REQUIRES_STACK TypeConsensus
     checkTypes(TreeInfo* ti)
     {
-        if (ti->typeMap.length() < slotOffset || length() != ti->typeMap.length() - slotOffset)
+        if (length() != ti->typeMap.length())
             return TypeConsensus_Bad;
 
         bool has_undemotes = false;
         for (unsigned i = 0; i < length(); i++) {
-            TypeCheckResult result = checkType(i, ti->typeMap[i + slotOffset]);
+            TypeCheckResult result = checkType(i, ti->typeMap[i]);
             if (result == TypeCheck_Bad)
                 return TypeConsensus_Bad;
             if (result == TypeCheck_Undemote)
                 has_undemotes = true;
             slots[i].lastCheck = result;
         }
         if (has_undemotes)
             return TypeConsensus_Undemotes;
         return TypeConsensus_Okay;
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     addSlot(jsval* vp)
     {
-        slots.add(SlotInfo(vp, isNumber(*vp) && isPromoteInt(mRecorder.get(vp))));
+        slots.add(SlotInfo(vp, isPromoteInt(mRecorder.get(vp))));
+    }
+
+    JS_REQUIRES_STACK JS_ALWAYS_INLINE void
+    addSlot(JSTraceType t)
+    {
+        slots.add(SlotInfo(t));
     }
 
     JS_REQUIRES_STACK void
     markUndemotes()
     {
         for (unsigned i = 0; i < length(); i++) {
             if (get(i).lastCheck == TypeCheck_Undemote)
-                MarkSlotUndemotable(mRecorder.cx, mRecorder.treeInfo, slotOffset + i);
+                MarkSlotUndemotable(mRecorder.cx, mRecorder.treeInfo, i);
         }
     }
 
     JS_REQUIRES_STACK virtual void
     adjustTypes()
     {
         for (unsigned i = 0; i < length(); i++) {
             SlotInfo& info = get(i);
             JS_ASSERT(info.lastCheck != TypeCheck_Undemote && info.lastCheck != TypeCheck_Bad);
             if (info.lastCheck == TypeCheck_Promote) {
-                JS_ASSERT(isNumber(*info.v));
+                JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
                 mRecorder.set(info.v, mRecorder.f2i(mRecorder.get(info.v)));
             } else if (info.lastCheck == TypeCheck_Demote) {
-                JS_ASSERT(isNumber(*info.v));
+                JS_ASSERT(info.type == TT_INT32 || info.type == TT_DOUBLE);
                 JS_ASSERT(mRecorder.get(info.v)->isQuad());
 
                 /* Never demote this final i2f. */
                 mRecorder.set(info.v, mRecorder.get(info.v), false, false);
             }
         }
     }
   private:
     TypeCheckResult
     checkType(unsigned i, JSTraceType t)
     {
         debug_only_printf(LC_TMTracer,
                           "checkType slot %d: interp=%c typemap=%c isNum=%d promoteInt=%d\n",
                           i,
-                          typeChar[getCoercedType(*slots[i].v)],
+                          typeChar[slots[i].type],
                           typeChar[t],
-                          isNumber(*slots[i].v),
+                          slots[i].type == TT_INT32 || slots[i].type == TT_DOUBLE,
                           slots[i].promoteInt);
         switch (t) {
           case TT_INT32:
-            if (!isNumber(*slots[i].v))
+            if (slots[i].type != TT_INT32 && slots[i].type != TT_DOUBLE)
                 return TypeCheck_Bad; /* Not a number? Type mismatch. */
             /* This is always a type mismatch, we can't close a double to an int. */
             if (!slots[i].promoteInt)
                 return TypeCheck_Undemote;
             /* Looks good, slot is an int32, the last instruction should be promotable. */
-            JS_ASSERT(isInt32(*slots[i].v) && slots[i].promoteInt);
-            return TypeCheck_Promote;
+            JS_ASSERT_IF(slots[i].v, isInt32(*slots[i].v) && slots[i].promoteInt);
+            return slots[i].v ? TypeCheck_Promote : TypeCheck_Okay;
           case TT_DOUBLE:
-            if (!isNumber(*slots[i].v))
+            if (slots[i].type != TT_INT32 && slots[i].type != TT_DOUBLE)
                 return TypeCheck_Bad; /* Not a number? Type mismatch. */
             if (slots[i].promoteInt)
-                return TypeCheck_Demote;
+                return slots[i].v ? TypeCheck_Demote : TypeCheck_Bad;
             return TypeCheck_Okay;
-          case TT_NULL:
-            return JSVAL_IS_NULL(*slots[i].v) ? TypeCheck_Okay : TypeCheck_Bad;
-          case TT_FUNCTION:
-            return !JSVAL_IS_PRIMITIVE(*slots[i].v) &&
-                   HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*slots[i].v)) ?
-                   TypeCheck_Okay : TypeCheck_Bad;
-          case TT_OBJECT:
-            return !JSVAL_IS_PRIMITIVE(*slots[i].v) &&
-                   !HAS_FUNCTION_CLASS(JSVAL_TO_OBJECT(*slots[i].v)) ?
-                   TypeCheck_Okay : TypeCheck_Bad;
           default:
-            return getCoercedType(*slots[i].v) == t ? TypeCheck_Okay : TypeCheck_Bad;
+            return slots[i].type == t ? TypeCheck_Okay : TypeCheck_Bad;
         }
         JS_NOT_REACHED("shouldn't fall through type check switch");
     }
   protected:
     TraceRecorder& mRecorder;
     JSContext* mCx;
     Queue<SlotInfo> slots;
-    unsigned   slotOffset;
 };
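With slots now carrying a recorded JSTraceType, checkTypes can run even when there is no live jsval (slots added via addSlot(JSTraceType) have v == NULL, and only live slots can be promoted or demoted). The per-slot decision reduces to comparing the recorded type against the tree's typemap entry; a small standalone model of that comparison, using stand-in enums rather than the real JSTraceType values:

    #include <cstdio>

    // Stand-ins for the trace types and check results used by SlotMap.
    enum TraceType { T_INT32, T_DOUBLE, T_OBJECT };
    enum Check { Ok, Promote, Demote, Undemote, Bad };

    // |slotType| is what the recorder saw, |promoteInt| whether the current LIR
    // value is a promotable int, |treeType| what the compiled tree expects.
    static Check CheckSlot(TraceType slotType, bool promoteInt, TraceType treeType,
                           bool haveLiveValue) {
        bool isNum = (slotType == T_INT32 || slotType == T_DOUBLE);
        switch (treeType) {
          case T_INT32:
            if (!isNum) return Bad;
            if (!promoteInt) return Undemote;        // a double can't close to an int
            return haveLiveValue ? Promote : Ok;
          case T_DOUBLE:
            if (!isNum) return Bad;
            if (promoteInt) return haveLiveValue ? Demote : Bad;
            return Ok;
          default:
            return slotType == treeType ? Ok : Bad;
        }
    }

    int main() {
        printf("%d\n", CheckSlot(T_INT32, true, T_DOUBLE, true));   // Demote
        printf("%d\n", CheckSlot(T_DOUBLE, false, T_INT32, true));  // Undemote
    }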
 
 class DefaultSlotMap : public SlotMap
 {
   public:
-    DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr, 0)
+    DefaultSlotMap(TraceRecorder& tr) : SlotMap(tr)
     {
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
     visitStackSlots(jsval *vp, size_t count, JSStackFrame* fp)
     {
         for (size_t i = 0; i < count; i++)
             addSlot(&vp[i]);
@@ -4427,23 +4518,27 @@ TraceRecorder::selfTypeStability(SlotMap
      */
     if (consensus == TypeConsensus_Undemotes)
         slotMap.markUndemotes();
 
     return consensus;
 }
 
 JS_REQUIRES_STACK TypeConsensus
-TraceRecorder::peerTypeStability(SlotMap& slotMap, VMFragment** pPeer)
+TraceRecorder::peerTypeStability(SlotMap& slotMap, const void* ip, VMFragment** pPeer)
 {
     /* See if there are any peers that would make this stable */
     VMFragment* root = (VMFragment*)fragment->root;
-    VMFragment* peer = getLoop(traceMonitor, root->ip, root->globalObj, root->globalShape,
+    VMFragment* peer = getLoop(traceMonitor, ip, root->globalObj, root->globalShape,
                                root->argc);
-    JS_ASSERT(peer != NULL);
+
+    /* This condition is possible with recursion */
+    JS_ASSERT_IF(!peer, fragment->root->ip != ip);
+    if (!peer)
+        return TypeConsensus_Bad;
     bool onlyUndemotes = false;
     for (; peer != NULL; peer = (VMFragment*)peer->peer) {
         if (!peer->vmprivate || peer == fragment)
             continue;
         debug_only_printf(LC_TMTracer, "Checking type stability against peer=%p\n", (void*)peer);
         TypeConsensus consensus = slotMap.checkTypes((TreeInfo*)peer->vmprivate);
         if (consensus == TypeConsensus_Okay) {
             *pPeer = peer;
@@ -4455,57 +4550,70 @@ TraceRecorder::peerTypeStability(SlotMap
         if (consensus == TypeConsensus_Undemotes)
             onlyUndemotes = true;
     }
 
     return onlyUndemotes ? TypeConsensus_Undemotes : TypeConsensus_Bad;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::closeLoop(TypeConsensus &consensus)
+TraceRecorder::closeLoop()
+{
+    return closeLoop(snapshot(UNSTABLE_LOOP_EXIT));
+}
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::closeLoop(VMSideExit* exit)
 {
     DefaultSlotMap slotMap(*this);
     VisitSlots(slotMap, cx, 0, *treeInfo->globalSlots);
-    return closeLoop(slotMap, snapshot(UNSTABLE_LOOP_EXIT), consensus);
+    return closeLoop(slotMap, exit);
 }
 
 /*
  * Complete and compile a trace and link it to the existing tree if
  * appropriate.  Returns ARECORD_ABORTED or ARECORD_STOP, depending on whether
  * the recorder was deleted. Outparam is always set.
  */
 JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus& consensus)
+TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
 {
     /*
      * We should have arrived back at the loop header, and hence we don't want
      * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
      * case this loop was blacklisted in the meantime, JSOP_NOP.
      */
-    JS_ASSERT((*cx->fp->regs->pc == JSOP_TRACE || *cx->fp->regs->pc == JSOP_NOP) &&
-              !cx->fp->imacpc);
+    JS_ASSERT((*cx->fp->regs->pc == JSOP_TRACE || *cx->fp->regs->pc == JSOP_NOP ||
+               *cx->fp->regs->pc == JSOP_RETURN) && !cx->fp->imacpc);
 
     if (callDepth != 0) {
         debug_only_print0(LC_TMTracer,
                           "Blacklisted: stack depth mismatch, possible recursion.\n");
         Blacklist((jsbytecode*) fragment->root->ip);
         trashSelf = true;
-        consensus = TypeConsensus_Bad;
         return ARECORD_STOP;
     }
 
-    JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT);
-    JS_ASSERT(exit->numStackSlots == treeInfo->nStackTypes);
+    JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
+                 exit->numStackSlots == treeInfo->nStackTypes);
+    JS_ASSERT_IF(exit->exitType != UNSTABLE_LOOP_EXIT, exit->exitType == RECURSIVE_UNLINKED_EXIT);
+    JS_ASSERT_IF(exit->exitType == RECURSIVE_UNLINKED_EXIT,
+                 exit->recursive_pc != fragment->root->ip);
 
     VMFragment* peer = NULL;
     VMFragment* root = (VMFragment*)fragment->root;
 
-    consensus = selfTypeStability(slotMap);
+    TypeConsensus consensus = TypeConsensus_Bad;
+
+    if (exit->exitType == UNSTABLE_LOOP_EXIT)
+        consensus = selfTypeStability(slotMap);
     if (consensus != TypeConsensus_Okay) {
-        TypeConsensus peerConsensus = peerTypeStability(slotMap, &peer);
+        const void* ip = exit->exitType == RECURSIVE_UNLINKED_EXIT ?
+                         exit->recursive_pc : fragment->root->ip;
+        TypeConsensus peerConsensus = peerTypeStability(slotMap, ip, &peer);
         /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
         if (peerConsensus != TypeConsensus_Bad)
             consensus = peerConsensus;
     }
 
 #if DEBUG
     if (consensus != TypeConsensus_Okay || peer)
         AUDIT(unstableLoopVariable);
@@ -4618,17 +4726,17 @@ TypeMapLinkability(JSContext* cx, const 
 {
     const TypeMap& peerMap = peer->getTreeInfo()->typeMap;
     unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
     TypeConsensus consensus = TypeConsensus_Okay;
     for (unsigned i = 0; i < minSlots; i++) {
         if (typeMap[i] == peerMap[i])
             continue;
         if (typeMap[i] == TT_INT32 && peerMap[i] == TT_DOUBLE &&
-            IsSlotUndemotable(cx, peer->getTreeInfo(), i)) {
+            IsSlotUndemotable(cx, peer->getTreeInfo(), i, peer->ip)) {
             consensus = TypeConsensus_Undemotes;
         } else {
             return TypeConsensus_Bad;
         }
     }
     return consensus;
 }
 
@@ -4660,16 +4768,21 @@ TraceRecorder::joinEdgesToEntry(VMFragme
     Queue<unsigned> undemotes(NULL);
 
     for (VMFragment* peer = peer_root; peer; peer = (VMFragment*)peer->peer) {
         TreeInfo* ti = peer->getTreeInfo();
         if (!ti)
             continue;
         UnstableExit* uexit = ti->unstableExits;
         while (uexit != NULL) {
+            /* :TODO: these exits go somewhere else. */
+            if (uexit->exit->exitType == RECURSIVE_UNLINKED_EXIT) {
+                uexit = uexit->next;
+                continue;
+            }
             /* Build the full typemap for this unstable exit */
             FullMapFromExit(typeMap, uexit->exit);
             /* Check its compatibility against this tree */
             TypeConsensus consensus = TypeMapLinkability(cx, typeMap, (VMFragment*)fragment->root);
             JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
             if (consensus == TypeConsensus_Okay) {
                 debug_only_printf(LC_TMTracer,
                                   "Joining type-stable trace to target exit %p->%p.\n",
@@ -4705,16 +4818,19 @@ TraceRecorder::endLoop(VMSideExit* exit)
 {
     if (callDepth != 0) {
         debug_only_print0(LC_TMTracer, "Blacklisted: stack depth mismatch, possible recursion.\n");
         Blacklist((jsbytecode*) fragment->root->ip);
         trashSelf = true;
         return ARECORD_STOP;
     }
 
+    if (monitorReason != Monitor_Branch)
+        RETURN_STOP_A("control flow should have been recursive");
+
     fragment->lastIns =
         lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
 
     CHECK_STATUS_A(compile(traceMonitor));
 
     debug_only_printf(LC_TMTreeVis, "TREEVIS ENDLOOP EXIT=%p\n", (void*)exit);
 
     VMFragment* root = (VMFragment*)fragment->root;
@@ -4975,18 +5091,17 @@ TraceRecorder::checkTraceEnd(jsbytecode 
         if (loop) {
             JS_ASSERT(!cx->fp->imacpc && (pc == cx->fp->regs->pc || pc == cx->fp->regs->pc + 1));
             bool fused = pc != cx->fp->regs->pc;
             JSFrameRegs orig = *cx->fp->regs;
 
             cx->fp->regs->pc = (jsbytecode*)fragment->root->ip;
             cx->fp->regs->sp -= fused ? 2 : 1;
 
-            TypeConsensus consensus;
-            AbortableRecordingStatus ars = closeLoop(consensus);
+            AbortableRecordingStatus ars = closeLoop();
 
             *cx->fp->regs = orig;
             return ars;
         } else {
             return endLoop();
         }
     }
     return ARECORD_CONTINUE;
@@ -5146,30 +5261,31 @@ CheckGlobalObjectShape(JSContext* cx, JS
                       globalShape);
     ResetJIT(cx, FR_GLOBALS_FULL);
     return false;
 }
 
 static JS_REQUIRES_STACK bool
 StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
               unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
-              VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
+              VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc,
+              MonitorReason reason)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     if (JS_TRACE_MONITOR(cx).needFlush) {
         ResetJIT(cx, FR_DEEP_BAIL);
         return false;
     }
 
     JS_ASSERT(f->root != f || !cx->fp->imacpc);
 
     /* Start recording if no exception during construction. */
     tm->recorder = new TraceRecorder(cx, anchor, f, ti,
                                      stackSlots, ngslots, typeMap,
-                                     expectedInnerExit, outer, outerArgc);
+                                     expectedInnerExit, outer, outerArgc, reason);
 
     if (cx->throwing) {
         js_AbortRecording(cx, "setting up recorder failed");
         return false;
     }
 
     /* Clear any leftover error state. */
     Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
@@ -5398,17 +5514,17 @@ SynthesizeSlowNativeFrame(InterpState& s
 
     ifp->mark = mark;
     cx->fp = fp;
 }
 
 static JS_REQUIRES_STACK bool
 RecordTree(JSContext* cx, JSTraceMonitor* tm, VMFragment* f, jsbytecode* outer,
            uint32 outerArgc, JSObject* globalObj, uint32 globalShape,
-           SlotList* globalSlots, uint32 argc)
+           SlotList* globalSlots, uint32 argc, MonitorReason reason)
 {
     JS_ASSERT(f->root == f);
 
     /* Make sure the global type map didn't change on us. */
     if (!CheckGlobalObjectShape(cx, tm, globalObj)) {
         Backoff(cx, (jsbytecode*) f->root->ip);
         return false;
     }
@@ -5475,38 +5591,59 @@ RecordTree(JSContext* cx, JSTraceMonitor
     ti->maxNativeStackSlots = entryNativeStackSlots;
     ti->maxCallDepth = 0;
     ti->script = cx->fp->script;
 
     /* Recording primary trace. */
     return StartRecorder(cx, NULL, f, ti,
                          ti->nStackTypes,
                          ti->globalSlots->length(),
-                         ti->typeMap.data(), NULL, outer, outerArgc);
+                         ti->typeMap.data(), NULL, outer, outerArgc, reason);
 }
 
 static JS_REQUIRES_STACK TypeConsensus
 FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, VMFragment** peerp)
 {
     VMFragment* from = exit->root();
     TreeInfo* from_ti = from->getTreeInfo();
 
     JS_ASSERT(from->code());
 
     TypeMap typeMap(NULL);
     FullMapFromExit(typeMap, exit);
     JS_ASSERT(typeMap.length() - exit->numStackSlots == from_ti->nGlobalTypes());
 
     /* Mark all double slots as undemotable */
+    uint16* gslots = from_ti->globalSlots->data();
     for (unsigned i = 0; i < typeMap.length(); i++) {
-        if (typeMap[i] == TT_DOUBLE)
-            MarkSlotUndemotable(cx, from_ti, i);
-    }
-
-    VMFragment* firstPeer = ((VMFragment*)from)->first;
+        if (typeMap[i] == TT_DOUBLE) {
+            if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
+                if (i < exit->numStackSlots)
+                    oracle.markStackSlotUndemotable(cx, i, exit->recursive_pc);
+                else
+                    oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
+            }
+            if (i < from_ti->nStackTypes)
+                oracle.markStackSlotUndemotable(cx, i, from->ip);
+            else if (i >= exit->numStackSlots)
+                oracle.markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
+        }
+    }
+
+    JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT ||
+              (exit->exitType == RECURSIVE_UNLINKED_EXIT && exit->recursive_pc));
+
+    VMFragment* firstPeer = NULL;
+    if (exit->exitType == UNSTABLE_LOOP_EXIT || exit->recursive_pc == from->ip) {
+        firstPeer = (VMFragment*)from->first;
+    } else {
+        firstPeer = getLoop(&JS_TRACE_MONITOR(cx), exit->recursive_pc, from->globalObj,
+                            from->globalShape, from->argc);
+    }
+
     for (VMFragment* peer = firstPeer; peer; peer = peer->peer) {
         TreeInfo* peer_ti = peer->getTreeInfo();
         if (!peer_ti)
             continue;
         JS_ASSERT(peer->argc == from->argc);
         JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
         TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer);
         if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
@@ -5547,38 +5684,61 @@ AttemptToStabilizeTree(JSContext* cx, JS
     VMFragment* from = exit->root();
     TreeInfo* from_ti = from->getTreeInfo();
 
     VMFragment* peer = NULL;
     TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
     if (consensus == TypeConsensus_Okay) {
         TreeInfo* peer_ti = peer->getTreeInfo();
         JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots);
-        JS_ASSERT(from_ti->nStackTypes == peer_ti->nStackTypes);
+        JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
+                     from_ti->nStackTypes == peer_ti->nStackTypes);
+        JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
         /* Patch this exit to its peer */
         JoinPeers(tm->assembler, exit, peer);
         /*
          * Update peer global types. The |from| fragment should already be updated because it is
          * on the execution path, and somehow connected to the entry trace.
          */
         if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length())
             SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti);
         JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length());
         /* This exit is no longer unstable, so remove it. */
-        from_ti->removeUnstableExit(exit);
+        if (exit->exitType == UNSTABLE_LOOP_EXIT)
+            from_ti->removeUnstableExit(exit);
         debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);)
         return false;
     } else if (consensus == TypeConsensus_Undemotes) {
         /* The original tree is unconnectable, so trash it. */
         TrashTree(cx, peer);
         return false;
     }
 
+    /* Don't bother recording if the exit doesn't expect this PC */
+    if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
+        if (++exit->hitcount >= MAX_RECURSIVE_UNLINK_HITS) {
+            Blacklist((jsbytecode*)from->ip);
+            TrashTree(cx, from);
+            return false;
+        }
+        if (exit->recursive_pc != cx->fp->regs->pc)
+            return false;
+        from = getLoop(tm, exit->recursive_pc, from->globalObj, from->globalShape, cx->fp->argc);
+        /* Use the stale TreeInfo for RecordTree, since |from| might not have one anymore. */
+    }
+
+    JS_ASSERT(from == from->root);
+
+    /* If this tree has been blacklisted, don't try to record a new one. */
+    if (*(jsbytecode*)from->ip == JSOP_NOP)
+        return false;
+
     return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj,
-                      from->globalShape, from_ti->globalSlots, cx->fp->argc);
+                      from->globalShape, from_ti->globalSlots, cx->fp->argc,
+                      Monitor_Branch);
 }
 
 static JS_REQUIRES_STACK bool
 AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer
 #ifdef MOZ_TRACEVIS
     , TraceVisStateObj* tvso = NULL
 #endif
     )
@@ -5669,21 +5829,26 @@ AttemptToExtendTree(JSContext* cx, VMSid
             stackSlots = fullMap.length();
             ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
             JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
             JS_ASSERT(ngslots == fullMap.length() - stackSlots);
             typeMap = fullMap.data();
         }
         JS_ASSERT(ngslots >= anchor->numGlobalSlots);
         bool rv = StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate, stackSlots,
-                                ngslots, typeMap, exitedFrom, outer, cx->fp->argc);
+                                ngslots, typeMap, exitedFrom, outer, cx->fp->argc,
+                                Monitor_Branch);
 #ifdef MOZ_TRACEVIS
         if (!rv && tvso)
             tvso->r = R_FAIL_EXTEND_START;
 #endif
+        if (anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) {
+            DeleteRecorder(cx);
+            return false;
+        }
         return rv;
     }
 #ifdef MOZ_TRACEVIS
     if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
 #endif
     return false;
 }
 
@@ -5768,67 +5933,114 @@ RecordLoopEdge(JSContext* cx, TraceRecor
         }
         if (!f || f->code()) {
             f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape, argc);
             if (!f) {
                 ResetJIT(cx, FR_OOM);
                 return false;
             }
         }
-        return RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc);
-    }
-
-    r->adjustCallerTypes(f);
-    r->prepareTreeCall(f);
+        return RecordTree(cx, tm, f, outer, outerArgc, globalObj, globalShape, globalSlots, argc,
+                          Monitor_Branch);
+    }
+
+    return r->attemptTreeCall(f, inlineCallCount) == ARECORD_CONTINUE;
+}
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::attemptTreeCall(VMFragment* f, uintN& inlineCallCount)
+{
+    /*
+     * It is absolutely forbidden for recursive loop trees to tree-call
+     * themselves, because doing so could accidentally pop frames owned by the
+     * parent call, and there is no way to deal with that yet. We would have to
+     * track a "start of poppable rp stack" marker and make it illegal to pop
+     * frames whenever it differs from the real start of the rp stack.
+     * --
+     * In the interim, just do tree calls knowing that they won't go into
+     * recursive trees that can pop parent frames.
+     */
+    if (f->getTreeInfo()->script == cx->fp->script) {
+        if (f->getTreeInfo()->recursion >= Recursion_Unwinds) {
+            Blacklist(cx->fp->script->code);
+            js_AbortRecording(cx, "Inner tree is an unsupported type of recursion");
+            return ARECORD_ABORTED;
+        } else {
+            f->getTreeInfo()->recursion = Recursion_Disallowed;
+        }
+    }
+
+    adjustCallerTypes(f);
+    prepareTreeCall(f);
+
+#ifdef DEBUG
+    unsigned oldInlineCallCount = inlineCallCount;
+#endif
 
     VMSideExit* innermostNestedGuard = NULL;
     VMSideExit* lr = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard);
 
     /* ExecuteTree can reenter the interpreter and kill |this|. */
     if (!TRACE_RECORDER(cx))
-        return false;
+        return ARECORD_ABORTED;
 
     if (!lr) {
         js_AbortRecording(cx, "Couldn't call inner tree");
-        return false;
-    }
-
-    VMFragment* outerFragment = (VMFragment*) tm->recorder->getFragment()->root;
+        return ARECORD_ABORTED;
+    }
+
+    VMFragment* outerFragment = (VMFragment*)fragment->root;
     jsbytecode* outer = (jsbytecode*) outerFragment->ip;
     switch (lr->exitType) {
+      case RECURSIVE_LOOP_EXIT:
       case LOOP_EXIT:
         /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
         if (innermostNestedGuard) {
+            JSContext* _cx = cx;
             js_AbortRecording(cx, "Inner tree took different side exit, abort current "
                               "recording and grow nesting tree");
-            return AttemptToExtendTree(cx, innermostNestedGuard, lr, outer);
-        }
+            return AttemptToExtendTree(_cx, innermostNestedGuard, lr, outer) ?
+                ARECORD_CONTINUE : ARECORD_ABORTED;
+        }
+
+        JS_ASSERT(oldInlineCallCount == inlineCallCount);
 
         /* Emit a call to the inner tree and continue recording the outer tree trace. */
-        r->emitTreeCall(f, lr);
-        return true;
+        emitTreeCall(f, lr);
+        return ARECORD_CONTINUE;
 
       case UNSTABLE_LOOP_EXIT:
+      {
         /* Abort recording so the inner loop can become type stable. */
+        JSContext* _cx = cx;
+        JSObject* _globalObj = globalObj;
         js_AbortRecording(cx, "Inner tree is trying to stabilize, abort outer recording");
-        return AttemptToStabilizeTree(cx, globalObj, lr, outer, outerFragment->argc);
+        return AttemptToStabilizeTree(_cx, _globalObj, lr, outer, outerFragment->argc) ?
+            ARECORD_CONTINUE : ARECORD_ABORTED;
+      }
 
       case OVERFLOW_EXIT:
         oracle.markInstructionUndemotable(cx->fp->regs->pc);
         /* FALL THROUGH */
+      case RECURSIVE_SLURP_FAIL_EXIT:
+      case RECURSIVE_SLURP_MISMATCH_EXIT:
+      case RECURSIVE_MISMATCH_EXIT:
+      case RECURSIVE_EMPTY_RP_EXIT:
       case BRANCH_EXIT:
-      case CASE_EXIT:
-        /* Abort recording the outer tree, extend the inner tree. */
-        js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
-        return AttemptToExtendTree(cx, lr, NULL, outer);
+      case CASE_EXIT: {
+          /* Abort recording the outer tree, extend the inner tree. */
+          JSContext* _cx = cx;
+          js_AbortRecording(cx, "Inner tree is trying to grow, abort outer recording");
+          return AttemptToExtendTree(_cx, lr, NULL, outer) ? ARECORD_CONTINUE : ARECORD_ABORTED;
+      }
 
       default:
         debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
         js_AbortRecording(cx, "Inner tree not suitable for calling");
-        return false;
+        return ARECORD_ABORTED;
     }
 }
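The reason recursive trees must not tree-call themselves is frame ownership on the native call stack: a nested tree call may only pop FrameInfo entries it pushed itself, which is what the new state->sor ("start of rp stack") marker set up in ExecuteTree is for. A minimal standalone sketch of that ownership check, with data structures invented for illustration:

    #include <cstdio>

    struct FrameInfoStack {
        void* frames[16];
        void** sor;   // start of the region the current tree call may pop
        void** rp;    // next free slot

        FrameInfoStack() : sor(frames), rp(frames) {}

        void push(void* fi) { *rp++ = fi; }

        // Popping below |sor| would discard frames owned by an outer tree call,
        // so refuse (the trace would side-exit instead).
        bool pop() {
            if (rp == sor)
                return false;
            --rp;
            return true;
        }
    };

    int main() {
        FrameInfoStack s;
        s.push(nullptr);          // frame pushed by an outer (parent) tree call
        s.sor = s.rp;             // nested tree call starts here
        s.push(nullptr);          // frame it pushed itself
        printf("%d\n", s.pop());  // 1: its own frame
        printf("%d\n", s.pop());  // 0: would pop the parent's frame
    }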
 
 static bool
 IsEntryTypeCompatible(jsval* vp, JSTraceType* m)
 {
     unsigned tag = JSVAL_TAG(*vp);
 
@@ -5943,17 +6155,17 @@ JS_REQUIRES_STACK VMFragment*
 TraceRecorder::findNestedCompatiblePeer(VMFragment* f)
 {
     JSTraceMonitor* tm;
 
     tm = &JS_TRACE_MONITOR(cx);
     unsigned int ngslots = treeInfo->globalSlots->length();
 
     TreeInfo* ti;
-    for (; f != NULL; f = f->peer) {
+    for (; f != NULL; f = (VMFragment*)f->peer) {
         if (!f->code())
             continue;
 
         ti = (TreeInfo*)f->vmprivate;
 
         debug_only_printf(LC_TMTracer, "checking nested types %p: ", (void*)f);
 
         if (ngslots > ti->nGlobalTypes())
@@ -6154,16 +6366,17 @@ ExecuteTree(JSContext* cx, Fragment* f, 
     state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
     state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
 
     /* Set up the native call stack frame. */
     FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
     state->callstackBase = callstack_buffer;
     state->rp = callstack_buffer;
     state->eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
+    state->sor = state->rp;
 
     void *reserve;
     state->stackMark = JS_ARENA_MARK(&cx->stackPool);
     JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
     if (!reserve)
         return NULL;
 
 #ifdef DEBUG
@@ -6374,37 +6587,52 @@ LeaveTree(InterpState& state, VMSideExit
                               typeMap[innermost->numStackSlots - i],
                               (jsdouble *) state.deepBailSp
                                   + innermost->sp_adj / sizeof(jsdouble) - i);
             }
         }
         return;
     }
 
+    /* Save the innermost FrameInfo for guardUpRecursion */
+    if (innermost->exitType == RECURSIVE_MISMATCH_EXIT) {
+        /* There should never be a static calldepth for a recursive mismatch. */
+        JS_ASSERT(innermost->calldepth == 0);
+        /* There must be at least one item on the rp stack. */
+        JS_ASSERT(callstack < rp);
+        /* :TODO: don't be all squirrelin' this in here */
+        innermost->recursive_down = *(rp - 1);
+    }
+
+    /* Slurp failure should have no frames */
+    JS_ASSERT_IF(innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT,
+                 innermost->calldepth == 0 && callstack == rp);
+
     JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
     while (callstack < rp) {
         FrameInfo* fi = *callstack;
         /* Peek at the callee native slot in the not-yet-synthesized down frame. */
         JSObject* callee = *(JSObject**)&stack[fi->callerHeight];
 
         /*
          * Synthesize a stack frame and write out the values in it using the
          * type map pointer on the native call stack.
          */
         SynthesizeFrame(cx, *fi, callee);
         int slots = FlushNativeStackFrame(cx, 1 /* callDepth */, (*callstack)->get_typemap(),
-                                          stack, cx->fp);
+                                          stack, cx->fp, 0);
 #ifdef DEBUG
         JSStackFrame* fp = cx->fp;
         debug_only_printf(LC_TMTracer,
-                          "synthesized deep frame for %s:%u@%u, slots=%d\n",
+                          "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
                           fp->script->filename,
                           js_FramePCToLineNumber(cx, fp),
                           FramePCOffset(fp),
-                          slots);
+                          slots,
+                          *callstack);
 #endif
         /*
          * Keep track of the additional frames we put on the interpreter stack
          * and the native stack slots we consumed.
          */
         ++*state.inlineCallCountp;
         ++callstack;
         stack += slots;
@@ -6510,22 +6738,24 @@ LeaveTree(InterpState& state, VMSideExit
         unsigned check_ngslots =
 #endif
         BuildGlobalTypeMapFromInnerTree(typeMap, innermost);
         JS_ASSERT(check_ngslots == ngslots);
         globalTypeMap = typeMap.data();
     }
 
     /* Write back the topmost native stack frame. */
+    unsigned ignoreSlots = innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT ?
+                           innermost->numStackSlots - 1 : 0;
 #ifdef DEBUG
     int slots =
 #endif
         FlushNativeStackFrame(cx, innermost->calldepth,
                               innermost->stackTypeMap(),
-                              stack, NULL);
+                              stack, NULL, ignoreSlots);
     JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
 
     if (innermost->nativeCalleeWord)
         SynthesizeSlowNativeFrame(state, cx, innermost);
 
     /* Write back interned globals. */
     double* global = (double*)(&state + 1);
     FlushNativeGlobalFrame(cx, global,
@@ -6542,17 +6772,17 @@ LeaveTree(InterpState& state, VMSideExit
     else
         AUDIT(timeoutIntoInterpreter);
 #endif
 
     state.innermost = innermost;
 }
 
 JS_REQUIRES_STACK bool
-js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
+js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, MonitorReason reason)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_MONITOR);
 #endif
 
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Is the recorder currently active? */
@@ -6641,47 +6871,55 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
         }
 
         /*
          * We can give RecordTree the root peer. If that peer is already taken,
          * it will walk the peer list and find us a free slot or allocate a new
          * tree if needed.
          */
         bool rv = RecordTree(cx, tm, f->first, NULL, 0, globalObj, globalShape,
-                             globalSlots, argc);
+                             globalSlots, argc, reason);
 #ifdef MOZ_TRACEVIS
         if (!rv)
             tvso.r = R_FAIL_RECORD_TREE;
 #endif
         return rv;
     }
 
     debug_only_printf(LC_TMTracer,
                       "Looking for compat peer %d@%d, from %p (ip: %p)\n",
                       js_FramePCToLineNumber(cx, cx->fp),
                       FramePCOffset(cx->fp), (void*)f, f->ip);
 
     uintN count;
-    Fragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
+    VMFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
     if (!match) {
         if (count < MAXPEERS)
             goto record;
 
         /*
          * If we hit the max peers ceiling, don't try to lookup fragments all
          * the time. That's expensive. This must be a rather type-unstable loop.
          */
         debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
         Blacklist((jsbytecode*) f->root->ip);
 #ifdef MOZ_TRACEVIS
         tvso.r = R_MAX_PEERS;
 #endif
         return false;
     }
 
+    /*
+     * Trees that only unwind recursive frames usually won't do much work, and
+     * most time will be spent entering and exiting ExecuteTree(). There's no
+     * benefit to doing this until the down-recursive side completes.
+     */
+    if (match->getTreeInfo()->recursion == Recursion_Unwinds)
+        return false;
+
     VMSideExit* lr = NULL;
     VMSideExit* innermostNestedGuard = NULL;
 
     lr = ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard);
     if (!lr) {
 #ifdef MOZ_TRACEVIS
         tvso.r = R_FAIL_EXECUTE_TREE;
 #endif
@@ -6690,35 +6928,41 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
 
     /*
      * If we exit on a branch, or on a tree call guard, try to grow the inner
      * tree (in case of a branch exit), or the tree nested around the tree we
      * exited from (in case of the tree call guard).
      */
     bool rv;
     switch (lr->exitType) {
+      case RECURSIVE_UNLINKED_EXIT:
       case UNSTABLE_LOOP_EXIT:
-          rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL);
+          rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0);
 #ifdef MOZ_TRACEVIS
           if (!rv)
               tvso.r = R_FAIL_STABILIZE;
 #endif
           return rv;
 
       case OVERFLOW_EXIT:
         oracle.markInstructionUndemotable(cx->fp->regs->pc);
         /* FALL THROUGH */
+      case RECURSIVE_SLURP_FAIL_EXIT:
+      case RECURSIVE_SLURP_MISMATCH_EXIT:
+      case RECURSIVE_EMPTY_RP_EXIT:
+      case RECURSIVE_MISMATCH_EXIT:
       case BRANCH_EXIT:
       case CASE_EXIT:
-          return AttemptToExtendTree(cx, lr, NULL, NULL
+        return AttemptToExtendTree(cx, lr, NULL, NULL
 #ifdef MOZ_TRACEVIS
                                           , &tvso
 #endif
                  );
 
+      case RECURSIVE_LOOP_EXIT:
       case LOOP_EXIT:
         if (innermostNestedGuard)
             return AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL
 #ifdef MOZ_TRACEVIS
                                             , &tvso
 #endif
                    );
 #ifdef MOZ_TRACEVIS
@@ -9386,29 +9630,42 @@ TraceRecorder::putArguments()
             LIns* arg_ins = box_jsval(cx->fp->argv[i], get(&cx->fp->argv[i]));
             lir->insStorei(arg_ins, args_ins, i * sizeof(jsval));
         }
         LIns* args[] = { args_ins, argsobj_ins, cx_ins };
         lir->insCall(&js_PutArguments_ci, args);
     }
 }
 
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::record_EnterFrame()
+static inline bool
+IsTraceableRecursion(JSContext *cx)
+{
+    JSStackFrame *fp = cx->fp;
+    JSStackFrame *down = cx->fp->down;
+    if (!down)
+        return false;
+    if (down->script != fp->script)
+        return false;
+    if (down->argc != fp->argc)
+        return false;
+    if (fp->imacpc || down->imacpc)
+        return false;
+    if ((fp->flags & JSFRAME_CONSTRUCTING) || (down->flags & JSFRAME_CONSTRUCTING))
+        return false;
+    return true;
+}
+
+JS_REQUIRES_STACK AbortableRecordingStatus
+TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
 {
     JSStackFrame* fp = cx->fp;
 
     if (++callDepth >= MAX_CALLDEPTH)
         RETURN_STOP_A("exceeded maximum call depth");
 
-    // FIXME: Allow and attempt to inline a single level of recursion until we compile
-    //        recursive calls as independent trees (459301).
-    if (fp->script == fp->down->script && fp->down->down && fp->down->down->script == fp->script)
-        RETURN_STOP_A("recursive call");
-
     debug_only_printf(LC_TMTracer, "EnterFrame %s, callDepth=%d\n",
                       js_AtomToPrintableString(cx, cx->fp->fun->atom),
                       callDepth);
     debug_only_stmt(
         if (js_LogController.lcbits & LC_TMRecorder) {
             js_Disassemble(cx, cx->fp->script, JS_TRUE, stdout);
             debug_only_print0(LC_TMTracer, "----\n");
         }
@@ -9426,29 +9683,106 @@ TraceRecorder::record_EnterFrame()
         set(vp++, void_ins, true);
     }
 
     vp = &fp->slots[0];
     vpstop = vp + fp->script->nfixed;
     while (vp < vpstop)
         set(vp++, void_ins, true);
     set(&fp->argsobj, INS_NULL(), true);
-    return ARECORD_CONTINUE;
+
+    /*
+     * Check for recursion. This is a special check for recursive cases that
+     * can form a trace tree, just like a loop. Unusual recursion, for example
+     * with a differing argc or an imacpc present, is not handled here; it
+     * falls through to the code below so that pre-recursion functionality is
+     * not regressed.
+     */
+    if (IsTraceableRecursion(cx) && treeInfo->script == cx->fp->script) {
+        if (treeInfo->recursion == Recursion_Disallowed)
+            RETURN_STOP_A("recursion not allowed in this tree");
+        if (treeInfo->script != cx->fp->script)
+            RETURN_STOP_A("recursion does not match original tree");
+        return InjectStatus(downRecursion());
+    }
+
+    /* Try inlining one level in case this recursion doesn't go too deep. */
+    if (fp->script == fp->down->script &&
+        fp->down->down && fp->down->down->script == fp->script) {
+        RETURN_STOP_A("recursion started inlining");
+    }
+
+    VMFragment* root = (VMFragment*)fragment->root;
+    VMFragment* first = getLoop(&JS_TRACE_MONITOR(cx), fp->regs->pc,
+                                root->globalObj, root->globalShape, fp->argc);
+    if (!first)
+        return ARECORD_CONTINUE;
+    VMFragment* f = findNestedCompatiblePeer(first);
+    if (!f) {
+        /*
+         * If there were no compatible peers, but there were peers at all, then
+         * it is probable that an inner recursive function is type-mismatching.
+         * Start a new recorder that must be recursive.
+         */
+        for (f = first; f; f = (VMFragment*)f->peer) {
+            if (f->getTreeInfo() && f->getTreeInfo()->recursion == Recursion_Detected) {
+                /* Since this recorder is about to die, save its values. */
+                if (++first->hits() <= HOTLOOP)
+                    return ARECORD_STOP;
+                if (IsBlacklisted((jsbytecode*)f->ip))
+                    RETURN_STOP_A("inner recursive tree is blacklisted");
+                JS_ASSERT(f->getTreeInfo()->script != treeInfo->script);
+                JSContext* _cx = cx;
+                SlotList* globalSlots = treeInfo->globalSlots;
+                JSTraceMonitor* tm = traceMonitor;
+                js_AbortRecording(cx, "trying to compile inner recursive tree");
+                if (RecordTree(_cx, tm, first, NULL, 0, first->globalObj, first->globalShape,
+                               globalSlots, _cx->fp->argc, Monitor_EnterFrame)) {
+                    JS_ASSERT(tm->recorder);
+                }
+                break;
+            }
+        }
+        return ARECORD_CONTINUE;
+    } else if (f) {
+        /* Make sure inner tree call will not run into an out-of-memory condition. */
+        JSTraceMonitor* tm = traceMonitor;
+        if (tm->reservedDoublePoolPtr < (tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS) &&
+            !ReplenishReservedPool(cx, tm)) {
+            RETURN_STOP_A("Couldn't call inner tree (out of memory)");
+        }
+        /*
+         * Make sure the shape of the global object still matches (this might
+         * flush the JIT cache).
+         */
+        JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
+        uint32 globalShape = -1;
+        SlotList* globalSlots = NULL;
+        if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
+            return ARECORD_ABORTED;
+        return attemptTreeCall(f, inlineCallCount);
+    }
+
+    return ARECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_LeaveFrame()
 {
     debug_only_stmt(
         if (cx->fp->fun)
             debug_only_printf(LC_TMTracer,
                               "LeaveFrame (back to %s), callDepth=%d\n",
                               js_AtomToPrintableString(cx, cx->fp->fun->atom),
                               callDepth);
         );
+
+    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->script,
+              cx->fp->regs->pc)].length == JSOP_CALL_LENGTH);
+
     if (callDepth-- <= 0)
         RETURN_STOP_A("returned out of a loop we started tracing");
 
     // LeaveFrame gets called after the interpreter popped the frame and
     // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
     atoms = FrameAtomBase(cx, cx->fp);
     set(&stackval(-1), rval_ins, true);
     return ARECORD_CONTINUE;
@@ -9485,20 +9819,25 @@ JS_REQUIRES_STACK AbortableRecordingStat
 TraceRecorder::record_JSOP_LEAVEWITH()
 {
     return ARECORD_STOP;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_RETURN()
 {
-    /* A return from callDepth 0 terminates the current loop. */
+    /* A return from callDepth 0 terminates the current loop, except for recursion. */
     if (callDepth == 0) {
-        AUDIT(returnLoopExits);
-        return endLoop();
+        if (IsTraceableRecursion(cx) && treeInfo->recursion != Recursion_Disallowed &&
+            treeInfo->script == cx->fp->script) {
+            return InjectStatus(upRecursion());
+        } else {
+            AUDIT(returnLoopExits);
+            return endLoop();
+        }
     }
 
     putArguments();
 
     /* If we inlined this function call, make the return value available to the caller code. */
     jsval& rval = stackval(-1);
     JSStackFrame *fp = cx->fp;
     if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
@@ -10200,17 +10539,18 @@ TraceRecorder::callSpecializedNative(JSN
                 *argp = INS_CONSTOBJ(JSVAL_TO_OBJECT(fval));
             } else if (argtype == 'p') {
                 CHECK_STATUS(getClassPrototype(JSVAL_TO_OBJECT(fval), *argp));
             } else if (argtype == 'R') {
                 *argp = INS_CONSTPTR(cx->runtime);
             } else if (argtype == 'P') {
                 // FIXME: Set pc to imacpc when recording JSOP_CALL inside the
                 //        JSOP_GETELEM imacro (bug 476559).
-                if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
+                if ((*pc == JSOP_CALL) &&
+                    fp->imacpc && *fp->imacpc == JSOP_GETELEM)
                     *argp = INS_CONSTPTR(fp->imacpc);
                 else
                     *argp = INS_CONSTPTR(pc);
             } else if (argtype == 'D') { /* this, as a number */
                 if (!isNumber(tval))
                     goto next_specialization;
                 *argp = this_ins;
             } else {
@@ -11732,16 +12072,24 @@ TraceRecorder::interpretedFunctionCall(j
     if (callDepth >= treeInfo->maxCallDepth)
         treeInfo->maxCallDepth = callDepth + 1;
 
     fi = traceMonitor->frameCache->memoize(fi);
     if (!fi)
         RETURN_STOP("out of memory");
     lir->insStorei(INS_CONSTPTR(fi), lirbuf->rp, callDepth * sizeof(FrameInfo*));
 
+#if defined JS_JIT_SPEW
+    debug_only_printf(LC_TMTracer, "iFC frameinfo=%p, stack=%d, map=", fi,
+                      fi->callerHeight);
+    for (unsigned i = 0; i < fi->callerHeight; i++)
+        debug_only_printf(LC_TMTracer, "%c", typeChar[fi->get_typemap()[i]]);
+    debug_only_print0(LC_TMTracer, "\n");
+#endif
+
     atoms = fun->u.i.script->atomMap.vector;
     return RECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_CALL()
 {
     uintN argc = GET_ARGC(cx->fp->regs->pc);
@@ -14349,8 +14697,31 @@ js_StopTraceVis(JSContext *cx, JSObject 
         fprintf(stderr, "stopped TraceVis recording\n");
     else
         JS_ReportError(cx, "TraceVis isn't running");
 
     return ok;
 }
 
 #endif /* MOZ_TRACEVIS */
+
+JS_REQUIRES_STACK void
+js_CaptureStackTypes(JSContext* cx, unsigned callDepth, JSTraceType* typeMap)
+{
+    CaptureTypesVisitor capVisitor(cx, typeMap);
+    VisitStackSlots(capVisitor, cx, callDepth);
+}
+
+JS_REQUIRES_STACK void
+TraceRecorder::determineGlobalTypes(JSTraceType* typeMap)
+{
+    DetermineTypesVisitor detVisitor(*this, typeMap);
+    VisitGlobalSlots(detVisitor, cx, *treeInfo->globalSlots);
+}
+
+LIns*
+TraceRecorder::demoteIns(LIns* ins)
+{
+    return ::demote(lir, ins);
+}
+
+#include "jsrecursion.cpp"
+
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -275,17 +275,19 @@ class Oracle {
     avmplus::BitSet _globalDontDemote;
     avmplus::BitSet _pcDontDemote;
 public:
     Oracle();
 
     JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
     JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
     JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
+    JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc);
     JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
+    JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const;
     void markInstructionUndemotable(jsbytecode* pc);
     bool isInstructionUndemotable(jsbytecode* pc) const;
 
     void clearDemotability();
     void clear() {
         clearDemotability();
     }
 };
@@ -380,38 +382,57 @@ public:
     /*                                                                          \
      * A specialization of MISMATCH_EXIT to handle allocation failures.         \
      */                                                                         \
     _(OOM)                                                                      \
     _(OVERFLOW)                                                                 \
     _(UNSTABLE_LOOP)                                                            \
     _(TIMEOUT)                                                                  \
     _(DEEP_BAIL)                                                                \
-    _(STATUS)
+    _(STATUS)                                                                   \
+    /* Exit is almost recursive and wants a peer at recursive_pc */             \
+    _(RECURSIVE_UNLINKED)                                                       \
+    /* Exit is recursive, and there are no more frames */                       \
+    _(RECURSIVE_LOOP)                                                           \
+    /* Exit is recursive, but a type mismatch occurred on a down-frame guard */  \
+    _(RECURSIVE_MISMATCH)                                                       \
+    /* Exit is recursive, and the JIT wants to try slurping interp frames */    \
+    _(RECURSIVE_EMPTY_RP)                                                       \
+    /* Slurping interp frames in up-recursion failed */                         \
+    _(RECURSIVE_SLURP_FAIL)                                                     \
+    /* Tried to slurp an interp frame, but the pc or argc mismatched */         \
+    _(RECURSIVE_SLURP_MISMATCH)
 
 enum ExitType {
     #define MAKE_EXIT_CODE(x) x##_EXIT,
     JS_TM_EXITCODES(MAKE_EXIT_CODE)
     #undef MAKE_EXIT_CODE
     TOTAL_EXIT_TYPES
 };
 
+struct FrameInfo;
+
 struct VMSideExit : public nanojit::SideExit
 {
     JSObject* block;
     jsbytecode* pc;
     jsbytecode* imacpc;
     intptr_t sp_adj;
     intptr_t rp_adj;
     int32_t calldepth;
     uint32 numGlobalSlots;
     uint32 numStackSlots;
     uint32 numStackSlotsBelowCurrentFrame;
     ExitType exitType;
     uintN lookupFlags;
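+    /*
+     * State for recursive exits: recursive_pc is the pc at which a matching
+     * peer tree is wanted and recursive_down describes the caller (down)
+     * frame being guarded; slurpFailSlot and slurpType record the slot index
+     * and type at which slurping an interpreter frame stopped.
+     */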
+    void* recursive_pc;
+    FrameInfo* recursive_down;
+    unsigned hitcount;
+    unsigned slurpFailSlot;
+    JSTraceType slurpType;
 
     /*
      * Ordinarily 0.  If a slow native function is atop the stack, the 1 bit is
      * set if constructing and the other bits are a pointer to the funobj.
      */
     uintptr_t nativeCalleeWord;
 
     JSObject * nativeCallee() {
@@ -425,16 +446,21 @@ struct VMSideExit : public nanojit::Side
     void setNativeCallee(JSObject *callee, bool constructing) {
         nativeCalleeWord = uintptr_t(callee) | (constructing ? 1 : 0);
     }
 
     inline JSTraceType* stackTypeMap() {
         return (JSTraceType*)(this + 1);
     }
 
+    inline JSTraceType& stackType(unsigned i) {
+        JS_ASSERT(i < numStackSlots);
+        return stackTypeMap()[i];
+    }
+
     inline JSTraceType* globalTypeMap() {
         return (JSTraceType*)(this + 1) + this->numStackSlots;
     }
 
     inline JSTraceType* fullTypeMap() {
         return stackTypeMap();
     }
 
@@ -535,16 +561,18 @@ struct REHashFn {
     static size_t hash(const REHashKey& k) {
         return
             k.re_length +
             k.re_flags +
             nanojit::murmurhash(k.re_chars, k.re_length * sizeof(jschar));
     }
 };
 
+class TreeInfo;
+
 struct FrameInfo {
     JSObject*       block;      // caller block chain head
     jsbytecode*     pc;         // caller fp->regs->pc
     jsbytecode*     imacpc;     // caller fp->imacpc
     uint32          spdist;     // distance from fp->slots to fp->regs->sp at JSOP_CALL
 
     /*
      * Bit  15 (0x8000) is a flag that is set if constructing (called through new).
@@ -569,26 +597,42 @@ struct FrameInfo {
     enum { CONSTRUCTING_FLAG = 0x10000 };
     void   set_argc(uint16 argc, bool constructing) {
         this->argc = uint32(argc) | (constructing ? CONSTRUCTING_FLAG: 0);
     }
     uint16 get_argc() const { return uint16(argc & ~CONSTRUCTING_FLAG); }
     bool   is_constructing() const { return (argc & CONSTRUCTING_FLAG) != 0; }
 
     // The typemap just before the callee is called.
+    JSTraceType* get_typemap() { return (JSTraceType*) (this+1); }
     const JSTraceType* get_typemap() const { return (JSTraceType*) (this+1); }
 };
 
 struct UnstableExit
 {
     nanojit::Fragment* fragment;
     VMSideExit* exit;
     UnstableExit* next;
 };
 
+enum MonitorReason
+{
+    Monitor_Branch,             /* monitoring a loop edge or branch */
+    Monitor_EnterFrame,         /* monitoring entry into an interpreted frame */
+    Monitor_LeaveFrame          /* monitoring return from an interpreted frame */
+};
+
+enum RecursionStatus
+{
+    Recursion_None,             /* No recursion has been compiled yet. */
+    Recursion_Disallowed,       /* This tree cannot be recursive. */
+    Recursion_Unwinds,          /* Tree is up-recursive only. */
+    Recursion_Detected          /* Tree has down recursion and maybe up recursion. */
+};
+
 class TreeInfo {
 public:
     nanojit::Fragment* const      fragment;
     JSScript*               script;
     unsigned                maxNativeStackSlots;
     ptrdiff_t               nativeStackBase;
     unsigned                maxCallDepth;
     TypeMap                 typeMap;
@@ -604,16 +648,17 @@ public:
     /* All embedded GC things are registered here so the GC can scan them. */
     Queue<jsval>            gcthings;
     Queue<JSScopeProperty*> sprops;
 #ifdef DEBUG
     const char*             treeFileName;
     uintN                   treeLineNumber;
     uintN                   treePCOffset;
 #endif
+    RecursionStatus         recursion;
 
     TreeInfo(nanojit::Allocator* alloc,
              nanojit::Fragment* _fragment,
              SlotList* _globalSlots)
         : fragment(_fragment),
           script(NULL),
           maxNativeStackSlots(0),
           nativeStackBase(0),
@@ -622,17 +667,18 @@ public:
           nStackTypes(0),
           globalSlots(_globalSlots),
           dependentTrees(alloc),
           linkedTrees(alloc),
           branchCount(0),
           sideExits(alloc),
           unstableExits(NULL),
           gcthings(alloc),
-          sprops(alloc)
+          sprops(alloc),
+          recursion(Recursion_None)
     {}
 
     inline unsigned nGlobalTypes() {
         return typeMap.length() - nStackTypes;
     }
     inline JSTraceType* globalTypeMap() {
         return typeMap.data() + nStackTypes;
     }
@@ -654,16 +700,17 @@ typedef enum JSBuiltinStatus {
 
 struct InterpState
 {
     double        *sp;                  // native stack pointer, stack[0] is spbase[0]
     FrameInfo**   rp;                   // call stack pointer
     JSContext     *cx;                  // current VM context handle
     double        *eos;                 // first unusable word after the native stack
     void          *eor;                 // first unusable word after the call stack
+    void          *sor;                 // start of rp stack
     VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
     VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
                                         // call exit guard mismatched
     void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
     TreeInfo*      outermostTree;       // the outermost tree we initially invoked
     double*        stackBase;           // native stack base
     FrameInfo**    callstackBase;       // call stack base
     uintN*         inlineCallCountp;    // inline call count counter
@@ -820,16 +867,17 @@ InjectStatus(AbortableRecordingStatus ar
 static JS_ALWAYS_INLINE bool
 StatusAbortsRecording(AbortableRecordingStatus ars)
 {
     return ars <= ARECORD_ABORTED;
 }
 #endif
 
 class SlotMap;
+class SlurpInfo;
 
 /* Results of trying to compare two typemaps together */
 enum TypeConsensus
 {
     TypeConsensus_Okay,         /* Two typemaps are compatible */
     TypeConsensus_Undemotes,    /* Not compatible now, but would be with pending undemotes. */
     TypeConsensus_Bad           /* Typemaps are not compatible */
 };
@@ -874,16 +922,17 @@ class TraceRecorder {
     JSSpecializedNative     generatedSpecializedNative;
     JSSpecializedNative*    pendingSpecializedNative;
     jsval*                  pendingUnboxSlot;
     nanojit::LIns*          pendingGuardCondition;
     jsbytecode*             outer;     /* outer trace header PC */
     uint32                  outerArgc; /* outer trace deepest frame argc */
     bool                    loop;
     nanojit::LIns*          loopLabel;
+    MonitorReason           monitorReason; /* reason this recording was started */
 
     nanojit::LIns* insImmObj(JSObject* obj);
     nanojit::LIns* insImmFun(JSFunction* fun);
     nanojit::LIns* insImmStr(JSString* str);
     nanojit::LIns* insImmSprop(JSScopeProperty* sprop);
     nanojit::LIns* p2i(nanojit::LIns* ins);
 
     bool isGlobal(jsval* p) const;
@@ -895,31 +944,42 @@ class TraceRecorder {
                                   unsigned callDepth, unsigned ngslots, JSTraceType* typeMap);
     void trackNativeStackUse(unsigned slots);
 
     JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
     JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
 
     JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
     JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
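+    /*
+     * Helpers for importing ("slurping") slots from interpreter frames while
+     * tracing up-recursion; each slurp*Slot emits a guard on the slot's
+     * expected type.
+     */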
+    nanojit::LIns* slurpInt32Slot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpDoubleSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpStringSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpObjectSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpFunctionSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpNullSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpBoolSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    nanojit::LIns* slurpSlot(nanojit::LIns* val_ins, jsval* vp, VMSideExit* exit);
+    void slurpSlot(nanojit::LIns* val_ins, jsval* vp, SlurpInfo* info);
+    JS_REQUIRES_STACK AbortableRecordingStatus slurpDownFrames(jsbytecode* return_pc);
 
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
 
     nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset,
                              bool demote);
     JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false,
                                bool demote = true);
     JS_REQUIRES_STACK nanojit::LIns* get(jsval* p);
     JS_REQUIRES_STACK nanojit::LIns* addr(jsval* p);
 
     JS_REQUIRES_STACK bool known(jsval* p);
     JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
 
     JS_REQUIRES_STACK TypeConsensus selfTypeStability(SlotMap& smap);
-    JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, VMFragment** peer);
+    JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, const void* ip,
+                                                      VMFragment** peer);
 
     JS_REQUIRES_STACK jsval& argval(unsigned n) const;
     JS_REQUIRES_STACK jsval& varval(unsigned n) const;
     JS_REQUIRES_STACK jsval& stackval(int n) const;
 
     struct NameResult {
         // |tracked| is true iff the result of the name lookup is a variable that
         // is already in the tracker. The rest of the fields are set only if
@@ -1143,23 +1203,23 @@ public:
     {
         free(p);
     }
 
     JS_REQUIRES_STACK
     TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
                   unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree,
-                  uint32 outerArgc);
+                  uint32 outerArgc, MonitorReason monitorReason);
     ~TraceRecorder();
 
     bool outOfMemory();
 
     static JS_REQUIRES_STACK AbortableRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr,
-                                                                         JSOp op);
+                                                                       JSOp op);
 
     JS_REQUIRES_STACK JSTraceType determineSlotType(jsval* vp);
 
     /*
      * Examines current interpreter state to record information suitable for
      * returning to the interpreter through a side exit of the given type.
      */
     JS_REQUIRES_STACK VMSideExit* snapshot(ExitType exitType);
@@ -1176,28 +1236,37 @@ public:
      * The instruction is suitable for use as the final argument of a single
      * call to LirBuffer::insGuard; do not reuse the returned value.
      */
     JS_REQUIRES_STACK nanojit::GuardRecord* createGuardRecord(VMSideExit* exit);
 
     nanojit::Fragment* getFragment() const { return fragment; }
     TreeInfo* getTreeInfo() const { return treeInfo; }
     JS_REQUIRES_STACK AbortableRecordingStatus compile(JSTraceMonitor* tm);
-    JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(TypeConsensus &consensus);
-    JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus &consensus);
+    JS_REQUIRES_STACK AbortableRecordingStatus closeLoop();
+    JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(VMSideExit* exit);
+    JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(SlotMap& slotMap, VMSideExit* exit);
     JS_REQUIRES_STACK AbortableRecordingStatus endLoop();
     JS_REQUIRES_STACK AbortableRecordingStatus endLoop(VMSideExit* exit);
     JS_REQUIRES_STACK void joinEdgesToEntry(VMFragment* peer_root);
     JS_REQUIRES_STACK void adjustCallerTypes(nanojit::Fragment* f);
-    JS_REQUIRES_STACK VMFragment* findNestedCompatiblePeer(VMFragment* f);
     JS_REQUIRES_STACK void prepareTreeCall(VMFragment* inner);
     JS_REQUIRES_STACK void emitTreeCall(VMFragment* inner, VMSideExit* exit);
+    JS_REQUIRES_STACK VMFragment* findNestedCompatiblePeer(VMFragment* f);
+    JS_REQUIRES_STACK AbortableRecordingStatus attemptTreeCall(VMFragment* inner,
+                                                               uintN& inlineCallCount);
     unsigned getCallDepth() const;
 
-    JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame();
+    JS_REQUIRES_STACK void determineGlobalTypes(JSTraceType* typeMap);
+    nanojit::LIns* demoteIns(nanojit::LIns* ins);
+
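+    /*
+     * Recursion support: upRecursion records a return into another activation
+     * of the same function, downRecursion records a recursive self-call, and
+     * downSnapshot builds a side exit that also captures the given down frame.
+     */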
+    JS_REQUIRES_STACK VMSideExit* downSnapshot(FrameInfo* downFrame);
+    JS_REQUIRES_STACK AbortableRecordingStatus upRecursion();
+    JS_REQUIRES_STACK AbortableRecordingStatus downRecursion();
+    JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame(uintN& inlineCallCount);
     JS_REQUIRES_STACK AbortableRecordingStatus record_LeaveFrame();
     JS_REQUIRES_STACK AbortableRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
                                                                   JSScopeProperty* sprop);
     JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
     JS_REQUIRES_STACK AbortableRecordingStatus record_NativeCallComplete();
 
     void forgetGuardedShapesForObject(JSObject* obj);
 
@@ -1223,16 +1292,17 @@ public:
     friend class ImportBoxedStackSlotVisitor;
     friend class ImportUnboxedStackSlotVisitor;
     friend class ImportGlobalSlotVisitor;
     friend class AdjustCallerGlobalTypesVisitor;
     friend class AdjustCallerStackTypesVisitor;
     friend class TypeCompatibilityVisitor;
     friend class SlotMap;
     friend class DefaultSlotMap;
+    friend class RecursiveSlotMap;
     friend jsval *js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
                                                   TraceRecorder *recorder);
 };
 #define TRACING_ENABLED(cx)       JS_HAS_OPTION(cx, JSOPTION_JIT)
 #define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
 #define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))
 
 #define JSOP_IN_RANGE(op,lo,hi)   (uintN((op) - (lo)) <= uintN((hi) - (lo)))
@@ -1255,17 +1325,17 @@ public:
     JS_END_MACRO
 
 #define TRACE_ARGS(x,args)      TRACE_ARGS_(x, args)
 #define TRACE_0(x)              TRACE_ARGS(x, ())
 #define TRACE_1(x,a)            TRACE_ARGS(x, (a))
 #define TRACE_2(x,a,b)          TRACE_ARGS(x, (a, b))
 
 extern JS_REQUIRES_STACK bool
-js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);
+js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, MonitorReason reason);
 
 #ifdef DEBUG
 # define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx, reason)
 #else
 # define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx)
 #endif
 
 extern JS_REQUIRES_STACK void