Bug 538293 - remove inlineCallCount and this STACK_QUOTA silliness (r=dvander)
author Luke Wagner <luke@mozilla.com>
Fri, 27 May 2011 18:15:39 -0700
changeset 71168 abd2dcd555f45b539b63f5ad0943803b7253dc88
parent 71167 c8e12e8c281bcf1d0490a577bc56807877716990
child 71169 c72fed47c034c9aa4fa7f2a6c4d170615b2eec26
push id 159
push user eakhgari@mozilla.com
push date Tue, 16 Aug 2011 17:53:11 +0000
reviewers dvander
bugs 538293
milestone 6.0a1
Bug 538293 - remove inlineCallCount and this STACK_QUOTA silliness (r=dvander)
js/src/jit-test/tests/basic/bug522136.js
js/src/jit-test/tests/basic/bug557168-1.js
js/src/jit-test/tests/basic/bug557168-2.js
js/src/jit-test/tests/basic/test-apply-many-args.js
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jscompartment.h
js/src/jsdbgapi.cpp
js/src/jsexn.cpp
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsobj.cpp
js/src/jsopcode.cpp
js/src/jsscript.cpp
js/src/jsscript.h
js/src/jstracer.cpp
js/src/jstracer.h
js/src/jsxml.cpp
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
js/src/vm/Stack-inl.h
js/src/vm/Stack.cpp
js/src/vm/Stack.h
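
The bulk of the patch below replaces ad-hoc frame walking (js_GetTopStackFrame / fp->prev() plus a per-frame fp->pc(cx) lookup) with FrameRegsIter, which yields each frame together with its pc, renames the remaining per-frame accessor to pcQuadratic to advertise its cost, and passes the pc explicitly to js_FramePCToLineNumber. As a rough illustration of the iteration pattern only, here is a minimal standalone sketch; the types and names (Frame, FrameRegsIterSketch, savedPc) are invented for the example and are not SpiderMonkey's actual API.

#include <cstdio>

struct Frame {
    Frame *prev;          // older frame
    const char *filename; // script filename, or nullptr for a native frame
    int savedPc;          // pc this frame saved when it called out (toy: an int offset)
};

class FrameRegsIterSketch {
    Frame *fp_;
    int pc_;              // pc paired with fp_, threaded down from the active regs
  public:
    FrameRegsIterSketch(Frame *top, int activePc) : fp_(top), pc_(activePc) {}
    bool done() const { return fp_ == nullptr; }
    Frame *fp() const { return fp_; }
    int pc() const { return pc_; }
    FrameRegsIterSketch &operator++() {
        fp_ = fp_->prev;
        pc_ = fp_ ? fp_->savedPc : 0;  // each frame saved its own pc at its call site
        return *this;
    }
};

int main() {
    Frame older = { nullptr, "older.js", 12 };   // scripted frame; saved its pc when it called out
    Frame top   = { &older, nullptr, 0 };        // youngest frame, a native one in this toy
    for (FrameRegsIterSketch it(&top, 7); !it.done(); ++it) {
        if (it.fp()->filename)                   // analogous to fp->isScriptFrame()
            std::printf("%s @ pc %d\n", it.fp()->filename, it.pc());
    }
    return 0;
}

Keeping the pc inside the iterator keeps a full stack walk linear, whereas recomputing each frame's pc from its younger frame made loops like PopulateReportBlame quadratic in stack depth, which is presumably why the surviving accessor is renamed pcQuadratic.
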
--- a/js/src/jit-test/tests/basic/bug522136.js
+++ b/js/src/jit-test/tests/basic/bug522136.js
@@ -1,10 +1,11 @@
 var Q = 0;
+var thrown = false;
 try {
-   (function f(i) { Q = i; if (i == 100000) return; f(i+1); })(1)
+   (function f(i) { Q = i; if (i == 200000) return; f(i+1); })(1)
 } catch (e) {
+    thrown = true;
 }
 
 // Exact behavior of recursion check depends on which JIT we use.
-var ok = (Q == 3000 || Q == 3001);
-assertEq(ok, true);
+assertEq(thrown && Q > 10000, true);
 
--- a/js/src/jit-test/tests/basic/bug557168-1.js
+++ b/js/src/jit-test/tests/basic/bug557168-1.js
@@ -1,11 +1,11 @@
 x = <x/>
 try {
   Function("\
     (function f() {\
       ({x:{b}}=x);\
-      f()\
+      f.apply(null, new Array(100))\
     })()\
   ")()
 } catch (e) {
   assertEq(e.message, "too much recursion");
 }
--- a/js/src/jit-test/tests/basic/bug557168-2.js
+++ b/js/src/jit-test/tests/basic/bug557168-2.js
@@ -1,11 +1,11 @@
 x = <x/>
 try {
   Function("\
     (function f() {\
       ({x}=x);\
-      f()\
+      f.apply(null, new Array(100))\
     })()\
   ")()
 } catch (e) {
   assertEq(e.message, "too much recursion");
 }
new file mode 100644
--- /dev/null
+++ b/js/src/jit-test/tests/basic/test-apply-many-args.js
@@ -0,0 +1,11 @@
+function f(x) {
+    if (x == 0)
+        return;
+    arguments[0]--;
+    f.apply(null, arguments);
+}
+
+a = [100];
+for (var i = 0; i < 2000; ++i)
+  a.push(i);
+f.apply(null, a);
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -762,20 +762,20 @@ ReportError(JSContext *cx, const char *m
 /* The report must be initially zeroed. */
 static void
 PopulateReportBlame(JSContext *cx, JSErrorReport *report)
 {
     /*
      * Walk stack until we find a frame that is associated with some script
      * rather than a native frame.
      */
-    for (StackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
-        if (fp->pc(cx)) {
-            report->filename = fp->script()->filename;
-            report->lineno = js_FramePCToLineNumber(cx, fp);
+    for (FrameRegsIter iter(cx); !iter.done(); ++iter) {
+        if (iter.fp()->isScriptFrame()) {
+            report->filename = iter.fp()->script()->filename;
+            report->lineno = js_FramePCToLineNumber(cx, iter.fp(), iter.pc());
             break;
         }
     }
 }
 
 /*
  * We don't post an exception in this case, since doing so runs into
  * complications of pre-allocating an exception object which required
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1454,23 +1454,16 @@ class AutoCheckRequestDepth {
     JS_ASSERT((cx)->thread()->data.requestDepth || (cx)->thread() == (cx)->runtime->gcThread); \
     AutoCheckRequestDepth _autoCheckRequestDepth(cx);
 
 #else
 # define CHECK_REQUEST(cx)          ((void) 0)
 # define CHECK_REQUEST_THREAD(cx)   ((void) 0)
 #endif
 
-static inline uintN
-FramePCOffset(JSContext *cx, js::StackFrame* fp)
-{
-    jsbytecode *pc = fp->hasImacropc() ? fp->imacropc() : fp->pc(cx);
-    return uintN(pc - fp->script()->code);
-}
-
 static inline JSAtom **
 FrameAtomBase(JSContext *cx, js::StackFrame *fp)
 {
     return fp->hasImacropc()
            ? cx->runtime->atomState.commonAtomsStart()
            : fp->script()->atomMap.vector;
 }
 
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -97,17 +97,16 @@ struct TracerState
     FrameInfo**    rp;                  // call stack pointer
     void*          eor;                 // first unusable word after the call stack
     VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
     VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
                                         // call exit guard mismatched
     void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
     VMSideExit*    outermostTreeExitGuard; // the last side exit returned by js_CallTree
     TreeFragment*  outermostTree;       // the outermost tree we initially invoked
-    uintN*         inlineCallCountp;    // inline call count counter
     VMSideExit**   innermostNestedGuardp;
     VMSideExit*    innermost;
     uint64         startTime;
     TracerState*   prev;
 
     // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
     // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
     // if an error or exception occurred.
@@ -116,17 +115,17 @@ struct TracerState
     // Used to communicate the location of the return value in case of a deep bail.
     double*        deepBailSp;
 
     // Used when calling natives from trace to root the vp vector.
     uintN          nativeVpLen;
     js::Value*     nativeVp;
 
     TracerState(JSContext *cx, TraceMonitor *tm, TreeFragment *ti,
-                uintN &inlineCallCountp, VMSideExit** innermostNestedGuardp);
+                VMSideExit** innermostNestedGuardp);
     ~TracerState();
 };
 
 /*
  * Storage for the execution state and store during trace execution. Generated
  * code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
  * doubles after the stack begins. Thus, on trace, |TracerState::eos| holds a
  * pointer to the first global.
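
With inlineCallCountp gone from TracerState, the constructor in jstracer.cpp (further down in this patch) no longer clamps eor by the remaining inline-call budget; the trace call stack simply spans the whole MAX_CALL_STACK_ENTRIES storage and recursion is bounded by real stack-space checks instead. A small standalone sketch of just that pointer arithmetic, using made-up constant values:

#include <algorithm>
#include <cstdio>

int main() {
    const unsigned MAX_CALL_STACK_ENTRIES = 500;  // assumed value, not the real constant
    const unsigned MAX_INLINE_CALLS = 3000;       // assumed value of the old interpreter quota
    unsigned inlineCallCount = 2800;              // pretend this many interpreter frames are active

    void *callstackBase[MAX_CALL_STACK_ENTRIES];

    // Old: the usable trace call stack shrank as the inline-call budget was used up.
    void **eorOld = callstackBase + std::min(MAX_CALL_STACK_ENTRIES,
                                             MAX_INLINE_CALLS - inlineCallCount);
    // New: always the full storage.
    void **eorNew = callstackBase + MAX_CALL_STACK_ENTRIES;

    std::printf("old usable entries: %td, new usable entries: %td\n",
                eorOld - callstackBase, eorNew - callstackBase);
    return 0;
}
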
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -1424,17 +1424,17 @@ JS_PUBLIC_API(JSScript *)
 JS_GetFrameScript(JSContext *cx, JSStackFrame *fp)
 {
     return Valueify(fp)->maybeScript();
 }
 
 JS_PUBLIC_API(jsbytecode *)
 JS_GetFramePC(JSContext *cx, JSStackFrame *fp)
 {
-    return Valueify(fp)->pc(cx);
+    return Valueify(fp)->pcQuadratic(cx);
 }
 
 JS_PUBLIC_API(JSStackFrame *)
 JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
 {
     return Jsvalify(js_GetScriptedCaller(cx, Valueify(fp)));
 }
 
@@ -2478,19 +2478,19 @@ jstv_Filename(JSStackFrame *fp)
         fp = fp->prev();
     return (fp && fp->maybeScript() && fp->script()->filename)
            ? (char *)fp->script()->filename
            : jstv_empty;
 }
 inline uintN
 jstv_Lineno(JSContext *cx, JSStackFrame *fp)
 {
-    while (fp && fp->pc(cx) == NULL)
+    while (fp && fp->pcQuadratic(cx) == NULL)
         fp = fp->prev();
-    return (fp && fp->pc(cx)) ? js_FramePCToLineNumber(cx, fp) : 0;
+    return (fp && fp->pcQuadratic(cx)) ? js_FramePCToLineNumber(cx, fp) : 0;
 }
 
 /* Collect states here and distribute to a matching buffer, if any */
 JS_FRIEND_API(void)
 js::StoreTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r)
 {
     StackFrame *fp = cx->fp();
 
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -262,17 +262,16 @@ static JSBool
 InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
                JSString *filename, uintN lineno, JSErrorReport *report)
 {
     JSSecurityCallbacks *callbacks;
     CheckAccessOp checkAccess;
     JSErrorReporter older;
     JSExceptionState *state;
     jsid callerid;
-    StackFrame *fp, *fpstop;
     size_t stackDepth, valueCount, size;
     JSBool overflow;
     JSExnPrivate *priv;
     JSStackTraceElem *elem;
     jsval *values;
 
     JS_ASSERT(exnObject->getClass() == &js_ErrorClass);
 
@@ -288,32 +287,34 @@ InitExnPrivate(JSContext *cx, JSObject *
                   ? Valueify(callbacks->checkObjectAccess)
                   : NULL;
     older = JS_SetErrorReporter(cx, NULL);
     state = JS_SaveExceptionState(cx);
 
     callerid = ATOM_TO_JSID(cx->runtime->atomState.callerAtom);
     stackDepth = 0;
     valueCount = 0;
-    for (fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
+
+    FrameRegsIter firstPass(cx);
+    for (; !firstPass.done(); ++firstPass) {
+        StackFrame *fp = firstPass.fp();
         if (fp->compartment() != cx->compartment)
             break;
         if (fp->isNonEvalFunctionFrame()) {
             Value v = NullValue();
             if (checkAccess &&
                 !checkAccess(cx, &fp->callee(), callerid, JSACC_READ, &v)) {
                 break;
             }
             valueCount += fp->numActualArgs();
         }
         ++stackDepth;
     }
     JS_RestoreExceptionState(cx, state);
     JS_SetErrorReporter(cx, older);
-    fpstop = fp;
 
     size = offsetof(JSExnPrivate, stackElems);
     overflow = (stackDepth > ((size_t)-1 - size) / sizeof(JSStackTraceElem));
     size += stackDepth * sizeof(JSStackTraceElem);
     overflow |= (valueCount > ((size_t)-1 - size) / sizeof(jsval));
     size += valueCount * sizeof(jsval);
     if (overflow) {
         js_ReportAllocationOverflow(cx);
@@ -331,36 +332,37 @@ InitExnPrivate(JSContext *cx, JSObject *
     priv->errorReport = NULL;
     priv->message = message;
     priv->filename = filename;
     priv->lineno = lineno;
     priv->stackDepth = stackDepth;
 
     values = GetStackTraceValueBuffer(priv);
     elem = priv->stackElems;
-    for (fp = js_GetTopStackFrame(cx); fp != fpstop; fp = fp->prev()) {
+    for (FrameRegsIter iter(cx); iter != firstPass; ++iter) {
+        StackFrame *fp = iter.fp();
         if (fp->compartment() != cx->compartment)
             break;
-        if (!fp->isFunctionFrame() || fp->isEvalFrame()) {
+        if (!fp->isNonEvalFunctionFrame()) {
             elem->funName = NULL;
             elem->argc = 0;
         } else {
             elem->funName = fp->fun()->atom
                             ? fp->fun()->atom
                             : cx->runtime->emptyString;
             elem->argc = fp->numActualArgs();
             fp->forEachCanonicalActualArg(CopyTo(Valueify(values)));
             values += elem->argc;
         }
         elem->ulineno = 0;
         elem->filename = NULL;
         if (fp->isScriptFrame()) {
             elem->filename = fp->script()->filename;
-            if (fp->pc(cx))
-                elem->ulineno = js_FramePCToLineNumber(cx, fp);
+            if (fp->isScriptFrame())
+                elem->ulineno = js_FramePCToLineNumber(cx, fp, iter.pc());
         }
         ++elem;
     }
     JS_ASSERT(priv->stackElems + stackDepth == elem);
     JS_ASSERT(GetStackTraceValueBuffer(priv) + valueCount == values);
 
     exnObject->setPrivate(priv);
 
@@ -687,19 +689,16 @@ static JSString *
 FilenameToString(JSContext *cx, const char *filename)
 {
     return JS_NewStringCopyZ(cx, filename);
 }
 
 static JSBool
 Exception(JSContext *cx, uintN argc, Value *vp)
 {
-    JSString *message, *filename;
-    StackFrame *fp;
-
     /*
      * ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when
      * called as functions, without operator new.  But as we do not give
      * each constructor a distinct JSClass, whose .name member is used by
      * NewNativeClassInstance to find the class prototype, we must get the
      * class prototype ourselves.
      */
     JSObject &callee = vp[0].toObject();
@@ -721,52 +720,55 @@ Exception(JSContext *cx, uintN argc, Val
      * If it's a new object of class Exception, then null out the private
      * data so that the finalizer doesn't attempt to free it.
      */
     if (obj->getClass() == &js_ErrorClass)
         obj->setPrivate(NULL);
 
     /* Set the 'message' property. */
     Value *argv = vp + 2;
+    JSString *message;
     if (argc != 0 && !argv[0].isUndefined()) {
         message = js_ValueToString(cx, argv[0]);
         if (!message)
             return JS_FALSE;
         argv[0].setString(message);
     } else {
         message = NULL;
     }
 
+    /* Find the scripted caller. */
+    FrameRegsIter iter(cx);
+    while (!iter.done() && !iter.fp()->isScriptFrame())
+        ++iter;
+
     /* Set the 'fileName' property. */
+    JSString *filename;
     if (argc > 1) {
         filename = js_ValueToString(cx, argv[1]);
         if (!filename)
             return JS_FALSE;
         argv[1].setString(filename);
-        fp = NULL;
     } else {
-        fp = js_GetScriptedCaller(cx, NULL);
-        if (fp) {
-            filename = FilenameToString(cx, fp->script()->filename);
+        if (!iter.done()) {
+            filename = FilenameToString(cx, iter.fp()->script()->filename);
             if (!filename)
                 return JS_FALSE;
         } else {
             filename = cx->runtime->emptyString;
         }
     }
 
     /* Set the 'lineNumber' property. */
     uint32_t lineno;
     if (argc > 2) {
         if (!ValueToECMAUint32(cx, argv[2], &lineno))
             return JS_FALSE;
     } else {
-        if (!fp)
-            fp = js_GetScriptedCaller(cx, NULL);
-        lineno = (fp && fp->pc(cx)) ? js_FramePCToLineNumber(cx, fp) : 0;
+        lineno = iter.done() ? 0 : js_FramePCToLineNumber(cx, iter.fp(), iter.pc());
     }
 
     if (obj->getClass() == &js_ErrorClass &&
         !InitExnPrivate(cx, obj, message, filename, lineno, NULL)) {
         return JS_FALSE;
     }
 
     vp->setObject(*obj);
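
InitExnPrivate above now remembers where its counting pass stopped by keeping the FrameRegsIter itself (firstPass) and comparing iterators on the second pass, instead of stashing the raw frame pointer fpstop. A standalone sketch of that two-pass pattern with a toy iterator (ToyIter and its fields are invented for illustration, not the real API):

#include <cstddef>
#include <cstdio>
#include <vector>

struct ToyIter {
    const std::vector<int> *frames;
    std::size_t i;
    bool done() const { return i == frames->size(); }
    int frame() const { return (*frames)[i]; }
    ToyIter &operator++() { ++i; return *this; }
    bool operator!=(const ToyIter &other) const { return i != other.i; }
};

int main() {
    std::vector<int> frames = { 5, 9, -1, 7 };  // pretend -1 marks a cross-compartment frame

    // First pass: count the frames we will record, stopping at the boundary.
    ToyIter firstPass{ &frames, 0 };
    std::size_t depth = 0;
    for (; !firstPass.done(); ++firstPass) {
        if (firstPass.frame() == -1)
            break;
        ++depth;
    }
    std::printf("depth = %zu\n", depth);

    // Second pass: walk again and stop exactly where the first pass stopped.
    for (ToyIter it{ &frames, 0 }; it != firstPass; ++it)
        std::printf("recording frame %d\n", it.frame());
    return 0;
}
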
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -144,17 +144,17 @@ js::GetScopeChain(JSContext *cx)
  */
 JSObject *
 js::GetBlockChain(JSContext *cx, StackFrame *fp)
 {
     if (!fp->isScriptFrame())
         return NULL;
 
     /* Assume that imacros don't affect blockChain */
-    jsbytecode *target = fp->hasImacropc() ? fp->imacropc() : fp->pc(cx);
+    jsbytecode *target = fp->hasImacropc() ? fp->imacropc() : fp->pcQuadratic(cx);
 
     JSScript *script = fp->script();
     jsbytecode *start = script->code;
     JS_ASSERT(target >= start && target < start + script->length);
 
     JSObject *blockChain = NULL;
     uintN indexBase = 0;
     ptrdiff_t oplen;
@@ -190,17 +190,17 @@ js::GetBlockChain(JSContext *cx, StackFr
  * instruction appears immediately after the current PC.
  * We ensure this happens for a few important ops like DEFFUN.
  * |oplen| is the length of opcode at the current PC.
  */
 JSObject *
 js::GetBlockChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen)
 {
     /* Assume that we're in a script frame. */
-    jsbytecode *pc = fp->pc(cx);
+    jsbytecode *pc = fp->pcQuadratic(cx);
     JS_ASSERT(js_GetOpcode(cx, fp->script(), pc) == op);
 
     pc += oplen;
     op = JSOp(*pc);
 
     /* The fast paths assume no JSOP_RESETBASE/INDEXBASE or JSOP_TRAP noise. */
     if (op == JSOP_NULLBLOCKCHAIN)
         return NULL;
@@ -2138,17 +2138,17 @@ IteratorNext(JSContext *cx, JSObject *it
         }
     }
     return js_IteratorNext(cx, iterobj, rval);
 }
 
 namespace js {
 
 JS_REQUIRES_STACK JS_NEVER_INLINE bool
-Interpret(JSContext *cx, StackFrame *entryFrame, uintN inlineCallCount, InterpMode interpMode)
+Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_INTERP);
 #endif
     JSAutoResolveFlags rf(cx, RESOLVE_INFER);
 
 # ifdef DEBUG
     /*
@@ -2307,17 +2307,17 @@ Interpret(JSContext *cx, StackFrame *ent
         mjit::CompileStatus status =                                          \
             mjit::CanMethodJITAtBranch(cx, script, regs.fp(), regs.pc);       \
         if (status == mjit::Compile_Error)                                    \
             goto error;                                                       \
         if (status == mjit::Compile_Okay) {                                   \
             void *ncode =                                                     \
                 script->nativeCodeForPC(regs.fp()->isConstructing(), regs.pc);\
             interpReturnOK = mjit::JaegerShotAtSafePoint(cx, ncode);          \
-            if (inlineCallCount)                                              \
+            if (entryFrame != regs.fp())                                      \
                 goto jit_return;                                              \
             regs.fp()->setFinishedInInterpreter();                            \
             goto leave_on_safe_point;                                         \
         }                                                                     \
         if (status == mjit::Compile_Abort) {                                  \
             useMethodJIT = false;                                             \
         }                                                                     \
     JS_END_MACRO
@@ -2360,17 +2360,17 @@ Interpret(JSContext *cx, StackFrame *ent
     JS_END_MACRO
 
 #define MONITOR_BRANCH()                                                      \
     JS_BEGIN_MACRO                                                            \
         if (TRACING_ENABLED(cx)) {                                            \
             if (!TRACE_RECORDER(cx) && !TRACE_PROFILER(cx) && useMethodJIT) { \
                 MONITOR_BRANCH_METHODJIT();                                   \
             } else {                                                          \
-                MonitorResult r = MonitorLoopEdge(cx, inlineCallCount, interpMode); \
+                MonitorResult r = MonitorLoopEdge(cx, interpMode);            \
                 if (r == MONITOR_RECORDING) {                                 \
                     JS_ASSERT(TRACE_RECORDER(cx));                            \
                     JS_ASSERT(!TRACE_PROFILER(cx));                           \
                     MONITOR_BRANCH_TRACEVIS;                                  \
                     ENABLE_INTERRUPTS();                                      \
                     CLEAR_LEAVE_ON_TRACE_POINT();                             \
                 }                                                             \
                 RESTORE_INTERP_VARS();                                        \
@@ -2851,18 +2851,16 @@ BEGIN_CASE(JSOP_STOP)
 
         /* Sync interpreter locals. */
         script = regs.fp()->script();
         argv = regs.fp()->maybeFormalArgs();
         atoms = FrameAtomBase(cx, regs.fp());
 
         /* Resume execution in the calling frame. */
         RESET_USE_METHODJIT();
-        JS_ASSERT(inlineCallCount);
-        inlineCallCount--;
         if (JS_LIKELY(interpReturnOK)) {
             JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length
                       == JSOP_CALL_LENGTH);
             TRACE_0(LeaveFrame);
             len = JSOP_CALL_LENGTH;
             DO_NEXT_OP(len);
         }
         goto error;
@@ -4613,22 +4611,16 @@ BEGIN_CASE(JSOP_FUNCALL)
         {
             JSScript *newscript = newfun->script();
             if (JS_UNLIKELY(newscript->isEmpty())) {
                 vp->setUndefined();
                 regs.sp = vp + 1;
                 goto end_call;
             }
 
-            /* Restrict recursion of lightweight functions. */
-            if (JS_UNLIKELY(inlineCallCount >= StackSpace::MAX_INLINE_CALLS)) {
-                js_ReportOverRecursed(cx);
-                goto error;
-            }
-
             /* Get pointer to new frame/slots, prepare arguments. */
             ContextStack &stack = cx->stack;
             StackFrame *newfp = stack.getInlineFrame(cx, regs.sp, argc, newfun,
                                                      newscript, &flags);
             if (JS_UNLIKELY(!newfp))
                 goto error;
 
             /* Initialize frame, locals. */
@@ -4644,17 +4636,16 @@ BEGIN_CASE(JSOP_FUNCALL)
             argv = regs.fp()->formalArgsEnd() - newfun->nargs;
             atoms = script->atomMap.vector;
 
             /* Now that the new frame is rooted, maybe create a call object. */
             if (newfun->isHeavyweight() && !CreateFunCallObject(cx, regs.fp()))
                 goto error;
 
             RESET_USE_METHODJIT();
-            inlineCallCount++;
             JS_RUNTIME_METER(rt, inlineCalls);
 
             TRACE_0(EnterFrame);
 
             CHECK_INTERRUPT_HANDLER();
 
 #ifdef JS_METHODJIT
             /* Try to ensure methods are method JIT'd.  */
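
In the interpreter changes above, the removed inlineCallCount bookkeeping is replaced by a frame-identity test: after a method-JIT safe-point shot, entryFrame != regs.fp() now answers "did this Interpret activation push inline frames?" where the counter used to. A toy illustration of that check (the Frame struct and helper are assumptions for the example, not interpreter code):

#include <cassert>

struct Frame { Frame *prev; };

// The new check: has this Interpret activation pushed frames beyond the one it entered with?
static bool inInlineFrame(const Frame *entryFrame, const Frame *currentFrame) {
    return currentFrame != entryFrame;
}

int main() {
    Frame entry = { nullptr };
    Frame inlined = { &entry };

    assert(!inInlineFrame(&entry, &entry));    // back at the entry frame: safe to leave Interpret
    assert(inInlineFrame(&entry, &inlined));   // still inside a pushed frame: take the jit_return path
    return 0;
}
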
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -252,17 +252,17 @@ enum InterpMode
     JSINTERP_PROFILE   = 3  /* interpreter should profile a loop */
 };
 
 /*
  * Execute the caller-initialized frame for a user-defined script or function
  * pointed to by cx->fp until completion or error.
  */
 extern JS_REQUIRES_STACK JS_NEVER_INLINE bool
-Interpret(JSContext *cx, StackFrame *stopFp, uintN inlineCallCount = 0, InterpMode mode = JSINTERP_NORMAL);
+Interpret(JSContext *cx, StackFrame *stopFp, InterpMode mode = JSINTERP_NORMAL);
 
 extern JS_REQUIRES_STACK bool
 RunScript(JSContext *cx, JSScript *script, StackFrame *fp);
 
 extern bool
 CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs);
 
 extern bool
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1155,17 +1155,17 @@ EvalKernel(JSContext *cx, const CallArgs
      * way so that the compiler can make assumptions about what bindings may or
      * may not exist in the current frame if it doesn't see 'eval'.)
      */
     uintN staticLevel;
     if (evalType == DIRECT_EVAL) {
         staticLevel = caller->script()->staticLevel + 1;
 
 #ifdef DEBUG
-        jsbytecode *callerPC = caller->pc(cx);
+        jsbytecode *callerPC = caller->pcQuadratic(cx);
         JS_ASSERT_IF(caller->isFunctionFrame(), caller->fun()->isHeavyweight());
         JS_ASSERT(callerPC && js_GetOpcode(cx, caller->script(), callerPC) == JSOP_EVAL);
 #endif
     } else {
         JS_ASSERT(call.callee().getGlobal() == &scopeobj);
         staticLevel = 0;
     }
 
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -2065,38 +2065,37 @@ Decompile(SprintStack *ss, jsbytecode *p
 
         /*
          * Save source literal associated with JS now before the following
          * rewrite changes op. See bug 380197.
          */
         token = CodeToken[op];
 
         if (pc + oplen == jp->dvgfence) {
-            StackFrame *fp;
-            uint32 format, mode, type;
-
             /*
              * Rewrite non-get ops to their "get" format if the error is in
              * the bytecode at pc, so we don't decompile more than the error
              * expression.
              */
-            fp = js_GetScriptedCaller(cx, NULL);
-            format = cs->format;
-            if (((fp && pc == fp->pc(cx)) ||
+            FrameRegsIter iter(cx);
+            while (!iter.done() && !iter.fp()->isScriptFrame())
+                ++iter;
+            uint32 format = cs->format;
+            if (((!iter.done() && pc == iter.pc()) ||
                  (pc == startpc && nuses != 0)) &&
                 format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_FOR|JOF_VARPROP)) {
-                mode = JOF_MODE(format);
+                uint32 mode = JOF_MODE(format);
                 if (mode == JOF_NAME) {
                     /*
                      * JOF_NAME does not imply JOF_ATOM, so we must check for
                      * the QARG and QVAR format types, and translate those to
                      * JSOP_GETARG or JSOP_GETLOCAL appropriately, instead of
                      * to JSOP_NAME.
                      */
-                    type = JOF_TYPE(format);
+                    uint32 type = JOF_TYPE(format);
                     op = (type == JOF_QARG)
                          ? JSOP_GETARG
                          : (type == JOF_LOCAL)
                          ? JSOP_GETLOCAL
                          : JSOP_NAME;
 
                     JS_ASSERT(js_CodeSpec[op].nuses >= 0);
                     i = nuses - js_CodeSpec[op].nuses;
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -1642,20 +1642,19 @@ js_GetSrcNoteCached(JSContext *cx, JSScr
             GSN_CACHE_METER(cache, fills);
         }
     }
 
     return result;
 }
 
 uintN
-js_FramePCToLineNumber(JSContext *cx, StackFrame *fp)
+js_FramePCToLineNumber(JSContext *cx, StackFrame *fp, jsbytecode *pc)
 {
-    return js_PCToLineNumber(cx, fp->script(),
-                             fp->hasImacropc() ? fp->imacropc() : fp->pc(cx));
+    return js_PCToLineNumber(cx, fp->script(), fp->hasImacropc() ? fp->imacropc() : pc);
 }
 
 uintN
 js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc)
 {
     JSOp op;
     JSFunction *fun;
     uintN lineno;
@@ -1760,29 +1759,42 @@ js_GetScriptLineExtent(JSScript *script)
             lineno = (uintN) js_GetSrcNoteOffset(sn, 0);
         } else if (type == SRC_NEWLINE) {
             lineno++;
         }
     }
     return 1 + lineno - script->lineno;
 }
 
-const char *
-js::CurrentScriptFileAndLineSlow(JSContext *cx, uintN *linenop)
+namespace js {
+
+uintN
+CurrentLine(JSContext *cx)
 {
-    StackFrame *fp = js_GetScriptedCaller(cx, NULL);
-    if (!fp) {
+    return js_FramePCToLineNumber(cx, cx->fp(), cx->regs().pc);
+}
+
+const char *
+CurrentScriptFileAndLineSlow(JSContext *cx, uintN *linenop)
+{
+    FrameRegsIter iter(cx);
+    while (!iter.done() && !iter.fp()->isScriptFrame())
+        ++iter;
+
+    if (iter.done()) {
         *linenop = 0;
         return NULL;
     }
 
-    *linenop = js_FramePCToLineNumber(cx, fp);
-    return fp->script()->filename;
+    *linenop = js_FramePCToLineNumber(cx, iter.fp(), iter.pc());
+    return iter.fp()->script()->filename;
 }
 
+}  /* namespace js */
+
 class DisablePrincipalsTranscoding {
     JSSecurityCallbacks *callbacks;
     JSPrincipalsTranscoder temp;
 
   public:
     DisablePrincipalsTranscoding(JSContext *cx)
       : callbacks(JS_GetRuntimeSecurityCallbacks(cx->runtime)),
         temp(NULL)
--- a/js/src/jsscript.h
+++ b/js/src/jsscript.h
@@ -733,29 +733,32 @@ extern jssrcnote *
 js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc);
 
 /*
  * NOTE: use js_FramePCToLineNumber(cx, fp) when you have an active fp, in
  * preference to js_PCToLineNumber (cx, fp->script  fp->regs->pc), because
  * fp->imacpc may be non-null, indicating an active imacro.
  */
 extern uintN
-js_FramePCToLineNumber(JSContext *cx, js::StackFrame *fp);
+js_FramePCToLineNumber(JSContext *cx, js::StackFrame *fp, jsbytecode *pc);
 
 extern uintN
 js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc);
 
 extern jsbytecode *
 js_LineNumberToPC(JSScript *script, uintN lineno);
 
 extern JS_FRIEND_API(uintN)
 js_GetScriptLineExtent(JSScript *script);
 
 namespace js {
 
+extern uintN
+CurrentLine(JSContext *cx);
+
 /*
  * This function returns the file and line number of the script currently
  * executing on cx. If there is no current script executing on cx (e.g., a
  * native called directly through JSAPI (e.g., by setTimeout)), NULL and 0 are
  * returned as the file and line. Additionally, this function avoids the full
  * linear scan to compute line number when the caller guarnatees that the
  * script compilation occurs at a JSOP_EVAL.
  */
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -379,16 +379,24 @@ ValueToTypeChar(const Value &v)
     if (v.isBoolean()) return 'B';
     if (v.isNull()) return 'N';
     if (v.isUndefined()) return 'U';
     if (v.isMagic()) return 'M';
     return '?';
 }
 #endif
 
+static inline uintN
+CurrentPCOffset(JSContext *cx)
+{
+    StackFrame *fp = cx->fp();
+    jsbytecode *pc = fp->hasImacropc() ? fp->imacropc() : cx->regs().pc;
+    return uintN(pc - fp->script()->code);
+}
+
 
 /* Blacklist parameters. */
 
 /*
  * Number of iterations of a loop where we start tracing.  That is, we don't
  * start tracing until the beginning of the HOTLOOP-th iteration.
  */
 #define HOTLOOP 8
@@ -1631,18 +1639,18 @@ TreeFragment::initialize(JSContext* cx, 
 
     /* Capture the coerced type of each active slot in the type map. */
     this->typeMap.captureTypes(cx, globalObj, *globalSlots, 0 /* callDepth */, speculate);
     this->nStackTypes = this->typeMap.length() - globalSlots->length();
     this->spOffsetAtEntry = cx->regs().sp - cx->fp()->base();
 
 #ifdef DEBUG
     this->treeFileName = cx->fp()->script()->filename;
-    this->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp());
-    this->treePCOffset = FramePCOffset(cx, cx->fp());
+    this->treeLineNumber = CurrentLine(cx);
+    this->treePCOffset = CurrentPCOffset(cx);
 #endif
     this->script = cx->fp()->script();
     this->gcthings.clear();
     this->shapes.clear();
     this->unstableExits = NULL;
     this->sideExits.clear();
 
     /* Determine the native frame layout at the entry point. */
@@ -2532,18 +2540,18 @@ TraceRecorder::finishAbort(const char* r
     AUDIT(recorderAborted);
 #ifdef DEBUG
     debug_only_printf(LC_TMMinimal | LC_TMAbort,
                       "Abort recording of tree %s:%d@%d at %s:%d@%d: %s.\n",
                       tree->treeFileName,
                       tree->treeLineNumber,
                       tree->treePCOffset,
                       cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()),
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx),
                       reason);
 #endif
     Backoff(traceMonitor, (jsbytecode*) fragment->root->ip, fragment->root);
 
     /*
      * If this is the primary trace and we didn't succeed compiling, trash the
      * tree. Otherwise, remove the VMSideExits we added while recording, which
      * are about to be invalid.
@@ -4173,17 +4181,17 @@ public:
 
 #if defined JS_JIT_SPEW
 JS_REQUIRES_STACK static void
 TreevisLogExit(JSContext* cx, VMSideExit* exit)
 {
     debug_only_printf(LC_TMTreeVis, "TREEVIS ADDEXIT EXIT=%p TYPE=%s FRAG=%p PC=%p FILE=\"%s\""
                       " LINE=%d OFFS=%d", (void*)exit, getExitName(exit->exitType),
                       (void*)exit->from, (void*)cx->regs().pc, cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()), FramePCOffset(cx, cx->fp()));
+                      CurrentLine(cx), CurrentPCOffset(cx));
     debug_only_print0(LC_TMTreeVis, " STACK=\"");
     for (unsigned i = 0; i < exit->numStackSlots; i++)
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->stackTypeMap()[i]));
     debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
     for (unsigned i = 0; i < exit->numGlobalSlots; i++)
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(exit->globalTypeMap()[i]));
     debug_only_print0(LC_TMTreeVis, "\"\n");
 }
@@ -4527,18 +4535,17 @@ TraceRecorder::compile()
         return ARECORD_STOP;
 
     /* :TODO: windows support */
 #if defined DEBUG && !defined WIN32
     /* Associate a filename and line number with the fragment. */
     const char* filename = cx->fp()->script()->filename;
     char* label = (char*) cx->malloc_((filename ? strlen(filename) : 7) + 16);
     if (label) {
-        sprintf(label, "%s:%u", filename ? filename : "<stdin>",
-                js_FramePCToLineNumber(cx, cx->fp()));
+        sprintf(label, "%s:%u", filename ? filename : "<stdin>", CurrentLine(cx));
         lirbuf->printer->addrNameMap->addAddrRange(fragment, sizeof(Fragment), 0, label);
         cx->free_(label);
     }
 #endif
 
     Assembler *assm = traceMonitor->assembler;
     JS_ASSERT(!assm->error());
     assm->compile(fragment, tempAlloc(), /*optimize*/true verbose_only(, lirbuf->printer));
@@ -5006,18 +5013,18 @@ TraceRecorder::closeLoop()
      * should try to compile the outer tree again.
      */
     if (outerPC)
         AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via closeLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()),
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx),
                       fragment->profFragID);
     debug_only_print0(LC_TMMinimal, "\n");
 #endif
 
     return finishSuccessfully();
 }
 
 static void
@@ -5174,18 +5181,18 @@ TraceRecorder::endLoop(VMSideExit* exit)
      * yet, we should try to compile the outer tree again.
      */
     if (outerPC)
         AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via endLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()),
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx),
                       fragment->profFragID);
     debug_only_print0(LC_TMTracer, "\n");
 #endif
 
     return finishSuccessfully();
 }
 
 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
@@ -5739,18 +5746,17 @@ RecordTree(JSContext* cx, TraceMonitor* 
 
     f->initialize(cx, globalSlots, speculate);
 
 #ifdef DEBUG
     AssertTreeIsUnique(tm, f);
 #endif
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMTreeVis, "TREEVIS CREATETREE ROOT=%p PC=%p FILE=\"%s\" LINE=%d OFFS=%d",
-                      (void*)f, f->ip, f->treeFileName, f->treeLineNumber,
-                      FramePCOffset(cx, cx->fp()));
+                      (void*)f, f->ip, f->treeFileName, f->treeLineNumber, CurrentPCOffset(cx));
     debug_only_print0(LC_TMTreeVis, " STACK=\"");
     for (unsigned i = 0; i < f->nStackTypes; i++)
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[i]));
     debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
     for (unsigned i = 0; i < f->nGlobalTypes(); i++)
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[f->nStackTypes + i]));
     debug_only_print0(LC_TMTreeVis, "\"\n");
 #endif
@@ -5862,18 +5868,17 @@ CreateBranchFragment(JSContext* cx, Trac
                           ? (++(tm->lastFragID)) : 0;
     )
 
     VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs().pc verbose_only(, profFragID));
 
     debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
                       " LINE=%d ANCHOR=%p OFFS=%d\n",
                       (void*)root, (void*)f, (void*)cx->regs().pc, cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()), (void*)anchor,
-                      FramePCOffset(cx, cx->fp()));
+                      CurrentLine(cx), (void*)anchor, CurrentPCOffset(cx));
     verbose_only( tm->branches = new (*tm->dataAlloc) Seq<Fragment*>(f, tm->branches); )
 
     f->root = root;
     if (anchor)
         anchor->target = f;
     return f;
 }
 
@@ -5976,31 +5981,31 @@ AttemptToExtendTree(JSContext* cx, Trace
     }
 #ifdef MOZ_TRACEVIS
     if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
 #endif
     return false;
 }
 
 static JS_REQUIRES_STACK bool
-ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
+ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
             VMSideExit** innermostNestedGuardp, VMSideExit** lrp);
 
 static inline MonitorResult
 RecordingIfTrue(bool b)
 {
     return b ? MONITOR_RECORDING : MONITOR_NOT_RECORDING;
 }
 
 /*
  * A postcondition of recordLoopEdge is that if recordLoopEdge does not return
  * MONITOR_RECORDING, the recording has been aborted.
  */
 JS_REQUIRES_STACK MonitorResult
-TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
+TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r)
 {
     TraceMonitor* tm = r->traceMonitor;
 
     /* Process needFlush and deep abort requests. */
     if (tm->needFlush) {
         ResetJIT(cx, tm, FR_DEEP_BAIL);
         return MONITOR_NOT_RECORDING;
     }
@@ -6020,18 +6025,18 @@ TraceRecorder::recordLoopEdge(JSContext*
     if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
         JS_ASSERT(!tm->recorder);
         return MONITOR_NOT_RECORDING;
     }
 
     debug_only_printf(LC_TMTracer,
                       "Looking for type-compatible peer (%s:%d@%d)\n",
                       cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()));
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx));
 
     // Find a matching inner tree. If none can be found, compile one.
     TreeFragment* f = r->findNestedCompatiblePeer(first);
     if (!f || !f->code()) {
         AUDIT(noCompatInnerTrees);
 
         TreeFragment* outerFragment = root;
         JSScript* outerScript = outerFragment->script;
@@ -6041,38 +6046,34 @@ TraceRecorder::recordLoopEdge(JSContext*
 
         if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
             return MONITOR_NOT_RECORDING;
 
         return RecordingIfTrue(RecordTree(cx, tm, first,
                                           outerScript, outerPC, outerArgc, globalSlots));
     }
 
-    AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
+    AbortableRecordingStatus status = r->attemptTreeCall(f);
     if (status == ARECORD_CONTINUE)
         return MONITOR_RECORDING;
     if (status == ARECORD_ERROR) {
         if (tm->recorder)
             AbortRecording(cx, "Error returned while recording loop edge");
         return MONITOR_ERROR;
     }
     JS_ASSERT(status == ARECORD_ABORTED && !tm->recorder);
     return MONITOR_NOT_RECORDING;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
+TraceRecorder::attemptTreeCall(TreeFragment* f)
 {
     adjustCallerTypes(f);
     prepareTreeCall(f);
 
-#ifdef DEBUG
-    uintN oldInlineCallCount = inlineCallCount;
-#endif
-
     JSContext *localCx = cx;
     TraceMonitor *localtm = traceMonitor;
 
     // Refresh the import type map so the tracker can reimport values after the
     // call with their correct types. The inner tree must not change the type of
     // any variable in a frame above the current one (i.e., upvars).
     //
     // Note that DetermineTypesVisitor may call determineSlotType, which may
@@ -6081,17 +6082,17 @@ TraceRecorder::attemptTreeCall(TreeFragm
     // if there is not a tracker instruction for that value, which means that
     // value has not been written yet, so that type map entry is up to date.
     importTypeMap.setLength(NativeStackSlots(cx, callDepth));
     DetermineTypesVisitor visitor(*this, importTypeMap.data());
     VisitStackSlots(visitor, cx, callDepth);
 
     VMSideExit* innermostNestedGuard = NULL;
     VMSideExit* lr;
-    bool ok = ExecuteTree(cx, traceMonitor, f, inlineCallCount, &innermostNestedGuard, &lr);
+    bool ok = ExecuteTree(cx, traceMonitor, f, &innermostNestedGuard, &lr);
 
     /*
      * If ExecuteTree reentered the interpreter, it may have killed |this|
      * and/or caused an error, which must be propagated.
      */
     JS_ASSERT_IF(localtm->recorder, localtm->recorder == this);
     if (!ok)
         return ARECORD_ERROR;
@@ -6115,18 +6116,16 @@ TraceRecorder::attemptTreeCall(TreeFragm
                 return ARECORD_ABORTED;
             }
             return AttemptToExtendTree(localCx, localtm,
                                        innermostNestedGuard, lr, outerScript, outerPC)
                    ? ARECORD_CONTINUE
                    : ARECORD_ABORTED;
         }
 
-        JS_ASSERT(oldInlineCallCount == inlineCallCount);
-
         /* Emit a call to the inner tree and continue recording the outer tree trace. */
         emitTreeCall(f, lr);
         return ARECORD_CONTINUE;
 
       case UNSTABLE_LOOP_EXIT:
       {
         /* Abort recording so the inner loop can become type stable. */
         JSObject* _globalObj = globalObj;
@@ -6422,32 +6421,30 @@ FindVMCompatiblePeer(JSContext* cx, JSOb
 /*
  * For the native stacks and global frame, reuse the storage in |tm->storage|.
  * This reuse depends on the invariant that only one trace uses |tm->storage|
  * at a time. This is subtly correct in case of deep bail; see the comment
  * about "clobbering deep bails" in DeepBail.
  */
 JS_ALWAYS_INLINE
 TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
-                         uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
+                         VMSideExit** innermostNestedGuardp)
   : cx(cx),
     traceMonitor(tm),
     stackBase(tm->storage->stack()),
     sp(stackBase + f->nativeStackBase / sizeof(double)),
     eos(tm->storage->global()),
     callstackBase(tm->storage->callstack()),
     sor(callstackBase),
     rp(callstackBase),
-    eor(callstackBase + JS_MIN(TraceNativeStorage::MAX_CALL_STACK_ENTRIES,
-                               StackSpace::MAX_INLINE_CALLS - inlineCallCount)),
+    eor(callstackBase + TraceNativeStorage::MAX_CALL_STACK_ENTRIES),
     lastTreeExitGuard(NULL),
     lastTreeCallGuard(NULL),
     rpAtLastTreeCall(NULL),
     outermostTree(f),
-    inlineCallCountp(&inlineCallCount),
     innermostNestedGuardp(innermostNestedGuardp),
 #ifdef EXECUTE_TREE_TIMER
     startTime(rdtsc()),
 #endif
     builtinStatus(0),
     nativeVp(NULL)
 {
     JS_ASSERT(!tm->tracecx);
@@ -6464,23 +6461,16 @@ TracerState::TracerState(JSContext* cx, 
     JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL ||
               JS_THREAD_DATA(cx)->recordingCompartment == cx->compartment);
     JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
     JS_THREAD_DATA(cx)->onTraceCompartment = cx->compartment;
 
     JS_ASSERT(eos == stackBase + TraceNativeStorage::MAX_NATIVE_STACK_SLOTS);
     JS_ASSERT(sp < eos);
 
-    /*
-     * inlineCallCount has already been incremented, if being invoked from
-     * EnterFrame. It is okay to have a 0-frame restriction since the JIT
-     * might not need any frames.
-     */
-    JS_ASSERT(inlineCallCount <= StackSpace::MAX_INLINE_CALLS);
-
 #ifdef DEBUG
     /*
      * Cannot 0xCD-fill global frame since it may overwrite a bailed outer
      * ExecuteTree's 0xdeadbeefdeadbeef marker.
      */
     memset(tm->storage->stack(), 0xCD, TraceNativeStorage::MAX_NATIVE_STACK_SLOTS * sizeof(double));
     memset(tm->storage->callstack(), 0xCD, TraceNativeStorage::MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
 #endif
@@ -6571,52 +6561,51 @@ enum LEAVE_TREE_STATUS {
   DEEP_BAILED = 1
 };
 
 static LEAVE_TREE_STATUS
 LeaveTree(TraceMonitor *tm, TracerState&, VMSideExit *lr);
 
 /* Return false if the interpreter should goto error. */
 static JS_REQUIRES_STACK bool
-ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
+ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
             VMSideExit** innermostNestedGuardp, VMSideExit **lrp)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_EXECUTE);
 #endif
     JS_ASSERT(f->root == f && f->code());
 
-    if (!ScopeChainCheck(cx, f) || !cx->stack.space().ensureEnoughSpaceToEnterTrace() ||
-        inlineCallCount + f->maxCallDepth > StackSpace::MAX_INLINE_CALLS) {
+    if (!ScopeChainCheck(cx, f) || !cx->stack.space().ensureEnoughSpaceToEnterTrace()) {
         *lrp = NULL;
         return true;
     }
 
     /* Make sure the global object is sane. */
     JS_ASSERT(f->globalObj->numSlots() <= MAX_GLOBAL_SLOTS);
     JS_ASSERT(f->nGlobalTypes() == f->globalSlots->length());
     JS_ASSERT_IF(f->globalSlots->length() != 0,
                  f->globalObj->shape() == f->globalShape);
 
     /* Initialize trace state. */
-    TracerState state(cx, tm, f, inlineCallCount, innermostNestedGuardp);
+    TracerState state(cx, tm, f, innermostNestedGuardp);
     double* stack = tm->storage->stack();
     double* global = tm->storage->global();
     JSObject* globalObj = f->globalObj;
     unsigned ngslots = f->globalSlots->length();
     uint16* gslots = f->globalSlots->data();
 
     BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
                      f->typeMap.data(), global, stack);
 
     AUDIT(traceTriggered);
     debug_only_printf(LC_TMTracer, "entering trace at %s:%u@%u, execs: %u code: %p\n",
                       cx->fp()->script()->filename,
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()),
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx),
            f->execs,
            (void *) f->code());
 
     debug_only_stmt(uint32 globalSlots = globalObj->numSlots();)
     debug_only_stmt(*(uint64*)&tm->storage->global()[globalSlots] = 0xdeadbeefdeadbeefLL;)
 
     /* Execute trace. */
     tm->iterationCounter = 0;
@@ -6650,17 +6639,17 @@ ExecuteTree(JSContext* cx, TraceMonitor*
     if (iters == LOOP_COUNT_MAX)
         prefix = ">";
     debug_only_printf(LC_TMMinimal, "  [%.3f ms] Tree at line %u executed for %s%u iterations;"
                       " executed %u times; leave for %s at %s:%u (%s)\n",
                       double(t1-t0) / PRMJ_USEC_PER_MSEC,
                       f->treeLineNumber, prefix, (uintN)iters, f->execs,
                       getExitName(lr->exitType),
                       fp->script()->filename,
-                      js_FramePCToLineNumber(cx, fp),
+                      CurrentLine(cx),
                       js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs().pc]);
 #endif
     
 #ifdef JS_METHODJIT
     if (cx->methodJitEnabled) {
         if (lr->exitType == LOOP_EXIT && f->iters < MIN_LOOP_ITERS
             && f->execs >= LOOP_CHECK_ITERS)
         {
@@ -6840,30 +6829,28 @@ LeaveTree(TraceMonitor *tm, TracerState&
          * to its correct value.
          */
         cx->regs().sp = cx->fp()->slots() + (fi->spdist - (2 + fi->get_argc()));
         int slots = FlushNativeStackFrame(cx, 0 /* callDepth */, fi->get_typemap(), stack);
 
         /* Finish initializing cx->fp() and push a new cx->fp(). */
         SynthesizeFrame(cx, *fi, callee);
 #ifdef DEBUG
-        StackFrame* fp = cx->fp();
         debug_only_printf(LC_TMTracer,
                           "synthesized deep frame for %s:%u@%u, slots=%d, fi=%p\n",
-                          fp->script()->filename,
-                          js_FramePCToLineNumber(cx, fp),
-                          FramePCOffset(cx, fp),
+                          cx->fp()->script()->filename,
+                          CurrentLine(cx),
+                          CurrentPCOffset(cx),
                           slots,
                           (void*)*callstack);
 #endif
         /*
          * Keep track of the additional frames we put on the interpreter stack
          * and the native stack slots we consumed.
          */
-        ++*state.inlineCallCountp;
         ++callstack;
         stack += slots;
     }
 
     /*
      * We already synthesized the frames around the innermost guard. Here we
      * just deal with additional frames inside the tree we are bailing out
      * from.
@@ -6873,23 +6860,22 @@ LeaveTree(TraceMonitor *tm, TracerState&
     unsigned calleeOffset = 0;
     for (unsigned n = 0; n < calldepth; ++n) {
         /* Peek at the callee native slot in the not-yet-synthesized prev frame. */
         calleeOffset += callstack[n]->callerHeight;
         JSObject* callee = *(JSObject**)&stack[calleeOffset];
 
         /* Reconstruct the frame. */
         SynthesizeFrame(cx, *callstack[n], callee);
-        ++*state.inlineCallCountp;
 #ifdef DEBUG
-        StackFrame* fp = cx->fp();
         debug_only_printf(LC_TMTracer,
                           "synthesized shallow frame for %s:%u@%u\n",
-                          fp->script()->filename, js_FramePCToLineNumber(cx, fp),
-                          FramePCOffset(cx, fp));
+                          cx->fp()->script()->filename,
+                          CurrentLine(cx),
+                          CurrentPCOffset(cx));
 #endif
     }
 
     /*
      * Adjust sp and pc relative to the tree we exited from (not the tree we
      * entered into).  These are our final values for sp and pc since
      * SynthesizeFrame has already taken care of all frames in between.
      */
@@ -6932,18 +6918,18 @@ LeaveTree(TraceMonitor *tm, TracerState&
     uint64 cycles = rdtsc() - state.startTime;
 #elif defined(JS_JIT_SPEW)
     uint64 cycles = 0;
 #endif
     debug_only_printf(LC_TMTracer,
                       "leaving trace at %s:%u@%u, op=%s, lr=%p, exitType=%s, sp=%lld, "
                       "calldepth=%d, cycles=%llu\n",
                       fp->script()->filename,
-                      js_FramePCToLineNumber(cx, fp),
-                      FramePCOffset(cx, fp),
+                      CurrentLine(cx),
+                      CurrentPCOffset(cx),
                       js_CodeName[fp->hasImacropc() ? *fp->imacropc() : *cx->regs().pc],
                       (void*)lr,
                       getExitName(lr->exitType),
                       (long long int)(cx->regs().sp - fp->base()),
                       calldepth,
                       (unsigned long long int)cycles);
 
     DebugOnly<int> slots = FlushNativeStackFrame(cx, innermost->calldepth, innermost->stackTypeMap(), stack);
@@ -7030,17 +7016,17 @@ TraceRecorder::assertInsideLoop()
      * immediately preceeding a loop (the one that jumps to the loop
      * condition).
      */
     JS_ASSERT(pc >= beg - JSOP_GOTO_LENGTH && pc <= end);
 #endif
 }
 
 JS_REQUIRES_STACK MonitorResult
-RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount)
+RecordLoopEdge(JSContext* cx, TraceMonitor* tm)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_MONITOR);
 #endif
 
     /* Is the recorder currently active? */
     if (tm->recorder) {
         tm->recorder->assertInsideLoop();
@@ -7048,17 +7034,17 @@ RecordLoopEdge(JSContext* cx, TraceMonit
         if (pc == tm->recorder->tree->ip) {
             AbortableRecordingStatus status = tm->recorder->closeLoop();
             if (status != ARECORD_COMPLETED) {
                 if (tm->recorder)
                     AbortRecording(cx, "closeLoop failed");
                 return MONITOR_NOT_RECORDING;
             }
         } else {
-            MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount);
+            MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder);
             JS_ASSERT((r == MONITOR_RECORDING) == (tm->recorder != NULL));
             if (r == MONITOR_RECORDING || r == MONITOR_ERROR)
                 return r;
 
             /*
              * recordLoopEdge will invoke an inner tree if we have a matching
              * one. If we arrive here, that tree didn't run to completion and
              * instead we mis-matched or the inner tree took a side exit other than
@@ -7135,18 +7121,17 @@ RecordLoopEdge(JSContext* cx, TraceMonit
         if (!rv)
             tvso.r = R_FAIL_RECORD_TREE;
 #endif
         return RecordingIfTrue(rv);
     }
 
     debug_only_printf(LC_TMTracer,
                       "Looking for compat peer %d@%d, from %p (ip: %p)\n",
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()), (void*)f, f->ip);
+                      CurrentLine(cx), CurrentPCOffset(cx), (void*)f, f->ip);
 
     uintN count;
     TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, f, count);
     if (!match) {
         if (count < MAXPEERS)
             goto record;
 
         /*
@@ -7159,17 +7144,17 @@ RecordLoopEdge(JSContext* cx, TraceMonit
         tvso.r = R_MAX_PEERS;
 #endif
         return MONITOR_NOT_RECORDING;
     }
 
     VMSideExit* lr = NULL;
     VMSideExit* innermostNestedGuard = NULL;
 
-    if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
+    if (!ExecuteTree(cx, tm, match, &innermostNestedGuard, &lr))
         return MONITOR_ERROR;
 
     if (!lr) {
 #ifdef MOZ_TRACEVIS
         tvso.r = R_FAIL_EXECUTE_TREE;
 #endif
         return MONITOR_NOT_RECORDING;
     }
@@ -16706,18 +16691,17 @@ class AutoRetBlacklist
 
     ~AutoRetBlacklist()
     {
         *blacklist = IsBlacklisted(pc);
     }
 };
 
 JS_REQUIRES_STACK TracePointAction
-RecordTracePoint(JSContext* cx, TraceMonitor* tm,
-                 uintN& inlineCallCount, bool* blacklist, bool execAllowed)
+RecordTracePoint(JSContext* cx, TraceMonitor* tm, bool* blacklist, bool execAllowed)
 {
     StackFrame* fp = cx->fp();
     jsbytecode* pc = cx->regs().pc;
 
     JS_ASSERT(!tm->recorder);
     JS_ASSERT(!tm->profile);
 
     JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
@@ -16731,34 +16715,33 @@ RecordTracePoint(JSContext* cx, TraceMon
         return TPA_Nothing;
     }
 
     uint32 argc = entryFrameArgc(cx);
     TreeFragment* tree = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
 
     debug_only_printf(LC_TMTracer,
                       "Looking for compat peer %d@%d, from %p (ip: %p)\n",
-                      js_FramePCToLineNumber(cx, cx->fp()),
-                      FramePCOffset(cx, cx->fp()), (void*)tree, tree->ip);
+                      CurrentLine(cx), CurrentPCOffset(cx), (void*)tree, tree->ip);
 
     if (tree->code() || tree->peer) {
         uintN count;
         TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, tree, count);
         if (match) {
             VMSideExit* lr = NULL;
             VMSideExit* innermostNestedGuard = NULL;
 
             if (!execAllowed) {
                 /* We've already compiled a trace for it, but we don't want to use that trace. */
                 Blacklist((jsbytecode*)tree->root->ip);
                 return TPA_Nothing;
             }
 
             /* Best case - just go and execute. */
-            if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
+            if (!ExecuteTree(cx, tm, match, &innermostNestedGuard, &lr))
                 return TPA_Error;
 
             if (!lr)
                 return TPA_Nothing;
 
             switch (lr->exitType) {
               case UNSTABLE_LOOP_EXIT:
                 if (!AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0))
@@ -16806,17 +16789,17 @@ RecordTracePoint(JSContext* cx, TraceMon
         return TPA_Nothing;
     if (!RecordTree(cx, tm, tree->first, NULL, NULL, 0, globalSlots))
         return TPA_Nothing;
 
   interpret:
     JS_ASSERT(tm->recorder);
 
     /* Locked and loaded with a recorder. Ask the interperter to go run some code. */
-    if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
+    if (!Interpret(cx, fp, JSINTERP_RECORD))
         return TPA_Error;
 
     JS_ASSERT(!cx->isExceptionPending());
     
     return TPA_RanStuff;
 }
 
 LoopProfile::LoopProfile(TraceMonitor *tm, StackFrame *entryfp,
@@ -16848,17 +16831,17 @@ LoopProfile::reset()
     loopStackDepth = 0;
     sp = 0;
 
     PodArrayZero(allOps);
     PodArrayZero(selfOps);
 }
 
 MonitorResult
-LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount)
+LoopProfile::profileLoopEdge(JSContext* cx)
 {
     if (cx->regs().pc == top) {
         debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n");
         decide(cx);
     } else {
         /* Record an inner loop invocation. */
         StackFrame *fp = cx->fp();
         jsbytecode *pc = cx->regs().pc;
@@ -16957,23 +16940,23 @@ LoopProfile::stopProfiling(JSContext *cx
 {
     JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
     JS_THREAD_DATA(cx)->profilingCompartment = NULL;
 
     traceMonitor->profile = NULL;
 }
 
 JS_REQUIRES_STACK TracePointAction
-MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist,
+MonitorTracePoint(JSContext *cx, bool* blacklist,
                   void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits)
 {
     TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
 
     if (!cx->profilingEnabled)
-        return RecordTracePoint(cx, tm, inlineCallCount, blacklist, true);
+        return RecordTracePoint(cx, tm, blacklist, true);
 
     *blacklist = false;
 
     /*
      * This is the only place where we check for re-entering the profiler.
      * The assumption is that MonitorTracePoint is the only place where we
      * start profiling. When we do so, we enter an interpreter frame with
      * JSINTERP_PROFILE mode. All other entry points to the profiler check
@@ -16993,32 +16976,31 @@ MonitorTracePoint(JSContext *cx, uintN& 
     prof->hits += hits;
     if (prof->hits < PROFILE_HOTLOOP)
         return TPA_Nothing;
 
     AutoRetBlacklist autoRetBlacklist(cx->regs().pc, blacklist);
 
     if (prof->profiled) {
         if (prof->traceOK) {
-            return RecordTracePoint(cx, tm, inlineCallCount, blacklist, prof->execOK);
+            return RecordTracePoint(cx, tm, blacklist, prof->execOK);
         } else {
             return TPA_Nothing;
         }
     }
 
-    debug_only_printf(LC_TMProfiler, "Profiling at line %d\n",
-                      js_FramePCToLineNumber(cx, cx->fp()));
+    debug_only_printf(LC_TMProfiler, "Profiling at line %d\n", CurrentLine(cx));
 
     tm->profile = prof;
 
     JS_ASSERT(JS_THREAD_DATA(cx)->profilingCompartment == NULL);
     JS_ASSERT(JS_THREAD_DATA(cx)->recordingCompartment == NULL);
     JS_THREAD_DATA(cx)->profilingCompartment = cx->compartment;
 
-    if (!Interpret(cx, cx->fp(), inlineCallCount, JSINTERP_PROFILE))
+    if (!Interpret(cx, cx->fp(), JSINTERP_PROFILE))
         return TPA_Error;
 
     JS_ASSERT(!cx->isExceptionPending());
 
     /* Look it up again since a reset may have happened during Interpret. */
     prof = LookupLoopProfile(tm, pc);
     if (prof && prof->undecided) {
         *loopCounter = 3000;
@@ -17055,17 +17037,17 @@ LoopProfile::profileOperation(JSContext*
     }
 
     jsbytecode *pc = cx->regs().pc;
     StackFrame *fp = cx->fp();
     JSScript *script = fp->script();
 
     if (!PCWithinLoop(fp, pc, *this)) {
         debug_only_printf(LC_TMProfiler, "Profiling complete (loop exit) at line %u\n",
-                          js_FramePCToLineNumber(cx, cx->fp()));
+                          CurrentLine(cx));
         tm->profile->decide(cx);
         stopProfiling(cx);
         return ProfComplete;
     }
 
     while (loopStackDepth > 0 && !PCWithinLoop(fp, pc, loopStack[loopStackDepth-1])) {
         debug_only_print0(LC_TMProfiler, "Profiler: Exiting inner loop\n");
         loopStackDepth--;
@@ -17076,17 +17058,17 @@ LoopProfile::profileOperation(JSContext*
             if (loopStackDepth == PROFILE_MAX_INNER_LOOPS) {
                 debug_only_print0(LC_TMProfiler, "Profiling complete (maxnest)\n");
                 tm->profile->decide(cx);
                 stopProfiling(cx);
                 return ProfComplete;
             }
 
             debug_only_printf(LC_TMProfiler, "Profiler: Entering inner loop at line %d\n",
-                              js_FramePCToLineNumber(cx, cx->fp()));
+                              CurrentLine(cx));
             loopStack[loopStackDepth++] = InnerLoop(fp, pc, GetLoopBottom(cx));
         }
     }
 
     numAllOps++;
     if (loopStackDepth == 0) {
         numSelfOps++;
         numSelfOpsMult += branchMultiplier;
@@ -17437,23 +17419,23 @@ LoopProfile::decide(JSContext *cx)
         debug_only_printf(LC_TMProfiler, "Blacklisting at %d\n", line);
         Blacklist(top);
     }
 
     debug_only_print0(LC_TMProfiler, "\n");
 }
 
 JS_REQUIRES_STACK MonitorResult
-MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode)
+MonitorLoopEdge(JSContext* cx, InterpMode interpMode)
 {
     TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
     if (interpMode == JSINTERP_PROFILE && tm->profile)
-        return tm->profile->profileLoopEdge(cx, inlineCallCount);
+        return tm->profile->profileLoopEdge(cx);
     else
-        return RecordLoopEdge(cx, tm, inlineCallCount);
+        return RecordLoopEdge(cx, tm);
 }
 
 void
 AbortProfiling(JSContext *cx)
 {
     JS_ASSERT(TRACE_PROFILER(cx));
     LoopProfile *prof = TRACE_PROFILER(cx);
     
@@ -17462,20 +17444,20 @@ AbortProfiling(JSContext *cx)
     prof->traceOK = false;
     prof->execOK = false;
     prof->stopProfiling(cx);
 }
 
 #else /* JS_METHODJIT */
 
 JS_REQUIRES_STACK MonitorResult
-MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode)
+MonitorLoopEdge(JSContext* cx, InterpMode interpMode)
 {
     TraceMonitor *tm = JS_TRACE_MONITOR_FROM_CONTEXT(cx);
-    return RecordLoopEdge(cx, tm, inlineCallCount);
+    return RecordLoopEdge(cx, tm);
 }
 
 #endif /* JS_METHODJIT */
 
 uint32
 GetHotloop(JSContext *cx)
 {
 #ifdef JS_METHODJIT
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -789,17 +789,17 @@ public:
         allOps[kind]++;
         if (loopStackDepth == 0)
             selfOps[kind]++;
     }
 
     inline uintN count(OpKind kind) { return allOps[kind]; }
 
     /* Called for every back edge being profiled. */
-    MonitorResult profileLoopEdge(JSContext* cx, uintN& inlineCallCount);
+    MonitorResult profileLoopEdge(JSContext* cx);
     
     /* Called for every instruction being profiled. */
     ProfileAction profileOperation(JSContext *cx, JSOp op);
 
     /* Once a loop's profile is done, these decide whether it should be traced. */
     bool isCompilationExpensive(JSContext *cx, uintN depth);
     bool isCompilationUnprofitable(JSContext *cx, uintN goodOps);
     void decide(JSContext *cx);
@@ -1534,21 +1534,19 @@ class TraceRecorder
     JS_REQUIRES_STACK AbortableRecordingStatus endLoop(VMSideExit* exit);
     JS_REQUIRES_STACK bool joinEdgesToEntry(TreeFragment* peer_root);
     JS_REQUIRES_STACK void adjustCallerTypes(TreeFragment* f);
     JS_REQUIRES_STACK void prepareTreeCall(TreeFragment* inner);
     JS_REQUIRES_STACK void emitTreeCall(TreeFragment* inner, VMSideExit* exit);
     JS_REQUIRES_STACK void determineGlobalTypes(JSValueType* typeMap);
     JS_REQUIRES_STACK VMSideExit* downSnapshot(FrameInfo* downFrame);
     JS_REQUIRES_STACK TreeFragment* findNestedCompatiblePeer(TreeFragment* f);
-    JS_REQUIRES_STACK AbortableRecordingStatus attemptTreeCall(TreeFragment* inner,
-                                                               uintN& inlineCallCount);
+    JS_REQUIRES_STACK AbortableRecordingStatus attemptTreeCall(TreeFragment* inner);
 
-    static JS_REQUIRES_STACK MonitorResult recordLoopEdge(JSContext* cx, TraceRecorder* r,
-                                                          uintN& inlineCallCount);
+    static JS_REQUIRES_STACK MonitorResult recordLoopEdge(JSContext* cx, TraceRecorder* r);
 
     /* Allocators associated with this recording session. */
     VMAllocator& tempAlloc() const { return *traceMonitor->tempAlloc; }
     VMAllocator& traceAlloc() const { return *traceMonitor->traceAlloc; }
     VMAllocator& dataAlloc() const { return *traceMonitor->dataAlloc; }
 
     /* Member declarations for each opcode, to be called before interpreting the opcode. */
 #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
@@ -1592,19 +1590,18 @@ class TraceRecorder
 
     friend class ImportBoxedStackSlotVisitor;
     friend class AdjustCallerGlobalTypesVisitor;
     friend class AdjustCallerStackTypesVisitor;
     friend class TypeCompatibilityVisitor;
     friend class SlotMap;
     friend class DefaultSlotMap;
     friend class DetermineTypesVisitor;
-    friend MonitorResult RecordLoopEdge(JSContext*, TraceMonitor*, uintN&);
-    friend TracePointAction RecordTracePoint(JSContext*, TraceMonitor*, uintN &inlineCallCount,
-                                             bool *blacklist);
+    friend MonitorResult RecordLoopEdge(JSContext*, TraceMonitor*);
+    friend TracePointAction RecordTracePoint(JSContext*, TraceMonitor*, bool *blacklist);
     friend AbortResult AbortRecording(JSContext*, const char*);
     friend class BoxArg;
     friend void TraceMonitor::sweep(JSContext *cx);
 
   public:
     static bool JS_REQUIRES_STACK
     startRecorder(JSContext*, TraceMonitor *, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
@@ -1680,24 +1677,24 @@ class TraceRecorder
     JS_END_MACRO
 
 #define TRACE_ARGS(x,args)      TRACE_ARGS_(x, args)
 #define TRACE_0(x)              TRACE_ARGS(x, ())
 #define TRACE_1(x,a)            TRACE_ARGS(x, (a))
 #define TRACE_2(x,a,b)          TRACE_ARGS(x, (a, b))
 
 extern JS_REQUIRES_STACK MonitorResult
-MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode);
+MonitorLoopEdge(JSContext* cx, InterpMode interpMode);
 
 extern JS_REQUIRES_STACK TracePointAction
-RecordTracePoint(JSContext*, uintN& inlineCallCount, bool* blacklist);
+RecordTracePoint(JSContext*, bool* blacklist);
 
 extern JS_REQUIRES_STACK TracePointAction
-MonitorTracePoint(JSContext*, uintN& inlineCallCount, bool* blacklist,
-                  void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits);
+MonitorTracePoint(JSContext*, bool* blacklist, void** traceData, uintN *traceEpoch,
+                  uint32 *loopCounter, uint32 hits);
 
 extern JS_REQUIRES_STACK TraceRecorder::AbortResult
 AbortRecording(JSContext* cx, const char* reason);
 
 extern void
 InitJIT();
 
 extern void
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -1737,21 +1737,20 @@ ParseXMLSource(JSContext *cx, JSString *
     LeaveTrace(cx);
     xml = NULL;
     FrameRegsIter i(cx);
     for (; !i.done() && !i.pc(); ++i)
         JS_ASSERT(!i.fp()->isScriptFrame());
     filename = NULL;
     lineno = 1;
     if (!i.done()) {
-        StackFrame *fp = i.fp();
         op = (JSOp) *i.pc();
         if (op == JSOP_TOXML || op == JSOP_TOXMLLIST) {
-            filename = fp->script()->filename;
-            lineno = js_FramePCToLineNumber(cx, fp);
+            filename = i.fp()->script()->filename;
+            lineno = js_FramePCToLineNumber(cx, i.fp(), i.pc());
             for (endp = srcp + srclen; srcp < endp; srcp++) {
                 if (*srcp == '\n')
                     --lineno;
             }
         }
     }
 
     {
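
The ParseXMLSource hunk above shows the pattern this patch standardizes on: rather than walking raw StackFrame prev links and asking each frame for its pc, callers walk a FrameRegsIter, which pairs every frame with the pc at which it is paused. A minimal sketch of that pattern, using only the iterator API visible in this patch (the helper name and signature are invented for illustration):

    /*
     * Hypothetical helper, not part of the patch: locate the nearest scripted
     * frame and report its filename/line from the iterator's frame and pc,
     * mirroring the ParseXMLSource change above.
     */
    static bool
    NearestScriptedLocation(JSContext *cx, const char **filename, uintN *lineno)
    {
        FrameRegsIter i(cx);
        while (!i.done() && !i.pc())
            ++i;
        if (i.done())
            return false;
        *filename = i.fp()->script()->filename;
        *lineno = js_FramePCToLineNumber(cx, i.fp(), i.pc());
        return true;
    }
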
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -215,18 +215,17 @@ RemovePartialFrame(JSContext *cx, StackF
  * overflow f.stackLimit.
  */
 void JS_FASTCALL
 stubs::HitStackQuota(VMFrame &f)
 {
     /* Include space to push another frame. */
     uintN nvals = f.fp()->script()->nslots + VALUES_PER_STACK_FRAME;
     JS_ASSERT(f.regs.sp == f.fp()->base());
-    StackSpace &space = f.cx->stack.space();
-    if (space.bumpLimitWithinQuota(NULL, f.entryfp, f.regs.sp, nvals, &f.stackLimit))
+    if (f.cx->stack.space().tryBumpLimit(NULL, f.regs.sp, nvals, &f.stackLimit))
         return;
 
     /* Remove the current partially-constructed frame before throwing. */
     RemovePartialFrame(f.cx, f.fp());
     js_ReportOverRecursed(f.cx);
     THROW();
 }
 
@@ -253,17 +252,17 @@ stubs::FixupArity(VMFrame &f, uint32 nac
     void *ncode          = oldfp->nativeReturnAddress();
 
     /* Pop the inline frame. */
     f.regs.popPartialFrame((Value *)oldfp);
 
     /* Reserve enough space for a callee frame. */
     StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
                                                             fun, fun->script(), &flags,
-                                                            f.entryfp, &f.stackLimit);
+                                                            &f.stackLimit);
     if (!newfp) {
         /*
          * The PC is not coherent with the current frame, so fix it up for
          * exception handling.
          */
         f.regs.pc = f.jit()->nativeToPC(ncode);
         THROWV(NULL);
     }
@@ -338,17 +337,17 @@ UncachedInlineCall(VMFrame &f, uint32 fl
     Value *vp = f.regs.sp - (argc + 2);
     JSObject &callee = vp->toObject();
     JSFunction *newfun = callee.getFunctionPrivate();
     JSScript *newscript = newfun->script();
 
     /* Get pointer to new frame/slots, prepare arguments. */
     StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc,
                                                             newfun, newscript, &flags,
-                                                            f.entryfp, &f.stackLimit);
+                                                            &f.stackLimit);
     if (JS_UNLIKELY(!newfp))
         return false;
 
     /* Initialize frame, locals. */
     newfp->initCallFrame(cx, callee, newfun, argc, flags);
     SetValueRangeToUndefined(newfp->slots(), newscript->nfixed);
 
     /* Officially push the frame. */
@@ -691,17 +690,17 @@ PartialInterpret(VMFrame &f)
 #ifdef DEBUG
     JSScript *script = fp->script();
     JS_ASSERT(!fp->finishedInInterpreter());
     JS_ASSERT(fp->hasImacropc() ||
               !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc));
 #endif
 
     JSBool ok = JS_TRUE;
-    ok = Interpret(cx, fp, 0, JSINTERP_SAFEPOINT);
+    ok = Interpret(cx, fp, JSINTERP_SAFEPOINT);
 
     return ok;
 }
 
 JS_STATIC_ASSERT(JSOP_NOP == 0);
 
 /*
  * Returns whether the current PC would return, or if the frame has already
@@ -941,34 +940,33 @@ RunTracer(VMFrame &f)
      * check the HAS_SCOPECHAIN flag, and the frame is guaranteed to have the
      * correct return value stored if we trace/interpret through to the end
      * of the frame.
      */
     entryFrame->scopeChain();
     entryFrame->returnValue();
 
     bool blacklist;
-    uintN inlineCallCount = 0;
     void **traceData;
     uintN *traceEpoch;
     uint32 *loopCounter;
     uint32 hits;
 #if JS_MONOIC
     traceData = &ic.traceData;
     traceEpoch = &ic.traceEpoch;
     loopCounter = &ic.loopCounter;
     *loopCounter = 1;
     hits = ic.loopCounterStart;
 #else
     traceData = NULL;
     traceEpoch = NULL;
     loopCounter = NULL;
     hits = 1;
 #endif
-    tpa = MonitorTracePoint(f.cx, inlineCallCount, &blacklist, traceData, traceEpoch,
+    tpa = MonitorTracePoint(f.cx, &blacklist, traceData, traceEpoch,
                             loopCounter, hits);
     JS_ASSERT(!TRACE_RECORDER(cx));
 
 #if JS_MONOIC
     ic.loopCounterStart = *loopCounter;
     if (blacklist)
         DisableTraceHint(entryFrame->jit(), ic);
 #endif
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -1030,52 +1030,22 @@ ic::NativeCall(VMFrame &f, CallICInfo *i
 void JS_FASTCALL
 ic::NativeNew(VMFrame &f, CallICInfo *ic)
 {
     CallCompiler cc(f, *ic, true);
     if (!cc.generateNativeStub())
         stubs::SlowNew(f, ic->frameSize.staticArgc());
 }
 
-static const unsigned MANY_ARGS = 1024;
-
-static bool
-BumpStackFull(VMFrame &f, uintN inc)
-{
-    /* If we are not passing many args, treat this as a normal call. */
-    if (inc < MANY_ARGS) {
-        if (f.regs.sp + inc < f.stackLimit)
-            return true;
-        StackSpace &space = f.cx->stack.space();
-        return space.bumpLimitWithinQuota(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit);
-    }
-
-    /*
-     * The purpose of f.stackLimit is to catch over-recursion based on
-     * assumptions about the average frame size. 'apply' with a large number of
-     * arguments breaks these assumptions and can result in premature "out of
-     * script quota" errors. Normally, apply will go through js::Invoke, which
-     * effectively starts a fresh stackLimit. Here, we bump f.stackLimit,
-     * if necessary, to allow for this 'apply' call, and a reasonable number of
-     * subsequent calls, to succeed without hitting the stackLimit. In theory,
-     * this a recursive chain containing apply to circumvent the stackLimit.
-     * However, since each apply call must consume at least MANY_ARGS slots,
-     * this sequence will quickly reach the end of the stack and OOM.
-     */
-    StackSpace &space = f.cx->stack.space();
-    return space.bumpLimit(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit);
-}
-
 static JS_ALWAYS_INLINE bool
 BumpStack(VMFrame &f, uintN inc)
 {
-    /* Fast path BumpStackFull. */
-    if (inc < MANY_ARGS && f.regs.sp + inc < f.stackLimit)
+    if (f.regs.sp + inc < f.stackLimit)
         return true;
-    return BumpStackFull(f, inc);
+    return f.cx->stack.space().tryBumpLimit(f.cx, f.regs.sp, inc, &f.stackLimit);
 }
 
 /*
  * SplatApplyArgs is only called for expressions of the form |f.apply(x, y)|.
  * Additionally, the callee has already been checked to be the native apply.
  * All successful paths through SplatApplyArgs must set f.u.call.dynamicArgc
  * and f.regs.sp.
  */
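
With STACK_QUOTA gone, BumpStack above reduces to one comparison against f.stackLimit plus a tryBumpLimit call; the MANY_ARGS special case is no longer needed because the limit now tracks how much of the reserved stack is committed rather than a per-activation quota. The toy model below (all names invented; it is not the real js::StackSpace) sketches that behavior: the limit starts at the committed boundary, tryBumpLimit commits more of the reservation in fixed-size chunks, and failure means the stack is genuinely exhausted.

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    struct ToyStack {
        static const size_t CAPACITY = 1 << 16;    /* total reserved slots */
        static const size_t COMMIT_VALS = 1 << 10; /* commit granularity */

        std::vector<int> storage;  /* stands in for the reserved region */
        size_t committed;          /* slots usable without committing more */

        ToyStack() : storage(CAPACITY), committed(COMMIT_VALS) {}

        /* Analogue of ensureSpace: commit in chunks or fail outright. */
        bool ensureSpace(size_t from, size_t nvals) {
            if (CAPACITY - from < nvals)
                return false;              /* truly exhausted; the real code reports over-recursion */
            while (committed < from + nvals)
                committed += COMMIT_VALS;  /* the real code commits pages here */
            return true;
        }

        /* Analogue of tryBumpLimit: commit more, then republish the limit. */
        bool tryBumpLimit(size_t from, size_t nvals, size_t *limit) {
            if (!ensureSpace(from, nvals))
                return false;
            *limit = committed;            /* commitEnd_ on Windows, end_ elsewhere */
            return true;
        }
    };

    /* Analogue of the simplified BumpStack fast path above. */
    static bool bumpStack(ToyStack &s, size_t sp, size_t inc, size_t *limit) {
        if (sp + inc < *limit)
            return true;
        return s.tryBumpLimit(sp, inc, limit);
    }

    int main() {
        ToyStack s;
        size_t sp = 0, limit = s.committed;
        while (bumpStack(s, sp, 300, &limit))  /* push until the reservation runs out */
            sp += 300;
        std::printf("over-recursed at sp=%zu (limit=%zu)\n", sp, limit);
        return 0;
    }
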
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -174,18 +174,17 @@ class PICStubCompiler : public BaseCompi
     LookupStatus disable(JSContext *cx, const char *reason) {
         return pic.disable(cx, reason, stub);
     }
 
   protected:
     void spew(const char *event, const char *op) {
 #ifdef JS_METHODJIT_SPEW
         JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
-                   type, event, op, script->filename,
-                   js_FramePCToLineNumber(cx, f.fp()));
+                   type, event, op, script->filename, CurrentLine(cx));
 #endif
     }
 };
 
 class SetPropCompiler : public PICStubCompiler
 {
     JSObject *obj;
     JSAtom *atom;
@@ -2050,18 +2049,17 @@ BaseIC::isCallOp()
     return !!(js_CodeSpec[op].format & JOF_CALLOP);
 }
 
 void
 BaseIC::spew(JSContext *cx, const char *event, const char *message)
 {
 #ifdef JS_METHODJIT_SPEW
     JaegerSpew(JSpew_PICs, "%s %s: %s (%s: %d)\n",
-               js_CodeName[op], event, message, cx->fp()->script()->filename,
-               js_FramePCToLineNumber(cx, cx->fp()));
+               js_CodeName[op], event, message, cx->fp()->script()->filename, CurrentLine(cx));
 #endif
 }
 
 LookupStatus
 BaseIC::disable(JSContext *cx, const char *reason, void *stub)
 {
     spew(cx, "disabled", reason);
     Repatcher repatcher(cx->fp()->jit());
@@ -2235,17 +2233,17 @@ GetElementIC::attachGetProp(JSContext *c
     buffer.maybeLink(protoGuard, slowPathStart);
     buffer.link(done, fastPathRejoin);
 
     CodeLocationLabel cs = buffer.finalize();
 #if DEBUG
     char *chars = DeflateString(cx, v.toString()->getChars(cx), v.toString()->length());
     JaegerSpew(JSpew_PICs, "generated %s stub at %p for atom 0x%x (\"%s\") shape 0x%x (%s: %d)\n",
                js_CodeName[op], cs.executableAddress(), id, chars, holder->shape(),
-               cx->fp()->script()->filename, js_FramePCToLineNumber(cx, cx->fp()));
+               cx->fp()->script()->filename, CurrentLine(cx));
     cx->free_(chars);
 #endif
 
     // Update the inline guards, if needed.
     if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalClaspGuard()) {
         Repatcher repatcher(cx->fp()->jit());
 
         if (shouldPatchInlineTypeGuard()) {
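
The spew changes above (in jstracer.cpp and PolyIC.cpp) replace the two-argument js_FramePCToLineNumber(cx, cx->fp()) calls with CurrentLine(cx). CurrentLine's definition is not in these hunks; judging from the call sites it replaces and the new three-argument js_FramePCToLineNumber form used in jsxml.cpp, a plausible shape is the following (an assumption for illustration, not the patch's literal code):

    /* Assumed shape of the CurrentLine helper; not taken from this patch. */
    static uintN
    CurrentLine(JSContext *cx)
    {
        return js_FramePCToLineNumber(cx, cx->fp(), cx->regs().pc);
    }
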
--- a/js/src/vm/Stack-inl.h
+++ b/js/src/vm/Stack-inl.h
@@ -424,17 +424,17 @@ StackFrame::initEvalFrame(JSContext *cx,
         exec = prev->exec;
         args.script = script;
     } else {
         exec.script = script;
     }
 
     scopeChain_ = &prev->scopeChain();
     prev_ = prev;
-    prevpc_ = prev->pc(cx);
+    prevpc_ = prev->pcQuadratic(cx);
     JS_ASSERT(!hasImacropc());
     JS_ASSERT(!hasHookData());
     setAnnotation(prev->annotation());
 }
 
 inline void
 StackFrame::initGlobalFrame(JSScript *script, JSObject &chain, StackFrame *prev, uint32 flagsArg)
 {
@@ -762,44 +762,42 @@ StackSpace::ensureSpace(JSContext *maybe
     JS_ASSERT(from >= firstUnused());
 #ifdef XP_WIN
     JS_ASSERT(from <= commitEnd_);
     if (commitEnd_ - from < nvals)
         return bumpCommit(maybecx, from, nvals);
     return true;
 #else
     if (end_ - from < nvals) {
-        js_ReportOutOfScriptQuota(maybecx);
+        js_ReportOverRecursed(maybecx);
         return false;
     }
     return true;
 #endif
 }
 
 inline Value *
 StackSpace::getStackLimit(JSContext *cx)
 {
+    Value *limit;
+#ifdef XP_WIN
+    limit = commitEnd_;
+#else
+    limit = end_;
+#endif
+
+    /* See getStackLimit comment in Stack.h. */
     FrameRegs &regs = cx->regs();
     uintN minSpace = regs.fp()->numSlots() + VALUES_PER_STACK_FRAME;
-    Value *sp = regs.sp;
-    Value *required = sp + minSpace;
-    Value *desired = sp + STACK_QUOTA;
-#ifdef XP_WIN
-    if (required <= commitEnd_)
-        return Min(commitEnd_, desired);
-    if (!bumpCommit(cx, sp, minSpace))
+    if (regs.sp + minSpace > limit) {
+        js_ReportOverRecursed(cx);
         return NULL;
-    JS_ASSERT(commitEnd_ >= required);
-    return commitEnd_;
-#else
-    if (required <= end_)
-        return Min(end_, desired);
-    js_ReportOutOfScriptQuota(cx);
-    return NULL;
-#endif
+    }
+
+    return limit;
 }
 
 /*****************************************************************************/
 
 JS_ALWAYS_INLINE bool
 ContextStack::isCurrentAndActive() const
 {
     assertSegmentsInSync();
@@ -814,34 +812,33 @@ struct OOMCheck
     operator()(JSContext *cx, StackSpace &space, Value *from, uintN nvals)
     {
         return space.ensureSpace(cx, from, nvals);
     }
 };
 
 struct LimitCheck
 {
-    StackFrame *base;
     Value **limit;
 
-    LimitCheck(StackFrame *base, Value **limit) : base(base), limit(limit) {}
+    LimitCheck(Value **limit) : limit(limit) {}
 
     JS_ALWAYS_INLINE bool
     operator()(JSContext *cx, StackSpace &space, Value *from, uintN nvals)
     {
         /*
          * Include an extra sizeof(StackFrame) to satisfy the method-jit
          * stackLimit invariant.
          */
         nvals += VALUES_PER_STACK_FRAME;
 
         JS_ASSERT(from < *limit);
         if (*limit - from >= ptrdiff_t(nvals))
             return true;
-        return space.bumpLimitWithinQuota(cx, base, from, nvals, limit);
+        return space.tryBumpLimit(cx, from, nvals, limit);
     }
 };
 
 }  /* namespace detail */
 
 template <class Check>
 JS_ALWAYS_INLINE StackFrame *
 ContextStack::getCallFrame(JSContext *cx, Value *firstUnused, uintN nactual,
@@ -891,22 +888,22 @@ ContextStack::getInlineFrame(JSContext *
     JS_ASSERT(cx->regs().sp == sp);
 
     return getCallFrame(cx, sp, nactual, fun, script, flags, detail::OOMCheck());
 }
 
 JS_ALWAYS_INLINE StackFrame *
 ContextStack::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
                                         JSFunction *fun, JSScript *script, uint32 *flags,
-                                        StackFrame *fp, Value **limit) const
+                                        Value **limit) const
 {
     JS_ASSERT(isCurrentAndActive());
     JS_ASSERT(cx->regs().sp == sp);
 
-    return getCallFrame(cx, sp, nactual, fun, script, flags, detail::LimitCheck(fp, limit));
+    return getCallFrame(cx, sp, nactual, fun, script, flags, detail::LimitCheck(limit));
 }
 
 JS_ALWAYS_INLINE void
 ContextStack::pushInlineFrame(JSScript *script, StackFrame *fp, FrameRegs &regs)
 {
     JS_ASSERT(isCurrentAndActive());
     JS_ASSERT(regs_ == &regs && script == fp->script());
 
@@ -1053,49 +1050,16 @@ ContextStack::findFrameAtLevel(uintN tar
             break;
         fp = fp->prev();
     }
     return fp;
 }
 
 /*****************************************************************************/
 
-inline
-FrameRegsIter::FrameRegsIter(JSContext *cx)
-  : cx_(cx)
-{
-    seg_ = cx->stack.currentSegment();
-    if (JS_UNLIKELY(!seg_ || !seg_->isActive())) {
-        initSlow();
-        return;
-    }
-    fp_ = cx->fp();
-    sp_ = cx->regs().sp;
-    pc_ = cx->regs().pc;
-    return;
-}
-
-inline FrameRegsIter &
-FrameRegsIter::operator++()
-{
-    StackFrame *oldfp = fp_;
-    fp_ = fp_->prev();
-    if (!fp_)
-        return *this;
-
-    if (JS_UNLIKELY(oldfp == seg_->initialFrame())) {
-        incSlow(oldfp);
-        return *this;
-    }
-
-    pc_ = oldfp->prevpc();
-    sp_ = oldfp->formalArgsEnd();
-    return *this;
-}
-
 namespace detail {
 
 struct STATIC_SKIP_INFERENCE CopyNonHoleArgsTo
 {
     CopyNonHoleArgsTo(ArgumentsObject *argsobj, Value *dst) : argsobj(*argsobj), dst(dst) {}
     ArgumentsObject &argsobj;
     Value *dst;
     bool operator()(uint32 argi, Value *src) {
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -85,27 +85,29 @@ StackFrame::prevpcSlow()
     return prevpc_;
 #else
     JS_NOT_REACHED("Unknown PC for frame");
     return NULL;
 #endif
 }
 
 jsbytecode *
-StackFrame::pc(JSContext *cx, StackFrame *next)
+StackFrame::pcQuadratic(JSContext *cx)
 {
-    JS_ASSERT_IF(next, next->prev() == this);
-
     StackSegment &seg = cx->stack.space().containingSegment(this);
     FrameRegs &regs = seg.currentRegs();
+
+    /*
+     * This isn't just an optimization; seg->computeNextFrame(fp) is only
+     * defined if fp != seg->currentFrame.
+     */
     if (regs.fp() == this)
         return regs.pc;
-    if (!next)
-        next = seg.computeNextFrame(this);
-    return next->prevpc();
+
+    return seg.computeNextFrame(this)->prevpc();
 }
 
 /*****************************************************************************/
 
 JS_REQUIRES_STACK bool
 StackSegment::contains(const StackFrame *fp) const
 {
     JS_ASSERT(!empty());
@@ -294,17 +296,17 @@ StackSpace::mark(JSTracer *trc)
     }
 }
 
 #ifdef XP_WIN
 JS_FRIEND_API(bool)
 StackSpace::bumpCommit(JSContext *maybecx, Value *from, ptrdiff_t nvals) const
 {
     if (end_ - from < nvals) {
-        js_ReportOutOfScriptQuota(maybecx);
+        js_ReportOverRecursed(maybecx);
         return false;
     }
 
     Value *newCommit = commitEnd_;
     Value *request = from + nvals;
 
     /* Use a dumb loop; will probably execute once. */
     JS_ASSERT((end_ - newCommit) % COMMIT_VALS == 0);
@@ -312,64 +314,35 @@ StackSpace::bumpCommit(JSContext *maybec
         newCommit += COMMIT_VALS;
         JS_ASSERT((end_ - newCommit) >= 0);
     } while (newCommit < request);
 
     /* The cast is safe because CAPACITY_BYTES is small. */
     int32 size = static_cast<int32>(newCommit - commitEnd_) * sizeof(Value);
 
     if (!VirtualAlloc(commitEnd_, size, MEM_COMMIT, PAGE_READWRITE)) {
-        js_ReportOutOfScriptQuota(maybecx);
+        js_ReportOverRecursed(maybecx);
         return false;
     }
 
     commitEnd_ = newCommit;
     return true;
 }
 #endif
 
 bool
-StackSpace::bumpLimitWithinQuota(JSContext *maybecx, StackFrame *fp, Value *sp,
-                                 uintN nvals, Value **limit) const
+StackSpace::tryBumpLimit(JSContext *maybecx, Value *from, uintN nvals, Value **limit)
 {
-    JS_ASSERT(sp >= firstUnused());
-    JS_ASSERT(sp + nvals >= *limit);
+    if (!ensureSpace(maybecx, from, nvals))
+        return false;
 #ifdef XP_WIN
-    Value *quotaEnd = (Value *)fp + STACK_QUOTA;
-    if (sp + nvals < quotaEnd) {
-        if (!ensureSpace(NULL, sp, nvals))
-            goto fail;
-        *limit = Min(quotaEnd, commitEnd_);
-        return true;
-    }
-  fail:
+    *limit = commitEnd_;
+#else
+    *limit = end_;
 #endif
-    js_ReportOverRecursed(maybecx);
-    return false;
-}
-
-bool
-StackSpace::bumpLimit(JSContext *cx, StackFrame *fp, Value *sp,
-                      uintN nvals, Value **limit) const
-{
-    JS_ASSERT(*limit > base_);
-    JS_ASSERT(sp < *limit);
-
-    /*
-     * Ideally, we would only ensure space for 'nvals', not 'nvals + remain',
-     * since this is ~500K. However, this whole call should be a rare case: some
-     * script is passing a obscene number of args to 'apply' and we are just
-     * trying to keep the stack limit heuristic from breaking the script.
-     */
-    Value *quota = (Value *)fp + STACK_QUOTA;
-    uintN remain = quota - sp;
-    uintN inc = nvals + remain;
-    if (!ensureSpace(NULL, sp, inc))
-        return false;
-    *limit = sp + inc;
     return true;
 }
 
 void
 StackSpace::popSegment()
 {
     JS_ASSERT(seg_->empty());
     seg_ = seg_->previousInMemory();
@@ -659,41 +632,54 @@ ContextStack::notifyIfNoCodeRunning()
         return;
 
     cx_->resetCompartment();
     cx_->maybeMigrateVersionOverride();
 }
 
 /*****************************************************************************/
 
-void
-FrameRegsIter::initSlow()
+FrameRegsIter::FrameRegsIter(JSContext *cx)
+  : cx_(cx)
 {
+    LeaveTrace(cx);
+    seg_ = cx->stack.currentSegment();
     if (!seg_) {
         fp_ = NULL;
         sp_ = NULL;
         pc_ = NULL;
         return;
     }
-
-    JS_ASSERT(seg_->isSuspended());
-    fp_ = seg_->suspendedFrame();
-    sp_ = seg_->suspendedRegs().sp;
-    pc_ = seg_->suspendedRegs().pc;
+    if (!seg_->isActive()) {
+        JS_ASSERT(seg_->isSuspended());
+        fp_ = seg_->suspendedFrame();
+        sp_ = seg_->suspendedRegs().sp;
+        pc_ = seg_->suspendedRegs().pc;
+        return;
+    }
+    fp_ = cx->fp();
+    sp_ = cx->regs().sp;
+    pc_ = cx->regs().pc;
+    return;
 }
 
-/*
- * Using the invariant described in the js::StackSegment comment, we know that,
- * when a pair of prev-linked stack frames are in the same segment, the
- * first frame's address is the top of the prev-frame's stack, modulo missing
- * arguments.
- */
-void
-FrameRegsIter::incSlow(StackFrame *oldfp)
+FrameRegsIter &
+FrameRegsIter::operator++()
 {
+    StackFrame *oldfp = fp_;
+    fp_ = fp_->prev();
+    if (!fp_)
+        return *this;
+
+    if (oldfp != seg_->initialFrame()) {
+        pc_ = oldfp->prevpc();
+        sp_ = oldfp->formalArgsEnd();
+        return *this;
+    }
+
     JS_ASSERT(oldfp == seg_->initialFrame());
     JS_ASSERT(fp_ == oldfp->prev());
 
     /*
      * Segments from arbitrary context stacks can interleave so we must do a
      * linear scan over segments in this context stack. Furthermore, 'prev' can
      * be any frame in the segment (not only the suspendedFrame), so we must
      * scan each stack frame in each segment. Fortunately, this is not hot code.
@@ -709,16 +695,23 @@ FrameRegsIter::incSlow(StackFrame *oldfp
             pc_ = seg_->suspendedRegs().pc;
             f = seg_->suspendedFrame();
         } else {
             sp_ = f->formalArgsEnd();
             pc_ = f->prevpc();
             f = f->prev();
         }
     }
+    return *this;
+}
+
+bool
+FrameRegsIter::operator==(const FrameRegsIter &rhs) const
+{
+    return done() == rhs.done() && (done() || fp_ == rhs.fp_);
 }
 
 /*****************************************************************************/
 
 AllFramesIter::AllFramesIter(JSContext *cx)
   : seg_(cx->stack.currentSegment()),
     fp_(seg_ ? seg_->currentFrame() : NULL)
 {
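
FrameRegsIter's constructor and operator++ move out of line here and gain equality operators: two iterators compare equal when both are done or when they sit on the same StackFrame. A small usage sketch (hypothetical helper, illustration only) of what the comparison enables:

    /* Hypothetical: does a fresh walk over cx's frames reach 'other'? */
    static bool
    ConvergesWith(JSContext *cx, const FrameRegsIter &other)
    {
        FrameRegsIter i(cx);
        while (!i.done()) {
            if (i == other)
                return true;
            ++i;
        }
        return i == other;   /* both-done also counts as equal */
    }
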
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -423,19 +423,30 @@ class StackFrame
      * Script
      *
      * All function and global frames have an associated JSScript which holds
      * the bytecode being executed for the frame.
      */
 
     /*
      * Get the frame's current bytecode, assuming |this| is in |cx|.
-     * next is frame whose prev == this, NULL if not known or if this == cx->fp().
+     *
+     * Beware, as the name implies, pcQuadratic can lead to quadratic behavior
+     * in loops such as:
+     *
+     *   for ( ...; fp; fp = fp->prev())
+     *     ... fp->pcQuadratic(cx);
+     *
+     * For such situations, prefer FrameRegsIter; it's amortized O(1).
+     *
+     *   When I get to the bottom I go back to the top of the stack
+     *   Where I stop and I turn and I go right back
+     *   Till I get to the bottom and I see you again...
      */
-    jsbytecode *pc(JSContext *cx, StackFrame *next = NULL);
+    jsbytecode *pcQuadratic(JSContext *cx);
 
     jsbytecode *prevpc() {
         if (flags_ & HAS_PREVPC)
             return prevpc_;
         return prevpcSlow();
     }
 
     JSScript *script() const {
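
The new comment warns that pcQuadratic re-derives the pc by searching its containing segment, so calling it while walking prev() links costs O(n^2) over the whole stack; FrameRegsIter carries the pc along and is the recommended shape. A sketch of the two loop forms (hypothetical helpers, for illustration):

    /* O(n^2): each pcQuadratic call may rescan the segment. */
    static void
    WalkQuadratic(JSContext *cx)
    {
        for (StackFrame *fp = cx->fp(); fp; fp = fp->prev())
            (void) fp->pcQuadratic(cx);
    }

    /* Amortized O(1) per frame: the iterator tracks the pc incrementally. */
    static void
    WalkLinear(JSContext *cx)
    {
        for (FrameRegsIter i(cx); !i.done(); ++i)
            (void) i.pc();
    }
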
@@ -1090,64 +1101,31 @@ class StackSpace
      * good way to handle an OOM for these allocations, so this function checks
      * that OOM cannot occur using the size of the TraceNativeStorage as a
      * conservative upper bound.
      */
     inline bool ensureEnoughSpaceToEnterTrace();
 #endif
 
     /*
-     * If we let infinite recursion go until it hit the end of the contiguous
-     * stack, it would take a long time. As a heuristic, we kill scripts which
-     * go deeper than MAX_INLINE_CALLS. Note: this heuristic only applies to a
-     * single activation of the VM. If a script reenters, the call count gets
-     * reset. This is ok because we will quickly hit the C recursion limit.
-     */
-    static const size_t MAX_INLINE_CALLS = 3000;
-
-    /*
-     * SunSpider and v8bench have roughly an average of 9 slots per script. Our
-     * heuristic for a quick over-recursion check uses a generous slot count
-     * based on this estimate. We take this frame size and multiply it by the
-     * old recursion limit from the interpreter. Worst case, if an average size
-     * script (<=9 slots) over recurses, it'll effectively be the same as having
-     * increased the old inline call count to <= 5,000.
-     */
-    static const size_t STACK_QUOTA = MAX_INLINE_CALLS * (VALUES_PER_STACK_FRAME + 18);
-
-    /*
-     * In the mjit, we'd like to collapse two "overflow" checks into one:
-     *  - the MAX_INLINE_CALLS check (see above comment)
-     *  - the stack OOM check (or, on Windows, the commit/OOM check) This
-     * function produces a 'limit' pointer that satisfies both these checks.
-     * (The STACK_QUOTA comment explains how this limit simulates checking
-     * MAX_INLINE_CALLS.) This limit is guaranteed to have at least enough space
-     * for cx->fp()->nslots() plus an extra stack frame (which is the min
-     * requirement for entering mjit code) or else an error is reported and NULL
-     * is returned. When the stack grows past the returned limit, the script may
-     * still be within quota, but more memory needs to be committed. This is
-     * handled by bumpLimitWithinQuota.
+     * Return a limit against which jit code can check. This limit is not
+     * necessarily the end of the stack since we lazily commit stack memory on
+     * some platforms. Thus, when the stack limit is exceeded, the caller should
+     * use tryBumpLimit to attempt to increase the stack limit by committing
+     * more memory. If the stack is truly exhausted, tryBumpLimit will report an
+     * error and return false.
+     *
+     * An invariant of the methodjit is that there is always space to push a
+     * frame on top of the current frame's expression stack (which can be at
+     * most script->nslots deep). getStackLimit ensures that the returned limit
+     * does indeed have this required space and reports an error and returns
+     * NULL if this reserve space cannot be allocated.
      */
     inline Value *getStackLimit(JSContext *cx);
-
-    /*
-     * Try to bump the limit, staying within |base + STACK_QUOTA|, by
-     * committing more pages of the contiguous stack.
-     *  base: the frame on which execution started
-     *  from: the current top of the stack
-     *  nvals: requested space above 'from'
-     *  *limit: receives bumped new limit
-     */
-    bool bumpLimitWithinQuota(JSContext *maybecx, StackFrame *base, Value *from, uintN nvals, Value **limit) const;
-
-    /*
-     * Raise the given limit without considering quota.
-     * See comment in BumpStackFull.
-     */
-    bool bumpLimit(JSContext *cx, StackFrame *base, Value *from, uintN nvals, Value **limit) const;
+    bool tryBumpLimit(JSContext *maybecx, Value *from, uintN nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
     void mark(JSTracer *trc);
 };
 
 /*****************************************************************************/
 
 class ContextStack
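
Per the getStackLimit contract spelled out above, the function either returns a limit with room for the current frame's slots plus one extra frame, or reports the over-recursion itself and returns NULL, so a caller only has to propagate the failure. A minimal sketch (hypothetical wrapper, using the cx->stack.space() accessor seen elsewhere in this patch):

    /* Hypothetical wrapper; illustrates the error-already-reported contract. */
    static bool
    AcquireStackLimit(JSContext *cx, Value **limitOut)
    {
        Value *limit = cx->stack.space().getStackLimit(cx);
        if (!limit)
            return false;   /* getStackLimit already reported over-recursion */
        *limitOut = limit;
        return true;
    }
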
@@ -1262,17 +1240,17 @@ class ContextStack
     /* Mark the top segment as suspended, without pushing a new one. */
     void saveActiveSegment();
 
     /* Undoes calls to suspendActiveSegment. */
     void restoreSegment();
 
     /*
      * For the five sets of stack operations below:
-     *  - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
+     *  - The boolean-valued functions call js_ReportOverRecursed on OOM.
      *  - The "get*Frame" functions do not change any global state, they just
      *    check OOM and return pointers to an uninitialized frame with the
      *    requested missing arguments/slots. Only once the "push*Frame"
      *    function has been called is global state updated. Thus, between
      *    "get*Frame" and "push*Frame", the frame and slots are unrooted.
      *  - Functions taking "*Guard" arguments will use the guard's destructor
      *    to pop the stack. The caller must ensure the guard has the
      *    appropriate lifetime.
@@ -1317,17 +1295,17 @@ class ContextStack
      * limit (see StackSpace::getStackLimit).
      */
     inline StackFrame *
     getInlineFrame(JSContext *cx, Value *sp, uintN nactual,
                    JSFunction *fun, JSScript *script, uint32 *flags) const;
     inline StackFrame *
     getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
                               JSFunction *fun, JSScript *script, uint32 *flags,
-                              StackFrame *base, Value **limit) const;
+                              Value **limit) const;
     inline void pushInlineFrame(JSScript *script, StackFrame *fp, FrameRegs &regs);
     inline void popInlineFrame();
 
     /* For jit use: */
     static size_t offsetOfRegs() { return offsetof(ContextStack, regs_); }
 };
 
 /*****************************************************************************/
@@ -1419,20 +1397,22 @@ class FrameRegsIter
     StackFrame   *fp_;
     Value        *sp_;
     jsbytecode   *pc_;
 
     void initSlow();
     void incSlow(StackFrame *oldfp);
 
   public:
-    inline FrameRegsIter(JSContext *cx);
+    FrameRegsIter(JSContext *cx);
 
     bool done() const { return fp_ == NULL; }
-    inline FrameRegsIter &operator++();
+    FrameRegsIter &operator++();
+    bool operator==(const FrameRegsIter &rhs) const;
+    bool operator!=(const FrameRegsIter &rhs) const { return !(*this == rhs); }
 
     StackFrame *fp() const { return fp_; }
     Value *sp() const { return sp_; }
     jsbytecode *pc() const { return pc_; }
 };
 
 /*
  * Utility class for iteration over all active stack frames.