Bug 460865 - Read barrier for cx->fp. r=mrbkap, r=dmandelin.
author Jason Orendorff <jorendorff@mozilla.com>
date Tue, 09 Dec 2008 10:38:32 -0600
changeset 22652 27a7da1735981b2be2ab76d49bfd297709d91a9f
parent 22651 b7a88fd4b75cffd17a57b40ed4d89209b07c5240
child 22653 74c13b64856a90e991c8466072f6d7ffd74906dd
push id 4100
push user rsayre@mozilla.com
push date Thu, 11 Dec 2008 11:23:21 +0000
treeherder mozilla-central@0492e9f0f973
reviewers mrbkap, dmandelin
bugs 460865
milestone 1.9.2a1pre
Bug 460865 - Read barrier for cx->fp. r=mrbkap, r=dmandelin.
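In brief, the patch stops most of the engine and its embeddings from reading cx->fp directly. Reads now go through js_GetTopStackFrame(cx), a read barrier that (per the comment added in jscntxt.h) first instantiates any lazily-created stack frames, or through public wrappers such as JS_FrameIterator, JS_GetScriptedCaller, JS_IsRunning and JS_IsConstructing; the remaining direct accesses are annotated JS_REQUIRES_STACK so the new jsstack.js Treehydra analysis can check them. A minimal sketch of the caller-side pattern, using a hypothetical helper that is not part of this patch:

/*
 * Sketch only; the helper name is hypothetical. Before: walk cx->fp
 * directly. After: go through the barrier, which instantiates any
 * lazily-created frames first.
 */
static const char *
GetRunningScriptFilename(JSContext *cx)
{
    JSStackFrame *fp;

    /*
     * Old style, now reserved for JS_REQUIRES_STACK code:
     *     for (fp = cx->fp; fp && !fp->script; fp = fp->down)
     *         continue;
     */
    fp = js_GetScriptedCaller(cx, NULL);    /* barrier + skip native frames */
    return fp ? fp->script->filename : NULL;
}

The non-JS_TRACER definitions of js_GetTopStackFrame and js_GetScriptedCaller appear in the jscntxt.cpp hunk below; as the jscntxt.h comment notes, the JS_TRACER version lives in jstracer.cpp.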
config/static-checking-config.mk
dom/src/base/nsDOMClassInfo.cpp
dom/src/base/nsJSEnvironment.cpp
js/src/Makefile.in
js/src/js.cpp
js/src/jsapi.cpp
js/src/jsarray.cpp
js/src/jsbool.cpp
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsdate.cpp
js/src/jsdbgapi.cpp
js/src/jsexn.cpp
js/src/jsfile.cpp
js/src/jsfun.cpp
js/src/jsgc.cpp
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jsiter.cpp
js/src/jsnum.cpp
js/src/jsobj.cpp
js/src/jsobj.h
js/src/jsopcode.cpp
js/src/jsregexp.cpp
js/src/jsscript.cpp
js/src/jsstaticcheck.h
js/src/jsstr.cpp
js/src/jstracer.cpp
js/src/jstracer.h
js/src/jstypes.h
js/src/jsxml.cpp
js/src/liveconnect/nsCLiveconnect.cpp
js/src/xpconnect/src/XPCNativeWrapper.cpp
js/src/xpconnect/src/xpccallcontext.cpp
js/src/xpconnect/src/xpcconvert.cpp
js/src/xpconnect/src/xpcstack.cpp
xpcom/analysis/jsstack.js
--- a/config/static-checking-config.mk
+++ b/config/static-checking-config.mk
@@ -6,16 +6,17 @@ DEHYDRA_SCRIPT = $(topsrcdir)/xpcom/anal
 DEHYDRA_MODULES = \
   $(topsrcdir)/xpcom/analysis/final.js \
   $(NULL)
 
 TREEHYDRA_MODULES = \
   $(topsrcdir)/xpcom/analysis/outparams.js \
   $(topsrcdir)/xpcom/analysis/stack.js \
   $(topsrcdir)/xpcom/analysis/flow.js \
+  $(topsrcdir)/xpcom/analysis/jsstack.js \
   $(NULL)
 
 DEHYDRA_ARGS = \
   --topsrcdir=$(topsrcdir) \
   --objdir=$(DEPTH) \
   --dehydra-modules=$(subst $(NULL) ,$(COMMA),$(strip $(DEHYDRA_MODULES))) \
   --treehydra-modules=$(subst $(NULL) ,$(COMMA),$(strip $(TREEHYDRA_MODULES))) \
   $(NULL)
--- a/dom/src/base/nsDOMClassInfo.cpp
+++ b/dom/src/base/nsDOMClassInfo.cpp
@@ -6447,18 +6447,19 @@ nsWindowSH::NewResolve(nsIXPConnectWrapp
     // Something went wrong, or the property got resolved. Return.
     return rv;
   }
 
   // Make a fast expando if we're assigning to (not declaring or
   // binding a name) a new undefined property that's not already
   // defined on our prototype chain. This way we can access this
   // expando w/o ever getting back into XPConnect.
+  JSStackFrame *fp = NULL;
   if ((flags & (JSRESOLVE_ASSIGNING)) &&
-      !(cx->fp && cx->fp->regs && (JSOp)*cx->fp->regs->pc == JSOP_BINDNAME) &&
+      !(JS_FrameIterator(cx, &fp) && fp->regs && (JSOp)*fp->regs->pc == JSOP_BINDNAME) &&
       win->IsInnerWindow()) {
     JSObject *realObj;
     wrapper->GetJSObject(&realObj);
 
     if (obj == realObj) {
       JSObject *proto = STOBJ_GET_PROTO(obj);
       if (proto) {
         jsid interned_id;
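For embedders the same discipline applies through the public API: uses of JS_FrameIterator, like the one above, stay correct when frames were elided on trace because the iterator now applies the barrier on its first step (see the jsdbgapi.cpp hunk below). A minimal sketch, with a hypothetical helper that is not in the patch:

static uintN
CountScriptedFrames(JSContext *cx)
{
    JSStackFrame *fp = NULL;   /* NULL asks the iterator to start at the top */
    uintN n = 0;

    while (JS_FrameIterator(cx, &fp)) {
        if (JS_GetFrameScript(cx, fp))
            ++n;
    }
    return n;
}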
--- a/dom/src/base/nsJSEnvironment.cpp
+++ b/dom/src/base/nsJSEnvironment.cpp
@@ -3320,17 +3320,17 @@ nsJSContext::ScriptEvaluated(PRBool aTer
     mOperationCallbackTime = LL_ZERO;
   }
 }
 
 nsresult
 nsJSContext::SetTerminationFunction(nsScriptTerminationFunc aFunc,
                                     nsISupports* aRef)
 {
-  NS_PRECONDITION(mContext->fp, "should be executing script");
+  NS_PRECONDITION(JS_IsRunning(mContext), "should be executing script");
 
   nsJSContext::TerminationFuncClosure* newClosure =
     new nsJSContext::TerminationFuncClosure(aFunc, aRef, mTerminations);
   if (!newClosure) {
     return NS_ERROR_OUT_OF_MEMORY;
   }
 
   mTerminations = newClosure;
--- a/js/src/Makefile.in
+++ b/js/src/Makefile.in
@@ -210,16 +210,17 @@ INSTALLED_HEADERS = \
 		jsprf.h \
 		jsproto.tbl \
 		jsprvtd.h \
 		jspubtd.h \
 		jsregexp.h \
 		jsscan.h \
 		jsscope.h \
 		jsscript.h \
+		jsstaticcheck.h \
 		jsstddef.h \
 		jsstr.h \
 		jstracer.h \
 		jstypes.h \
 		jsutil.h \
 		jsversion.h \
 		jsxdrapi.h \
 		jsxml.h \
--- a/js/src/js.cpp
+++ b/js/src/js.cpp
@@ -1113,17 +1113,17 @@ ValueToScript(JSContext *cx, jsval v)
 static JSBool
 GetTrapArgs(JSContext *cx, uintN argc, jsval *argv, JSScript **scriptp,
             int32 *ip)
 {
     jsval v;
     uintN intarg;
     JSScript *script;
 
-    *scriptp = cx->fp->down->script;
+    *scriptp = JS_GetScriptedCaller(cx, NULL)->script;
     *ip = 0;
     if (argc != 0) {
         v = argv[0];
         intarg = 0;
         if (!JSVAL_IS_PRIMITIVE(v) &&
             (JS_GET_CLASS(cx, JSVAL_TO_OBJECT(v)) == &js_FunctionClass ||
              JS_GET_CLASS(cx, JSVAL_TO_OBJECT(v)) == &js_ScriptClass)) {
             script = ValueToScript(cx, v);
@@ -1200,17 +1200,17 @@ LineToPC(JSContext *cx, JSObject *obj, u
     int32 i;
     uintN lineno;
     jsbytecode *pc;
 
     if (argc == 0) {
         JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_LINE2PC_USAGE);
         return JS_FALSE;
     }
-    script = cx->fp->down->script;
+    script = JS_GetScriptedCaller(cx, NULL)->script;
     if (!GetTrapArgs(cx, argc, argv, &script, &i))
         return JS_FALSE;
     lineno = (i == 0) ? script->lineno : (uintN)i;
     pc = JS_LineNumberToPC(cx, script, lineno);
     if (!pc)
         return JS_FALSE;
     *rval = INT_TO_JSVAL(PTRDIFF(pc, script->code, jsbytecode));
     return JS_TRUE;
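The shell changes above rely on JS_GetScriptedCaller(cx, NULL) returning the nearest frame that has a script. A sketch of a similar shell-style native (hypothetical, not part of the patch) that reports the caller's current line:

static JSBool
CallerLine(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
{
    JSStackFrame *fp;
    JSScript *script;
    jsbytecode *pc;
    uintN lineno;

    fp = JS_GetScriptedCaller(cx, NULL);
    if (!fp) {
        *rval = JSVAL_ZERO;          /* nothing scripted is running */
        return JS_TRUE;
    }
    script = JS_GetFrameScript(cx, fp);
    pc = JS_GetFramePC(cx, fp);
    lineno = pc ? JS_PCToLineNumber(cx, script, pc) : script->lineno;
    *rval = INT_TO_JSVAL((jsint) lineno);
    return JS_TRUE;
}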
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -74,16 +74,17 @@
 #include "jsobj.h"
 #include "jsopcode.h"
 #include "jsparse.h"
 #include "jsregexp.h"
 #include "jsscan.h"
 #include "jsscope.h"
 #include "jsscript.h"
 #include "jsstr.h"
+#include "jsdbgapi.h"
 #include "prmjtime.h"
 #include "jsstaticcheck.h"
 
 #if !defined JS_THREADSAFE && defined JS_TRACER
 #include "jstracer.h"
 #endif
 
 #if JS_HAS_FILE_OBJECT
@@ -1798,17 +1799,17 @@ JS_GetClassObject(JSContext *cx, JSObjec
 }
 
 JS_PUBLIC_API(JSObject *)
 JS_GetScopeChain(JSContext *cx)
 {
     JSStackFrame *fp;
 
     CHECK_REQUEST(cx);
-    fp = cx->fp;
+    fp = js_GetTopStackFrame(cx);
     if (!fp) {
         /*
          * There is no code active on this context. In place of an actual
          * scope chain, use the context's global object, which is set in
          * js_InitFunctionAndObjectClasses, and which represents the default
          * scope chain for the embedding. See also js_FindClassObject.
          *
          * For embeddings that use the inner and outer object hooks, the inner
@@ -4570,20 +4571,20 @@ js_generic_native_method_dispatcher(JSCo
      * first argument passed as |this|.
      */
     memmove(argv - 1, argv, argc * sizeof(jsval));
 
     /*
      * Follow Function.prototype.apply and .call by using the global object as
      * the 'this' param if no args.
      */
-    JS_ASSERT(cx->fp->argv == argv);
     if (!js_ComputeThis(cx, JS_TRUE, argv))
         return JS_FALSE;
-    cx->fp->thisp = JSVAL_TO_OBJECT(argv[-1]);
+    js_GetTopStackFrame(cx)->thisp = JSVAL_TO_OBJECT(argv[-1]);
+    JS_ASSERT(cx->fp->argv == argv);
 
     /*
      * Protect against argc underflowing. By calling js_ComputeThis, we made
      * it as if the static was called with one parameter, the explicit |this|
      * object.
      */
     if (argc != 0)
         --argc;
@@ -4720,17 +4721,17 @@ JS_CompileUCScript(JSContext *cx, JSObje
 #define LAST_FRAME_EXCEPTION_CHECK(cx,result)                                 \
     JS_BEGIN_MACRO                                                            \
         if (!(result) && !((cx)->options & JSOPTION_DONT_REPORT_UNCAUGHT))    \
             js_ReportUncaughtException(cx);                                   \
     JS_END_MACRO
 
 #define LAST_FRAME_CHECKS(cx,result)                                          \
     JS_BEGIN_MACRO                                                            \
-        if (!(cx)->fp) {                                                      \
+        if (!JS_IsRunning(cx)) {                                              \
             (cx)->weakRoots.lastInternalResult = JSVAL_NULL;                  \
             LAST_FRAME_EXCEPTION_CHECK(cx, result);                           \
         }                                                                     \
     JS_END_MACRO
 
 #define JS_OPTIONS_TO_TCFLAGS(cx)                                             \
     ((((cx)->options & JSOPTION_COMPILE_N_GO) ? TCF_COMPILE_N_GO : 0) |       \
      (((cx)->options & JSOPTION_NO_SCRIPT_RVAL) ? TCF_NO_SCRIPT_RVAL : 0))
@@ -5319,32 +5320,37 @@ JS_SetBranchCallback(JSContext *cx, JSBr
         JS_ClearOperationCallback(cx);
     }
     return oldcb;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsRunning(JSContext *cx)
 {
-    return cx->fp != NULL;
+    /* The use of cx->fp below is safe: if we're on trace, it is skipped. */
+    VOUCH_DOES_NOT_REQUIRE_STACK();
+
+    return JS_ON_TRACE(cx) || cx->fp != NULL;
 }
 
 JS_PUBLIC_API(JSBool)
 JS_IsConstructing(JSContext *cx)
 {
-    return cx->fp && (cx->fp->flags & JSFRAME_CONSTRUCTING);
+    JSStackFrame *fp;
+
+    fp = js_GetTopStackFrame(cx);
+    return fp && (fp->flags & JSFRAME_CONSTRUCTING);
 }
 
 JS_FRIEND_API(JSBool)
 JS_IsAssigning(JSContext *cx)
 {
     JSStackFrame *fp;
 
-    for (fp = cx->fp; fp && !fp->script; fp = fp->down)
-        continue;
+    fp = js_GetScriptedCaller(cx, NULL);
     if (!fp || !fp->regs)
         return JS_FALSE;
     return (js_CodeSpec[*fp->regs->pc].format & JOF_ASSIGNING) != 0;
 }
 
 JS_PUBLIC_API(void)
 JS_SetCallReturnValue2(JSContext *cx, jsval v)
 {
@@ -5354,30 +5360,32 @@ JS_SetCallReturnValue2(JSContext *cx, js
 #endif
 }
 
 JS_PUBLIC_API(JSStackFrame *)
 JS_SaveFrameChain(JSContext *cx)
 {
     JSStackFrame *fp;
 
-    fp = cx->fp;
+    fp = js_GetTopStackFrame(cx);
     if (!fp)
         return fp;
 
     JS_ASSERT(!fp->dormantNext);
     fp->dormantNext = cx->dormantFrameChain;
     cx->dormantFrameChain = fp;
     cx->fp = NULL;
     return fp;
 }
 
 JS_PUBLIC_API(void)
 JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp)
 {
+    JS_ASSERT(!JS_ON_TRACE(cx));
+    VOUCH_DOES_NOT_REQUIRE_STACK();
     JS_ASSERT(!cx->fp);
     if (!fp)
         return;
 
     JS_ASSERT(fp == cx->dormantFrameChain);
     cx->fp = fp;
     cx->dormantFrameChain = fp->dormantNext;
     fp->dormantNext = NULL;
@@ -5979,17 +5987,17 @@ JS_ErrorFromException(JSContext *cx, jsv
     CHECK_REQUEST(cx);
     return js_ErrorFromException(cx, v);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_ThrowReportedError(JSContext *cx, const char *message,
                       JSErrorReport *reportp)
 {
-    return cx->fp && js_ErrorToException(cx, message, reportp);
+    return JS_IsRunning(cx) && js_ErrorToException(cx, message, reportp);
 }
 
 JS_PUBLIC_API(JSBool)
 JS_ThrowStopIteration(JSContext *cx)
 {
     return js_ThrowStopIteration(cx);
 }
 
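JS_SaveFrameChain and JS_RestoreFrameChain keep their shapes, but saving now goes through the barrier and restoring asserts that we are not on trace. A sketch of typical embedding usage (hypothetical helper, not in the patch):

static JSBool
EvaluateDetached(JSContext *cx, JSObject *global, const char *bytes,
                 uintN length, jsval *rval)
{
    JSStackFrame *frame;
    JSBool ok;

    /* Park any active frames so the script runs as if nothing were on the stack. */
    frame = JS_SaveFrameChain(cx);   /* NULL if no code is active; that is fine */
    ok = JS_EvaluateScript(cx, global, bytes, length, "detached", 1, rval);
    JS_RestoreFrameChain(cx, frame);
    return ok;
}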
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -3024,17 +3024,17 @@ static JSFunctionSpec array_methods[] = 
 
 JSBool
 js_Array(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     jsuint length;
     jsval *vector;
 
     /* If called without new, replace obj with a new Array object. */
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         obj = js_NewObject(cx, &js_ArrayClass, NULL, NULL, 0);
         if (!obj)
             return JS_FALSE;
         *rval = OBJECT_TO_JSVAL(obj);
     }
 
     if (argc == 0) {
         length = 0;
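Native constructors that used to test cx->fp->flags & JSFRAME_CONSTRUCTING now call JS_IsConstructing, which applies the barrier itself; the Boolean, Date, Number, File, Iterator, Function, Object and Exception constructors below follow the same pattern. A sketch of a constructor written this way (class and names hypothetical, not in the patch):

static JSBool
Widget(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
{
    /* Called without 'new': make the object ourselves, as js_Array does above. */
    if (!JS_IsConstructing(cx)) {
        obj = JS_NewObject(cx, &widget_class, NULL, NULL);
        if (!obj)
            return JS_FALSE;
        *rval = OBJECT_TO_JSVAL(obj);
    }
    /* ... initialize obj's private state here ... */
    return JS_TRUE;
}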
--- a/js/src/jsbool.cpp
+++ b/js/src/jsbool.cpp
@@ -128,17 +128,17 @@ static JSFunctionSpec boolean_methods[] 
 static JSBool
 Boolean(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     jsval bval;
 
     bval = (argc != 0)
            ? BOOLEAN_TO_JSVAL(js_ValueToBoolean(argv[0]))
            : JSVAL_FALSE;
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         *rval = bval;
         return JS_TRUE;
     }
     STOBJ_SET_SLOT(obj, JSSLOT_PRIVATE, bval);
     return JS_TRUE;
 }
 
 JSObject *
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -884,61 +884,68 @@ ReportError(JSContext *cx, const char *m
     /*
      * Call the error reporter only if an exception wasn't raised.
      *
      * If an exception was raised, then we call the debugErrorHook
      * (if present) to give it a chance to see the error before it
      * propagates out of scope.  This is needed for compatability
      * with the old scheme.
      */
-    if (!cx->fp || !js_ErrorToException(cx, message, reportp)) {
+    if (!JS_IsRunning(cx) || !js_ErrorToException(cx, message, reportp)) {
         js_ReportErrorAgain(cx, message, reportp);
     } else if (cx->debugHooks->debugErrorHook && cx->errorReporter) {
         JSDebugErrorHook hook = cx->debugHooks->debugErrorHook;
         /* test local in case debugErrorHook changed on another thread */
         if (hook)
             hook(cx, message, reportp, cx->debugHooks->debugErrorHookData);
     }
 }
 
+/* The report must be initially zeroed. */
+static void
+PopulateReportBlame(JSContext *cx, JSErrorReport *report)
+{
+    JSStackFrame *fp;
+
+    /*
+     * Walk stack until we find a frame that is associated with some script
+     * rather than a native frame.
+     */
+    for (fp = js_GetTopStackFrame(cx); fp; fp = fp->down) {
+        if (fp->regs) {
+            report->filename = fp->script->filename;
+            report->lineno = js_FramePCToLineNumber(cx, fp);
+            break;
+        }
+    }
+}
+
 /*
  * We don't post an exception in this case, since doing so runs into
  * complications of pre-allocating an exception object which required
  * running the Exception class initializer early etc.
  * Instead we just invoke the errorReporter with an "Out Of Memory"
  * type message, and then hope the process ends swiftly.
  */
 void
 js_ReportOutOfMemory(JSContext *cx)
 {
-    JSStackFrame *fp;
     JSErrorReport report;
     JSErrorReporter onError = cx->errorReporter;
 
     /* Get the message for this error, but we won't expand any arguments. */
     const JSErrorFormatString *efs =
         js_GetLocalizedErrorMessage(cx, NULL, NULL, JSMSG_OUT_OF_MEMORY);
     const char *msg = efs ? efs->format : "Out of memory";
 
     /* Fill out the report, but don't do anything that requires allocation. */
     memset(&report, 0, sizeof (struct JSErrorReport));
     report.flags = JSREPORT_ERROR;
     report.errorNumber = JSMSG_OUT_OF_MEMORY;
-
-    /*
-     * Walk stack until we find a frame that is associated with some script
-     * rather than a native frame.
-     */
-    for (fp = cx->fp; fp; fp = fp->down) {
-        if (fp->regs) {
-            report.filename = fp->script->filename;
-            report.lineno = js_FramePCToLineNumber(cx, fp);
-            break;
-        }
-    }
+    PopulateReportBlame(cx, &report);
 
     /*
      * If debugErrorHook is present then we give it a chance to veto sending
      * the error on to the regular ErrorReporter. We also clear a pending
      * exception if any now so the hooks can replace the out-of-memory error
      * by a script-catchable exception.
      */
     cx->throwing = JS_FALSE;
@@ -974,41 +981,32 @@ js_ReportAllocationOverflow(JSContext *c
 }
 
 JSBool
 js_ReportErrorVA(JSContext *cx, uintN flags, const char *format, va_list ap)
 {
     char *message;
     jschar *ucmessage;
     size_t messagelen;
-    JSStackFrame *fp;
     JSErrorReport report;
     JSBool warning;
 
     if ((flags & JSREPORT_STRICT) && !JS_HAS_STRICT_OPTION(cx))
         return JS_TRUE;
 
     message = JS_vsmprintf(format, ap);
     if (!message)
         return JS_FALSE;
     messagelen = strlen(message);
 
     memset(&report, 0, sizeof (struct JSErrorReport));
     report.flags = flags;
     report.errorNumber = JSMSG_USER_DEFINED_ERROR;
     report.ucmessage = ucmessage = js_InflateString(cx, message, &messagelen);
-
-    /* Find the top-most active script frame, for best line number blame. */
-    for (fp = cx->fp; fp; fp = fp->down) {
-        if (fp->regs) {
-            report.filename = fp->script->filename;
-            report.lineno = js_FramePCToLineNumber(cx, fp);
-            break;
-        }
-    }
+    PopulateReportBlame(cx, &report);
 
     warning = JSREPORT_IS_WARNING(report.flags);
     if (warning && JS_HAS_WERROR_OPTION(cx)) {
         report.flags &= ~JSREPORT_WARNING;
         warning = JS_FALSE;
     }
 
     ReportError(cx, message, &report);
@@ -1188,39 +1186,27 @@ error:
     return JS_FALSE;
 }
 
 JSBool
 js_ReportErrorNumberVA(JSContext *cx, uintN flags, JSErrorCallback callback,
                        void *userRef, const uintN errorNumber,
                        JSBool charArgs, va_list ap)
 {
-    JSStackFrame *fp;
     JSErrorReport report;
     char *message;
     JSBool warning;
 
     if ((flags & JSREPORT_STRICT) && !JS_HAS_STRICT_OPTION(cx))
         return JS_TRUE;
 
     memset(&report, 0, sizeof (struct JSErrorReport));
     report.flags = flags;
     report.errorNumber = errorNumber;
-
-    /*
-     * If we can't find out where the error was based on the current frame,
-     * see if the next frame has a script/pc combo we can use.
-     */
-    for (fp = cx->fp; fp; fp = fp->down) {
-        if (fp->regs) {
-            report.filename = fp->script->filename;
-            report.lineno = js_FramePCToLineNumber(cx, fp);
-            break;
-        }
-    }
+    PopulateReportBlame(cx, &report);
 
     if (!js_ExpandErrorArguments(cx, callback, userRef, errorNumber,
                                  &message, &report, &warning, charArgs, ap)) {
         return JS_FALSE;
     }
 
     ReportError(cx, message, &report);
 
@@ -1377,28 +1363,52 @@ js_GetErrorMessage(void *userRef, const 
         return &js_ErrorFormatString[errorNumber];
     return NULL;
 }
 
 JSBool
 js_ResetOperationCount(JSContext *cx)
 {
     JSScript *script;
+    JSStackFrame *fp;
 
     JS_ASSERT(cx->operationCount <= 0);
     JS_ASSERT(cx->operationLimit > 0);
 
     cx->operationCount = (int32) cx->operationLimit;
     if (cx->operationCallbackIsSet)
         return cx->operationCallback(cx);
 
     if (cx->operationCallback) {
         /*
          * Invoke the deprecated branch callback. It may be called only when
          * the top-most frame is scripted or JSOPTION_NATIVE_BRANCH_CALLBACK
          * is set.
          */
-        script = cx->fp ? cx->fp->script : NULL;
+        fp = js_GetTopStackFrame(cx);
+        script = fp ? fp->script : NULL;
         if (script || JS_HAS_OPTION(cx, JSOPTION_NATIVE_BRANCH_CALLBACK))
             return ((JSBranchCallback) cx->operationCallback)(cx, script);
     }
     return JS_TRUE;
 }
+
+#ifndef JS_TRACER
+/* This is defined in jstracer.cpp in JS_TRACER builds. */
+extern JS_FORCES_STACK JSStackFrame *
+js_GetTopStackFrame(JSContext *cx)
+{
+    return cx->fp;
+}
+#endif
+
+JSStackFrame *
+js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
+{
+    if (!fp)
+        fp = js_GetTopStackFrame(cx);
+    while (fp) {
+        if (fp->script)
+            return fp;
+        fp = fp->down;
+    }
+    return NULL;
+}
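The two helpers above define the contract that the jsstack.js analysis (registered in static-checking-config.mk above) is meant to enforce: only JS_REQUIRES_STACK code may touch cx->fp directly; everything else must go through the JS_FORCES_STACK barrier. An illustrative pair of helpers, not part of the patch:

/* Allowed: the annotation states this function runs with cx->fp materialized. */
static JS_REQUIRES_STACK JSScript *
TopScript(JSContext *cx)
{
    return cx->fp ? cx->fp->script : NULL;
}

/* Also allowed: unannotated code reaches cx->fp only through the barrier. */
static JSScript *
TopScriptSafe(JSContext *cx)
{
    JSStackFrame *fp = js_GetTopStackFrame(cx);
    return fp ? fp->script : NULL;
}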
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -134,19 +134,21 @@ typedef struct JSTraceMonitor {
      * way as the real tracing Fragmento. */
     CLS(nanojit::Fragmento) reFragmento;
 
     /* Keep a list of recorders we need to abort on cache flush. */
     CLS(TraceRecorder)      abortStack;
 } JSTraceMonitor;
 
 #ifdef JS_TRACER
-# define JS_ON_TRACE(cx)   (JS_TRACE_MONITOR(cx).onTrace)
+# define JS_ON_TRACE(cx)            (JS_TRACE_MONITOR(cx).onTrace)
+# define JS_EXECUTING_TRACE(cx)     (JS_ON_TRACE(cx) && !JS_TRACE_MONITOR(cx).recorder)
 #else
-# define JS_ON_TRACE(cx)   JS_FALSE
+# define JS_ON_TRACE(cx)            JS_FALSE
+# define JS_EXECUTING_TRACE(cx)     JS_FALSE
 #endif
 
 #ifdef JS_THREADSAFE
 
 /*
  * Structure uniquely representing a thread.  It holds thread-private data
  * that can be accessed without a global lock.
  */
@@ -809,16 +811,18 @@ struct JSContext {
     /* Quota on the size of arenas used to compile and execute scripts. */
     size_t              scriptStackQuota;
 
     /* Data shared by threads in an address space. */
     JSRuntime           *runtime;
 
     /* Stack arena pool and frame pointer register. */
     JSArenaPool         stackPool;
+
+    JS_REQUIRES_STACK
     JSStackFrame        *fp;
 
     /* Temporary arena pool used while compiling and decompiling. */
     JSArenaPool         tempPool;
 
     /* Top-level object and pointer to top stack frame's scope chain. */
     JSObject            *globalObject;
 
@@ -1246,11 +1250,23 @@ extern JSErrorFormatString js_ErrorForma
 
 /*
  * Reset the operation count and call the operation callback assuming that the
  * operation limit is reached.
  */
 extern JSBool
 js_ResetOperationCount(JSContext *cx);
 
+/*
+ * Get the current cx->fp, first lazily instantiating stack frames if needed.
+ * (Do not access cx->fp directly except in JS_REQUIRES_STACK code.)
+ *
+ * Defined in jstracer.cpp if JS_TRACER is defined.
+ */
+extern JS_FORCES_STACK JSStackFrame *
+js_GetTopStackFrame(JSContext *cx);
+
+extern JSStackFrame *
+js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
+
 JS_END_EXTERN_C
 
 #endif /* jscntxt_h___ */
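jscntxt.h also gains JS_EXECUTING_TRACE(cx), which is true only while native trace code is running (on trace and not recording). The JS_ASSERT_NOT_EXECUTING_TRACE checks added in jsobj.cpp below presumably reduce to something like the sketch here; the real definition lives in one of the headers this patch touches (likely jsstaticcheck.h) and is not shown in this section:

/* Illustrative only; the actual macro is defined elsewhere in this patch. */
#define JS_ASSERT_NOT_EXECUTING_TRACE(cx)  JS_ASSERT(!JS_EXECUTING_TRACE(cx))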
--- a/js/src/jsdate.cpp
+++ b/js/src/jsdate.cpp
@@ -2099,17 +2099,17 @@ date_constructor(JSContext *cx, JSObject
 JSBool
 js_Date(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     jsdouble *date;
     JSString *str;
     jsdouble d;
 
     /* Date called as function. */
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         return date_format(cx, PRMJ_Now() / PRMJ_USEC_PER_MSEC,
                            FORMATSPEC_FULL, rval);
     }
 
     /* Date called as constructor. */
     if (argc == 0) {
         date = date_constructor(cx, obj);
         if (!date)
--- a/js/src/jsdbgapi.cpp
+++ b/js/src/jsdbgapi.cpp
@@ -635,17 +635,17 @@ js_watch_set(JSContext *cx, JSObject *ob
                         regs.pc = script->code + script->length
                                   - JSOP_STOP_LENGTH;
                         regs.sp = NULL;
                         frame.regs = &regs;
                     }
                     frame.callee = closure;
                     frame.fun = fun;
                     frame.argv = argv + 2;
-                    frame.down = cx->fp;
+                    frame.down = js_GetTopStackFrame(cx);
                     frame.scopeChain = OBJ_GET_PARENT(cx, closure);
 
                     cx->fp = &frame;
                 }
 #ifdef __GNUC__
                 else
                     argv = NULL;    /* suppress bogus gcc warnings */
 #endif
@@ -970,17 +970,17 @@ JS_GetScriptPrincipals(JSContext *cx, JS
 /************************************************************************/
 
 /*
  *  Stack Frame Iterator
  */
 JS_PUBLIC_API(JSStackFrame *)
 JS_FrameIterator(JSContext *cx, JSStackFrame **iteratorp)
 {
-    *iteratorp = (*iteratorp == NULL) ? cx->fp : (*iteratorp)->down;
+    *iteratorp = (*iteratorp == NULL) ? js_GetTopStackFrame(cx) : (*iteratorp)->down;
     return *iteratorp;
 }
 
 JS_PUBLIC_API(JSScript *)
 JS_GetFrameScript(JSContext *cx, JSStackFrame *fp)
 {
     return fp->script;
 }
@@ -989,24 +989,17 @@ JS_PUBLIC_API(jsbytecode *)
 JS_GetFramePC(JSContext *cx, JSStackFrame *fp)
 {
     return fp->regs ? fp->regs->pc : NULL;
 }
 
 JS_PUBLIC_API(JSStackFrame *)
 JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
 {
-    if (!fp)
-        fp = cx->fp;
-    while (fp) {
-        if (fp->script)
-            return fp;
-        fp = fp->down;
-    }
-    return NULL;
+    return js_GetScriptedCaller(cx, fp);
 }
 
 JS_PUBLIC_API(JSPrincipals *)
 JS_StackFramePrincipals(JSContext *cx, JSStackFrame *fp)
 {
     JSSecurityCallbacks *callbacks;
 
     if (fp->fun) {
@@ -1119,17 +1112,17 @@ JS_PUBLIC_API(JSObject *)
 JS_GetFrameThis(JSContext *cx, JSStackFrame *fp)
 {
     JSStackFrame *afp;
 
     if (fp->flags & JSFRAME_COMPUTED_THIS)
         return fp->thisp;
 
     /* js_ComputeThis gets confused if fp != cx->fp, so set it aside. */
-    if (cx->fp != fp) {
+    if (js_GetTopStackFrame(cx) != fp) {
         afp = cx->fp;
         if (afp) {
             afp->dormantNext = cx->dormantFrameChain;
             cx->dormantFrameChain = afp;
             cx->fp = fp;
         }
     } else {
         afp = NULL;
@@ -1628,17 +1621,17 @@ JS_GetScriptTotalSize(JSContext *cx, JSS
 
     return nbytes;
 }
 
 JS_PUBLIC_API(uint32)
 JS_GetTopScriptFilenameFlags(JSContext *cx, JSStackFrame *fp)
 {
     if (!fp)
-        fp = cx->fp;
+        fp = js_GetTopStackFrame(cx);
     while (fp) {
         if (fp->script)
             return JS_GetScriptFilenameFlags(fp->script);
         fp = fp->down;
     }
     return 0;
  }
 
--- a/js/src/jsexn.cpp
+++ b/js/src/jsexn.cpp
@@ -275,17 +275,17 @@ InitExnPrivate(JSContext *cx, JSObject *
                   ? callbacks->checkObjectAccess
                   : NULL;
     older = JS_SetErrorReporter(cx, NULL);
     state = JS_SaveExceptionState(cx);
 
     callerid = ATOM_KEY(cx->runtime->atomState.callerAtom);
     stackDepth = 0;
     valueCount = 0;
-    for (fp = cx->fp; fp; fp = fp->down) {
+    for (fp = js_GetTopStackFrame(cx); fp; fp = fp->down) {
         if (fp->fun && fp->argv) {
             v = JSVAL_NULL;
             if (checkAccess &&
                 !checkAccess(cx, fp->callee, callerid, JSACC_READ, &v)) {
                 break;
             }
             valueCount += fp->argc;
         }
@@ -316,17 +316,17 @@ InitExnPrivate(JSContext *cx, JSObject *
     priv->errorReport = NULL;
     priv->message = message;
     priv->filename = filename;
     priv->lineno = lineno;
     priv->stackDepth = stackDepth;
 
     values = GetStackTraceValueBuffer(priv);
     elem = priv->stackElems;
-    for (fp = cx->fp; fp != fpstop; fp = fp->down) {
+    for (fp = js_GetTopStackFrame(cx); fp != fpstop; fp = fp->down) {
         if (!fp->fun) {
             elem->funName = NULL;
             elem->argc = 0;
         } else {
             elem->funName = fp->fun->atom
                             ? ATOM_TO_STRING(fp->fun->atom)
                             : cx->runtime->emptyString;
             elem->argc = fp->argc;
@@ -737,17 +737,17 @@ StringToFilename(JSContext *cx, JSString
 
 static JSBool
 Exception(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     uint32 lineno;
     JSString *message, *filename;
     JSStackFrame *fp;
 
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         /*
          * ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when
          * called as functions, without operator new.  But as we do not give
          * each constructor a distinct JSClass, whose .name member is used by
          * js_NewObject to find the class prototype, we must get the class
          * prototype ourselves.
          */
         if (!OBJ_GET_PROPERTY(cx, JSVAL_TO_OBJECT(argv[-2]),
@@ -781,34 +781,34 @@ Exception(JSContext *cx, JSObject *obj, 
     /* Set the 'fileName' property. */
     if (argc > 1) {
         filename = js_ValueToString(cx, argv[1]);
         if (!filename)
             return JS_FALSE;
         argv[1] = STRING_TO_JSVAL(filename);
         fp = NULL;
     } else {
-        fp = JS_GetScriptedCaller(cx, NULL);
+        fp = js_GetScriptedCaller(cx, NULL);
         if (fp) {
             filename = FilenameToString(cx, fp->script->filename);
             if (!filename)
                 return JS_FALSE;
         } else {
             filename = cx->runtime->emptyString;
         }
     }
 
     /* Set the 'lineNumber' property. */
     if (argc > 2) {
         lineno = js_ValueToECMAUint32(cx, &argv[2]);
         if (JSVAL_IS_NULL(argv[2]))
             return JS_FALSE;
     } else {
         if (!fp)
-            fp = JS_GetScriptedCaller(cx, NULL);
+            fp = js_GetScriptedCaller(cx, NULL);
         lineno = (fp && fp->regs) ? js_FramePCToLineNumber(cx, fp) : 0;
     }
 
     return (OBJ_GET_CLASS(cx, obj) != &js_ErrorClass) ||
             InitExnPrivate(cx, obj, message, filename, lineno, NULL);
 }
 
 /*
--- a/js/src/jsfile.cpp
+++ b/js/src/jsfile.cpp
@@ -2176,17 +2176,17 @@ js_NewFileObjectFromFILE(JSContext *cx, 
 */
 static JSBool
 file_constructor(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                  jsval *rval)
 {
     JSString *str;
     JSFile   *file;
 
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         /* Replace obj with a new File object. */
         obj = JS_NewObject(cx, &js_FileClass, NULL, NULL);
         if (!obj)
             return JS_FALSE;
         *rval = OBJECT_TO_JSVAL(obj);
     }
 
     str = (argc == 0)
--- a/js/src/jsfun.cpp
+++ b/js/src/jsfun.cpp
@@ -1000,17 +1000,18 @@ fun_getProperty(JSContext *cx, JSObject 
         if (slot != ARGS_LENGTH)
             return JS_TRUE;
         obj = OBJ_GET_PROTO(cx, obj);
         if (!obj)
             return JS_TRUE;
     }
 
     /* Find fun's top-most activation record. */
-    for (fp = cx->fp; fp && (fp->fun != fun || (fp->flags & JSFRAME_SPECIAL));
+    for (fp = js_GetTopStackFrame(cx);
+         fp && (fp->fun != fun || (fp->flags & JSFRAME_SPECIAL));
          fp = fp->down) {
         continue;
     }
 
     switch (slot) {
       case CALL_ARGUMENTS:
         /* Warn if strict about f.arguments or equivalent unqualified uses. */
         if (!JS_ReportErrorFlagsAndNumber(cx,
@@ -1792,33 +1793,32 @@ static JSFunctionSpec function_methods[]
     JS_FN("__applyConstructor__", fun_applyConstructor, 1,0),
 #endif
     JS_FS_END
 };
 
 static JSBool
 Function(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
-    JSStackFrame *fp, *caller;
     JSFunction *fun;
     JSObject *parent;
+    JSStackFrame *fp, *caller;
     uintN i, n, lineno;
     JSAtom *atom;
     const char *filename;
     JSBool ok;
     JSString *str, *arg;
     JSTokenStream ts;
     JSPrincipals *principals;
     jschar *collected_args, *cp;
     void *mark;
     size_t arg_length, args_length, old_args_length;
     JSTokenType tt;
 
-    fp = cx->fp;
-    if (!(fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         obj = js_NewObject(cx, &js_FunctionClass, NULL, NULL, 0);
         if (!obj)
             return JS_FALSE;
         *rval = OBJECT_TO_JSVAL(obj);
     } else {
         /*
          * The constructor is called before the private slot is initialized so
          * we must use JS_GetPrivate, not GET_FUNCTION_PRIVATE here.
@@ -1847,18 +1847,19 @@ Function(JSContext *cx, JSObject *obj, u
 
     /*
      * Function is static and not called directly by other functions in this
      * file, therefore it is callable only as a native function by js_Invoke.
      * Find the scripted caller, possibly skipping other native frames such as
      * are built for Function.prototype.call or .apply activations that invoke
      * Function indirectly from a script.
      */
+    fp = js_GetTopStackFrame(cx);
     JS_ASSERT(!fp->script && fp->fun && fp->fun->u.n.native == Function);
-    caller = JS_GetScriptedCaller(cx, fp);
+    caller = js_GetScriptedCaller(cx, fp);
     if (caller) {
         principals = JS_EvalFramePrincipals(cx, fp, caller);
         filename = js_ComputeFilename(cx, caller, principals, &lineno);
     } else {
         filename = NULL;
         lineno = 0;
         principals = NULL;
     }
@@ -2197,17 +2198,17 @@ js_ValueToFunctionObject(JSContext *cx, 
     if (VALUE_IS_FUNCTION(cx, *vp))
         return JSVAL_TO_OBJECT(*vp);
 
     fun = js_ValueToFunction(cx, vp, flags);
     if (!fun)
         return NULL;
     *vp = OBJECT_TO_JSVAL(FUN_OBJECT(fun));
 
-    caller = JS_GetScriptedCaller(cx, cx->fp);
+    caller = js_GetScriptedCaller(cx, NULL);
     if (caller) {
         principals = JS_StackFramePrincipals(cx, caller);
     } else {
         /* No scripted caller, don't allow access. */
         principals = NULL;
     }
 
     if (!js_CheckPrincipalsAccess(cx, FUN_OBJECT(fun), principals,
@@ -2239,17 +2240,17 @@ js_ValueToCallableObject(JSContext *cx, 
 void
 js_ReportIsNotFunction(JSContext *cx, jsval *vp, uintN flags)
 {
     JSStackFrame *fp;
     uintN error;
     const char *name, *source;
     JSTempValueRooter tvr;
 
-    for (fp = cx->fp; fp && !fp->regs; fp = fp->down)
+    for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
         continue;
     name = source = NULL;
     JS_PUSH_TEMP_ROOT_STRING(cx, NULL, &tvr);
     if (flags & JSV2F_ITERATOR) {
         error = JSMSG_BAD_ITERATOR;
         name = js_iterator_str;
         tvr.u.string = js_ValueToSource(cx, *vp);
         if (!tvr.u.string)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2925,17 +2925,17 @@ js_TraceContext(JSTracer *trc, JSContext
         acx->doubleFreeList = NULL;
     }
 
     /*
      * Iterate frame chain and dormant chains.
      *
      * (NB: see comment on this whole "dormant" thing in js_Execute.)
      */
-    fp = acx->fp;
+    fp = js_GetTopStackFrame(acx);
     nextChain = acx->dormantFrameChain;
     if (!fp)
         goto next_chain;
 
     /* The top frame must not be dormant. */
     JS_ASSERT(!fp->dormantNext);
     for (;;) {
         do {
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -104,17 +104,17 @@ js_GenerateShape(JSContext *cx, JSBool g
         shape = JS_ATOMIC_INCREMENT(&rt->shapeGen);
         JS_ASSERT(shape != 0);
         JS_ASSERT_IF(shape & SHAPE_OVERFLOW_BIT,
                      JS_PROPERTY_CACHE(cx).disabled);
     }
     return shape;
 }
 
-void
+JS_REQUIRES_STACK void
 js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
                      uintN scopeIndex, uintN protoIndex,
                      JSObject *pobj, JSScopeProperty *sprop,
                      JSPropCacheEntry **entryp)
 {
     JSPropertyCache *cache;
     jsbytecode *pc;
     JSScope *scope;
@@ -296,17 +296,17 @@ js_FillPropertyCache(JSContext *cx, JSOb
     entry->vcap = PCVCAP_MAKE(scope->shape, scopeIndex, protoIndex);
     entry->vword = vword;
     *entryp = entry;
 
     cache->empty = JS_FALSE;
     PCMETER(cache->fills++);
 }
 
-JSAtom *
+JS_REQUIRES_STACK JSAtom *
 js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc,
                          JSObject **objp, JSObject **pobjp,
                          JSPropCacheEntry **entryp)
 {
     JSOp op;
     const JSCodeSpec *cs;
     ptrdiff_t pcoff;
     JSAtom *atom;
@@ -797,17 +797,17 @@ js_ComputeGlobalThis(JSContext *cx, JSBo
          * has access to the callee's parent object. Note that if lazy, the
          * running script whose principals we want to check is the script
          * associated with fp->down, not with fp.
          *
          * FIXME: 417851 -- this access check should not be required, as it
          * imposes a performance penalty on all js_ComputeGlobalThis calls,
          * and it represents a maintenance hazard.
          */
-        fp = cx->fp;    /* quell GCC overwarning */
+        fp = js_GetTopStackFrame(cx);    /* quell GCC overwarning */
         if (lazy) {
             JS_ASSERT(fp->argv == argv);
             fp->dormantNext = cx->dormantFrameChain;
             cx->dormantFrameChain = fp;
             cx->fp = fp->down;
             fp->down = NULL;
         }
         thisp = JSVAL_TO_OBJECT(argv[-2]);
@@ -1257,17 +1257,17 @@ have_fun:
     frame.script = script;
     frame.callee = funobj;
     frame.fun = fun;
     frame.argc = argc;
     frame.argv = argv;
 
     /* Default return value for a constructor is the new object. */
     frame.rval = (flags & JSINVOKE_CONSTRUCT) ? vp[1] : JSVAL_VOID;
-    frame.down = cx->fp;
+    frame.down = js_GetTopStackFrame(cx);
     frame.annotation = NULL;
     frame.scopeChain = NULL;    /* set below for real, after cx->fp is set */
     frame.regs = NULL;
     frame.imacpc = NULL;
     frame.slots = NULL;
     frame.sharpDepth = 0;
     frame.sharpArray = NULL;
     frame.flags = flags | rootedArgsFlag;
@@ -1461,17 +1461,17 @@ js_Execute(JSContext *cx, JSObject *chai
 
 #ifdef INCLUDE_MOZILLA_DTRACE
     if (JAVASCRIPT_EXECUTE_START_ENABLED())
         jsdtrace_execute_start(script);
 #endif
 
     hook = cx->debugHooks->executeHook;
     hookData = mark = NULL;
-    oldfp = cx->fp;
+    oldfp = js_GetTopStackFrame(cx);
     frame.script = script;
     if (down) {
         /* Propagate arg state for eval and the debugger API. */
         frame.callobj = down->callobj;
         frame.argsobj = down->argsobj;
         frame.varobj = down->varobj;
         frame.callee = down->callee;
         frame.fun = down->fun;
@@ -1828,17 +1828,17 @@ js_InternNonIntElementId(JSContext *cx, 
 
     return js_ValueToStringId(cx, idval, idp);
 }
 
 /*
  * Enter the new with scope using an object at sp[-1] and associate the depth
  * of the with block with sp + stackIndex.
  */
-JS_STATIC_INTERPRET JSBool
+JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
 js_EnterWith(JSContext *cx, jsint stackIndex)
 {
     JSStackFrame *fp;
     jsval *sp;
     JSObject *obj, *parent, *withobj;
 
     fp = cx->fp;
     sp = fp->regs->sp;
@@ -1867,45 +1867,45 @@ js_EnterWith(JSContext *cx, jsint stackI
     if (!withobj)
         return JS_FALSE;
 
     fp->scopeChain = withobj;
     js_DisablePropertyCache(cx);
     return JS_TRUE;
 }
 
-JS_STATIC_INTERPRET void
+JS_STATIC_INTERPRET JS_REQUIRES_STACK void
 js_LeaveWith(JSContext *cx)
 {
     JSObject *withobj;
 
     withobj = cx->fp->scopeChain;
     JS_ASSERT(OBJ_GET_CLASS(cx, withobj) == &js_WithClass);
     JS_ASSERT(OBJ_GET_PRIVATE(cx, withobj) == cx->fp);
     JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
     cx->fp->scopeChain = OBJ_GET_PARENT(cx, withobj);
     JS_SetPrivate(cx, withobj, NULL);
     js_EnablePropertyCache(cx);
 }
 
-JSClass *
+JS_REQUIRES_STACK JSClass *
 js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
 {
     JSClass *clasp;
 
     clasp = OBJ_GET_CLASS(cx, obj);
     if ((clasp == &js_WithClass || clasp == &js_BlockClass) &&
         OBJ_GET_PRIVATE(cx, obj) == cx->fp &&
         OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) {
         return clasp;
     }
     return NULL;
 }
 
-JS_STATIC_INTERPRET jsint
+JS_STATIC_INTERPRET JS_REQUIRES_STACK jsint
 js_CountWithBlocks(JSContext *cx, JSStackFrame *fp)
 {
     jsint n;
     JSObject *obj;
     JSClass *clasp;
 
     n = 0;
     for (obj = fp->scopeChain;
@@ -1916,17 +1916,17 @@ js_CountWithBlocks(JSContext *cx, JSStac
     }
     return n;
 }
 
 /*
  * Unwind block and scope chains to match the given depth. The function sets
  * fp->sp on return to stackDepth.
  */
-JSBool
+JS_REQUIRES_STACK JSBool
 js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
                JSBool normalUnwind)
 {
     JSObject *obj;
     JSClass *clasp;
 
     JS_ASSERT(stackDepth >= 0);
     JS_ASSERT(StackBase(fp) + stackDepth <= fp->regs->sp);
@@ -1986,17 +1986,17 @@ js_DoIncDec(JSContext *cx, const JSCodeS
 
     if (!(cs->format & JOF_POST))
         *vp = *vp2;
     return JS_TRUE;
 }
 
 #ifdef DEBUG
 
-JS_STATIC_INTERPRET void
+JS_STATIC_INTERPRET JS_REQUIRES_STACK void
 js_TraceOpcode(JSContext *cx, jsint len)
 {
     FILE *tracefp;
     JSStackFrame *fp;
     JSFrameRegs *regs;
     JSOp prevop;
     intN ndefs, n, nuses;
     jsval *siter;
@@ -2438,17 +2438,17 @@ JS_STATIC_ASSERT(JSOP_NULL_LENGTH == JSO
 JS_STATIC_ASSERT(JSOP_IFNE_LENGTH == JSOP_IFEQ_LENGTH);
 JS_STATIC_ASSERT(JSOP_IFNE == JSOP_IFEQ + 1);
 
 /* For the fastest case inder JSOP_INCNAME, etc. */
 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_DECNAME_LENGTH);
 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEINC_LENGTH);
 JS_STATIC_ASSERT(JSOP_INCNAME_LENGTH == JSOP_NAMEDEC_LENGTH);
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_Interpret(JSContext *cx)
 {
     JSRuntime *rt;
     JSStackFrame *fp;
     JSScript *script;
     uintN inlineCallCount;
     JSAtom **atoms;
     JSVersion currentVersion, originalVersion;
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -290,17 +290,17 @@ typedef struct JSPropertyCache {
 #define PCVAL_TO_SPROP(v)       ((JSScopeProperty *) PCVAL_CLRTAG(v))
 #define SPROP_TO_PCVAL(sprop)   PCVAL_SETTAG(sprop, PCVAL_SPROP)
 
 /*
  * Fill property cache entry for key cx->fp->pc, optimized value word computed
  * from obj and sprop, and entry capability forged from 24-bit OBJ_SHAPE(obj),
  * 4-bit scopeIndex, and 4-bit protoIndex.
  */
-extern void
+extern JS_REQUIRES_STACK void
 js_FillPropertyCache(JSContext *cx, JSObject *obj, jsuword kshape,
                      uintN scopeIndex, uintN protoIndex,
                      JSObject *pobj, JSScopeProperty *sprop,
                      JSPropCacheEntry **entryp);
 
 /*
  * Property cache lookup macros. PROPERTY_CACHE_TEST is designed to inline the
  * fast path in js_Interpret, so it makes "just-so" restrictions on parameters,
@@ -345,17 +345,17 @@ js_FillPropertyCache(JSContext *cx, JSOb
             }                                                                 \
             JS_UNLOCK_OBJ(cx, pobj);                                          \
         }                                                                     \
         atom = js_FullTestPropertyCache(cx, pc, &obj, &pobj, &entry);         \
         if (atom)                                                             \
             PCMETER(cache_->misses++);                                        \
     } while (0)
 
-extern JSAtom *
+extern JS_REQUIRES_STACK JSAtom *
 js_FullTestPropertyCache(JSContext *cx, jsbytecode *pc,
                          JSObject **objp, JSObject **pobjp,
                          JSPropCacheEntry **entryp);
 
 extern void
 js_FlushPropertyCache(JSContext *cx);
 
 extern void
@@ -468,17 +468,17 @@ js_InternalGetOrSet(JSContext *cx, JSObj
 
 extern JSBool
 js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
            JSStackFrame *down, uintN flags, jsval *result);
 
 extern JSBool
 js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_Interpret(JSContext *cx);
 
 #define JSPROP_INITIALIZER 0x100   /* NB: Not a valid property attribute. */
 
 extern JSBool
 js_CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
                       JSObject **objp, JSProperty **propp);
 
@@ -528,33 +528,33 @@ js_FreeRawStack(JSContext *cx, void *mar
  *   var h = w1.g()
  *   alert(h() == w1)
  *
  * The alert should display "true".
  */
 extern JSObject *
 js_ComputeGlobalThis(JSContext *cx, JSBool lazy, jsval *argv);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_EnterWith(JSContext *cx, jsint stackIndex);
 
-extern void
+extern JS_REQUIRES_STACK void
 js_LeaveWith(JSContext *cx);
 
-extern JSClass *
+extern JS_REQUIRES_STACK JSClass *
 js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth);
 
 extern jsint
 js_CountWithBlocks(JSContext *cx, JSStackFrame *fp);
 
 /*
  * Unwind block and scope chains to match the given depth. The function sets
  * fp->sp on return to stackDepth.
  */
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_UnwindScope(JSContext *cx, JSStackFrame *fp, jsint stackDepth,
                JSBool normalUnwind);
 
 extern JSBool
 js_InternNonIntElementId(JSContext *cx, JSObject *obj, jsval idval, jsid *idp);
 
 extern JSBool
 js_OnUnknownMethod(JSContext *cx, jsval *vp);
@@ -567,17 +567,17 @@ js_OnUnknownMethod(JSContext *cx, jsval 
  */
 extern JSBool
 js_DoIncDec(JSContext *cx, const JSCodeSpec *cs, jsval *vp, jsval *vp2);
 
 /*
  * Opcode tracing helper. When len is not 0, cx->fp->regs->pc[-len] gives the
  * previous opcode.
  */
-extern void
+extern JS_REQUIRES_STACK void
 js_TraceOpcode(JSContext *cx, jsint len);
 
 /*
  * JS_OPMETER helper functions.
  */
 extern void
 js_MeterOpcodePair(JSOp op1, JSOp op2);
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -169,17 +169,17 @@ Iterator(JSContext *cx, JSObject *iterob
 {
     JSBool keyonly;
     uintN flags;
     JSObject *obj;
 
     keyonly = js_ValueToBoolean(argv[1]);
     flags = keyonly ? 0 : JSITER_FOREACH;
 
-    if (cx->fp->flags & JSFRAME_CONSTRUCTING) {
+    if (JS_IsConstructing(cx)) {
         /* XXX work around old valueOf call hidden beneath js_ValueToObject */
         if (!JSVAL_IS_PRIMITIVE(argv[0])) {
             obj = JSVAL_TO_OBJECT(argv[0]);
         } else {
             obj = js_ValueToNonNullObject(cx, argv[0]);
             if (!obj)
                 return JS_FALSE;
             argv[0] = OBJECT_TO_JSVAL(obj);
@@ -858,17 +858,17 @@ SendToGenerator(JSContext *cx, JSGenerat
     /* Extend the current stack pool with gen->arena. */
     arena = cx->stackPool.current;
     JS_ASSERT(!arena->next);
     JS_ASSERT(!gen->arena.next);
     JS_ASSERT(cx->stackPool.current != &gen->arena);
     cx->stackPool.current = arena->next = &gen->arena;
 
     /* Push gen->frame around the interpreter activation. */
-    fp = cx->fp;
+    fp = js_GetTopStackFrame(cx);
     cx->fp = &gen->frame;
     gen->frame.down = fp;
     ok = js_Interpret(cx);
     cx->fp = fp;
     gen->frame.down = NULL;
 
     /* Retract the stack pool and sanitize gen->arena. */
     JS_ASSERT(!gen->arena.next);
--- a/js/src/jsnum.cpp
+++ b/js/src/jsnum.cpp
@@ -256,17 +256,17 @@ Number(JSContext *cx, JSObject *obj, uin
         } else {
             if (!js_NewNumberInRootedValue(cx, d, &argv[0]))
                 return JS_FALSE;
             v = argv[0];
         }
     } else {
         v = JSVAL_ZERO;
     }
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         *rval = v;
         return JS_TRUE;
     }
     STOBJ_SET_SLOT(obj, JSSLOT_PRIVATE, v);
     return JS_TRUE;
 }
 
 #if JS_HAS_TOSOURCE
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -1190,18 +1190,18 @@ obj_eval(JSContext *cx, JSObject *obj, u
     JSScript *script;
     JSBool ok;
 #if JS_HAS_EVAL_THIS_SCOPE
     JSObject *callerScopeChain = NULL, *callerVarObj = NULL;
     JSObject *setCallerScopeChain = NULL;
     JSBool setCallerVarObj = JS_FALSE;
 #endif
 
-    fp = cx->fp;
-    caller = JS_GetScriptedCaller(cx, fp);
+    fp = js_GetTopStackFrame(cx);
+    caller = js_GetScriptedCaller(cx, fp);
     indirectCall = (caller && caller->regs && *caller->regs->pc != JSOP_EVAL);
 
     /*
      * Ban all indirect uses of eval (global.foo = eval; global.foo(...)) and
      * calls that attempt to use a non-global object as the "with" object in
      * the former indirect case.
      */
     scopeobj = OBJ_GET_PARENT(cx, obj);
@@ -1380,17 +1380,17 @@ obj_watch_handler(JSContext *cx, JSObjec
     jsval argv[3];
     JSBool ok;
 
     callable = (JSObject *) closure;
 
     callbacks = JS_GetSecurityCallbacks(cx);
     if (callbacks && callbacks->findObjectPrincipals) {
         /* Skip over any obj_watch_* frames between us and the real subject. */
-        caller = JS_GetScriptedCaller(cx, cx->fp);
+        caller = js_GetScriptedCaller(cx, NULL);
         if (caller) {
             /*
              * Only call the watch handler if the watcher is allowed to watch
              * the currently executing script.
              */
             watcher = callbacks->findObjectPrincipals(cx, callable);
             subject = JS_StackFramePrincipals(cx, caller);
 
@@ -1853,17 +1853,17 @@ js_Object(JSContext *cx, JSObject *obj, 
         obj = NULL;
     } else {
         /* If argv[0] is null or undefined, obj comes back null. */
         if (!js_ValueToObject(cx, argv[0], &obj))
             return JS_FALSE;
     }
     if (!obj) {
         JS_ASSERT(!argc || JSVAL_IS_NULL(argv[0]) || JSVAL_IS_VOID(argv[0]));
-        if (cx->fp->flags & JSFRAME_CONSTRUCTING)
+        if (JS_IsConstructing(cx))
             return JS_TRUE;
         obj = js_NewObject(cx, &js_ObjectClass, NULL, NULL, 0);
         if (!obj)
             return JS_FALSE;
     }
     *rval = OBJECT_TO_JSVAL(obj);
     return JS_TRUE;
 }
@@ -1991,17 +1991,17 @@ JSClass js_WithClass = {
     "With",
     JSCLASS_HAS_PRIVATE | JSCLASS_HAS_RESERVED_SLOTS(1) | JSCLASS_IS_ANONYMOUS,
     JS_PropertyStub,  JS_PropertyStub,  JS_PropertyStub,  JS_PropertyStub,
     JS_EnumerateStub, JS_ResolveStub,   JS_ConvertStub,   JS_FinalizeStub,
     with_getObjectOps,
     0,0,0,0,0,0,0
 };
 
-JSObject *
+JS_REQUIRES_STACK JSObject *
 js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
 {
     JSObject *obj;
 
     obj = js_NewObject(cx, &js_WithClass, proto, parent, 0);
     if (!obj)
         return NULL;
     STOBJ_SET_SLOT(obj, JSSLOT_PRIVATE, PRIVATE_TO_JSVAL(cx->fp));
@@ -2045,17 +2045,17 @@ js_CloneBlockObject(JSContext *cx, JSObj
         return NULL;
     STOBJ_SET_SLOT(clone, JSSLOT_PRIVATE, PRIVATE_TO_JSVAL(fp));
     STOBJ_SET_SLOT(clone, JSSLOT_BLOCK_DEPTH,
                    OBJ_GET_SLOT(cx, proto, JSSLOT_BLOCK_DEPTH));
     JS_ASSERT(OBJ_IS_CLONED_BLOCK(clone));
     return clone;
 }
 
-JSBool
+JS_REQUIRES_STACK JSBool
 js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
 {
     JSStackFrame *fp;
     JSObject *obj;
     uintN depth, count;
 
     /* Blocks have one fixed slot available for the first local.*/
     JS_STATIC_ASSERT(JS_INITIAL_NSLOTS == JSSLOT_BLOCK_DEPTH + 2);
@@ -2798,23 +2798,33 @@ js_SetClassObject(JSContext *cx, JSObjec
         return JS_TRUE;
 
     return JS_SetReservedSlot(cx, obj, key, OBJECT_TO_JSVAL(cobj));
 }
 
 JSBool
 js_FindClassObject(JSContext *cx, JSObject *start, jsid id, jsval *vp)
 {
+    JSStackFrame *fp;
     JSObject *obj, *cobj, *pobj;
     JSProtoKey key;
     JSProperty *prop;
     jsval v;
     JSScopeProperty *sprop;
 
-    if (start || (cx->fp && (start = cx->fp->scopeChain) != NULL)) {
+    /*
+     * Find the global object. Use cx->fp directly to avoid falling off
+     * trace; all JIT-elided stack frames have the same global object as
+     * cx->fp.
+     */
+    VOUCH_DOES_NOT_REQUIRE_STACK();
+    if (!start && (fp = cx->fp) != NULL)
+        start = fp->scopeChain;
+
+    if (start) {
         /* Find the topmost object in the scope chain. */
         do {
             obj = start;
             start = OBJ_GET_PARENT(cx, obj);
         } while (start);
     } else {
         obj = cx->globalObject;
         if (!obj) {
@@ -3276,17 +3286,17 @@ bad:
     return JS_FALSE;
 }
 
 /*
  * Given pc pointing after a property accessing bytecode, return true if the
  * access is "object-detecting" in the sense used by web scripts, e.g., when
  * checking whether document.all is defined.
  */
-static JSBool
+static JS_REQUIRES_STACK JSBool
 Detecting(JSContext *cx, jsbytecode *pc)
 {
     JSScript *script;
     jsbytecode *endpc;
     JSOp op;
     JSAtom *atom;
 
     if (!cx->fp)
@@ -3409,26 +3419,28 @@ js_LookupPropertyWithFlags(JSContext *cx
                     goto out;
                 }
                 generation = cx->resolvingTable->generation;
 
                 /* Null *propp here so we can test it at cleanup: safely. */
                 *propp = NULL;
 
                 if (clasp->flags & JSCLASS_NEW_RESOLVE) {
+                    JSStackFrame *fp = js_GetTopStackFrame(cx);
+
                     newresolve = (JSNewResolveOp)resolve;
-                    if (flags == JSRESOLVE_INFER && cx->fp && cx->fp->regs) {
+                    if (flags == JSRESOLVE_INFER && fp && fp->regs) {
                         flags = 0;
-                        pc = cx->fp->regs->pc;
+                        pc = fp->regs->pc;
                         cs = &js_CodeSpec[*pc];
                         format = cs->format;
                         if (JOF_MODE(format) != JOF_NAME)
                             flags |= JSRESOLVE_QUALIFIED;
                         if ((format & (JOF_SET | JOF_FOR)) ||
-                            (cx->fp->flags & JSFRAME_ASSIGNING)) {
+                            (fp->flags & JSFRAME_ASSIGNING)) {
                             flags |= JSRESOLVE_ASSIGNING;
                         } else {
                             pc += cs->length;
                             if (Detecting(cx, pc))
                                 flags |= JSRESOLVE_DETECTING;
                         }
                         if (format & JOF_DECLARING)
                             flags |= JSRESOLVE_DECLARING;
@@ -3545,17 +3557,18 @@ js_FindPropertyHelper(JSContext *cx, jsi
                       JSPropCacheEntry **entryp)
 {
     JSObject *obj, *pobj, *lastobj;
     uint32 shape;
     int scopeIndex, protoIndex;
     JSProperty *prop;
     JSScopeProperty *sprop;
 
-    obj = cx->fp->scopeChain;
+    JS_ASSERT_IF(entryp, !JS_EXECUTING_TRACE(cx));
+    obj = js_GetTopStackFrame(cx)->scopeChain;
     shape = OBJ_SHAPE(obj);
     for (scopeIndex = 0; ; scopeIndex++) {
         if (obj->map->ops->lookupProperty == js_LookupProperty) {
             protoIndex =
                 js_LookupPropertyWithFlags(cx, obj, id, cx->resolveFlags,
                                            &pobj, &prop);
             if (protoIndex < 0)
                 return -1;
@@ -3598,17 +3611,17 @@ js_FindPropertyHelper(JSContext *cx, jsi
 
 JS_FRIEND_API(JSBool)
 js_FindProperty(JSContext *cx, jsid id, JSObject **objp, JSObject **pobjp,
                 JSProperty **propp)
 {
     return js_FindPropertyHelper(cx, id, objp, pobjp, propp, NULL) >= 0;
 }
 
-JSObject *
+JS_REQUIRES_STACK JSObject *
 js_FindIdentifierBase(JSContext *cx, jsid id, JSPropCacheEntry *entry)
 {
     JSObject *obj, *pobj;
     JSProperty *prop;
 
     /*
      * Look for id's property along the "with" statement chain and the
      * statically-linked scope chain.
@@ -3741,18 +3754,20 @@ js_NativeSet(JSContext *cx, JSObject *ob
 JSBool
 js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
                      JSPropCacheEntry **entryp)
 {
     uint32 shape;
     int protoIndex;
     JSObject *obj2;
     JSProperty *prop;
+    JSStackFrame *fp;
     JSScopeProperty *sprop;
 
+    JS_ASSERT_IF(entryp, !JS_ON_TRACE(cx));
     /* Convert string indices to integers if appropriate. */
     CHECK_FOR_STRING_INDEX(id);
     JS_COUNT_OPERATION(cx, JSOW_GET_PROPERTY);
 
     shape = OBJ_SHAPE(obj);
     protoIndex = js_LookupPropertyWithFlags(cx, obj, id, cx->resolveFlags,
                                             &obj2, &prop);
     if (protoIndex < 0)
@@ -3769,21 +3784,21 @@ js_GetPropertyHelper(JSContext *cx, JSOb
             PCMETER(JS_PROPERTY_CACHE(cx).nofills++);
             *entryp = NULL;
         }
 
         /*
          * Give a strict warning if foo.bar is evaluated by a script for an
          * object foo with no property named 'bar'.
          */
-        if (JSVAL_IS_VOID(*vp) && cx->fp && cx->fp->regs) {
+        if (JSVAL_IS_VOID(*vp) && (fp = js_GetTopStackFrame(cx)) && fp->regs) {
             JSOp op;
             uintN flags;
 
-            pc = cx->fp->regs->pc;
+            pc = fp->regs->pc;
             op = (JSOp) *pc;
             if (op == JSOP_GETXPROP) {
                 flags = JSREPORT_ERROR;
             } else {
                 if (!JS_HAS_STRICT_OPTION(cx) ||
                     (op != JSOP_GETPROP && op != JSOP_GETELEM)) {
                     return JS_TRUE;
                 }
@@ -3791,17 +3806,17 @@ js_GetPropertyHelper(JSContext *cx, JSOb
                 /*
                  * XXX do not warn about missing __iterator__ as the function
                  * may be called from JS_GetMethodById. See bug 355145.
                  */
                 if (id == ATOM_TO_JSID(cx->runtime->atomState.iteratorAtom))
                     return JS_TRUE;
 
                 /* Kludge to allow (typeof foo == "undefined") tests. */
-                JS_ASSERT(cx->fp->script);
+                JS_ASSERT(fp->script);
                 pc += js_CodeSpec[op].length;
                 if (Detecting(cx, pc))
                     return JS_TRUE;
 
                 flags = JSREPORT_WARNING | JSREPORT_STRICT;
             }
 
             /* Ok, bad undefined property reference: whine about it. */
@@ -3818,18 +3833,20 @@ js_GetPropertyHelper(JSContext *cx, JSOb
         OBJ_DROP_PROPERTY(cx, obj2, prop);
         return OBJ_GET_PROPERTY(cx, obj2, id, vp);
     }
 
     sprop = (JSScopeProperty *) prop;
     if (!js_NativeGet(cx, obj, obj2, sprop, vp))
         return JS_FALSE;
 
-    if (entryp)
+    if (entryp) {
+        JS_ASSERT_NOT_EXECUTING_TRACE(cx);
         js_FillPropertyCache(cx, obj, shape, 0, protoIndex, obj2, sprop, entryp);
+    }
     JS_UNLOCK_OBJ(cx, obj2);
     return JS_TRUE;
 }
 
 JSBool
 js_GetProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp)
 {
     return js_GetPropertyHelper(cx, obj, id, vp, NULL);
@@ -4019,16 +4036,17 @@ js_SetPropertyHelper(JSContext *cx, JSOb
                             JS_UNLOCK_SCOPE(cx, scope);
                             return JS_FALSE);
     }
 
     if (!js_NativeSet(cx, obj, sprop, vp))
         return JS_FALSE;
 
     if (entryp) {
+        JS_ASSERT_NOT_EXECUTING_TRACE(cx);
         if (!(attrs & JSPROP_SHARED))
             js_FillPropertyCache(cx, obj, shape, 0, 0, obj, sprop, entryp);
         else
             PCMETER(JS_PROPERTY_CACHE(cx).nofills++);
     }
     JS_UNLOCK_SCOPE(cx, scope);
     return JS_TRUE;
 
@@ -4633,31 +4651,31 @@ js_Call(JSContext *cx, JSObject *obj, ui
         if (!OBJ_GET_PROPERTY(cx, callee,
                               ATOM_TO_JSID(cx->runtime->atomState.__call__Atom),
                               &fval)) {
             return JS_FALSE;
         }
         if (VALUE_IS_FUNCTION(cx, fval)) {
             if (!GetCurrentExecutionContext(cx, obj, &nargv[2]))
                 return JS_FALSE;
-            args = js_GetArgsObject(cx, cx->fp);
+            args = js_GetArgsObject(cx, js_GetTopStackFrame(cx));
             if (!args)
                 return JS_FALSE;
             nargv[0] = OBJECT_TO_JSVAL(obj);
             nargv[1] = OBJECT_TO_JSVAL(args);
             return js_InternalCall(cx, callee, fval, 3, nargv, rval);
         }
         if (JSVAL_IS_OBJECT(fval) && JSVAL_TO_OBJECT(fval) != callee) {
             argv[-2] = fval;
             ok = js_Call(cx, obj, argc, argv, rval);
             argv[-2] = OBJECT_TO_JSVAL(callee);
             return ok;
         }
 #endif
-        js_ReportIsNotFunction(cx, &argv[-2], cx->fp->flags & JSFRAME_ITERATOR);
+        js_ReportIsNotFunction(cx, &argv[-2], js_GetTopStackFrame(cx)->flags & JSFRAME_ITERATOR);
         return JS_FALSE;
     }
     return clasp->call(cx, obj, argc, argv, rval);
 }
 
 JSBool
 js_Construct(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
              jsval *rval)
@@ -4676,17 +4694,17 @@ js_Construct(JSContext *cx, JSObject *ob
                               ATOM_TO_JSID(cx->runtime->atomState
                                            .__construct__Atom),
                               &cval)) {
             return JS_FALSE;
         }
         if (VALUE_IS_FUNCTION(cx, cval)) {
             if (!GetCurrentExecutionContext(cx, obj, &nargv[1]))
                 return JS_FALSE;
-            args = js_GetArgsObject(cx, cx->fp);
+            args = js_GetArgsObject(cx, js_GetTopStackFrame(cx));
             if (!args)
                 return JS_FALSE;
             nargv[0] = OBJECT_TO_JSVAL(args);
             return js_InternalCall(cx, callee, cval, 2, nargv, rval);
         }
         if (JSVAL_IS_OBJECT(cval) && JSVAL_TO_OBJECT(cval) != callee) {
             argv[-2] = cval;
             ok = js_Call(cx, obj, argc, argv, rval);
--- a/js/src/jsobj.h
+++ b/js/src/jsobj.h
@@ -342,33 +342,33 @@ extern JSClass  js_BlockClass;
 /*
  * To make sure this slot is well-defined, always call js_NewWithObject to
  * create a With object, don't call js_NewObject directly.  When creating a
  * With object that does not correspond to a stack slot, pass -1 for depth.
  *
  * When popping the stack across this object's "with" statement, client code
  * must call JS_SetPrivate(cx, withobj, NULL).
  */
-extern JSObject *
+extern JS_REQUIRES_STACK JSObject *
 js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth);
 
 /*
  * Create a new block scope object not linked to any proto or parent object.
  * Blocks are created by the compiler to reify let blocks and comprehensions.
  * Only when dynamic scope is captured do they need to be cloned and spliced
  * into an active scope chain.
  */
 extern JSObject *
 js_NewBlockObject(JSContext *cx);
 
 extern JSObject *
 js_CloneBlockObject(JSContext *cx, JSObject *proto, JSObject *parent,
                     JSStackFrame *fp);
 
-extern JSBool
+extern JS_REQUIRES_STACK JSBool
 js_PutBlockObject(JSContext *cx, JSBool normalUnwind);
 
 struct JSSharpObjectMap {
     jsrefcount  depth;
     jsatomid    sharpgen;
     JSHashTable *table;
 };
 
@@ -579,17 +579,17 @@ js_FindPropertyHelper(JSContext *cx, jsi
 /*
  * Return the index along the scope chain in which id was found, or the last
  * index if not found, or -1 on error.
  */
 extern JS_FRIEND_API(JSBool)
 js_FindProperty(JSContext *cx, jsid id, JSObject **objp, JSObject **pobjp,
                 JSProperty **propp);
 
-extern JSObject *
+extern JS_REQUIRES_STACK JSObject *
 js_FindIdentifierBase(JSContext *cx, jsid id, JSPropCacheEntry *entry);
 
 extern JSObject *
 js_FindVariableScope(JSContext *cx, JSFunction **funp);
 
 /*
  * NB: js_NativeGet and js_NativeSet are called with the scope containing sprop
  * (pobj's scope for Get, obj's for Set) locked, and on successful return, that
--- a/js/src/jsopcode.cpp
+++ b/js/src/jsopcode.cpp
@@ -1908,18 +1908,17 @@ Decompile(SprintStack *ss, jsbytecode *p
             JSStackFrame *fp;
             uint32 format, mode, type;
 
             /*
              * Rewrite non-get ops to their "get" format if the error is in
              * the bytecode at pc, so we don't decompile more than the error
              * expression.
              */
-            for (fp = cx->fp; fp && !fp->script; fp = fp->down)
-                continue;
+            fp = js_GetScriptedCaller(cx, NULL);
             format = cs->format;
             if (((fp && fp->regs && pc == fp->regs->pc) ||
                  (pc == startpc && cs->nuses != 0)) &&
                 format & (JOF_SET|JOF_DEL|JOF_INCDEC|JOF_FOR|JOF_VARPROP)) {
                 mode = JOF_MODE(format);
                 if (mode == JOF_NAME) {
                     /*
                      * JOF_NAME does not imply JOF_ATOM, so we must check for
@@ -2760,17 +2759,17 @@ Decompile(SprintStack *ss, jsbytecode *p
                      * We must be in an eval called from jp->fun, where
                      * jp->script is the eval-compiled script.
                      *
                      * However, it's possible that a js_Invoke already
                      * pushed a frame trying to call js_Construct on an
                      * object that's not a constructor, causing us to be
                      * called with an intervening frame on the stack.
                      */
-                    JSStackFrame *fp = cx->fp;
+                    JSStackFrame *fp = js_GetTopStackFrame(cx);
                     if (fp) {
                         while (!(fp->flags & JSFRAME_EVAL))
                             fp = fp->down;
                         JS_ASSERT(fp->script == jp->script);
                         JS_ASSERT(fp->down->fun == jp->fun);
                         JS_ASSERT(FUN_INTERPRETED(jp->fun));
                         JS_ASSERT(jp->script != jp->fun->u.i.script);
                         JS_ASSERT(jp->script->upvarsOffset != 0);
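For the Decompile changes above, a rough sketch of the assumed behaviour of js_GetScriptedCaller(cx, NULL): it is taken here to perform the same walk as the removed loop, but starting from the barriered top frame. The wrapper function below is hypothetical and not part of this patch.

    /* Hypothetical sketch; assumes js_GetScriptedCaller(cx, NULL) starts at
       the top of the stack via the read barrier. */
    static JSStackFrame *
    ExampleScriptedCaller(JSContext *cx)
    {
        JSStackFrame *fp = js_GetTopStackFrame(cx);   /* not cx->fp */
        while (fp && !fp->script)
            fp = fp->down;
        return fp;    /* nearest frame running a script, or NULL */
    }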
--- a/js/src/jsregexp.cpp
+++ b/js/src/jsregexp.cpp
@@ -4786,17 +4786,17 @@ static JSFunctionSpec regexp_methods[] =
     JS_FN("exec",           regexp_exec,        1,0),
     JS_TN("test",           regexp_test,        1,0, regexp_test_trcinfo),
     JS_FS_END
 };
 
 static JSBool
 RegExp(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         /*
          * If first arg is regexp and no flags are given, just return the arg.
          * (regexp_compile_sub detects the regexp + flags case and throws a
          * TypeError.)  See 10.15.3.1.
          */
         if ((argc < 2 || JSVAL_IS_VOID(argv[1])) &&
             !JSVAL_IS_PRIMITIVE(argv[0]) &&
             OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(argv[0])) == &js_RegExpClass) {
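The Script and String constructors below receive the same treatment; a minimal sketch of the pattern for a native constructor, using the public JS_IsConstructing API in place of the old frame-flag peek. ExampleCtor is hypothetical and not part of this change.

    /* Hypothetical native constructor showing the converted check. */
    static JSBool
    ExampleCtor(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
    {
        /* Before: if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) ... */
        if (!JS_IsConstructing(cx)) {
            /* Called as a plain function: no need to touch cx->fp. */
            *rval = JSVAL_VOID;
            return JS_TRUE;
        }
        /* Called via 'new': initialize obj; the engine returns it. */
        return JS_TRUE;
    }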
--- a/js/src/jsscript.cpp
+++ b/js/src/jsscript.cpp
@@ -225,17 +225,17 @@ script_compile_sub(JSContext *cx, JSObje
     scopeobj = NULL;
     if (argc >= 2) {
         if (!js_ValueToObject(cx, argv[1], &scopeobj))
             return JS_FALSE;
         argv[1] = OBJECT_TO_JSVAL(scopeobj);
     }
 
     /* Compile using the caller's scope chain, which js_Invoke passes to fp. */
-    caller = JS_GetScriptedCaller(cx, cx->fp);
+    caller = js_GetScriptedCaller(cx, NULL);
     JS_ASSERT(!caller || cx->fp->scopeChain == caller->scopeChain);
 
     if (caller) {
         if (!scopeobj) {
             scopeobj = js_GetScopeChain(cx, caller);
             if (!scopeobj)
                 return JS_FALSE;
         }
@@ -306,17 +306,17 @@ script_compile(JSContext *cx, uintN argc
     return script_compile_sub(cx, JS_THIS_OBJECT(cx, vp), argc, vp + 2, vp);
 }
 
 static JSBool
 script_exec_sub(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
                 jsval *rval)
 {
     JSObject *scopeobj, *parent;
-    JSStackFrame *fp, *caller;
+    JSStackFrame *caller;
     JSPrincipals *principals;
     JSScript *script;
     JSBool ok;
 
     if (!JS_InstanceOf(cx, obj, &js_ScriptClass, argv))
         return JS_FALSE;
 
     scopeobj = NULL;
@@ -333,18 +333,17 @@ script_exec_sub(JSContext *cx, JSObject 
      * (chain) argument to set the exec frame's varobj, thisp, and scopeChain.
      *
      * Unlike eval, which the compiler detects, Script.prototype.exec may be
      * called from a lightweight function, or even from native code (in which
      * case fp->varobj and fp->scopeChain are null).  If exec is called from
      * a lightweight function, we will need to get a Call object representing
      * its frame, to act as the var object and scope chain head.
      */
-    fp = cx->fp;
-    caller = JS_GetScriptedCaller(cx, fp);
+    caller = js_GetScriptedCaller(cx, NULL);
     if (caller && !caller->varobj) {
         /* Called from a lightweight function. */
         JS_ASSERT(caller->fun && !JSFUN_HEAVYWEIGHT_TEST(caller->fun->flags));
 
         /* Scope chain links from Call object to callee's parent. */
         parent = OBJ_GET_PARENT(cx, caller->callee);
         if (!js_GetCallObject(cx, caller, parent))
             return JS_FALSE;
@@ -898,17 +897,17 @@ JS_FRIEND_DATA(JSClass) js_ScriptClass =
 };
 
 #if JS_HAS_SCRIPT_OBJECT
 
 static JSBool
 Script(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     /* If not constructing, replace obj with a new Script object. */
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         obj = js_NewObject(cx, &js_ScriptClass, NULL, NULL, 0);
         if (!obj)
             return JS_FALSE;
 
         /*
          * script_compile_sub does not use rval to root its temporaries so we
          * can use it to root obj.
          */
@@ -1596,17 +1595,19 @@ js_DestroyScript(JSContext *cx, JSScript
      * wrapping it to protect the script's mapped atoms against GC. We use
      * script->owner to enforce this requirement via assertions.
      */
 #ifdef CHECK_SCRIPT_OWNER
     JS_ASSERT_IF(cx->runtime->gcRunning, !script->owner);
 #endif
 
     if (!cx->runtime->gcRunning) {
-        if (!(cx->fp && (cx->fp->flags & JSFRAME_EVAL))) {
+        JSStackFrame *fp = js_GetTopStackFrame(cx);
+
+        if (!(fp && (fp->flags & JSFRAME_EVAL))) {
 #ifdef CHECK_SCRIPT_OWNER
             JS_ASSERT(script->owner == cx->thread);
 #endif
             js_FlushPropertyCacheForScript(cx, script);
         }
     }
 
     JS_free(cx, script);
--- a/js/src/jsstaticcheck.h
+++ b/js/src/jsstaticcheck.h
@@ -44,14 +44,25 @@
 /*
  * Trigger a control flow check to make sure that code flows through label
  */
 inline __attribute__ ((unused)) void MUST_FLOW_THROUGH(const char *label) {
 }
 
 /* avoid unused goto-label warnings */
 #define MUST_FLOW_LABEL(label) goto label; label:
+
+inline JS_FORCES_STACK void VOUCH_DOES_NOT_REQUIRE_STACK() {}
+
+inline JS_FORCES_STACK void
+JS_ASSERT_NOT_EXECUTING_TRACE(JSContext *cx)
+{
+    JS_ASSERT(!JS_EXECUTING_TRACE(cx));
+}
+
 #else
-#define MUST_FLOW_THROUGH(label) ((void)0)
+#define MUST_FLOW_THROUGH(label)            ((void) 0)
 #define MUST_FLOW_LABEL(label)
+#define VOUCH_DOES_NOT_REQUIRE_STACK()      ((void) 0)
+#define JS_ASSERT_NOT_EXECUTING_TRACE(cx)   JS_ASSERT(!JS_EXECUTING_TRACE(cx))
 #endif
 
 #endif /* jsstaticcheck_h___ */
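A minimal sketch of the usage contract these helpers and the JS_REQUIRES_STACK/JS_FORCES_STACK annotations are meant to express, as inferred from the call sites elsewhere in this patch; both functions below are hypothetical and not part of the change.

    /* Hypothetical illustrations of the contract only. */
    static JS_REQUIRES_STACK void
    ExampleRequiresStack(JSContext *cx)
    {
        /* Annotated JS_REQUIRES_STACK: reading cx->fp directly is fine here;
           the static analysis is expected to require callers to carry the
           same annotation or to vouch. */
        JSStackFrame *fp = cx->fp;
        (void) fp;
    }

    static void
    ExampleBarriered(JSContext *cx)
    {
        /* Unannotated code reads the frame through the JS_FORCES_STACK
           barrier (see js_GetTopStackFrame in jstracer.cpp below)... */
        JSStackFrame *fp = js_GetTopStackFrame(cx);
        (void) fp;

        /* ...or vouches when it is known to run off trace, as
           js_SynthesizeFrame does below. */
        VOUCH_DOES_NOT_REQUIRE_STACK();
    }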
--- a/js/src/jsstr.cpp
+++ b/js/src/jsstr.cpp
@@ -1397,17 +1397,17 @@ match_glob(JSContext *cx, jsint count, G
     JSObject *arrayobj;
     JSSubString *matchsub;
     JSString *matchstr;
     jsval v;
 
     mdata = (MatchData *)data;
     arrayobj = JSVAL_TO_OBJECT(*mdata->arrayval);
     if (!arrayobj) {
-        arrayobj = js_ConstructObject(cx, &js_ArrayClass, NULL, NULL, 0, NULL);
+        arrayobj = js_NewArrayObject(cx, 0, NULL);
         if (!arrayobj)
             return JS_FALSE;
         *mdata->arrayval = OBJECT_TO_JSVAL(arrayobj);
     }
     matchsub = &cx->regExpStatics.lastMatch;
     matchstr = js_NewStringCopyN(cx, matchsub->chars, matchsub->length);
     if (!matchstr)
         return JS_FALSE;
@@ -1437,17 +1437,17 @@ js_StringMatchHelper(JSContext *cx, uint
     return ok;
 }
 
 static JSBool
 str_match(JSContext *cx, uintN argc, jsval *vp)
 {
     JSStackFrame *fp;
 
-    for (fp = cx->fp; fp && !fp->regs; fp = fp->down)
+    for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
         JS_ASSERT(!fp->script);
     return js_StringMatchHelper(cx, argc, vp, fp ? fp->regs->pc : NULL);
 }
 
 #ifdef JS_TRACER
 static JSObject* FASTCALL
 String_p_match(JSContext* cx, JSString* str, jsbytecode *pc, JSObject* regexp)
 {
@@ -2020,17 +2020,17 @@ str_split(JSContext *cx, uintN argc, jsv
     JSRegExp *re;
     JSSubString *sep, tmp;
     jsdouble d;
     jsint i, j;
     uint32 len, limit;
 
     NORMALIZE_THIS(cx, vp, str);
 
-    arrayobj = js_ConstructObject(cx, &js_ArrayClass, NULL, NULL, 0, NULL);
+    arrayobj = js_NewArrayObject(cx, 0, NULL);
     if (!arrayobj)
         return JS_FALSE;
     *vp = OBJECT_TO_JSVAL(arrayobj);
 
     if (argc == 0) {
         v = STRING_TO_JSVAL(str);
         ok = OBJ_SET_PROPERTY(cx, arrayobj, INT_TO_JSID(0), &v);
     } else {
@@ -2578,17 +2578,17 @@ String(JSContext *cx, JSObject *obj, uin
     if (argc > 0) {
         str = js_ValueToString(cx, argv[0]);
         if (!str)
             return JS_FALSE;
         argv[0] = STRING_TO_JSVAL(str);
     } else {
         str = cx->runtime->emptyString;
     }
-    if (!(cx->fp->flags & JSFRAME_CONSTRUCTING)) {
+    if (!JS_IsConstructing(cx)) {
         *rval = STRING_TO_JSVAL(str);
         return JS_TRUE;
     }
     STOBJ_SET_SLOT(obj, JSSLOT_PRIVATE, STRING_TO_JSVAL(str));
     return JS_TRUE;
 }
 
 static JSBool
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -689,17 +689,17 @@ public:
 class FuncFilter: public LirWriter
 {
 public:
     FuncFilter(LirWriter* out):
         LirWriter(out)
     {
     }
 
-    LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
+    JS_REQUIRES_STACK LInsp ins2(LOpcode v, LInsp s0, LInsp s1)
     {
         if (s0 == s1 && v == LIR_feq) {
             if (isPromote(s0)) {
                 // double(int) and double(uint) cannot be nan
                 return insImm(1);
             }
             if (s0->isop(LIR_fmul) || s0->isop(LIR_fsub) || s0->isop(LIR_fadd)) {
                 LInsp lhs = s0->oprnd1();
@@ -902,17 +902,17 @@ public:
 #define FORALL_SLOTS(cx, ngslots, gslots, callDepth, code)                    \
     JS_BEGIN_MACRO                                                            \
         FORALL_GLOBAL_SLOTS(cx, ngslots, gslots, code);                       \
         FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth, code);                  \
     JS_END_MACRO
 
 /* Calculate the total number of native frame slots we need from this frame
    all the way back to the entry frame, including the current stack usage. */
-unsigned
+JS_REQUIRES_STACK unsigned
 js_NativeStackSlots(JSContext *cx, unsigned callDepth)
 {
     JSStackFrame* fp = cx->fp;
     unsigned slots = 0;
 #if defined _DEBUG
     unsigned int origCallDepth = callDepth;
 #endif
     for (;;) {
@@ -935,17 +935,17 @@ js_NativeStackSlots(JSContext *cx, unsig
         int missing = fp2->fun->nargs - fp2->argc;
         if (missing > 0)
             slots += missing;
     }
     JS_NOT_REACHED("js_NativeStackSlots");
 }
 
 /* Capture the type map for the selected slots of the global object. */
-void
+JS_REQUIRES_STACK void
 TypeMap::captureGlobalTypes(JSContext* cx, SlotList& slots)
 {
     unsigned ngslots = slots.length();
     uint16* gslots = slots.data();
     setLength(ngslots);
     uint8* map = data();
     uint8* m = map;
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
@@ -954,17 +954,17 @@ TypeMap::captureGlobalTypes(JSContext* c
             type = JSVAL_DOUBLE;
         JS_ASSERT(type != JSVAL_BOXED);
         debug_only_v(printf("capture global type %s%d: %d=%c\n", vpname, vpnum, type, typeChar[type]);)
         *m++ = type;
     );
 }
 
 /* Capture the type map for the currently pending stack frames. */
-void
+JS_REQUIRES_STACK void
 TypeMap::captureStackTypes(JSContext* cx, unsigned callDepth)
 {
     setLength(js_NativeStackSlots(cx, callDepth));
     uint8* map = data();
     uint8* m = map;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         uint8 type = getCoercedType(*vp);
         if ((type == JSVAL_INT) &&
@@ -996,16 +996,17 @@ mergeTypeMaps(uint8** partial, unsigned*
     memcpy(mem + l, complete + l, (clength - l) * sizeof(uint8));
     *partial = mem;
     *plength = clength;
 }
 
 static void
 js_TrashTree(JSContext* cx, Fragment* f);
 
+JS_REQUIRES_STACK
 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _fragment,
         TreeInfo* ti, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
         VMSideExit* innermostNestedGuard, Fragment* outerToBlacklist)
 {
     JS_ASSERT(!_fragment->vmprivate && ti);
 
     this->cx = cx;
     this->traceMonitor = &JS_TRACE_MONITOR(cx);
@@ -1159,17 +1160,17 @@ TraceRecorder::nativeGlobalOffset(jsval*
 bool
 TraceRecorder::isGlobal(jsval* p) const
 {
     return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
             (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
 }
 
 /* Determine the offset in the native stack for a jsval we track */
-ptrdiff_t
+JS_REQUIRES_STACK ptrdiff_t
 TraceRecorder::nativeStackOffset(jsval* p) const
 {
 #ifdef DEBUG
     size_t slow_offset = 0;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         if (vp == p) goto done;
         slow_offset += sizeof(double)
     );
@@ -1405,43 +1406,43 @@ NativeToValue(JSContext* cx, jsval& v, u
                             ? "null"
                             : STOBJ_GET_CLASS(JSVAL_TO_OBJECT(v))->name);)
         break;
     }
     return true;
 }
 
 /* Attempt to unbox the given list of interned globals onto the native global frame. */
-static void
+static JS_REQUIRES_STACK void
 BuildNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
 {
     debug_only_v(printf("global: ");)
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         ValueToNative(cx, *vp, *mp, np + gslots[n]);
         ++mp;
     );
     debug_only_v(printf("\n");)
 }
 
 /* Attempt to unbox the given JS frame onto a native frame. */
-static void
+static JS_REQUIRES_STACK void
 BuildNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np)
 {
     debug_only_v(printf("stack: ");)
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         debug_only_v(printf("%s%u=", vpname, vpnum);)
         ValueToNative(cx, *vp, *mp, np);
         ++mp; ++np;
     );
     debug_only_v(printf("\n");)
 }
 
 /* Box the given native frame into a JS frame. This only fails due to a hard error
    (out of memory for example). */
-static int
+static JS_REQUIRES_STACK int
 FlushNativeGlobalFrame(JSContext* cx, unsigned ngslots, uint16* gslots, uint8* mp, double* np)
 {
     uint8* mp_base = mp;
     FORALL_GLOBAL_SLOTS(cx, ngslots, gslots,
         if (!NativeToValue(cx, *vp, *mp, np + gslots[n]))
             return -1;
         ++mp;
     );
@@ -1459,17 +1460,17 @@ FlushNativeGlobalFrame(JSContext* cx, un
  * @param mp pointer to an array of type tags (JSVAL_INT, etc.) that indicate
  *           what the types of the things on the stack are.
  * @param np pointer to the native stack.  We want to copy values from here to
  *           the JS stack as needed.
  * @param stopFrame if non-null, this frame and everything above it should not
  *                  be restored.
  * @return the number of things we popped off of np.
  */
-static int
+static JS_REQUIRES_STACK int
 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, uint8* mp, double* np,
                       JSStackFrame* stopFrame)
 {
     jsval* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
     uint8* mp_base = mp;
     /* Root all string and object references first (we don't need to call the GC for this). */
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         if (vp == stopAt) goto skip;
@@ -1503,17 +1504,17 @@ skip:
             }
         }
     }
     debug_only_v(printf("\n");)
     return mp - mp_base;
 }
 
 /* Emit load instructions onto the trace that read the initial stack state. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::import(LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
                       const char *prefix, uintN index, JSStackFrame *fp)
 {
     LIns* ins;
     if (t == JSVAL_INT) { /* demoted */
         JS_ASSERT(isInt32(*p));
         /* Ok, we have a valid demotion attempt pending, so insert an integer
            read and promote it to double since all arithmetic operations expect
@@ -1565,17 +1566,17 @@ TraceRecorder::import(LIns* base, ptrdif
     static const char* typestr[] = {
         "object", "int", "double", "3", "string", "5", "boolean", "any"
     };
     debug_only_v(printf("import vp=%p name=%s type=%s flags=%d\n",
                         p, name, typestr[t & 7], t >> 3);)
 #endif
 }
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::import(TreeInfo* treeInfo, LIns* sp, unsigned ngslots, unsigned callDepth,
                       uint8* globalTypeMap, uint8* stackTypeMap)
 {
     /* If we get a partial list that doesn't have all the types (i.e. recording from a side
        exit that was recorded but we added more global slots later), merge the missing types
        from the entry type map. This is safe because at the loop edge we verify that we
        have compatible types for all globals (entry type and loop edge type match). While
        a different trace of the tree might have had a guard with a different type map for
@@ -1602,17 +1603,17 @@ TraceRecorder::import(TreeInfo* treeInfo
     m = stackTypeMap;
     FORALL_SLOTS_IN_PENDING_FRAMES(cx, callDepth,
         import(sp, offset, vp, *m, vpname, vpnum, fp);
         m++; offset += sizeof(double);
     );
 }
 
 /* Lazily import a global slot if we don't already have it in the tracker. */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::lazilyImportGlobalSlot(unsigned slot)
 {
     if (slot != uint16(slot)) /* we use a table of 16-bit ints, bail out if that's not enough */
         return false;
     jsval* vp = &STOBJ_GET_SLOT(globalObj, slot);
     if (tracker.has(vp))
         return true; /* we already have it */
     unsigned index = traceMonitor->globalSlots->length();
@@ -1637,17 +1638,17 @@ TraceRecorder::writeBack(LIns* i, LIns* 
        (uncasted) value. Each guard generates the side exit map based on the types of the
        last stores to every stack location, so it's safe not to perform them on-trace. */
     if (isPromoteInt(i))
         i = ::demote(lir, i);
     return lir->insStorei(i, base, offset);
 }
 
 /* Update the tracker, then issue a write back store. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::set(jsval* p, LIns* i, bool initializing)
 {
     JS_ASSERT(initializing || tracker.has(p));
     tracker.set(p, i);
     /* If we are writing to this location for the first time, calculate the offset into the
        native frame manually, otherwise just look up the last load or store associated with
        the same source address (p) and use the same offset/base. */
     LIns* x = nativeFrameTracker.get(p);
@@ -1671,17 +1672,17 @@ TraceRecorder::set(jsval* p, LIns* i, bo
             JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
             ASSERT_VALID_CACHE_HIT(x->oprnd2(), x->immdisp());
             writeBack(i, x->oprnd2(), x->immdisp());
         }
     }
 #undef ASSERT_VALID_CACHE_HIT
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::get(jsval* p) const
 {
     return tracker.get(p);
 }
 
 /* Determine whether the current branch instruction terminates the loop. */
 static bool
 js_IsLoopExit(jsbytecode* pc, jsbytecode* header)
@@ -1727,17 +1728,17 @@ js_IsLoopExit(jsbytecode* pc, jsbytecode
         return pc + GET_JUMPX_OFFSET(pc) == header;
 
       default:;
     }
     return false;
 }
 
 /* Determine whether the current branch is a loop edge (taken or not taken). */
-static bool
+static JS_REQUIRES_STACK bool
 js_IsLoopEdge(jsbytecode* pc, jsbytecode* header)
 {
     switch (*pc) {
       case JSOP_IFEQ:
       case JSOP_IFNE:
         return ((pc + GET_JUMP_OFFSET(pc)) == header);
       case JSOP_IFEQX:
       case JSOP_IFNEX:
@@ -1746,17 +1747,17 @@ js_IsLoopEdge(jsbytecode* pc, jsbytecode
         JS_ASSERT((*pc == JSOP_AND) || (*pc == JSOP_ANDX) || 
                   (*pc == JSOP_OR) || (*pc == JSOP_ORX));
     }
     return false;
 }
 
 /* Promote slots if necessary to match the called tree's type map and report an error if that's
    impossible. */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::adjustCallerTypes(Fragment* f, unsigned* demote_slots, bool& trash)
 {
     JSTraceMonitor* tm = traceMonitor;
     uint8* m = tm->globalTypeMap->data();
     uint16* gslots = traceMonitor->globalSlots->data();
     unsigned ngslots = traceMonitor->globalSlots->length();
     uint8* map = ((TreeInfo*)f->vmprivate)->stackTypeMap.data();
     bool ok = true;
@@ -1796,17 +1797,17 @@ TraceRecorder::adjustCallerTypes(Fragmen
     if (!ok) {
         for (unsigned i = 1; i <= NUM_UNDEMOTE_SLOTS(demote_slots); i++)
             oracle.markStackSlotUndemotable(cx, demote_slots[i]);
     }
     JS_ASSERT(f == f->root);
     return ok;
 }
 
-uint8 
+JS_REQUIRES_STACK uint8
 TraceRecorder::determineSlotType(jsval* vp) const
 {
     uint8 m;
     LIns* i = get(vp);
     m = isNumber(*vp)
         ? (isPromoteInt(i) ? JSVAL_INT : JSVAL_DOUBLE)
         : JSVAL_TAG(*vp);
     JS_ASSERT((m != JSVAL_INT) || isInt32(*vp));
@@ -1843,17 +1844,17 @@ TraceRecorder::determineSlotType(jsval* 
                               ? (fp)->imacpc = (fp)->script->code +           \
                                                SCRIPT_PC_ADJ(ip),             \
                                 (fp)->regs->pc = imacro_code[*(fp)->imacpc] + \
                                                  IMACRO_PC_ADJ(ip)            \
                               : (fp)->regs->pc = (fp)->script->code + (ip))
 
 static jsbytecode* imacro_code[JSOP_LIMIT];
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::snapshot(ExitType exitType)
 {
     JSStackFrame* fp = cx->fp;
     JSFrameRegs* regs = fp->regs;
     jsbytecode* pc = regs->pc;
     if (exitType == BRANCH_EXIT && js_IsLoopExit(pc, (jsbytecode*)fragment->root->ip))
         exitType = LOOP_EXIT;
 
@@ -1972,33 +1973,33 @@ TraceRecorder::snapshot(ExitType exitTyp
 LIns*
 TraceRecorder::guard(bool expected, LIns* cond, LIns* exit)
 {
     return lir->insGuard(expected ? LIR_xf : LIR_xt, cond, exit);
 }
 
 /* Emit a guard for condition (cond), expecting to evaluate to boolean result (expected)
    and generate a side exit with type exitType to jump to if the condition does not hold. */
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::guard(bool expected, LIns* cond, ExitType exitType)
 {
     return guard(expected, cond, snapshot(exitType));
 }
 
 /* Try to match the type of a slot to type t. checkType is used to verify that the type of
  * values flowing into the loop edge is compatible with the type we expect in the loop header.
  *
  * @param v             Value.
  * @param t             Typemap entry for value.
  * @param stage_val     Outparam for set() address.
  * @param stage_ins     Outparam for set() instruction.
  * @param stage_count   Outparam for set() buffer count.
  * @return              True if types are compatible, false otherwise.
  */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::checkType(jsval& v, uint8 t, jsval*& stage_val, LIns*& stage_ins, 
                          unsigned& stage_count)
 {
     if (t == JSVAL_INT) { /* initially all whole numbers cause the slot to be demoted */
         debug_only_v(printf("checkType(tag=1, t=%d, isnum=%d, i2f=%d) stage_count=%d\n", 
                             t,
                             isNumber(v),
                             isPromoteInt(get(&v)),
@@ -2052,17 +2053,17 @@ TraceRecorder::checkType(jsval& v, uint8
  * up and including entryFrame are type-compatible with the entry map.
  *
  * @param root_peer         First fragment in peer list.
  * @param stable_peer       Outparam for first type stable peer.
  * @param trash             Whether to trash the tree (demotion).
  * @param demotes           Array to store demotable stack slots.
  * @return                  True if type stable, false otherwise.
  */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::deduceTypeStability(Fragment* root_peer, Fragment** stable_peer, unsigned* demotes)
 {
     uint8* m;
     uint8* typemap;
     unsigned ngslots = traceMonitor->globalSlots->length();
     uint16* gslots = traceMonitor->globalSlots->data();
     JS_ASSERT(traceMonitor->globalTypeMap->length() == ngslots);
 
@@ -2197,24 +2198,24 @@ checktype_fail_2:
         );
         return true;
     }
 
     return false;
 }
 
 /* Check whether the current pc location is the loop header of the loop this recorder records. */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::isLoopHeader(JSContext* cx) const
 {
     return cx->fp->regs->pc == fragment->root->ip;
 }
 
 /* Compile the current fragment. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::compile(Fragmento* fragmento)
 {
     if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
         debug_only_v(printf("Trace rejected: excessive stack use.\n"));
         js_BlacklistPC(fragmento, fragment);
         return;
     }
     ++treeInfo->branchCount;
@@ -2261,17 +2262,17 @@ js_JoinPeersIfCompatible(Fragmento* frag
     frago->assm()->patch(exit);
 
     stableTree->dependentTrees.addUnique(exit->from->root);
 
     return true;
 }
 
 /* Complete and compile a trace and link it to the existing tree if appropriate. */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::closeLoop(Fragmento* fragmento, bool& demote, unsigned *demotes)
 {
     bool stable;
     LIns* exitIns;
     Fragment* peer;
     VMSideExit* exit;
     Fragment* peer_root;
 
@@ -2362,17 +2363,17 @@ TraceRecorder::closeLoop(Fragmento* frag
 
     debug_only_v(printf("recording completed at %s:%u@%u via closeLoop\n",
                         cx->fp->script->filename,
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp));)
     return true;
 }
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, Fragment* peer_root)
 {
     if (fragment->kind == LoopTrace) {
         TreeInfo* ti;
         Fragment* peer;
         uint8* t1, *t2;
         UnstableExit* uexit, **unext;
 
@@ -2427,17 +2428,17 @@ TraceRecorder::joinEdgesToEntry(Fragment
             } 
         } 
     }
 
     debug_only_v(js_DumpPeerStability(fragmento, peer_root->ip);)
 }
 
 /* Emit an always-exit guard and compile the tree (used for break statements). */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::endLoop(Fragmento* fragmento)
 {
     LIns* exitIns = snapshot(LOOP_EXIT);
 
     if (callDepth != 0) {
         debug_only_v(printf("Stack depth mismatch, possible recursion\n");)
         js_BlacklistPC(fragmento, fragment);
         trashSelf = true;
@@ -2454,17 +2455,17 @@ TraceRecorder::endLoop(Fragmento* fragme
 
     debug_only_v(printf("recording completed at %s:%u@%u via endLoop\n",
                         cx->fp->script->filename,
                         js_FramePCToLineNumber(cx, cx->fp),
                         FramePCOffset(cx->fp));)
 }
 
 /* Emit code to adjust the stack to match the inner tree's stack expectations. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::prepareTreeCall(Fragment* inner)
 {
     TreeInfo* ti = (TreeInfo*)inner->vmprivate;
     inner_sp_ins = lirbuf->sp;
     /* The inner tree expects to be called from the current frame. If the outer tree (this
        trace) is currently inside a function inlining code (calldepth > 0), we have to advance
        the native stack pointer such that we match what the inner trace expects to see. We
        move it back when we come out of the inner tree call. */
@@ -2494,17 +2495,17 @@ TraceRecorder::prepareTreeCall(Fragment*
                 + ti->nativeStackBase), /* plus the inner tree's stack base */
                 lirbuf->state, offsetof(InterpState, sp));
         lir->insStorei(lir->ins2i(LIR_piadd, lirbuf->rp, rp_adj),
                 lirbuf->state, offsetof(InterpState, rp));
     }
 }
 
 /* Record a call to an inner tree. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
 {
     TreeInfo* ti = (TreeInfo*)inner->vmprivate;
     /* Invoke the inner tree. */
     LIns* args[] = { INS_CONSTPTR(inner), lirbuf->state }; /* reverse order */
     LIns* ret = lir->insCall(&js_CallTree_ci, args);
     /* Read back all registers, in case the called tree changed any of them. */
     import(ti, inner_sp_ins, exit->numGlobalSlots, exit->calldepth,
@@ -2517,17 +2518,17 @@ TraceRecorder::emitTreeCall(Fragment* in
     /* Guard that we come out of the inner tree along the same side exit we came out when
        we called the inner tree at recording time. */
     guard(true, lir->ins2(LIR_eq, ret, INS_CONSTPTR(exit)), NESTED_EXIT);
     /* Register us as a dependent tree of the inner tree. */
     ((TreeInfo*)inner->vmprivate)->dependentTrees.addUnique(fragment->root);
 }
 
 /* Add an if/if-else control-flow merge point to the list of known merge points. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::trackCfgMerges(jsbytecode* pc)
 {
     /* If we hit the beginning of an if/if-else, then keep track of the merge point after it. */
     JS_ASSERT((*pc == JSOP_IFEQ) || (*pc == JSOP_IFEQX));
     jssrcnote* sn = js_GetSrcNote(cx->fp->script, pc);
     if (sn != NULL) {
         if (SN_TYPE(sn) == SRC_IF) {
             cfgMerges.add((*pc == JSOP_IFEQ) 
@@ -2535,17 +2536,17 @@ TraceRecorder::trackCfgMerges(jsbytecode
                           : pc + GET_JUMPX_OFFSET(pc));
         } else if (SN_TYPE(sn) == SRC_IF_ELSE) 
             cfgMerges.add(pc + js_GetSrcNoteOffset(sn, 0));
     }
 }
 
 /* Invert the direction of the guard if this is a loop edge that is not 
    taken (thin loop). */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::flipIf(jsbytecode* pc, bool& cond)
 {
     if (js_IsLoopEdge(pc, (jsbytecode*)fragment->root->ip)) {
         switch (*pc) {
           case JSOP_IFEQ:
           case JSOP_IFEQX:
             if (!cond)
                 return;
@@ -2571,17 +2572,17 @@ TraceRecorder::flipIf(jsbytecode* pc, bo
             pc += GET_JUMPX_OFFSET(pc);
         else
             pc += GET_JUMP_OFFSET(pc);
         terminate_ip_adj = ENCODE_IP_ADJ(cx->fp, pc);
     }
 }
 
 /* Emit code for a fused IFEQ/IFNE. */
-void
+JS_REQUIRES_STACK void
 TraceRecorder::fuseIf(jsbytecode* pc, bool cond, LIns* x)
 {
     if (x->isconst()) // no need to guard if condition is constant
         return;
     if (*pc == JSOP_IFEQ) {
         flipIf(pc, cond);
         guard(cond, x, BRANCH_EXIT);
         trackCfgMerges(pc); 
@@ -2711,17 +2712,17 @@ js_CheckGlobalObjectShape(JSContext* cx,
         AUDIT(globalShapeMismatchAtEntry);
         debug_only_v(printf("Global shape mismatch (%u vs. %u), flushing cache.\n",
                             OBJ_SHAPE(globalObj), tm->globalShape);)
         return false;
     }
     return true;
 }
 
-static bool
+static JS_REQUIRES_STACK bool
 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
                  unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap, 
                  VMSideExit* expectedInnerExit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /*
      * Emulate on-trace semantics and avoid rooting headaches while recording,
@@ -2764,16 +2765,18 @@ js_TrashTree(JSContext* cx, Fragment* f)
         js_TrashTree(cx, data[n]);
     delete ti;
     JS_ASSERT(!f->code() && !f->vmprivate);
 }
 
 static int
 js_SynthesizeFrame(JSContext* cx, const FrameInfo& fi)
 {
+    VOUCH_DOES_NOT_REQUIRE_STACK();
+
     JS_ASSERT(HAS_FUNCTION_CLASS(fi.callee));
 
     JSFunction* fun = GET_FUNCTION_PRIVATE(cx, fi.callee);
     JS_ASSERT(FUN_INTERPRETED(fun));
 
     /* Assert that we have a correct sp distance from cx->fp->slots in fi. */
     JS_ASSERT_IF(!FI_IMACRO_PC(fi, cx->fp),
                  js_ReconstructStackDepth(cx, cx->fp->script, FI_SCRIPT_PC(fi, cx->fp))
@@ -2936,17 +2939,17 @@ static void
 js_dumpMap(TypeMap const & tm) {
     uint8 *data = tm.data();
     for (unsigned i = 0; i < tm.length(); ++i) {
         printf("typemap[%d] = %c\n", i, typeChar[data[i]]);
     }
 }
 #endif
 
-bool
+JS_REQUIRES_STACK bool
 js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, Fragment* outer, unsigned* demotes)
 {
     JS_ASSERT(cx->fp->regs->pc == f->ip && f->root == f);
     
     /* Avoid recording loops in overlarge scripts. */
     if (cx->fp->script->length >= SCRIPT_PC_ADJ_LIMIT) {
         js_AbortRecording(cx, "script too large");
         return false;
@@ -3039,17 +3042,17 @@ js_RecordTree(JSContext* cx, JSTraceMoni
                           tm->globalSlots->length(), tm->globalTypeMap->data(), 
                           ti->stackTypeMap.data(), NULL, outer)) {
         return false;
     }
 
     return true;
 }
 
-static bool
+static JS_REQUIRES_STACK bool
 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, Fragment* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     Fragment* from = exit->from->root;
     unsigned* demotes;
 
     JS_ASSERT(exit->from->root->code());
     
@@ -3068,17 +3071,17 @@ js_AttemptToStabilizeTree(JSContext* cx,
     if (!js_RecordTree(cx, tm, from->first, outer, demotes))
         return false;
 
     tm->recorder->setPromotedPeer(demotes ? from : NULL);
 
     return true;
 }
 
-static bool
+static JS_REQUIRES_STACK bool
 js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, Fragment* outer)
 {
     Fragment* f = anchor->from->root;
     JS_ASSERT(f->vmprivate);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
 
     /* Don't grow trees above a certain size to avoid code explosion due to tail duplication. */
     if (ti->branchCount >= MAX_BRANCHES)
@@ -3122,24 +3125,24 @@ js_AttemptToExtendTree(JSContext* cx, VM
             stackTypeMap = fullMap.data();
         } 
         return js_StartRecorder(cx, anchor, c, (TreeInfo*)f->vmprivate,
                                 ngslots, globalTypeMap, stackTypeMap, exitedFrom, outer);
     }
     return false;
 }
 
-static VMSideExit*
+static JS_REQUIRES_STACK VMSideExit*
 js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount, 
                VMSideExit** innermostNestedGuardp);
 
-static Fragment*
+static JS_REQUIRES_STACK Fragment*
 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f);
 
-static bool
+static JS_REQUIRES_STACK bool
 js_CloseLoop(JSContext* cx)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     Fragmento* fragmento = tm->fragmento;
     TraceRecorder* r = tm->recorder;
     JS_ASSERT(fragmento && r);
     bool walkedOutOfLoop = r->walkedOutOfLoop();
     
@@ -3164,17 +3167,17 @@ js_CloseLoop(JSContext* cx)
      * If we just walked out of a thin loop, we can't immediately start the 
      * compiler again here since we didn't return to the loop header.
      */
     if (demote && !walkedOutOfLoop)
         return js_RecordTree(cx, tm, f, NULL, demotes);
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 js_RecordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
 {
 #ifdef JS_THREADSAFE
     if (OBJ_SCOPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain))->title.ownercx != cx) {
         js_AbortRecording(cx, "Global object not owned by this context");
         return false; /* we stay away from shared global objects */
     }
 #endif
@@ -3331,17 +3334,18 @@ js_IsEntryTypeCompatible(jsval* vp, uint
         JS_ASSERT(*m == JSVAL_OBJECT);
         if (tag == JSVAL_OBJECT)
             return true;
         debug_only_v(printf("object != tag%u", tag);)
         return false;
     }
 }
 
-Fragment* TraceRecorder::findNestedCompatiblePeer(Fragment* f, Fragment** empty)
+JS_REQUIRES_STACK Fragment*
+TraceRecorder::findNestedCompatiblePeer(Fragment* f, Fragment** empty)
 {
     Fragment* demote;
     JSTraceMonitor* tm;
     unsigned max_demotes;
 
     if (empty)
         *empty = NULL;
     demote = NULL;
@@ -3416,17 +3420,17 @@ check_fail:
 
 /**
  * Check if types are usable for trace execution.
  *
  * @param cx            Context.
  * @param ti            Tree info of peer we're testing.
  * @return              True if compatible (with or without demotions), false otherwise.
  */
-static bool
+static JS_REQUIRES_STACK bool
 js_CheckEntryTypes(JSContext* cx, TreeInfo* ti)
 {
     JSTraceMonitor* tm;
 
     tm = &JS_TRACE_MONITOR(cx);
     unsigned int ngslots = tm->globalSlots->length();
     uint16* gslots = tm->globalSlots->data();
     uint8* m = tm->globalTypeMap->data();
@@ -3459,17 +3463,17 @@ check_fail:
 
 /**
  * Find an acceptable entry tree given a PC.
  *
  * @param cx            Context.
  * @param f             First peer fragment.
  * @param nodemote      If true, will try to find a peer that does not require demotion.
  */
-static Fragment*
+static JS_REQUIRES_STACK Fragment*
 js_FindVMCompatiblePeer(JSContext* cx, Fragment* f)
 {
     for (; f != NULL; f = f->peer) {
         if (f->vmprivate == NULL) 
             continue;
         debug_only_v(printf("checking vm types %p (ip: %p): ", f, f->ip);)
         if (js_CheckEntryTypes(cx, (TreeInfo*)f->vmprivate))
             return f;
@@ -3715,17 +3719,17 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     }
 #endif
 
     AUDIT(sideExitIntoInterpreter);
 
     return innermost;
 }
 
-bool
+JS_REQUIRES_STACK bool
 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Is the recorder currently active? */
     if (tm->recorder) {
         if (js_RecordLoopEdge(cx, tm->recorder, inlineCallCount))
             return true;
@@ -3794,17 +3798,17 @@ monitor_loop:
             return js_AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL);
         return false;
       default:
         /* No, this was an unusual exit (i.e. out of memory/GC), so just resume interpretation. */
         return false;
     }
 }
 
-bool
+JS_REQUIRES_STACK bool
 js_MonitorRecording(TraceRecorder* tr)
 {
     JSContext* cx = tr->cx;
 
     if (tr->lirbuf->outOmem()) {
         js_AbortRecording(cx, "no more LIR memory");
         js_FlushJITCache(cx);
         return false;
@@ -3859,17 +3863,17 @@ js_MonitorRecording(TraceRecorder* tr)
 void
 js_BlacklistPC(Fragmento* frago, Fragment* frag)
 {
     if (frag->kind == LoopTrace)
         frag = frago->getLoop(frag->ip);
     frag->blacklist();
 }
 
-void
+JS_REQUIRES_STACK void
 js_AbortRecording(JSContext* cx, const char* reason)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(tm->recorder != NULL);
     AUDIT(recorderAborted);
 
     /* Abort the trace and blacklist its starting point. */
     JSStackFrame* fp = cx->fp;
@@ -4026,17 +4030,17 @@ TraceRecorder::popAbortStack()
 extern void
 js_FlushJITOracle(JSContext* cx)
 {
     if (!TRACING_ENABLED(cx))
         return;
     oracle.clear();
 }
 
-extern void
+extern JS_REQUIRES_STACK void
 js_FlushJITCache(JSContext* cx)
 {
     if (!TRACING_ENABLED(cx))
         return;
     debug_only_v(printf("Flushing cache.\n");)
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     if (tm->recorder)
         js_AbortRecording(cx, "flush cache");
@@ -4057,38 +4061,54 @@ js_FlushJITCache(JSContext* cx)
     }
     if (cx->fp) {
         tm->globalShape = OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain));
         tm->globalSlots->clear();
         tm->globalTypeMap->clear();
     }
 }
 
-jsval&
+JS_FORCES_STACK JSStackFrame *
+js_GetTopStackFrame(JSContext *cx)
+{
+    if (JS_EXECUTING_TRACE(cx)) {
+        /*
+         * TODO: If executing a tree, synthesize stack frames and bail off
+         * trace. See bug 462027.
+         */
+        debug_only_v(printf("Internal error: getting top stack frame on trace.\n"));
+#ifdef DEBUG_jason
+        JS_ASSERT(0);
+#endif
+    }
+    return cx->fp;
+}
+
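This is the conversion applied at cx->fp read sites throughout the patch: hoist the barriered frame pointer into a local and use it for all subsequent reads. The function below is hypothetical and not part of the change.

    /* Hypothetical function showing the converted read pattern. */
    static void
    ExampleFrameUser(JSContext *cx)
    {
        /* Before: if (cx->fp && cx->fp->regs) pc = cx->fp->regs->pc; */
        JSStackFrame *fp = js_GetTopStackFrame(cx);
        if (fp && fp->regs) {
            jsbytecode *pc = fp->regs->pc;
            (void) pc;
        }
    }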
+JS_REQUIRES_STACK jsval&
 TraceRecorder::argval(unsigned n) const
 {
     JS_ASSERT(n < cx->fp->fun->nargs);
     return cx->fp->argv[n];
 }
 
-jsval&
+JS_REQUIRES_STACK jsval&
 TraceRecorder::varval(unsigned n) const
 {
     JS_ASSERT(n < cx->fp->script->nslots);
     return cx->fp->slots[n];
 }
 
-jsval&
+JS_REQUIRES_STACK jsval&
 TraceRecorder::stackval(int n) const
 {
     jsval* sp = cx->fp->regs->sp;
     return sp[n];
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::scopeChain() const
 {
     return lir->insLoad(LIR_ldp,
                         lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)),
                         offsetof(JSStackFrame, scopeChain));
 }
 
 static inline bool
@@ -4098,17 +4118,17 @@ FrameInRange(JSStackFrame* fp, JSStackFr
         if (callDepth-- == 0)
             return false;
         if (!(fp = fp->down))
             return false;
     }
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::activeCallOrGlobalSlot(JSObject* obj, jsval*& vp)
 {
     JS_ASSERT(obj != globalObj);
 
     JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
     JSObject* obj2;
     JSProperty* prop;
     if (js_FindProperty(cx, ATOM_TO_JSID(atom), &obj, &obj2, &prop) < 0 || !prop)
@@ -4151,47 +4171,47 @@ TraceRecorder::activeCallOrGlobalSlot(JS
             return true;
         }
     }
 
     OBJ_DROP_PROPERTY(cx, obj2, prop);
     ABORT_TRACE("fp->scopeChain is not global or active call object");
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::arg(unsigned n)
 {
     return get(&argval(n));
 }
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::arg(unsigned n, LIns* i)
 {
     set(&argval(n), i);
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::var(unsigned n)
 {
     return get(&varval(n));
 }
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::var(unsigned n, LIns* i)
 {
     set(&varval(n), i);
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::stack(int n)
 {
     return get(&stackval(n));
 }
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::stack(int n, LIns* i)
 {
     set(&stackval(n), i, n >= 0);
 }
 
 LIns*
 TraceRecorder::alu(LOpcode v, jsdouble v0, jsdouble v1, LIns* s0, LIns* s1)
 {
@@ -4243,17 +4263,17 @@ TraceRecorder::alu(LOpcode v, jsdouble v
 }
 
 LIns*
 TraceRecorder::f2i(LIns* f)
 {
     return lir->insCall(&js_DoubleToInt32_ci, &f);
 }
 
-LIns*
+JS_REQUIRES_STACK LIns*
 TraceRecorder::makeNumberInt32(LIns* f)
 {
     JS_ASSERT(f->isQuad());
     LIns* x;
     if (!isPromote(f)) {
         x = f2i(f);
         guard(true, lir->ins2(LIR_feq, f, lir->ins1(LIR_i2f, x)), MISMATCH_EXIT);
     } else {
@@ -4294,17 +4314,17 @@ TraceRecorder::call_imacro(jsbytecode* i
         fp->imacpc = regs->pc;
         fp->flags |= JSFRAME_IMACRO_START;
         regs->pc = imacro;
         atoms = COMMON_ATOMS_START(&cx->runtime->atomState);
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::ifop()
 {
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
     bool cond;
     LIns* x;
     /* no need to guard if condition is constant */
     if (v_ins->isconst() || v_ins->isconstq())
@@ -4340,17 +4360,17 @@ TraceRecorder::ifop()
     if (!x->isCond()) {
         x = lir->ins_eq0(x);
         expected = !expected;
     }
     guard(expected, x, BRANCH_EXIT); 
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::switchop()
 {
     jsval& v = stackval(-1);
     LIns* v_ins = get(&v);
     /* no need to guard if condition is constant */
     if (v_ins->isconst() || v_ins->isconstq())
         return true;
     if (isNumber(v)) {
@@ -4373,31 +4393,31 @@ TraceRecorder::switchop()
                       "guard(switch on boolean)"),
               BRANCH_EXIT);
     } else {
         ABORT_TRACE("switch on object or null");
     }
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::inc(jsval& v, jsint incr, bool pre)
 {
     LIns* v_ins = get(&v);
     if (!inc(v, v_ins, incr, pre))
         return false;
     set(&v, v_ins);
     return true;
 }
 
 /*
  * On exit, v_ins is the incremented unboxed value, and the appropriate
  * value (pre- or post-increment as described by pre) is stacked.
  */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::inc(jsval& v, LIns*& v_ins, jsint incr, bool pre)
 {
     if (!isNumber(v))
         ABORT_TRACE("can only inc numbers");
 
     jsdpun u;
     u.d = jsdouble(incr);
 
@@ -4405,17 +4425,17 @@ TraceRecorder::inc(jsval& v, LIns*& v_in
 
     const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
     JS_ASSERT(cs.ndefs == 1);
     stack(-cs.nuses, pre ? v_after : v_ins);
     v_ins = v_after;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::incProp(jsint incr, bool pre)
 {
     jsval& l = stackval(-1);
     if (JSVAL_IS_PRIMITIVE(l))
         ABORT_TRACE("incProp on primitive");
 
     JSObject* obj = JSVAL_TO_OBJECT(l);
     LIns* obj_ins = get(&l);
@@ -4435,17 +4455,17 @@ TraceRecorder::incProp(jsint incr, bool 
     if (!box_jsval(v, v_ins))
         return false;
 
     LIns* dslots_ins = NULL;
     stobj_set_slot(obj_ins, slot, dslots_ins, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::incElem(jsint incr, bool pre)
 {
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
     jsval* vp;
     LIns* v_ins;
     LIns* addr_ins;
     if (!elem(l, r, vp, v_ins, addr_ins))
@@ -4535,17 +4555,17 @@ static struct {
         JSOP_CALL, 0, 1,
         JSOP_IMACOP,
         JSOP_STOP
     }
 };
 
 JS_STATIC_ASSERT(sizeof(binary_imacros) < IMACRO_PC_ADJ_LIMIT);
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::cmp(LOpcode op, int flags)
 {
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
     LIns* x = NULL;
     bool negate = !!(flags & CMP_NEGATE);
     bool cond;
     LIns* l_ins = get(&l);
@@ -4701,17 +4721,17 @@ TraceRecorder::cmp(LOpcode op, int flags
        the guard bails out at the comparison and the interpreter
        will therefore re-execute the comparison. This way the
        value of the condition doesn't have to be calculated and
        saved on the stack in most cases. */
     set(&l, x);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::unary(LOpcode op)
 {
     jsval& v = stackval(-1);
     bool intop = !(op & LIR64);
     if (isNumber(v)) {
         LIns* a = get(&v);
         if (intop)
             a = f2i(a);
@@ -4719,17 +4739,17 @@ TraceRecorder::unary(LOpcode op)
         if (intop)
             a = lir->ins1(LIR_i2f, a);
         set(&v, a);
         return true;
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::binary(LOpcode op)
 {
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
 
     if (JSVAL_IS_OBJECT(l) && hasValueOfMethod(l)) {
         if (JSVAL_IS_OBJECT(r) && hasValueOfMethod(r))
             return call_imacro(binary_imacros.obj_obj);
@@ -4809,17 +4829,17 @@ TraceRecorder::map_is_native(JSObjectMap
               MISMATCH_EXIT);
         return true;
     }
 
 #undef OP
     ABORT_TRACE("non-native map");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::test_property_cache(JSObject* obj, LIns* obj_ins, JSObject*& obj2, jsuword& pcval)
 {
     jsbytecode* pc = cx->fp->regs->pc;
     JS_ASSERT(*pc != JSOP_INITPROP && *pc != JSOP_SETNAME && *pc != JSOP_SETPROP);
 
     // Mimic the interpreter's special case for dense arrays by skipping up one
     // hop along the proto chain when accessing a named (not indexed) property,
     // typically to find Array.prototype methods.
@@ -4962,17 +4982,17 @@ TraceRecorder::test_property_cache(JSObj
               addName(lir->ins2i(LIR_eq, shape_ins, vshape), "guard(vshape)"),
               MISMATCH_EXIT);
     }
 
     pcval = entry->vword;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::test_property_cache_direct_slot(JSObject* obj, LIns* obj_ins, uint32& slot)
 {
     JSObject* obj2;
     jsuword pcval;
 
     /*
      * Property cache ensures that we are dealing with an existing property,
      * and guards the shape for us.
@@ -5072,17 +5092,17 @@ TraceRecorder::native_get(LIns* obj_ins,
     else
         v_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
     return true;
 }
 
 // So box_jsval can emit no LIR_or at all to tag an object jsval.
 JS_STATIC_ASSERT(JSVAL_OBJECT == 0);
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::box_jsval(jsval v, LIns*& v_ins)
 {
     if (isNumber(v)) {
         LIns* args[] = { v_ins, cx_ins };
         v_ins = lir->insCall(&js_BoxDouble_ci, args);
         guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_ERROR_COOKIE)),
               OOM_EXIT);
         return true;
@@ -5095,17 +5115,17 @@ TraceRecorder::box_jsval(jsval v, LIns*&
         return true;
       case JSVAL_STRING:
         v_ins = lir->ins2(LIR_pior, v_ins, INS_CONST(JSVAL_STRING));
         return true;
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::unbox_jsval(jsval v, LIns*& v_ins)
 {
     if (isNumber(v)) {
         // JSVAL_IS_NUMBER(v)
         guard(false,
               lir->ins_eq0(lir->ins2(LIR_pior,
                                      lir->ins2(LIR_piand, v_ins, INS_CONST(JSVAL_INT)),
                                      lir->ins2i(LIR_eq,
@@ -5141,51 +5161,51 @@ TraceRecorder::unbox_jsval(jsval v, LIns
               MISMATCH_EXIT);
         v_ins = lir->ins2(LIR_piand, v_ins, INS_CONST(~JSVAL_TAGMASK));
         return true;
     }
     JS_NOT_REACHED("unbox_jsval");
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::getThis(LIns*& this_ins)
 {
     if (cx->fp->callee) { /* in a function */
         if (JSVAL_IS_NULL(cx->fp->argv[-1]))
             return false;
         this_ins = get(&cx->fp->argv[-1]);
         guard(false, lir->ins_eq0(this_ins), MISMATCH_EXIT);
     } else { /* in global code */
         this_ins = scopeChain();
     }
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::guardClass(JSObject* obj, LIns* obj_ins, JSClass* clasp, ExitType exitType)
 {
     bool cond = STOBJ_GET_CLASS(obj) == clasp;
 
     LIns* class_ins = lir->insLoad(LIR_ldp, obj_ins, offsetof(JSObject, classword));
     class_ins = lir->ins2(LIR_piand, class_ins, lir->insImm(~3));
 
     char namebuf[32];
     JS_snprintf(namebuf, sizeof namebuf, "guard(class is %s)", clasp->name);
     guard(cond, addName(lir->ins2(LIR_eq, class_ins, INS_CONSTPTR(clasp)), namebuf), exitType);
     return cond;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::guardDenseArray(JSObject* obj, LIns* obj_ins, ExitType exitType)
 {
     return guardClass(obj, obj_ins, &js_ArrayClass, exitType);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::guardDenseArrayIndex(JSObject* obj, jsint idx, LIns* obj_ins,
                                     LIns* dslots_ins, LIns* idx_ins, ExitType exitType)
 {
     jsuint length = ARRAY_DENSE_LENGTH(obj);
 
     bool cond = (jsuint(idx) < jsuint(obj->fslots[JSSLOT_ARRAY_LENGTH]) && jsuint(idx) < length);
     if (cond) {
         /* Guard array length */
@@ -5233,17 +5253,17 @@ TraceRecorder::guardDenseArrayIndex(JSOb
  * JSObjectOps). Finally, beware resolve hooks mutating objects. Oh, and watch
  * out for bears too ;-).
  *
  * One win here is that we do not need to generate a guard that obj_ins does
  * not result in the global object on trace, because we guard on shape and rule
  * out obj's shape being the global object's shape at recording time. This is
  * safe because the global shape cannot change on trace.
  */
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::guardElemOp(JSObject* obj, LIns* obj_ins, jsid id, size_t op_offset, jsval* vp)
 {
     LIns* map_ins = lir->insLoad(LIR_ldp, obj_ins, (int)offsetof(JSObject, map));
     LIns* ops_ins;
     if (!map_is_native(obj->map, map_ins, ops_ins, op_offset))
         return false;
 
     uint32 shape = OBJ_SHAPE(obj);
@@ -5283,17 +5303,17 @@ TraceRecorder::guardElemOp(JSObject* obj
     if (OBJ_SHAPE(obj) != shape)
         ABORT_TRACE("resolve hook mutated elem op base object");
 
     LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
     guard(true, addName(lir->ins2i(LIR_eq, shape_ins, shape), "guard(shape)"), MISMATCH_EXIT);
     return true;
 }
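
The shape-guard reasoning in the comment above can be illustrated with a small standalone model (not SpiderMonkey code; Obj, traced_elem_op and the constants below are invented for illustration): the expected shape is baked into the trace at record time, and the emitted guard compares it against the object's current shape at run time, side-exiting on a mismatch.

    #include <cstdint>
    #include <cstdio>

    struct Obj { uint32_t shape; };          // shape changes whenever the property layout changes

    // Recorded trace: the expected shape is baked in as a constant at record time.
    // Returning false models taking a MISMATCH_EXIT side exit back to the interpreter.
    static bool traced_elem_op(const Obj* obj, uint32_t recordedShape) {
        if (obj->shape != recordedShape)     // the guard(shape) emitted by guardElemOp
            return false;                    // side exit: layout changed since recording
        return true;                         // safe to run the specialized fast path
    }

    int main() {
        Obj o{42};
        uint32_t recorded = o.shape;         // record-time snapshot (OBJ_SHAPE(obj))
        std::printf("%d\n", traced_elem_op(&o, recorded));  // 1: guard holds
        o.shape = 43;                        // e.g. a resolve hook added a property
        std::printf("%d\n", traced_elem_op(&o, recorded));  // 0: guard bails out
        return 0;
    }
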
 
-void
+JS_REQUIRES_STACK void
 TraceRecorder::clearFrameSlotsFromCache()
 {
     /* Clear out all slots of this frame in the nativeFrameTracker. Different locations on the
        VM stack might map to different locations on the native stack depending on the
        number of arguments of the next call (i.e. its argc), so we have to make sure we map
        those into the cache with the right offsets. */
     JSStackFrame* fp = cx->fp;
     jsval* vp;
@@ -5305,17 +5325,17 @@ TraceRecorder::clearFrameSlotsFromCache(
             nativeFrameTracker.set(vp++, (LIns*)0);
     }
     vp = &fp->slots[0];
     vpstop = &fp->slots[fp->script->nslots];
     while (vp < vpstop)
         nativeFrameTracker.set(vp++, (LIns*)0);
 }
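
A minimal sketch of the invalidation clearFrameSlotsFromCache performs, using a toy tracker keyed by slot address (the Tracker type and clear_frame_slots helper below are illustrative only, not the real nativeFrameTracker API):

    #include <cstdio>
    #include <unordered_map>

    // Toy stand-in for nativeFrameTracker: maps a VM-stack slot address to the
    // instruction (here just an int id) that last wrote it.
    struct Tracker {
        std::unordered_map<const double*, int> map;
        void set(const double* slot, int ins) { map[slot] = ins; }
        int  get(const double* slot) const {
            auto it = map.find(slot);
            return it == map.end() ? 0 : it->second;
        }
    };

    // Analogue of clearFrameSlotsFromCache(): zero every slot in [begin, end) so a
    // later frame reusing the same addresses cannot pick up stale entries.
    static void clear_frame_slots(Tracker& t, const double* begin, const double* end) {
        for (const double* vp = begin; vp < end; ++vp)
            t.set(vp, 0);
    }

    int main() {
        double frame[4] = {0, 0, 0, 0};
        Tracker t;
        t.set(&frame[2], 7);                       // slot written while recording this frame
        clear_frame_slots(t, frame, frame + 4);    // the frame is going away
        std::printf("%d\n", t.get(&frame[2]));     // 0: no stale mapping survives
        return 0;
    }
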
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_EnterFrame()
 {
     JSStackFrame* fp = cx->fp;
 
     if (++callDepth >= MAX_CALLDEPTH)
         ABORT_TRACE("exceeded maximum call depth");
     // FIXME: Allow and attempt to inline a single level of recursion until we compile 
     //        recursive calls as independent trees (459301).
@@ -5337,17 +5357,17 @@ TraceRecorder::record_EnterFrame()
 
     vp = &fp->slots[0];
     vpstop = vp + fp->script->nfixed;
     while (vp < vpstop)
         set(vp++, void_ins, true);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_LeaveFrame()
 {
     debug_only_v(
         if (cx->fp->fun)
             printf("LeaveFrame (back to %s), callDepth=%d\n",
                    js_AtomToPrintableString(cx, cx->fp->fun->atom),
                    callDepth);
         );
@@ -5356,206 +5376,206 @@ TraceRecorder::record_LeaveFrame()
 
     // LeaveFrame gets called after the interpreter popped the frame and
     // stored rval, so cx->fp not cx->fp->down, and -1 not 0.
     atoms = cx->fp->script->atomMap.vector;
     set(&stackval(-1), rval_ins, true);
     return true;
 }
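
A toy illustration of why the store above targets stackval(-1) of the frame that is current after the pop; the stack layout (callee, thisv, one argument) and the numbers are arbitrary:

    #include <cstdio>
    #include <vector>

    int main() {
        // Caller's operand stack right before a call: ... callee thisv arg0
        std::vector<int> stack = {10, 20, 30};
        int* sp = stack.data() + stack.size();

        // The interpreter pops the callee frame and replaces callee..argN with rval,
        // so by the time LeaveFrame is recorded the result already sits at sp[-1]
        // of the *caller* frame (cx->fp, not cx->fp->down).
        int rval = 99;
        sp -= 3;            // pop callee, thisv, arg0
        *sp++ = rval;       // push the return value
        std::printf("%d\n", sp[-1]);   // 99: set(&stackval(-1), rval_ins) targets this slot
        return 0;
    }
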
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INTERRUPT()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_PUSH()
 {
     stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_POPV()
 {
     jsval& rval = stackval(-1);
     LIns *rval_ins = get(&rval);
     if (!box_jsval(rval, rval_ins))
         return false;
 
     // Store it in cx->fp->rval. NB: Tricky dependencies. cx->fp is the right
     // frame because POPV appears only in global and eval code and we don't
     // trace JSOP_EVAL or leaving the frame where tracing started.
     LIns *fp_ins = lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp));
     lir->insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval));
     return true;
 }
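
The store above goes through a load of cx->fp emitted into the trace rather than through a frame pointer captured at record time. A self-contained sketch of that indirection (Frame and Context here are stand-ins, not the real JSStackFrame and JSContext):

    #include <cstdio>

    struct Frame   { double rval; };
    struct Context { Frame* fp; };

    // Mirrors the generated LIR: load cx->fp each time, then store into fp->rval.
    // Baking in the Frame* seen at record time would write into the wrong frame if
    // a different frame is current when the compiled trace runs.
    static void store_rval(Context* cx, double rval) {
        Frame* fp = cx->fp;      // LIR_ldp  cx_ins, offsetof(JSContext, fp)
        fp->rval = rval;         // insStorei(rval_ins, fp_ins, offsetof(JSStackFrame, rval))
    }

    int main() {
        Frame f1{0}, f2{0};
        Context cx{&f1};
        store_rval(&cx, 1.5);
        cx.fp = &f2;             // the current frame changed between executions
        store_rval(&cx, 2.5);
        std::printf("%g %g\n", f1.rval, f2.rval);   // 1.5 2.5
        return 0;
    }
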
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENTERWITH()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LEAVEWITH()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RETURN()
 {
     jsval& rval = stackval(-1);
     JSStackFrame *fp = cx->fp;
     if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && JSVAL_IS_PRIMITIVE(rval)) {
         JS_ASSERT(OBJECT_TO_JSVAL(fp->thisp) == fp->argv[-1]);
         rval_ins = get(&fp->argv[-1]);
     } else {
         rval_ins = get(&rval);
     }
     debug_only_v(printf("returning from %s\n", js_AtomToPrintableString(cx, cx->fp->fun->atom));)
     clearFrameSlotsFromCache();
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GOTO()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFEQ()
 {
     trackCfgMerges(cx->fp->regs->pc);
     return ifop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFNE()
 {
     return ifop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARGUMENTS()
 {
 #if 1
     ABORT_TRACE("can't trace arguments yet");
 #else
     LIns* args[] = { cx_ins };
     LIns* a_ins = lir->insCall(&js_Arguments_ci, args);
     guard(false, lir->ins_eq0(a_ins), OOM_EXIT);
     stack(0, a_ins);
     return true;
 #endif
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DUP()
 {
     stack(0, get(&stackval(-1)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DUP2()
 {
     stack(0, get(&stackval(-2)));
     stack(1, get(&stackval(-1)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SWAP()
 {
     jsval& l = stackval(-2);
     jsval& r = stackval(-1);
     LIns* l_ins = get(&l);
     LIns* r_ins = get(&r);
     set(&r, l_ins);
     set(&l, r_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETCONST()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BITOR()
 {
     return binary(LIR_or);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BITXOR()
 {
     return binary(LIR_xor);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BITAND()
 {
     return binary(LIR_and);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_EQ()
 {
     return cmp(LIR_feq, CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NE()
 {
     return cmp(LIR_feq, CMP_NEGATE | CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LT()
 {
     return cmp(LIR_flt, CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LE()
 {
     return cmp(LIR_fle, CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GT()
 {
     return cmp(LIR_fgt, CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GE()
 {
     return cmp(LIR_fge, CMP_TRY_BRANCH_AFTER_COND);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LSH()
 {
     return binary(LIR_lsh);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RSH()
 {
     return binary(LIR_rsh);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_URSH()
 {
     return binary(LIR_ush);
 }
 
 static struct {
     jsbytecode obj_any[10];
     jsbytecode any_obj[8];
@@ -5586,17 +5606,17 @@ static struct {
         JSOP_CALL, 0, 0,
         JSOP_ADD,
         JSOP_STOP
     }
 };
 
 JS_STATIC_ASSERT(sizeof(add_imacros) < IMACRO_PC_ADJ_LIMIT);
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ADD()
 {
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
 
     if (JSVAL_IS_OBJECT(l) && hasToStringMethod(l)) {
         if (JSVAL_IS_OBJECT(r) && hasToStringMethod(r))
             return call_imacro(add_imacros.obj_obj);
@@ -5613,35 +5633,35 @@ TraceRecorder::record_JSOP_ADD()
         guard(false, lir->ins_eq0(concat), OOM_EXIT);
         set(&l, concat);
         return true;
     }
 
     return binary(LIR_fadd);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SUB()
 {
     return binary(LIR_fsub);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_MUL()
 {
     return binary(LIR_fmul);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DIV()
 {
     return binary(LIR_fdiv);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_MOD()
 {
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
 
     if (JSVAL_IS_OBJECT(l) && hasValueOfMethod(l)) {
         if (JSVAL_IS_OBJECT(r) && hasValueOfMethod(r))
             return call_imacro(binary_imacros.obj_obj);
@@ -5665,17 +5685,17 @@ TraceRecorder::record_JSOP_MOD()
             x = lir->insCall(&js_dmod_ci, args);
         }
         set(&l, x);
         return true;
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NOT()
 {
     jsval& v = stackval(-1);
     if (JSVAL_TAG(v) == JSVAL_BOOLEAN) {
         set(&v, lir->ins_eq0(lir->ins2i(LIR_eq, get(&v), 1)));
         return true;
     } 
     if (isNumber(v)) {
@@ -5690,23 +5710,23 @@ TraceRecorder::record_JSOP_NOT()
     }
     JS_ASSERT(JSVAL_IS_STRING(v));
     set(&v, lir->ins_eq0(lir->ins2(LIR_piand, 
                                    lir->insLoad(LIR_ldp, get(&v), (int)offsetof(JSString, length)),
                                    INS_CONSTPTR(JSSTRING_LENGTH_MASK))));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BITNOT()
 {
     return unary(LIR_not);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEG()
 {
     jsval& v = stackval(-1);
     if (isNumber(v)) {
         LIns* a = get(&v);
 
         /* If we're a promoted integer, we have to watch out for 0s since -0 is a double.
            Only follow this path if we're not an integer that's 0 and we're not a double 
@@ -5789,17 +5809,17 @@ TraceRecorder::newArray(JSObject *ctor, 
                 return false;
             stobj_set_dslot(arr_ins, i, dslots_ins, elt_ins, "set_array_elt");
         }
     }
     set(rval, arr_ins);
     return true;
 }
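
On the -0 caveat in record_JSOP_NEG above: negating an integer zero yields -0, which only a double can represent, so the integer fast path must exclude operands that might be zero. A standard-C++ illustration:

    #include <cmath>
    #include <cstdio>

    int main() {
        double negzero = -0.0;
        // -0.0 compares equal to 0.0, so an int can't preserve it...
        std::printf("%d\n", negzero == 0.0);              // 1
        // ...yet it is observably different: its sign bit is set and 1/-0 is -inf.
        std::printf("%d\n", std::signbit(negzero));       // 1
        std::printf("%g\n", 1.0 / negzero);               // -inf
        // So a trace that demoted the operand to int32 must not take the
        // integer-negate path when the value might be zero.
        return 0;
    }
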
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::functionCall(bool constructing, uintN argc)
 {
     JSStackFrame* fp = cx->fp;
     jsbytecode *pc = fp->regs->pc;
 
     jsval& fval = stackval(0 - (2 + argc));
     JS_ASSERT(&fval >= StackBase(fp));
 
@@ -5998,41 +6018,41 @@ success:
          * jsval (like Array_p_pop).
          */
         pendingTraceableNative = known;
     }
 
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEW()
 {
     return functionCall(true, GET_ARGC(cx->fp->regs->pc));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DELNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DELPROP()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DELELEM()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TYPEOF()
 {
     jsval& r = stackval(-1);
     LIns* type;
     if (JSVAL_IS_STRING(r)) {
         type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_STRING]));
     } else if (isNumber(r)) {
         type = INS_CONSTPTR(ATOM_TO_STRING(cx->runtime->atomState.typeAtoms[JSTYPE_NUMBER]));
@@ -6047,129 +6067,129 @@ TraceRecorder::record_JSOP_TYPEOF()
             JS_ASSERT(JSVAL_IS_OBJECT(r));
             type = lir->insCall(&js_TypeOfObject_ci, args);
         }
     }
     set(&r, type);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_VOID()
 {
     stack(-1, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCNAME()
 {
     return incName(1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCPROP()
 {
     return incProp(1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCELEM()
 {
     return incElem(1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECNAME()
 {
     return incName(-1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECPROP()
 {
     return incProp(-1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECELEM()
 {
     return incElem(-1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::incName(jsint incr, bool pre)
 {
     jsval* vp;
     if (!name(vp))
         return false;
     LIns* v_ins = get(vp);
     if (!inc(*vp, v_ins, incr, pre))
         return false;
     set(vp, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NAMEINC()
 {
     return incName(1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_PROPINC()
 {
     return incProp(1, false);
 }
 
 // XXX consolidate with record_JSOP_GETELEM code...
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ELEMINC()
 {
     return incElem(1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NAMEDEC()
 {
     return incName(-1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_PROPDEC()
 {
     return incProp(-1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ELEMDEC()
 {
     return incElem(-1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETPROP()
 {
     return getProp(stackval(-1));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETPROP()
 {
     jsval& l = stackval(-2);
     if (JSVAL_IS_PRIMITIVE(l))
         ABORT_TRACE("primitive this for SETPROP");
 
     JSObject* obj = JSVAL_TO_OBJECT(l);
     if (obj->map->ops->setProperty != js_SetProperty)
         ABORT_TRACE("non-native JSObjectOps::setProperty");
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop)
 {
     if (sprop->setter == js_watch_set)
         ABORT_TRACE("watchpoint detected");
 
     jsbytecode* pc = cx->fp->regs->pc;
     jsval& r = stackval(-1);
     jsval& l = stackval(-2);
@@ -6217,17 +6237,17 @@ TraceRecorder::record_SetPropHit(JSPropC
     if (!native_set(obj_ins, sprop, dslots_ins, boxed_ins))
         return false;
 
     if (*pc != JSOP_INITPROP && pc[JSOP_SETPROP_LENGTH] != JSOP_POP)
         set(&l, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_SetPropMiss(JSPropCacheEntry* entry)
 {
     if (entry->kpc != cx->fp->regs->pc || !PCVAL_IS_SPROP(entry->vword))
         ABORT_TRACE("can't trace uncacheable property set");
 
     JSScopeProperty* sprop = PCVAL_TO_SPROP(entry->vword);
 
 #ifdef DEBUG
@@ -6237,17 +6257,17 @@ TraceRecorder::record_SetPropMiss(JSProp
     JS_ASSERT(scope->object == obj);
     JS_ASSERT(scope->shape == PCVCAP_SHAPE(entry->vcap));
     JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
 #endif
 
     return record_SetPropHit(entry, sprop);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETELEM()
 {
     jsval& idx = stackval(-1);
     jsval& lval = stackval(-2);
 
     LIns* obj_ins = get(&lval);
     LIns* idx_ins = get(&idx);
     
@@ -6321,17 +6341,17 @@ TraceRecorder::record_JSOP_GETELEM()
     jsval* vp;
     LIns* addr_ins;
     if (!elem(lval, idx, vp, v_ins, addr_ins))
         return false;
     set(&lval, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETELEM()
 {
     jsval& v = stackval(-1);
     jsval& idx = stackval(-2);
     jsval& lval = stackval(-3);
 
     /* no guards for type checks, trace specialized this already */
     if (JSVAL_IS_PRIMITIVE(lval))
@@ -6380,17 +6400,17 @@ TraceRecorder::record_JSOP_SETELEM()
 
     jsbytecode* pc = cx->fp->regs->pc;
     if (*pc == JSOP_SETELEM && pc[JSOP_SETELEM_LENGTH] != JSOP_POP)
         set(&lval, v_ins);
 
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLNAME()
 {
     JSObject* obj = cx->fp->scopeChain;
     if (obj != globalObj) {
         jsval* vp;
         if (!activeCallOrGlobalSlot(obj, vp))
             return false;
         stack(0, get(vp));
@@ -6408,29 +6428,29 @@ TraceRecorder::record_JSOP_CALLNAME()
         ABORT_TRACE("callee is not an object");
     JS_ASSERT(HAS_FUNCTION_CLASS(PCVAL_TO_OBJECT(pcval)));
 
     stack(0, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
     stack(1, obj_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETUPVAR()
 {
     ABORT_TRACE("GETUPVAR");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLUPVAR()
 {
     ABORT_TRACE("CALLUPVAR");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::guardCallee(jsval& callee)
 {
     LIns* exit = snapshot(BRANCH_EXIT);
     JSObject* callee_obj = JSVAL_TO_OBJECT(callee);
     LIns* callee_ins = get(&callee);
     guard(true,
           lir->ins2(LIR_eq, 
                     lir->ins2(LIR_piand, 
@@ -6441,17 +6461,17 @@ TraceRecorder::guardCallee(jsval& callee
     guard(true,
           lir->ins2(LIR_eq,
                     stobj_get_fslot(callee_ins, JSSLOT_PARENT),
                     INS_CONSTPTR(OBJ_GET_PARENT(cx, callee_obj))),
           exit);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing)
 {
     if (JS_GetGlobalForObject(cx, JSVAL_TO_OBJECT(fval)) != globalObj)
         ABORT_TRACE("JSOP_CALL or JSOP_NEW crosses global scopes");
 
     JSStackFrame* fp = cx->fp;
 
     // TODO: track the copying via the tracker...
@@ -6494,23 +6514,23 @@ TraceRecorder::interpretedFunctionCall(j
                    callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, typemap));
     lir->insStorei(INS_CONST(fi.word), lirbuf->rp,
                    callDepth * sizeof(FrameInfo) + offsetof(FrameInfo, word));
 
     atoms = fun->u.i.script->atomMap.vector;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALL()
 {
     return functionCall(false, GET_ARGC(cx->fp->regs->pc));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_APPLY()
 {
     JSStackFrame* fp = cx->fp;
     jsbytecode *pc = fp->regs->pc;
     uintN argc = GET_ARGC(pc);
     jsval* vp = fp->regs->sp - (argc + 2);
     JS_ASSERT(vp >= StackBase(fp));
     jsuint length = 0;
@@ -6649,23 +6669,23 @@ TraceRecorder::record_JSOP_APPLY()
     tracker.set(&vp[0], callee_ins);
     tracker.set(&vp[1], this_ins);
     for (unsigned n = 0; n < argc; ++n)
         tracker.set(&vp[2 + n], argv[n]);
 
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_ApplyComplete(uintN argc)
 {
     return functionCall(false, argc);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_FastNativeCallComplete()
 {
     JS_ASSERT(pendingTraceableNative);
     
     /* At this point the generated code has already called the native function
        and we can no longer fail back to the original pc location (JSOP_CALL)
        because that would cause the interpreter to re-execute the native 
        function, which might have side effects.
@@ -6701,17 +6721,17 @@ TraceRecorder::record_FastNativeCallComp
         }
     }
 
     // We'll null pendingTraceableNative in js_MonitorRecording, on the next op cycle.
     // There must be a next op since the stack is non-empty.
     return ok;
 }
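
A toy model of the constraint described in the comment above: once the native has run, a side exit must resume after the call, or the interpreter would repeat the side effect (the three-op "interpreter" below is purely illustrative):

    #include <cstdio>

    static int counter = 0;                  // observable side effect of the "native"
    static void native_call() { ++counter; }

    // Tiny interpreter: pc 0 is the call, pc 1 is the next op, pc 2 stops.
    static void interpret_from(int pc) {
        for (; pc < 2; ++pc)
            if (pc == 0)
                native_call();
    }

    int main() {
        // On trace, the generated code already called the native once.
        native_call();
        // Exiting back to the call's pc would re-execute it...
        interpret_from(0);
        std::printf("resume at call: counter=%d\n", counter);      // 2 (side effect repeated)
        // ...so an exit taken after a traceable native must resume past the call.
        counter = 1;                          // reset to "native ran once on trace"
        interpret_from(1);
        std::printf("resume after call: counter=%d\n", counter);   // 1 (correct)
        return 0;
    }
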
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::name(jsval*& vp)
 {
     JSObject* obj = cx->fp->scopeChain;
     if (obj != globalObj)
         return activeCallOrGlobalSlot(obj, vp);
 
     /* Can't use prop here, because we don't want unboxing from global slots. */
     LIns* obj_ins = scopeChain();
@@ -6724,17 +6744,17 @@ TraceRecorder::name(jsval*& vp)
 
     if (!lazilyImportGlobalSlot(slot))
         ABORT_TRACE("lazy import of global slot failed");
 
     vp = &STOBJ_GET_SLOT(obj, slot);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::prop(JSObject* obj, LIns* obj_ins, uint32& slot, LIns*& v_ins)
 {
     /*
      * Can't specialize to assert obj != global, must guard to avoid aliasing
      * stale homes of stacked global variables.
      */
     if (obj == globalObj)
         ABORT_TRACE("prop op aliases global");
@@ -6813,17 +6833,17 @@ TraceRecorder::prop(JSObject* obj, LIns*
     }
 
     v_ins = stobj_get_slot(obj_ins, slot, dslots_ins);
     if (!unbox_jsval(STOBJ_GET_SLOT(obj, slot), v_ins))
         ABORT_TRACE("unboxing");
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::elem(jsval& oval, jsval& idx, jsval*& vp, LIns*& v_ins, LIns*& addr_ins)
 {
     /* no guards for type checks, trace specialized this already */
     if (JSVAL_IS_PRIMITIVE(oval) || !JSVAL_IS_INT(idx))
         return false;
 
     JSObject* obj = JSVAL_TO_OBJECT(oval);
     LIns* obj_ins = get(&oval);
@@ -6867,223 +6887,223 @@ TraceRecorder::elem(jsval& oval, jsval& 
         // Optimize to guard for a hole only after untagging, so we know that
         // we have a boolean, to avoid an extra guard for non-boolean values.
         guard(false, lir->ins2(LIR_eq, v_ins, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_HOLE))),
               MISMATCH_EXIT);
     }
     return true;
 }
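
The untag-then-guard ordering in the comment above, modeled with a toy boxed-value encoding (the tag layout and HOLE constant are invented for illustration and do not match the real jsval encoding exactly): only values that untag to a boolean ever reach the hole check, so non-boolean reads pay no extra guard.

    #include <cstdint>
    #include <cstdio>

    // Toy boxed-value model: low 3 bits are the tag, booleans use tag 6,
    // and the "hole" is encoded as an out-of-range pseudo-boolean.
    const uint32_t TAG_MASK = 7, TAG_BOOLEAN = 6;
    const uint32_t HOLE = (2u << 3) | TAG_BOOLEAN;     // boolean payload 2 marks a hole

    static bool read_element(uint32_t boxed, uint32_t& out) {
        if ((boxed & TAG_MASK) == TAG_BOOLEAN) {
            out = boxed >> 3;                          // untag first...
            if (out == 2)                              // ...then a single hole guard,
                return false;                          // taken only on the boolean path
            return true;
        }
        out = boxed >> 3;                              // non-booleans never hit the guard
        return true;
    }

    int main() {
        uint32_t v;
        std::printf("%d\n", read_element((1u << 3) | TAG_BOOLEAN, v));  // 1: plain true
        std::printf("%d\n", read_element(HOLE, v));                     // 0: hole detected
        return 0;
    }
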
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::getProp(JSObject* obj, LIns* obj_ins)
 {
     uint32 slot;
     LIns* v_ins;
     if (!prop(obj, obj_ins, slot, v_ins))
         return false;
 
     const JSCodeSpec& cs = js_CodeSpec[*cx->fp->regs->pc];
     JS_ASSERT(cs.ndefs == 1);
     stack(-cs.nuses, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::getProp(jsval& v)
 {
     if (JSVAL_IS_PRIMITIVE(v))
         ABORT_TRACE("primitive lhs");
 
     return getProp(JSVAL_TO_OBJECT(v), get(&v));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NAME()
 {
     jsval* vp;
     if (!name(vp))
         return false;
     stack(0, get(vp));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DOUBLE()
 {
     jsval v = jsval(atoms[GET_INDEX(cx->fp->regs->pc)]);
     jsdpun u;
     u.d = *JSVAL_TO_DOUBLE(v);
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STRING()
 {
     JSAtom* atom = atoms[GET_INDEX(cx->fp->regs->pc)];
     JS_ASSERT(ATOM_IS_STRING(atom));
     stack(0, INS_CONSTPTR(ATOM_TO_STRING(atom)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ZERO()
 {
     jsdpun u;
     u.d = 0.0;
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ONE()
 {
     jsdpun u;
     u.d = 1.0;
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NULL()
 {
     stack(0, INS_CONSTPTR(NULL));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_THIS()
 {
     LIns* this_ins;
     if (!getThis(this_ins))
         return false;
     stack(0, this_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FALSE()
 {
     stack(0, lir->insImm(0));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TRUE()
 {
     stack(0, lir->insImm(1));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_OR()
 {
     return ifop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_AND()
 {
     return ifop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TABLESWITCH()
 {
     return switchop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LOOKUPSWITCH()
 {
     return switchop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STRICTEQ()
 {
     return cmp(LIR_feq, CMP_STRICT);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STRICTNE()
 {
     return cmp(LIR_feq, CMP_STRICT | CMP_NEGATE);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_OBJECT()
 {
     JSStackFrame* fp = cx->fp;
     JSScript* script = fp->script;
     unsigned index = atoms - script->atomMap.vector + GET_INDEX(fp->regs->pc);
 
     JSObject* obj;
     JS_GET_SCRIPT_OBJECT(script, index, obj);
     stack(0, INS_CONSTPTR(obj));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_POP()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_POS()
 {
     jsval& r = stackval(-1);
     return isNumber(r);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TRAP()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETARG()
 {
     stack(0, arg(GET_ARGNO(cx->fp->regs->pc)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETARG()
 {
     arg(GET_ARGNO(cx->fp->regs->pc), stack(-1));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETLOCAL()
 {
     stack(0, var(GET_SLOTNO(cx->fp->regs->pc)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETLOCAL()
 {
     var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_UINT16()
 {
     jsdpun u;
     u.d = (jsdouble)GET_UINT16(cx->fp->regs->pc);
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEWINIT()
 {
     JSProtoKey key = JSProtoKey(GET_INT8(cx->fp->regs->pc));
     JSObject* obj;
     const CallInfo *ci;
     if (key == JSProto_Array) {
         if (!js_GetClassPrototype(cx, globalObj, INT_TO_JSID(key), &obj))
             return false;
@@ -7099,17 +7119,17 @@ TraceRecorder::record_JSOP_NEWINIT()
     }
     LIns* args[] = { INS_CONSTPTR(obj), cx_ins };
     LIns* v_ins = lir->insCall(ci, args);
     guard(false, lir->ins_eq0(v_ins), OOM_EXIT);
     stack(0, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENDINIT()
 {
     jsval& v = stackval(-1);
     JS_ASSERT(!JSVAL_IS_PRIMITIVE(v));
     JSObject* obj = JSVAL_TO_OBJECT(v);
     if (OBJ_IS_DENSE_ARRAY(cx, obj)) {
         // Until we get JSOP_NEWARRAY working, we do our optimizing here...
         if (obj->fslots[JSSLOT_ARRAY_LENGTH] == 1 &&
@@ -7119,90 +7139,90 @@ TraceRecorder::record_JSOP_ENDINIT()
             LIns* args[] = { stack(1), callArgN(v_ins, 1), cx_ins };
             v_ins = lir->insCall(&js_Array_1str_ci, args);
             set(&v, v_ins);
         }
     }
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INITPROP()
 {
     // All the action is in record_SetPropHit.
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INITELEM()
 {
     return record_JSOP_SETELEM();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFSHARP()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_USESHARP()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCARG()
 {
     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCLOCAL()
 {
     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECARG()
 {
     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECLOCAL()
 {
     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARGINC()
 {
     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), 1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LOCALINC()
 {
     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), 1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARGDEC()
 {
     return inc(argval(GET_ARGNO(cx->fp->regs->pc)), -1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LOCALDEC()
 {
     return inc(varval(GET_SLOTNO(cx->fp->regs->pc)), -1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IMACOP()
 {
     JS_ASSERT(cx->fp->imacpc);
     return true;
 }
 
 static struct {
     jsbytecode for_in[10];
@@ -7222,17 +7242,17 @@ static struct {
         JSOP_CALL, 0, 1,
         JSOP_PUSH,
         JSOP_STOP
     }
 };
 
 JS_STATIC_ASSERT(sizeof(iter_imacros) < IMACRO_PC_ADJ_LIMIT);
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ITER()
 {
     jsval& v = stackval(-1);
     if (!JSVAL_IS_PRIMITIVE(v)) {
         jsuint flags = cx->fp->regs->pc[1];
 
         if (!hasIteratorMethod(v)) {
             LIns* args[] = { get(&v), INS_CONST(flags), cx_ins };
@@ -7269,17 +7289,17 @@ static jsbytecode nextiter_imacro[] = {
     JSOP_CALLPROP, 0, COMMON_ATOM_INDEX(next),
     JSOP_CALL, 0, 0,
     JSOP_TRUE,
     JSOP_STOP
 };
 
 JS_STATIC_ASSERT(sizeof(nextiter_imacro) < IMACRO_PC_ADJ_LIMIT);
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEXTITER()
 {
     jsval& iterobj_val = stackval(-2);
     if (!JSVAL_IS_PRIMITIVE(iterobj_val)) {
         LIns* iterobj_ins = get(&iterobj_val);
 
         if (guardClass(JSVAL_TO_OBJECT(iterobj_val), iterobj_ins, &js_IteratorClass, BRANCH_EXIT)) {
             LIns* args[] = { iterobj_ins, cx_ins };
@@ -7296,82 +7316,82 @@ TraceRecorder::record_JSOP_NEXTITER()
 
         // Custom iterator, possibly a generator.
         return call_imacro(nextiter_imacro);
     }
 
     ABORT_TRACE("for-in on a primitive value");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_IteratorNextComplete()
 {
     JS_ASSERT(*cx->fp->regs->pc == JSOP_NEXTITER);
     JS_ASSERT(pendingTraceableNative == &js_FastCallIteratorNext_tn);
 
     jsval& v = stackval(-2);
     LIns* v_ins = get(&v);
     if (unbox_jsval(v, v_ins)) {
         set(&v, v_ins);
         return true;
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENDITER()
 {
     LIns* args[] = { stack(-2), cx_ins };
     LIns* ok_ins = lir->insCall(&js_CloseIterator_ci, args);
     guard(false, lir->ins_eq0(ok_ins), MISMATCH_EXIT);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FORNAME()
 {
     jsval* vp;
     if (name(vp)) {
         set(vp, stack(-1));
         return true;
     }
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FORPROP()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FORELEM()
 {
     return record_JSOP_DUP();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FORARG()
 {
     return record_JSOP_SETARG();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FORLOCAL()
 {
     return record_JSOP_SETLOCAL();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_POPN()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BINDNAME()
 {
     JSObject* obj = cx->fp->scopeChain;
     if (obj != globalObj)
         ABORT_TRACE("JSOP_BINDNAME crosses global scopes");
 
     LIns* obj_ins = scopeChain();
     JSObject* obj2;
@@ -7380,17 +7400,17 @@ TraceRecorder::record_JSOP_BINDNAME()
         return false;
     if (obj2 != obj)
         ABORT_TRACE("JSOP_BINDNAME found a non-direct property on the global object");
 
     stack(0, obj_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETNAME()
 {
     jsval& l = stackval(-2);
     JS_ASSERT(!JSVAL_IS_PRIMITIVE(l));
 
     /*
      * Trace cases that are global code or in lightweight functions scoped by
      * the global object only.
@@ -7398,23 +7418,23 @@ TraceRecorder::record_JSOP_SETNAME()
     JSObject* obj = JSVAL_TO_OBJECT(l);
     if (obj != cx->fp->scopeChain || obj != globalObj)
         ABORT_TRACE("JSOP_SETNAME left operand is not the global object");
 
     // The rest of the work is in record_SetPropHit.
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_THROW()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IN()
 {
     jsval& rval = stackval(-1);
     jsval& lval = stackval(-2);
 
     if (JSVAL_IS_PRIMITIVE(rval))
         ABORT_TRACE("JSOP_IN on non-object right operand");
     JSObject* obj = JSVAL_TO_OBJECT(rval);
@@ -7454,565 +7474,565 @@ TraceRecorder::record_JSOP_IN()
        the guard bails out at the comparison and the interpreter
        will therefore re-execute the comparison. This way the
        value of the condition doesn't have to be calculated and
        saved on the stack in most cases. */
     set(&lval, x);
     return true;
 }
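
A sketch of the "let the interpreter redo the comparison" idea from the comment above: since JSOP_IN's test is cheap and side-effect free, re-evaluating it after a side exit is safe and avoids flushing the condition value to the VM stack (js_in below is a stand-in, not the real helper):

    #include <cstdio>

    // Linear membership test standing in for JSOP_IN's property lookup.
    static bool js_in(int key, const int* obj, int len) {
        for (int i = 0; i < len; ++i)
            if (obj[i] == key)
                return true;
        return false;
    }

    int main() {
        int obj[] = {1, 2, 3};
        // On trace: compute the condition and guard on it, exiting if it flips.
        bool traced = js_in(2, obj, 3);
        // After the exit the interpreter re-executes the same comparison instead of
        // reading a stored condition, so nothing had to be saved across the exit.
        bool reinterpreted = js_in(2, obj, 3);
        std::printf("%d %d\n", traced, reinterpreted);   // 1 1: same answer either way
        return 0;
    }
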
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INSTANCEOF()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEBUGGER()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GOSUB()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RETSUB()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_EXCEPTION()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LINENO()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CONDSWITCH()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CASE()
 {
     return cmp(LIR_feq, CMP_CASE);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFAULT()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_EVAL()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENUMELEM()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETTER()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETTER()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFFUN()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFCONST()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFVAR()
 {
     return false;
 }
 
 /*
  * XXX could hoist out to jsinterp.h and share with jsinterp.cpp, but
  * XXX jsopcode.cpp has different definitions of same-named macros.
  */
 #define GET_FULL_INDEX(PCOFF)                                                 \
     (atoms - script->atomMap.vector + GET_INDEX(regs.pc + PCOFF))
 
 #define LOAD_FUNCTION(PCOFF)                                                  \
     JS_GET_SCRIPT_FUNCTION(script, GET_FULL_INDEX(PCOFF), fun)
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ANONFUNOBJ()
 {
     JSFunction* fun;
     JSFrameRegs& regs = *cx->fp->regs;
     JSScript* script = cx->fp->script;
     LOAD_FUNCTION(0); // needs script, regs, fun
 
     JSObject* obj = FUN_OBJECT(fun);
     if (OBJ_GET_PARENT(cx, obj) != cx->fp->scopeChain)
         ABORT_TRACE("can't trace with activation object on scopeChain");
 
     stack(0, INS_CONSTPTR(obj));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NAMEDFUNOBJ()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETLOCALPOP()
 {
     var(GET_SLOTNO(cx->fp->regs->pc), stack(-1));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETCALL()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TRY()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FINALLY()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NOP()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARGSUB()
 {
     JSStackFrame* fp = cx->fp;
     if (!(fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
         uintN slot = GET_ARGNO(fp->regs->pc);
         if (slot < fp->fun->nargs && slot < fp->argc && !fp->argsobj) {
             stack(0, get(&cx->fp->argv[slot]));
             return true;
         }
     }
     ABORT_TRACE("can't trace JSOP_ARGSUB hard case");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARGCNT()
 {
     if (!(cx->fp->fun->flags & JSFUN_HEAVYWEIGHT)) {
         jsdpun u;
         u.d = cx->fp->argc;
         stack(0, lir->insImmq(u.u64));
         return true;
     }
     ABORT_TRACE("can't trace heavyweight JSOP_ARGCNT");
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_DefLocalFunSetSlot(uint32 slot, JSObject* obj)
 {
     var(slot, INS_CONSTPTR(obj));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFLOCALFUN()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GOTOX()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFEQX()
 {
     trackCfgMerges(cx->fp->regs->pc);
     return record_JSOP_IFEQ();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_IFNEX()
 {
     return record_JSOP_IFNE();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ORX()
 {
     return record_JSOP_OR();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ANDX()
 {
     return record_JSOP_AND();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GOSUBX()
 {
     return record_JSOP_GOSUB();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CASEX()
 {
     return cmp(LIR_feq, CMP_CASE);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFAULTX()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TABLESWITCHX()
 {
     return switchop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LOOKUPSWITCHX()
 {
     return switchop();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BACKPATCH()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BACKPATCH_POP()
 {
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_THROWING()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETRVAL()
 {
     // If we implement this, we need to update JSOP_STOP.
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RETRVAL()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETGVAR()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     stack(0, get(&STOBJ_GET_SLOT(globalObj, slot)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETGVAR()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     set(&STOBJ_GET_SLOT(globalObj, slot), stack(-1));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INCGVAR()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     return inc(STOBJ_GET_SLOT(globalObj, slot), 1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DECGVAR()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     return inc(STOBJ_GET_SLOT(globalObj, slot), -1);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GVARINC()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     return inc(STOBJ_GET_SLOT(globalObj, slot), 1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GVARDEC()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
     if (!lazilyImportGlobalSlot(slot))
          ABORT_TRACE("lazy import of global slot failed");
 
     return inc(STOBJ_GET_SLOT(globalObj, slot), -1, false);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_REGEXP()
 {
     return false;
 }
 
 // begin JS_HAS_XML_SUPPORT
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DEFXMLNS()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ANYNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_QNAMEPART()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_QNAMECONST()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_QNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TOATTRNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TOATTRVAL()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ADDATTRNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ADDATTRVAL()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_BINDXMLNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_SETXMLNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DESCENDANTS()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_FILTER()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENDFILTER()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TOXML()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TOXMLLIST()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLTAGEXPR()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLELTEXPR()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLOBJECT()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLCDATA()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLCOMMENT()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_XMLPI()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETFUNNS()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STARTXML()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STARTXMLEXPR()
 {
     return false;
 }
 
 // end JS_HAS_XML_SUPPORT
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLPROP()
 {
     jsval& l = stackval(-1);
     JSObject* obj;
     LIns* obj_ins;
     LIns* this_ins;
     if (!JSVAL_IS_PRIMITIVE(l)) {
         obj = JSVAL_TO_OBJECT(l);
@@ -8065,59 +8085,59 @@ TraceRecorder::record_JSOP_CALLPROP()
             ABORT_TRACE("callee does not accept primitive |this|");
     }
 
     stack(0, this_ins);
     stack(-1, INS_CONSTPTR(PCVAL_TO_OBJECT(pcval)));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_DELDESC()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_UINT24()
 {
     jsdpun u;
     u.d = (jsdouble)GET_UINT24(cx->fp->regs->pc);
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INDEXBASE()
 {
     atoms += GET_INDEXBASE(cx->fp->regs->pc);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RESETBASE()
 {
     atoms = cx->fp->script->atomMap.vector;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_RESETBASE0()
 {
     atoms = cx->fp->script->atomMap.vector;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLELEM()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_STOP()
 {
     JSStackFrame *fp = cx->fp;
 
     if (fp->imacpc) {
         // End of imacro, so return true to the interpreter immediately. The
         // interpreter's JSOP_STOP case will return from the imacro, back to
         // the pc after the calling op, still in the same JSStackFrame.
@@ -8138,17 +8158,17 @@ TraceRecorder::record_JSOP_STOP()
         rval_ins = get(&fp->argv[-1]);
     } else {
         rval_ins = INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_VOID));
     }
     clearFrameSlotsFromCache();
     return true;
 }
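
A toy model of the imacro return described in the JSOP_STOP comment above: the frame remembers the pc of the op that invoked the imacro, and the imacro's STOP resumes just past that op in the same frame (the real bookkeeping via IMACRO_PC_ADJ is more involved; Frame and the helpers below are illustrative only):

    #include <cstdio>

    struct Frame {
        int pc;        // current pc
        int imacpc;    // saved pc of the op that invoked the imacro, or -1
    };

    static void call_imacro(Frame& fp, int imacro_start) {
        fp.imacpc = fp.pc;        // remember where we came from
        fp.pc = imacro_start;     // start executing the synthetic ops
    }

    static void imacro_stop(Frame& fp, int op_length) {
        fp.pc = fp.imacpc + op_length;   // resume after the calling op, same frame
        fp.imacpc = -1;
    }

    int main() {
        Frame fp{10, -1};
        call_imacro(fp, 100);     // e.g. JSOP_ADD on objects enters add_imacros
        imacro_stop(fp, 1);       // the imacro's JSOP_STOP
        std::printf("pc=%d imacpc=%d\n", fp.pc, fp.imacpc);   // pc=11 imacpc=-1
        return 0;
    }
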
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETXPROP()
 {
     jsval& l = stackval(-1);
     if (JSVAL_IS_PRIMITIVE(l))
         ABORT_TRACE("primitive-this for GETXPROP?");
 
     JSObject* obj = JSVAL_TO_OBJECT(l);
     if (obj != cx->fp->scopeChain || obj != globalObj)
@@ -8156,41 +8176,41 @@ TraceRecorder::record_JSOP_GETXPROP()
 
     jsval* vp;
     if (!name(vp))
         return false;
     stack(-1, get(vp));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLXMLNAME()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_TYPEOFEXPR()
 {
     return record_JSOP_TYPEOF();
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENTERBLOCK()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LEAVEBLOCK()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GENERATOR()
 {
     return false;
 #if 0
     JSStackFrame* fp = cx->fp;
     if (fp->callobj || fp->argsobj || fp->varobj)
         ABORT_TRACE("can't trace hard-case generator");
 
@@ -8209,84 +8229,84 @@ TraceRecorder::record_JSOP_GENERATOR()
 
     LIns* args[] = { INS_CONST(fp->argc), INS_CONSTPTR(fp->callee), cx_ins };
     LIns* g_ins = lir->insCall(&js_FastNewGenerator_ci, args);
     guard(false, lir->ins_eq0(g_ins), OOM_EXIT);
     return true;
 #endif
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_YIELD()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ARRAYPUSH()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_ENUMCONSTELEM()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LEAVEBLOCKEXPR()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETTHISPROP()
 {
     LIns* this_ins;
 
     /* It's safe to just use cx->fp->thisp here because getThis() returns false if thisp
        is not available. */
     return getThis(this_ins) && getProp(cx->fp->thisp, this_ins);
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETARGPROP()
 {
     return getProp(argval(GET_ARGNO(cx->fp->regs->pc)));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_GETLOCALPROP()
 {
     return getProp(varval(GET_SLOTNO(cx->fp->regs->pc)));
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INDEXBASE1()
 {
     atoms += 1 << 16;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INDEXBASE2()
 {
     atoms += 2 << 16;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INDEXBASE3()
 {
     atoms += 3 << 16;
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLGVAR()
 {
     jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
     if (JSVAL_IS_NULL(slotval))
         return true; // We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
 
     uint32 slot = JSVAL_TO_INT(slotval);
 
@@ -8294,60 +8314,60 @@ TraceRecorder::record_JSOP_CALLGVAR()
          ABORT_TRACE("lazy import of global slot failed");
 
     jsval& v = STOBJ_GET_SLOT(cx->fp->scopeChain, slot);
     stack(0, get(&v));
     stack(1, INS_CONSTPTR(NULL));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLLOCAL()
 {
     uintN slot = GET_SLOTNO(cx->fp->regs->pc);
     stack(0, var(slot));
     stack(1, INS_CONSTPTR(NULL));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_CALLARG()
 {
     uintN slot = GET_ARGNO(cx->fp->regs->pc);
     stack(0, arg(slot));
     stack(1, INS_CONSTPTR(NULL));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NULLTHIS()
 {
     stack(0, INS_CONSTPTR(NULL));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INT8()
 {
     jsdpun u;
     u.d = (jsdouble)GET_INT8(cx->fp->regs->pc);
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_INT32()
 {
     jsdpun u;
     u.d = (jsdouble)GET_INT32(cx->fp->regs->pc);
     stack(0, lir->insImmq(u.u64));
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_LENGTH()
 {
     jsval& l = stackval(-1);
     if (JSVAL_IS_PRIMITIVE(l)) {
         if (!JSVAL_IS_STRING(l))
             ABORT_TRACE("non-string primitives unsupported");
         LIns* str_ins = get(&l);
         LIns* len_ins = lir->insLoad(LIR_ldp, str_ins, (int)offsetof(JSString, length));
@@ -8378,23 +8398,23 @@ TraceRecorder::record_JSOP_LENGTH()
         ABORT_TRACE("only dense arrays supported");
     if (!guardDenseArray(obj, get(&l)))
         ABORT_TRACE("OBJ_IS_DENSE_ARRAY but not?!?");
     LIns* v_ins = lir->ins1(LIR_i2f, stobj_get_fslot(get(&l), JSSLOT_ARRAY_LENGTH));
     set(&l, v_ins);
     return true;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_NEWARRAY()
 {
     return false;
 }
 
-bool
+JS_REQUIRES_STACK bool
 TraceRecorder::record_JSOP_HOLE()
 {
     stack(0, INS_CONST(JSVAL_TO_BOOLEAN(JSVAL_HOLE)));
     return true;
 }
 
 #ifdef JS_JIT_SPEW
 /* Prints information about entry typemaps and unstable exits for all peers at a PC */
@@ -8454,17 +8474,17 @@ InitIMacroCode()
 
     // NB: above loop mis-set JSOP_ADD's entry, so order here is crucial.
     imacro_code[JSOP_ADD] = (jsbytecode*)&add_imacros - 1;
 
     imacro_code[JSOP_ITER] = (jsbytecode*)&iter_imacros - 1;
     imacro_code[JSOP_NEXTITER] = nextiter_imacro - 1;
 }
 
-#define UNUSED(n) bool TraceRecorder::record_JSOP_UNUSED##n() { return false; }
+#define UNUSED(n) JS_REQUIRES_STACK bool TraceRecorder::record_JSOP_UNUSED##n() { return false; }
 
 UNUSED(131)
 UNUSED(201)
 UNUSED(202)
 UNUSED(203)
 UNUSED(204)
 UNUSED(205)
 UNUSED(206)
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -168,18 +168,18 @@ public:
     bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
     void clear();
 };
 
 typedef Queue<uint16> SlotList;
 
 class TypeMap : public Queue<uint8> {
 public:
-    void captureGlobalTypes(JSContext* cx, SlotList& slots);
-    void captureStackTypes(JSContext* cx, unsigned callDepth);
+    JS_REQUIRES_STACK void captureGlobalTypes(JSContext* cx, SlotList& slots);
+    JS_REQUIRES_STACK void captureStackTypes(JSContext* cx, unsigned callDepth);
     bool matches(TypeMap& other) const;
 };
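
For readers following the header changes: JS_REQUIRES_STACK marks code that reads the interpreter stack (e.g. cx->fp) directly, and the jsstack.js Treehydra module added by this patch statically checks how such code is reached. The real macro presumably lives in jstypes.h (also touched by this patch); the sketch below only illustrates the annotation pattern, with hypothetical MY_* names and attribute spelling.

    // Illustrative only: MY_STATIC_CHECKING, MY_REQUIRES_STACK and the attribute
    // spelling are assumptions, not the real definitions. Outside the analysis
    // build the macro expands to nothing, so annotations cost nothing at run time.
    #ifdef MY_STATIC_CHECKING
    # define MY_REQUIRES_STACK __attribute__((user("JS_REQUIRES_STACK")))
    #else
    # define MY_REQUIRES_STACK
    #endif

    #include <cstdio>

    struct ContextLike { void* fp; };   // stand-in for JSContext and its fp field

    // Functions that touch cx->fp directly carry the annotation; the analysis can
    // then flag callers that reach them without going through the annotated path.
    MY_REQUIRES_STACK static void* currentFrame(ContextLike* cx) { return cx->fp; }

    int main() {
        int dummy;
        ContextLike cx{&dummy};
        std::printf("%p\n", currentFrame(&cx));
        return 0;
    }
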
 
 enum ExitType {
     BRANCH_EXIT, 
     LOOP_EXIT, 
     NESTED_EXIT,
     MISMATCH_EXIT,
@@ -297,125 +297,130 @@ class TraceRecorder : public avmplus::GC
     intptr_t                terminate_ip_adj;
     nanojit::Fragment*      outerToBlacklist;
     nanojit::Fragment*      promotedPeer;
     TraceRecorder*          nextRecorderToAbort;
     bool                    wasRootFragment;
 
     bool isGlobal(jsval* p) const;
     ptrdiff_t nativeGlobalOffset(jsval* p) const;
-    ptrdiff_t nativeStackOffset(jsval* p) const;
-    void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t,
-                const char *prefix, uintN index, JSStackFrame *fp);
-    void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots, unsigned callDepth,
-                uint8* globalTypeMap, uint8* stackTypeMap);
+    JS_REQUIRES_STACK ptrdiff_t nativeStackOffset(jsval* p) const;
+    JS_REQUIRES_STACK void import(nanojit::LIns* base, ptrdiff_t offset, jsval* p, uint8& t, 
+                                  const char *prefix, uintN index, JSStackFrame *fp);
+    JS_REQUIRES_STACK void import(TreeInfo* treeInfo, nanojit::LIns* sp, unsigned ngslots,
+                                  unsigned callDepth, uint8* globalTypeMap, uint8* stackTypeMap);
     void trackNativeStackUse(unsigned slots);
 
-    bool lazilyImportGlobalSlot(unsigned slot);
+    JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
 
-    nanojit::LIns* guard(bool expected, nanojit::LIns* cond, ExitType exitType);
+    JS_REQUIRES_STACK nanojit::LIns* guard(bool expected, nanojit::LIns* cond,
+                                           ExitType exitType);
     nanojit::LIns* guard(bool expected, nanojit::LIns* cond, nanojit::LIns* exit);
     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
 
-    nanojit::LIns* get(jsval* p) const;
+    JS_REQUIRES_STACK nanojit::LIns* get(jsval* p) const;
     nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
-    void set(jsval* p, nanojit::LIns* l, bool initializing = false);
+    JS_REQUIRES_STACK void set(jsval* p, nanojit::LIns* l, bool initializing = false);
 
-    bool checkType(jsval& v, uint8 t, jsval*& stage_val, nanojit::LIns*& stage_ins,
-                   unsigned& stage_count);
-    bool deduceTypeStability(nanojit::Fragment* root_peer, nanojit::Fragment** stable_peer,
-                             unsigned* demotes);
+    JS_REQUIRES_STACK bool checkType(jsval& v, uint8 t, jsval*& stage_val,
+                                     nanojit::LIns*& stage_ins, unsigned& stage_count);
+    JS_REQUIRES_STACK bool deduceTypeStability(nanojit::Fragment* root_peer,
+                                               nanojit::Fragment** stable_peer, unsigned* demotes);
 
-    jsval& argval(unsigned n) const;
-    jsval& varval(unsigned n) const;
-    jsval& stackval(int n) const;
+    JS_REQUIRES_STACK jsval& argval(unsigned n) const;
+    JS_REQUIRES_STACK jsval& varval(unsigned n) const;
+    JS_REQUIRES_STACK jsval& stackval(int n) const;
 
-    nanojit::LIns* scopeChain() const;
-    bool activeCallOrGlobalSlot(JSObject* obj, jsval*& vp);
+    JS_REQUIRES_STACK nanojit::LIns* scopeChain() const;
+    JS_REQUIRES_STACK bool activeCallOrGlobalSlot(JSObject* obj, jsval*& vp);
 
-    nanojit::LIns* arg(unsigned n);
-    void arg(unsigned n, nanojit::LIns* i);
-    nanojit::LIns* var(unsigned n);
-    void var(unsigned n, nanojit::LIns* i);
-    nanojit::LIns* stack(int n);
-    void stack(int n, nanojit::LIns* i);
+    JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n);
+    JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
+    JS_REQUIRES_STACK nanojit::LIns* var(unsigned n);
+    JS_REQUIRES_STACK void var(unsigned n, nanojit::LIns* i);
+    JS_REQUIRES_STACK nanojit::LIns* stack(int n);
+    JS_REQUIRES_STACK void stack(int n, nanojit::LIns* i);
 
     nanojit::LIns* alu(nanojit::LOpcode op, jsdouble v0, jsdouble v1, 
                        nanojit::LIns* s0, nanojit::LIns* s1);
     nanojit::LIns* f2i(nanojit::LIns* f);
-    nanojit::LIns* makeNumberInt32(nanojit::LIns* f);
+    JS_REQUIRES_STACK nanojit::LIns* makeNumberInt32(nanojit::LIns* f);
     nanojit::LIns* stringify(jsval& v);
 
     bool call_imacro(jsbytecode* imacro);
 
-    bool ifop();
-    bool switchop();
-    bool inc(jsval& v, jsint incr, bool pre = true);
-    bool inc(jsval& v, nanojit::LIns*& v_ins, jsint incr, bool pre = true);
-    bool incProp(jsint incr, bool pre = true);
-    bool incElem(jsint incr, bool pre = true);
-    bool incName(jsint incr, bool pre = true);
+    JS_REQUIRES_STACK bool ifop();
+    JS_REQUIRES_STACK bool switchop();
+    JS_REQUIRES_STACK bool inc(jsval& v, jsint incr, bool pre = true);
+    JS_REQUIRES_STACK bool inc(jsval& v, nanojit::LIns*& v_ins, jsint incr, bool pre = true);
+    JS_REQUIRES_STACK bool incProp(jsint incr, bool pre = true);
+    JS_REQUIRES_STACK bool incElem(jsint incr, bool pre = true);
+    JS_REQUIRES_STACK bool incName(jsint incr, bool pre = true);
 
     enum { CMP_NEGATE = 1, CMP_TRY_BRANCH_AFTER_COND = 2, CMP_CASE = 4, CMP_STRICT = 8 };
-    bool cmp(nanojit::LOpcode op, int flags = 0);
+    JS_REQUIRES_STACK bool cmp(nanojit::LOpcode op, int flags = 0);
 
-    bool unary(nanojit::LOpcode op);
-    bool binary(nanojit::LOpcode op);
+    JS_REQUIRES_STACK bool unary(nanojit::LOpcode op);
+    JS_REQUIRES_STACK bool binary(nanojit::LOpcode op);
 
     bool ibinary(nanojit::LOpcode op);
     bool iunary(nanojit::LOpcode op);
     bool bbinary(nanojit::LOpcode op);
     void demote(jsval& v, jsdouble result);
 
-    bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins, nanojit::LIns*& ops_ins,
-                       size_t op_offset = 0);
-    bool test_property_cache(JSObject* obj, nanojit::LIns* obj_ins, JSObject*& obj2,
-                             jsuword& pcval);
-    bool test_property_cache_direct_slot(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot);
+    JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
+                                         nanojit::LIns*& ops_ins, size_t op_offset = 0);
+    JS_REQUIRES_STACK bool test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
+                                               JSObject*& obj2, jsuword& pcval);
+    JS_REQUIRES_STACK bool test_property_cache_direct_slot(JSObject* obj, nanojit::LIns* obj_ins,
+                                                           uint32& slot);
     void stobj_set_slot(nanojit::LIns* obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
                         nanojit::LIns* v_ins);
     void stobj_set_dslot(nanojit::LIns *obj_ins, unsigned slot, nanojit::LIns*& dslots_ins,
                          nanojit::LIns* v_ins, const char *name);
 
     nanojit::LIns* stobj_get_fslot(nanojit::LIns* obj_ins, unsigned slot);
     nanojit::LIns* stobj_get_slot(nanojit::LIns* obj_ins, unsigned slot,
                                   nanojit::LIns*& dslots_ins);
     bool native_set(nanojit::LIns* obj_ins, JSScopeProperty* sprop,
                     nanojit::LIns*& dslots_ins, nanojit::LIns* v_ins);
     bool native_get(nanojit::LIns* obj_ins, nanojit::LIns* pobj_ins, JSScopeProperty* sprop,
                     nanojit::LIns*& dslots_ins, nanojit::LIns*& v_ins);
 
-    bool name(jsval*& vp);
-    bool prop(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot, nanojit::LIns*& v_ins);
-    bool elem(jsval& oval, jsval& idx, jsval*& vp, nanojit::LIns*& v_ins, nanojit::LIns*& addr_ins);
-
-    bool getProp(JSObject* obj, nanojit::LIns* obj_ins);
-    bool getProp(jsval& v);
-    bool getThis(nanojit::LIns*& this_ins);
+    JS_REQUIRES_STACK bool name(jsval*& vp);
+    JS_REQUIRES_STACK bool prop(JSObject* obj, nanojit::LIns* obj_ins, uint32& slot,
+                                nanojit::LIns*& v_ins);
+    JS_REQUIRES_STACK bool elem(jsval& oval, jsval& idx, jsval*& vp, nanojit::LIns*& v_ins,
+                                nanojit::LIns*& addr_ins);
+    JS_REQUIRES_STACK bool getProp(JSObject* obj, nanojit::LIns* obj_ins);
+    JS_REQUIRES_STACK bool getProp(jsval& v);
+    JS_REQUIRES_STACK bool getThis(nanojit::LIns*& this_ins);
 
-    bool box_jsval(jsval v, nanojit::LIns*& v_ins);
-    bool unbox_jsval(jsval v, nanojit::LIns*& v_ins);
-    bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
-                    ExitType exitType = MISMATCH_EXIT);
-    bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
-                         ExitType exitType = MISMATCH_EXIT);
-    bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
-                              nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins,
-                              ExitType exitType);
-    bool guardElemOp(JSObject* obj, nanojit::LIns* obj_ins, jsid id, size_t op_offset, jsval* vp);
+    JS_REQUIRES_STACK bool box_jsval(jsval v, nanojit::LIns*& v_ins);
+    JS_REQUIRES_STACK bool unbox_jsval(jsval v, nanojit::LIns*& v_ins);
+    JS_REQUIRES_STACK bool guardClass(JSObject* obj, nanojit::LIns* obj_ins, JSClass* clasp,
+                                      ExitType exitType = MISMATCH_EXIT);
+    JS_REQUIRES_STACK bool guardDenseArray(JSObject* obj, nanojit::LIns* obj_ins,
+                                           ExitType exitType = MISMATCH_EXIT);
+    JS_REQUIRES_STACK bool guardDenseArrayIndex(JSObject* obj, jsint idx, nanojit::LIns* obj_ins,
+                                                nanojit::LIns* dslots_ins, nanojit::LIns* idx_ins,
+                                                ExitType exitType);
+    JS_REQUIRES_STACK bool guardElemOp(JSObject* obj, nanojit::LIns* obj_ins, jsid id,
+                                       size_t op_offset, jsval* vp);
     void clearFrameSlotsFromCache();
-    bool guardCallee(jsval& callee);
-    bool getClassPrototype(JSObject* ctor, nanojit::LIns*& proto_ins);
-    bool newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* vp);
-    bool interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc, bool constructing);
-    bool functionCall(bool constructing, uintN argc);
+    JS_REQUIRES_STACK bool guardCallee(jsval& callee);
+    JS_REQUIRES_STACK bool getClassPrototype(JSObject* ctor, nanojit::LIns*& proto_ins);
+    JS_REQUIRES_STACK bool newArray(JSObject* ctor, uint32 argc, jsval* argv, jsval* vp);
+    JS_REQUIRES_STACK bool interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
+                                                   bool constructing);
+    JS_REQUIRES_STACK bool functionCall(bool constructing, uintN argc);
 
-    void trackCfgMerges(jsbytecode* pc);
-    void flipIf(jsbytecode* pc, bool& cond);
-    void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
+    JS_REQUIRES_STACK void trackCfgMerges(jsbytecode* pc);
+    JS_REQUIRES_STACK void flipIf(jsbytecode* pc, bool& cond);
+    JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
 
     bool hasMethod(JSObject* obj, jsid id);
     bool hasToStringMethod(JSObject* obj);
     bool hasToStringMethod(jsval v) {
         JS_ASSERT(JSVAL_IS_OBJECT(v));
         return hasToStringMethod(JSVAL_TO_OBJECT(v));
     }
     bool hasValueOfMethod(JSObject* obj);
@@ -425,63 +430,67 @@ class TraceRecorder : public avmplus::GC
     }
     bool hasIteratorMethod(JSObject* obj);
     bool hasIteratorMethod(jsval v) {
         JS_ASSERT(JSVAL_IS_OBJECT(v));
         return hasIteratorMethod(JSVAL_TO_OBJECT(v));
     }
 
 public:
-    friend bool js_MonitorRecording(TraceRecorder* tr);
+    friend JS_REQUIRES_STACK bool js_MonitorRecording(TraceRecorder* tr);
 
+    JS_REQUIRES_STACK
     TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
                   unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
                   VMSideExit* expectedInnerExit, nanojit::Fragment* outerToBlacklist);
     ~TraceRecorder();
 
-    uint8 determineSlotType(jsval* vp) const;
-    nanojit::LIns* snapshot(ExitType exitType);
+    JS_REQUIRES_STACK uint8 determineSlotType(jsval* vp) const;
+    JS_REQUIRES_STACK nanojit::LIns* snapshot(ExitType exitType);
     nanojit::Fragment* getFragment() const { return fragment; }
-    bool isLoopHeader(JSContext* cx) const;
-    void compile(nanojit::Fragmento* fragmento);
-    bool closeLoop(nanojit::Fragmento* fragmento, bool& demote, unsigned *demotes);
-    void endLoop(nanojit::Fragmento* fragmento);
-    void joinEdgesToEntry(nanojit::Fragmento* fragmento, nanojit::Fragment* peer_root);
+    JS_REQUIRES_STACK bool isLoopHeader(JSContext* cx) const;
+    JS_REQUIRES_STACK void compile(nanojit::Fragmento* fragmento);
+    JS_REQUIRES_STACK bool closeLoop(nanojit::Fragmento* fragmento, bool& demote,
+                                     unsigned *demotes);
+    JS_REQUIRES_STACK void endLoop(nanojit::Fragmento* fragmento);
+    JS_REQUIRES_STACK void joinEdgesToEntry(nanojit::Fragmento* fragmento,
+                                            nanojit::Fragment* peer_root);
     void blacklist() { fragment->blacklist(); }
-    bool adjustCallerTypes(nanojit::Fragment* f, unsigned* demote_slots, bool& trash);
-    nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f, nanojit::Fragment** empty);
-    void prepareTreeCall(nanojit::Fragment* inner);
-    void emitTreeCall(nanojit::Fragment* inner, VMSideExit* exit);
+    JS_REQUIRES_STACK bool adjustCallerTypes(nanojit::Fragment* f, unsigned* demote_slots,
+                                             bool& trash);
+    JS_REQUIRES_STACK nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f,
+                                                                  nanojit::Fragment** empty);
+    JS_REQUIRES_STACK void prepareTreeCall(nanojit::Fragment* inner);
+    JS_REQUIRES_STACK void emitTreeCall(nanojit::Fragment* inner, VMSideExit* exit);
     unsigned getCallDepth() const;
     void pushAbortStack();
     void popAbortStack();
     void removeFragmentoReferences();
 
-    bool record_EnterFrame();
-    bool record_LeaveFrame();
-    bool record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop);
-    bool record_SetPropMiss(JSPropCacheEntry* entry);
-    bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
-    bool record_FastNativeCallComplete();
-    bool record_IteratorNextComplete();
-    bool record_ApplyComplete(uintN argc);
-    
+    JS_REQUIRES_STACK bool record_EnterFrame();
+    JS_REQUIRES_STACK bool record_LeaveFrame();
+    JS_REQUIRES_STACK bool record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop);
+    JS_REQUIRES_STACK bool record_SetPropMiss(JSPropCacheEntry* entry);
+    JS_REQUIRES_STACK bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
+    JS_REQUIRES_STACK bool record_FastNativeCallComplete();
+    JS_REQUIRES_STACK bool record_IteratorNextComplete();
+    JS_REQUIRES_STACK bool record_ApplyComplete(uintN argc);
+
     nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; }
     void deepAbort() { deepAborted = true; }
     bool wasDeepAborted() { return deepAborted; }
     bool walkedOutOfLoop() { return terminate; }
     void setPromotedPeer(nanojit::Fragment* peer) { promotedPeer = peer; }
     TreeInfo* getTreeInfo() { return treeInfo; }
 
 #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
-    bool record_##op();
+    JS_REQUIRES_STACK bool record_##op();
 # include "jsopcode.tbl"
 #undef OPDEF
 };
-
 #define TRACING_ENABLED(cx)       JS_HAS_OPTION(cx, JSOPTION_JIT)
 #define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
 #define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))
 
 #define JSOP_IS_BINARY(op) ((uintN)((op) - JSOP_BITOR) <= (uintN)(JSOP_MOD - JSOP_BITOR))
 
 /*
  * See jsinterp.cpp for the ENABLE_TRACER definition. Also note how comparing x
@@ -524,23 +533,23 @@ public:
 
 #define TRACE_ARGS(x,args)      TRACE_ARGS_(x, args, )
 
 #define RECORD(x)               RECORD_ARGS(x, ())
 #define TRACE_0(x)              TRACE_ARGS(x, ())
 #define TRACE_1(x,a)            TRACE_ARGS(x, (a))
 #define TRACE_2(x,a,b)          TRACE_ARGS(x, (a, b))
 
-extern bool
+extern JS_REQUIRES_STACK bool
 js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);
 
-extern bool
+extern JS_REQUIRES_STACK bool
 js_MonitorRecording(TraceRecorder *tr);
 
-extern void
+extern JS_REQUIRES_STACK void
 js_AbortRecording(JSContext* cx, const char* reason);
 
 extern void
 js_InitJIT(JSTraceMonitor *tm);
 
 extern void
 js_FinishJIT(JSTraceMonitor *tm);
 
--- a/js/src/jstypes.h
+++ b/js/src/jstypes.h
@@ -183,16 +183,30 @@
 #  define JS_ALWAYS_INLINE   __forceinline
 # elif defined __GNUC__
 #  define JS_ALWAYS_INLINE   __attribute__((always_inline))
 # else
 #  define JS_ALWAYS_INLINE   JS_INLINE
 # endif
 #endif
 
+#ifdef NS_STATIC_CHECKING
+/*
+ * Attributes for static analysis. Functions declared with JS_REQUIRES_STACK
+ * always have a valid cx->fp and can access it freely.  Other functions can
+ * access cx->fp only after calling a function that "forces" the stack
+ * (i.e. lazily instantiates it as needed).
+ */
+# define JS_REQUIRES_STACK   __attribute__((user("JS_REQUIRES_STACK")))
+# define JS_FORCES_STACK     __attribute__((user("JS_FORCES_STACK")))
+#else
+# define JS_REQUIRES_STACK
+# define JS_FORCES_STACK
+#endif
+
 /***********************************************************************
 ** MACROS:      JS_BEGIN_MACRO
 **              JS_END_MACRO
 ** DESCRIPTION:
 **      Macro body brackets so that macros with compound statement definitions
 **      behave syntactically more like functions when called.
 ***********************************************************************/
 #define JS_BEGIN_MACRO  do {
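
The comment above describes the discipline these macros enforce; the sketch below (not part of the patch) shows how an annotated and an unannotated function are expected to interact. CurrentPC and CurrentPCBarriered are hypothetical names; js_GetTopStackFrame is the read barrier the rest of this patch calls and is assumed here to carry JS_FORCES_STACK.

    #include "jscntxt.h"   /* JSContext, JSStackFrame; pulls in jstypes.h */

    /* RED: may read cx->fp freely; callers guarantee the frame is synthesized. */
    static JS_REQUIRES_STACK jsbytecode *
    CurrentPC(JSContext *cx)
    {
        return cx->fp->regs ? cx->fp->regs->pc : NULL;
    }

    /*
     * GREEN: must not touch cx->fp directly.  Calling the JS_FORCES_STACK
     * barrier first makes the code it dominates RED, so the call below
     * passes the static check.
     */
    static jsbytecode *
    CurrentPCBarriered(JSContext *cx)
    {
        JSStackFrame *fp = js_GetTopStackFrame(cx);
        return fp ? CurrentPC(cx) : NULL;
    }
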
--- a/js/src/jsxml.cpp
+++ b/js/src/jsxml.cpp
@@ -675,17 +675,17 @@ NamespaceHelper(JSContext *cx, JSObject 
 
     return JS_TRUE;
 }
 
 static JSBool
 Namespace(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
     return NamespaceHelper(cx,
-                           (cx->fp->flags & JSFRAME_CONSTRUCTING) ? obj : NULL,
+                           JS_IsConstructing(cx) ? obj : NULL,
                            argc, argv, rval);
 }
 
 /*
  * When argc is -1, it indicates argv is empty but the code should behave as
  * if argc is 1 and argv[0] is JSVAL_VOID.
  */
 static JSBool
@@ -809,25 +809,25 @@ QNameHelper(JSContext *cx, JSObject *obj
 out:
     InitXMLQName(obj, uri, prefix, name);
     return JS_TRUE;
 }
 
 static JSBool
 QName(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
 {
-    return QNameHelper(cx, (cx->fp->flags & JSFRAME_CONSTRUCTING) ? obj : NULL,
+    return QNameHelper(cx, JS_IsConstructing(cx) ? obj : NULL,
                        &js_QNameClass.base, argc, argv, rval);
 }
 
 static JSBool
 AttributeName(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
               jsval *rval)
 {
-    return QNameHelper(cx, (cx->fp->flags & JSFRAME_CONSTRUCTING) ? obj : NULL,
+    return QNameHelper(cx, JS_IsConstructing(cx) ? obj : NULL,
                        &js_AttributeNameClass, argc, argv, rval);
 }
 
 /*
  * XMLArray library functions.
  */
 static JSBool
 namespace_identity(const void *a, const void *b)
@@ -1868,17 +1868,17 @@ ParseXMLSource(JSContext *cx, JSString *
     js_strncpy(chars + offset, srcp, srclen);
     offset += srclen;
     dstlen = length - offset + 1;
     js_InflateStringToBuffer(cx, suffix, constrlen(suffix), chars + offset,
                              &dstlen);
     chars [offset + dstlen] = 0;
 
     xml = NULL;
-    for (fp = cx->fp; fp && !fp->regs; fp = fp->down)
+    for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
         JS_ASSERT(!fp->script);
     filename = NULL;
     lineno = 1;
     if (fp) {
         op = (JSOp) *fp->regs->pc;
         if (op == JSOP_TOXML || op == JSOP_TOXMLLIST) {
             filename = fp->script->filename;
             lineno = js_FramePCToLineNumber(cx, fp);
@@ -1887,17 +1887,18 @@ ParseXMLSource(JSContext *cx, JSString *
                     --lineno;
             }
         }
     }
 
     if (!js_InitParseContext(cx, &pc, NULL, NULL, chars, length, NULL,
                              filename, lineno))
         goto out;
-    pn = js_ParseXMLText(cx, cx->fp->scopeChain, &pc, JS_FALSE);
+    pn = js_ParseXMLText(cx, js_GetTopStackFrame(cx)->scopeChain, &pc,
+                         JS_FALSE);
     if (pn && XMLArrayInit(cx, &nsarray, 1)) {
         if (GetXMLSettingFlags(cx, &flags))
             xml = ParseNodeToXML(cx, &pc, pn, &nsarray, flags);
 
         XMLArrayFinish(cx, &nsarray);
     }
     js_FinishParseContext(cx, &pc);
 
@@ -7258,17 +7259,17 @@ XML(JSContext *cx, JSObject *obj, uintN 
         v = STRING_TO_JSVAL(cx->runtime->emptyString);
 
     xobj = ToXML(cx, v);
     if (!xobj)
         return JS_FALSE;
     *rval = OBJECT_TO_JSVAL(xobj);
     xml = (JSXML *) JS_GetPrivate(cx, xobj);
 
-    if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && !JSVAL_IS_PRIMITIVE(v)) {
+    if (JS_IsConstructing(cx) && !JSVAL_IS_PRIMITIVE(v)) {
         vobj = JSVAL_TO_OBJECT(v);
         clasp = OBJ_GET_CLASS(cx, vobj);
         if (clasp == &js_XMLClass ||
             (clasp->flags & JSCLASS_DOCUMENT_OBSERVER)) {
             /* No need to lock obj, it's newly constructed and thread local. */
             copy = DeepCopy(cx, xml, obj, 0);
             if (!copy)
                 return JS_FALSE;
@@ -7286,17 +7287,17 @@ XMLList(JSContext *cx, JSObject *obj, ui
     jsval v;
     JSObject *vobj, *listobj;
     JSXML *xml, *list;
 
     v = argv[0];
     if (JSVAL_IS_NULL(v) || JSVAL_IS_VOID(v))
         v = STRING_TO_JSVAL(cx->runtime->emptyString);
 
-    if ((cx->fp->flags & JSFRAME_CONSTRUCTING) && !JSVAL_IS_PRIMITIVE(v)) {
+    if (JS_IsConstructing(cx) && !JSVAL_IS_PRIMITIVE(v)) {
         vobj = JSVAL_TO_OBJECT(v);
         if (OBJECT_IS_XML(cx, vobj)) {
             xml = (JSXML *) JS_GetPrivate(cx, vobj);
             if (xml->xml_class == JSXML_CLASS_LIST) {
                 listobj = js_NewXMLObject(cx, JSXML_CLASS_LIST);
                 if (!listobj)
                     return JS_FALSE;
                 *rval = OBJECT_TO_JSVAL(listobj);
@@ -7705,17 +7706,17 @@ js_GetFunctionNamespace(JSContext *cx, j
  */
 JSBool
 js_GetDefaultXMLNamespace(JSContext *cx, jsval *vp)
 {
     JSStackFrame *fp;
     JSObject *ns, *obj, *tmp;
     jsval v;
 
-    fp = cx->fp;
+    fp = js_GetTopStackFrame(cx);
     ns = fp->xmlNamespace;
     if (ns) {
         *vp = OBJECT_TO_JSVAL(ns);
         return JS_TRUE;
     }
 
     obj = NULL;
     for (tmp = fp->scopeChain; tmp; tmp = OBJ_GET_PARENT(cx, obj)) {
@@ -7753,17 +7754,17 @@ js_SetDefaultXMLNamespace(JSContext *cx,
 
     argv[0] = STRING_TO_JSVAL(cx->runtime->emptyString);
     argv[1] = v;
     ns = js_ConstructObject(cx, &js_NamespaceClass.base, NULL, NULL, 2, argv);
     if (!ns)
         return JS_FALSE;
     v = OBJECT_TO_JSVAL(ns);
 
-    fp = cx->fp;
+    fp = js_GetTopStackFrame(cx);
     varobj = fp->varobj;
     if (varobj) {
         if (!OBJ_DEFINE_PROPERTY(cx, varobj, JS_DEFAULT_XML_NAMESPACE_ID, v,
                                  JS_PropertyStub, JS_PropertyStub,
                                  JSPROP_PERMANENT, NULL)) {
             return JS_FALSE;
         }
     } else {
@@ -7945,17 +7946,17 @@ js_FindXMLProperty(JSContext *cx, jsval 
         JS_ASSERT(OBJ_GET_CLASS(cx, nameobj) == &js_AttributeNameClass ||
                   OBJ_GET_CLASS(cx, nameobj) == &js_QNameClass.base);
     }
 
     qn = nameobj;
     if (!IsFunctionQName(cx, qn, &funid))
         return JS_FALSE;
 
-    obj = cx->fp->scopeChain;
+    obj = js_GetTopStackFrame(cx)->scopeChain;
     do {
         /* Skip any With object that can wrap XML. */
         target = obj;
         while (OBJ_GET_CLASS(cx, target) == &js_WithClass) {
              proto = OBJ_GET_PROTO(cx, target);
              if (!proto)
                  break;
              target = proto;
@@ -8158,17 +8159,17 @@ js_InitXMLFilterClass(JSContext *cx, JSO
 JSBool
 js_StepXMLListFilter(JSContext *cx, JSBool initialized)
 {
     jsval *sp;
     JSObject *obj, *filterobj, *resobj, *kidobj;
     JSXML *xml, *list;
     JSXMLFilter *filter;
 
-    sp = cx->fp->regs->sp;
+    sp = js_GetTopStackFrame(cx)->regs->sp;
     if (!initialized) {
         /*
          * We haven't iterated yet, so initialize the filter based on the
          * value stored in sp[-2].
          */
         if (!VALUE_IS_XML(cx, sp[-2])) {
             js_ReportValueError(cx, JSMSG_NON_XML_FILTER, -2, sp[-2], NULL);
             return JS_FALSE;
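
Every jsxml.cpp hunk above is the same mechanical rewrite: direct cx->fp reads are routed through JS_IsConstructing or js_GetTopStackFrame. A minimal sketch of the pattern applied to a hypothetical native (MyNative is illustrative only, not part of the patch):

    static JSBool
    MyNative(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
    {
        /* Was: (cx->fp->flags & JSFRAME_CONSTRUCTING) */
        JSBool constructing = JS_IsConstructing(cx);

        /* Was: cx->fp->scopeChain -- the read barrier forces the frame first. */
        JSObject *scope = js_GetTopStackFrame(cx)->scopeChain;

        *rval = BOOLEAN_TO_JSVAL(constructing && scope != NULL);
        return JS_TRUE;
    }
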
--- a/js/src/liveconnect/nsCLiveconnect.cpp
+++ b/js/src/liveconnect/nsCLiveconnect.cpp
@@ -53,16 +53,17 @@
 #include "jsj_private.h"
 #include "jsjava.h"
 
 #include "jsdbgapi.h"
 #include "jsarena.h"
 #include "jsfun.h"
 #include "jscntxt.h"        /* For js_ReportErrorAgain().*/
 #include "jsscript.h"
+#include "jsstaticcheck.h"
 
 #include "netscape_javascript_JSObject.h"   /* javah-generated headers */
 #include "nsISecurityContext.h"
 #include "nsIServiceManager.h"
 #include "nsIJSContextStack.h"
 
 PR_BEGIN_EXTERN_C
 
@@ -156,30 +157,20 @@ AutoPushJSContext::AutoPushJSContext(nsI
         mPushResult = NS_ERROR_FAILURE;
 
     memset(&mFrame, 0, sizeof(mFrame));
 
     if (NS_SUCCEEDED(mPushResult))
     {
         // See if there are any scripts on the stack.
         // If not, we need to add a dummy frame with a principal.
+        JSStackFrame* tempFP = JS_GetScriptedCaller(cx, NULL);
+        JS_ASSERT_NOT_ON_TRACE(cx);
 
-        PRBool hasScript = PR_FALSE;
-        JSStackFrame* tempFP = cx->fp;
-        while (tempFP)
-        {
-            if (tempFP->script)
-            {
-                hasScript = PR_TRUE;
-                break;
-            }
-            tempFP = tempFP->down;
-        };
-
-        if (!hasScript)
+        if (!tempFP)
         {
             JSPrincipals* jsprinc;
             principal->GetJSPrincipals(cx, &jsprinc);
 
             JSFunction *fun = JS_CompileFunctionForPrincipals(cx, JS_GetGlobalObject(cx),
                                                               jsprinc, "anonymous", 0, nsnull,
                                                               "", 0, "", 1);
             JSPRINCIPALS_DROP(cx, jsprinc);
@@ -209,16 +200,18 @@ AutoPushJSContext::~AutoPushJSContext()
     if (mContextStack)
         mContextStack->Pop(nsnull);
 
     if (mFrame.callobj)
         js_PutCallObject(mContext, &mFrame);
     if (mFrame.argsobj)
         js_PutArgsObject(mContext, &mFrame);
     JS_ClearPendingException(mContext);
+
+    VOUCH_DOES_NOT_REQUIRE_STACK();
     if (mFrame.script)
         mContext->fp = mFrame.down;
 
     JS_EndRequest(mContext);
 }
 
 
 ////////////////////////////////////////////////////////////////////////////
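
The destructor above still writes mContext->fp directly when tearing down its synthetic frame; rather than marking the whole destructor JS_REQUIRES_STACK, it vouches for that one access with VOUCH_DOES_NOT_REQUIRE_STACK from jsstaticcheck.h. A hedged sketch of the same pattern in isolation (PopSyntheticFrame is hypothetical):

    #include "jscntxt.h"
    #include "jsinterp.h"
    #include "jsstaticcheck.h"

    static void
    PopSyntheticFrame(JSContext *cx, JSStackFrame *frame)
    {
        /* Tell the jsstack.js checker this direct fp access is deliberate. */
        VOUCH_DOES_NOT_REQUIRE_STACK();
        if (cx->fp == frame)
            cx->fp = frame->down;
    }
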
--- a/js/src/xpconnect/src/XPCNativeWrapper.cpp
+++ b/js/src/xpconnect/src/XPCNativeWrapper.cpp
@@ -139,22 +139,18 @@ ShouldBypassNativeWrapper(JSContext *cx,
                "Unexpected object");
   jsval flags;
 
   ::JS_GetReservedSlot(cx, obj, 0, &flags);
   if (HAS_FLAGS(flags, FLAG_EXPLICIT))
     return JS_FALSE;
 
   // Check what the script calling us looks like
-  JSScript *script = nsnull;
-  JSStackFrame *fp = cx->fp;
-  while(!script && fp) {
-    script = fp->script;
-    fp = fp->down;
-  }
+  JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL);
+  JSScript *script = fp ? fp->script : NULL;
 
   // If there's no script, bypass for now because that's what the old code did.
   // XXX FIXME: bug 341477 covers figuring out what we _should_ do.
   return !script || !(::JS_GetScriptFilenameFlags(script) & JSFILENAME_SYSTEM);
 }
 
 #define XPC_NW_BYPASS_BASE(cx, obj, code)                                     \
   JS_BEGIN_MACRO                                                              \
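
The frame walk that ShouldBypassNativeWrapper used to do by hand is exactly what JS_GetScriptedCaller provides: the nearest frame that has a script, or NULL if there is none. A small sketch of the replacement idiom (FilenameOfCaller is hypothetical):

    static const char *
    FilenameOfCaller(JSContext *cx)
    {
        JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL);
        return fp ? fp->script->filename : NULL;
    }
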
--- a/js/src/xpconnect/src/xpccallcontext.cpp
+++ b/js/src/xpconnect/src/xpccallcontext.cpp
@@ -340,17 +340,17 @@ XPCCallContext::~XPCCallContext()
         
             JS_DestroyContext(mJSContext);
         }
         else
         {
             // Don't clear newborns if JS frames (compilation or execution)
             // are active!  Doing so violates ancient invariants in the JS
             // engine, and it's not necessary to fix JS component leaks.
-            if(!mJSContext->fp)
+            if(!JS_IsRunning(mJSContext))
                 JS_ClearNewbornRoots(mJSContext);
         }
     }
 
 #ifdef DEBUG
     {
         StringWrapperEntry *se =
             reinterpret_cast<StringWrapperEntry*>(&mStringWrapperData);
--- a/js/src/xpconnect/src/xpcconvert.cpp
+++ b/js/src/xpconnect/src/xpcconvert.cpp
@@ -1143,26 +1143,21 @@ XPCConvert::NativeInterface2JSObject(XPC
                 JSScript* script = nsnull;
                 JSObject* callee = nsnull;
                 if(ccx.GetXPCContext()->CallerTypeIsJavaScript())
                 {
                     // Called from JS.  We're going to hand the resulting
                     // JSObject to said JS, so look for the script we want on
                     // the stack.
                     JSContext* cx = ccx;
-                    JSStackFrame* fp = cx->fp;
-                    while(fp)
+                    JSStackFrame* fp = JS_GetScriptedCaller(cx, NULL);
+                    if(fp)
                     {
                         script = fp->script;
-                        if(script)
-                        {
-                            callee = fp->callee;
-                            break;
-                        }
-                        fp = fp->down;
+                        callee = fp->callee;
                     }
                 }
                 else if(ccx.GetXPCContext()->CallerTypeIsNative())
                 {
                     callee = ccx.GetCallee();
                     if(callee && JS_ObjectIsFunction(ccx, callee))
                     {
                         // Called from c++, and calling out to |callee|, which
--- a/js/src/xpconnect/src/xpcstack.cpp
+++ b/js/src/xpconnect/src/xpcstack.cpp
@@ -76,20 +76,23 @@ private:
 
 /**********************************************/
 
 // static
 
 nsresult
 XPCJSStack::CreateStack(JSContext* cx, nsIStackFrame** stack)
 {
-    if(!cx || !cx->fp)
+    if(!cx)
         return NS_ERROR_FAILURE;
 
-    return XPCJSStackFrame::CreateStack(cx, cx->fp, (XPCJSStackFrame**) stack);
+    JSStackFrame *fp = NULL;
+    if (!JS_FrameIterator(cx, &fp))
+        return NS_ERROR_FAILURE;
+    return XPCJSStackFrame::CreateStack(cx, fp, (XPCJSStackFrame**) stack);
 }
 
 // static
 nsresult
 XPCJSStack::CreateStackFrameLocation(PRUint32 aLanguage,
                                      const char* aFilename,
                                      const char* aFunctionName,
                                      PRInt32 aLineNumber,
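
Where callers only need the newest frame, or just to know whether any frame exists, the public JS_FrameIterator replaces the old read of cx->fp, as CreateStack now does above. A sketch of the iteration idiom (CountScriptedFrames is hypothetical):

    static uint32
    CountScriptedFrames(JSContext *cx)
    {
        uint32 n = 0;
        JSStackFrame *fp = NULL;   /* NULL means start at the newest frame */
        while (JS_FrameIterator(cx, &fp)) {
            if (fp->script)
                ++n;
        }
        return n;
    }
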
new file mode 100644
--- /dev/null
+++ b/xpcom/analysis/jsstack.js
@@ -0,0 +1,167 @@
+/*
+ * Check that only JS_REQUIRES_STACK/JS_FORCES_STACK functions, and functions
+ * that have called a JS_FORCES_STACK function, access cx->fp directly or
+ * indirectly.
+ */
+
+require({ after_gcc_pass: 'cfg' });
+include('gcc_util.js');
+include('unstable/adts.js');
+include('unstable/analysis.js');
+include('unstable/lazy_types.js');
+include('unstable/esp.js');
+
+var Zero_NonZero = {};
+include('unstable/zero_nonzero.js', Zero_NonZero);
+
+// Tell MapFactory we don't need multimaps (a speed optimization).
+MapFactory.use_injective = true;
+
+/*
+ * There are two regions in the program: RED and GREEN.  Functions and member
+ * variables may be declared RED in the C++ source.  GREEN is the default.
+ *
+ * RED signals danger.  A GREEN part of a function must not call a RED function
+ * or access a RED member.
+ *
+ * The body of a RED function is all red.  The body of a GREEN function is all
+ * GREEN by default, but parts dominated by a call to a TURN_RED function are
+ * red.  This way GREEN functions can safely access RED stuff by calling a
+ * TURN_RED function as preparation.
+ *
+ * The analysis does not attempt to prove anything about the body of a TURN_RED
+ * function.  (Both annotations are trusted; only unannotated code is checked
+ * for errors.)
+ */
+const RED = 'JS_REQUIRES_STACK';
+const TURN_RED = 'JS_FORCES_STACK';
+
+function attrs(tree) {
+  let a = DECL_P(tree) ? DECL_ATTRIBUTES(tree) : TYPE_ATTRIBUTES(TREE_TYPE(tree));
+  return translate_attributes(a);
+}
+
+function hasUserAttribute(tree, attrname) {
+  let attributes = attrs(tree);
+  if (attributes) {
+    for (let i = 0; i < attributes.length; i++) {
+      let attr = attributes[i];
+      if (attr.name == 'user' && attr.value.length == 1 && attr.value[0] == attrname)
+        return true;
+    }
+  }
+  return false;
+}
+
+/*
+ * x is an expression or decl.  These helpers test for the RED and TURN_RED user attributes.
+ */
+function isRed(x) { return hasUserAttribute(x, RED); }
+function isTurnRed(x) { return hasUserAttribute(x, TURN_RED); }
+
+function process_tree(fndecl)
+{
+  if (!(isRed(fndecl) || isTurnRed(fndecl))) {
+    // Ordinarily a user of ESP runs the analysis, then generates output based
+    // on the results.  But in our case (a) we need sub-basic-block resolution,
+    // which ESP doesn't keep; (b) it so happens that even though ESP can
+    // iterate over blocks multiple times, in our case that won't cause
+    // spurious output.  (It could cause us to emit the same error message each
+    // time through--but that's easily avoided.)  Therefore we generate the output
+    // while the ESP analysis is running.
+    let a = new RedGreenCheck(fndecl, 0);
+    if (a.hasRed)
+      a.run();
+  }
+}
+
+function RedGreenCheck(fndecl, trace) {
+  //print("RedGreenCheck: " + fndecl.toCString());
+  this._fndecl = fndecl;
+
+  // Tell ESP that fndecl is a "property variable".  This makes ESP track it in
+  // a flow-sensitive way.  The variable will be 1 in RED regions and "don't
+  // know" in GREEN regions.  (We are technically lying to ESP about fndecl
+  // being a variable--what we really want is a synthetic variable indicating
+  // RED/GREEN state, but ESP operates on GCC decl nodes.)
+  this._state_var_decl = fndecl;
+  let state_var = new ESP.PropVarSpec(this._state_var_decl, true, undefined);
+
+  // Call base class constructor.
+  let cfg = function_decl_cfg(fndecl);
+  ESP.Analysis.apply(this, [cfg, [state_var], Zero_NonZero.meet, trace]);
+  this.join = Zero_NonZero.join;
+
+  // Preprocess all instructions in the cfg to determine whether this analysis
+  // is necessary and gather some information we'll use later.
+  //
+  // Each isn may include a function call, an assignment, and/or some reads.
+  // Using walk_tree to walk the isns is a little crazy but robust.
+  //
+  this.hasRed = false;
+  let self = this;  // capture |this| for the walk_tree callback below
+  for (let bb in cfg_bb_iterator(cfg)) {
+    for (let isn in bb_isn_iterator(bb)) {
+      walk_tree(isn, function(t, stack) {
+        switch (TREE_CODE(t)) {
+          case FIELD_DECL:
+            if (isRed(t)) {
+              let varName = dehydra_convert(t).name;
+              // location_of(t) is the location of the declaration.
+              isn.redInfo = ["cannot access JS_REQUIRES_STACK variable " + varName,
+                             location_of(stack[stack.length - 1])];
+              self.hasRed = true;
+            }
+            break;
+          case CALL_EXPR:
+          {
+            let callee = call_function_decl(t);
+            if (callee) {
+              if (isRed(callee)) {
+                let calleeName = dehydra_convert(callee).name;
+                isn.redInfo = ["cannot call JS_REQUIRES_STACK function " + calleeName,
+                               location_of(t)];
+                self.hasRed = true;
+              } else if (isTurnRed(callee)) {
+                isn.turnRed = true;
+              }
+            }
+          }
+          break;
+        }
+      });
+    }
+  }
+
+  // Initialize mixin for infeasible-path elimination.
+  this._zeroNonzero = new Zero_NonZero.Zero_NonZero();
+}
+
+RedGreenCheck.prototype = new ESP.Analysis;
+
+RedGreenCheck.prototype.flowStateCond = function(isn, truth, state) {
+  // forward event to mixin
+  this._zeroNonzero.flowStateCond(isn, truth, state);
+};
+
+RedGreenCheck.prototype.flowState = function(isn, state) {
+  // forward event to mixin
+  //try { // The try/catch here is a workaround for some baffling bug in zero_nonzero.
+    this._zeroNonzero.flowState(isn, state);
+  //} catch (exc) {
+  //  warning(exc, location_of(isn));
+  //  warning("(Remove the workaround in jsstack.js and recompile to get a JS stack trace.)",
+  //          location_of(isn));
+  //}
+  let green = (state.get(this._state_var_decl) != 1);
+  let redInfo = isn.redInfo;
+  if (green && redInfo) {
+    error(redInfo[0], redInfo[1]);
+    delete isn.redInfo;  // avoid duplicate messages about this instruction
+  }
+
+  // If we call a TURN_RED function, it doesn't take effect until after the
+  // whole isn finishes executing (the most conservative rule).
+  if (isn.turnRed)
+    state.assignValue(this._state_var_decl, 1, isn);
+};
+
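
To make the analysis concrete: in a GREEN function, jsstack.js reports any call to a JS_REQUIRES_STACK function and any access to a JS_REQUIRES_STACK member, unless the statement is dominated by a call to a JS_FORCES_STACK function. The C++ sketch below is illustrative only; DoRedThing and GreenCaller are hypothetical, and js_GetTopStackFrame is assumed to be the JS_FORCES_STACK barrier described in jstypes.h above.

    #include "jscntxt.h"

    extern JS_REQUIRES_STACK void DoRedThing(JSContext *cx);   /* RED */

    void
    GreenCaller(JSContext *cx)
    {
        /* Calling DoRedThing here, before any barrier, would be reported:
         * "cannot call JS_REQUIRES_STACK function ..." */

        js_GetTopStackFrame(cx);   /* JS_FORCES_STACK: dominated code turns RED */

        DoRedThing(cx);            /* accepted: dominated by the barrier call */
    }
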