Bug 525120 - move native stack off the C stack (fixes native global frame alignment) (r=dvander)
author Luke Wagner <lw@mozilla.com>
Wed, 28 Oct 2009 16:44:44 -0700
changeset 34572 2520976741335174b76ba2790c22d8f08a687dc7
parent 34571 443bc58ad7c9e32d418985b2a5b4ecea7ca4f686
child 34573 24b4d2efe0b4e648372b65b7063bf059ee947bad
push id unknown
push user unknown
push date unknown
reviewers dvander
bugs 525120
milestone 1.9.3a1pre
js/src/jscntxt.h
js/src/jstracer.cpp
js/src/jstracer.h
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -84,51 +84,113 @@ typedef struct JSGSNCache {
 
 extern void
 js_PurgeGSNCache(JSGSNCache *cache);
 
 /* These helper macros take a cx as parameter and operate on its GSN cache. */
 #define JS_PURGE_GSN_CACHE(cx)      js_PurgeGSNCache(&JS_GSN_CACHE(cx))
 #define JS_METER_GSN_CACHE(cx,cnt)  GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
 
-typedef struct InterpState InterpState;
-typedef struct VMSideExit VMSideExit;
-
-namespace nanojit {
+/* Forward declarations of nanojit types. */
+namespace nanojit
+{
     class Assembler;
     class CodeAlloc;
     class Fragment;
     class LirBuffer;
 #ifdef DEBUG
     class LabelMap;
 #endif
-    extern "C++" {
-        template<typename K> struct DefaultHash;
-        template<typename K, typename V, typename H> class HashMap;
-        template<typename T> class Seq;
-    }
+    template<typename K> struct DefaultHash;
+    template<typename K, typename V, typename H> class HashMap;
+    template<typename T> class Seq;
 }
+
+/* Tracer constants. */
+static const size_t MONITOR_N_GLOBAL_STATES = 4;
+static const size_t FRAGMENT_TABLE_SIZE = 512;
+static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
+static const size_t MAX_CALL_STACK_ENTRIES = 500;
+static const size_t MAX_GLOBAL_SLOTS = 4096;
+static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
+
+/* Forward declarations of tracer types. */
+class TreeInfo;
+class VMAllocator;
+class TraceRecorder;
+class FrameInfoCache;
+struct REHashFn;
+struct REHashKey;
+struct FrameInfo;
+struct VMSideExit;
+struct VMFragment;
+struct InterpState;
+template<typename T> class Queue;
+typedef Queue<uint16> SlotList;
+typedef nanojit::HashMap<REHashKey, nanojit::Fragment*, REHashFn> REHashMap;
+
 #if defined(JS_JIT_SPEW) || defined(DEBUG)
 struct FragPI;
 typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap;
 #endif
-class TraceRecorder;
-class VMAllocator;
-extern "C++" { template<typename T> class Queue; }
-typedef Queue<uint16> SlotList;
+
+/* Holds the execution state during trace execution. */
+struct InterpState
+{
+    double*        sp;                  // native stack pointer, stack[0] is spbase[0]
+    FrameInfo**    rp;                  // call stack pointer
+    JSContext*     cx;                  // current VM context handle
+    double*        eos;                 // first unusable word after the native stack; start of the global frame
+    void*          eor;                 // first unusable word after the call stack
+    void*          sor;                 // start of rp stack
+    VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
+    VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
+                                        // call exit guard mismatched
+    void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
+    VMSideExit*    outermostTreeExitGuard; // the last side exit returned by js_CallTree
+    TreeInfo*      outermostTree;       // the outermost tree we initially invoked
+    double*        stackBase;           // native stack base
+    FrameInfo**    callstackBase;       // call stack base
+    uintN*         inlineCallCountp;    // inline call count counter
+    VMSideExit**   innermostNestedGuardp;
+    VMSideExit*    innermost;
+    uint64         startTime;
+    InterpState*   prev;
 
-#define FRAGMENT_TABLE_SIZE 512
-struct VMFragment;
+    // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
+    // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
+    // if an error or exception occurred.
+    uint32         builtinStatus;
+
+    // Used to communicate the location of the return value in case of a deep bail.
+    double*        deepBailSp;
+
+
+    // Used when calling natives from trace to root the vp vector.
+    uintN          nativeVpLen;
+    jsval*         nativeVp;
+};
 
-struct REHashKey;
-struct REHashFn;
-class FrameInfoCache;
-typedef nanojit::HashMap<REHashKey, nanojit::Fragment*, REHashFn> REHashMap;
+/*
+ * Storage for the native stack, global frame, and call stack used during
+ * trace execution. Generated code depends on the fact that the globals begin
+ * |MAX_NATIVE_STACK_SLOTS| doubles after the stack begins. Thus, on trace,
+ * |InterpState::eos| holds a pointer to the first global.
+ */
+struct TraceNativeStorage
+{
+    double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
+    FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];
 
-#define MONITOR_N_GLOBAL_STATES 4
+    double *stack() { return stack_global_buf; }
+    double *global() { return stack_global_buf + MAX_NATIVE_STACK_SLOTS; }
+    FrameInfo **callstack() { return callstack_buf; }
+};
+
+/* Holds data to track a single global object. */
 struct GlobalState {
     JSObject*               globalObj;
     uint32                  globalShape;
     SlotList*               globalSlots;
 };
 
 /*
  * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
@@ -144,16 +206,23 @@ struct JSTraceMonitor {
      * !tracecx && !recorder: not on trace
      * !tracecx && recorder: recording
      * tracecx && !recorder: executing a trace
      * tracecx && recorder: executing inner loop, recording outer loop
      */
     JSContext               *tracecx;
 
     /*
+     * Cached storage to use when executing on trace. While we may enter nested
+     * traces, we always reuse the outer trace's storage, so we never need
+     * more than one of these.
+     */
+    TraceNativeStorage      storage;
+
+    /*
      * There are 3 allocators here. This might seem like overkill, but they
      * have different lifecycles, and by keeping them separate we keep the
      * amount of retained memory down significantly.
      *
      * The dataAlloc has the lifecycle of the monitor. It's flushed only
      * when the monitor is flushed.
      *
      * The traceAlloc has the same flush lifecycle as the dataAlloc, but
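
The layout contract above can be exercised in isolation. What follows is a minimal standalone sketch, not part of the patch, mirroring the constants and struct declared in jscntxt.h (the real instance lives in JSTraceMonitor; main() is just a harness):

    #include <cassert>
    #include <cstddef>

    static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
    static const size_t MAX_GLOBAL_SLOTS = 4096;
    static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
    static const size_t MAX_CALL_STACK_ENTRIES = 500;

    struct FrameInfo;

    struct TraceNativeStorage
    {
        double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
        FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];

        double *stack() { return stack_global_buf; }
        double *global() { return stack_global_buf + MAX_NATIVE_STACK_SLOTS; }
        FrameInfo **callstack() { return callstack_buf; }
    };

    int main()
    {
        // ~68 KiB with 8-byte doubles and pointers: exactly the allocation
        // this patch moves off the C stack and into the trace monitor.
        static TraceNativeStorage storage;

        // On trace, |InterpState::eos| points at the first global, exactly
        // MAX_NATIVE_STACK_SLOTS doubles past the stack base.
        assert(storage.global() == storage.stack() + MAX_NATIVE_STACK_SLOTS);
        return 0;
    }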
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -138,25 +138,16 @@ static const char tagChar[]  = "OIDISIBI
 #define MAXPEERS 9
 
 /* Max number of hits to a RECURSIVE_UNLINKED exit before we trash the tree. */
 #define MAX_RECURSIVE_UNLINK_HITS 64
 
 /* Max call depths for inlining. */
 #define MAX_CALLDEPTH 10
 
-/* Max native stack size. */
-#define MAX_NATIVE_STACK_SLOTS 4096
-
-/* Max call stack size. */
-#define MAX_CALL_STACK_ENTRIES 500
-
-/* Max global object size. */
-#define MAX_GLOBAL_SLOTS 4096
-
 /* Max number of slots in a table-switch. */
 #define MAX_TABLE_SWITCH 256
 
 /* Max memory needed to rebuild the interpreter stack when falling off trace. */
 #define MAX_INTERP_STACK_BYTES                                                \
     (MAX_NATIVE_STACK_SLOTS * sizeof(jsval) +                                 \
      MAX_CALL_STACK_ENTRIES * sizeof(JSInlineFrame) +                         \
      sizeof(JSInlineFrame)) /* possibly slow native frame at top of stack */
@@ -2565,18 +2556,18 @@ TraceRecorder::p2i(nanojit::LIns* ins)
 }
 
 /* Determine the offset in the native global frame for a jsval we track. */
 ptrdiff_t
 TraceRecorder::nativeGlobalOffset(jsval* p) const
 {
     JS_ASSERT(isGlobal(p));
     if (size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS)
-        return sizeof(InterpState) + size_t(p - globalObj->fslots) * sizeof(double);
-    return sizeof(InterpState) + ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
+        return size_t(p - globalObj->fslots) * sizeof(double);
+    return ((p - globalObj->dslots) + JS_INITIAL_NSLOTS) * sizeof(double);
 }
 
 /* Determine whether a value is a global stack slot. */
 bool
 TraceRecorder::isGlobal(jsval* p) const
 {
     return ((size_t(p - globalObj->fslots) < JS_INITIAL_NSLOTS) ||
             (size_t(p - globalObj->dslots) < (STOBJ_NSLOTS(globalObj) - JS_INITIAL_NSLOTS)));
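
The dropped sizeof(InterpState) term is the whole change here: global slot offsets are now measured from |eos| rather than from the end of an alloca'd state block. A standalone sketch contrasting the two schemes, with stand-in sizes (the real JS_INITIAL_NSLOTS and InterpState live in the engine):

    #include <cassert>
    #include <cstddef>

    // Stand-ins; the real values live in the engine.
    static const size_t JS_INITIAL_NSLOTS_STANDIN = 5;
    struct InterpStateStandIn { void *fields[20]; };

    // New scheme: a global's byte offset is measured from |eos| directly.
    static ptrdiff_t newOffset(size_t slot)
    {
        return ptrdiff_t(slot * sizeof(double));
    }

    // Old scheme: globals trailed the alloca'd InterpState block.
    static ptrdiff_t oldOffset(size_t slot)
    {
        return ptrdiff_t(sizeof(InterpStateStandIn) + slot * sizeof(double));
    }

    int main()
    {
        // dslots[2] is global slot JS_INITIAL_NSLOTS + 2 overall.
        size_t slot = JS_INITIAL_NSLOTS_STANDIN + 2;
        assert(oldOffset(slot) - newOffset(slot) ==
               ptrdiff_t(sizeof(InterpStateStandIn)));
        return 0;
    }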
@@ -3627,17 +3618,17 @@ TraceRecorder::import(TreeInfo* treeInfo
      * Check whether there are any values on the stack we have to unbox and do
      * that first before we waste any time fetching the state from the stack.
      */
     if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
         ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, offset, typeMap);
         VisitStackSlots(boxedStackVisitor, cx, callDepth);
     }
 
-    ImportGlobalSlotVisitor globalVisitor(*this, lirbuf->state, globalTypeMap);
+    ImportGlobalSlotVisitor globalVisitor(*this, eos_ins, globalTypeMap);
     VisitGlobalSlots(globalVisitor, cx, globalObj, ngslots,
                      treeInfo->globalSlots->data());
 
     if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
         ImportUnboxedStackSlotVisitor unboxedStackVisitor(*this, sp, offset,
                                                           typeMap);
         VisitStackSlots(unboxedStackVisitor, cx, callDepth);
     }
@@ -3687,18 +3678,17 @@ TraceRecorder::lazilyImportGlobalSlot(un
 
     /* Add the slot to the list of interned global slots. */
     JS_ASSERT(treeInfo->nGlobalTypes() == treeInfo->globalSlots->length());
     treeInfo->globalSlots->add(slot);
     JSTraceType type = getCoercedType(*vp);
     if (type == TT_INT32 && oracle.isGlobalSlotUndemotable(cx, slot))
         type = TT_DOUBLE;
     treeInfo->typeMap.add(type);
-    import(lirbuf->state, sizeof(struct InterpState) + slot*sizeof(double),
-           vp, type, "global", index, NULL);
+    import(eos_ins, slot*sizeof(double), vp, type, "global", index, NULL);
     SpecializeTreesToMissingGlobals(cx, globalObj, treeInfo);
     return true;
 }
 
 /* Write back a value onto the stack or global frames. */
 LIns*
 TraceRecorder::writeBack(LIns* i, LIns* base, ptrdiff_t offset, bool demote)
 {
@@ -3725,34 +3715,34 @@ TraceRecorder::set(jsval* p, LIns* i, bo
      * If we are writing to this location for the first time, calculate the
      * offset into the native frame manually. Otherwise just look up the last
      * load or store associated with the same source address (p) and use the
      * same offset/base.
      */
     LIns* x = nativeFrameTracker.get(p);
     if (!x) {
         if (isGlobal(p))
-            x = writeBack(i, lirbuf->state, nativeGlobalOffset(p), demote);
+            x = writeBack(i, eos_ins, nativeGlobalOffset(p), demote);
         else
             x = writeBack(i, lirbuf->sp, -treeInfo->nativeStackBase + nativeStackOffset(p), demote);
         nativeFrameTracker.set(p, x);
     } else {
         JS_ASSERT(x->isop(LIR_sti) || x->isop(LIR_stqi));
 
         int disp;
         LIns *base = x->oprnd2();
 #ifdef NANOJIT_ARM
         if (base->isop(LIR_piadd)) {
             disp = base->oprnd2()->imm32();
             base = base->oprnd1();
         } else
 #endif
         disp = x->disp();
 
-        JS_ASSERT(base == lirbuf->sp || base == lirbuf->state);
+        JS_ASSERT(base == lirbuf->sp || base == eos_ins);
         JS_ASSERT(disp == ((base == lirbuf->sp) ?
                   -treeInfo->nativeStackBase + nativeStackOffset(p) :
                   nativeGlobalOffset(p)));
 
         writeBack(i, base, disp, demote);
     }
 }
 
@@ -3763,17 +3753,17 @@ TraceRecorder::get(jsval* p)
     checkForGlobalObjectReallocation();
     return tracker.get(p);
 }
 
 JS_REQUIRES_STACK LIns*
 TraceRecorder::addr(jsval* p)
 {
     return isGlobal(p)
-           ? lir->ins2(LIR_piadd, lirbuf->state, INS_CONSTWORD(nativeGlobalOffset(p)))
+           ? lir->ins2(LIR_piadd, eos_ins, INS_CONSTWORD(nativeGlobalOffset(p)))
            : lir->ins2(LIR_piadd, lirbuf->sp,
                        INS_CONSTWORD(-treeInfo->nativeStackBase + nativeStackOffset(p)));
 }
 
 JS_REQUIRES_STACK bool
 TraceRecorder::known(jsval* p)
 {
     checkForGlobalObjectReallocation();
@@ -3845,17 +3835,17 @@ public:
         return mTypeMap;
     }
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     visitGlobalSlot(jsval *vp, unsigned n, unsigned slot) {
         LIns *ins = mRecorder.get(vp);
         bool isPromote = isPromoteInt(ins);
         if (isPromote && *mTypeMap == TT_DOUBLE) {
-            mLir->insStorei(mRecorder.get(vp), mLirbuf->state,
+            mLir->insStorei(mRecorder.get(vp), mRecorder.eos_ins,
                             mRecorder.nativeGlobalOffset(vp));
 
             /*
              * Aggressively undo speculation so the inner tree will compile
              * if this fails.
              */
             oracle.markGlobalSlotUndemotable(mCx, slot);
         }
@@ -6379,126 +6369,134 @@ ExecuteTree(JSContext* cx, Fragment* f, 
         return NULL;
     }
 
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
     unsigned ngslots = ti->globalSlots->length();
     uint16* gslots = ti->globalSlots->data();
     unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
+    JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
 
     /* Make sure the global object is sane. */
     JS_ASSERT_IF(ngslots != 0,
                  OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) ==
                  ((VMFragment*)f)->globalShape);
 
     /* Make sure our caller replenished the double pool. */
     JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
 
     /* Reserve objects and stack space now, to make leaving the tree infallible. */
     if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
         return NULL;
 
-    /* Set up the interpreter state block, which is followed by the native global frame. */
-    InterpState* state = (InterpState*)alloca(sizeof(InterpState) + (globalFrameSize+1)*sizeof(double));
-    state->cx = cx;
-    state->inlineCallCountp = &inlineCallCount;
-    state->innermostNestedGuardp = innermostNestedGuardp;
-    state->outermostTree = ti;
-    state->lastTreeExitGuard = NULL;
-    state->lastTreeCallGuard = NULL;
-    state->rpAtLastTreeCall = NULL;
-    state->nativeVp = NULL;
-    state->builtinStatus = 0;
+    /*
+     * Set up the interpreter state. For the native stacks and global frame,
+     * reuse the storage in |tm->storage|. This reuse depends on the invariant
+     * that only one trace uses |tm->storage| at a time. This is subtly correct
+     * even across deep bails; see the comment for |deepBailSp| in js_DeepBail.
+     */
+    InterpState state;
+    state.cx = cx;
+    state.inlineCallCountp = &inlineCallCount;
+    state.innermostNestedGuardp = innermostNestedGuardp;
+    state.outermostTree = ti;
+    state.lastTreeExitGuard = NULL;
+    state.lastTreeCallGuard = NULL;
+    state.rpAtLastTreeCall = NULL;
+    state.nativeVp = NULL;
+    state.builtinStatus = 0;
 
     /* Set up the native global frame. */
-    double* global = (double*)(state+1);
+    double* global = tm->storage.global();
 
     /* Set up the native stack frame. */
-    double stack_buffer[MAX_NATIVE_STACK_SLOTS];
-    state->stackBase = stack_buffer;
-    state->sp = stack_buffer + (ti->nativeStackBase/sizeof(double));
-    state->eos = stack_buffer + MAX_NATIVE_STACK_SLOTS;
+    double* stack = tm->storage.stack();
+    state.stackBase = stack;
+    state.sp = stack + (ti->nativeStackBase/sizeof(double));
+    state.eos = tm->storage.global();
+    JS_ASSERT(state.eos == stack + MAX_NATIVE_STACK_SLOTS);
+    JS_ASSERT(state.sp < state.eos);
 
     /*
      * inlineCallCount has already been incremented, if being invoked from
      * EnterFrame. It is okay to have a 0-frame restriction since the JIT
      * might not need any frames.
      */
     JS_ASSERT(inlineCallCount <= JS_MAX_INLINE_CALL_COUNT);
 
     /* Set up the native call stack frame. */
-    FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
-    state->callstackBase = callstack_buffer;
-    state->rp = callstack_buffer;
-    state->eor = callstack_buffer +
-                 JS_MIN(MAX_CALL_STACK_ENTRIES, JS_MAX_INLINE_CALL_COUNT - inlineCallCount);
-    state->sor = state->rp;
+    FrameInfo** callstack = tm->storage.callstack();
+    state.callstackBase = callstack;
+    state.sor = callstack;
+    state.rp = callstack;
+    state.eor = callstack + JS_MIN(MAX_CALL_STACK_ENTRIES,
+                                   JS_MAX_INLINE_CALL_COUNT - inlineCallCount);
 
 #ifdef DEBUG
-    memset(stack_buffer, 0xCD, sizeof(stack_buffer));
-    memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
-    JS_ASSERT(globalFrameSize <= MAX_GLOBAL_SLOTS);
+    memset(stack, 0xCD, MAX_NATIVE_STACK_SLOTS * sizeof(double));
+    memset(global, 0xCD, GLOBAL_SLOTS_BUFFER_SIZE * sizeof(double));
+    memset(callstack, 0xCD, MAX_CALL_STACK_ENTRIES * sizeof(FrameInfo*));
 #endif
 
     debug_only_stmt(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
     debug_only_printf(LC_TMTracer,
                       "entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
                       cx->fp->script->filename,
                       js_FramePCToLineNumber(cx, cx->fp),
                       FramePCOffset(cx->fp),
                       ti->maxNativeStackSlots,
                       f->code());
 
     JS_ASSERT(ti->nGlobalTypes() == ngslots);
     BuildNativeFrame(cx, globalObj, 0 /* callDepth */, ngslots, gslots,
-                     ti->typeMap.data(), global, stack_buffer);
+                     ti->typeMap.data(), global, stack);
 
     union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
     u.code = f->code();
 
 #ifdef EXECUTE_TREE_TIMER
-    state->startTime = rdtsc();
+    state.startTime = rdtsc();
 #endif
 
     JS_ASSERT(!tm->tracecx);
     tm->tracecx = cx;
-    state->prev = cx->interpState;
-    cx->interpState = state;
+    state.prev = cx->interpState;
+    cx->interpState = &state;
 
     debug_only_stmt(fflush(NULL));
     GuardRecord* rec;
 
     // Note that the block scoping is crucial here for TraceVis;  the
     // TraceVisStateObj constructors and destructors must run at the right times.
     {
 #ifdef MOZ_TRACEVIS
         TraceVisStateObj tvso_n(cx, S_NATIVE);
 #endif
 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
-        SIMULATE_FASTCALL(rec, state, NULL, u.func);
+        SIMULATE_FASTCALL(rec, &state, NULL, u.func);
 #else
-        rec = u.func(state, NULL);
+        rec = u.func(&state, NULL);
 #endif
     }
 
     JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
-    JS_ASSERT(!state->nativeVp);
+    JS_ASSERT(!state.nativeVp);
 
     VMSideExit* lr = (VMSideExit*)rec->exit;
 
     AUDIT(traceTriggered);
 
-    cx->interpState = state->prev;
+    cx->interpState = state.prev;
 
     JS_ASSERT(!cx->bailExit);
     JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
     tm->tracecx = NULL;
-    LeaveTree(*state, lr);
-    return state->innermost;
+    LeaveTree(state, lr);
+    return state.innermost;
 }
 
 static JS_FORCES_STACK void
 LeaveTree(InterpState& state, VMSideExit* lr)
 {
     VOUCH_DOES_NOT_REQUIRE_STACK();
 
     JSContext* cx = state.cx;
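
Summing up the setup above: every InterpState area now points into |tm->storage| instead of at alloca/stack buffers. A standalone sketch of that wiring with plain pointers and stand-in constants (kMaxInlineCalls stands in for JS_MAX_INLINE_CALL_COUNT, and the slot counts are hypothetical):

    #include <algorithm>
    #include <cassert>
    #include <cstddef>

    struct FrameInfo;

    static const size_t MAX_NATIVE_STACK_SLOTS = 4096;    // as in the patch
    static const size_t GLOBAL_SLOTS_BUFFER_SIZE = 4097;  // as in the patch
    static const size_t MAX_CALL_STACK_ENTRIES = 500;     // as in the patch
    static const size_t kMaxInlineCalls = 3000;  // stand-in for JS_MAX_INLINE_CALL_COUNT

    static double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
    static FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];

    int main()
    {
        size_t nativeStackBaseSlots = 64;  // hypothetical ti->nativeStackBase / sizeof(double)
        size_t inlineCallCount = 0;

        double *stackBase = stack_global_buf;              // state.stackBase
        double *sp  = stackBase + nativeStackBaseSlots;    // state.sp
        double *eos = stackBase + MAX_NATIVE_STACK_SLOTS;  // state.eos == first global
        assert(sp < eos);

        FrameInfo **callstackBase = callstack_buf;  // state.callstackBase
        FrameInfo **rp = callstackBase;             // state.sor == state.rp on entry
        FrameInfo **eor = callstackBase +
                          std::min(MAX_CALL_STACK_ENTRIES,
                                   kMaxInlineCalls - inlineCallCount);
        assert(rp < eor);
        return 0;
    }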
@@ -6807,19 +6805,18 @@ LeaveTree(InterpState& state, VMSideExit
                               innermost->stackTypeMap(),
                               stack, NULL, ignoreSlots);
     JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
 
     if (innermost->nativeCalleeWord)
         SynthesizeSlowNativeFrame(state, cx, innermost);
 
     /* Write back interned globals. */
-    double* global = (double*)(&state + 1);
-    FlushNativeGlobalFrame(cx, global,
-                           ngslots, gslots, globalTypeMap);
+    JS_ASSERT(state.eos == state.stackBase + MAX_NATIVE_STACK_SLOTS);
+    FlushNativeGlobalFrame(cx, state.eos, ngslots, gslots, globalTypeMap);
 #ifdef DEBUG
     /* Verify that our state restoration worked. */
     for (JSStackFrame* fp = cx->fp; fp; fp = fp->down) {
         JS_ASSERT_IF(fp->argv, JSVAL_IS_OBJECT(fp->argv[-1]));
     }
 #endif
 #ifdef JS_JIT_SPEW
     if (innermost->exitType != TIMEOUT_EXIT)
@@ -7800,16 +7797,26 @@ js_DeepBail(JSContext *cx)
 
     tm->tracecx = NULL;
     debug_only_print0(LC_TMTracer, "Deep bail.\n");
     LeaveTree(*tracecx->interpState, tracecx->bailExit);
     tracecx->bailExit = NULL;
 
     InterpState* state = tracecx->interpState;
     state->builtinStatus |= JSBUILTIN_BAILED;
+
+    /*
+     * Between now and the LeaveTree in ExecuteTree, |tm->storage| may be reused
+     * if another trace executes before the currently executing native returns.
+     * However, all such traces will have completed by the time the native
+     * returns and its return value is written to the native stack. After
+     * that point, no traces may execute until the LeaveTree in ExecuteTree,
+     * so the invariant that only one trace uses |tm->storage| at a time is
+     * maintained.
+     */
     state->deepBailSp = state->sp;
 }
 
 JS_REQUIRES_STACK jsval&
 TraceRecorder::argval(unsigned n) const
 {
     JS_ASSERT(n < cx->fp->fun->nargs);
     return cx->fp->argv[n];
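
The comment added in js_DeepBail above states a temporal invariant, not a structural one. A toy standalone model of the ordering it relies on, with purely illustrative names that do not exist in the engine: nested traces may clobber the shared buffer while the deep-bailed native runs, but they finish before the native writes its result through |deepBailSp|:

    #include <cassert>
    #include <cstddef>

    static double sharedStack[8];  // toy stand-in for tm->storage's native stack
    static double *deepBailSp;     // toy stand-in for InterpState::deepBailSp

    // A nested trace may freely reuse the shared buffer...
    static void RunNestedTrace()
    {
        for (size_t i = 0; i < 8; ++i)
            sharedStack[i] = 0.0;
        // ...but it is done with the buffer by the time it returns.
    }

    int main()
    {
        deepBailSp = &sharedStack[4];  // DeepBail: record where the result goes
        RunNestedTrace();              // other traces run while the native executes
        *deepBailSp = 3.14;            // native's result written only after they end
        assert(sharedStack[4] == 3.14);
        return 0;
    }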
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -684,63 +684,21 @@ public:
     }
     inline JSTraceType* stackTypeMap() {
         return typeMap.data();
     }
 
     UnstableExit* removeUnstableExit(VMSideExit* exit);
 };
 
-#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || defined(NANOJIT_X64))
-# define EXECUTE_TREE_TIMER
-#endif
-
 typedef enum JSBuiltinStatus {
     JSBUILTIN_BAILED = 1,
     JSBUILTIN_ERROR = 2
 } JSBuiltinStatus;
 
-struct InterpState
-{
-    double*        sp;                  // native stack pointer, stack[0] is spbase[0]
-    FrameInfo**    rp;                  // call stack pointer
-    JSContext*     cx;                  // current VM context handle
-    double*        eos;                 // first unusable word after the native stack
-    void*          eor;                 // first unusable word after the call stack
-    void*          sor;                 // start of rp stack
-    VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
-    VMSideExit*    lastTreeCallGuard;   // guard we want to grow from if the tree
-                                        // call exit guard mismatched
-    void*          rpAtLastTreeCall;    // value of rp at innermost tree call guard
-    VMSideExit*    outermostTreeExitGuard; // the last side exit returned by js_CallTree
-    TreeInfo*      outermostTree;       // the outermost tree we initially invoked
-    double*        stackBase;           // native stack base
-    FrameInfo**    callstackBase;       // call stack base
-    uintN*         inlineCallCountp;    // inline call count counter
-    VMSideExit**   innermostNestedGuardp;
-    VMSideExit*    innermost;
-#ifdef EXECUTE_TREE_TIMER
-    uint64         startTime;
-#endif
-    InterpState*   prev;
-
-    // Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
-    // JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
-    // if an error or exception occurred.
-    uint32         builtinStatus;
-
-    // Used to communicate the location of the return value in case of a deep bail.
-    double*        deepBailSp;
-
-
-    // Used when calling natives from trace to root the vp vector.
-    uintN          nativeVpLen;
-    jsval*         nativeVp;
-};
-
 // Arguments objects created on trace have a private value that points to an
 // instance of this struct. The struct includes a typemap that is allocated
 // as part of the object.
 struct js_ArgsPrivateNative {
     double      *argv;
 
     static js_ArgsPrivateNative *create(VMAllocator &alloc, unsigned argc)
     {