Bug 623297 - Reduce usage of JS_TRACE_MONITOR in jstracer (r=gal)
author: Bill McCloskey <wmccloskey@mozilla.com>
date: Tue, 01 Feb 2011 10:16:44 -0800
changeset: 62032 c7e8f00451a52498cfcafc0dd48f49881873456e
parent: 62031 41e2276b9b788966daf00bab73b9acbb6aa1559c
child: 62033 ee3a4f42994264ab290c27b3b07a96762e9b0de7
push id: 1
push user: root
push date: Tue, 10 Dec 2013 15:46:25 +0000
reviewers: gal
bugs: 623297
milestone: 2.0b11pre
js/src/jsapi.cpp
js/src/jsarray.cpp
js/src/jsbuiltins.cpp
js/src/jscntxt.h
js/src/jscompartment.h
js/src/jsnum.cpp
js/src/jstracer.cpp
js/src/jstracer.h
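
The change repeated across these files: callers resolve the TraceMonitor once and pass it down explicitly, instead of every callee expanding JS_TRACE_MONITOR(cx) (that is, cx->compartment->traceMonitor) on its own. A minimal sketch of the shape of the change, condensed from the Backoff() hunk in jstracer.cpp below:

    // Before: each callee re-derives the monitor from the context.
    static void
    Backoff(JSContext *cx, jsbytecode *pc, Fragment *tree = NULL)
    {
        RecordAttemptMap &table = *JS_TRACE_MONITOR(cx).recordAttempts;
        // ...
    }

    // After: the caller passes the monitor in, so the callee no longer
    // depends on cx->compartment pointing at the expected compartment.
    static void
    Backoff(TraceMonitor *tm, jsbytecode *pc, Fragment *tree = NULL)
    {
        RecordAttemptMap &table = *tm->recordAttempts;
        // ...
    }
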
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -2669,17 +2669,17 @@ JS_GetGCParameterForThread(JSContext *cx
     return 0;
 #endif
 }
 
 JS_PUBLIC_API(void)
 JS_FlushCaches(JSContext *cx)
 {
 #ifdef JS_TRACER
-    FlushJITCache(cx);
+    FlushJITCache(cx, &cx->compartment->traceMonitor);
 #endif
 }
 
 JS_PUBLIC_API(intN)
 JS_AddExternalStringFinalizer(JSStringFinalizeOp finalizer)
 {
     return JSExternalString::changeFinalizer(NULL, finalizer);
 }
--- a/js/src/jsarray.cpp
+++ b/js/src/jsarray.cpp
@@ -2064,22 +2064,24 @@ js_ArrayCompPush(JSContext *cx, JSObject
 {
     return ArrayCompPushImpl(cx, obj, vp);
 }
 
 #ifdef JS_TRACER
 JSBool JS_FASTCALL
 js_ArrayCompPush_tn(JSContext *cx, JSObject *obj, ValueArgType v)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     if (!ArrayCompPushImpl(cx, obj, ValueArgToConstRef(v))) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
 
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_3(extern, BOOL_FAIL, js_ArrayCompPush_tn, CONTEXT, OBJECT,
                      VALUE, 0, nanojit::ACCSET_STORE_ANY)
 #endif
 
 static JSBool
 array_push(JSContext *cx, uintN argc, Value *vp)
 {
--- a/js/src/jsbuiltins.cpp
+++ b/js/src/jsbuiltins.cpp
@@ -67,17 +67,21 @@
 
 using namespace avmplus;
 using namespace nanojit;
 using namespace js;
 
 JS_FRIEND_API(void)
 js_SetTraceableNativeFailed(JSContext *cx)
 {
-    SetBuiltinError(cx);
+    /*
+     * We might not be on trace (we might have deep bailed), so we can
+     * only hope that cx->compartment is correct.
+     */
+    SetBuiltinError(&JS_TRACE_MONITOR(cx));
 }
 
 /*
  * NB: bool FASTCALL is not compatible with Nanojit's calling convention usage.
  * Do not use bool FASTCALL, use JSBool only!
  */
 
 jsdouble FASTCALL
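
The traceable-native hunks above all follow the same recipe: look the monitor up once at entry, then route builtin-status bookkeeping through it rather than through cx. A sketch of the recipe with hypothetical names (SomeNative_tn and DoWork stand in for real natives like js_ArrayCompPush_tn above):

    JSBool JS_FASTCALL
    SomeNative_tn(JSContext *cx, JSObject *obj, ValueArgType v)
    {
        TraceMonitor *tm = &JS_TRACE_MONITOR(cx);  // one lookup per call

        if (!DoWork(cx, obj, ValueArgToConstRef(v))) {
            SetBuiltinError(tm);              // flag failure on this monitor
            return JS_FALSE;
        }
        return WasBuiltinSuccessful(tm);      // false if we deep-bailed meanwhile
    }
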
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -810,27 +810,16 @@ public:
 
 private:
     StackSegment *curcs;
     JSStackFrame *curfp;
 };
 
 } /* namespace js */
 
-/*
- * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
- * thread, regardless of whether cx is the context in which that trace is
- * executing.  cx must be a context on the current thread.
- */
-#ifdef JS_TRACER
-# define JS_ON_TRACE(cx)            (cx->compartment && JS_TRACE_MONITOR(cx).ontrace())
-#else
-# define JS_ON_TRACE(cx)            false
-#endif
-
 #ifdef DEBUG
 # define FUNCTION_KIND_METER_LIST(_)                                          \
                         _(allfun), _(heavy), _(nofreeupvar), _(onlyfreevar),  \
                         _(flat), _(badfunarg),                                \
                         _(joinedsetmethod), _(joinedinitmethod),              \
                         _(joinedreplace), _(joinedsort), _(joinedmodulepat),  \
                         _(mreadbarrier), _(mwritebarrier), _(mwslotbarrier),  \
                         _(unjoined), _(indynamicscope)
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -80,20 +80,23 @@ typedef HashSet<JSScript *,
                 DefaultHasher<JSScript *>,
                 SystemAllocPolicy> TracedScriptSet;
 
 typedef HashMap<JSFunction *,
                 JSString *,
                 DefaultHasher<JSFunction *>,
                 SystemAllocPolicy> ToSourceCache;
 
+struct TraceMonitor;
+
 /* Holds the execution state during trace execution. */
 struct TracerState
 {
     JSContext*     cx;                  // current VM context handle
+    TraceMonitor*  traceMonitor;        // current TM
     double*        stackBase;           // native stack base
     double*        sp;                  // native stack pointer, stack[0] is spbase[0]
     double*        eos;                 // first unusable word after the native stack / begin of globals
     FrameInfo**    callstackBase;       // call stack base
     void*          sor;                 // start of rp stack
     FrameInfo**    rp;                  // call stack pointer
     void*          eor;                 // first unusable word after the call stack
     VMSideExit*    lastTreeExitGuard;   // guard we exited on during a tree call
@@ -446,16 +449,27 @@ struct JS_FRIEND_API(JSCompartment) {
     js::MathCache *getMathCache(JSContext *cx) {
         return mathCache ? mathCache : allocMathCache(cx);
     }
 };
 
 #define JS_TRACE_MONITOR(cx)    (cx->compartment->traceMonitor)
 #define JS_SCRIPTS_TO_GC(cx)    (cx->compartment->scriptsToGC)
 
+/*
+ * N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
+ * thread, regardless of whether cx is the context in which that trace is
+ * executing. cx must be a context on the current thread.
+ */
+#ifdef JS_TRACER
+# define JS_ON_TRACE(cx)            (cx->compartment && JS_TRACE_MONITOR(cx).ontrace())
+#else
+# define JS_ON_TRACE(cx)            false
+#endif
+
 namespace js {
 static inline MathCache *
 GetMathCache(JSContext *cx)
 {
     return cx->compartment->getMathCache(cx);
 }
 }
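
JS_ON_TRACE itself is unchanged; it moves here because it expands through cx->compartment->traceMonitor, so it sits more naturally next to JS_TRACE_MONITOR and the JSCompartment definition. Its contract stays the same, as in this illustrative guard:

    // cx must be a context on the current thread; the check is per-thread,
    // so it is true whenever JIT code is on this thread's stack, even if
    // some other context started the trace.
    if (JS_ON_TRACE(cx)) {
        /* trace JIT code is active somewhere below us */
    }
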
 
--- a/js/src/jsnum.cpp
+++ b/js/src/jsnum.cpp
@@ -328,19 +328,21 @@ num_parseFloat(JSContext *cx, uintN argc
     vp->setNumber(d);
     return JS_TRUE;
 }
 
 #ifdef JS_TRACER
 static jsdouble FASTCALL
 ParseFloat(JSContext* cx, JSString* str)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     const jschar *bp = str->getChars(cx);
     if (!bp) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return js_NaN;
     }
     const jschar *end = bp + str->length();
 
     const jschar *ep;
     double d;
     if (!js_strtod(cx, bp, end, &ep, &d) || ep == bp)
         return js_NaN;
@@ -470,26 +472,28 @@ num_parseInt(JSContext *cx, uintN argc, 
     vp->setNumber(number);
     return true;
 }
 
 #ifdef JS_TRACER
 static jsdouble FASTCALL
 ParseInt(JSContext* cx, JSString* str)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     const jschar *start = str->getChars(cx);
     if (!start) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return js_NaN;
     }
     const jschar *end = start + str->length();
 
     jsdouble d;
     if (!ParseIntStringHelper(cx, start, end, 0, true, &d)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return js_NaN;
     }
     return d;
 }
 
 static jsdouble FASTCALL
 ParseIntDouble(jsdouble d)
 {
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1484,20 +1484,20 @@ IsBlacklisted(jsbytecode* pc)
     if (*pc == JSOP_NOTRACE)
         return true;
     if (*pc == JSOP_CALL)
         return *(pc + JSOP_CALL_LENGTH) == JSOP_NOTRACE;
     return false;
 }
 
 static void
-Backoff(JSContext *cx, jsbytecode* pc, Fragment* tree = NULL)
+Backoff(TraceMonitor *tm, jsbytecode* pc, Fragment* tree = NULL)
 {
     /* N.B. This code path cannot assume the recorder is/is not alive. */
-    RecordAttemptMap &table = *JS_TRACE_MONITOR(cx).recordAttempts;
+    RecordAttemptMap &table = *tm->recordAttempts;
     if (RecordAttemptMap::AddPtr p = table.lookupForAdd(pc)) {
         if (p->value++ > (BL_ATTEMPTS * MAXPEERS)) {
             p->value = 0;
             Blacklist(pc);
             return;
         }
     } else {
         table.add(p, pc, 0);
@@ -1513,19 +1513,19 @@ Backoff(JSContext *cx, jsbytecode* pc, F
          * well.
          */
         if (++tree->recordAttempts > BL_ATTEMPTS)
             Blacklist(pc);
     }
 }
 
 static void
-ResetRecordingAttempts(JSContext *cx, jsbytecode* pc)
-{
-    RecordAttemptMap &table = *JS_TRACE_MONITOR(cx).recordAttempts;
+ResetRecordingAttempts(TraceMonitor *tm, jsbytecode* pc)
+{
+    RecordAttemptMap &table = *tm->recordAttempts;
     if (RecordAttemptMap::Ptr p = table.lookup(pc))
         p->value = 0;
 }
 
 static inline size_t
 FragmentHash(const void *ip, JSObject* globalObj, uint32 globalShape, uint32 argc)
 {
     uintptr_t h = HASH_SEED;
@@ -1574,17 +1574,18 @@ LookupOrAddLoop(TraceMonitor* tm, const 
     RawLookupFirstPeer(tm, ip, globalObj, globalShape, argc, firstInBucket, prevTreeNextp);
     if (TreeFragment *f = *prevTreeNextp)
         return f;
 
     verbose_only(
     uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
                           ? (++(tm->lastFragID)) : 0;
     )
-    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, tm->dataAlloc, globalObj, globalShape,
+    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(ip, tm->dataAlloc, tm->oracle,
+                                                        globalObj, globalShape,
                                                         argc verbose_only(, profFragID));
     f->root = f;                /* f is the root of a new tree */
     *prevTreeNextp = f;         /* insert f at the end of the vmfragments bucket-list */
     f->next = NULL;
     f->first = f;               /* initialize peer-list at f */
     f->peer = NULL;
     return f;
 }
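
The extra constructor argument threads the oracle in at creation time, so TreeFragment (and, later in this patch, TypeMap) never has to reach back through JS_TRACE_MONITOR to find it. Condensed from the hunks in this file, with the verbose-only profiling arguments omitted:

    // TypeMaps are now constructed knowing their oracle...
    TypeMap typeMap(NULL, tm->oracle);        // was: TypeMap typeMap(NULL);

    // ...and TreeFragments receive it alongside their allocator:
    TreeFragment *f = new (*tm->dataAlloc)
        TreeFragment(ip, tm->dataAlloc, tm->oracle, globalObj, globalShape, argc);
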
@@ -1592,19 +1593,19 @@ LookupOrAddLoop(TraceMonitor* tm, const 
 static TreeFragment*
 AddNewPeerToPeerList(TraceMonitor* tm, TreeFragment* peer)
 {
     JS_ASSERT(peer);
     verbose_only(
     uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
                           ? (++(tm->lastFragID)) : 0;
     )
-    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, tm->dataAlloc, peer->globalObj,
-                                                        peer->globalShape, peer->argc
-                                                        verbose_only(, profFragID));
+    TreeFragment* f = new (*tm->dataAlloc) TreeFragment(peer->ip, tm->dataAlloc, tm->oracle,
+                                                        peer->globalObj, peer->globalShape,
+                                                        peer->argc verbose_only(, profFragID));
     f->root = f;                /* f is the root of a new tree */
     f->first = peer->first;     /* add f to peer list */
     f->peer = peer->peer;
     peer->peer = f;
     /* only the |first| Fragment of a peer list needs a valid |next| field */
     debug_only(f->next = (TreeFragment*)0xcdcdcdcd);
     return f;
 }
@@ -1674,24 +1675,22 @@ AssertTreeIsUnique(TraceMonitor* tm, Tre
         if (!peer->code() || peer == f)
             continue;
         JS_ASSERT(!f->typeMap.matches(peer->typeMap));
     }
 }
 #endif
 
 static void
-AttemptCompilation(JSContext *cx, JSObject* globalObj,
+AttemptCompilation(TraceMonitor *tm, JSObject* globalObj,
                    JSScript* script, jsbytecode* pc, uint32 argc)
 {
-    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
-
     /* If we already permanently blacklisted the location, undo that. */
     Unblacklist(script, pc);
-    ResetRecordingAttempts(cx, pc);
+    ResetRecordingAttempts(tm, pc);
 
     /* Breathe new life into all peer fragments at the designated loop header. */
     TreeFragment* f = LookupLoop(tm, pc, globalObj, globalObj->shape(), argc);
     if (!f) {
         /*
          * If the global object's shape changed, we can't easily find the
          * corresponding loop header via a hash table lookup. In this case
          * we simply bail here and hope that the fragment has another
@@ -2068,21 +2067,23 @@ NativeStackSlots(JSContext *cx, unsigned
 class CaptureTypesVisitor : public SlotVisitorBase
 {
     JSContext* mCx;
     JSValueType* mTypeMap;
     JSValueType* mPtr;
     Oracle   * mOracle;
 
 public:
-    JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, JSValueType* typeMap, bool speculate) :
-        mCx(cx),
+    JS_ALWAYS_INLINE CaptureTypesVisitor(JSContext* cx, Oracle *oracle,
+                                         JSValueType* typeMap, bool speculate)
+      : mCx(cx),
         mTypeMap(typeMap),
         mPtr(typeMap),
-        mOracle(speculate ? JS_TRACE_MONITOR(cx).oracle : NULL) {}
+        mOracle(speculate ? oracle : NULL)
+    {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
             JSValueType type = getCoercedType(*vp);
             if (type == JSVAL_TYPE_INT32 && (!mOracle || mOracle->isGlobalSlotUndemotable(mCx, slot)))
                 type = JSVAL_TYPE_DOUBLE;
             JS_ASSERT(type != JSVAL_TYPE_BOXED);
             debug_only_printf(LC_TMTracer,
@@ -2134,30 +2135,31 @@ TypeMap::set(unsigned stackSlots, unsign
  * Capture the type map for the selected slots of the global object and currently pending
  * stack frames.
  */
 JS_REQUIRES_STACK void
 TypeMap::captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth,
                       bool speculate)
 {
     setLength(NativeStackSlots(cx, callDepth) + slots.length());
-    CaptureTypesVisitor visitor(cx, data(), speculate);
+    CaptureTypesVisitor visitor(cx, oracle, data(), speculate);
     VisitSlots(visitor, cx, globalObj, callDepth, slots);
     JS_ASSERT(visitor.length() == length());
 }
 
 JS_REQUIRES_STACK void
-TypeMap::captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned stackSlots,
+TypeMap::captureMissingGlobalTypes(JSContext* cx,
+                                   JSObject* globalObj, SlotList& slots, unsigned stackSlots,
                                    bool speculate)
 {
     unsigned oldSlots = length() - stackSlots;
     int diff = slots.length() - oldSlots;
     JS_ASSERT(diff >= 0);
     setLength(length() + diff);
-    CaptureTypesVisitor visitor(cx, data() + stackSlots + oldSlots, speculate);
+    CaptureTypesVisitor visitor(cx, oracle, data() + stackSlots + oldSlots, speculate);
     VisitGlobalSlots(visitor, cx, globalObj, diff, slots.data() + oldSlots);
 }
 
 /* Compare this type map to another one and see whether they match. */
 bool
 TypeMap::matches(TypeMap& other) const
 {
     if (length() != other.length())
@@ -2227,58 +2229,59 @@ SpecializeTreesToMissingGlobals(JSContex
     root->typeMap.captureMissingGlobalTypes(cx, globalObj, *root->globalSlots, root->nStackTypes,
                                             speculate);
     JS_ASSERT(root->globalSlots->length() == root->typeMap.length() - root->nStackTypes);
 
     SpecializeTreesToLateGlobals(cx, root, root->globalTypeMap(), root->nGlobalTypes());
 }
 
 static void
-ResetJITImpl(JSContext* cx);
+ResetJITImpl(JSContext* cx, TraceMonitor *tm);
 
 #ifdef MOZ_TRACEVIS
 static JS_INLINE void
-ResetJIT(JSContext* cx, TraceVisFlushReason r)
+ResetJIT(JSContext* cx, TraceMonitor *tm, TraceVisFlushReason r)
 {
     LogTraceVisEvent(cx, S_RESET, r);
-    ResetJITImpl(cx);
+    ResetJITImpl(cx, tm);
 }
 #else
-# define ResetJIT(cx, reason) ResetJITImpl(cx)
+# define ResetJIT(cx, tm, reason) ResetJITImpl(cx, tm)
 #endif
 
 void
-FlushJITCache(JSContext *cx)
-{
-    ResetJIT(cx, FR_OOM);
+FlushJITCache(JSContext *cx, TraceMonitor *tm)
+{
+    ResetJIT(cx, tm, FR_OOM);
 }
 
 static void
 TrashTree(TreeFragment* f);
 
 JS_REQUIRES_STACK
-TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment,
+TraceRecorder::TraceRecorder(JSContext* cx, TraceMonitor *tm,
+                             VMSideExit* anchor, VMFragment* fragment,
                              unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                              VMSideExit* innermost, JSScript* outerScript, jsbytecode* outerPC,
                              uint32 outerArgc, bool speculate)
   : cx(cx),
-    traceMonitor(&JS_TRACE_MONITOR(cx)),
-    oracle(speculate ? JS_TRACE_MONITOR(cx).oracle : NULL),
+    traceMonitor(tm),
+    oracle(speculate ? tm->oracle : NULL),
     fragment(fragment),
     tree(fragment->root),
     globalObj(tree->globalObj),
     outerScript(outerScript),
     outerPC(outerPC),
     outerArgc(outerArgc),
     anchor(anchor),
     cx_ins(NULL),
     eos_ins(NULL),
     eor_ins(NULL),
     loopLabel(NULL),
-    importTypeMap(&tempAlloc()),
+    importTypeMap(&tempAlloc(), tm->oracle),
     lirbuf(new (tempAlloc()) LirBuffer(tempAlloc())),
     mark(*traceMonitor->traceAlloc),
     numSideExitsBefore(tree->sideExits.length()),
     tracker(),
     nativeFrameTracker(),
     global_slots(NULL),
     callDepth(anchor ? anchor->calldepth : 0),
     atoms(FrameAtomBase(cx, cx->fp())),
@@ -2500,17 +2503,17 @@ TraceRecorder::finishSuccessfully()
 
     localtm->recorder = NULL;
     /* We can't (easily) use js_delete() here because the constructor is private. */
     this->~TraceRecorder();
     js_free(this);
 
     /* Catch OOM that occurred during recording. */
     if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
-        ResetJIT(localcx, FR_OOM);
+        ResetJIT(localcx, localtm, FR_OOM);
         return ARECORD_ABORTED;
     }
     return ARECORD_COMPLETED;
 }
 
 /* This function aborts a recorder and any pending outer recorders. */
 JS_REQUIRES_STACK TraceRecorder::AbortResult
 TraceRecorder::finishAbort(const char* reason)
@@ -2525,17 +2528,17 @@ TraceRecorder::finishAbort(const char* r
                       tree->treeFileName,
                       tree->treeLineNumber,
                       tree->treePCOffset,
                       cx->fp()->script()->filename,
                       js_FramePCToLineNumber(cx, cx->fp()),
                       FramePCOffset(cx, cx->fp()),
                       reason);
 #endif
-    Backoff(cx, (jsbytecode*) fragment->root->ip, fragment->root);
+    Backoff(traceMonitor, (jsbytecode*) fragment->root->ip, fragment->root);
 
     /*
      * If this is the primary trace and we didn't succeed compiling, trash the
      * tree. Otherwise, remove the VMSideExits we added while recording, which
      * are about to be invalid.
      *
      * BIG FAT WARNING: resetting the length is only a valid strategy as long as
      * there may be only one recorder active for a single TreeInfo at a time.
@@ -2554,17 +2557,17 @@ TraceRecorder::finishAbort(const char* r
 
     localtm->recorder = NULL;
     /* We can't (easily) use js_delete() here because the constructor is private. */
     this->~TraceRecorder();
     js_free(this);
 
     /* Catch OOM that occurred during recording. */
     if (localtm->outOfMemory() || OverfullJITCache(localcx, localtm)) {
-        ResetJIT(localcx, FR_OOM);
+        ResetJIT(localcx, localtm, FR_OOM);
         return JIT_RESET;
     }
     return NORMAL_ABORT;
 }
 
 inline LIns*
 TraceRecorder::w_immpObjGC(JSObject* obj)
 {
@@ -4032,17 +4035,17 @@ public:
         bool isPromote = IsPromotedInt32(ins);
         if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
             mRecorder.w.st(mRecorder.get(vp),
                            EosAddress(mRecorder.eos_ins, mRecorder.nativeGlobalOffset(vp)));
             /*
              * Aggressively undo speculation so the inner tree will compile
              * if this fails.
              */
-            JS_TRACE_MONITOR(mCx).oracle->markGlobalSlotUndemotable(mCx, slot);
+            mRecorder.traceMonitor->oracle->markGlobalSlotUndemotable(mCx, slot);
         }
         JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
         ++mTypeMap;
     }
 };
 
 class AdjustCallerStackTypesVisitor : public SlotVisitorBase
 {
@@ -4073,17 +4076,17 @@ public:
             LIns *ins = mRecorder.get(vp);
             bool isPromote = IsPromotedInt32(ins);
             if (isPromote && *mTypeMap == JSVAL_TYPE_DOUBLE) {
                 mRecorder.w.st(ins, StackAddress(mLirbuf->sp, mRecorder.nativespOffset(vp)));
                 /*
                  * Aggressively undo speculation so the inner tree will compile
                  * if this fails.
                  */
-                JS_TRACE_MONITOR(mCx).oracle->markStackSlotUndemotable(mCx, mSlotnum);
+                mRecorder.traceMonitor->oracle->markStackSlotUndemotable(mCx, mSlotnum);
             }
             JS_ASSERT(!(!isPromote && *mTypeMap == JSVAL_TYPE_INT32));
             ++vp;
             ++mTypeMap;
             ++mSlotnum;
         }
         return true;
     }
@@ -4504,21 +4507,20 @@ TraceRecorder::copy(VMSideExit* copy)
  */
 static inline bool
 ProhibitFlush(TraceMonitor *tm)
 {
     return !!tm->tracerState; // don't flush if we're running a trace
 }
 
 static void
-ResetJITImpl(JSContext* cx)
+ResetJITImpl(JSContext* cx, TraceMonitor* tm)
 {
     if (!cx->traceJitEnabled)
         return;
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     debug_only_print0(LC_TMTracer, "Flushing cache.\n");
     if (tm->recorder) {
         JS_ASSERT_NOT_ON_TRACE(cx);
         AbortRecording(cx, "flush cache");
     }
 #if JS_METHODJIT
     if (tm->profile)
         AbortProfiling(cx);
@@ -4535,17 +4537,17 @@ ResetJITImpl(JSContext* cx)
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::compile()
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_COMPILE);
 #endif
 
     if (traceMonitor->needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, traceMonitor, FR_DEEP_BAIL);
         return ARECORD_ABORTED;
     }
     if (tree->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
         debug_only_print0(LC_TMTracer, "Blacklist: excessive stack use.\n");
         Blacklist((jsbytecode*)tree->ip);
         return ARECORD_STOP;
     }
     if (anchor && anchor->exitType != CASE_EXIT)
@@ -4574,18 +4576,18 @@ TraceRecorder::compile()
         assm->setError(nanojit::None);
         debug_only_print0(LC_TMTracer, "Blacklisted: error during compilation\n");
         Blacklist((jsbytecode*)tree->ip);
         return ARECORD_STOP;
     }
 
     if (outOfMemory())
         return ARECORD_STOP;
-    ResetRecordingAttempts(cx, (jsbytecode*)fragment->ip);
-    ResetRecordingAttempts(cx, (jsbytecode*)tree->ip);
+    ResetRecordingAttempts(traceMonitor, (jsbytecode*)fragment->ip);
+    ResetRecordingAttempts(traceMonitor, (jsbytecode*)tree->ip);
     if (anchor) {
 #ifdef NANOJIT_IA32
         if (anchor->exitType == CASE_EXIT)
             assm->patch(anchor, anchor->switchInfo);
         else
 #endif
             assm->patch(anchor);
     }
@@ -5026,17 +5028,17 @@ TraceRecorder::closeLoop()
     if (tree->code())
         SpecializeTreesToMissingGlobals(cx, globalObj, tree);
 
     /*
      * If this is a newly formed tree, and the outer tree has not been compiled yet, we
      * should try to compile the outer tree again.
      */
     if (outerPC)
-        AttemptCompilation(cx, globalObj, outerScript, outerPC, outerArgc);
+        AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via closeLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
                       js_FramePCToLineNumber(cx, cx->fp()),
                       FramePCOffset(cx, cx->fp()),
                       fragment->profFragID);
     debug_only_print0(LC_TMMinimal, "\n");
@@ -5054,26 +5056,26 @@ FullMapFromExit(TypeMap& typeMap, VMSide
     /* Include globals that were later specialized at the root of the tree. */
     if (exit->numGlobalSlots < exit->root()->nGlobalTypes()) {
         typeMap.fromRaw(exit->root()->globalTypeMap() + exit->numGlobalSlots,
                         exit->root()->nGlobalTypes() - exit->numGlobalSlots);
     }
 }
 
 static JS_REQUIRES_STACK TypeConsensus
-TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, TreeFragment* peer)
+TypeMapLinkability(JSContext* cx, TraceMonitor *tm, const TypeMap& typeMap, TreeFragment* peer)
 {
     const TypeMap& peerMap = peer->typeMap;
     unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
     TypeConsensus consensus = TypeConsensus_Okay;
     for (unsigned i = 0; i < minSlots; i++) {
         if (typeMap[i] == peerMap[i])
             continue;
         if (typeMap[i] == JSVAL_TYPE_INT32 && peerMap[i] == JSVAL_TYPE_DOUBLE &&
-            IsSlotUndemotable(JS_TRACE_MONITOR(cx).oracle, cx, peer, i, peer->ip)) {
+            IsSlotUndemotable(tm->oracle, cx, peer, i, peer->ip)) {
             consensus = TypeConsensus_Undemotes;
         } else {
             return TypeConsensus_Bad;
         }
     }
     return consensus;
 }
 
@@ -5096,28 +5098,28 @@ TraceRecorder::findUndemotesInTypemaps(c
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root)
 {
     if (fragment->root != fragment)
         return;
 
-    TypeMap typeMap(NULL);
+    TypeMap typeMap(NULL, traceMonitor->oracle);
     Queue<unsigned> undemotes(NULL);
 
     for (TreeFragment* peer = peer_root; peer; peer = peer->peer) {
         if (!peer->code())
             continue;
         UnstableExit* uexit = peer->unstableExits;
         while (uexit != NULL) {
             /* Build the full typemap for this unstable exit */
             FullMapFromExit(typeMap, uexit->exit);
             /* Check its compatibility against this tree */
-            TypeConsensus consensus = TypeMapLinkability(cx, typeMap, tree);
+            TypeConsensus consensus = TypeMapLinkability(cx, traceMonitor, typeMap, tree);
             JS_ASSERT_IF(consensus == TypeConsensus_Okay, peer != fragment);
             if (consensus == TypeConsensus_Okay) {
                 debug_only_printf(LC_TMTracer,
                                   "Joining type-stable trace to target exit %p->%p.\n",
                                   (void*)uexit->fragment, (void*)uexit->exit);
 
                 /*
                  * See bug 531513. Before linking these trees, make sure the
@@ -5191,17 +5193,17 @@ TraceRecorder::endLoop(VMSideExit* exit)
     if (tree->code())
         SpecializeTreesToMissingGlobals(cx, globalObj, fragment->root);
 
     /*
      * If this is a newly formed tree, and the outer tree has not been compiled
      * yet, we should try to compile the outer tree again.
      */
     if (outerPC)
-        AttemptCompilation(cx, globalObj, outerScript, outerPC, outerArgc);
+        AttemptCompilation(traceMonitor, globalObj, outerScript, outerPC, outerArgc);
 #ifdef JS_JIT_SPEW
     debug_only_printf(LC_TMMinimal,
                       "Recording completed at  %s:%u@%u via endLoop (FragID=%06u)\n",
                       cx->fp()->script()->filename,
                       js_FramePCToLineNumber(cx, cx->fp()),
                       FramePCOffset(cx, cx->fp()),
                       fragment->profFragID);
     debug_only_print0(LC_TMTracer, "\n");
@@ -5526,17 +5528,17 @@ TraceRecorder::checkTraceEnd(jsbytecode 
  * indicates whether the recorder is still active.  If 'false', any active
  * recording has been aborted and the JIT may have been reset.
  */
 static JS_REQUIRES_STACK bool
 CheckGlobalObjectShape(JSContext* cx, TraceMonitor* tm, JSObject* globalObj,
                        uint32 *shape = NULL, SlotList** slots = NULL)
 {
     if (tm->needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, tm, FR_DEEP_BAIL);
         return false;
     }
 
     if (globalObj->numSlots() > MAX_GLOBAL_SLOTS) {
         if (tm->recorder)
             AbortRecording(cx, "too many slots in global object");
         return false;
     }
@@ -5561,18 +5563,18 @@ CheckGlobalObjectShape(JSContext* cx, Tr
 
         /* Check the global shape matches the recorder's treeinfo's shape. */
         if (globalObj != root->globalObj || globalShape != root->globalShape) {
             AUDIT(globalShapeMismatchAtEntry);
             debug_only_printf(LC_TMTracer,
                               "Global object/shape mismatch (%p/%u vs. %p/%u), flushing cache.\n",
                               (void*)globalObj, globalShape, (void*)root->globalObj,
                               root->globalShape);
-            Backoff(cx, (jsbytecode*) root->ip);
-            ResetJIT(cx, FR_GLOBAL_SHAPE_MISMATCH);
+            Backoff(tm, (jsbytecode*) root->ip);
+            ResetJIT(cx, tm, FR_GLOBAL_SHAPE_MISMATCH);
             return false;
         }
         if (shape)
             *shape = globalShape;
         if (slots)
             *slots = root->globalSlots;
         return true;
     }
@@ -5597,46 +5599,45 @@ CheckGlobalObjectShape(JSContext* cx, Tr
         }
     }
 
     /* No currently-tracked-global found and no room to allocate, abort. */
     AUDIT(globalShapeMismatchAtEntry);
     debug_only_printf(LC_TMTracer,
                       "No global slotlist for global shape %u, flushing cache.\n",
                       globalShape);
-    ResetJIT(cx, FR_GLOBALS_FULL);
+    ResetJIT(cx, tm, FR_GLOBALS_FULL);
     return false;
 }
 
 /*
  * Return whether or not the recorder could be started. If 'false', the JIT has
  * been reset in response to an OOM.
  */
 bool JS_REQUIRES_STACK
-TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f,
+TraceRecorder::startRecorder(JSContext* cx, TraceMonitor *tm, VMSideExit* anchor, VMFragment* f,
                              unsigned stackSlots, unsigned ngslots,
                              JSValueType* typeMap, VMSideExit* expectedInnerExit,
                              JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc,
                              bool speculate)
 {
-    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(!tm->profile);
     JS_ASSERT(!tm->needFlush);
     JS_ASSERT_IF(cx->fp()->hasImacropc(), f->root != f);
 
     /* We can't (easily) use js_new() here because the constructor is private. */
     void *memory = js_malloc(sizeof(TraceRecorder));
     tm->recorder = memory
-                 ? new(memory) TraceRecorder(cx, anchor, f, stackSlots, ngslots, typeMap,
+                 ? new(memory) TraceRecorder(cx, tm, anchor, f, stackSlots, ngslots, typeMap,
                                              expectedInnerExit, outerScript, outerPC, outerArgc,
                                              speculate)
                  : NULL;
 
     if (!tm->recorder || tm->outOfMemory() || OverfullJITCache(cx, tm)) {
-        ResetJIT(cx, FR_OOM);
+        ResetJIT(cx, tm, FR_OOM);
         return false;
     }
 
     return true;
 }
 
 static void
 TrashTree(TreeFragment* f)
@@ -5718,21 +5719,20 @@ SynthesizeFrame(JSContext* cx, const Fra
     JSInterpreterHook hook = cx->debugHooks->callHook;
     if (hook) {
         newfp->setHookData(hook(cx, newfp, JS_TRUE, 0,
                                 cx->debugHooks->callHookData));
     }
 }
 
 static JS_REQUIRES_STACK bool
-RecordTree(JSContext* cx, TreeFragment* first, JSScript* outerScript, jsbytecode* outerPC,
+RecordTree(JSContext* cx, TraceMonitor* tm, TreeFragment* first,
+           JSScript* outerScript, jsbytecode* outerPC,
            uint32 outerArgc, SlotList* globalSlots)
 {
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-
     /* Try to find an unused peer fragment, or allocate a new one. */
     JS_ASSERT(first->first == first);
     TreeFragment* f = NULL;
     size_t count = 0;
     for (TreeFragment* peer = first; peer; peer = peer->peer, ++count) {
         if (!peer->code())
             f = peer;
     }
@@ -5743,30 +5743,30 @@ RecordTree(JSContext* cx, TreeFragment* 
     /* Disable speculation if we are starting to accumulate a lot of trees. */
     bool speculate = count < MAXPEERS-1;
 
     /* save a local copy for use after JIT flush */
     const void* localRootIP = f->root->ip;
 
     /* Make sure the global type map didn't change on us. */
     if (!CheckGlobalObjectShape(cx, tm, f->globalObj)) {
-        Backoff(cx, (jsbytecode*) localRootIP);
+        Backoff(tm, (jsbytecode*) localRootIP);
         return false;
     }
 
     AUDIT(recorderStarted);
 
     if (tm->outOfMemory() ||
         OverfullJITCache(cx, tm) ||
         !tm->tracedScripts.put(cx->fp()->script()))
     {
         if (!OverfullJITCache(cx, tm))
             js_ReportOutOfMemory(cx);
-        Backoff(cx, (jsbytecode*) f->root->ip);
-        ResetJIT(cx, FR_OOM);
+        Backoff(tm, (jsbytecode*) f->root->ip);
+        ResetJIT(cx, tm, FR_OOM);
         debug_only_print0(LC_TMTracer,
                           "Out of memory recording new tree, flushing cache.\n");
         return false;
     }
 
     JS_ASSERT(!f->code());
 
     f->initialize(cx, globalSlots, speculate);
@@ -5783,31 +5783,31 @@ RecordTree(JSContext* cx, TreeFragment* 
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[i]));
     debug_only_print0(LC_TMTreeVis, "\" GLOBALS=\"");
     for (unsigned i = 0; i < f->nGlobalTypes(); i++)
         debug_only_printf(LC_TMTreeVis, "%c", TypeToChar(f->typeMap[f->nStackTypes + i]));
     debug_only_print0(LC_TMTreeVis, "\"\n");
 #endif
 
     /* Recording primary trace. */
-    return TraceRecorder::startRecorder(cx, NULL, f, f->nStackTypes,
+    return TraceRecorder::startRecorder(cx, tm, NULL, f, f->nStackTypes,
                                         f->globalSlots->length(),
                                         f->typeMap.data(), NULL,
                                         outerScript, outerPC, outerArgc, speculate);
 }
 
 static JS_REQUIRES_STACK TypeConsensus
-FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp)
+FindLoopEdgeTarget(JSContext* cx, TraceMonitor* tm, VMSideExit* exit, TreeFragment** peerp)
 {
     TreeFragment* from = exit->root();
 
     JS_ASSERT(from->code());
-    Oracle* oracle = JS_TRACE_MONITOR(cx).oracle;
-
-    TypeMap typeMap(NULL);
+    Oracle* oracle = tm->oracle;
+
+    TypeMap typeMap(NULL, oracle);
     FullMapFromExit(typeMap, exit);
     JS_ASSERT(typeMap.length() - exit->numStackSlots == from->nGlobalTypes());
 
     /* Mark all double slots as undemotable */
     uint16* gslots = from->globalSlots->data();
     for (unsigned i = 0; i < typeMap.length(); i++) {
         if (typeMap[i] == JSVAL_TYPE_DOUBLE) {
             if (i < from->nStackTypes)
@@ -5821,40 +5821,39 @@ FindLoopEdgeTarget(JSContext* cx, VMSide
 
     TreeFragment* firstPeer = from->first;
 
     for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) {
         if (!peer->code())
             continue;
         JS_ASSERT(peer->argc == from->argc);
         JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
-        TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer);
+        TypeConsensus consensus = TypeMapLinkability(cx, tm, typeMap, peer);
         if (consensus == TypeConsensus_Okay || consensus == TypeConsensus_Undemotes) {
             *peerp = peer;
             return consensus;
         }
     }
 
     return TypeConsensus_Bad;
 }
 
 static JS_REQUIRES_STACK bool
-AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit,
+AttemptToStabilizeTree(JSContext* cx, TraceMonitor* tm, JSObject* globalObj, VMSideExit* exit,
                        JSScript* outerScript, jsbytecode* outerPC, uint32 outerArgc)
 {
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     if (tm->needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, tm, FR_DEEP_BAIL);
         return false;
     }
 
     TreeFragment* from = exit->root();
 
     TreeFragment* peer = NULL;
-    TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
+    TypeConsensus consensus = FindLoopEdgeTarget(cx, tm, exit, &peer);
     if (consensus == TypeConsensus_Okay) {
         JS_ASSERT(from->globalSlots == peer->globalSlots);
         JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
                      from->nStackTypes == peer->nStackTypes);
         JS_ASSERT(exit->numStackSlots == peer->nStackTypes);
         /* Patch this exit to its peer */
         JoinPeers(tm->assembler, exit, peer);
         /*
@@ -5878,24 +5877,22 @@ AttemptToStabilizeTree(JSContext* cx, JS
     SlotList *globalSlots = from->globalSlots;
 
     JS_ASSERT(from == from->root);
 
     /* If this tree has been blacklisted, don't try to record a new one. */
     if (*(jsbytecode*)from->ip == JSOP_NOTRACE)
         return false;
 
-    return RecordTree(cx, from->first, outerScript, outerPC, outerArgc, globalSlots);
+    return RecordTree(cx, tm, from->first, outerScript, outerPC, outerArgc, globalSlots);
 }
 
 static JS_REQUIRES_STACK VMFragment*
-CreateBranchFragment(JSContext* cx, TreeFragment* root, VMSideExit* anchor)
-{
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-
+CreateBranchFragment(JSContext* cx, TraceMonitor* tm, TreeFragment* root, VMSideExit* anchor)
+{
     verbose_only(
     uint32_t profFragID = (LogController.lcbits & LC_FragProfile)
                           ? (++(tm->lastFragID)) : 0;
     )
 
     VMFragment* f = new (*tm->dataAlloc) VMFragment(cx->regs->pc verbose_only(, profFragID));
 
     debug_only_printf(LC_TMTreeVis, "TREEVIS CREATEBRANCH ROOT=%p FRAG=%p PC=%p FILE=\"%s\""
@@ -5907,28 +5904,27 @@ CreateBranchFragment(JSContext* cx, Tree
 
     f->root = root;
     if (anchor)
         anchor->target = f;
     return f;
 }
 
 static JS_REQUIRES_STACK bool
-AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom,
+AttemptToExtendTree(JSContext* cx, TraceMonitor* tm, VMSideExit* anchor, VMSideExit* exitedFrom,
                     JSScript *outerScript, jsbytecode* outerPC
 #ifdef MOZ_TRACEVIS
     , TraceVisStateObj* tvso = NULL
 #endif
     )
 {
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(!tm->recorder);
 
     if (tm->needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, tm, FR_DEEP_BAIL);
 #ifdef MOZ_TRACEVIS
         if (tvso) tvso->r = R_FAIL_EXTEND_FLUSH;
 #endif
         return false;
     }
 
     TreeFragment* f = anchor->root();
     JS_ASSERT(f->code());
@@ -5945,17 +5941,17 @@ AttemptToExtendTree(JSContext* cx, VMSid
 #ifdef MOZ_TRACEVIS
         if (tvso) tvso->r = R_FAIL_EXTEND_MAX_BRANCHES;
 #endif
         return false;
     }
 
     VMFragment* c = (VMFragment*)anchor->target;
     if (!c) {
-        c = CreateBranchFragment(cx, f, anchor);
+        c = CreateBranchFragment(cx, tm, f, anchor);
     } else {
         /*
          * If we are recycling a fragment, it might have a different ip so reset it
          * here. This can happen when attaching a branch to a NESTED_EXIT, which
          * might extend along separate paths (i.e. after the loop edge, and after a
          * return statement).
          */
         c->ip = cx->regs->pc;
@@ -5969,17 +5965,17 @@ AttemptToExtendTree(JSContext* cx, VMSid
     int32_t maxHits = HOTEXIT + MAXEXIT;
     if (anchor->exitType == CASE_EXIT)
         maxHits *= anchor->switchInfo->count;
     if (outerPC || (hits++ >= HOTEXIT && hits <= maxHits)) {
         /* start tracing secondary trace from this point */
         unsigned stackSlots;
         unsigned ngslots;
         JSValueType* typeMap;
-        TypeMap fullMap(NULL);
+        TypeMap fullMap(NULL, tm->oracle);
         if (!exitedFrom) {
             /*
              * If we are coming straight from a simple side exit, just use that
              * exit's type map as starting point.
              */
             ngslots = anchor->numGlobalSlots;
             stackSlots = anchor->numStackSlots;
             typeMap = anchor->fullTypeMap();
@@ -5997,53 +5993,53 @@ AttemptToExtendTree(JSContext* cx, VMSid
             fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
             stackSlots = fullMap.length();
             ngslots = BuildGlobalTypeMapFromInnerTree(fullMap, e2);
             JS_ASSERT(ngslots >= e1->numGlobalSlots); // inner tree must have all globals
             JS_ASSERT(ngslots == fullMap.length() - stackSlots);
             typeMap = fullMap.data();
         }
         JS_ASSERT(ngslots >= anchor->numGlobalSlots);
-        bool rv = TraceRecorder::startRecorder(cx, anchor, c, stackSlots, ngslots, typeMap,
+        bool rv = TraceRecorder::startRecorder(cx, tm, anchor, c, stackSlots, ngslots, typeMap,
                                                exitedFrom, outerScript, outerPC, f->argc,
                                                hits < maxHits);
 #ifdef MOZ_TRACEVIS
         if (!rv && tvso)
             tvso->r = R_FAIL_EXTEND_START;
 #endif
         return rv;
     }
 #ifdef MOZ_TRACEVIS
     if (tvso) tvso->r = R_FAIL_EXTEND_COLD;
 #endif
     return false;
 }
 
 static JS_REQUIRES_STACK bool
-ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
+ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
             VMSideExit** innermostNestedGuardp, VMSideExit** lrp);
 
 static inline MonitorResult
 RecordingIfTrue(bool b)
 {
     return b ? MONITOR_RECORDING : MONITOR_NOT_RECORDING;
 }
 
 /*
  * A postcondition of recordLoopEdge is that if recordLoopEdge does not return
  * MONITOR_RECORDING, the recording has been aborted.
  */
 JS_REQUIRES_STACK MonitorResult
 TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCallCount)
 {
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+    TraceMonitor* tm = r->traceMonitor;
 
     /* Process needFlush and deep abort requests. */
     if (tm->needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, tm, FR_DEEP_BAIL);
         return MONITOR_NOT_RECORDING;
     }
 
     JS_ASSERT(r->fragment && !r->fragment->lastIns);
     TreeFragment* root = r->fragment->root;
     TreeFragment* first = LookupOrAddLoop(tm, cx->regs->pc, root->globalObj,
                                           root->globalShape, entryFrameArgc(cx));
 
@@ -6074,24 +6070,25 @@ TraceRecorder::recordLoopEdge(JSContext*
         JSScript* outerScript = outerFragment->script;
         jsbytecode* outerPC = (jsbytecode*) outerFragment->ip;
         uint32 outerArgc = outerFragment->argc;
         JS_ASSERT(entryFrameArgc(cx) == first->argc);
 
         if (AbortRecording(cx, "No compatible inner tree") == JIT_RESET)
             return MONITOR_NOT_RECORDING;
 
-        return RecordingIfTrue(RecordTree(cx, first, outerScript, outerPC, outerArgc, globalSlots));
+        return RecordingIfTrue(RecordTree(cx, tm, first,
+                                          outerScript, outerPC, outerArgc, globalSlots));
     }
 
     AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
     if (status == ARECORD_CONTINUE)
         return MONITOR_RECORDING;
     if (status == ARECORD_ERROR) {
-        if (TRACE_RECORDER(cx))
+        if (tm->recorder)
             AbortRecording(cx, "Error returned while recording loop edge");
         return MONITOR_ERROR;
     }
     JS_ASSERT(status == ARECORD_ABORTED && !tm->recorder);
     return MONITOR_NOT_RECORDING;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
@@ -6100,42 +6097,43 @@ TraceRecorder::attemptTreeCall(TreeFragm
     adjustCallerTypes(f);
     prepareTreeCall(f);
 
 #ifdef DEBUG
     uintN oldInlineCallCount = inlineCallCount;
 #endif
 
     JSContext *localCx = cx;
+    TraceMonitor *localtm = traceMonitor;
 
     // Refresh the import type map so the tracker can reimport values after the
     // call with their correct types. The inner tree must not change the type of
     // any variable in a frame above the current one (i.e., upvars).
     //
     // Note that DetermineTypesVisitor may call determineSlotType, which may
     // read from the (current, stale) import type map, but this is safe here.
     // The reason is that determineSlotType will read the import type map only
     // if there is not a tracker instruction for that value, which means that
     // value has not been written yet, so that type map entry is up to date.
     importTypeMap.setLength(NativeStackSlots(cx, callDepth));
     DetermineTypesVisitor visitor(*this, importTypeMap.data());
     VisitStackSlots(visitor, cx, callDepth);
 
     VMSideExit* innermostNestedGuard = NULL;
     VMSideExit* lr;
-    bool ok = ExecuteTree(cx, f, inlineCallCount, &innermostNestedGuard, &lr);
+    bool ok = ExecuteTree(cx, traceMonitor, f, inlineCallCount, &innermostNestedGuard, &lr);
 
     /*
      * If ExecuteTree reentered the interpreter, it may have killed |this|
      * and/or caused an error, which must be propagated.
      */
-    JS_ASSERT_IF(TRACE_RECORDER(localCx), TRACE_RECORDER(localCx) == this);
+    JS_ASSERT_IF(localtm->recorder, localtm->recorder == this);
     if (!ok)
         return ARECORD_ERROR;
-    if (!TRACE_RECORDER(localCx))
+    if (!localtm->recorder)
         return ARECORD_ABORTED;
 
     if (!lr) {
         AbortRecording(cx, "Couldn't call inner tree");
         return ARECORD_ABORTED;
     }
 
     TreeFragment* outerFragment = tree;
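
attemptTreeCall shows why the monitor has to be snapshotted rather than re-derived: ExecuteTree can reenter the interpreter, abort the recording, and destroy |this|, after which JS_TRACE_MONITOR(cx) is no longer a safe route back to the recorder's monitor. The pattern, condensed from the hunk above:

    JSContext *localCx = cx;               // |this| may die inside ExecuteTree,
    TraceMonitor *localtm = traceMonitor;  // so stash cx and tm in locals first
    bool ok = ExecuteTree(cx, traceMonitor, f, inlineCallCount,
                          &innermostNestedGuard, &lr);
    JS_ASSERT_IF(localtm->recorder, localtm->recorder == this);
    if (!localtm->recorder)                // recording was aborted during the call
        return ARECORD_ABORTED;
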
@@ -6144,17 +6142,18 @@ TraceRecorder::attemptTreeCall(TreeFragm
     switch (lr->exitType) {
       case LOOP_EXIT:
         /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
         if (innermostNestedGuard) {
             if (AbortRecording(cx, "Inner tree took different side exit, abort current "
                                    "recording and grow nesting tree") == JIT_RESET) {
                 return ARECORD_ABORTED;
             }
-            return AttemptToExtendTree(localCx, innermostNestedGuard, lr, outerScript, outerPC)
+            return AttemptToExtendTree(localCx, localtm,
+                                       innermostNestedGuard, lr, outerScript, outerPC)
                    ? ARECORD_CONTINUE
                    : ARECORD_ABORTED;
         }
 
         JS_ASSERT(oldInlineCallCount == inlineCallCount);
 
         /* Emit a call to the inner tree and continue recording the outer tree trace. */
         emitTreeCall(f, lr);
@@ -6163,17 +6162,17 @@ TraceRecorder::attemptTreeCall(TreeFragm
       case UNSTABLE_LOOP_EXIT:
       {
         /* Abort recording so the inner loop can become type stable. */
         JSObject* _globalObj = globalObj;
         if (AbortRecording(cx, "Inner tree is trying to stabilize, "
                                "abort outer recording") == JIT_RESET) {
             return ARECORD_ABORTED;
         }
-        return AttemptToStabilizeTree(localCx, _globalObj, lr, outerScript, outerPC,
+        return AttemptToStabilizeTree(localCx, localtm, _globalObj, lr, outerScript, outerPC,
                                       outerFragment->argc)
                ? ARECORD_CONTINUE
                : ARECORD_ABORTED;
       }
 
       case MUL_ZERO_EXIT:
       case OVERFLOW_EXIT:
         if (lr->exitType == MUL_ZERO_EXIT)
@@ -6183,17 +6182,17 @@ TraceRecorder::attemptTreeCall(TreeFragm
         /* FALL THROUGH */
       case BRANCH_EXIT:
       case CASE_EXIT:
         /* Abort recording the outer tree, extend the inner tree. */
         if (AbortRecording(cx, "Inner tree is trying to grow, "
                                "abort outer recording") == JIT_RESET) {
             return ARECORD_ABORTED;
         }
-        return AttemptToExtendTree(localCx, lr, NULL, outerScript, outerPC)
+        return AttemptToExtendTree(localCx, localtm, lr, NULL, outerScript, outerPC)
                ? ARECORD_CONTINUE
                : ARECORD_ABORTED;
 
       case NESTED_EXIT:
         JS_NOT_REACHED("NESTED_EXIT should be replaced by innermost side exit");
       default:
         debug_only_printf(LC_TMTracer, "exit_type=%s\n", getExitName(lr->exitType));
         AbortRecording(cx, "Inner tree not suitable for calling");
@@ -6253,17 +6252,17 @@ class TypeCompatibilityVisitor : public 
     JSValueType *mTypeMap;
     unsigned mStackSlotNum;
     bool mOk;
 public:
     TypeCompatibilityVisitor (TraceRecorder &recorder,
                               JSValueType *typeMap) :
         mRecorder(recorder),
         mCx(mRecorder.cx),
-        mOracle(JS_TRACE_MONITOR(mCx).oracle),
+        mOracle(recorder.traceMonitor->oracle),
         mTypeMap(typeMap),
         mStackSlotNum(0),
         mOk(true)
     {}
 
     JS_REQUIRES_STACK JS_ALWAYS_INLINE void
     visitGlobalSlot(Value *vp, unsigned n, unsigned slot) {
         debug_only_printf(LC_TMTracer, "global%d=", n);
@@ -6458,16 +6457,17 @@ FindVMCompatiblePeer(JSContext* cx, JSOb
  * This reuse depends on the invariant that only one trace uses |tm->storage| at
  * a time. This is subtly correct in lieu of deep bail; see comment for
  * |deepBailSp| in DeepBail.
  */
 JS_ALWAYS_INLINE
 TracerState::TracerState(JSContext* cx, TraceMonitor* tm, TreeFragment* f,
                          uintN& inlineCallCount, VMSideExit** innermostNestedGuardp)
   : cx(cx),
+    traceMonitor(tm),
     stackBase(tm->storage->stack()),
     sp(stackBase + f->nativeStackBase / sizeof(double)),
     eos(tm->storage->global()),
     callstackBase(tm->storage->callstack()),
     sor(callstackBase),
     rp(callstackBase),
     eor(callstackBase + JS_MIN(MAX_CALL_STACK_ENTRIES,
                                JS_MAX_INLINE_CALL_COUNT - inlineCallCount)),
@@ -6478,17 +6478,16 @@ TracerState::TracerState(JSContext* cx, 
     inlineCallCountp(&inlineCallCount),
     innermostNestedGuardp(innermostNestedGuardp),
 #ifdef EXECUTE_TREE_TIMER
     startTime(rdtsc()),
 #endif
     builtinStatus(0),
     nativeVp(NULL)
 {
-    JS_ASSERT(tm == &JS_TRACE_MONITOR(cx));
     JS_ASSERT(!tm->tracecx);
     tm->tracecx = cx;
     prev = tm->tracerState;
     tm->tracerState = this;
 
     JS_ASSERT(eos == stackBase + MAX_NATIVE_STACK_SLOTS);
     JS_ASSERT(sp < eos);
 
@@ -6516,31 +6515,31 @@ TracerState::~TracerState()
 
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     tm->tracerState = prev;
     tm->tracecx = NULL;
 }
 
 /* Call |f|, return the exit taken. */
 static JS_ALWAYS_INLINE VMSideExit*
-ExecuteTrace(JSContext* cx, Fragment* f, TracerState& state)
-{
-    JS_ASSERT(!JS_TRACE_MONITOR(cx).bailExit);
+ExecuteTrace(JSContext* cx, TraceMonitor* tm, Fragment* f, TracerState& state)
+{
+    JS_ASSERT(!tm->bailExit);
 #ifdef JS_METHODJIT
     JS_ASSERT(!TRACE_PROFILER(cx));
 #endif
     union { NIns *code; GuardRecord* (FASTCALL *func)(TracerState*); } u;
     u.code = f->code();
     GuardRecord* rec;
 #if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
     SIMULATE_FASTCALL(rec, state, NULL, u.func);
 #else
     rec = u.func(&state);
 #endif
-    JS_ASSERT(!JS_TRACE_MONITOR(cx).bailExit);
+    JS_ASSERT(!tm->bailExit);
     return (VMSideExit*)rec->exit;
 }
 
 /* Check whether our assumptions about the incoming scope-chain are upheld. */
 static JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
 ScopeChainCheck(JSContext* cx, TreeFragment* f)
 {
     JS_ASSERT(f->globalObj == cx->fp()->scopeChain().getGlobal());
@@ -6586,24 +6585,23 @@ enum LEAVE_TREE_STATUS {
   DEEP_BAILED = 1
 };
 
 static LEAVE_TREE_STATUS
 LeaveTree(TraceMonitor *tm, TracerState&, VMSideExit *lr);
 
 /* Return false if the interpreter should goto error. */
 static JS_REQUIRES_STACK bool
-ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
+ExecuteTree(JSContext* cx, TraceMonitor* tm, TreeFragment* f, uintN& inlineCallCount,
             VMSideExit** innermostNestedGuardp, VMSideExit **lrp)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_EXECUTE);
 #endif
     JS_ASSERT(f->root == f && f->code());
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     JS_ASSERT(!tm->profile);
 
     if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace() ||
         inlineCallCount + f->maxCallDepth > JS_MAX_INLINE_CALL_COUNT) {
         *lrp = NULL;
         return true;
     }
@@ -6635,19 +6633,19 @@ ExecuteTree(JSContext* cx, TreeFragment*
 
     debug_only_stmt(uint32 globalSlots = globalObj->numSlots();)
     debug_only_stmt(*(uint64*)&tm->storage->global()[globalSlots] = 0xdeadbeefdeadbeefLL;)
 
     /* Execute trace. */
     tm->iterationCounter = 0;
     debug_only(int64 t0 = PRMJ_Now();)
 #ifdef MOZ_TRACEVIS
-    VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, f, state));
+    VMSideExit* lr = (TraceVisStateObj(cx, S_NATIVE), ExecuteTrace(cx, tm, f, state));
 #else
-    VMSideExit* lr = ExecuteTrace(cx, f, state);
+    VMSideExit* lr = ExecuteTrace(cx, tm, f, state);
 #endif
     debug_only(int64 t1 = PRMJ_Now();)
 
     JS_ASSERT_IF(lr->exitType == LOOP_EXIT, !lr->calldepth);
 
     /* Restore interpreter state. */
 #ifdef DEBUG
     LEAVE_TREE_STATUS lts = 
@@ -7056,35 +7054,33 @@ TraceRecorder::assertInsideLoop()
      * immediately preceding a loop (the one that jumps to the loop
      * condition).
      */
     JS_ASSERT(pc >= beg - JSOP_GOTO_LENGTH && pc <= end);
 #endif
 }
 
 JS_REQUIRES_STACK MonitorResult
-RecordLoopEdge(JSContext* cx, uintN& inlineCallCount)
+RecordLoopEdge(JSContext* cx, TraceMonitor* tm, uintN& inlineCallCount)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_MONITOR);
 #endif
 
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-
     JS_ASSERT(!tm->profile);
 
     /* Is the recorder currently active? */
     if (tm->recorder) {
         tm->recorder->assertInsideLoop();
         jsbytecode* pc = cx->regs->pc;
         if (pc == tm->recorder->tree->ip) {
             tm->recorder->closeLoop();
         } else {
             MonitorResult r = TraceRecorder::recordLoopEdge(cx, tm->recorder, inlineCallCount);
-            JS_ASSERT((r == MONITOR_RECORDING) == (TRACE_RECORDER(cx) != NULL));
+            JS_ASSERT((r == MONITOR_RECORDING) == (tm->recorder != NULL));
             if (r == MONITOR_RECORDING || r == MONITOR_ERROR)
                 return r;
 
             /*
              * recordLoopEdge will invoke an inner tree if we have a matching
              * one. If we arrive here, that tree didn't run to completion and
              * instead we mis-matched or the inner tree took a side exit other than
              * the loop exit. We are thus no longer guaranteed to be parked on the
@@ -7108,17 +7104,17 @@ RecordLoopEdge(JSContext* cx, uintN& inl
      * Make sure the shape of the global object still matches (this might flush
      * the JIT cache).
      */
     JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
     uint32 globalShape = -1;
     SlotList* globalSlots = NULL;
 
     if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
-        Backoff(cx, cx->regs->pc);
+        Backoff(tm, cx->regs->pc);
         return MONITOR_NOT_RECORDING;
     }
 
     /* Do not enter the JIT code with a pending operation callback. */
     if (JS_THREAD_DATA(cx)->interruptFlags) {
 #ifdef MOZ_TRACEVIS
         tvso.r = R_CALLBACK_PENDING;
 #endif
@@ -7150,17 +7146,17 @@ RecordLoopEdge(JSContext* cx, uintN& inl
             return MONITOR_NOT_RECORDING;
         }
 
         /*
          * We can give RecordTree the root peer. If that peer is already taken,
          * it will walk the peer list and find us a free slot or allocate a new
          * tree if needed.
          */
-        bool rv = RecordTree(cx, f->first, NULL, NULL, 0, globalSlots);
+        bool rv = RecordTree(cx, tm, f->first, NULL, NULL, 0, globalSlots);
 #ifdef MOZ_TRACEVIS
         if (!rv)
             tvso.r = R_FAIL_RECORD_TREE;
 #endif
         return RecordingIfTrue(rv);
     }
 
     debug_only_printf(LC_TMTracer,
@@ -7184,17 +7180,17 @@ RecordLoopEdge(JSContext* cx, uintN& inl
         tvso.r = R_MAX_PEERS;
 #endif
         return MONITOR_NOT_RECORDING;
     }
 
     VMSideExit* lr = NULL;
     VMSideExit* innermostNestedGuard = NULL;
 
-    if (!ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard, &lr))
+    if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
         return MONITOR_ERROR;
 
     if (!lr) {
 #ifdef MOZ_TRACEVIS
         tvso.r = R_FAIL_EXECUTE_TREE;
 #endif
         return MONITOR_NOT_RECORDING;
     }
@@ -7202,42 +7198,42 @@ RecordLoopEdge(JSContext* cx, uintN& inl
     /*
      * If we exit on a branch, or on a tree call guard, try to grow the inner
      * tree (in case of a branch exit), or the tree nested around the tree we
      * exited from (in case of the tree call guard).
      */
     bool rv;
     switch (lr->exitType) {
       case UNSTABLE_LOOP_EXIT:
-        rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL, 0);
+        rv = AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0);
 #ifdef MOZ_TRACEVIS
         if (!rv)
             tvso.r = R_FAIL_STABILIZE;
 #endif
         return RecordingIfTrue(rv);
 
       case MUL_ZERO_EXIT:
       case OVERFLOW_EXIT:
         if (lr->exitType == MUL_ZERO_EXIT)
             tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
         else
             tm->oracle->markInstructionUndemotable(cx->regs->pc);
         /* FALL THROUGH */
       case BRANCH_EXIT:
       case CASE_EXIT:
-        rv = AttemptToExtendTree(cx, lr, NULL, NULL, NULL
+        rv = AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL
 #ifdef MOZ_TRACEVIS
                                                    , &tvso
 #endif
                                  );
         return RecordingIfTrue(rv);
 
       case LOOP_EXIT:
         if (innermostNestedGuard) {
-            rv = AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL
+            rv = AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL
 #ifdef MOZ_TRACEVIS
                                                                        , &tvso
 #endif
                                      );
             return RecordingIfTrue(rv);
         }
 #ifdef MOZ_TRACEVIS
         tvso.r = R_NO_EXTEND_OUTER;
@@ -7274,24 +7270,24 @@ RecordLoopEdge(JSContext* cx, uintN& inl
     }
 }
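
The rewrite above is the mechanical core of this patch and repeats across most
hunks that follow: call sites that used to re-derive the monitor through
JS_TRACE_MONITOR(cx) (that is, cx->compartment->traceMonitor) now receive a
TraceMonitor* that the outermost caller resolved exactly once. The standalone
sketch below illustrates the before/after shape of that change; the struct
definitions are simplified stand-ins, not the real SpiderMonkey types.

#include <cstdio>

// Simplified stand-ins for the real JSContext/TraceMonitor relationship.
struct TraceMonitor { bool needFlush; };
struct JSCompartment { TraceMonitor traceMonitor; };
struct JSContext { JSCompartment *compartment; };

// Before: every helper chased cx->compartment->traceMonitor on its own.
static void HelperOld(JSContext *cx) {
    TraceMonitor *tm = &cx->compartment->traceMonitor;  // repeated lookup
    tm->needFlush = true;
}

// After: the caller resolves the monitor once and threads it through, so
// helpers no longer depend on cx->compartment still being what they expect.
static void HelperNew(JSContext *cx, TraceMonitor *tm) {
    tm->needFlush = true;
}

int main() {
    JSCompartment comp = { { false } };
    JSContext cx = { &comp };
    TraceMonitor *tm = &cx.compartment->traceMonitor;  // resolved once
    HelperOld(&cx);
    HelperNew(&cx, tm);
    std::printf("needFlush = %d\n", (int) comp.traceMonitor.needFlush);
    return 0;
}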
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::monitorRecording(JSOp op)
 {
     JS_ASSERT(!addPropShapeBefore);
 
-    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
+    TraceMonitor &localtm = *traceMonitor;
     debug_only_stmt( JSContext *localcx = cx; )
     assertInsideLoop();
     JS_ASSERT(!localtm.profile);
 
     /* Process needFlush requests now. */
     if (localtm.needFlush) {
-        ResetJIT(cx, FR_DEEP_BAIL);
+        ResetJIT(cx, &localtm, FR_DEEP_BAIL);
         return ARECORD_ABORTED;
     }
     JS_ASSERT(!fragment->lastIns);
 
     /*
      * Clear one-shot state used to communicate between record_JSOP_CALL and post-
      * opcode-case-guts record hook (record_NativeCallComplete).
      */
@@ -7376,17 +7372,17 @@ TraceRecorder::monitorRecording(JSOp op)
 
         /* Handle lazy aborts; propagate the 'error' status. */
         if (StatusAbortsRecorderIfActive(status)) {
             AbortRecording(cx, js_CodeName[op]);
             return status == ARECORD_ERROR ? ARECORD_ERROR : ARECORD_ABORTED;
         }
 
         if (outOfMemory() || OverfullJITCache(cx, &localtm)) {
-            ResetJIT(cx, FR_OOM);
+            ResetJIT(cx, &localtm, FR_OOM);
 
             /*
              * If the status returned was ARECORD_IMACRO, then we just
              * changed cx->regs, we need to tell the interpreter to sync
              * its local variables.
              */
             return status == ARECORD_IMACRO ? ARECORD_IMACRO_ABORTED : ARECORD_ABORTED;
         }
@@ -7726,17 +7722,17 @@ InitJIT(TraceMonitor *tm)
     CHECK_ALLOC(traceReserve, (char *)js_malloc(TraceReserveSize));
     CHECK_ALLOC(tempReserve, (char *)js_malloc(TempReserveSize));
     CHECK_ALLOC(tm->dataAlloc, js_new<VMAllocator>(dataReserve, DataReserveSize));
     CHECK_ALLOC(tm->traceAlloc, js_new<VMAllocator>(traceReserve, TraceReserveSize));
     CHECK_ALLOC(tm->tempAlloc, js_new<VMAllocator>(tempReserve, TempReserveSize));
     CHECK_ALLOC(tm->codeAlloc, js_new<CodeAlloc>());
     CHECK_ALLOC(tm->frameCache, js_new<FrameInfoCache>(tm->dataAlloc));
     CHECK_ALLOC(tm->storage, js_new<TraceNativeStorage>());
-    CHECK_ALLOC(tm->cachedTempTypeMap, js_new<TypeMap>((Allocator*)NULL));
+    CHECK_ALLOC(tm->cachedTempTypeMap, js_new<TypeMap>((Allocator*)NULL, tm->oracle));
     tm->flush();
     verbose_only( tm->branches = NULL; )
 
 #if !defined XP_WIN
     debug_only(PodZero(&jitstats));
 #endif
 
 #ifdef JS_JIT_SPEW
@@ -10989,17 +10985,17 @@ TraceRecorder::record_JSOP_OBJTOP()
 RecordingStatus
 TraceRecorder::getClassPrototype(JSObject* ctor, LIns*& proto_ins)
 {
     // ctor must be a function created via js_InitClass.
 #ifdef DEBUG
     Class *clasp = FUN_CLASP(GET_FUNCTION_PRIVATE(cx, ctor));
     JS_ASSERT(clasp);
 
-    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
+    TraceMonitor &localtm = *traceMonitor;
 #endif
 
     Value pval;
     if (!ctor->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &pval))
         RETURN_ERROR("error getting prototype from constructor");
 
     // ctor.prototype is a permanent data property, so this lookup cannot have
     // deep-aborted.
@@ -11023,17 +11019,17 @@ TraceRecorder::getClassPrototype(JSObjec
     proto_ins = w.immpObjGC(proto);
     return RECORD_CONTINUE;
 }
 
 RecordingStatus
 TraceRecorder::getClassPrototype(JSProtoKey key, LIns*& proto_ins)
 {
 #ifdef DEBUG
-    TraceMonitor &localtm = JS_TRACE_MONITOR(cx);
+    TraceMonitor &localtm = *traceMonitor;
 #endif
 
     JSObject* proto;
     if (!js_GetClassPrototype(cx, globalObj, key, &proto))
         RETURN_ERROR("error in js_GetClassPrototype");
 
     // This should not have reentered.
     JS_ASSERT(localtm.recorder);
@@ -11760,51 +11756,55 @@ JS_REQUIRES_STACK AbortableRecordingStat
 TraceRecorder::record_JSOP_DELNAME()
 {
     return ARECORD_STOP;
 }
 
 static JSBool JS_FASTCALL
 DeleteIntKey(JSContext* cx, JSObject* obj, int32 i, JSBool strict)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
     LeaveTraceIfArgumentsObject(cx, obj);
     Value v = BooleanValue(false);
     jsid id;
     if (INT_FITS_IN_JSID(i)) {
         id = INT_TO_JSID(i);
     } else {
         if (!js_ValueToStringId(cx, Int32Value(i), &id)) {
-            SetBuiltinError(cx);
+            SetBuiltinError(tm);
             return false;
         }
     }
 
     if (!obj->deleteProperty(cx, id, &v, strict))
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
     return v.toBoolean();
 }
 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteIntKey, CONTEXT, OBJECT, INT32, BOOL,
                      0, ACCSET_STORE_ANY)
 
 static JSBool JS_FASTCALL
 DeleteStrKey(JSContext* cx, JSObject* obj, JSString* str, JSBool strict)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
     LeaveTraceIfArgumentsObject(cx, obj);
     Value v = BooleanValue(false);
     jsid id;
 
     /*
      * NB: JSOP_DELPROP does not need js_ValueToStringId to atomize, but (see
      * jsatominlines.h) that helper early-returns if the computed property name
      * string is already atomized, and we are *not* on a perf-critical path!
      */
     if (!js_ValueToStringId(cx, StringValue(str), &id) || !obj->deleteProperty(cx, id, &v, strict))
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
     return v.toBoolean();
 }
 JS_DEFINE_CALLINFO_4(extern, BOOL_FAIL, DeleteStrKey, CONTEXT, OBJECT, STRING, BOOL,
                      0, ACCSET_STORE_ANY)
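
DeleteIntKey and DeleteStrKey now follow the discipline used by every
traceable native touched in this patch: capture the TraceMonitor once at
entry, before any operation that can deep-bail, and report failure through
that pointer instead of re-deriving it from the context afterwards. Below is
a minimal standalone sketch of that ordering; the types are simplified and
deleteProperty is a hypothetical fallible operation standing in for
obj->deleteProperty().

#include <cstdio>

// Simplified stand-ins for the builtin-status plumbing in jstracer.h.
enum { BUILTIN_ERROR = 2 };
struct TracerState  { int builtinStatus; };
struct TraceMonitor { TracerState *tracerState; };

static void SetBuiltinErrorSketch(TraceMonitor *tm) {
    tm->tracerState->builtinStatus |= BUILTIN_ERROR;
}

// Hypothetical fallible operation standing in for obj->deleteProperty().
static bool deleteProperty(bool succeed) { return succeed; }

static bool DeleteKeySketch(TraceMonitor *tm, bool succeed) {
    // tm was captured before anything could deep-bail; failure is
    // recorded on the monitor, not looked up through a JSContext.
    if (!deleteProperty(succeed))
        SetBuiltinErrorSketch(tm);
    return succeed;
}

int main() {
    TracerState state = { 0 };
    TraceMonitor tm = { &state };
    DeleteKeySketch(&tm, false);
    std::printf("builtinStatus = %d\n", state.builtinStatus);  // prints 2
    return 0;
}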
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_DELPROP()
 {
@@ -12067,18 +12067,20 @@ TraceRecorder::lookupForSetPropertyOp(JS
         }
     }
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 MethodWriteBarrier(JSContext* cx, JSObject* obj, uint32 slot, const Value* v)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     bool ok = obj->methodWriteBarrier(cx, slot, *v);
-    JS_ASSERT(WasBuiltinSuccessful(cx));
+    JS_ASSERT(WasBuiltinSuccessful(tm));
     return ok;
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, MethodWriteBarrier, CONTEXT, OBJECT, UINT32, CVALUEPTR,
                      0, ACCSET_STORE_ANY)
 
 /* Emit a specialized, inlined copy of js_NativeSet. */
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::nativeSet(JSObject* obj, LIns* obj_ins, const Shape* shape,
@@ -12598,76 +12600,79 @@ struct PICTable
         newEntry.id = id;
         newEntry.slot = slot;
     }
 };
 
 static JSBool FASTCALL
 GetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, PICTable *picTable)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     jsid id;
     if (!RootedStringToId(cx, namep, &id)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
     
     /* Delegate to the op, if present. */
     PropertyIdOp op = obj->getOps()->getProperty;
     if (op) {
         bool result = op(cx, obj, obj, id, vp);
         if (!result)
-            SetBuiltinError(cx);
-        return WasBuiltinSuccessful(cx);
+            SetBuiltinError(tm);
+        return WasBuiltinSuccessful(tm);
     }
 
     /* Try to hit in the cache. */
     uint32 slot;
     if (picTable->scan(obj->shape(), id, &slot)) {
         *vp = obj->getSlot(slot);
-        return WasBuiltinSuccessful(cx);
+        return WasBuiltinSuccessful(tm);
     }
 
     const Shape *shape;
     JSObject *holder;
     if (!js_GetPropertyHelperWithShape(cx, obj, obj, id, JSGET_METHOD_BARRIER, vp, &shape,
                                        &holder)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
 
     /* Only update the table when the object is the holder of the property. */
     if (obj == holder && shape->hasSlot()) {
         /*
          * Note: we insert the non-normalized id into the table so lookups don't
          * need to normalize it first (faster hit path).
          */
         picTable->update(obj->shape(), id, shape->slot);
     }
     
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, GetPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUEPTR,
                      PICTABLE,
                      0, ACCSET_STORE_ANY)
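
GetPropertyByName consults its PICTable before falling back to the generic
js_GetPropertyHelperWithShape path: a hit keyed on (shape, id) yields the
slot directly. The toy cache below illustrates the idea with a fixed-size
table and a linear scan; the real PICTable in this file differs in capacity
and replacement details.

#include <cstdint>
#include <cstdio>

// Toy shape/id -> slot cache in the spirit of PICTable (simplified).
struct MiniPIC {
    static const unsigned CAP = 8;
    struct Entry { uint32_t shape; uint32_t id; uint32_t slot; };
    Entry entries[CAP];
    unsigned count;

    MiniPIC() : count(0) {}

    bool scan(uint32_t shape, uint32_t id, uint32_t *slotOut) {
        for (unsigned i = 0; i < count; i++) {
            if (entries[i].shape == shape && entries[i].id == id) {
                *slotOut = entries[i].slot;
                return true;  // hit: skip the slow property lookup
            }
        }
        return false;
    }

    void update(uint32_t shape, uint32_t id, uint32_t slot) {
        unsigned i = (count < CAP) ? count++ : 0;  // overwrite on overflow
        entries[i].shape = shape;
        entries[i].id = id;
        entries[i].slot = slot;
    }
};

int main() {
    MiniPIC pic;
    pic.update(0x1234, 7, 3);
    uint32_t slot;
    if (pic.scan(0x1234, 7, &slot))
        std::printf("hit: slot %u\n", (unsigned) slot);
    return 0;
}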
 
 // Convert the value in a slot to a string and store the resulting string back
 // in the slot (typically in order to root it).
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::primitiveToStringInPlace(Value* vp)
 {
     Value v = *vp;
     JS_ASSERT(v.isPrimitive());
 
     if (!v.isString()) {
         // v is not a string. Turn it into one. js_ValueToString is safe
         // because v is not an object.
+        TraceMonitor *localtm = traceMonitor;
         JSString *str = js_ValueToString(cx, v);
-        JS_ASSERT(TRACE_RECORDER(cx) == this);
+        JS_ASSERT(localtm->recorder == this);
         if (!str)
             RETURN_ERROR("failed to stringify element id");
         v.setString(str);
         set(vp, stringify(*vp));
 
         // Write the string back to the stack to save the interpreter some work
         // and to ensure snapshots get the correct type for this slot.
         *vp = v;
@@ -12701,24 +12706,26 @@ TraceRecorder::getPropertyByName(LIns* o
     finishGetProp(obj_ins, vp_ins, ok_ins, outp);
     leaveDeepBailCall();
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 GetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     AutoIdRooter idr(cx);
     if (!js_Int32ToId(cx, index, idr.addr()) || !obj->getProperty(cx, idr.id(), vp)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, 0,
                      ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::getPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* outp)
 {
     CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
@@ -12731,22 +12738,24 @@ TraceRecorder::getPropertyByIndex(LIns* 
     finishGetProp(obj_ins, vp_ins, ok_ins, outp);
     leaveDeepBailCall();
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 GetPropertyById(JSContext* cx, JSObject* obj, jsid id, Value* vp)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
     if (!obj->getProperty(cx, id, vp)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyById, CONTEXT, OBJECT, JSID, VALUEPTR,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::getPropertyById(LIns* obj_ins, Value* outp)
 {
     // Find the atom.
@@ -12774,36 +12783,38 @@ TraceRecorder::getPropertyById(LIns* obj
     leaveDeepBailCall();
     return RECORD_CONTINUE;
 }
 
 /* Manually inlined, specialized copy of js_NativeGet. */
 static JSBool FASTCALL
 GetPropertyWithNativeGetter(JSContext* cx, JSObject* obj, Shape* shape, Value* vp)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
 #ifdef DEBUG
     JSProperty* prop;
     JSObject* pobj;
     JS_ASSERT(obj->lookupProperty(cx, shape->id, &pobj, &prop));
     JS_ASSERT(prop == (JSProperty*) shape);
 #endif
 
     // Shape::get contains a special case for With objects. We can elide it
     // here because With objects are, we claim, never on the operand stack
     // while recording.
     JS_ASSERT(obj->getClass() != &js_WithClass);
 
     vp->setUndefined();
     if (!shape->getterOp()(cx, obj, SHAPE_USERID(shape), vp)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, GetPropertyWithNativeGetter,
                      CONTEXT, OBJECT, SHAPE, VALUEPTR, 0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::getPropertyWithNativeGetter(LIns* obj_ins, const Shape* shape, Value* outp)
 {
     JS_ASSERT(!shape->hasGetterValue());
@@ -13107,41 +13118,45 @@ TraceRecorder::record_JSOP_GETELEM()
     return InjectStatus(getPropertyByIndex(obj_ins, idx_ins, &lval));
 }
 
 /* Functions used by JSOP_SETELEM */
 
 static JSBool FASTCALL
 SetPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, Value* vp, JSBool strict)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     jsid id;
     if (!RootedStringToId(cx, namep, &id) || !obj->setProperty(cx, id, vp, strict)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByName, 
                      CONTEXT, OBJECT, STRINGPTR, VALUEPTR, BOOL,
                      0, ACCSET_STORE_ANY)
 
 static JSBool FASTCALL
 InitPropertyByName(JSContext* cx, JSObject* obj, JSString** namep, ValueArgType arg)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     jsid id;
     if (!RootedStringToId(cx, namep, &id) ||
         !obj->defineProperty(cx, id, ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByName, CONTEXT, OBJECT, STRINGPTR, VALUE,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::initOrSetPropertyByName(LIns* obj_ins, Value* idvalp, Value* rvalp, bool init)
 {
     CHECK_STATUS(primitiveToStringInPlace(idvalp));
@@ -13163,40 +13178,44 @@ TraceRecorder::initOrSetPropertyByName(L
 
     leaveDeepBailCall();
     return RECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 SetPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, Value* vp, JSBool strict)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     AutoIdRooter idr(cx);
     if (!js_Int32ToId(cx, index, idr.addr()) || !obj->setProperty(cx, idr.id(), vp, strict)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_5(static, BOOL_FAIL, SetPropertyByIndex, CONTEXT, OBJECT, INT32, VALUEPTR, BOOL,
                      0, ACCSET_STORE_ANY)
 
 static JSBool FASTCALL
 InitPropertyByIndex(JSContext* cx, JSObject* obj, int32 index, ValueArgType arg)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     LeaveTraceIfGlobalObject(cx, obj);
 
     AutoIdRooter idr(cx);
     if (!js_Int32ToId(cx, index, idr.addr()) ||
         !obj->defineProperty(cx, idr.id(), ValueArgToConstRef(arg), NULL, NULL, JSPROP_ENUMERATE)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return JS_FALSE;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, InitPropertyByIndex, CONTEXT, OBJECT, INT32, VALUE,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK RecordingStatus
 TraceRecorder::initOrSetPropertyByIndex(LIns* obj_ins, LIns* index_ins, Value* rvalp, bool init)
 {
     CHECK_STATUS(makeNumberInt32(index_ins, &index_ins));
@@ -14704,23 +14723,25 @@ TraceRecorder::record_JSOP_IMACOP()
 {
     JS_ASSERT(cx->fp()->hasImacropc());
     return ARECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 ObjectToIterator(JSContext* cx, JSObject *obj, int32 flags, Value* vp)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     vp->setObject(*obj);
     bool ok = js_ValueToIterator(cx, flags, vp);
     if (!ok) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_4(static, BOOL_FAIL, ObjectToIterator, CONTEXT, OBJECT, INT32, VALUEPTR,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_ITER()
 {
     Value& v = stackval(-1);
@@ -14752,21 +14773,23 @@ TraceRecorder::record_JSOP_ITER()
     leaveDeepBailCall();
 
     return ARECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 IteratorMore(JSContext *cx, JSObject *iterobj, Value *vp)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     if (!js_IteratorMore(cx, iterobj, vp)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_3(extern, BOOL_FAIL, IteratorMore, CONTEXT, OBJECT, VALUEPTR,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_MOREITER()
 {
     Value& iterobj_val = stackval(-1);
@@ -14814,21 +14837,23 @@ TraceRecorder::record_JSOP_MOREITER()
     stack(0, cond_ins);
 
     return ARECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 CloseIterator(JSContext *cx, JSObject *iterobj)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     if (!js_CloseIterator(cx, iterobj)) {
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
         return false;
     }
-    return WasBuiltinSuccessful(cx);
+    return WasBuiltinSuccessful(tm);
 }
 JS_DEFINE_CALLINFO_2(extern, BOOL_FAIL, CloseIterator, CONTEXT, OBJECT, 0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_ENDITER()
 {
     JS_ASSERT(!stackval(-1).isPrimitive());
 
@@ -15084,16 +15109,17 @@ TraceRecorder::traverseScopeChain(JSObje
 
     targetIns = obj_ins;
     return RECORD_CONTINUE;
 }
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_BINDNAME()
 {
+    TraceMonitor *localtm = traceMonitor;
     JSStackFrame* const fp = cx->fp();
     JSObject *obj;
 
     if (!fp->isFunctionFrame()) {
         obj = &fp->scopeChain();
 
 #ifdef DEBUG
         JSStackFrame *fp2 = fp;
@@ -15148,21 +15174,20 @@ TraceRecorder::record_JSOP_BINDNAME()
         stack(0, w.immpObjGC(obj));
         return ARECORD_CONTINUE;
     }
     LIns *obj_ins = w.ldpObjParent(get(callee));
 
     // Find the target object.
     JSAtom *atom = atoms[GET_INDEX(cx->regs->pc)];
     jsid id = ATOM_TO_JSID(atom);
-    JSContext *localCx = cx;
     JSObject *obj2 = js_FindIdentifierBase(cx, &fp->scopeChain(), id);
     if (!obj2)
         RETURN_ERROR_A("error in js_FindIdentifierBase");
-    if (!TRACE_RECORDER(localCx))
+    if (!localtm->recorder)
         return ARECORD_ABORTED;
     if (obj2 != globalObj && !obj2->isCall())
         RETURN_STOP_A("BINDNAME on non-global, non-call object");
 
     // Generate LIR to get to the target object from the start object.
     LIns *obj2_ins;
     CHECK_STATUS_A(traverseScopeChain(obj, obj_ins, obj2, obj2_ins));
 
@@ -15278,20 +15303,22 @@ TraceRecorder::record_JSOP_IN()
      */
     set(&lval, x);
     return ARECORD_CONTINUE;
 }
 
 static JSBool FASTCALL
 HasInstanceOnTrace(JSContext* cx, JSObject* ctor, ValueArgType arg)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     const Value &argref = ValueArgToConstRef(arg);
     JSBool result = JS_FALSE;
     if (!HasInstance(cx, ctor, &argref, &result))
-        SetBuiltinError(cx);
+        SetBuiltinError(tm);
     return result;
 }
 JS_DEFINE_CALLINFO_3(static, BOOL_FAIL, HasInstanceOnTrace, CONTEXT, OBJECT, VALUE,
                      0, ACCSET_STORE_ANY)
 
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_INSTANCEOF()
 {
@@ -16632,17 +16659,17 @@ StopTraceVisNative(JSContext *cx, uintN 
     return ok;
 }
 
 #endif /* MOZ_TRACEVIS */
 
 JS_REQUIRES_STACK void
 TraceRecorder::captureStackTypes(unsigned callDepth, JSValueType* typeMap)
 {
-    CaptureTypesVisitor capVisitor(cx, typeMap, !!oracle);
+    CaptureTypesVisitor capVisitor(cx, traceMonitor->oracle, typeMap, !!oracle);
     VisitStackSlots(capVisitor, cx, callDepth);
 }
 
 JS_REQUIRES_STACK void
 TraceRecorder::determineGlobalTypes(JSValueType* typeMap)
 {
     DetermineTypesVisitor detVisitor(*this, typeMap);
     VisitGlobalSlots(detVisitor, cx, *tree->globalSlots);
@@ -16662,33 +16689,33 @@ class AutoRetBlacklist
 
     ~AutoRetBlacklist()
     {
         *blacklist = IsBlacklisted(pc);
     }
 };
 
 JS_REQUIRES_STACK TracePointAction
-RecordTracePoint(JSContext* cx, uintN& inlineCallCount, bool* blacklist, bool execAllowed)
+RecordTracePoint(JSContext* cx, TraceMonitor* tm,
+                 uintN& inlineCallCount, bool* blacklist, bool execAllowed)
 {
     JSStackFrame* fp = cx->fp();
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     jsbytecode* pc = cx->regs->pc;
 
-    JS_ASSERT(!TRACE_RECORDER(cx));
+    JS_ASSERT(!tm->recorder);
     JS_ASSERT(!tm->profile);
 
     JSObject* globalObj = cx->fp()->scopeChain().getGlobal();
     uint32 globalShape = -1;
     SlotList* globalSlots = NULL;
 
     AutoRetBlacklist autoRetBlacklist(pc, blacklist);
 
     if (!CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots)) {
-        Backoff(cx, pc);
+        Backoff(tm, pc);
         return TPA_Nothing;
     }
 
     uint32 argc = entryFrameArgc(cx);
     TreeFragment* tree = LookupOrAddLoop(tm, pc, globalObj, globalShape, argc);
 
     debug_only_printf(LC_TMTracer,
                       "Looking for compat peer %d@%d, from %p (ip: %p)\n",
@@ -16704,85 +16731,87 @@ RecordTracePoint(JSContext* cx, uintN& i
 
             if (!execAllowed) {
                 /* We've already compiled a trace for it, but we don't want to use that trace. */
                 Blacklist((jsbytecode*)tree->root->ip);
                 return TPA_Nothing;
             }
 
             /* Best case - just go and execute. */
-            if (!ExecuteTree(cx, match, inlineCallCount, &innermostNestedGuard, &lr))
+            if (!ExecuteTree(cx, tm, match, inlineCallCount, &innermostNestedGuard, &lr))
                 return TPA_Error;
 
             if (!lr)
                 return TPA_Nothing;
 
             switch (lr->exitType) {
               case UNSTABLE_LOOP_EXIT:
-                if (!AttemptToStabilizeTree(cx, globalObj, lr, NULL, NULL, 0))
+                if (!AttemptToStabilizeTree(cx, tm, globalObj, lr, NULL, NULL, 0))
                     return TPA_RanStuff;
                 break;
 
               case MUL_ZERO_EXIT:
               case OVERFLOW_EXIT:
                 if (lr->exitType == MUL_ZERO_EXIT)
                     tm->oracle->markInstructionSlowZeroTest(cx->regs->pc);
                 else
                     tm->oracle->markInstructionUndemotable(cx->regs->pc);
                 /* FALL THROUGH */
               case BRANCH_EXIT:
               case CASE_EXIT:
-                if (!AttemptToExtendTree(cx, lr, NULL, NULL, NULL))
+                if (!AttemptToExtendTree(cx, tm, lr, NULL, NULL, NULL))
                     return TPA_RanStuff;
                 break;
 
               case LOOP_EXIT:
                 if (!innermostNestedGuard)
                     return TPA_RanStuff;
-                if (!AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL, NULL))
+                if (!AttemptToExtendTree(cx, tm, innermostNestedGuard, lr, NULL, NULL))
                     return TPA_RanStuff;
                 break;
 
               default:
                 return TPA_RanStuff;
             }
 
-            JS_ASSERT(TRACE_RECORDER(cx));
+            JS_ASSERT(tm->recorder);
 
             goto interpret;
         }
 
         if (count >= MAXPEERS) {
             debug_only_print0(LC_TMTracer, "Blacklisted: too many peer trees.\n");
             Blacklist((jsbytecode*)tree->root->ip);
             return TPA_Nothing;
         }
     }
 
     if (++tree->hits() < HOTLOOP)
         return TPA_Nothing;
     if (!ScopeChainCheck(cx, tree))
         return TPA_Nothing;
-    if (!RecordTree(cx, tree->first, NULL, NULL, 0, globalSlots))
+    if (!RecordTree(cx, tm, tree->first, NULL, NULL, 0, globalSlots))
         return TPA_Nothing;
 
   interpret:
-    JS_ASSERT(TRACE_RECORDER(cx));
+    JS_ASSERT(tm->recorder);
 
     /* Locked and loaded with a recorder. Ask the interpreter to go run some code. */
     if (!Interpret(cx, fp, inlineCallCount, JSINTERP_RECORD))
         return TPA_Error;
 
     JS_ASSERT(!cx->isExceptionPending());
     
     return TPA_RanStuff;
 }
 
-LoopProfile::LoopProfile(JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom)
-    : entryScript(entryfp->script()),
+LoopProfile::LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp,
+                         jsbytecode *top, jsbytecode *bottom)
+    : traceMonitor(tm),
+      entryScript(entryfp->script()),
       entryfp(entryfp),
       top(top),
       bottom(bottom),
       hits(0),
       undecided(false),
       unprofitable(false)
 {
     reset();
@@ -16808,16 +16837,17 @@ LoopProfile::reset()
 }
 
 MonitorResult
 LoopProfile::profileLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
     if (cx->regs->pc == top) {
         debug_only_print0(LC_TMProfiler, "Profiling complete (edge)\n");
         decide(cx);
+        stopProfiling(cx);
     } else {
         /* Record an inner loop invocation. */
         JSStackFrame *fp = cx->fp();
         jsbytecode *pc = cx->regs->pc;
         bool found = false;
 
         /* We start with the most deeply nested loop first, since it gets hit most often. */
         for (int i = int(numInnerLoops)-1; i >= 0; i--) {
@@ -16869,60 +16899,66 @@ LookupOrAddProfile(JSContext *cx, TraceM
 #if JS_MONOIC
     if (*traceData && *traceEpoch == tm->flushEpoch) {
         prof = (LoopProfile *)*traceData;
     } else {
         jsbytecode* pc = cx->regs->pc;
         jsbytecode* bottom = GetLoopBottom(cx);
         if (!bottom)
             return NULL;
-        prof = new (*tm->dataAlloc) LoopProfile(cx->fp(), pc, bottom);
+        prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
         *traceData = prof;
         *traceEpoch = tm->flushEpoch;
         tm->loopProfiles->put(pc, prof);
     }
 #else
     LoopProfileMap &table = *tm->loopProfiles;
     jsbytecode* pc = cx->regs->pc;
     if (LoopProfileMap::AddPtr p = table.lookupForAdd(pc)) {
         prof = p->value;
     } else {
         jsbytecode* bottom = GetLoopBottom(cx);
         if (!bottom)
             return NULL;
-        prof = new (*tm->dataAlloc) LoopProfile(cx->fp(), pc, bottom);
+        prof = new (*tm->dataAlloc) LoopProfile(tm, cx->fp(), pc, bottom);
         table.add(p, pc, prof);
     }
 #endif
 
     return prof;
 }
 
 static LoopProfile *
-LookupLoopProfile(JSContext *cx, jsbytecode *pc)
-{
-    TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+LookupLoopProfile(TraceMonitor *tm, jsbytecode *pc)
+{
     LoopProfileMap &table = *tm->loopProfiles;
     if (LoopProfileMap::Ptr p = table.lookup(pc)) {
         JS_ASSERT(p->value->top == pc);
         return p->value;
     } else
         return NULL;
 }
 
+void
+LoopProfile::stopProfiling(JSContext *cx)
+{
+    traceMonitor->profile = NULL;
+}
+
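
stopProfiling is now the single place that clears the monitor's active
profile, and since each LoopProfile stores the TraceMonitor it was created
for, callers no longer need a context to find that monitor. The sketch below
shows the ownership in isolation; the types are simplified stand-ins, and the
real method also takes a JSContext* that it does not use for the lookup.

#include <cassert>
#include <cstddef>

// Simplified stand-ins for the profile/monitor back-pointer added here.
struct LoopProfile;
struct TraceMonitor { LoopProfile *profile; };

struct LoopProfile {
    TraceMonitor *traceMonitor;  // stored at construction

    explicit LoopProfile(TraceMonitor *tm) : traceMonitor(tm) {}

    // Mirrors LoopProfile::stopProfiling: the profile clears itself off
    // its own monitor, with no JSContext involved in the lookup.
    void stopProfiling() { traceMonitor->profile = NULL; }
};

int main() {
    TraceMonitor tm = { NULL };
    LoopProfile prof(&tm);
    tm.profile = &prof;    // profiling begins
    prof.stopProfiling();  // profiling ends
    assert(tm.profile == NULL);
    return 0;
}
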
 JS_REQUIRES_STACK TracePointAction
 MonitorTracePoint(JSContext *cx, uintN& inlineCallCount, bool* blacklist,
                   void** traceData, uintN *traceEpoch, uint32 *loopCounter, uint32 hits)
 {
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+
     if (!cx->profilingEnabled)
-        return RecordTracePoint(cx, inlineCallCount, blacklist, true);
+        return RecordTracePoint(cx, tm, inlineCallCount, blacklist, true);
 
     *blacklist = false;
 
-    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     /*
      * We may have re-entered Interpret while profiling. We don't profile
      * the nested invocation.
      */
     if (tm->profile)
         return TPA_Nothing;
 
     jsbytecode* pc = cx->regs->pc;
@@ -16935,34 +16971,34 @@ MonitorTracePoint(JSContext *cx, uintN& 
     prof->hits += hits;
     if (prof->hits < PROFILE_HOTLOOP)
         return TPA_Nothing;
 
     AutoRetBlacklist autoRetBlacklist(cx->regs->pc, blacklist);
 
     if (prof->profiled) {
         if (prof->traceOK) {
-            return RecordTracePoint(cx, inlineCallCount, blacklist, prof->execOK);
+            return RecordTracePoint(cx, tm, inlineCallCount, blacklist, prof->execOK);
         } else {
             return TPA_Nothing;
         }
     }
 
     debug_only_printf(LC_TMProfiler, "Profiling at line %d\n",
                       js_FramePCToLineNumber(cx, cx->fp()));
 
     tm->profile = prof;
 
     if (!Interpret(cx, cx->fp(), inlineCallCount, JSINTERP_PROFILE))
         return TPA_Error;
 
     JS_ASSERT(!cx->isExceptionPending());
 
     /* Look it up again since a reset may have happened during Interpret. */
-    prof = LookupLoopProfile(cx, pc);
+    prof = LookupLoopProfile(tm, pc);
     if (prof && prof->undecided) {
         *loopCounter = 3000;
         prof->reset();
     }
 
     return TPA_RanStuff;
 }
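
MonitorTracePoint only starts profiling once a loop's accumulated hit count
reaches PROFILE_HOTLOOP, and it checks prof->profiled first so a loop whose
profile has already been decided goes straight to tracing or to TPA_Nothing.
The toy gate below captures that logic; the threshold and field names are
illustrative, not the real constants.

#include <cstdio>

// Toy sketch of the hotness gate; the threshold is illustrative.
static const unsigned PROFILE_HOTLOOP = 16;

struct Profile {
    unsigned hits;
    bool profiled;  // a decision was already made for this loop
};

// Returns true when profiling should start at this trace point.
static bool shouldProfile(Profile &prof, unsigned newHits) {
    prof.hits += newHits;              // accumulate across visits
    if (prof.hits < PROFILE_HOTLOOP)
        return false;                  // not hot enough yet
    return !prof.profiled;             // already-decided loops are skipped
}

int main() {
    Profile prof = { 0, false };
    for (unsigned i = 1; i <= 20; i++) {
        if (shouldProfile(prof, 1)) {
            std::printf("start profiling at hit %u\n", prof.hits);
            prof.profiled = true;      // decide() would normally set this
        }
    }
    return 0;
}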
 
@@ -16980,43 +17016,43 @@ PCWithinLoop(JSStackFrame *fp, jsbytecod
 }
 
 LoopProfile::ProfileAction
 LoopProfile::profileOperation(JSContext* cx, JSOp op)
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     if (profiled) {
-        tm->profile = NULL;
+        stopProfiling(cx);
         return ProfComplete;
     }
 
     jsbytecode *pc = cx->regs->pc;
     JSStackFrame *fp = cx->fp();
     JSScript *script = fp->script();
 
     if (!PCWithinLoop(fp, pc, *this)) {
         debug_only_printf(LC_TMProfiler, "Profiling complete (loop exit) at line %u\n",
                           js_FramePCToLineNumber(cx, cx->fp()));
         tm->profile->decide(cx);
-        tm->profile = NULL;
+        stopProfiling(cx);
         return ProfComplete;
     }
 
     while (loopStackDepth > 0 && !PCWithinLoop(fp, pc, loopStack[loopStackDepth-1])) {
         debug_only_print0(LC_TMProfiler, "Profiler: Exiting inner loop\n");
         loopStackDepth--;
     }
 
     if (op == JSOP_TRACE || op == JSOP_NOTRACE) {
         if (pc != top && (loopStackDepth == 0 || pc != loopStack[loopStackDepth-1].top)) {
             if (loopStackDepth == PROFILE_MAX_INNER_LOOPS) {
                 debug_only_print0(LC_TMProfiler, "Profiling complete (maxnest)\n");
                 tm->profile->decide(cx);
-                tm->profile = NULL;
+                stopProfiling(cx);
                 return ProfComplete;
             }
 
             debug_only_printf(LC_TMProfiler, "Profiler: Entering inner loop at line %d\n",
                               js_FramePCToLineNumber(cx, cx->fp()));
             loopStack[loopStackDepth++] = InnerLoop(fp, pc, GetLoopBottom(cx));
         }
     }
@@ -17184,17 +17220,17 @@ LoopProfile::isCompilationExpensive(JSCo
         return true;
 
     /* Is the code too branchy? */
     if (numSelfOpsMult > numSelfOps*100000)
         return true;
 
     /* Ensure that inner loops aren't too expensive. */
     for (uintN i=0; i<numInnerLoops; i++) {
-        LoopProfile *prof = LookupLoopProfile(cx, innerLoops[i].top);
+        LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
         if (!prof || prof->isCompilationExpensive(cx, depth-1))
             return true;
     }
 
     return false;
 }
 
 /*
@@ -17209,17 +17245,17 @@ LoopProfile::isCompilationUnprofitable(J
     if (!profiled)
         return false;
 
     if (goodOps <= 22 && allOps[OP_FWDJUMP])
         return true;
     
     /* Ensure that inner loops aren't fleeting. */
     for (uintN i=0; i<numInnerLoops; i++) {
-        LoopProfile *prof = LookupLoopProfile(cx, innerLoops[i].top);
+        LoopProfile *prof = LookupLoopProfile(traceMonitor, innerLoops[i].top);
         if (!prof || prof->unprofitable)
             return true;
     }
 
     return false;
 }
 
 /* After profiling is done, this method decides whether to trace the loop. */
@@ -17235,17 +17271,17 @@ LoopProfile::decide(JSContext *cx)
 
 #ifdef DEBUG
     uintN line = js_PCToLineNumber(cx, entryScript, top);
 
     debug_only_printf(LC_TMProfiler, "LOOP %s:%d\n", entryScript->filename, line);
 
     for (uintN i=0; i<numInnerLoops; i++) {
         InnerLoop &loop = innerLoops[i];
-        if (LoopProfile *prof = LookupLoopProfile(cx, loop.top)) {
+        if (LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top)) {
             uintN line = js_PCToLineNumber(cx, prof->entryScript, prof->top);
             debug_only_printf(LC_TMProfiler, "NESTED %s:%d (%d iters)\n",
                               prof->entryScript->filename, line, loop.iters);
         }
     }
     debug_only_printf(LC_TMProfiler, "FEATURE float %d\n", allOps[OP_FLOAT]);
     debug_only_printf(LC_TMProfiler, "FEATURE int %d\n", allOps[OP_INT]);
     debug_only_printf(LC_TMProfiler, "FEATURE bit %d\n", allOps[OP_BIT]);
@@ -17306,17 +17342,17 @@ LoopProfile::decide(JSContext *cx)
     }
 
     debug_only_printf(LC_TMProfiler, "TRACE %s:%d = %d\n", entryScript->filename, line, traceOK);
 
     if (traceOK) {
         /* Unblacklist the inner loops. */
         for (uintN i=0; i<numInnerLoops; i++) {
             InnerLoop &loop = innerLoops[i];
-            LoopProfile *prof = LookupLoopProfile(cx, loop.top);
+            LoopProfile *prof = LookupLoopProfile(traceMonitor, loop.top);
             if (prof) {
                 /*
                  * Note that execOK for the inner loop is left unchanged. So even
                  * if we trace the inner loop, we will never call that trace
                  * on its own. We'll only call it from this trace.
                  */
                 prof->traceOK = true;
                 if (IsBlacklisted(loop.top)) {
@@ -17341,36 +17377,39 @@ LoopProfile::decide(JSContext *cx)
 
 JS_REQUIRES_STACK MonitorResult
 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     if (tm->profile)
         return tm->profile->profileLoopEdge(cx, inlineCallCount);
     else
-        return RecordLoopEdge(cx, inlineCallCount);
+        return RecordLoopEdge(cx, tm, inlineCallCount);
 }
 
 void
 AbortProfiling(JSContext *cx)
 {
+    JS_ASSERT(TRACE_PROFILER(cx));
+    LoopProfile *prof = TRACE_PROFILER(cx);
+    
     debug_only_print0(LC_TMProfiler, "Profiling complete (aborted)\n");
-    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
-    tm->profile->profiled = true;
-    tm->profile->traceOK = false;
-    tm->profile->execOK = false;
-    tm->profile = NULL;
+    prof->profiled = true;
+    prof->traceOK = false;
+    prof->execOK = false;
+    prof->stopProfiling(cx);
 }
 
 #else /* JS_METHODJIT */
 
 JS_REQUIRES_STACK MonitorResult
 MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
-    return RecordLoopEdge(cx, inlineCallCount);
+    TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
+    return RecordLoopEdge(cx, tm, inlineCallCount);
 }
 
 #endif /* JS_METHODJIT */
 
 uint32
 GetHotloop(JSContext *cx)
 {
 #ifdef JS_METHODJIT
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -304,17 +304,20 @@ public:
     }
 };
 
 typedef Queue<uint16> SlotList;
 
 class TypeMap : public Queue<JSValueType> {
     Oracle *oracle;
 public:
-    TypeMap(nanojit::Allocator* alloc) : Queue<JSValueType>(alloc) {}
+    TypeMap(nanojit::Allocator* alloc, Oracle *oracle)
+      : Queue<JSValueType>(alloc),
+        oracle(oracle)
+    {}
     void set(unsigned stackSlots, unsigned ngslots,
              const JSValueType* stackTypeMap, const JSValueType* globalTypeMap);
     JS_REQUIRES_STACK void captureTypes(JSContext* cx, JSObject* globalObj, SlotList& slots, unsigned callDepth,
                                         bool speculate);
     JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots,
                                                      unsigned stackSlots, bool speculate);
     bool matches(TypeMap& other) const;
     void fromRaw(JSValueType* other, unsigned numSlots);
@@ -518,19 +521,19 @@ struct UnstableExit
 {
     VMFragment* fragment;
     VMSideExit* exit;
     UnstableExit* next;
 };
 
 struct LinkableFragment : public VMFragment
 {
-    LinkableFragment(const void* _ip, nanojit::Allocator* alloc
+    LinkableFragment(const void* _ip, nanojit::Allocator* alloc, Oracle *oracle
                      verbose_only(, uint32_t profFragID))
-      : VMFragment(_ip verbose_only(, profFragID)), typeMap(alloc), nStackTypes(0)
+      : VMFragment(_ip verbose_only(, profFragID)), typeMap(alloc, oracle), nStackTypes(0)
     { }
 
     uint32                  branchCount;
     TypeMap                 typeMap;
     unsigned                nStackTypes;
     unsigned                spOffsetAtEntry;
     SlotList*               globalSlots;
 };
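
LinkableFragment now threads the Oracle it is given into its embedded
TypeMap, matching the two-argument TypeMap constructor in the hunk above and
the js_new<TypeMap>(..., tm->oracle) call in InitJIT. A minimal sketch of
that constructor chaining, with stand-in types in place of the real nanojit
and jstracer.h definitions:

#include <vector>

// Stand-ins: the real Allocator/Oracle/Queue live in nanojit and jstracer.h.
struct Allocator {};
struct Oracle {};
template <typename T>
struct Queue {
    explicit Queue(Allocator *) {}
    std::vector<T> items;
};

// Mirrors the new two-argument TypeMap constructor.
struct TypeMapSketch : public Queue<unsigned char> {
    Oracle *oracle;
    TypeMapSketch(Allocator *alloc, Oracle *oracle)
      : Queue<unsigned char>(alloc), oracle(oracle) {}
};

// Mirrors LinkableFragment passing its Oracle into the embedded typeMap.
struct LinkableFragmentSketch {
    TypeMapSketch typeMap;
    LinkableFragmentSketch(Allocator *alloc, Oracle *oracle)
      : typeMap(alloc, oracle) {}
};

int main() {
    Allocator alloc;
    Oracle oracle;
    LinkableFragmentSketch frag(&alloc, &oracle);
    return 0;
}
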
@@ -541,19 +544,19 @@ struct LinkableFragment : public VMFragm
  * key because the fragment will write those arguments back to the interpreter
  * stack when it exits, using its typemap, which implicitly incorporates a
  * given value of argc. Without this feature, a fragment could be called as an
  * inner tree with two different values of argc, and entry type checking or
  * exit frame synthesis could crash.
  */
 struct TreeFragment : public LinkableFragment
 {
-    TreeFragment(const void* _ip, nanojit::Allocator* alloc, JSObject* _globalObj,
+    TreeFragment(const void* _ip, nanojit::Allocator* alloc, Oracle *oracle, JSObject* _globalObj,
                  uint32 _globalShape, uint32 _argc verbose_only(, uint32_t profFragID)):
-        LinkableFragment(_ip, alloc verbose_only(, profFragID)),
+        LinkableFragment(_ip, alloc, oracle verbose_only(, profFragID)),
         first(NULL),
         next(NULL),
         peer(NULL),
         globalObj(_globalObj),
         globalShape(_globalShape),
         argc(_argc),
         dependentTrees(alloc),
         linkedTrees(alloc),
@@ -639,16 +642,19 @@ public:
         OP_FWDJUMP, // Jumps with positive delta
         OP_NEW, // JSOP_NEW instructions
         OP_RECURSIVE, // Recursive calls
         OP_ARRAY_READ, // Reads from dense arrays
         OP_TYPED_ARRAY, // Accesses to typed arrays
         OP_LIMIT
     };
 
+    /* The TraceMonitor for which we're profiling. */
+    TraceMonitor *traceMonitor;
+
     /* The script in which the loop header lives. */
     JSScript *entryScript;
 
     /* The stack frame where we started profiling. Only valid while profiling! */
     JSStackFrame *entryfp;
 
     /* The bytecode locations of the loop header and the back edge. */
     jsbytecode *top, *bottom;
@@ -762,17 +768,17 @@ public:
     inline StackValue stackAt(int pos) {
         pos += sp;
         if (pos >= 0 && uintN(pos) < PROFILE_MAX_STACK)
             return stack[pos];
         else
             return StackValue(false);
     }
     
-    LoopProfile(JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom);
+    LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom);
 
     void reset();
 
     enum ProfileAction {
         ProfContinue,
         ProfComplete
     };
 
@@ -791,38 +797,40 @@ public:
     
     /* Called for every instruction being profiled. */
     ProfileAction profileOperation(JSContext *cx, JSOp op);
 
     /* Once a loop's profile is done, these decide whether it should be traced. */
     bool isCompilationExpensive(JSContext *cx, uintN depth);
     bool isCompilationUnprofitable(JSContext *cx, uintN goodOps);
     void decide(JSContext *cx);
+
+    void stopProfiling(JSContext *cx);
 };
 
 /*
  * BUILTIN_NO_FIXUP_NEEDED indicates that after the initial LeaveTree of a deep
  * bail, the builtin call needs no further fixup when the trace exits and calls
  * LeaveTree the second time.
  */
 typedef enum BuiltinStatus {
     BUILTIN_BAILED = 1,
     BUILTIN_ERROR = 2
 } BuiltinStatus;
 
 static JS_INLINE void
-SetBuiltinError(JSContext *cx)
+SetBuiltinError(TraceMonitor *tm)
 {
-    JS_TRACE_MONITOR(cx).tracerState->builtinStatus |= BUILTIN_ERROR;
+    tm->tracerState->builtinStatus |= BUILTIN_ERROR;
 }
 
 static JS_INLINE bool
-WasBuiltinSuccessful(JSContext *cx)
+WasBuiltinSuccessful(TraceMonitor *tm)
 {
-    return JS_TRACE_MONITOR(cx).tracerState->builtinStatus == 0;
+    return tm->tracerState->builtinStatus == 0;
 }
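
These two helpers treat builtinStatus as a bitmask: WasBuiltinSuccessful
requires that neither BUILTIN_BAILED nor BUILTIN_ERROR was recorded during
the call, which is why the natives above return WasBuiltinSuccessful(tm) on
their success paths rather than a plain true. A standalone illustration, with
stand-in types and the same flag values as the enum above:

#include <cstdio>

// Stand-ins; flag values match the BuiltinStatus enum above.
enum { BUILTIN_BAILED = 1, BUILTIN_ERROR = 2 };
struct TracerState  { int builtinStatus; };
struct TraceMonitor { TracerState *tracerState; };

static bool WasBuiltinSuccessfulSketch(TraceMonitor *tm) {
    // Success means the builtin neither deep-bailed nor hit an error.
    return tm->tracerState->builtinStatus == 0;
}

int main() {
    TracerState state = { 0 };
    TraceMonitor tm = { &state };
    std::printf("clean:  %d\n", (int) WasBuiltinSuccessfulSketch(&tm));  // 1
    state.builtinStatus |= BUILTIN_BAILED;
    std::printf("bailed: %d\n", (int) WasBuiltinSuccessfulSketch(&tm));  // 0
    return 0;
}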
 
 #ifdef DEBUG_RECORDING_STATUS_NOT_BOOL
 /* #define DEBUG_RECORDING_STATUS_NOT_BOOL to detect misuses of RecordingStatus */
 struct RecordingStatus {
     int code;
     bool operator==(RecordingStatus &s) { return this->code == s.code; };
     bool operator!=(RecordingStatus &s) { return this->code != s.code; };
@@ -1544,17 +1552,17 @@ class TraceRecorder
 
     /* Member declarations for each opcode, to be called before interpreting the opcode. */
 #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format)               \
     JS_REQUIRES_STACK AbortableRecordingStatus record_##op();
 # include "jsopcode.tbl"
 #undef OPDEF
 
     JS_REQUIRES_STACK
-    TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*,
+    TraceRecorder(JSContext* cx, TraceMonitor *tm, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                   VMSideExit* expectedInnerExit, JSScript* outerScript, jsbytecode* outerPC,
                   uint32 outerArgc, bool speculate);
 
     /* The destructor should only be called through finish*, not directly. */
     ~TraceRecorder();
     JS_REQUIRES_STACK AbortableRecordingStatus finishSuccessfully();
 
@@ -1568,26 +1576,26 @@ class TraceRecorder
     friend class AdjustCallerStackTypesVisitor;
     friend class TypeCompatibilityVisitor;
     friend class ImportFrameSlotsVisitor;
     friend class SlotMap;
     friend class DefaultSlotMap;
     friend class DetermineTypesVisitor;
     friend class RecursiveSlotMap;
     friend class UpRecursiveSlotMap;
-    friend MonitorResult RecordLoopEdge(JSContext*, uintN&);
-    friend TracePointAction RecordTracePoint(JSContext*, uintN &inlineCallCount,
+    friend MonitorResult RecordLoopEdge(JSContext*, TraceMonitor*, uintN&);
+    friend TracePointAction RecordTracePoint(JSContext*, TraceMonitor*, uintN &inlineCallCount,
-                                             bool *blacklist);
+                                             bool *blacklist, bool execAllowed);
     friend AbortResult AbortRecording(JSContext*, const char*);
     friend class BoxArg;
     friend void TraceMonitor::sweep(JSContext *cx);
 
   public:
     static bool JS_REQUIRES_STACK
-    startRecorder(JSContext*, VMSideExit*, VMFragment*,
+    startRecorder(JSContext*, TraceMonitor *, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                   VMSideExit* expectedInnerExit, JSScript* outerScript, jsbytecode* outerPC,
                   uint32 outerArgc, bool speculate);
 
     /* Accessors. */
     VMFragment*         getFragment() const { return fragment; }
     TreeFragment*       getTree() const { return tree; }
     bool                outOfMemory() const { return traceMonitor->outOfMemory(); }
@@ -1651,17 +1659,16 @@ class TraceRecorder
                                   nanojit::LIns *ins6);
 #endif
 };
 
 #define TRACING_ENABLED(cx)       ((cx)->traceJitEnabled)
 #define REGEX_JIT_ENABLED(cx)     ((cx)->traceJitEnabled || (cx)->methodJitEnabled)
 #define TRACE_RECORDER(cx)        (JS_TRACE_MONITOR(cx).recorder)
 #define TRACE_PROFILER(cx)        (JS_TRACE_MONITOR(cx).profile)
-#define SET_TRACE_RECORDER(cx,tr) (JS_TRACE_MONITOR(cx).recorder = (tr))
 
 #define JSOP_IN_RANGE(op,lo,hi)   (uintN((op) - (lo)) <= uintN((hi) - (lo)))
 #define JSOP_IS_BINARY(op)        JSOP_IN_RANGE(op, JSOP_BITOR, JSOP_MOD)
 #define JSOP_IS_UNARY(op)         JSOP_IN_RANGE(op, JSOP_NEG, JSOP_POS)
 #define JSOP_IS_EQUALITY(op)      JSOP_IN_RANGE(op, JSOP_EQ, JSOP_NE)
 
 #define TRACE_ARGS_(x,args)                                                   \
     JS_BEGIN_MACRO                                                            \
@@ -1708,17 +1715,17 @@ FinishJIT(TraceMonitor *tm);
 
 extern void
 PurgeScriptFragments(TraceMonitor* tm, JSScript* script);
 
 extern bool
 OverfullJITCache(JSContext *cx, TraceMonitor* tm);
 
 extern void
-FlushJITCache(JSContext* cx);
+FlushJITCache(JSContext* cx, TraceMonitor* tm);
 
 extern JSObject *
 GetBuiltinFunction(JSContext *cx, uintN index);
 
 extern void
 SetMaxCodeCacheBytes(JSContext* cx, uint32 bytes);
 
 extern void