Always discard methodjit code on GC, remove JM+TM integration, bug 685358. r=dvander
author: Brian Hackett <bhackett1024@gmail.com>
date: Mon, 24 Oct 2011 20:46:00 -0700
changeset: 79173 049a08dfadc2cd9be9817bffae36f27f0f2fbaa7
parent: 79172 03c236ba6ae0408b00c51a8899ac46989329bac6
child: 79174 b87afa49527d08b46de74a07332945777993f88f
push id: 2893
push user: bhackett@mozilla.com
push date: Tue, 25 Oct 2011 03:46:24 +0000
treeherder: mozilla-inbound@049a08dfadc2 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: dvander
bugs: 685358
milestone: 10.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Always discard methodjit code on GC, remove JM+TM integration, bug 685358. r=dvander
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/jsgcmark.cpp
js/src/jsinfer.cpp
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/jstracer.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/FastArithmetic.cpp
js/src/methodjit/FrameState.cpp
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/MonoIC.h
js/src/methodjit/PolyIC.cpp
js/src/methodjit/PolyIC.h
js/src/methodjit/StubCalls.h
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -407,52 +407,16 @@ JSCompartment::wrap(JSContext *cx, AutoI
     jsint length = props.length();
     for (size_t n = 0; n < size_t(length); ++n) {
         if (!wrapId(cx, &vector[n]))
             return false;
     }
     return true;
 }
 
-#if defined JS_METHODJIT && defined JS_MONOIC
-/*
- * Check if the pool containing the code for jit should be destroyed, per the
- * heuristics in JSCompartment::sweep.
- */
-static inline bool
-ScriptPoolDestroyed(JSContext *cx, mjit::JITScript *jit,
-                    uint32 releaseInterval, uint32 &counter)
-{
-    JSC::ExecutablePool *pool = jit->code.m_executablePool;
-    if (pool->m_gcNumber != cx->runtime->gcNumber) {
-        /*
-         * The m_destroy flag may have been set in a previous GC for a pool which had
-         * references we did not remove (e.g. from the compartment's ExecutableAllocator)
-         * and is still around. Forget we tried to destroy it in such cases.
-         */
-        pool->m_destroy = false;
-        pool->m_gcNumber = cx->runtime->gcNumber;
-        if (--counter == 0) {
-            pool->m_destroy = true;
-            counter = releaseInterval;
-        }
-    }
-    return pool->m_destroy;
-}
-
-static inline void
-ScriptTryDestroyCode(JSContext *cx, JSScript *script, bool normal,
-                     uint32 releaseInterval, uint32 &counter)
-{
-    mjit::JITScript *jit = normal ? script->jitNormal : script->jitCtor;
-    if (jit && ScriptPoolDestroyed(cx, jit, releaseInterval, counter))
-        mjit::ReleaseScriptCode(cx, script, !normal);
-}
-#endif // JS_METHODJIT && JS_MONOIC
-
 /*
  * This method marks pointers that cross compartment boundaries. It should be
  * called only for per-compartment GCs, since full GCs naturally follow pointers
  * across compartments.
  */
 void
 JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
 {
@@ -487,17 +451,17 @@ JSCompartment::markTypes(JSTracer *trc)
         }
     }
 
     for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next())
         MarkTypeObject(trc, i.get<types::TypeObject>(), "mark_types_scan");
 }
 
 void
-JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
+JSCompartment::sweep(JSContext *cx, bool releaseTypes)
 {
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         JS_ASSERT_IF(IsAboutToBeFinalized(cx, e.front().key.toGCThing()) &&
                      !IsAboutToBeFinalized(cx, e.front().value.toGCThing()),
                      e.front().key.isString());
         if (IsAboutToBeFinalized(cx, e.front().key.toGCThing()) ||
             IsAboutToBeFinalized(cx, e.front().value.toGCThing())) {
@@ -526,117 +490,63 @@ JSCompartment::sweep(JSContext *cx, uint
 
     sweepBreakpoints(cx);
 
 #ifdef JS_TRACER
     if (hasTraceMonitor())
         traceMonitor()->sweep(cx);
 #endif
 
-#ifdef JS_METHODJIT
     /*
-     * Purge PICs in the compartment, along with native call stubs for
-     * compartments which do not have such stubs on the stack. PICs can
-     * reference shapes and type data, and native call stubs are disassociated
-     * from the PIC or MIC they were generated for.
+     * Kick all frames on the stack into the interpreter, and release all JIT
+     * code in the compartment.
      */
-    bool canPurgeNativeCalls = true;
-    VMFrame *f = hasJaegerCompartment() ? jaegerCompartment()->activeFrame() : NULL;
-    for (; f; f = f->previous) {
-        if (f->stubRejoin)
-            canPurgeNativeCalls = false;
-    }
+#ifdef JS_METHODJIT
+    mjit::ClearAllFrames(this);
+
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        if (script->hasJITCode()) {
-#ifdef JS_POLYIC
-            mjit::ic::PurgePICs(cx, script);
-#endif
-            if (canPurgeNativeCalls) {
-                if (script->jitNormal)
-                    script->jitNormal->purgeNativeCallStubs();
-                if (script->jitCtor)
-                    script->jitCtor->purgeNativeCallStubs();
-            }
-        }
+        mjit::ReleaseScriptCode(cx, script);
+
+        /*
+         * Use counts for scripts are reset on GC. After discarding code we
+         * need to let it warm back up to get information like which opcodes
+         * are setting array holes or accessing getter properties.
+         */
+        script->resetUseCount();
     }
 #endif
 
-    bool discardScripts = !active && (releaseInterval != 0 || hasDebugModeCodeToDrop);
-
-#if defined JS_METHODJIT && defined JS_MONOIC
-
-    /*
-     * The release interval is the frequency with which we should try to destroy
-     * executable pools by releasing all JIT code in them, zero to never destroy pools.
-     * Initialize counter so that the first pool will be destroyed, and eventually drive
-     * the amount of JIT code in never-used compartments to zero. Don't discard anything
-     * for compartments which currently have active stack frames.
-     */
-    uint32 counter = 1;
-    if (discardScripts)
-        hasDebugModeCodeToDrop = false;
-
-    for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-        JSScript *script = i.get<JSScript>();
-        if (script->hasJITCode()) {
-            mjit::ic::SweepCallICs(cx, script, discardScripts);
-            if (discardScripts) {
-                ScriptTryDestroyCode(cx, script, true, releaseInterval, counter);
-                ScriptTryDestroyCode(cx, script, false, releaseInterval, counter);
-            }
-        }
-    }
-
-#endif
-
-#ifdef JS_METHODJIT
-    if (types.inferenceEnabled)
-        mjit::ClearAllFrames(this);
-#endif
-
-    if (activeAnalysis) {
-        /*
-         * Analysis information is in use, so don't clear the analysis pool.
-         * jitcode still needs to be released, if this is a shape-regenerating
-         * GC then shape numbers baked into the code may change.
-         */
-#ifdef JS_METHODJIT
-        if (types.inferenceEnabled) {
-            for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-                JSScript *script = i.get<JSScript>();
-                mjit::ReleaseScriptCode(cx, script);
-            }
-        }
-#endif
-    } else {
+    if (!activeAnalysis) {
         /*
          * Clear the analysis pool, but don't release its data yet. While
          * sweeping types any live data will be allocated into the pool.
          */
         LifoAlloc oldAlloc(typeLifoAlloc.defaultChunkSize());
         oldAlloc.steal(&typeLifoAlloc);
 
         /*
+         * Periodically release observed types for all scripts. This is safe to
+         * do when there are no frames for the compartment on the stack.
+         */
+        if (active)
+            releaseTypes = false;
+
+        /*
          * Sweep analysis information and everything depending on it from the
          * compartment, including all remaining mjit code if inference is
          * enabled in the compartment.
          */
         if (types.inferenceEnabled) {
             for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
                 JSScript *script = i.get<JSScript>();
                 if (script->types) {
                     types::TypeScript::Sweep(cx, script);
 
-                    /*
-                     * On each 1/8 lifetime, release observed types for all scripts.
-                     * This is always safe to do when there are no frames for the
-                     * compartment on the stack.
-                     */
-                    if (discardScripts) {
+                    if (releaseTypes) {
                         script->types->destroy();
                         script->types = NULL;
                         script->typesPurged = true;
                     }
                 }
             }
         }
 
@@ -679,30 +589,16 @@ JSCompartment::purge(JSContext *cx)
     /*
      * If we are about to regenerate shapes, we have to flush the JIT cache,
      * which will eventually abort any current recording.
      */
     if (cx->runtime->gcRegenShapes)
         if (hasTraceMonitor())
             traceMonitor()->needFlush = JS_TRUE;
 #endif
-
-#if defined JS_METHODJIT && defined JS_MONOIC
-    /*
-     * MICs do not refer to data which can be GC'ed and do not generate stubs
-     * which might need to be discarded, but are sensitive to shape regeneration.
-     */
-    if (cx->runtime->gcRegenShapes) {
-        for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            if (script->hasJITCode())
-                mjit::ic::PurgeMICs(cx, script);
-        }
-    }
-#endif
 }
 
 MathCache *
 JSCompartment::allocMathCache(JSContext *cx)
 {
     JS_ASSERT(!mathCache);
     mathCache = cx->new_<MathCache>();
     if (!mathCache)
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -529,17 +529,17 @@ struct JS_FRIEND_API(JSCompartment) {
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::StrictPropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
 
     void markTypes(JSTracer *trc);
-    void sweep(JSContext *cx, uint32 releaseInterval);
+    void sweep(JSContext *cx, bool releaseTypes);
     void purge(JSContext *cx);
 
     void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
     void reduceGCTriggerBytes(uint32 amount);
 
     js::DtoaCache dtoaCache;
 
   private:
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -725,22 +725,18 @@ IsAboutToBeFinalized(JSContext *cx, cons
 JS_FRIEND_API(bool)
 js_GCThingIsMarked(void *thing, uintN color = BLACK)
 {
     JS_ASSERT(thing);
     AssertValidColor(thing, color);
     return reinterpret_cast<Cell *>(thing)->isMarked(color);
 }
 
-/*
- * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
- * JIT code is discarded in inactive compartments, regardless of how often that
- * code runs.
- */
-static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 60 * 1000 * 1000;
+/* Lifetime for type sets attached to scripts containing observed types. */
+static const int64 JIT_SCRIPT_RELEASE_TYPES_INTERVAL = 60 * 1000 * 1000;
 
 JSBool
 js_InitGC(JSRuntime *rt, uint32 maxbytes)
 {
     if (!rt->gcChunkSet.init(INITIAL_CHUNK_CAPACITY))
         return false;
 
     if (!rt->gcRootsHash.init(256))
@@ -772,17 +768,17 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes
     rt->gcEmptyArenaPoolLifespan = 30000;
 
     /*
      * The assigned value prevents GC from running when GC memory is too low
      * (during JS engine start).
      */
     rt->setGCLastBytes(8192, GC_NORMAL);
 
-    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;
+    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
     return true;
 }
 
 namespace js {
 
 inline bool
 InFreeList(ArenaHeader *aheader, uintptr_t addr)
 {
@@ -2276,37 +2272,29 @@ GCHelperThread::doSweep()
     }
     freeVector.resize(0);
 }
 
 } /* namespace js */
 
 #endif /* JS_THREADSAFE */
 
-static uint32
-ComputeJitReleaseInterval(JSContext *cx)
+static bool
+ReleaseObservedTypes(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
-    /*
-     * Figure out how much JIT code should be released from inactive compartments.
-     * If multiple eighth-lives have passed, compound the release interval linearly;
-     * if enough time has passed, all inactive JIT code will be released.
-     */
-    uint32 releaseInterval = 0;
+
+    bool releaseTypes = false;
     int64 now = PRMJ_Now();
     if (now >= rt->gcJitReleaseTime) {
-        releaseInterval = 8;
-        while (now >= rt->gcJitReleaseTime) {
-            if (--releaseInterval == 1)
-                rt->gcJitReleaseTime = now;
-            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
-        }
+        releaseTypes = true;
+        rt->gcJitReleaseTime = now + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
     }
 
-    return releaseInterval;
+    return releaseTypes;
 }
 
 static void
 SweepCompartments(JSContext *cx, JSGCInvocationKind gckind)
 {
     JSRuntime *rt = cx->runtime;
     JSCompartmentCallback callback = rt->compartmentCallback;
 
@@ -2447,19 +2435,19 @@ SweepPhase(JSContext *cx, GCMarker *gcma
     js_SweepAtomState(cx);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(cx);
 
     if (!rt->gcCurrentCompartment)
         Debugger::sweepAll(cx);
 
-    uint32 releaseInterval = rt->gcCurrentCompartment ? 0 : ComputeJitReleaseInterval(cx);
+    bool releaseTypes = !rt->gcCurrentCompartment && ReleaseObservedTypes(cx);
     for (GCCompartmentsIter c(rt); !c.done(); c.next())
-        c->sweep(cx, releaseInterval);
+        c->sweep(cx, releaseTypes);
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_OBJECT);
 
         /*
          * We finalize objects before other GC things to ensure that the object's
          * finalizer can access the other things even if they will be freed.
          */
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -837,23 +837,16 @@ MarkChildren(JSTracer *trc, JSScript *sc
 
     if (IS_GC_MARKING_TRACER(trc) && script->filename)
         js_MarkScriptFilename(script->filename);
 
     script->bindings.trace(trc);
 
     if (script->types)
         script->types->trace(trc);
-
-#ifdef JS_METHODJIT
-    if (script->jitNormal)
-        script->jitNormal->trace(trc);
-    if (script->jitCtor)
-        script->jitCtor->trace(trc);
-#endif
 }
 
 void
 MarkChildren(JSTracer *trc, const Shape *shape)
 {
 restart:
     MarkId(trc, shape->propid, "propid");
 
--- a/js/src/jsinfer.cpp
+++ b/js/src/jsinfer.cpp
@@ -6081,31 +6081,16 @@ TypeScript::Sweep(JSContext *cx, JSScrip
     /*
      * If the script has nesting state with a most recent activation, we do not
      * need either to mark the call object or clear it if not live. Even with
      * a dead pointer in the nesting, we can't get a spurious match while
      * testing for reentrancy: if previous activations are still live, they
      * cannot alias the most recent one, and future activations will overwrite
      * activeCall on creation.
      */
-
-    /*
-     * Method JIT code depends on the type inference data which is about to
-     * be purged, so purge the jitcode as well.
-     */
-#ifdef JS_METHODJIT
-    mjit::ReleaseScriptCode(cx, script);
-
-    /*
-     * Use counts for scripts are reset on GC. After discarding code we need to
-     * let it warm back up to get information like which opcodes are setting
-     * array holes or accessing getter properties.
-     */
-    script->resetUseCount();
-#endif
 }
 
 void
 TypeScript::destroy()
 {
     while (dynamicList) {
         TypeResult *next = dynamicList->next;
         Foreground::delete_(dynamicList);
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1797,40 +1797,22 @@ js::Interpret(JSContext *cx, StackFrame 
      * if it returns false.
      */
 #define CHECK_BRANCH()                                                        \
     JS_BEGIN_MACRO                                                            \
         if (JS_THREAD_DATA(cx)->interruptFlags && !js_HandleExecutionInterrupt(cx)) \
             goto error;                                                       \
     JS_END_MACRO
 
-#if defined(JS_TRACER) && defined(JS_METHODJIT)
-# define LEAVE_ON_SAFE_POINT()                                                \
-    do {                                                                      \
-        JS_ASSERT_IF(leaveOnSafePoint, !TRACE_RECORDER(cx));                  \
-        JS_ASSERT_IF(leaveOnSafePoint, !TRACE_PROFILER(cx));                  \
-        JS_ASSERT_IF(leaveOnSafePoint, interpMode != JSINTERP_NORMAL);        \
-        if (leaveOnSafePoint && !regs.fp()->hasImacropc() &&                  \
-            script->maybeNativeCodeForPC(regs.fp()->isConstructing(), regs.pc)) { \
-            JS_ASSERT(!TRACE_RECORDER(cx));                                   \
-            interpReturnOK = true;                                            \
-            goto leave_on_safe_point;                                         \
-        }                                                                     \
-    } while (0)
-#else
-# define LEAVE_ON_SAFE_POINT() /* nop */
-#endif
-
 #define BRANCH(n)                                                             \
     JS_BEGIN_MACRO                                                            \
         regs.pc += (n);                                                       \
         op = (JSOp) *regs.pc;                                                 \
         if ((n) <= 0)                                                         \
             goto check_backedge;                                              \
-        LEAVE_ON_SAFE_POINT();                                                \
         DO_OP();                                                              \
     JS_END_MACRO
 
 #define SET_SCRIPT(s)                                                         \
     JS_BEGIN_MACRO                                                            \
         script = (s);                                                         \
         if (script->stepModeEnabled())                                        \
             ENABLE_INTERRUPTS();                                              \
@@ -1856,23 +1838,16 @@ js::Interpret(JSContext *cx, StackFrame 
     JSRuntime *const rt = cx->runtime;
     JSScript *script;
     SET_SCRIPT(regs.fp()->script());
     double *pcCounts = script->pcCounters.get(JSPCCounters::INTERP);
     ENABLE_PCCOUNT_INTERRUPTS();
     Value *argv = regs.fp()->maybeFormalArgs();
     CHECK_INTERRUPT_HANDLER();
 
-#if defined(JS_TRACER) && defined(JS_METHODJIT)
-    bool leaveOnSafePoint = (interpMode == JSINTERP_SAFEPOINT);
-# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) (leaveOnSafePoint = false))
-#else
-# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) 0)
-#endif
-
     if (!entryFrame)
         entryFrame = regs.fp();
 
     /*
      * Initialize the index segment register used by LOAD_ATOM and
      * GET_FULL_INDEX macros below. As a register we use a pointer based on
      * the atom map to turn frequently executed LOAD_ATOM into simple array
      * access. For less frequent object and regexp loads we have to recover
@@ -2045,60 +2020,33 @@ js::Interpret(JSContext *cx, StackFrame 
         }
 
 #ifdef JS_TRACER
 #ifdef JS_METHODJIT
         if (TRACE_PROFILER(cx) && interpMode == JSINTERP_PROFILE) {
             LoopProfile *prof = TRACE_PROFILER(cx);
             JS_ASSERT(!TRACE_RECORDER(cx));
             LoopProfile::ProfileAction act = prof->profileOperation(cx, op);
-            switch (act) {
-                case LoopProfile::ProfComplete:
-                    if (interpMode != JSINTERP_NORMAL) {
-                        leaveOnSafePoint = true;
-                        LEAVE_ON_SAFE_POINT();
-                    }
-                    break;
-                default:
-                    moreInterrupts = true;
-                    break;
-            }
+            if (act != LoopProfile::ProfComplete)
+                moreInterrupts = true;
         }
 #endif
         if (TraceRecorder* tr = TRACE_RECORDER(cx)) {
             JS_ASSERT(!TRACE_PROFILER(cx));
             AbortableRecordingStatus status = tr->monitorRecording(op);
             JS_ASSERT_IF(cx->isExceptionPending(), status == ARECORD_ERROR);
 
-            if (interpMode != JSINTERP_NORMAL) {
-                JS_ASSERT(interpMode == JSINTERP_RECORD || JSINTERP_SAFEPOINT);
-                switch (status) {
-                  case ARECORD_IMACRO_ABORTED:
-                  case ARECORD_ABORTED:
-                  case ARECORD_COMPLETED:
-                  case ARECORD_STOP:
-#ifdef JS_METHODJIT
-                    leaveOnSafePoint = true;
-                    LEAVE_ON_SAFE_POINT();
-#endif
-                    break;
-                  default:
-                    break;
-                }
-            }
-
             switch (status) {
               case ARECORD_CONTINUE:
                 moreInterrupts = true;
                 break;
               case ARECORD_IMACRO:
               case ARECORD_IMACRO_ABORTED:
                 atoms = rt->atomState.commonAtomsStart();
                 op = JSOp(*regs.pc);
-                CLEAR_LEAVE_ON_TRACE_POINT();
                 if (status == ARECORD_IMACRO)
                     DO_OP();    /* keep interrupting for op. */
                 break;
               case ARECORD_ERROR:
                 // The code at 'error:' aborts the recording.
                 goto error;
               case ARECORD_ABORTED:
               case ARECORD_COMPLETED:
@@ -2133,34 +2081,33 @@ ADD_EMPTY_CASE(JSOP_TRY)
 ADD_EMPTY_CASE(JSOP_STARTXML)
 ADD_EMPTY_CASE(JSOP_STARTXMLEXPR)
 #endif
 ADD_EMPTY_CASE(JSOP_NULLBLOCKCHAIN)
 END_EMPTY_CASES
 
 BEGIN_CASE(JSOP_TRACE)
 BEGIN_CASE(JSOP_NOTRACE)
-    LEAVE_ON_SAFE_POINT();
+    /* No-op */
 END_CASE(JSOP_TRACE)
 
 check_backedge:
 {
     CHECK_BRANCH();
     if (op != JSOP_NOTRACE && op != JSOP_TRACE)
         DO_OP();
 
 #ifdef JS_TRACER
     if (TRACING_ENABLED(cx) && (TRACE_RECORDER(cx) || TRACE_PROFILER(cx) || (op == JSOP_TRACE && !useMethodJIT))) {
         MonitorResult r = MonitorLoopEdge(cx, interpMode);
         if (r == MONITOR_RECORDING) {
             JS_ASSERT(TRACE_RECORDER(cx));
             JS_ASSERT(!TRACE_PROFILER(cx));
             MONITOR_BRANCH_TRACEVIS;
             ENABLE_INTERRUPTS();
-            CLEAR_LEAVE_ON_TRACE_POINT();
         }
         JS_ASSERT_IF(cx->isExceptionPending(), r == MONITOR_ERROR);
         RESTORE_INTERP_VARS_CHECK_EXCEPTION();
         op = (JSOp) *regs.pc;
         DO_OP();
     }
 #endif /* JS_TRACER */
 
@@ -2275,17 +2222,16 @@ BEGIN_CASE(JSOP_STOP)
          */
         JS_ASSERT(op == JSOP_STOP);
         JS_ASSERT((uintN)(regs.sp - regs.fp()->slots()) <= script->nslots);
         jsbytecode *imacpc = regs.fp()->imacropc();
         regs.pc = imacpc + js_CodeSpec[*imacpc].length;
         if (js_CodeSpec[*imacpc].format & JOF_DECOMPOSE)
             regs.pc += GetDecomposeLength(imacpc, js_CodeSpec[*imacpc].length);
         regs.fp()->clearImacropc();
-        LEAVE_ON_SAFE_POINT();
         atoms = script->atoms;
         op = JSOp(*regs.pc);
         DO_OP();
     }
 #endif
 
     interpReturnOK = true;
     if (entryFrame != regs.fp())
@@ -5344,22 +5290,16 @@ BEGIN_CASE(JSOP_RETSUB)
     len = rval.toInt32();
     regs.pc = script->code;
 END_VARLEN_CASE
 }
 
 BEGIN_CASE(JSOP_EXCEPTION)
     PUSH_COPY(cx->getPendingException());
     cx->clearPendingException();
-#if defined(JS_TRACER) && defined(JS_METHODJIT)
-    if (interpMode == JSINTERP_PROFILE) {
-        leaveOnSafePoint = true;
-        LEAVE_ON_SAFE_POINT();
-    }
-#endif
     CHECK_BRANCH();
 END_CASE(JSOP_EXCEPTION)
 
 BEGIN_CASE(JSOP_FINALLY)
     CHECK_BRANCH();
 END_CASE(JSOP_FINALLY)
 
 BEGIN_CASE(JSOP_THROWING)
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -210,20 +210,19 @@ ExecuteKernel(JSContext *cx, JSScript *s
 extern bool
 Execute(JSContext *cx, JSScript *script, JSObject &scopeChain, Value *rval);
 
 /* Flags to toggle js::Interpret() execution. */
 enum InterpMode
 {
     JSINTERP_NORMAL    = 0, /* interpreter is running normally */
     JSINTERP_RECORD    = 1, /* interpreter has been started to record/run traces */
-    JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
-    JSINTERP_PROFILE   = 3, /* interpreter should profile a loop */
-    JSINTERP_REJOIN    = 4, /* as normal, but the frame has already started */
-    JSINTERP_SKIP_TRAP = 5  /* as REJOIN, but skip trap at first opcode */
+    JSINTERP_PROFILE   = 2, /* interpreter should profile a loop */
+    JSINTERP_REJOIN    = 3, /* as normal, but the frame has already started */
+    JSINTERP_SKIP_TRAP = 4  /* as REJOIN, but skip trap at first opcode */
 };
 
 /*
  * Execute the caller-initialized frame for a user-defined script or function
  * pointed to by cx->fp until completion or error.
  */
 extern JS_REQUIRES_STACK JS_NEVER_INLINE bool
 Interpret(JSContext *cx, StackFrame *stopFp, InterpMode mode = JSINTERP_NORMAL);
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -1314,24 +1314,18 @@ Blacklist(jsbytecode* pc)
     JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOTRACE);
     *pc = JSOP_NOTRACE;
 }
 
 static void
 Unblacklist(JSScript *script, jsbytecode *pc)
 {
     JS_ASSERT(*pc == JSOP_NOTRACE || *pc == JSOP_TRACE);
-    if (*pc == JSOP_NOTRACE) {
+    if (*pc == JSOP_NOTRACE)
         *pc = JSOP_TRACE;
-
-#ifdef JS_METHODJIT
-        /* This code takes care of unblacklisting in the method JIT. */
-        js::mjit::ResetTraceHint(script, pc, GET_UINT16(pc), false);
-#endif
-    }
 }
 
 #ifdef JS_METHODJIT
 static bool
 IsBlacklisted(jsbytecode* pc)
 {
     if (*pc == JSOP_NOTRACE)
         return true;
@@ -2683,27 +2677,16 @@ TraceMonitor::flush()
 
     verbose_only(
         for (Seq<Fragment*>* f = branches; f; f = f->tail)
             FragProfiling_FragFinalizer(f->head, this);
     )
 
     flushEpoch++;
 
-#ifdef JS_METHODJIT
-    if (loopProfiles) {
-        for (LoopProfileMap::Enum e(*loopProfiles); !e.empty(); e.popFront()) {
-            jsbytecode *pc = e.front().key;
-            LoopProfile *prof = e.front().value;
-            /* This code takes care of resetting all methodjit state. */
-            js::mjit::ResetTraceHint(prof->entryScript, pc, GET_UINT16(pc), true);
-        }
-    }
-#endif
-
     frameCache->reset();
     dataAlloc->reset();
     traceAlloc->reset();
     codeAlloc->reset();
     tempAlloc->reset();
     oracle->clear();
     loopProfiles->clear();
 
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -107,51 +107,40 @@ mjit::Compiler::Compiler(JSContext *cx, 
     a(NULL), outer(NULL), script(NULL), PC(NULL), loop(NULL),
     inlineFrames(CompilerAllocPolicy(cx, *thisFromCtor())),
     branchPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
 #if defined JS_MONOIC
     getGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
     setGlobalNames(CompilerAllocPolicy(cx, *thisFromCtor())),
     callICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     equalityICs(CompilerAllocPolicy(cx, *thisFromCtor())),
-    traceICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
 #if defined JS_POLYIC
     pics(CompilerAllocPolicy(cx, *thisFromCtor())),
     getElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
     setElemICs(CompilerAllocPolicy(cx, *thisFromCtor())),
 #endif
     callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
     callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
     doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
     fixedIntToDoubleEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
     fixedDoubleToAnyEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
     loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
-    rootedObjects(CompilerAllocPolicy(cx, *thisFromCtor())),
     stubcc(cx, *thisFromCtor(), frame),
     debugMode_(cx->compartment->debugMode()),
-#if defined JS_TRACER
-    addTraceHints(cx->traceJitEnabled),
-#else
-    addTraceHints(false),
-#endif
     inlining_(false),
     hasGlobalReallocation(false),
     oomInVector(false),
     overflowICSpace(false),
     gcNumber(cx->runtime->gcNumber),
     applyTricks(NoApplyTricks),
     pcLengths(NULL)
 {
-    /* :FIXME: bug 637856 disabling traceJit if inference is enabled */
-    if (cx->typeInferenceEnabled())
-        addTraceHints = false;
-
     /* Once a script starts getting really hot we will inline calls in it. */
     if (!debugMode() && cx->typeInferenceEnabled() && globalObj &&
         (outerScript->getUseCount() >= USES_BEFORE_INLINING ||
          cx->hasRunOption(JSOPTION_METHODJIT_ALWAYS))) {
         inlining_ = true;
     }
 }
 
@@ -755,20 +744,19 @@ mjit::Compiler::generatePrologue()
             stubcc.linkExitDirect(stackCheck, stubcc.masm.label());
             OOL_STUBCALL(stubs::HitStackQuota, REJOIN_NONE);
             stubcc.crossJump(stubcc.masm.jump(), masm.label());
         }
 
         /*
          * Set locals to undefined, as in initCallFrameLatePrologue.
          * Skip locals which aren't closed and are known to be defined before used,
-         * :FIXME: bug 604541: write undefined if we might be using the tracer, so it works.
          */
         for (uint32 i = 0; i < script->nfixed; i++) {
-            if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
+            if (analysis->localHasUseBeforeDef(i)) {
                 Address local(JSFrameReg, sizeof(StackFrame) + i * sizeof(Value));
                 masm.storeValue(UndefinedValue(), local);
             }
         }
 
         types::TypeScriptNesting *nesting = script->nesting();
 
         /*
@@ -963,23 +951,21 @@ mjit::Compiler::finishThisUp(JITScript *
         }
     }
 
     /* Please keep in sync with JITScript::scriptDataSize! */
     size_t dataSize = sizeof(JITScript) +
                       sizeof(NativeMapEntry) * nNmapLive +
                       sizeof(InlineFrame) * inlineFrames.length() +
                       sizeof(CallSite) * callSites.length() +
-                      sizeof(JSObject *) * rootedObjects.length() +
 #if defined JS_MONOIC
                       sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
                       sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
                       sizeof(ic::CallICInfo) * callICs.length() +
                       sizeof(ic::EqualityICInfo) * equalityICs.length() +
-                      sizeof(ic::TraceICInfo) * traceICs.length() +
 #endif
 #if defined JS_POLYIC
                       sizeof(ic::PICInfo) * pics.length() +
                       sizeof(ic::GetElementIC) * getElemICs.length() +
                       sizeof(ic::SetElementIC) * setElemICs.length() +
 #endif
                       0;
 
@@ -1095,23 +1081,16 @@ mjit::Compiler::finishThisUp(JITScript *
          * Patch stores of the base call's return address for InvariantFailure
          * calls. InvariantFailure will patch its own return address to this
          * pointer before triggering recompilation.
          */
         if (from.loopPatch.hasPatch)
             stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
     }
 
-    /* Build the list of objects rooted by the script. */
-    JSObject **jitRooted = (JSObject **)cursor;
-    jit->nRootedObjects = rootedObjects.length();
-    cursor += sizeof(JSObject *) * jit->nRootedObjects;
-    for (size_t i = 0; i < jit->nRootedObjects; i++)
-        jitRooted[i] = rootedObjects[i];
-
 #if defined JS_MONOIC
     JS_INIT_CLIST(&jit->callers);
 
     if (script->hasFunction && cx->typeInferenceEnabled()) {
         jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
         jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
         jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
         jit->argsCheckPool = NULL;
@@ -1244,53 +1223,16 @@ mjit::Compiler::finishThisUp(JITScript *
         jitEqualityICs[i].tempReg = equalityICs[i].tempReg;
         jitEqualityICs[i].cond = equalityICs[i].cond;
         if (equalityICs[i].jumpToStub.isSet())
             jitEqualityICs[i].jumpToStub = fullCode.locationOf(equalityICs[i].jumpToStub.get());
         jitEqualityICs[i].fallThrough = fullCode.locationOf(equalityICs[i].fallThrough);
 
         stubCode.patch(equalityICs[i].addrLabel, &jitEqualityICs[i]);
     }
-
-    ic::TraceICInfo *jitTraceICs = (ic::TraceICInfo *)cursor;
-    jit->nTraceICs = traceICs.length();
-    cursor += sizeof(ic::TraceICInfo) * jit->nTraceICs;
-    for (size_t i = 0; i < jit->nTraceICs; i++) {
-        jitTraceICs[i].initialized = traceICs[i].initialized;
-        if (!traceICs[i].initialized)
-            continue;
-
-        if (traceICs[i].fastTrampoline) {
-            jitTraceICs[i].fastTarget = stubCode.locationOf(traceICs[i].trampolineStart);
-        } else {
-            uint32 offs = uint32(traceICs[i].jumpTarget - script->code);
-            JS_ASSERT(jumpMap[offs].isSet());
-            jitTraceICs[i].fastTarget = fullCode.locationOf(jumpMap[offs]);
-        }
-        jitTraceICs[i].slowTarget = stubCode.locationOf(traceICs[i].trampolineStart);
-
-        jitTraceICs[i].traceHint = fullCode.locationOf(traceICs[i].traceHint);
-        jitTraceICs[i].stubEntry = stubCode.locationOf(traceICs[i].stubEntry);
-        jitTraceICs[i].traceData = NULL;
-#ifdef DEBUG
-        jitTraceICs[i].jumpTargetPC = traceICs[i].jumpTarget;
-#endif
-
-        jitTraceICs[i].hasSlowTraceHint = traceICs[i].slowTraceHint.isSet();
-        if (traceICs[i].slowTraceHint.isSet())
-            jitTraceICs[i].slowTraceHint = stubCode.locationOf(traceICs[i].slowTraceHint.get());
-#ifdef JS_TRACER
-        uint32 hotloop = GetHotloop(cx);
-        uint32 prevCount = cx->compartment->backEdgeCount(traceICs[i].jumpTarget);
-        jitTraceICs[i].loopCounterStart = hotloop;
-        jitTraceICs[i].loopCounter = hotloop < prevCount ? 1 : hotloop - prevCount;
-#endif
-
-        stubCode.patch(traceICs[i].addrLabel, &jitTraceICs[i]);
-    }
 #endif /* JS_MONOIC */
 
     for (size_t i = 0; i < callPatches.length(); i++) {
         CallPatchInfo &patch = callPatches[i];
 
         CodeLocationLabel joinPoint = patch.joinSlow
             ? stubCode.locationOf(patch.joinPoint)
             : fullCode.locationOf(patch.joinPoint);
@@ -4710,22 +4652,16 @@ mjit::Compiler::jsop_callprop_str(JSAtom
         return true;
     }
 
     /* Bake in String.prototype. This is safe because of compileAndGo. */
     JSObject *obj = globalObj->getOrCreateStringPrototype(cx);
     if (!obj)
         return false;
 
-    /*
-     * Root the proto, since JS_ClearScope might overwrite the global object's
-     * copy.
-     */
-    rootedObjects.append(obj);
-
     /* Force into a register because getprop won't expect a constant. */
     RegisterID reg = frame.allocReg();
 
     masm.move(ImmPtr(obj), reg);
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
 
     /* Get the property. */
     if (!jsop_getprop(atom, knownPushedType(0)))
@@ -6833,160 +6769,60 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
             alloc = cx->typeLifoAlloc().new_<RegisterAllocation>(false);
             if (!alloc)
                 return false;
         }
         lvtarget = alloc;
         consistent = frame.consistentRegisters(target);
     }
 
-    if (!addTraceHints || target >= PC ||
-        (JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
-#ifdef JS_MONOIC
-        || GET_UINT16(target) == BAD_TRACEIC_INDEX
-#endif
-        )
-    {
-        if (!lvtarget || lvtarget->synced()) {
-            JS_ASSERT(consistent);
-            if (!jumpInScript(j, target))
-                return false;
-            if (slow && !stubcc.jumpInScript(*slow, target))
-                return false;
-        } else {
-            if (consistent) {
-                if (!jumpInScript(j, target))
-                    return false;
-            } else {
-                /*
-                 * Make a trampoline to issue remaining loads for the register
-                 * state at target.
-                 */
-                Label start = stubcc.masm.label();
-                stubcc.linkExitDirect(j, start);
-                frame.prepareForJump(target, stubcc.masm, false);
-                if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
-                    return false;
-                if (trampoline)
-                    *trampoline = true;
-                if (pcLengths) {
-                    /*
-                     * This is OOL code but will usually be executed, so track
-                     * it in the CODE_LENGTH for the opcode.
-                     */
-                    uint32 offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
-                    size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
-                    pcLengths[offset].codeLength += length;
-                }
-            }
-
-            if (slow) {
-                slow->linkTo(stubcc.masm.label(), &stubcc.masm);
-                frame.prepareForJump(target, stubcc.masm, true);
-                if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
-                    return false;
-            }
-        }
-
-        if (target < PC)
-            return finishLoop(target);
-        return true;
-    }
-
-    /* The trampoline should not be specified if we need to generate a trace IC. */
-    JS_ASSERT(!trampoline);
-
-#ifndef JS_TRACER
-    JS_NOT_REACHED("Bad addTraceHints");
-    return false;
-#else
-
-# if JS_MONOIC
-    TraceGenInfo ic;
-
-    ic.initialized = true;
-    ic.stubEntry = stubcc.masm.label();
-    ic.traceHint = j;
-    if (slow)
-        ic.slowTraceHint = *slow;
-
-    uint16 index = GET_UINT16(target);
-    if (traceICs.length() <= index)
-        if (!traceICs.resize(index+1))
+    if (!lvtarget || lvtarget->synced()) {
+        JS_ASSERT(consistent);
+        if (!jumpInScript(j, target))
             return false;
-# endif
-
-    Label traceStart = stubcc.masm.label();
-
-    stubcc.linkExitDirect(j, traceStart);
-    if (slow)
-        slow->linkTo(traceStart, &stubcc.masm);
-
-# if JS_MONOIC
-    ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
-
-    Jump nonzero = stubcc.masm.branchSub32(Assembler::NonZero, Imm32(1),
-                                           Address(Registers::ArgReg1,
-                                                   offsetof(TraceICInfo, loopCounter)));
-# endif
-
-    /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
-    {
-        jsbytecode* pc = PC;
-        PC = target;
-
-        OOL_STUBCALL(stubs::InvokeTracer, REJOIN_NONE);
-
-        PC = pc;
-    }
-
-    Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
-                                        Registers::ReturnReg);
-    if (!cx->typeInferenceEnabled())
-        stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
-    stubcc.masm.jump(Registers::ReturnReg);
-    no.linkTo(stubcc.masm.label(), &stubcc.masm);
-
-#ifdef JS_MONOIC
-    nonzero.linkTo(stubcc.masm.label(), &stubcc.masm);
-
-    ic.jumpTarget = target;
-    ic.fastTrampoline = !consistent;
-    ic.trampolineStart = stubcc.masm.label();
-
-    traceICs[index] = ic;
-#endif
-
-    /*
-     * Jump past the tracer call if the trace has been blacklisted. We still make
-     * a trace IC in such cases, in case it is un-blacklisted later.
-     */
-    if (JSOp(*target) == JSOP_NOTRACE) {
+        if (slow && !stubcc.jumpInScript(*slow, target))
+            return false;
+    } else {
         if (consistent) {
             if (!jumpInScript(j, target))
                 return false;
         } else {
-            stubcc.linkExitDirect(j, stubcc.masm.label());
+            /*
+             * Make a trampoline to issue remaining loads for the register
+             * state at target.
+             */
+            Label start = stubcc.masm.label();
+            stubcc.linkExitDirect(j, start);
+            frame.prepareForJump(target, stubcc.masm, false);
+            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
+                return false;
+            if (trampoline)
+                *trampoline = true;
+            if (pcLengths) {
+                /*
+                 * This is OOL code but will usually be executed, so track
+                 * it in the CODE_LENGTH for the opcode.
+                 */
+                uint32 offset = ssa.frameLength(a->inlineIndex) + PC - script->code;
+                size_t length = stubcc.masm.size() - stubcc.masm.distanceOf(start);
+                pcLengths[offset].codeLength += length;
+            }
         }
-        if (slow)
+
+        if (slow) {
             slow->linkTo(stubcc.masm.label(), &stubcc.masm);
-    }
-
-    /*
-     * Reload any registers needed at the head of the loop. Note that we didn't
-     * need to do syncing before calling InvokeTracer, as state is always synced
-     * on backwards jumps.
-     */
-    frame.prepareForJump(target, stubcc.masm, true);
-
-    if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
-        return false;
-#endif
-
-    return finishLoop(target);
+            frame.prepareForJump(target, stubcc.masm, true);
+            if (!stubcc.jumpInScript(stubcc.masm.jump(), target))
+                return false;
+        }
+    }
+
+    if (target < PC)
+        return finishLoop(target);
+    return true;
 }
 
 void
 mjit::Compiler::enterBlock(JSObject *obj)
 {
     /* For now, don't bother doing anything for this opcode. */
     frame.syncAndForgetEverything();
     masm.move(ImmPtr(obj), Registers::ArgReg1);
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -120,29 +120,16 @@ class Compiler : public BaseCompiler
         Label fallThrough;
         jsbytecode *jumpTarget;
         bool trampoline;
         Label trampolineStart;
         ValueRemat lvr, rvr;
         Assembler::Condition cond;
         JSC::MacroAssembler::RegisterID tempReg;
     };
-    
-    struct TraceGenInfo {
-        bool initialized;
-        Label stubEntry;
-        DataLabelPtr addrLabel;
-        jsbytecode *jumpTarget;
-        bool fastTrampoline;
-        Label trampolineStart;
-        Jump traceHint;
-        MaybeJump slowTraceHint;
-
-        TraceGenInfo() : initialized(false) {}
-    };
 
     /* InlineFrameAssembler wants to see this. */
   public:
     struct CallGenInfo {
         /*
          * These members map to members in CallICInfo. See that structure for
          * more comments.
          */
@@ -444,43 +431,40 @@ private:
 
     js::Vector<ActiveFrame*, 4, CompilerAllocPolicy> inlineFrames;
     js::Vector<BranchPatch, 64, CompilerAllocPolicy> branchPatches;
 #if defined JS_MONOIC
     js::Vector<GetGlobalNameICInfo, 16, CompilerAllocPolicy> getGlobalNames;
     js::Vector<SetGlobalNameICInfo, 16, CompilerAllocPolicy> setGlobalNames;
     js::Vector<CallGenInfo, 64, CompilerAllocPolicy> callICs;
     js::Vector<EqualityGenInfo, 64, CompilerAllocPolicy> equalityICs;
-    js::Vector<TraceGenInfo, 64, CompilerAllocPolicy> traceICs;
 #endif
 #if defined JS_POLYIC
     js::Vector<PICGenInfo, 16, CompilerAllocPolicy> pics;
     js::Vector<GetElementICInfo, 16, CompilerAllocPolicy> getElemICs;
     js::Vector<SetElementICInfo, 16, CompilerAllocPolicy> setElemICs;
 #endif
     js::Vector<CallPatchInfo, 64, CompilerAllocPolicy> callPatches;
     js::Vector<InternalCallSite, 64, CompilerAllocPolicy> callSites;
     js::Vector<DoublePatch, 16, CompilerAllocPolicy> doubleList;
     js::Vector<uint32> fixedIntToDoubleEntries;
     js::Vector<uint32> fixedDoubleToAnyEntries;
     js::Vector<JumpTable, 16> jumpTables;
     js::Vector<uint32, 16> jumpTableOffsets;
     js::Vector<LoopEntry, 16> loopEntries;
-    js::Vector<JSObject *, 0, CompilerAllocPolicy> rootedObjects;
     StubCompiler stubcc;
     Label invokeLabel;
     Label arityLabel;
     Label argsCheckLabel;
 #ifdef JS_MONOIC
     Label argsCheckStub;
     Label argsCheckFallthrough;
     Jump argsCheckJump;
 #endif
     bool debugMode_;
-    bool addTraceHints;
     bool inlining_;
     bool hasGlobalReallocation;
     bool oomInVector;       // True if we have OOM'd appending to a vector. 
     bool overflowICSpace;   // True if we added a constant pool in a reserved space.
     uint32 gcNumber;
     enum { NoApplyTricks, LazyArgsObj } applyTricks;
     PCLengthEntry *pcLengths;
 
--- a/js/src/methodjit/FastArithmetic.cpp
+++ b/js/src/methodjit/FastArithmetic.cpp
@@ -1157,17 +1157,17 @@ mjit::Compiler::jsop_equality_int_string
 
         ic.cond = cond;
         ic.tempReg = tempReg;
         ic.lvr = lvr;
         ic.rvr = rvr;
         ic.stubEntry = stubEntry;
         ic.stub = stub;
 
-        bool useIC = (!addTraceHints || target >= PC) && !a->parent;
+        bool useIC = !a->parent;
 
         /* Call the IC stub, which may generate a fast path. */
         if (useIC) {
             /* Adjust for the two values just pushed. */
             ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
             ic.stubCall = OOL_STUBCALL_LOCAL_SLOTS(ic::Equality, REJOIN_BRANCH,
                                                    frame.totalDepth() + 2);
             needStub = false;
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -1320,21 +1320,17 @@ FrameState::sync(Assembler &masm, Uses u
 
     /*
      * Keep track of free registers using a bitmask. If we have to drop into
      * syncFancy(), then this mask will help avoid eviction.
      */
     Registers avail(freeRegs.freeMask & Registers::AvailRegs);
     Registers temp(Registers::TempAnyRegs);
 
-    FrameEntry *bottom = (cx->typeInferenceEnabled() || cx->compartment->debugMode())
-        ? entries
-        : a->sp - uses.nuses;
-
-    for (FrameEntry *fe = a->sp - 1; fe >= bottom; fe--) {
+    for (FrameEntry *fe = a->sp - 1; fe >= entries; fe--) {
         if (!fe->isTracked())
             continue;
 
         if (fe->isType(JSVAL_TYPE_DOUBLE)) {
             /* Copies of in-memory doubles can be synced without spilling. */
             if (fe->isCopy() || !fe->data.inFPRegister())
                 ensureFeSynced(fe, masm);
             continue;
@@ -1374,17 +1370,17 @@ FrameState::sync(Assembler &masm, Uses u
 
                 masm.storeValue(syncReg, addressOf(fe));
                 continue;
             }
 #elif defined JS_NUNBOX32
             /* Fall back to a slower sync algorithm if load required. */
             if ((!fe->type.synced() && backing->type.inMemory()) ||
                 (!fe->data.synced() && backing->data.inMemory())) {
-                syncFancy(masm, avail, fe, bottom);
+                syncFancy(masm, avail, fe, entries);
                 return;
             }
 #endif
         }
 
         bool copy = fe->isCopy();
 
         /* If a part still needs syncing, it is either a copy or constant. */
@@ -1455,21 +1451,17 @@ FrameState::syncAndKill(Registers kill, 
             JS_ASSERT(fe->type.reg() == reg.reg());
             syncType(fe);
         }
 #endif
     }
 
     uint32 maxvisits = tracker.nentries;
 
-    FrameEntry *bottom = (cx->typeInferenceEnabled() || cx->compartment->debugMode())
-        ? entries
-        : a->sp - uses.nuses;
-
-    for (FrameEntry *fe = a->sp - 1; fe >= bottom && maxvisits; fe--) {
+    for (FrameEntry *fe = a->sp - 1; fe >= entries && maxvisits; fe--) {
         if (!fe->isTracked())
             continue;
 
         maxvisits--;
 
         if (deadEntry(fe, ignore.nuses))
             continue;
 
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -528,31 +528,17 @@ js_InternalThrow(VMFrame &f)
     JSContext *cx = f.cx;
 
     ExpandInlineFrames(cx->compartment);
 
     // The current frame may have an associated orphaned native, if the native
     // or SplatApplyArgs threw an exception.
     RemoveOrphanedNative(cx, f.fp());
 
-    // It's possible that from within RunTracer(), Interpret() returned with
-    // an error and finished the frame (i.e., called ScriptEpilogue), but has
-    // not yet performed an inline return.
-    //
-    // In this case, RunTracer() has no choice but to propagate the error
-    // up to the method JIT, and thus to this function. But ScriptEpilogue()
-    // has already been called. Detect this, and avoid double-finishing the
-    // frame. See HandleErrorInExcessFrame() and bug 624100.
-    if (f.fp()->finishedInInterpreter()) {
-        // If it's the last frame, just propagate the failure up again.
-        if (f.fp() == f.entryfp)
-            return NULL;
-
-        InlineReturn(f);
-    }
+    JS_ASSERT(!f.fp()->finishedInInterpreter());
 
     // Make sure sp is up to date.
     JS_ASSERT(&cx->regs() == &f.regs);
 
     jsbytecode *pc = NULL;
     for (;;) {
         if (cx->isExceptionPending()) {
             // Call the throw hook if necessary
@@ -611,60 +597,56 @@ js_InternalThrow(VMFrame &f)
     JS_ASSERT(f.regs.sp == cx->regs().sp);
 
     if (!pc)
         return NULL;
 
     StackFrame *fp = cx->fp();
     JSScript *script = fp->script();
 
-    if (cx->typeInferenceEnabled() || !fp->jit()) {
-        /*
-         * Fall back to EnterMethodJIT and finish the frame in the interpreter.
-         * With type inference enabled, we may wipe out all JIT code on the
-         * stack without patching ncode values to jump to the interpreter, and
-         * thus can only enter JIT code via EnterMethodJIT (which overwrites
-         * its entry frame's ncode). See ClearAllFrames.
-         */
-        cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished);
-
-        if (!script->ensureRanAnalysis(cx)) {
-            js_ReportOutOfMemory(cx);
-            return NULL;
-        }
-
-        analyze::AutoEnterAnalysis enter(cx);
-
-        cx->regs().pc = pc;
-        cx->regs().sp = fp->base() + script->analysis()->getCode(pc).stackDepth;
+    /*
+     * Fall back to EnterMethodJIT and finish the frame in the interpreter;
+     * we may wipe out all JIT code on the stack without patching ncode
+     * values to jump to the interpreter, and thus can only enter JIT code
+     * via EnterMethodJIT (which overwrites its entry frame's ncode). See
+     * ClearAllFrames.
+     */
+    cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished);
 
-        /*
-         * Interpret the ENTERBLOCK and EXCEPTION opcodes, so that we don't go
-         * back into the interpreter with a pending exception. This will cause
-         * it to immediately rethrow.
-         */
-        if (cx->isExceptionPending()) {
-            JS_ASSERT(js_GetOpcode(cx, script, pc) == JSOP_ENTERBLOCK);
-            JSObject *obj = script->getObject(GET_SLOTNO(pc));
-            Value *vp = cx->regs().sp + OBJ_BLOCK_COUNT(cx, obj);
-            SetValueRangeToUndefined(cx->regs().sp, vp);
-            cx->regs().sp = vp;
-            JS_ASSERT(js_GetOpcode(cx, script, pc + JSOP_ENTERBLOCK_LENGTH) == JSOP_EXCEPTION);
-            cx->regs().sp[0] = cx->getPendingException();
-            cx->clearPendingException();
-            cx->regs().sp++;
-            cx->regs().pc = pc + JSOP_ENTERBLOCK_LENGTH + JSOP_EXCEPTION_LENGTH;
-        }
-
-        *f.oldregs = f.regs;
-
+    if (!script->ensureRanAnalysis(cx)) {
+        js_ReportOutOfMemory(cx);
         return NULL;
     }
 
-    return script->nativeCodeForPC(fp->isConstructing(), pc);
+    analyze::AutoEnterAnalysis enter(cx);
+
+    cx->regs().pc = pc;
+    cx->regs().sp = fp->base() + script->analysis()->getCode(pc).stackDepth;
+
+    /*
+     * Interpret the ENTERBLOCK and EXCEPTION opcodes, so that we don't go
+     * back into the interpreter with a pending exception; doing so would
+     * cause it to immediately rethrow.
+     */
+    if (cx->isExceptionPending()) {
+        JS_ASSERT(js_GetOpcode(cx, script, pc) == JSOP_ENTERBLOCK);
+        JSObject *obj = script->getObject(GET_SLOTNO(pc));
+        Value *vp = cx->regs().sp + OBJ_BLOCK_COUNT(cx, obj);
+        SetValueRangeToUndefined(cx->regs().sp, vp);
+        cx->regs().sp = vp;
+        JS_ASSERT(js_GetOpcode(cx, script, pc + JSOP_ENTERBLOCK_LENGTH) == JSOP_EXCEPTION);
+        cx->regs().sp[0] = cx->getPendingException();
+        cx->clearPendingException();
+        cx->regs().sp++;
+        cx->regs().pc = pc + JSOP_ENTERBLOCK_LENGTH + JSOP_EXCEPTION_LENGTH;
+    }
+
+    *f.oldregs = f.regs;
+
+    return NULL;
 }
 
 void JS_FASTCALL
 stubs::CreateThis(VMFrame &f, JSObject *proto)
 {
     JSContext *cx = f.cx;
     StackFrame *fp = f.fp();
     JSObject *callee = &fp->callee();
@@ -696,491 +678,18 @@ stubs::ScriptProbeOnlyPrologue(VMFrame &
 }
 
 void JS_FASTCALL
 stubs::ScriptProbeOnlyEpilogue(VMFrame &f)
 {
     Probes::exitJSFun(f.cx, f.fp()->fun(), f.fp()->script());
 }
 
-#ifdef JS_TRACER
-
-/*
- * Called when an error is in progress and the topmost frame could not handle
- * it. This will unwind to a given frame, or find and align to an exception
- * handler in the process.
- */
-static inline bool
-HandleErrorInExcessFrame(VMFrame &f, StackFrame *stopFp, bool searchedTopmostFrame = true)
-{
-    JSContext *cx = f.cx;
-
-    /*
-     * Callers of this called either Interpret() or JaegerShot(), which would
-     * have searched for exception handlers already. If we see stopFp, just
-     * return false. Otherwise, pop the frame, since it's guaranteed useless.
-     *
-     * Note that this also guarantees ScriptEpilogue() has been called.
-     */
-    StackFrame *fp = cx->fp();
-    if (searchedTopmostFrame) {
-        /*
-         * This is a special case meaning that fp->finishedInInterpreter() is
-         * true. If so, and fp == stopFp, our only choice is to propagate this
-         * error up, back to the method JIT, and then to js_InternalThrow,
-         * where this becomes a special case. See the comment there and bug
-         * 624100.
-         */
-        if (fp == stopFp)
-            return false;
-
-        /*
-         * Otherwise, the protocol here (like Invoke) is to assume that the
-         * execution mode finished the frame, and to just pop it.
-         */
-        InlineReturn(f);
-    }
-
-    /* Remove the bottom frame. */
-    bool returnOK = false;
-    for (;;) {
-        fp = cx->fp();
-
-        /* Clear imacros. */
-        if (fp->hasImacropc()) {
-            cx->regs().pc = fp->imacropc();
-            fp->clearImacropc();
-        }
-        JS_ASSERT(!fp->hasImacropc());
-
-        /* If there's an exception and a handler, set the pc and leave. */
-        if (cx->isExceptionPending()) {
-            jsbytecode *pc = FindExceptionHandler(cx);
-            if (pc) {
-                cx->regs().pc = pc;
-                returnOK = true;
-                break;
-            }
-        }
-
-        /* Don't unwind if this was the entry frame. */
-        if (fp == stopFp)
-            break;
-
-        /* Unwind and return. */
-        returnOK &= UnwindScope(cx, 0, returnOK || cx->isExceptionPending());
-        returnOK = ScriptEpilogue(cx, fp, returnOK);
-        InlineReturn(f);
-    }
-
-    JS_ASSERT(&f.regs == &cx->regs());
-    JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
-
-    return returnOK;
-}
-
-/* Returns whether the current PC has method JIT'd code. */
-static inline void *
-AtSafePoint(JSContext *cx)
-{
-    StackFrame *fp = cx->fp();
-    if (fp->hasImacropc())
-        return NULL;
-
-    JSScript *script = fp->script();
-    return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc);
-}
-
-/*
- * Interprets until either a safe point is reached that has method JIT'd
- * code, or the current frame tries to return.
- */
-static inline JSBool
-PartialInterpret(VMFrame &f)
-{
-    JSContext *cx = f.cx;
-    StackFrame *fp = cx->fp();
-
-#ifdef DEBUG
-    JSScript *script = fp->script();
-    JS_ASSERT(!fp->finishedInInterpreter());
-    JS_ASSERT(fp->hasImacropc() ||
-              !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc));
-#endif
-
-    JSBool ok = JS_TRUE;
-    ok = Interpret(cx, fp, JSINTERP_SAFEPOINT);
-
-    return ok;
-}
-
 JS_STATIC_ASSERT(JSOP_NOP == 0);
 
-/*
- * Returns whether the current PC would return, or if the frame has already
- * been completed. This distinction avoids re-entering the interpreter or JIT
- * to complete a JSOP_RETURN. Instead, that edge case is handled in
- * HandleFinishedFrame. We could consider reducing complexity, and making this
- * function return only "finishedInInterpreter", and always using the full VM
- * machinery to fully finish frames.
- */
-static inline bool
-FrameIsFinished(JSContext *cx)
-{
-    JSOp op = JSOp(*cx->regs().pc);
-    return (op == JSOP_RETURN ||
-            op == JSOP_RETRVAL ||
-            op == JSOP_STOP)
-        ? true
-        : cx->fp()->finishedInInterpreter();
-}
-
-/*
- * Given a frame that is about to return, make sure its return value and
- * activation objects are fixed up. Then, pop the frame and advance the
- * current PC. Note that while we could enter the JIT at this point, the
- * logic would still be necessary for the interpreter, so it's easier
- * (and faster) to finish frames in C++ even if at a safe point here.
- */
-static bool
-HandleFinishedFrame(VMFrame &f, StackFrame *entryFrame)
-{
-    JSContext *cx = f.cx;
-
-    JS_ASSERT(FrameIsFinished(cx));
-
-    /*
-     * This is the most difficult and complicated piece of the tracer
-     * integration, and historically has been very buggy. The problem is that
-     * although this frame has to be popped (see RemoveExcessFrames), it may
-     * be at a JSOP_RETURN opcode, and it might not have ever been executed.
-     * That is, fp->rval may not be set to the top of the stack, and if it
-     * has, the stack has already been decremented. Note that fp->rval is not
-     * the only problem: the epilogue may never have been executed.
-     *
-     * Here are the edge cases and whether the frame has been exited cleanly:
-     *  1. No: A trace exited directly before a RETURN op, and the
-     *         interpreter never ran.
-     *  2. Yes: The interpreter exited cleanly.
-     *  3. No: The interpreter exited on a safe point. LEAVE_ON_SAFE_POINT
-     *         is not used in between JSOP_RETURN and advancing the PC,
-     *         therefore, it cannot have been run if at a safe point.
-     *  4. No: Somewhere in the RunTracer call tree, we removed a frame,
-     *         and we returned to a JSOP_RETURN opcode. Note carefully
-     *         that in this situation, FrameIsFinished() returns true!
-     *  5. Yes: The function exited in the method JIT, during
-     *         FinishExcessFrames() However, in this case, we'll never enter
-     *         HandleFinishedFrame(): we always immediately pop JIT'd frames.
-     *
-     * Since the only scenario where this fixup is NOT needed is a normal exit
-     * from the interpreter, we can cleanly check for this scenario by checking
-     * a bit it sets in the frame.
-     */
-    bool returnOK = true;
-    if (!cx->fp()->finishedInInterpreter()) {
-        if (JSOp(*cx->regs().pc) == JSOP_RETURN)
-            cx->fp()->setReturnValue(f.regs.sp[-1]);
-
-        returnOK = ScriptEpilogue(cx, cx->fp(), true);
-    }
-
-    if (cx->fp() != entryFrame) {
-        InlineReturn(f);
-    }
-
-    return returnOK;
-}
-
-/*
- * Given a frame newer than the entry frame, try to finish it. If it's at a
- * return position, pop the frame. If it's at a safe point, execute it in
- * Jaeger code. Otherwise, try to interpret until a safe point.
- *
- * While this function is guaranteed to make progress, it may not actually
- * finish or pop the current frame. It can either:
- *   1) Finalize a finished frame, or
- *   2) Finish and finalize the frame in the Method JIT, or
- *   3) Interpret, which can:
- *     a) Propagate an error, or
- *     b) Finish the frame, but not finalize it, or
- *     c) Abruptly leave at any point in the frame, or in a newer frame
- *        pushed by a call, that has method JIT'd code.
- */
-static bool
-EvaluateExcessFrame(VMFrame &f, StackFrame *entryFrame)
-{
-    JSContext *cx = f.cx;
-    StackFrame *fp = cx->fp();
-
-    /*
-     * A "finished" frame is when the interpreter rested on a STOP,
-     * RETURN, RETRVAL, etc. We check for finished frames BEFORE looking
-     * for a safe point. If the frame was finished, we could have already
-     * called ScriptEpilogue(), and entering the JIT could call it twice.
-     */
-    if (!fp->hasImacropc() && FrameIsFinished(cx))
-        return HandleFinishedFrame(f, entryFrame);
-
-    if (void *ncode = AtSafePoint(cx)) {
-        if (!JaegerShotAtSafePoint(cx, ncode, false))
-            return false;
-        InlineReturn(f);
-        return true;
-    }
-
-    return PartialInterpret(f);
-}
-
-/*
- * Evaluate frames newer than the entry frame until all are gone. This will
- * always leave f.regs.fp == entryFrame.
- */
-static bool
-FinishExcessFrames(VMFrame &f, StackFrame *entryFrame)
-{
-    JSContext *cx = f.cx;
-
-    while (cx->fp() != entryFrame || entryFrame->hasImacropc()) {
-        if (!EvaluateExcessFrame(f, entryFrame)) {
-            if (!HandleErrorInExcessFrame(f, entryFrame))
-                return false;
-        }
-    }
-
-    return true;
-}
-
-#if defined JS_MONOIC
-static void
-UpdateTraceHintSingle(Repatcher &repatcher, JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
-{
-    /*
-     * Hack: The value that will be patched is before the executable address,
-     * so to get protection right, just unprotect the general region around
-     * the jump.
-     */
-    repatcher.relink(jump, target);
-
-    JaegerSpew(JSpew_PICs, "relinking trace hint %p to %p\n",
-               jump.executableAddress(), target.executableAddress());
-}
-
-static void
-DisableTraceHint(JITScript *jit, ic::TraceICInfo &ic)
-{
-    Repatcher repatcher(jit);
-    UpdateTraceHintSingle(repatcher, ic.traceHint, ic.fastTarget);
-
-    if (ic.hasSlowTraceHint)
-        UpdateTraceHintSingle(repatcher, ic.slowTraceHint, ic.slowTarget);
-}
-
-static void
-ResetTraceHintAt(JSScript *script, js::mjit::JITScript *jit,
-                 jsbytecode *pc, uint16_t index, bool full)
-{
-    if (index >= jit->nTraceICs)
-        return;
-    ic::TraceICInfo &ic = jit->traceICs()[index];
-    if (!ic.initialized)
-        return;
-    
-    JS_ASSERT(ic.jumpTargetPC == pc);
-
-    JaegerSpew(JSpew_PICs, "Enabling trace IC %u in script %p\n", index,
-               static_cast<void*>(script));
-
-    Repatcher repatcher(jit);
-
-    UpdateTraceHintSingle(repatcher, ic.traceHint, ic.stubEntry);
-
-    if (ic.hasSlowTraceHint)
-        UpdateTraceHintSingle(repatcher, ic.slowTraceHint, ic.stubEntry);
-
-    if (full) {
-        ic.traceData = NULL;
-        ic.loopCounterStart = 1;
-        ic.loopCounter = ic.loopCounterStart;
-    }
-}
-#endif
-
-void
-js::mjit::ResetTraceHint(JSScript *script, jsbytecode *pc, uint16_t index, bool full)
-{
-#if JS_MONOIC
-    if (script->jitNormal)
-        ResetTraceHintAt(script, script->jitNormal, pc, index, full);
-
-    if (script->jitCtor)
-        ResetTraceHintAt(script, script->jitCtor, pc, index, full);
-#endif
-}
-
-#if JS_MONOIC
-void *
-RunTracer(VMFrame &f, ic::TraceICInfo &ic)
-#else
-void *
-RunTracer(VMFrame &f)
-#endif
-{
-    JSContext *cx = f.cx;
-    StackFrame *entryFrame = f.fp();
-    TracePointAction tpa;
-
-    /* :TODO: nuke PIC? */
-    if (!cx->traceJitEnabled)
-        return NULL;
-
-    /*
-     * Force initialization of the entry frame's scope chain and return value,
-     * if necessary.  The tracer can query the scope chain without needing to
-     * check the HAS_SCOPECHAIN flag, and the frame is guaranteed to have the
-     * correct return value stored if we trace/interpret through to the end
-     * of the frame.
-     */
-    entryFrame->scopeChain();
-    entryFrame->returnValue();
-
-    bool blacklist;
-    void **traceData;
-    uintN *traceEpoch;
-    uint32 *loopCounter;
-    uint32 hits;
-#if JS_MONOIC
-    traceData = &ic.traceData;
-    traceEpoch = &ic.traceEpoch;
-    loopCounter = &ic.loopCounter;
-    *loopCounter = 1;
-    hits = ic.loopCounterStart;
-#else
-    traceData = NULL;
-    traceEpoch = NULL;
-    loopCounter = NULL;
-    hits = 1;
-#endif
-
-    {
-        /*
-         * While the tracer is running, redirect the regs to a local variable here.
-         * If the tracer exits during an inlined frame, it will synthesize those
-         * frames, point f.regs.fp at them and then enter the interpreter. If the
-         * interpreter pops the frames it will not be reflected here as a local
-         * set of regs is used by the interpreter, and f->regs end up pointing at
-         * garbage, confusing the recompiler.
-         */
-        FrameRegs regs = f.regs;
-        PreserveRegsGuard regsGuard(cx, regs);
-
-        tpa = MonitorTracePoint(f.cx, &blacklist, traceData, traceEpoch,
-                                loopCounter, hits);
-        JS_ASSERT(!TRACE_RECORDER(cx));
-    }
-
-#if JS_MONOIC
-    ic.loopCounterStart = *loopCounter;
-    if (blacklist)
-        DisableTraceHint(entryFrame->jit(), ic);
-#endif
-
-    // Even though ExecuteTree() bypasses the interpreter, it should propagate
-    // error failures correctly.
-    JS_ASSERT_IF(cx->isExceptionPending(), tpa == TPA_Error);
-
-    JS_ASSERT(f.fp() == cx->fp());
-    switch (tpa) {
-      case TPA_Nothing:
-        return NULL;
-
-      case TPA_Error:
-        if (!HandleErrorInExcessFrame(f, entryFrame, f.fp()->finishedInInterpreter()))
-            THROWV(NULL);
-        JS_ASSERT(!cx->fp()->hasImacropc());
-        break;
-
-      case TPA_RanStuff:
-      case TPA_Recorded:
-        break;
-    }
-
-    /*
-     * The tracer could have dropped us off on any frame at any position.
-     * Well, it could not have removed frames (recursion is disabled).
-     *
-     * Frames after the entryFrame cannot be entered via JaegerShotAtSafePoint()
-     * unless each is at a safe point. We can JaegerShotAtSafePoint these
-     * frames individually, but we must unwind to the entryFrame.
-     *
-     * Note carefully that JaegerShotAtSafePoint can resume methods at
-     * arbitrary safe points whereas JaegerShot cannot.
-     *
-     * If we land on entryFrame without a safe point in sight, we'll end up
-     * at the RETURN op. This is an edge case with two paths:
-     *
-     * 1) The entryFrame is the last inline frame. If it fell on a RETURN,
-     *    move the return value down.
-     * 2) The entryFrame is NOT the last inline frame. Pop the frame.
-     *
-     * In both cases, we hijack the stub to return to the force-return
-     * trampoline. This trampoline simulates the frame-popping portion of
-     * emitReturn (except without the benefit of the FrameState) and will
-     * produce the necessary register state to return to the caller.
-     */
-
-  restart:
-    /* Step 1. Finish frames created after the entry frame. */
-    if (!FinishExcessFrames(f, entryFrame))
-        THROWV(NULL);
-
-    /* IMacros are guaranteed to have been removed by now. */
-    JS_ASSERT(f.fp() == entryFrame);
-    JS_ASSERT(!entryFrame->hasImacropc());
-
-    /* Step 2. If entryFrame is done, use a special path to return to EnterMethodJIT(). */
-    if (FrameIsFinished(cx)) {
-        if (!HandleFinishedFrame(f, entryFrame))
-            THROWV(NULL);
-        *f.returnAddressLocation() = cx->jaegerCompartment()->forceReturnFromFastCall();
-        return NULL;
-    }
-
-    /* Step 3. If entryFrame is at a safe point, just leave. */
-    if (void *ncode = AtSafePoint(cx))
-        return ncode;
-
-    /* Step 4. Do a partial interp, then restart the whole process. */
-    if (!PartialInterpret(f)) {
-        if (!HandleErrorInExcessFrame(f, entryFrame))
-            THROWV(NULL);
-    }
-
-    goto restart;
-}
-
-#endif /* JS_TRACER */
-
-#if defined JS_TRACER
-# if defined JS_MONOIC
-void *JS_FASTCALL
-stubs::InvokeTracer(VMFrame &f, ic::TraceICInfo *ic)
-{
-    return RunTracer(f, *ic);
-}
-
-# else
-
-void *JS_FASTCALL
-stubs::InvokeTracer(VMFrame &f)
-{
-    return RunTracer(f);
-}
-# endif /* JS_MONOIC */
-#endif /* JS_TRACER */
-
 /* :XXX: common out with identical copy in Compiler.cpp */
 #if defined(JS_METHODJIT_SPEW)
 static const char *OpcodeNames[] = {
 # define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) #name,
 # include "jsopcode.tbl"
 # undef OPDEF
 };
 #endif
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -1086,26 +1086,20 @@ JITScript::inlineFrames() const
 }
 
 js::mjit::CallSite *
 JITScript::callSites() const
 {
     return (js::mjit::CallSite *)&inlineFrames()[nInlineFrames];
 }
 
-JSObject **
-JITScript::rootedObjects() const
-{
-    return (JSObject **)&callSites()[nCallSites];
-}
-
 char *
 JITScript::commonSectionLimit() const
 {
-    return (char *)&rootedObjects()[nRootedObjects];
+    return (char *)&callSites()[nCallSites];
 }
 
 #ifdef JS_MONOIC
 ic::GetGlobalNameIC *
 JITScript::getGlobalNames() const
 {
     return (ic::GetGlobalNameIC *) commonSectionLimit();
 }
@@ -1124,26 +1118,20 @@ JITScript::callICs() const
 }
 
 ic::EqualityICInfo *
 JITScript::equalityICs() const
 {
     return (ic::EqualityICInfo *)&callICs()[nCallICs];
 }
 
-ic::TraceICInfo *
-JITScript::traceICs() const
-{
-    return (ic::TraceICInfo *)&equalityICs()[nEqualityICs];
-}
-
 char *
 JITScript::monoICSectionsLimit() const
 {
-    return (char *)&traceICs()[nTraceICs];
+    return (char *)&equalityICs()[nEqualityICs];
 }
 #else   // JS_MONOIC
 char *
 JITScript::monoICSectionsLimit() const
 {
     return commonSectionLimit();
 }
 #endif  // JS_MONOIC
@@ -1181,27 +1169,16 @@ JITScript::polyICSectionsLimit() const
 #endif  // JS_POLYIC
 
 template <typename T>
 static inline void Destroy(T &t)
 {
     t.~T();
 }
 
-void
-mjit::JITScript::purgeNativeCallStubs()
-{
-    for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
-        JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
-        if (pool)
-            pool->release();
-    }
-    nativeCallStubs.clear();
-}
-
 mjit::JITScript::~JITScript()
 {
     code.release();
 
     if (pcLengths)
         Foreground::free_(pcLengths);
 
 #if defined JS_POLYIC
@@ -1222,17 +1199,21 @@ mjit::JITScript::~JITScript()
 
     for (JSC::ExecutablePool **pExecPool = execPools.begin();
          pExecPool != execPools.end();
          ++pExecPool)
     {
         (*pExecPool)->release();
     }
 
-    purgeNativeCallStubs();
+    for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
+        JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
+        if (pool)
+            pool->release();
+    }
 
     ic::CallICInfo *callICs_ = callICs();
     for (uint32 i = 0; i < nCallICs; i++) {
         callICs_[i].releasePools();
         if (callICs_[i].fastGuardedObject)
             callICs_[i].purgeGuardedObject();
     }
 
@@ -1267,23 +1248,21 @@ size_t
 mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
 {
     size_t usable = usf ? usf(this) : 0;
     return usable ? usable :
         sizeof(JITScript) +
         sizeof(NativeMapEntry) * nNmapPairs +
         sizeof(InlineFrame) * nInlineFrames +
         sizeof(CallSite) * nCallSites +
-        sizeof(JSObject *) * nRootedObjects +
 #if defined JS_MONOIC
         sizeof(ic::GetGlobalNameIC) * nGetGlobalNames +
         sizeof(ic::SetGlobalNameIC) * nSetGlobalNames +
         sizeof(ic::CallICInfo) * nCallICs +
         sizeof(ic::EqualityICInfo) * nEqualityICs +
-        sizeof(ic::TraceICInfo) * nTraceICs +
 #endif
 #if defined JS_POLYIC
         sizeof(ic::PICInfo) * nPICs +
         sizeof(ic::GetElementIC) * nGetElems +
         sizeof(ic::SetElementIC) * nSetElems +
 #endif
         0;
 }
@@ -1408,25 +1387,9 @@ JITScript::nativeToPC(void *returnAddres
 }
 
 jsbytecode *
 mjit::NativeToPC(JITScript *jit, void *ncode, mjit::CallSite **pinline)
 {
     return jit->nativeToPC(ncode, pinline);
 }
 
-void
-JITScript::trace(JSTracer *trc)
-{
-    /*
-     * MICs and PICs attached to the JITScript are weak references, and either
-     * entirely purged or selectively purged on each GC. We do, however, need
-     * to maintain references to any scripts whose code was inlined into this.
-     */
-    InlineFrame *inlineFrames_ = inlineFrames();
-    for (unsigned i = 0; i < nInlineFrames; i++)
-        MarkObject(trc, *inlineFrames_[i].fun, "jitscript_fun");
-
-    for (uint32 i = 0; i < nRootedObjects; ++i)
-        MarkObject(trc, *rootedObjects()[i], "mjit rooted object");
-}
-
 /* static */ const double mjit::Assembler::oneDouble = 1.0;
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -495,17 +495,16 @@ namespace ic {
     struct PICInfo;
     struct GetElementIC;
     struct SetElementIC;
 # endif
 # if defined JS_MONOIC
     struct GetGlobalNameIC;
     struct SetGlobalNameIC;
     struct EqualityICInfo;
-    struct TraceICInfo;
     struct CallICInfo;
 # endif
 }
 }
 
 typedef void (JS_FASTCALL *VoidStub)(VMFrame &);
 typedef void (JS_FASTCALL *VoidVpStub)(VMFrame &, Value *);
 typedef void (JS_FASTCALL *VoidStubUInt32)(VMFrame &, uint32);
@@ -526,17 +525,16 @@ typedef void (JS_FASTCALL *VoidStubJSObj
 typedef void (JS_FASTCALL *VoidStubPC)(VMFrame &, jsbytecode *);
 typedef JSBool (JS_FASTCALL *BoolStubUInt32)(VMFrame &f, uint32);
 #ifdef JS_MONOIC
 typedef void (JS_FASTCALL *VoidStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
 typedef void * (JS_FASTCALL *VoidPtrStubCallIC)(VMFrame &, js::mjit::ic::CallICInfo *);
 typedef void (JS_FASTCALL *VoidStubGetGlobal)(VMFrame &, js::mjit::ic::GetGlobalNameIC *);
 typedef void (JS_FASTCALL *VoidStubSetGlobal)(VMFrame &, js::mjit::ic::SetGlobalNameIC *);
 typedef JSBool (JS_FASTCALL *BoolStubEqualityIC)(VMFrame &, js::mjit::ic::EqualityICInfo *);
-typedef void * (JS_FASTCALL *VoidPtrStubTraceIC)(VMFrame &, js::mjit::ic::TraceICInfo *);
 #endif
 #ifdef JS_POLYIC
 typedef void (JS_FASTCALL *VoidStubPIC)(VMFrame &, js::mjit::ic::PICInfo *);
 typedef void (JS_FASTCALL *VoidStubGetElemIC)(VMFrame &, js::mjit::ic::GetElementIC *);
 typedef void (JS_FASTCALL *VoidStubSetElemIC)(VMFrame &f, js::mjit::ic::SetElementIC *);
 #endif
 
 namespace mjit {
@@ -600,23 +598,21 @@ struct JITScript {
      * Therefore, do not change the section ordering in finishThisUp() without
      * changing nMICs() et al as well.
      */
     uint32          nNmapPairs:31;      /* The NativeMapEntrys are sorted by .bcOff.
                                            .ncode values may not be NULL. */
     bool            singleStepMode:1;   /* compiled in "single step mode" */
     uint32          nInlineFrames;
     uint32          nCallSites;
-    uint32          nRootedObjects;
 #ifdef JS_MONOIC
     uint32          nGetGlobalNames;
     uint32          nSetGlobalNames;
     uint32          nCallICs;
     uint32          nEqualityICs;
-    uint32          nTraceICs;
 #endif
 #ifdef JS_POLYIC
     uint32          nGetElems;
     uint32          nSetElems;
     uint32          nPICs;
 #endif
 
 #ifdef JS_MONOIC
@@ -638,45 +634,37 @@ struct JITScript {
 #endif
 
     // Additional ExecutablePools for native call and getter stubs.
     Vector<NativeCallStub, 0, SystemAllocPolicy> nativeCallStubs;
 
     NativeMapEntry *nmap() const;
     js::mjit::InlineFrame *inlineFrames() const;
     js::mjit::CallSite *callSites() const;
-    JSObject **rootedObjects() const;
 #ifdef JS_MONOIC
     ic::GetGlobalNameIC *getGlobalNames() const;
     ic::SetGlobalNameIC *setGlobalNames() const;
     ic::CallICInfo *callICs() const;
     ic::EqualityICInfo *equalityICs() const;
-    ic::TraceICInfo *traceICs() const;
 #endif
 #ifdef JS_POLYIC
     ic::GetElementIC *getElems() const;
     ic::SetElementIC *setElems() const;
     ic::PICInfo     *pics() const;
 #endif
 
     ~JITScript();
 
     bool isValidCode(void *ptr) {
         char *jitcode = (char *)code.m_code.executableAddress();
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }
 
     void nukeScriptDependentICs();
-    void sweepCallICs(JSContext *cx, bool purgeAll);
-    void purgeMICs();
-    void purgePICs();
-    void purgeNativeCallStubs();
-
-    void trace(JSTracer *trc);
 
     /* |usf| can be NULL here, in which case the fallback size computation will be used. */
     size_t scriptDataSize(JSUsableSizeFun usf);
 
     jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
 
   private:
     /* Helpers used to navigate the variable-length sections. */
@@ -761,23 +749,16 @@ struct CallSite
         this->rejoin = rejoin;
     }
 
     bool isTrap() const {
         return rejoin == REJOIN_TRAP;
     }
 };
 
-/*
- * Re-enables a tracepoint in the method JIT. When full is true, we
- * also reset the iteration counter.
- */
-void
-ResetTraceHint(JSScript *script, jsbytecode *pc, uint16_t index, bool full);
-
 uintN
 GetCallTargetCount(JSScript *script, jsbytecode *pc);
 
 void
 DumpAllProfiles(JSContext *cx);
 
 inline void * bsearch_nmap(NativeMapEntry *nmap, size_t nPairs, size_t bcOff)
 {
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -1381,173 +1381,10 @@ JITScript::resetArgsCheck()
 {
     argsCheckPool->release();
     argsCheckPool = NULL;
 
     Repatcher repatch(this);
     repatch.relink(argsCheckJump, argsCheckStub);
 }
 
-void
-JITScript::purgeMICs()
-{
-    if (!nGetGlobalNames || !nSetGlobalNames)
-        return;
-
-    Repatcher repatch(this);
-
-    ic::GetGlobalNameIC *getGlobalNames_ = getGlobalNames();
-    for (uint32 i = 0; i < nGetGlobalNames; i++) {
-        ic::GetGlobalNameIC &ic = getGlobalNames_[i];
-        JSC::CodeLocationDataLabel32 label = ic.fastPathStart.dataLabel32AtOffset(ic.shapeOffset);
-        repatch.repatch(label, int(INVALID_SHAPE));
-    }
-
-    ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
-    for (uint32 i = 0; i < nSetGlobalNames; i++) {
-        ic::SetGlobalNameIC &ic = setGlobalNames_[i];
-        ic.patchInlineShapeGuard(repatch, int32(INVALID_SHAPE));
-
-        if (ic.hasExtraStub) {
-            Repatcher repatcher(ic.extraStub);
-            ic.patchExtraShapeGuard(repatcher, int32(INVALID_SHAPE));
-        }
-    }
-}
-
-void
-ic::PurgeMICs(JSContext *cx, JSScript *script)
-{
-    /* MICs are purged during GC to handle changing shapes. */
-    JS_ASSERT(cx->runtime->gcRegenShapes);
-
-    if (script->jitNormal)
-        script->jitNormal->purgeMICs();
-    if (script->jitCtor)
-        script->jitCtor->purgeMICs();
-}
-
-void
-JITScript::nukeScriptDependentICs()
-{
-    if (!nCallICs)
-        return;
-
-    Repatcher repatcher(this);
-
-    ic::CallICInfo *callICs_ = callICs();
-    for (uint32 i = 0; i < nCallICs; i++) {
-        ic::CallICInfo &ic = callICs_[i];
-        if (!ic.fastGuardedObject)
-            continue;
-        repatcher.repatch(ic.funGuard, NULL);
-        repatcher.relink(ic.funJump, ic.slowPathStart);
-        ic.releasePool(CallICInfo::Pool_ClosureStub);
-        ic.fastGuardedObject = NULL;
-        ic.hasJsFunCheck = false;
-    }
-}
-
-void
-JITScript::sweepCallICs(JSContext *cx, bool purgeAll)
-{
-    Repatcher repatcher(this);
-
-    /*
-     * If purgeAll is set, purge stubs in the script except those covered by PurgePICs
-     * (which is always called during GC). We want to remove references which can keep
-     * alive pools that we are trying to destroy (see JSCompartment::sweep).
-     */
-
-    ic::CallICInfo *callICs_ = callICs();
-    for (uint32 i = 0; i < nCallICs; i++) {
-        ic::CallICInfo &ic = callICs_[i];
-
-        /*
-         * If the object is unreachable, we're guaranteed not to be currently
-         * executing a stub generated by a guard on that object. This lets us
-         * precisely GC call ICs while keeping the identity guard safe.
-         */
-        bool fastFunDead = ic.fastGuardedObject &&
-            (purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedObject));
-        bool hasNative = ic.fastGuardedNative != NULL;
-
-        /*
-         * There are three conditions where we need to relink:
-         * (1) purgeAll is true.
-         * (2) There is a native stub. These have a NativeCallStub, which will
-         *     all be released if the compartment has no code on the stack.
-         * (3) The fastFun is dead *and* there is a closure stub.
-         *
-         * Note although both objects can be non-NULL, there can only be one
-         * of [closure, native] stub per call IC.
-         */
-        if (purgeAll || hasNative || (fastFunDead && ic.hasJsFunCheck)) {
-            repatcher.relink(ic.funJump, ic.slowPathStart);
-            ic.hit = false;
-        }
-
-        if (fastFunDead) {
-            repatcher.repatch(ic.funGuard, NULL);
-            ic.purgeGuardedObject();
-        }
-
-        if (hasNative)
-            ic.fastGuardedNative = NULL;
-
-        if (purgeAll) {
-            ic.releasePool(CallICInfo::Pool_ScriptStub);
-            JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
-            JSC::CodeLocationLabel icCall = ic.slowPathStart.labelAtOffset(ic.icCallOffset);
-            repatcher.relink(oolJump, icCall);
-        }
-    }
-
-    /* The arguments type check IC can refer to type objects which might be swept. */
-    if (argsCheckPool)
-        resetArgsCheck();
-
-    if (purgeAll) {
-        /* Purge ICs generating stubs into execPools. */
-        uint32 released = 0;
-
-        ic::EqualityICInfo *equalityICs_ = equalityICs();
-        for (uint32 i = 0; i < nEqualityICs; i++) {
-            ic::EqualityICInfo &ic = equalityICs_[i];
-            if (!ic.generated)
-                continue;
-
-            JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic::Equality));
-            repatcher.relink(ic.stubCall, fptr);
-            repatcher.relink(ic.jumpToStub, ic.stubEntry);
-
-            ic.generated = false;
-            released++;
-        }
-
-        ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
-        for (uint32 i = 0; i < nSetGlobalNames; i ++) {
-            ic::SetGlobalNameIC &ic = setGlobalNames_[i];
-            if (!ic.hasExtraStub)
-                continue;
-            repatcher.relink(ic.fastPathStart.jumpAtOffset(ic.inlineShapeJump), ic.slowPathStart);
-            ic.hasExtraStub = false;
-            released++;
-        }
-
-        JS_ASSERT(released == execPools.length());
-        for (uint32 i = 0; i < released; i++)
-            execPools[i]->release();
-        execPools.clear();
-    }
-}
-
-void
-ic::SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll)
-{
-    if (script->jitNormal)
-        script->jitNormal->sweepCallICs(cx, purgeAll);
-    if (script->jitCtor)
-        script->jitCtor->sweepCallICs(cx, purgeAll);
-}
-
 #endif /* JS_MONOIC */
 
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -155,40 +155,16 @@ struct SetGlobalNameIC : public GlobalNa
 
     /* SET only. */
     ValueRemat vr;              /* RHS value. */
 
     void patchInlineShapeGuard(Repatcher &repatcher, int32 shape);
     void patchExtraShapeGuard(Repatcher &repatcher, int32 shape);
 };
 
-struct TraceICInfo {
-    TraceICInfo() {}
-
-    JSC::CodeLocationLabel stubEntry;
-    JSC::CodeLocationLabel fastTarget;
-    JSC::CodeLocationLabel slowTarget;
-    JSC::CodeLocationJump traceHint;
-    JSC::CodeLocationJump slowTraceHint;
-#ifdef DEBUG
-    jsbytecode *jumpTargetPC;
-#endif
-    
-    /* This data is used by the tracing JIT. */
-    void *traceData;
-    uintN traceEpoch;
-    uint32 loopCounter;
-    uint32 loopCounterStart;
-
-    bool initialized : 1;
-    bool hasSlowTraceHint : 1;
-};
-
-static const uint16 BAD_TRACEIC_INDEX = (uint16)0xffff;
-
 void JS_FASTCALL GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic);
 void JS_FASTCALL SetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic);
 
 struct EqualityICInfo {
     typedef JSC::MacroAssembler::RegisterID RegisterID;
 
     JSC::CodeLocationLabel stubEntry;
     JSC::CodeLocationCall stubCall;
@@ -296,17 +272,14 @@ struct CallICInfo {
 void * JS_FASTCALL New(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL Call(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeNew(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeCall(VMFrame &f, ic::CallICInfo *ic);
 JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
 
 void GenerateArgumentCheckStub(VMFrame &f);
 
-void PurgeMICs(JSContext *cx, JSScript *script);
-void SweepCallICs(JSContext *cx, JSScript *script, bool purgeAll);
-
 } /* namespace ic */
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* jsjaeger_mono_ic_h__ */
 
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -3237,62 +3237,10 @@ ic::SetElement(VMFrame &f, ic::SetElemen
     }
 
     stubs::SetElem<strict>(f);
 }
 
 template void JS_FASTCALL ic::SetElement<true>(VMFrame &f, SetElementIC *ic);
 template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
 
-void
-JITScript::purgePICs()
-{
-    if (!nPICs && !nGetElems && !nSetElems)
-        return;
-
-    Repatcher repatcher(this);
-
-    ic::PICInfo *pics_ = pics();
-    for (uint32 i = 0; i < nPICs; i++) {
-        ic::PICInfo &pic = pics_[i];
-        switch (pic.kind) {
-          case ic::PICInfo::SET:
-          case ic::PICInfo::SETMETHOD:
-            SetPropCompiler::reset(repatcher, pic);
-            break;
-          case ic::PICInfo::NAME:
-          case ic::PICInfo::XNAME:
-          case ic::PICInfo::CALLNAME:
-            ScopeNameCompiler::reset(repatcher, pic);
-            break;
-          case ic::PICInfo::BIND:
-            BindNameCompiler::reset(repatcher, pic);
-            break;
-          case ic::PICInfo::CALL: /* fall-through */
-          case ic::PICInfo::GET:
-            GetPropCompiler::reset(repatcher, pic);
-            break;
-          default:
-            JS_NOT_REACHED("Unhandled PIC kind");
-            break;
-        }
-        pic.reset();
-    }
-
-    ic::GetElementIC *getElems_ = getElems();
-    ic::SetElementIC *setElems_ = setElems();
-    for (uint32 i = 0; i < nGetElems; i++)
-        getElems_[i].purge(repatcher);
-    for (uint32 i = 0; i < nSetElems; i++)
-        setElems_[i].purge(repatcher);
-}
-
-void
-ic::PurgePICs(JSContext *cx, JSScript *script)
-{
-    if (script->jitNormal)
-        script->jitNormal->purgePICs();
-    if (script->jitCtor)
-        script->jitCtor->purgePICs();
-}
-
 #endif /* JS_POLYIC */
 
--- a/js/src/methodjit/PolyIC.h
+++ b/js/src/methodjit/PolyIC.h
@@ -55,18 +55,16 @@
 namespace js {
 namespace mjit {
 namespace ic {
 
 /* Maximum number of stubs for a given callsite. */
 static const uint32 MAX_PIC_STUBS = 16;
 static const uint32 MAX_GETELEM_IC_STUBS = 17;
 
-void PurgePICs(JSContext *cx);
-
 enum LookupStatus {
     Lookup_Error = 0,
     Lookup_Uncacheable,
     Lookup_Cacheable
 };
 
 struct BaseIC : public MacroAssemblerTypedefs {
     BaseIC() { }
@@ -549,17 +547,16 @@ struct PICInfo : public BasePolyIC {
     void reset() {
         BasePolyIC::reset();
         inlinePathPatched = false;
         shapeRegHasBaseShape = true;
     }
 };
 
 #ifdef JS_POLYIC
-void PurgePICs(JSContext *cx, JSScript *script);
 void JS_FASTCALL GetProp(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL GetPropNoCache(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL SetProp(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL CallProp(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL Name(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL CallName(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL XName(VMFrame &f, ic::PICInfo *);
 void JS_FASTCALL BindName(VMFrame &f, ic::PICInfo *);
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -108,21 +108,16 @@ struct UncachedCallResult {
  * These functions either execute the function, return a native code
  * pointer that can be used to call the function, or throw.
  */
 void UncachedCallHelper(VMFrame &f, uint32 argc, bool lowered, UncachedCallResult *ucr);
 void UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr);
 
 void JS_FASTCALL CreateThis(VMFrame &f, JSObject *proto);
 void JS_FASTCALL Throw(VMFrame &f);
-#if JS_MONOIC
-void * JS_FASTCALL InvokeTracer(VMFrame &f, ic::TraceICInfo *tic);
-#else
-void * JS_FASTCALL InvokeTracer(VMFrame &f);
-#endif
 
 void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
 void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);
 
 void JS_FASTCALL BindName(VMFrame &f);
 void JS_FASTCALL BindNameNoCache(VMFrame &f, JSAtom *atom);
 JSObject * JS_FASTCALL BindGlobalName(VMFrame &f);
 template<JSBool strict> void JS_FASTCALL SetName(VMFrame &f, JSAtom *atom);