[INFER] Backout bug 685358.
author: Brian Hackett <bhackett1024@gmail.com>
Wed, 14 Sep 2011 22:09:40 -0700
changeset 77078 f933cbe46a03a44d8f318dbc71e9864320cd40b3
parent 77077 c51c15708dcbada41991deca4cc6c39ebbb71dca
child 77079 300e1f974f552c7ef84ac36cef4c19e03f7ac7df
push id: 3
push user: felipc@gmail.com
push date: Fri, 30 Sep 2011 20:09:13 +0000
bugs: 685358
milestone: 9.0a1
[INFER] Backout bug 685358.
js/src/jscompartment.cpp
js/src/jsgcmark.cpp
js/src/jsinterp.cpp
js/src/jsinterp.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/InvokeHelpers.cpp
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/MonoIC.h
js/src/methodjit/PolyIC.cpp
js/src/methodjit/StubCalls.h
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -505,29 +505,38 @@ JSCompartment::sweep(JSContext *cx, bool
     sweepBreakpoints(cx);
 
 #ifdef JS_TRACER
     if (hasTraceMonitor())
         traceMonitor()->sweep(cx);
 #endif
 
 #ifdef JS_METHODJIT
-    mjit::ClearAllFrames(this);
+    /* Purge ICs in the compartment. These can reference GC things. */
+    for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
+        JSScript *script = i.get<JSScript>();
+        mjit::PurgeICs(cx, script);
+    }
+
+    if (types.inferenceEnabled)
+        mjit::ClearAllFrames(this);
 #endif
 
     if (activeAnalysis) {
         /*
          * Analysis information is in use, so don't clear the analysis pool.
          * jitcode still needs to be released, if this is a shape-regenerating
          * GC then shape numbers baked into the code may change.
          */
 #ifdef JS_METHODJIT
-        for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-            JSScript *script = i.get<JSScript>();
-            mjit::ReleaseScriptCode(cx, script);
+        if (types.inferenceEnabled) {
+            for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
+                JSScript *script = i.get<JSScript>();
+                mjit::ReleaseScriptCode(cx, script);
+            }
         }
 #endif
     } else {
         /*
          * Clear the analysis pool, but don't release its data yet. While
          * sweeping types any live data will be allocated into the pool.
          */
         JSArenaPool oldPool;
@@ -555,19 +564,22 @@ JSCompartment::sweep(JSContext *cx, bool
                         script->types->destroy();
                         script->types = NULL;
                         script->typesPurged = true;
                     }
                 }
             }
         } else {
 #ifdef JS_METHODJIT
-            for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
-                JSScript *script = i.get<JSScript>();
-                mjit::ReleaseScriptCode(cx, script);
+            /* :XXX: bug 685358 only releasing jitcode if there are no frames on the stack */
+            if (!active) {
+                for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
+                    JSScript *script = i.get<JSScript>();
+                    mjit::ReleaseScriptCode(cx, script);
+                }
             }
 #endif
         }
 
         types.sweep(cx);
 
         for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -854,16 +854,23 @@ MarkChildren(JSTracer *trc, JSScript *sc
 
     if (IS_GC_MARKING_TRACER(trc) && script->filename)
         js_MarkScriptFilename(script->filename);
 
     script->bindings.trace(trc);
 
     if (script->types)
         script->types->trace(trc);
+
+#ifdef JS_METHODJIT
+    if (script->jitNormal)
+        script->jitNormal->trace(trc);
+    if (script->jitCtor)
+        script->jitCtor->trace(trc);
+#endif
 }
 
 void
 MarkChildren(JSTracer *trc, const Shape *shape)
 {
 restart:
     MarkId(trc, shape->propid, "propid");
 
--- a/js/src/jsinterp.cpp
+++ b/js/src/jsinterp.cpp
@@ -1827,22 +1827,40 @@ js::Interpret(JSContext *cx, StackFrame 
      * if it returns false.
      */
 #define CHECK_BRANCH()                                                        \
     JS_BEGIN_MACRO                                                            \
         if (JS_THREAD_DATA(cx)->interruptFlags && !js_HandleExecutionInterrupt(cx)) \
             goto error;                                                       \
     JS_END_MACRO
 
+#if defined(JS_TRACER) && defined(JS_METHODJIT)
+# define LEAVE_ON_SAFE_POINT()                                                \
+    do {                                                                      \
+        JS_ASSERT_IF(leaveOnSafePoint, !TRACE_RECORDER(cx));                  \
+        JS_ASSERT_IF(leaveOnSafePoint, !TRACE_PROFILER(cx));                  \
+        JS_ASSERT_IF(leaveOnSafePoint, interpMode != JSINTERP_NORMAL);        \
+        if (leaveOnSafePoint && !regs.fp()->hasImacropc() &&                  \
+            script->maybeNativeCodeForPC(regs.fp()->isConstructing(), regs.pc)) { \
+            JS_ASSERT(!TRACE_RECORDER(cx));                                   \
+            interpReturnOK = true;                                            \
+            goto leave_on_safe_point;                                         \
+        }                                                                     \
+    } while (0)
+#else
+# define LEAVE_ON_SAFE_POINT() /* nop */
+#endif
+
 #define BRANCH(n)                                                             \
     JS_BEGIN_MACRO                                                            \
         regs.pc += (n);                                                       \
         op = (JSOp) *regs.pc;                                                 \
         if ((n) <= 0)                                                         \
             goto check_backedge;                                              \
+        LEAVE_ON_SAFE_POINT();                                                \
         DO_OP();                                                              \
     JS_END_MACRO
 
 #define SET_SCRIPT(s)                                                         \
     JS_BEGIN_MACRO                                                            \
         script = (s);                                                         \
         if (script->stepModeEnabled())                                        \
             ENABLE_INTERRUPTS();                                              \
@@ -1868,16 +1886,23 @@ js::Interpret(JSContext *cx, StackFrame 
     JSRuntime *const rt = cx->runtime;
     JSScript *script;
     SET_SCRIPT(regs.fp()->script());
     double *pcCounts = script->pcCounters.get(JSPCCounters::INTERP);
     ENABLE_PCCOUNT_INTERRUPTS();
     Value *argv = regs.fp()->maybeFormalArgs();
     CHECK_INTERRUPT_HANDLER();
 
+#if defined(JS_TRACER) && defined(JS_METHODJIT)
+    bool leaveOnSafePoint = (interpMode == JSINTERP_SAFEPOINT);
+# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) (leaveOnSafePoint = false))
+#else
+# define CLEAR_LEAVE_ON_TRACE_POINT() ((void) 0)
+#endif
+
     if (!entryFrame)
         entryFrame = regs.fp();
 
     /*
      * Initialize the index segment register used by LOAD_ATOM and
      * GET_FULL_INDEX macros below. As a register we use a pointer based on
      * the atom map to turn frequently executed LOAD_ATOM into simple array
      * access. For less frequent object and regexp loads we have to recover
@@ -2052,33 +2077,60 @@ js::Interpret(JSContext *cx, StackFrame 
         }
 
 #ifdef JS_TRACER
 #ifdef JS_METHODJIT
         if (TRACE_PROFILER(cx) && interpMode == JSINTERP_PROFILE) {
             LoopProfile *prof = TRACE_PROFILER(cx);
             JS_ASSERT(!TRACE_RECORDER(cx));
             LoopProfile::ProfileAction act = prof->profileOperation(cx, op);
-            if (act != LoopProfile::ProfComplete)
-                moreInterrupts = true;
+            switch (act) {
+                case LoopProfile::ProfComplete:
+                    if (interpMode != JSINTERP_NORMAL) {
+                        leaveOnSafePoint = true;
+                        LEAVE_ON_SAFE_POINT();
+                    }
+                    break;
+                default:
+                    moreInterrupts = true;
+                    break;
+            }
         }
 #endif
         if (TraceRecorder* tr = TRACE_RECORDER(cx)) {
             JS_ASSERT(!TRACE_PROFILER(cx));
             AbortableRecordingStatus status = tr->monitorRecording(op);
             JS_ASSERT_IF(cx->isExceptionPending(), status == ARECORD_ERROR);
 
+            if (interpMode != JSINTERP_NORMAL) {
+                JS_ASSERT(interpMode == JSINTERP_RECORD || JSINTERP_SAFEPOINT);
+                switch (status) {
+                  case ARECORD_IMACRO_ABORTED:
+                  case ARECORD_ABORTED:
+                  case ARECORD_COMPLETED:
+                  case ARECORD_STOP:
+#ifdef JS_METHODJIT
+                    leaveOnSafePoint = true;
+                    LEAVE_ON_SAFE_POINT();
+#endif
+                    break;
+                  default:
+                    break;
+                }
+            }
+
             switch (status) {
               case ARECORD_CONTINUE:
                 moreInterrupts = true;
                 break;
               case ARECORD_IMACRO:
               case ARECORD_IMACRO_ABORTED:
                 atoms = rt->atomState.commonAtomsStart();
                 op = JSOp(*regs.pc);
+                CLEAR_LEAVE_ON_TRACE_POINT();
                 if (status == ARECORD_IMACRO)
                     DO_OP();    /* keep interrupting for op. */
                 break;
               case ARECORD_ERROR:
                 // The code at 'error:' aborts the recording.
                 goto error;
               case ARECORD_ABORTED:
               case ARECORD_COMPLETED:
@@ -2113,33 +2165,34 @@ ADD_EMPTY_CASE(JSOP_TRY)
 ADD_EMPTY_CASE(JSOP_STARTXML)
 ADD_EMPTY_CASE(JSOP_STARTXMLEXPR)
 #endif
 ADD_EMPTY_CASE(JSOP_NULLBLOCKCHAIN)
 END_EMPTY_CASES
 
 BEGIN_CASE(JSOP_TRACE)
 BEGIN_CASE(JSOP_NOTRACE)
-    /* No-op */
+    LEAVE_ON_SAFE_POINT();
 END_CASE(JSOP_TRACE)
 
 check_backedge:
 {
     CHECK_BRANCH();
     if (op != JSOP_NOTRACE && op != JSOP_TRACE)
         DO_OP();
 
 #ifdef JS_TRACER
     if (TRACING_ENABLED(cx) && (TRACE_RECORDER(cx) || TRACE_PROFILER(cx) || (op == JSOP_TRACE && !useMethodJIT))) {
         MonitorResult r = MonitorLoopEdge(cx, interpMode);
         if (r == MONITOR_RECORDING) {
             JS_ASSERT(TRACE_RECORDER(cx));
             JS_ASSERT(!TRACE_PROFILER(cx));
             MONITOR_BRANCH_TRACEVIS;
             ENABLE_INTERRUPTS();
+            CLEAR_LEAVE_ON_TRACE_POINT();
         }
         JS_ASSERT_IF(cx->isExceptionPending(), r == MONITOR_ERROR);
         RESTORE_INTERP_VARS_CHECK_EXCEPTION();
         op = (JSOp) *regs.pc;
         DO_OP();
     }
 #endif /* JS_TRACER */
 
@@ -2254,16 +2307,17 @@ BEGIN_CASE(JSOP_STOP)
          */
         JS_ASSERT(op == JSOP_STOP);
         JS_ASSERT((uintN)(regs.sp - regs.fp()->slots()) <= script->nslots);
         jsbytecode *imacpc = regs.fp()->imacropc();
         regs.pc = imacpc + js_CodeSpec[*imacpc].length;
         if (js_CodeSpec[*imacpc].format & JOF_DECOMPOSE)
             regs.pc += GetDecomposeLength(imacpc, js_CodeSpec[*imacpc].length);
         regs.fp()->clearImacropc();
+        LEAVE_ON_SAFE_POINT();
         atoms = script->atoms;
         op = JSOp(*regs.pc);
         DO_OP();
     }
 #endif
 
     interpReturnOK = true;
     if (entryFrame != regs.fp())
@@ -5354,16 +5408,22 @@ BEGIN_CASE(JSOP_RETSUB)
     len = rval.toInt32();
     regs.pc = script->code;
 END_VARLEN_CASE
 }
 
 BEGIN_CASE(JSOP_EXCEPTION)
     PUSH_COPY(cx->getPendingException());
     cx->clearPendingException();
+#if defined(JS_TRACER) && defined(JS_METHODJIT)
+    if (interpMode == JSINTERP_PROFILE) {
+        leaveOnSafePoint = true;
+        LEAVE_ON_SAFE_POINT();
+    }
+#endif
     CHECK_BRANCH();
 END_CASE(JSOP_EXCEPTION)
 
 BEGIN_CASE(JSOP_FINALLY)
     CHECK_BRANCH();
 END_CASE(JSOP_FINALLY)
 
 BEGIN_CASE(JSOP_THROWING)
--- a/js/src/jsinterp.h
+++ b/js/src/jsinterp.h
@@ -230,19 +230,20 @@ ExecuteKernel(JSContext *cx, JSScript *s
 extern bool
 Execute(JSContext *cx, JSScript *script, JSObject &scopeChain, Value *rval);
 
 /* Flags to toggle js::Interpret() execution. */
 enum InterpMode
 {
     JSINTERP_NORMAL    = 0, /* interpreter is running normally */
     JSINTERP_RECORD    = 1, /* interpreter has been started to record/run traces */
-    JSINTERP_PROFILE   = 2, /* interpreter should profile a loop */
-    JSINTERP_REJOIN    = 3, /* as normal, but the frame has already started */
-    JSINTERP_SKIP_TRAP = 4  /* as REJOIN, but skip trap at first opcode */
+    JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
+    JSINTERP_PROFILE   = 3, /* interpreter should profile a loop */
+    JSINTERP_REJOIN    = 4, /* as normal, but the frame has already started */
+    JSINTERP_SKIP_TRAP = 5  /* as REJOIN, but skip trap at first opcode */
 };
 
 /*
  * Execute the caller-initialized frame for a user-defined script or function
  * pointed to by cx->fp until completion or error.
  */
 extern JS_REQUIRES_STACK JS_NEVER_INLINE bool
 Interpret(JSContext *cx, StackFrame *stopFp, InterpMode mode = JSINTERP_NORMAL);
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -117,16 +117,17 @@ mjit::Compiler::Compiler(JSContext *cx, 
     callPatches(CompilerAllocPolicy(cx, *thisFromCtor())),
     callSites(CompilerAllocPolicy(cx, *thisFromCtor())),
     doubleList(CompilerAllocPolicy(cx, *thisFromCtor())),
     fixedIntToDoubleEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
     fixedDoubleToAnyEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTables(CompilerAllocPolicy(cx, *thisFromCtor())),
     jumpTableOffsets(CompilerAllocPolicy(cx, *thisFromCtor())),
     loopEntries(CompilerAllocPolicy(cx, *thisFromCtor())),
+    rootedObjects(CompilerAllocPolicy(cx, *thisFromCtor())),
     stubcc(cx, *thisFromCtor(), frame),
     debugMode_(cx->compartment->debugMode()),
 #if defined JS_TRACER
     addTraceHints(cx->traceJitEnabled),
 #else
     addTraceHints(false),
 #endif
     inlining_(false),
@@ -936,16 +937,17 @@ mjit::Compiler::finishThisUp(JITScript *
         }
     }
 
     /* Please keep in sync with JITScript::scriptDataSize! */
     size_t dataSize = sizeof(JITScript) +
                       sizeof(NativeMapEntry) * nNmapLive +
                       sizeof(InlineFrame) * inlineFrames.length() +
                       sizeof(CallSite) * callSites.length() +
+                      sizeof(JSObject *) * rootedObjects.length() +
 #if defined JS_MONOIC
                       sizeof(ic::GetGlobalNameIC) * getGlobalNames.length() +
                       sizeof(ic::SetGlobalNameIC) * setGlobalNames.length() +
                       sizeof(ic::CallICInfo) * callICs.length() +
                       sizeof(ic::EqualityICInfo) * equalityICs.length() +
                       sizeof(ic::TraceICInfo) * traceICs.length() +
 #endif
 #if defined JS_POLYIC
@@ -1067,16 +1069,23 @@ mjit::Compiler::finishThisUp(JITScript *
          * Patch stores of the base call's return address for InvariantFailure
          * calls. InvariantFailure will patch its own return address to this
          * pointer before triggering recompilation.
          */
         if (from.loopPatch.hasPatch)
             stubCode.patch(from.loopPatch.codePatch, result + codeOffset);
     }
 
+    /* Build the list of objects rooted by the script. */
+    JSObject **jitRooted = (JSObject **)cursor;
+    jit->nRootedObjects = rootedObjects.length();
+    cursor += sizeof(JSObject *) * jit->nRootedObjects;
+    for (size_t i = 0; i < jit->nRootedObjects; i++)
+        jitRooted[i] = rootedObjects[i];
+
 #if defined JS_MONOIC
     JS_INIT_CLIST(&jit->callers);
 
     if (script->hasFunction && cx->typeInferenceEnabled()) {
         jit->argsCheckStub = stubCode.locationOf(argsCheckStub);
         jit->argsCheckFallthrough = stubCode.locationOf(argsCheckFallthrough);
         jit->argsCheckJump = stubCode.locationOf(argsCheckJump);
         jit->argsCheckPool = NULL;
@@ -4660,16 +4669,22 @@ mjit::Compiler::jsop_callprop_str(JSAtom
      * We must pass an explicit scope chain only because JSD calls into
      * here via the recompiler with a dummy context, and we need to use
      * the global object for the script we are now compiling.
      */
     JSObject *obj;
     if (!js_GetClassPrototype(cx, globalObj, JSProto_String, &obj))
         return false;
 
+    /*
+     * Root the proto, since JS_ClearScope might overwrite the global object's
+     * copy.
+     */
+    rootedObjects.append(obj);
+
     /* Force into a register because getprop won't expect a constant. */
     RegisterID reg = frame.allocReg();
 
     masm.move(ImmPtr(obj), reg);
     frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
 
     /* Get the property. */
     if (!jsop_getprop(atom, knownPushedType(0)))
@@ -6785,17 +6800,17 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
                                                    offsetof(TraceICInfo, loopCounter)));
 # endif
 
     /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
     {
         jsbytecode* pc = PC;
         PC = target;
 
-        OOL_STUBCALL(stubs::InvokeTracer, REJOIN_RUN_TRACER);
+        OOL_STUBCALL(stubs::InvokeTracer, REJOIN_NONE);
 
         PC = pc;
     }
 
     Jump no = stubcc.masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
                                         Registers::ReturnReg);
     if (!cx->typeInferenceEnabled())
         stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -452,16 +452,17 @@ class Compiler : public BaseCompiler
     js::Vector<CallPatchInfo, 64, CompilerAllocPolicy> callPatches;
     js::Vector<InternalCallSite, 64, CompilerAllocPolicy> callSites;
     js::Vector<DoublePatch, 16, CompilerAllocPolicy> doubleList;
     js::Vector<uint32> fixedIntToDoubleEntries;
     js::Vector<uint32> fixedDoubleToAnyEntries;
     js::Vector<JumpTable, 16> jumpTables;
     js::Vector<uint32, 16> jumpTableOffsets;
     js::Vector<LoopEntry, 16> loopEntries;
+    js::Vector<JSObject *, 0, CompilerAllocPolicy> rootedObjects;
     StubCompiler stubcc;
     Label invokeLabel;
     Label arityLabel;
     Label argsCheckLabel;
 #ifdef JS_MONOIC
     Label argsCheckStub;
     Label argsCheckFallthrough;
     Jump argsCheckJump;
--- a/js/src/methodjit/InvokeHelpers.cpp
+++ b/js/src/methodjit/InvokeHelpers.cpp
@@ -172,27 +172,20 @@ top:
 }
 
 /*
  * Clean up a frame and return.
  */
 static void
 InlineReturn(VMFrame &f)
 {
-    bool shiftResult = f.fp()->loweredCallOrApply();
-
     JS_ASSERT(f.fp() != f.entryfp);
     JS_ASSERT(!IsActiveWithOrBlock(f.cx, f.fp()->scopeChain(), 0));
     f.cx->stack.popInlineFrame(f.regs);
 
-    if (shiftResult) {
-        f.regs.sp[-2] = f.regs.sp[-1];
-        f.regs.sp--;
-    }
-
     DebugOnly<JSOp> op = js_GetOpcode(f.cx, f.fp()->script(), f.regs.pc);
     JS_ASSERT(op == JSOP_CALL ||
               op == JSOP_NEW ||
               op == JSOP_EVAL ||
               op == JSOP_FUNCALL ||
               op == JSOP_FUNAPPLY);
     f.regs.pc += JSOP_CALL_LENGTH;
 }
@@ -776,31 +769,47 @@ HandleErrorInExcessFrame(VMFrame &f, Sta
     }
 
     JS_ASSERT(&f.regs == &cx->regs());
     JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
 
     return returnOK;
 }
 
+/* Returns whether the current PC has method JIT'd code. */
+static inline void *
+AtSafePoint(JSContext *cx)
+{
+    StackFrame *fp = cx->fp();
+    if (fp->hasImacropc())
+        return NULL;
+
+    JSScript *script = fp->script();
+    return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc);
+}
+
 /*
  * Interprets until either a safe point is reached that has method JIT'd
  * code, or the current frame tries to return.
  */
 static inline JSBool
 PartialInterpret(VMFrame &f)
 {
     JSContext *cx = f.cx;
     StackFrame *fp = cx->fp();
-    JS_ASSERT(!fp->finishedInInterpreter());
 
-    JS_ASSERT(!cx->compartment->jaegerCompartment()->finishingTracer);
-    cx->compartment->jaegerCompartment()->finishingTracer = true;
-    JSBool ok = Interpret(cx, fp, JSINTERP_REJOIN);
-    cx->compartment->jaegerCompartment()->finishingTracer = false;
+#ifdef DEBUG
+    JSScript *script = fp->script();
+    JS_ASSERT(!fp->finishedInInterpreter());
+    JS_ASSERT(fp->hasImacropc() ||
+              !script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc));
+#endif
+
+    JSBool ok = JS_TRUE;
+    ok = Interpret(cx, fp, JSINTERP_SAFEPOINT);
 
     return ok;
 }
 
 JS_STATIC_ASSERT(JSOP_NOP == 0);
 
 /*
  * Returns whether the current PC would return, or if the frame has already
@@ -902,16 +911,23 @@ EvaluateExcessFrame(VMFrame &f, StackFra
      * A "finished" frame is when the interpreter rested on a STOP,
      * RETURN, RETRVAL, etc. We check for finished frames BEFORE looking
      * for a safe point. If the frame was finished, we could have already
      * called ScriptEpilogue(), and entering the JIT could call it twice.
      */
     if (!fp->hasImacropc() && FrameIsFinished(cx))
         return HandleFinishedFrame(f, entryFrame);
 
+    if (void *ncode = AtSafePoint(cx)) {
+        if (!JaegerShotAtSafePoint(cx, ncode, false))
+            return false;
+        InlineReturn(f);
+        return true;
+    }
+
     return PartialInterpret(f);
 }
 
 /*
  * Evaluate frames newer than the entry frame until all are gone. This will
  * always leave f.regs.fp == entryFrame.
  */
 static bool
@@ -992,37 +1008,30 @@ js::mjit::ResetTraceHint(JSScript *scrip
         ResetTraceHintAt(script, script->jitNormal, pc, index, full);
 
     if (script->jitCtor)
         ResetTraceHintAt(script, script->jitCtor, pc, index, full);
 #endif
 }
 
 #if JS_MONOIC
-JSBool
+void *
 RunTracer(VMFrame &f, ic::TraceICInfo &ic)
 #else
-JSBool
+void *
 RunTracer(VMFrame &f)
 #endif
 {
     JSContext *cx = f.cx;
     StackFrame *entryFrame = f.fp();
     TracePointAction tpa;
 
     /* :TODO: nuke PIC? */
     if (!cx->traceJitEnabled)
-        return false;
-
-    /*
-     * Don't reenter the tracer while finishing frames we bailed out from,
-     * to avoid over-recursing.
-     */
-    if (cx->compartment->jaegerCompartment()->finishingTracer)
-        return false;
+        return NULL;
 
     /*
      * Force initialization of the entry frame's scope chain and return value,
      * if necessary.  The tracer can query the scope chain without needing to
      * check the HAS_SCOPECHAIN flag, and the frame is guaranteed to have the
      * correct return value stored if we trace/interpret through to the end
      * of the frame.
      */
@@ -1042,58 +1051,51 @@ RunTracer(VMFrame &f)
     hits = ic.loopCounterStart;
 #else
     traceData = NULL;
     traceEpoch = NULL;
     loopCounter = NULL;
     hits = 1;
 #endif
 
-    RecompilationMonitor monitor(cx);
-
     {
         /*
          * While the tracer is running, redirect the regs to a local variable here.
          * If the tracer exits during an inlined frame, it will synthesize those
          * frames, point f.regs.fp at them and then enter the interpreter. If the
          * interpreter pops the frames it will not be reflected here as a local
          * set of regs is used by the interpreter, and f->regs end up pointing at
          * garbage, confusing the recompiler.
          */
         FrameRegs regs = f.regs;
         PreserveRegsGuard regsGuard(cx, regs);
 
         tpa = MonitorTracePoint(f.cx, &blacklist, traceData, traceEpoch,
                                 loopCounter, hits);
         JS_ASSERT(!TRACE_RECORDER(cx));
-
-        if (tpa != TPA_Nothing)
-            ClearAllFrames(cx->compartment);
     }
 
 #if JS_MONOIC
-    if (!monitor.recompiled()) {
-        ic.loopCounterStart = *loopCounter;
-        if (blacklist)
-            DisableTraceHint(entryFrame->jit(), ic);
-    }
+    ic.loopCounterStart = *loopCounter;
+    if (blacklist)
+        DisableTraceHint(entryFrame->jit(), ic);
 #endif
 
     // Even though ExecuteTree() bypasses the interpreter, it should propagate
     // error failures correctly.
     JS_ASSERT_IF(cx->isExceptionPending(), tpa == TPA_Error);
 
     JS_ASSERT(f.fp() == cx->fp());
     switch (tpa) {
       case TPA_Nothing:
-        return false;
+        return NULL;
 
       case TPA_Error:
         if (!HandleErrorInExcessFrame(f, entryFrame, f.fp()->finishedInInterpreter()))
-            THROWV(false);
+            THROWV(NULL);
         JS_ASSERT(!cx->fp()->hasImacropc());
         break;
 
       case TPA_RanStuff:
       case TPA_Recorded:
         break;
     }
 
@@ -1119,51 +1121,56 @@ RunTracer(VMFrame &f)
      * trampoline. This trampoline simulates the frame-popping portion of
      * emitReturn (except without the benefit of the FrameState) and will
      * produce the necessary register state to return to the caller.
      */
 
   restart:
     /* Step 1. Finish frames created after the entry frame. */
     if (!FinishExcessFrames(f, entryFrame))
-        THROWV(false);
+        THROWV(NULL);
 
     /* IMacros are guaranteed to have been removed by now. */
     JS_ASSERT(f.fp() == entryFrame);
     JS_ASSERT(!entryFrame->hasImacropc());
 
     /* Step 2. If entryFrame is done, use a special path to return to EnterMethodJIT(). */
     if (FrameIsFinished(cx)) {
         if (!HandleFinishedFrame(f, entryFrame))
-            THROWV(false);
-        return true;
+            THROWV(NULL);
+        *f.returnAddressLocation() = cx->jaegerCompartment()->forceReturnFromFastCall();
+        return NULL;
     }
 
-    /* Step 3. Do a partial interp, then restart the whole process. */
+    /* Step 3. If entryFrame is at a safe point, just leave. */
+    if (void *ncode = AtSafePoint(cx))
+        return ncode;
+
+    /* Step 4. Do a partial interp, then restart the whole process. */
     if (!PartialInterpret(f)) {
         if (!HandleErrorInExcessFrame(f, entryFrame))
-            THROWV(false);
+            THROWV(NULL);
     }
 
     goto restart;
 }
 
 #endif /* JS_TRACER */
 
 #if defined JS_TRACER
 # if defined JS_MONOIC
-JSBool JS_FASTCALL
+void * JS_FASTCALL
 stubs::InvokeTracer(VMFrame &f, ic::TraceICInfo *ic)
 {
     return RunTracer(f, *ic);
 }
 
 # else
 
-JSBool JS_FASTCALL
+void * JS_FASTCALL
 stubs::InvokeTracer(VMFrame &f)
 {
     return RunTracer(f);
 }
 # endif /* JS_MONOIC */
 #endif /* JS_TRACER */
 
 /* :XXX: common out with identical copy in Compiler.cpp */
@@ -1554,33 +1561,16 @@ js_InternalInterpret(void *returnData, v
         }
         if (takeBranch)
             f.regs.pc = nextpc + GET_JUMP_OFFSET(nextpc);
         else
             f.regs.pc = nextpc + analyze::GetBytecodeLength(nextpc);
         break;
       }
 
-      case REJOIN_RUN_TRACER:
-        if (returnReg) {
-            /* InvokeTracer finishes the frame it is given, including the epilogue. */
-            if (fp->isFunctionFrame())
-                fp->markFunctionEpilogueDone();
-            if (fp != f.entryfp) {
-                InlineReturn(f);
-                cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Unfinished);
-                *f.oldregs = f.regs;
-                return NULL;
-            } else {
-                cx->compartment->jaegerCompartment()->setLastUnfinished(Jaeger_Returned);
-                return NULL;
-            }
-        }
-        break;
-
       default:
         JS_NOT_REACHED("Missing rejoin");
     }
 
     if (nextDepth == uint32(-1))
         nextDepth = analysis->getCode(f.regs.pc).stackDepth;
     f.regs.sp = fp->base() + nextDepth;
 
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -889,17 +889,17 @@ mjit::EnterMethodJIT(JSContext *cx, Stac
     JaegerSpew(JSpew_Prof, "script run took %d ms\n", prof.time_ms());
 #endif
 
     /* Undo repointRegs in SetVMFrameRegs. */
     cx->stack.repointRegs(&oldRegs);
 
     JaegerStatus status = cx->compartment->jaegerCompartment()->lastUnfinished();
     if (status) {
-        if (partial || status == Jaeger_Returned) {
+        if (partial) {
             /*
              * Being called from the interpreter, which will resume execution
              * where the JIT left off.
              */
             return status;
         }
 
         /*
@@ -985,20 +985,26 @@ JITScript::inlineFrames() const
 }
 
 js::mjit::CallSite *
 JITScript::callSites() const
 {
     return (js::mjit::CallSite *)&inlineFrames()[nInlineFrames];
 }
 
+JSObject **
+JITScript::rootedObjects() const
+{
+    return (JSObject **)&callSites()[nCallSites];
+}
+
 char *
 JITScript::commonSectionLimit() const
 {
-    return (char *)&callSites()[nCallSites];
+    return (char *)&rootedObjects()[nRootedObjects];
 }
 
 #ifdef JS_MONOIC
 ic::GetGlobalNameIC *
 JITScript::getGlobalNames() const
 {
     return (ic::GetGlobalNameIC *) commonSectionLimit();
 }
@@ -1144,16 +1150,17 @@ size_t
 mjit::JITScript::scriptDataSize(JSUsableSizeFun usf)
 {
     size_t usable = usf ? usf(this) : 0;
     return usable ? usable :
         sizeof(JITScript) +
         sizeof(NativeMapEntry) * nNmapPairs +
         sizeof(InlineFrame) * nInlineFrames +
         sizeof(CallSite) * nCallSites +
+        sizeof(JSObject *) * nRootedObjects +
 #if defined JS_MONOIC
         sizeof(ic::GetGlobalNameIC) * nGetGlobalNames +
         sizeof(ic::SetGlobalNameIC) * nSetGlobalNames +
         sizeof(ic::CallICInfo) * nCallICs +
         sizeof(ic::EqualityICInfo) * nEqualityICs +
         sizeof(ic::TraceICInfo) * nTraceICs +
 #endif
 #if defined JS_POLYIC
@@ -1283,9 +1290,46 @@ JITScript::nativeToPC(void *returnAddres
 }
 
 jsbytecode *
 mjit::NativeToPC(JITScript *jit, void *ncode, mjit::CallSite **pinline)
 {
     return jit->nativeToPC(ncode, pinline);
 }
 
+void
+JITScript::trace(JSTracer *trc)
+{
+    /*
+     * MICs and PICs attached to the JITScript are weak references, and either
+     * entirely purged or selectively purged on each GC. We do, however, need
+     * to maintain references to any scripts whose code was inlined into this.
+     */
+    InlineFrame *inlineFrames_ = inlineFrames();
+    for (unsigned i = 0; i < nInlineFrames; i++)
+        MarkObject(trc, *inlineFrames_[i].fun, "jitscript_fun");
+
+    for (uint32 i = 0; i < nRootedObjects; ++i)
+        MarkObject(trc, *rootedObjects()[i], "mjit rooted object");
+}
+
+void
+mjit::PurgeICs(JSContext *cx, JSScript *script)
+{
+#ifdef JS_MONOIC
+    if (script->jitNormal) {
+        script->jitNormal->purgeMICs();
+        script->jitNormal->sweepCallICs(cx);
+    }
+    if (script->jitCtor) {
+        script->jitCtor->purgeMICs();
+        script->jitCtor->sweepCallICs(cx);
+    }
+#endif
+#ifdef JS_POLYIC
+    if (script->jitNormal)
+        script->jitNormal->purgePICs();
+    if (script->jitCtor)
+        script->jitCtor->purgePICs();
+#endif
+}
+
 /* static */ const double mjit::Assembler::oneDouble = 1.0;
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -312,20 +312,17 @@ enum RejoinState {
     REJOIN_GETTER,
     REJOIN_POS,
     REJOIN_BINARY,
 
     /*
      * For an opcode fused with IFEQ/IFNE, call returns a boolean indicating
      * the result of the comparison and whether to take or not take the branch.
      */
-    REJOIN_BRANCH,
-
-    /* Calls to RunTracer which either finished the frame or did nothing. */
-    REJOIN_RUN_TRACER
+    REJOIN_BRANCH
 };
 
 /* Helper to watch for recompilation and frame expansion activity on a compartment. */
 struct RecompilationMonitor
 {
     JSContext *cx;
 
     /*
@@ -458,19 +455,16 @@ class JaegerCompartment {
 
     /*
      * References held on pools created for native ICs, where the IC was
      * destroyed and we are waiting for the pool to finish use and jump
      * into the interpoline.
      */
     Vector<StackFrame *, 8, SystemAllocPolicy> orphanedNativeFrames;
     Vector<JSC::ExecutablePool *, 8, SystemAllocPolicy> orphanedNativePools;
-
-    /* Whether frames pushed after bailing out in RunTracer are unwinding. */
-    bool finishingTracer;
 };
 
 /*
  * Allocation policy for compiler jstl objects. The goal is to free the
  * compiler from having to check and propagate OOM after every time we
  * append to a vector. We do this by reporting OOM to the engine and
  * setting a flag on the compiler when OOM occurs. The compiler is required
  * to check for OOM only before trying to use the contents of the list.
@@ -607,16 +601,17 @@ struct JITScript {
      * Therefore, do not change the section ordering in finishThisUp() without
      * changing nMICs() et al as well.
      */
     uint32          nNmapPairs:31;      /* The NativeMapEntrys are sorted by .bcOff.
                                            .ncode values may not be NULL. */
     bool            singleStepMode:1;   /* compiled in "single step mode" */
     uint32          nInlineFrames;
     uint32          nCallSites;
+    uint32          nRootedObjects;
 #ifdef JS_MONOIC
     uint32          nGetGlobalNames;
     uint32          nSetGlobalNames;
     uint32          nCallICs;
     uint32          nEqualityICs;
     uint32          nTraceICs;
 #endif
 #ifdef JS_POLYIC
@@ -644,16 +639,17 @@ struct JITScript {
 #endif
 
     // Additional ExecutablePools for native call and getter stubs.
     Vector<NativeCallStub, 0, SystemAllocPolicy> nativeCallStubs;
 
     NativeMapEntry *nmap() const;
     js::mjit::InlineFrame *inlineFrames() const;
     js::mjit::CallSite *callSites() const;
+    JSObject **rootedObjects() const;
 #ifdef JS_MONOIC
     ic::GetGlobalNameIC *getGlobalNames() const;
     ic::SetGlobalNameIC *setGlobalNames() const;
     ic::CallICInfo *callICs() const;
     ic::EqualityICInfo *equalityICs() const;
     ic::TraceICInfo *traceICs() const;
 #endif
 #ifdef JS_POLYIC
@@ -665,31 +661,38 @@ struct JITScript {
     ~JITScript();
 
     bool isValidCode(void *ptr) {
         char *jitcode = (char *)code.m_code.executableAddress();
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }
 
-    void nukeScriptDependentICs();
     void purgeGetterPICs();
 
+    void sweepCallICs(JSContext *cx);
+    void purgeMICs();
+    void purgePICs();
+
+    void trace(JSTracer *trc);
+
     /* |usf| can be NULL here, in which case the fallback size computation will be used. */
     size_t scriptDataSize(JSUsableSizeFun usf);
 
     jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
 
   private:
     /* Helpers used to navigate the variable-length sections. */
     char *commonSectionLimit() const;
     char *monoICSectionsLimit() const;
     char *polyICSectionsLimit() const;
 };
 
+void PurgeICs(JSContext *cx, JSScript *script);
+
 /*
  * Execute the given mjit code. This is a low-level call and callers must
  * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
  */
 JaegerStatus EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit,
                             bool partial);
 
 /* Execute a method that has been JIT compiled. */
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -1384,10 +1384,83 @@ JITScript::resetArgsCheck()
 {
     argsCheckPool->release();
     argsCheckPool = NULL;
 
     Repatcher repatch(this);
     repatch.relink(argsCheckJump, argsCheckStub);
 }
 
+void
+JITScript::purgeMICs()
+{
+    if (!nGetGlobalNames || !nSetGlobalNames)
+        return;
+
+    Repatcher repatch(this);
+
+    ic::GetGlobalNameIC *getGlobalNames_ = getGlobalNames();
+    for (uint32 i = 0; i < nGetGlobalNames; i++) {
+        ic::GetGlobalNameIC &ic = getGlobalNames_[i];
+        JSC::CodeLocationDataLabel32 label = ic.fastPathStart.dataLabel32AtOffset(ic.shapeOffset);
+        repatch.repatch(label, int(INVALID_SHAPE));
+    }
+
+    ic::SetGlobalNameIC *setGlobalNames_ = setGlobalNames();
+    for (uint32 i = 0; i < nSetGlobalNames; i++) {
+        ic::SetGlobalNameIC &ic = setGlobalNames_[i];
+        ic.patchInlineShapeGuard(repatch, int32(INVALID_SHAPE));
+
+        if (ic.hasExtraStub) {
+            Repatcher repatcher(ic.extraStub);
+            ic.patchExtraShapeGuard(repatcher, int32(INVALID_SHAPE));
+        }
+    }
+}
+
+void
+JITScript::sweepCallICs(JSContext *cx)
+{
+    Repatcher repatcher(this);
+
+    ic::CallICInfo *callICs_ = callICs();
+    for (uint32 i = 0; i < nCallICs; i++) {
+        ic::CallICInfo &ic = callICs_[i];
+
+        /*
+         * If the object is unreachable, we're guaranteed not to be currently
+         * executing a stub generated by a guard on that object. This lets us
+         * precisely GC call ICs while keeping the identity guard safe.
+         */
+        bool fastFunDead = ic.fastGuardedObject &&
+            IsAboutToBeFinalized(cx, ic.fastGuardedObject);
+        bool nativeDead = ic.fastGuardedNative &&
+            IsAboutToBeFinalized(cx, ic.fastGuardedNative);
+
+        /*
+         * There are two conditions where we need to relink:
+         * (1) The native is dead, since it always has a stub.
+         * (2) The fastFun is dead *and* there is a closure stub.
+         *
+         * Note that although both objects can be non-NULL, there can only be one
+         * of [closure, native] stub per call IC.
+         */
+        if (nativeDead || (fastFunDead && ic.hasJsFunCheck)) {
+            repatcher.relink(ic.funJump, ic.slowPathStart);
+            ic.hit = false;
+        }
+
+        if (fastFunDead) {
+            repatcher.repatch(ic.funGuard, NULL);
+            ic.purgeGuardedObject();
+        }
+
+        if (nativeDead)
+            ic.fastGuardedNative = NULL;
+    }
+
+    /* The arguments type check IC can refer to type objects which might be swept. */
+    if (argsCheckPool)
+        resetArgsCheck();
+}
+
 #endif /* JS_MONOIC */
 
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -279,16 +279,24 @@ struct CallICInfo {
 
     inline void releasePool(PoolIndex index) {
         if (pools[index]) {
             pools[index]->release();
             pools[index] = NULL;
         }
     }
 
+    inline void purgeGuardedObject() {
+        JS_ASSERT(fastGuardedObject);
+        releasePool(CallICInfo::Pool_ClosureStub);
+        hasJsFunCheck = false;
+        fastGuardedObject = NULL;
+        JS_REMOVE_LINK(&links);
+    }
+
     void purge();
 };
 
 void * JS_FASTCALL New(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL Call(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeNew(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL NativeCall(VMFrame &f, ic::CallICInfo *ic);
 JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -3244,10 +3244,53 @@ JITScript::purgeGetterPICs()
             pic.reset();
             break;
           default:
             break;
         }
     }
 }
 
+void
+JITScript::purgePICs()
+{
+    if (!nPICs && !nGetElems && !nSetElems)
+        return;
+
+    Repatcher repatcher(this);
+
+    ic::PICInfo *pics_ = pics();
+    for (uint32 i = 0; i < nPICs; i++) {
+        ic::PICInfo &pic = pics_[i];
+        switch (pic.kind) {
+          case ic::PICInfo::SET:
+          case ic::PICInfo::SETMETHOD:
+            SetPropCompiler::reset(repatcher, pic);
+            break;
+          case ic::PICInfo::NAME:
+          case ic::PICInfo::XNAME:
+          case ic::PICInfo::CALLNAME:
+            ScopeNameCompiler::reset(repatcher, pic);
+            break;
+          case ic::PICInfo::BIND:
+            BindNameCompiler::reset(repatcher, pic);
+            break;
+          case ic::PICInfo::CALL: /* fall-through */
+          case ic::PICInfo::GET:
+            GetPropCompiler::reset(repatcher, pic);
+            break;
+          default:
+            JS_NOT_REACHED("Unhandled PIC kind");
+            break;
+        }
+        pic.reset();
+    }
+
+    ic::GetElementIC *getElems_ = getElems();
+    ic::SetElementIC *setElems_ = setElems();
+    for (uint32 i = 0; i < nGetElems; i++)
+        getElems_[i].purge(repatcher);
+    for (uint32 i = 0; i < nSetElems; i++)
+        setElems_[i].purge(repatcher);
+}
+
 #endif /* JS_POLYIC */
 
--- a/js/src/methodjit/StubCalls.h
+++ b/js/src/methodjit/StubCalls.h
@@ -109,19 +109,19 @@ struct UncachedCallResult {
  * pointer that can be used to call the function, or throw.
  */
 void UncachedCallHelper(VMFrame &f, uint32 argc, bool lowered, UncachedCallResult *ucr);
 void UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr);
 
 void JS_FASTCALL CreateThis(VMFrame &f, JSObject *proto);
 void JS_FASTCALL Throw(VMFrame &f);
 #if JS_MONOIC
-JSBool JS_FASTCALL InvokeTracer(VMFrame &f, ic::TraceICInfo *tic);
+void * JS_FASTCALL InvokeTracer(VMFrame &f, ic::TraceICInfo *tic);
 #else
-JSBool JS_FASTCALL InvokeTracer(VMFrame &f);
+void * JS_FASTCALL InvokeTracer(VMFrame &f);
 #endif
 
 void * JS_FASTCALL LookupSwitch(VMFrame &f, jsbytecode *pc);
 void * JS_FASTCALL TableSwitch(VMFrame &f, jsbytecode *origPc);
 
 void JS_FASTCALL BindName(VMFrame &f);
 void JS_FASTCALL BindNameNoCache(VMFrame &f, JSAtom *atom);
 JSObject * JS_FASTCALL BindGlobalName(VMFrame &f);