Periodically discard JIT code during GC, bug 617656. r=dmandelin
author: Brian Hackett <bhackett1024@gmail.com>
Fri, 17 Dec 2010 16:33:04 -0800
changeset 59895 bd9cfa70bf187d7a04fb2de5b451db867fe7dd12
parent 59894 123fa989ef7066ec0c401e5ec404805d6a2d3d30
child 59896 c475ea5e257614a36fe98bd7a698132c5a87c18c
push id: unknown
push user: unknown
push date: unknown
reviewers: dmandelin
bugs: 617656
milestone: 2.0b8pre
Periodically discard JIT code during GC, bug 617656. r=dmandelin
js/src/assembler/jit/ExecutableAllocator.h
js/src/jscntxt.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/Compiler.h
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/MonoIC.h
--- a/js/src/assembler/jit/ExecutableAllocator.h
+++ b/js/src/assembler/jit/ExecutableAllocator.h
@@ -165,16 +165,23 @@ public:
     {
         Allocation* end = m_pools.end();
         for (Allocation* ptr = m_pools.begin(); ptr != end; ++ptr)
             ExecutablePool::systemRelease(*ptr);
     }
 
     size_t available() const { return (m_pools.length() > 1) ? 0 : m_end - m_freePtr; }
 
+    // Flag for downstream use, whether to try to release references to this pool.
+    bool m_destroy;
+
+    // GC number in which the m_destroy flag was most recently set. Used downstream to
+    // remember whether m_destroy was computed for the currently active GC.
+    size_t m_gcNumber;
+
 private:
     // On OOM, this will return an Allocation where pages is NULL.
     static Allocation systemAlloc(size_t n);
     static void systemRelease(const Allocation& alloc);
 
     ExecutablePool(size_t n);
 
     void* poolAllocate(size_t n);
@@ -388,17 +395,17 @@ private:
     static const size_t maxSmallPools = 4;
     typedef js::Vector<ExecutablePool *, maxSmallPools, js::SystemAllocPolicy > SmallExecPoolVector;
     SmallExecPoolVector m_smallAllocationPools;
     static void intializePageSize();
 };
 
 // This constructor can fail due to OOM. If it does, m_freePtr will be
 // set to NULL. 
-inline ExecutablePool::ExecutablePool(size_t n) : m_refCount(1)
+inline ExecutablePool::ExecutablePool(size_t n) : m_refCount(1), m_destroy(false), m_gcNumber(0)
 {
     size_t allocSize = roundUpAllocationSize(n, JIT_ALLOCATOR_PAGE_SIZE);
     if (allocSize == OVERSIZE_ALLOCATION) {
         m_freePtr = NULL;
         return;
     }
 #ifdef DEBUG_STRESS_JSC_ALLOCATOR
     Allocation mem = systemAlloc(size_t(4294967291));
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -1317,16 +1317,17 @@ struct JSRuntime {
     size_t              gcTriggerBytes;
     size_t              gcLastBytes;
     size_t              gcMaxBytes;
     size_t              gcMaxMallocBytes;
     uint32              gcEmptyArenaPoolLifespan;
     uint32              gcNumber;
     js::GCMarker        *gcMarkingTracer;
     uint32              gcTriggerFactor;
+    int64               gcJitReleaseTime;
     volatile JSBool     gcIsNeeded;
 
     /*
      * We can pack these flags as only the GC thread writes to them. Atomic
      * updates to packed bytes are not guaranteed, so stores issued by one
      * thread may be lost due to unsynchronized read-modify-write cycles on
      * other threads.
      */
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -49,17 +49,17 @@
 #include "methodjit/MonoIC.h"
 
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 JSCompartment::JSCompartment(JSRuntime *rt)
-  : rt(rt), principals(NULL), data(NULL), marked(false), debugMode(rt->debugMode),
+  : rt(rt), principals(NULL), data(NULL), marked(false), active(false), debugMode(rt->debugMode),
     anynameObject(NULL), functionNamespaceObject(NULL)
 {
     JS_INIT_CLIST(&scripts);
 }
 
 JSCompartment::~JSCompartment()
 {
 #ifdef JS_METHODJIT
@@ -326,38 +326,89 @@ JSCompartment::wrapException(JSContext *
             cx->throwing = true;
             cx->exception = tvr.value();
         }
         return false;
     }
     return true;
 }
 
+/*
+ * Check if the pool containing the code for jit should be destroyed, per the
+ * heuristics in JSCompartment::sweep.
+ */
+static inline bool
+ScriptPoolDestroyed(JSContext *cx, mjit::JITScript *jit,
+                    uint32 releaseInterval, uint32 &counter)
+{
+    JSC::ExecutablePool *pool = jit->code.m_executablePool;
+    if (pool->m_gcNumber != cx->runtime->gcNumber) {
+        /*
+         * The m_destroy flag may have been set in a previous GC for a pool which had
+         * references we did not remove (e.g. from the compartment's ExecutableAllocator)
+         * and is still around. Forget we tried to destroy it in such cases.
+         */
+        pool->m_destroy = false;
+        pool->m_gcNumber = cx->runtime->gcNumber;
+        if (--counter == 0) {
+            pool->m_destroy = true;
+            counter = releaseInterval;
+        }
+    }
+    return pool->m_destroy;
+}
+
 void
-JSCompartment::sweep(JSContext *cx)
+JSCompartment::sweep(JSContext *cx, uint32 releaseInterval)
 {
     chunk = NULL;
     /* Remove dead wrappers from the table. */
     for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
         JS_ASSERT_IF(IsAboutToBeFinalized(e.front().key.toGCThing()) &&
                      !IsAboutToBeFinalized(e.front().value.toGCThing()),
                      e.front().key.isString());
         if (IsAboutToBeFinalized(e.front().key.toGCThing()) ||
             IsAboutToBeFinalized(e.front().value.toGCThing())) {
             e.removeFront();
         }
     }
 
 #if defined JS_METHODJIT && defined JS_MONOIC
+
+    /*
+     * The release interval is the frequency with which we should try to destroy
+     * executable pools by releasing all JIT code in them, zero to never destroy pools.
+     * Initialize counter so that the first pool will be destroyed, and eventually drive
+     * the amount of JIT code in never-used compartments to zero. Don't discard anything
+     * for compartments which currently have active stack frames.
+     */
+    uint32 counter = 1;
+    bool discardScripts = !active && releaseInterval != 0;
+
     for (JSCList *cursor = scripts.next; cursor != &scripts; cursor = cursor->next) {
         JSScript *script = reinterpret_cast<JSScript *>(cursor);
-        if (script->hasJITCode())
-            mjit::ic::SweepCallICs(script);
+        if (script->hasJITCode()) {
+            mjit::ic::SweepCallICs(script, discardScripts);
+            if (discardScripts) {
+                if (script->jitNormal &&
+                    ScriptPoolDestroyed(cx, script->jitNormal, releaseInterval, counter)) {
+                    mjit::ReleaseScriptCode(cx, script);
+                    continue;
+                }
+                if (script->jitCtor &&
+                    ScriptPoolDestroyed(cx, script->jitCtor, releaseInterval, counter)) {
+                    mjit::ReleaseScriptCode(cx, script);
+                }
+            }
+        }
     }
-#endif
+
+#endif /* JS_METHODJIT && JS_MONOIC */
+
+    active = false;
 }
 
 void
 JSCompartment::purge(JSContext *cx)
 {
     freeLists.purge();
 
 #ifdef JS_METHODJIT
@@ -365,18 +416,18 @@ JSCompartment::purge(JSContext *cx)
          &script->links != &scripts;
          script = (JSScript *)script->links.next) {
         if (script->hasJITCode()) {
 # if defined JS_POLYIC
             mjit::ic::PurgePICs(cx, script);
 # endif
 # if defined JS_MONOIC
             /*
-             * MICs do not refer to data which can be GC'ed, but are sensitive
-             * to shape regeneration.
+             * MICs do not refer to data which can be GC'ed and do not generate stubs
+             * which might need to be discarded, but are sensitive to shape regeneration.
              */
             if (cx->runtime->gcRegenShapes)
                 mjit::ic::PurgeMICs(cx, script);
 # endif
         }
     }
 #endif
 }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -69,16 +69,17 @@ struct JS_FRIEND_API(JSCompartment) {
     js::gc::FreeLists            freeLists;
 
 #ifdef JS_GCMETER
     js::gc::JSGCArenaStats       compartmentStats[js::gc::FINALIZE_LIMIT];
 #endif
 
     void                         *data;
     bool                         marked;
+    bool                         active;  // GC flag, whether there are active frames
     js::WrapperMap               crossCompartmentWrappers;
 
 #ifdef JS_METHODJIT
     js::mjit::JaegerCompartment  *jaegerCompartment;
 #endif
 
     bool                         debugMode;  // true iff debug mode on
     JSCList                      scripts;    // scripts in this compartment
@@ -102,17 +103,17 @@ struct JS_FRIEND_API(JSCompartment) {
     bool wrap(JSContext *cx, JSString **strp);
     bool wrap(JSContext *cx, JSObject **objp);
     bool wrapId(JSContext *cx, jsid *idp);
     bool wrap(JSContext *cx, js::PropertyOp *op);
     bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
     bool wrap(JSContext *cx, js::AutoIdVector &props);
     bool wrapException(JSContext *cx);
 
-    void sweep(JSContext *cx);
+    void sweep(JSContext *cx, uint32 releaseInterval);
     void purge(JSContext *cx);
     void finishArenaLists();
     bool arenaListsAreEmpty();
 };
 
 #ifdef _MSC_VER
 #pragma warning(pop)
 #endif
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -487,16 +487,23 @@ IsAboutToBeFinalized(void *thing)
 JS_FRIEND_API(bool)
 js_GCThingIsMarked(void *thing, uint32 color = BLACK)
 {
     JS_ASSERT(thing);
     AssertValidColor(thing, color);
     return reinterpret_cast<Cell *>(thing)->isMarked(color);
 }
 
+/*
+ * 1/8 life for JIT code. After this number of microseconds have passed, 1/8 of all
+ * JIT code is discarded in inactive compartments, regardless of how often that
+ * code runs.
+ */
+static const int64 JIT_SCRIPT_EIGHTH_LIFETIME = 120 * 1000 * 1000;
+
 JSBool
 js_InitGC(JSRuntime *rt, uint32 maxbytes)
 {
     /*
      * Make room for at least 16 chunks so the table would not grow before
      * the browser starts up.
      */
     if (!rt->gcChunkSet.init(16))
@@ -534,16 +541,18 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes
     rt->gcTriggerFactor = uint32(100.0f * GC_HEAP_GROWTH_FACTOR);
 
     /*
      * The assigned value prevents GC from running when GC memory is too low
      * (during JS engine start).
      */
     rt->setGCLastBytes(8192);
 
+    rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_EIGHTH_LIFETIME;
+
     METER(PodZero(&rt->gcStats));
     return true;
 }
 
 namespace js {
 
 template <typename T>
 static inline ConservativeGCTest
@@ -1430,18 +1439,20 @@ js_TraceStackFrame(JSTracer *trc, JSStac
     MarkObject(trc, fp->scopeChain(), "scope chain");
     if (fp->isDummyFrame())
         return;
 
     if (fp->hasCallObj())
         MarkObject(trc, fp->callObj(), "call");
     if (fp->hasArgsObj())
         MarkObject(trc, fp->argsObj(), "arguments");
-    if (fp->isScriptFrame())
+    if (fp->isScriptFrame()) {
         js_TraceScript(trc, fp->script());
+        fp->script()->compartment->active = true;
+    }
 
     MarkValue(trc, fp->returnValue(), "rval");
 }
 
 void
 AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
@@ -2024,35 +2035,51 @@ SweepCompartments(JSContext *cx, JSGCInv
     JSCompartmentCallback callback = rt->compartmentCallback;
     JSCompartment **read = rt->compartments.begin();
     JSCompartment **end = rt->compartments.end();
     JSCompartment **write = read;
 
     /* Delete defaultCompartment only during runtime shutdown */
     rt->defaultCompartment->marked = true;
 
+    /*
+     * Figure out how much JIT code should be released from inactive compartments.
+     * If multiple eighth-lifes have passed, compound the release interval linearly;
+     * if enough time has passed, all inactive JIT code will be released.
+     */
+    uint32 releaseInterval = 0;
+    int64 now = PRMJ_Now();
+    if (now >= rt->gcJitReleaseTime) {
+        releaseInterval = 8;
+        while (now >= rt->gcJitReleaseTime) {
+            if (--releaseInterval == 1)
+                rt->gcJitReleaseTime = now;
+            rt->gcJitReleaseTime += JIT_SCRIPT_EIGHTH_LIFETIME;
+        }
+    }
+
     while (read < end) {
         JSCompartment *compartment = (*read++);
         if (compartment->marked) {
             compartment->marked = false;
             *write++ = compartment;
             /* Remove dead wrappers from the compartment map. */
-            compartment->sweep(cx);
+            compartment->sweep(cx, releaseInterval);
         } else {
             JS_ASSERT(compartment->freeLists.isEmpty());
             if (compartment->arenaListsAreEmpty() || gckind == GC_LAST_CONTEXT) {
                 if (callback)
                     (void) callback(cx, compartment, JSCOMPARTMENT_DESTROY);
                 if (compartment->principals)
                     JSPRINCIPALS_DROP(cx, compartment->principals);
                 delete compartment;
             } else {
                 compartment->marked = false;
                 *write++ = compartment;
-                compartment->sweep(cx);
+                compartment->sweep(cx, releaseInterval);
             }
         }
     }
     rt->compartments.resize(write - rt->compartments.begin());
 }
 
 /*
  * Common cache invalidation and so forth that must be done before GC. Even if
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -535,16 +535,22 @@ mjit::Compiler::finishThisUp(JITScript *
             JS_ASSERT(cics[i].oolCallOffset == offset);
 
             /* Compute the OOL jump offset. */
             offset = stubCode.locationOf(callICs[i].oolJump) -
                      stubCode.locationOf(callICs[i].slowPathStart);
             cics[i].oolJumpOffset = offset;
             JS_ASSERT(cics[i].oolJumpOffset == offset);
 
+            /* Compute the start of the OOL IC call. */
+            offset = stubCode.locationOf(callICs[i].icCall) -
+                     stubCode.locationOf(callICs[i].slowPathStart);
+            cics[i].icCallOffset = offset;
+            JS_ASSERT(cics[i].icCallOffset == offset);
+
             /* Compute the slow join point offset. */
             offset = stubCode.locationOf(callICs[i].slowJoinPoint) -
                      stubCode.locationOf(callICs[i].slowPathStart);
             cics[i].slowJoinOffset = offset;
             JS_ASSERT(cics[i].slowJoinOffset == offset);
 
             /* Compute the join point offset for continuing on the hot path. */
             offset = stubCode.locationOf(callICs[i].hotPathLabel) -
@@ -2697,16 +2703,17 @@ mjit::Compiler::inlineCallHelper(uint32 
 
         /*
          * No-op jump that gets patched by ic::New/Call to the stub generated
          * by generateFullCallStub.
          */
         Jump toPatch = stubcc.masm.jump();
         toPatch.linkTo(stubcc.masm.label(), &stubcc.masm);
         callIC.oolJump = toPatch;
+        callIC.icCall = stubcc.masm.label();
 
         /*
          * At this point the function is definitely scripted, so we try to
          * compile it and patch either funGuard/funJump or oolJump. This code
          * is only executed once.
          */
         callIC.addrLabel1 = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
         void *icFunPtr = JS_FUNC_TO_DATA_PTR(void *, callingNew ? ic::New : ic::Call);
@@ -4778,28 +4785,27 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
     if (!jumpInScript(j, target))
         return false;
 
     if (slow) {
         if (!stubcc.jumpInScript(*slow, target))
             return false;
     }
 #else
-    if (!addTraceHints || target >= PC || JSOp(*target) != JSOP_TRACE
+    if (!addTraceHints || target >= PC ||
+        (JSOp(*target) != JSOP_TRACE && JSOp(*target) != JSOP_NOTRACE)
 #ifdef JS_MONOIC
         || GET_UINT16(target) == BAD_TRACEIC_INDEX
 #endif
         )
     {
         if (!jumpInScript(j, target))
             return false;
-        if (slow) {
-            if (!stubcc.jumpInScript(*slow, target))
-                stubcc.jumpInScript(*slow, target);
-        }
+        if (slow && !stubcc.jumpInScript(*slow, target))
+            return false;
         return true;
     }
 
 # if JS_MONOIC
     TraceGenInfo ic;
 
     ic.initialized = true;
     ic.stubEntry = stubcc.masm.label();
@@ -4811,19 +4817,31 @@ mjit::Compiler::jumpAndTrace(Jump j, jsb
     uint16 index = GET_UINT16(target);
     if (traceICs.length() <= index)
         if (!traceICs.resize(index+1))
             return false;
 # endif
 
     Label traceStart = stubcc.masm.label();
 
-    stubcc.linkExitDirect(j, traceStart);
-    if (slow)
-        slow->linkTo(traceStart, &stubcc.masm);
+    /*
+     * We make a trace IC even if the trace is currently disabled, in case it is
+     * enabled later, but set up the jumps so that InvokeTracer is initially skipped.
+     */
+    if (JSOp(*target) == JSOP_TRACE) {
+        stubcc.linkExitDirect(j, traceStart);
+        if (slow)
+            slow->linkTo(traceStart, &stubcc.masm);
+    } else {
+        if (!jumpInScript(j, target))
+            return false;
+        if (slow && !stubcc.jumpInScript(*slow, target))
+            return false;
+    }
+
 # if JS_MONOIC
     ic.addrLabel = stubcc.masm.moveWithPatch(ImmPtr(NULL), Registers::ArgReg1);
     traceICs[index] = ic;
 # endif
 
     /* Save and restore compiler-tracked PC, so cx->regs is right in InvokeTracer. */
     {
         jsbytecode* pc = PC;
--- a/js/src/methodjit/Compiler.h
+++ b/js/src/methodjit/Compiler.h
@@ -135,16 +135,17 @@ class Compiler : public BaseCompiler
         Call         oolCall;
         Label        joinPoint;
         Label        slowJoinPoint;
         Label        slowPathStart;
         Label        hotPathLabel;
         DataLabelPtr addrLabel1;
         DataLabelPtr addrLabel2;
         Jump         oolJump;
+        Label        icCall;
         RegisterID   funObjReg;
         RegisterID   funPtrReg;
         FrameSize    frameSize;
     };
 
   private:
 #endif
 
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -337,17 +337,17 @@ struct JITScript {
 
     bool isValidCode(void *ptr) {
         char *jitcode = (char *)code.m_code.executableAddress();
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }
 
     void nukeScriptDependentICs();
-    void sweepCallICs();
+    void sweepCallICs(bool purgeAll);
     void purgeMICs();
     void purgePICs();
 };
 
 /*
  * Execute the given mjit code. This is a low-level call and callers must
  * provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
  */
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -1113,58 +1113,89 @@ JITScript::nukeScriptDependentICs()
         repatcher.relink(ic.funJump, ic.slowPathStart);
         ic.releasePool(CallICInfo::Pool_ClosureStub);
         ic.fastGuardedObject = NULL;
         ic.hasJsFunCheck = false;
     }
 }
 
 void
-JITScript::sweepCallICs()
+JITScript::sweepCallICs(bool purgeAll)
 {
-    if (!nCallICs)
-        return;
+    Repatcher repatcher(this);
 
-    Repatcher repatcher(this);
+    /*
+     * If purgeAll is set, purge stubs in the script except those covered by PurgePICs
+     * (which is always called during GC). We want to remove references which can keep
+     * alive pools that we are trying to destroy (see JSCompartment::sweep).
+     */
 
     for (uint32 i = 0; i < nCallICs; i++) {
         ic::CallICInfo &ic = callICs[i];
 
         /*
          * If the object is unreachable, we're guaranteed not to be currently
          * executing a stub generated by a guard on that object. This lets us
          * precisely GC call ICs while keeping the identity guard safe.
          */
-        bool fastFunDead = ic.fastGuardedObject && IsAboutToBeFinalized(ic.fastGuardedObject);
-        bool nativeDead = ic.fastGuardedNative && IsAboutToBeFinalized(ic.fastGuardedNative);
-
-        if (!fastFunDead && !nativeDead)
-            continue;
+        bool fastFunDead = ic.fastGuardedObject &&
+            (purgeAll || IsAboutToBeFinalized(ic.fastGuardedObject));
+        bool nativeDead = ic.fastGuardedNative &&
+            (purgeAll || IsAboutToBeFinalized(ic.fastGuardedNative));
 
         if (fastFunDead) {
             repatcher.repatch(ic.funGuard, NULL);
             ic.releasePool(CallICInfo::Pool_ClosureStub);
             ic.hasJsFunCheck = false;
             ic.fastGuardedObject = NULL;
         }
 
         if (nativeDead) {
             ic.releasePool(CallICInfo::Pool_NativeStub);
             ic.fastGuardedNative = NULL;
         }
 
+        if (purgeAll) {
+            ic.releasePool(CallICInfo::Pool_ScriptStub);
+            JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
+            JSC::CodeLocationLabel icCall = ic.slowPathStart.labelAtOffset(ic.icCallOffset);
+            repatcher.relink(oolJump, icCall);
+        }
+
         repatcher.relink(ic.funJump, ic.slowPathStart);
+        ic.hit = false;
+    }
+
+    if (purgeAll) {
+        /* Purge ICs generating stubs into execPools. */
+        uint32 released = 0;
 
-        ic.hit = false;
+        for (uint32 i = 0; i < nEqualityICs; i++) {
+            ic::EqualityICInfo &ic = equalityICs[i];
+            if (!ic.generated)
+                continue;
+
+            JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic::Equality));
+            repatcher.relink(ic.stubCall, fptr);
+            repatcher.relink(ic.jumpToStub, ic.stubEntry);
+
+            ic.generated = false;
+            released++;
+        }
+
+        JS_ASSERT(released == execPools.length());
+        for (uint32 i = 0; i < released; i++)
+            execPools[i]->release();
+        execPools.clear();
     }
 }
 
 void
-ic::SweepCallICs(JSScript *script)
+ic::SweepCallICs(JSScript *script, bool purgeAll)
 {
     if (script->jitNormal)
-        script->jitNormal->sweepCallICs();
+        script->jitNormal->sweepCallICs(purgeAll);
     if (script->jitCtor)
-        script->jitCtor->sweepCallICs();
+        script->jitCtor->sweepCallICs(purgeAll);
 }
 
 #endif /* JS_MONOIC */
 
--- a/js/src/methodjit/MonoIC.h
+++ b/js/src/methodjit/MonoIC.h
@@ -217,16 +217,19 @@ struct CallICInfo {
     uint32 joinPointOffset : 16;
 
     /* Out of line slow call. */
     uint32 oolCallOffset   : 16;
 
     /* Jump to patch for out-of-line scripted calls. */
     uint32 oolJumpOffset   : 16;
 
+    /* Label for out-of-line call to IC function. */
+    uint32 icCallOffset    : 16;
+
     /* Offset for deep-fun check to rejoin at. */
     uint32 hotPathOffset   : 16;
 
     /* Join point for all slow call paths. */
     uint32 slowJoinOffset  : 16;
 
     RegisterID funObjReg : 5;
     RegisterID funPtrReg : 5;
@@ -257,16 +260,16 @@ struct CallICInfo {
 
 void * JS_FASTCALL New(VMFrame &f, ic::CallICInfo *ic);
 void * JS_FASTCALL Call(VMFrame &f, ic::CallICInfo *ic);
 void JS_FASTCALL NativeNew(VMFrame &f, ic::CallICInfo *ic);
 void JS_FASTCALL NativeCall(VMFrame &f, ic::CallICInfo *ic);
 JSBool JS_FASTCALL SplatApplyArgs(VMFrame &f);
 
 void PurgeMICs(JSContext *cx, JSScript *script);
-void SweepCallICs(JSScript *script);
+void SweepCallICs(JSScript *script, bool purgeAll);
 
 } /* namespace ic */
 } /* namespace mjit */
 } /* namespace js */
 
 #endif /* jsjaeger_mono_ic_h__ */