Try to release native call stubs on GC, bug 688292. r=dvander
author     Brian Hackett <bhackett1024@gmail.com>
date       Thu, 22 Sep 2011 07:26:14 -0700
changeset  77326 502f2bd2c229164699421ad823d27e8d05b2426f
parent     77325 4da52874a049b2f20cbf079ad9456869c6404baa
child      77327 d300ffff0be707803e580744824c78348f0fcc96
push id    3
push user  felipc@gmail.com
push date  Fri, 30 Sep 2011 20:09:13 +0000
reviewers  dvander
bugs       688292
milestone  9.0a1
js/src/jscompartment.cpp
js/src/methodjit/MethodJIT.cpp
js/src/methodjit/MethodJIT.h
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -536,27 +536,44 @@ JSCompartment::sweep(JSContext *cx, uint
 
     sweepBreakpoints(cx);
 
 #ifdef JS_TRACER
     if (hasTraceMonitor())
         traceMonitor()->sweep(cx);
 #endif
 
-# if defined JS_METHODJIT && defined JS_POLYIC
+#ifdef JS_METHODJIT
     /*
-     * Purge all PICs in the compartment. These can reference type data and
-     * need to know which types are pending collection.
+     * Purge PICs in the compartment, along with native call stubs if the
+     * compartment has no such stubs on the stack. PICs can reference shapes
+     * and type data, and native call stubs are disassociated from the PIC or
+     * MIC they were generated for.
      */
+    bool canPurgeNativeCalls = true;
+    VMFrame *f = hasJaegerCompartment() ? jaegerCompartment()->activeFrame() : NULL;
+    for (; f; f = f->previous) {
+        if (f->stubRejoin)
+            canPurgeNativeCalls = false;
+    }
     for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
         JSScript *script = i.get<JSScript>();
-        if (script->hasJITCode())
+        if (script->hasJITCode()) {
+#ifdef JS_POLYIC
             mjit::ic::PurgePICs(cx, script);
+#endif
+            if (canPurgeNativeCalls) {
+                if (script->jitNormal)
+                    script->jitNormal->purgeNativeCallStubs();
+                if (script->jitCtor)
+                    script->jitCtor->purgeNativeCallStubs();
+            }
+        }
     }
-# endif
+#endif
 
     bool discardScripts = !active && (releaseInterval != 0 || hasDebugModeCodeToDrop);
 
 #if defined JS_METHODJIT && defined JS_MONOIC
 
     /*
      * The release interval is the frequency with which we should try to destroy
      * executable pools by releasing all JIT code in them, zero to never destroy pools.
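Note: the purge gate added above is a conservative stack scan. If any active
VMFrame in the compartment still has a pending stubRejoin, a native call stub
may be executing, so none of them can be freed. A minimal standalone sketch of
the pattern, with Frame as a hypothetical stand-in for the real VMFrame:

    // Hypothetical stand-in for VMFrame; 'stubRejoin' is non-null while a
    // native call stub generated for this frame is still on the stack.
    struct Frame {
        Frame *previous;
        void *stubRejoin;
    };

    // Stubs may be released only if no active frame could return into one.
    static bool CanPurgeNativeCallStubs(Frame *top)
    {
        for (Frame *f = top; f; f = f->previous) {
            if (f->stubRejoin)
                return false;
        }
        return true;
    }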
--- a/js/src/methodjit/MethodJIT.cpp
+++ b/js/src/methodjit/MethodJIT.cpp
@@ -1080,16 +1080,27 @@ JITScript::polyICSectionsLimit() const
 #endif  // JS_POLYIC
 
 template <typename T>
 static inline void Destroy(T &t)
 {
     t.~T();
 }
 
+void
+mjit::JITScript::purgeNativeCallStubs()
+{
+    for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
+        JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
+        if (pool)
+            pool->release();
+    }
+    nativeCallStubs.clear();
+}
+
 mjit::JITScript::~JITScript()
 {
     code.release();
 
     if (pcLengths)
         Foreground::free_(pcLengths);
 
 #if defined JS_POLYIC
@@ -1110,21 +1121,17 @@ mjit::JITScript::~JITScript()
 
     for (JSC::ExecutablePool **pExecPool = execPools.begin();
          pExecPool != execPools.end();
          ++pExecPool)
     {
         (*pExecPool)->release();
     }
 
-    for (unsigned i = 0; i < nativeCallStubs.length(); i++) {
-        JSC::ExecutablePool *pool = nativeCallStubs[i].pool;
-        if (pool)
-            pool->release();
-    }
+    purgeNativeCallStubs();
 
     ic::CallICInfo *callICs_ = callICs();
     for (uint32 i = 0; i < nCallICs; i++) {
         callICs_[i].releasePools();
         if (callICs_[i].fastGuardedObject)
             callICs_[i].purgeGuardedObject();
     }
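The refactoring above lets the destructor and the GC sweep share one
purgeNativeCallStubs() path. It relies on the executable pools being reference
counted: release() only returns the underlying memory once the last holder
lets go. A rough sketch of that idiom, with RefCountedPool as a hypothetical
stand-in rather than the real JSC::ExecutablePool:

    // Hypothetical refcounted pool illustrating the release() idiom.
    struct RefCountedPool {
        unsigned refCount;
        RefCountedPool() : refCount(1) {}
        void addRef() { refCount++; }
        void release() {
            // The memory is freed only with the last reference, so purging
            // a stub's pool is safe even if another holder remains.
            if (--refCount == 0)
                delete this;
        }
    };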
 
--- a/js/src/methodjit/MethodJIT.h
+++ b/js/src/methodjit/MethodJIT.h
@@ -664,16 +664,17 @@ struct JITScript {
         char *jcheck = (char *)ptr;
         return jcheck >= jitcode && jcheck < jitcode + code.m_size;
     }
 
     void nukeScriptDependentICs();
     void sweepCallICs(JSContext *cx, bool purgeAll);
     void purgeMICs();
     void purgePICs();
+    void purgeNativeCallStubs();
 
     void trace(JSTracer *trc);
 
     /* |usf| can be NULL here, in which case the fallback size computation will be used. */
     size_t scriptDataSize(JSUsableSizeFun usf);
 
     jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
 
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -589,35 +589,32 @@ mjit::NativeStubEpilogue(VMFrame &f, Ass
     masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
 
     Jump hasException = masm.branchTest32(Assembler::Zero, Registers::ReturnReg,
                                           Registers::ReturnReg);
 
     Address resultAddress(JSFrameReg, vpOffset);
 
     Vector<Jump> mismatches(f.cx);
-    if (f.cx->typeInferenceEnabled()) {
-        if (!typeReg.isSet()) {
-            /*
-             * Test the result of this native against the known result type set
-             * for the call. We don't assume knowledge about the types that
-             * natives can return, except when generating specialized paths in
-             * FastBuiltins.
-             */
-            types::TypeSet *types = f.script()->analysis()->bytecodeTypes(f.pc());
-            if (!masm.generateTypeCheck(f.cx, resultAddress, types, &mismatches))
-                THROWV(false);
-        }
+    if (f.cx->typeInferenceEnabled() && !typeReg.isSet()) {
+        /*
+         * Test the result of this native against the known result type set for
+         * the call. We don't assume knowledge about the types that natives can
+         * return, except when generating specialized paths in FastBuiltins.
+         */
+        types::TypeSet *types = f.script()->analysis()->bytecodeTypes(f.pc());
+        if (!masm.generateTypeCheck(f.cx, resultAddress, types, &mismatches))
+            THROWV(false);
+    }
 
-        /*
-         * Can no longer trigger recompilation in this stub, clear the stub
-         * rejoin on the VMFrame.
-         */
-        masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
-    }
+    /*
+     * Can no longer trigger recompilation in this stub, clear the stub rejoin
+     * on the VMFrame.
+     */
+    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
 
     if (typeReg.isSet())
         masm.loadValueAsComponents(resultAddress, typeReg.reg(), dataReg.reg());
 
     /*
     * The final jump is an indirect jump on x64, so that we'll always be able
      * to repatch it to the interpoline later.
      */
@@ -637,18 +634,17 @@ mjit::NativeStubEpilogue(VMFrame &f, Ass
         masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::TypeBarrierReturn),
                             f.regs.pc, NULL, initialFrameDepth);
         masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
         masm.jump().linkTo(finished, &masm);
     }
 
     /* Move JaegerThrowpoline into register for very far jump on x64. */
     hasException.linkTo(masm.label(), &masm);
-    if (f.cx->typeInferenceEnabled())
-        masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
+    masm.storePtr(ImmPtr(NULL), FrameAddress(offsetof(VMFrame, stubRejoin)));
     masm.throwInJIT();
 
     *result = done;
     return true;
 }
 
 /*
  * Calls have an inline path and an out-of-line path. The inline path is used
@@ -747,25 +743,23 @@ class CallCompiler : public BaseCompiler
          * nmap anyway.
          */
         size_t offset = callingNew
                         ? offsetof(JSScript, jitArityCheckCtor)
                         : offsetof(JSScript, jitArityCheckNormal);
         masm.loadPtr(Address(t0, offset), t0);
         Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));
 
-        if (cx->typeInferenceEnabled()) {
-            /*
-             * Write the rejoin state to indicate this is a compilation call
-             * made from an IC (the recompiler cannot detect calls made from
-             * ICs automatically).
-             */
-            masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
-                          FrameAddress(offsetof(VMFrame, stubRejoin)));
-        }
+        /*
+         * Write the rejoin state to indicate this is a compilation call made
+         * from an IC (the recompiler cannot detect calls made from ICs
+         * automatically).
+         */
+        masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), false)),
+                      FrameAddress(offsetof(VMFrame, stubRejoin)));
 
         masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());
 
         /* Try and compile. On success we get back the nmap pointer. */
         void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
         DataLabelPtr inlined;
         if (ic.frameSize.isStatic()) {
             masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
@@ -964,26 +958,24 @@ class CallCompiler : public BaseCompiler
         }
 
         /* Generate fast-path for calling this native. */
         Assembler masm;
 
         /* Guard on the function object identity, for now. */
         Jump funGuard = masm.branchPtr(Assembler::NotEqual, ic.funObjReg, ImmPtr(obj));
 
-        if (cx->typeInferenceEnabled()) {
-            /*
-             * Write the rejoin state for the recompiler to use if this call
-             * triggers recompilation. Natives use a different stack address to
-             * store the return value than FASTCALLs, and without additional
-             * information we cannot tell which one is active on a VMFrame.
-             */
-            masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), true)),
-                          FrameAddress(offsetof(VMFrame, stubRejoin)));
-        }
+        /*
+         * Write the rejoin state for the recompiler to use if this call
+         * triggers recompilation. Natives use a different stack address to
+         * store the return value than FASTCALLs, and without additional
+         * information we cannot tell which one is active on a VMFrame.
+         */
+        masm.storePtr(ImmPtr((void *) ic.frameSize.rejoinState(f.pc(), true)),
+                      FrameAddress(offsetof(VMFrame, stubRejoin)));
 
         /* N.B. After this call, the frame will have a dynamic frame size. */
         if (ic.frameSize.isDynamic()) {
             masm.bumpStubCounter(f.script(), f.pc(), Registers::tempCallReg());
             masm.fallibleVMCall(cx->typeInferenceEnabled(),
                                 JS_FUNC_TO_DATA_PTR(void *, ic::SplatApplyArgs),
                                 f.regs.pc, NULL, initialFrameDepth);
         }
@@ -1460,39 +1452,39 @@ JITScript::sweepCallICs(JSContext *cx, b
 
         /*
          * If the object is unreachable, we're guaranteed not to be currently
          * executing a stub generated by a guard on that object. This lets us
          * precisely GC call ICs while keeping the identity guard safe.
          */
         bool fastFunDead = ic.fastGuardedObject &&
             (purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedObject));
-        bool nativeDead = ic.fastGuardedNative &&
-            (purgeAll || IsAboutToBeFinalized(cx, ic.fastGuardedNative));
+        bool hasNative = ic.fastGuardedNative != NULL;
 
         /*
          * There are three conditions where we need to relink:
          * (1) purgeAll is true.
-         * (2) The native is dead, since it always has a stub.
+         * (2) There is a native stub. Natives use NativeCallStubs, all of
+         *     which are released if the compartment has no code on the stack.
          * (3) The fastFun is dead *and* there is a closure stub.
          *
          * Note although both objects can be non-NULL, there can only be one
          * of [closure, native] stub per call IC.
          */
-        if (purgeAll || nativeDead || (fastFunDead && ic.hasJsFunCheck)) {
+        if (purgeAll || hasNative || (fastFunDead && ic.hasJsFunCheck)) {
             repatcher.relink(ic.funJump, ic.slowPathStart);
             ic.hit = false;
         }
 
         if (fastFunDead) {
             repatcher.repatch(ic.funGuard, NULL);
             ic.purgeGuardedObject();
         }
 
-        if (nativeDead)
+        if (hasNative)
             ic.fastGuardedNative = NULL;
 
         if (purgeAll) {
             ic.releasePool(CallICInfo::Pool_ScriptStub);
             JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
             JSC::CodeLocationLabel icCall = ic.slowPathStart.labelAtOffset(ic.icCallOffset);
             repatcher.relink(oolJump, icCall);
         }
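The sweep change above reduces to a single predicate: a call IC's funJump must
be relinked to the slow path whenever it might point into freed code. Because
native call stubs can now be released by purgeNativeCallStubs() during the
same GC, the mere presence of a native stub forces a relink; whether
fastGuardedNative is dead no longer matters. A hypothetical condensation of
that decision:

    // Hypothetical condensation of the relink decision in sweepCallICs.
    static bool MustRelinkToSlowPath(bool purgeAll, bool hasNative,
                                     bool fastFunDead, bool hasJsFunCheck)
    {
        // A native stub's backing pool may already have been released, so it
        // must always be torn down; closure stubs die only with their object.
        return purgeAll || hasNative || (fastFunDead && hasJsFunCheck);
    }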
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -1162,20 +1162,18 @@ class GetPropCompiler : public PICStubCo
     {
         /*
          * Getter hook needs to be called from the stub. The state is fully
          * synced and no registers are live except the result registers.
          */
         JS_ASSERT(pic.canCallHook);
         PropertyOp getter = shape->getterOp();
 
-        if (cx->typeInferenceEnabled()) {
-            masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER),
-                          FrameAddress(offsetof(VMFrame, stubRejoin)));
-        }
+        masm.storePtr(ImmPtr((void *) REJOIN_NATIVE_GETTER),
+                      FrameAddress(offsetof(VMFrame, stubRejoin)));
 
         Registers tempRegs = Registers::tempCallRegMask();
         if (tempRegs.hasReg(Registers::ClobberInCall))
             tempRegs.takeReg(Registers::ClobberInCall);
 
         /* Get a register to hold obj while we set up the rest of the frame. */
         RegisterID holdObjReg = pic.objReg;
         if (tempRegs.hasReg(pic.objReg)) {