Backed out changeset 4c157cfe2289 (bug 487845).
author Andreas Gal <gal@mozilla.com>
Tue, 14 Apr 2009 21:28:40 -0700
changeset 24924 45d02441c7ff14ff13292bc72bb9c66326e2b53c
parent 24923 4c3bb14f2bbcc32b9527116cf3b921e0fc1db9a8
child 24925 b481eb1839adc5a8ed4a222a7d4161f0aa09355c
push id 1267
push user rsayre@mozilla.com
push date Sun, 19 Apr 2009 02:47:24 +0000
bugs 487845
milestone 1.9.1b4pre
Backed out changeset 4c157cfe2289 (bug 487845).
js/src/jscntxt.h
js/src/jstracer.cpp
js/src/jstracer.h
js/src/trace-test.js
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -126,44 +126,42 @@ struct JSTraceMonitor {
      * Flag set when running (or recording) JIT-compiled code. This prevents
      * both interpreter activation and last-ditch garbage collection when up
      * against our runtime's memory limits. This flag also suppresses calls to
      * JS_ReportOutOfMemory when failing due to runtime limits.
      *
      * !onTrace && !recorder: not on trace.
      * onTrace && recorder: recording a trace.
      * onTrace && !recorder: executing a trace.
-     * !onTrace && recorder && !prohibitFlush:
+     * !onTrace && recorder && !prohibitRecording:
      *      not on trace; deep-aborted while recording.
-     * !onTrace && recorder && prohibitFlush:
+     * !onTrace && recorder && prohibitRecording:
      *      not on trace; deep-bailed in SpiderMonkey code called from a
      *      trace. JITted code is on the stack.
      */
     JSPackedBool            onTrace;
 
+    /*
+     * Do not start recording after a deep bail.  That would free JITted code
+     * pages that we will later return to.
+     */
+    JSPackedBool            prohibitRecording;
+
     /* See reservedObjects below. */
     JSPackedBool            useReservedObjects;
 
     CLS(nanojit::LirBuffer) lirbuf;
     CLS(nanojit::Fragmento) fragmento;
     CLS(TraceRecorder)      recorder;
     jsval                   *reservedDoublePool;
     jsval                   *reservedDoublePoolPtr;
 
     struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
     struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE];
-
-
-    /*
-     * If nonzero, do not flush the JIT cache after a deep bail.  That would
-     * free JITted code pages that we will later return to.  Instead, set
-     * the needFlush flag so that it can be flushed later.
-     */
-    uintN                   prohibitFlush;
-    JSBool                  needFlush;
+    JSBool needFlush;
 
     /*
      * reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects.
      * The JIT uses this to ensure that leaving a trace tree can't fail.
      */
     JSObject                *reservedObjects;
 
     /* Fragmento for the regular expression compiler. This is logically
--- a/js/src/jstracer.cpp
+++ b/js/src/jstracer.cpp
@@ -2548,17 +2548,16 @@ checktype_fail_2:
 
     return false;
 }
 
 /* Compile the current fragment. */
 JS_REQUIRES_STACK void
 TraceRecorder::compile(JSTraceMonitor* tm)
 {
-    JS_ASSERT(!tm->needFlush);
     Fragmento* fragmento = tm->fragmento;
     if (treeInfo->maxNativeStackSlots >= MAX_NATIVE_STACK_SLOTS) {
         debug_only_v(printf("Blacklist: excessive stack use.\n"));
         js_Blacklist(fragment->root);
         return;
     }
     if (anchor && anchor->exitType != CASE_EXIT)
         ++treeInfo->branchCount;
@@ -2625,18 +2624,16 @@ TraceRecorder::closeLoop(JSTraceMonitor*
 {
     /*
      * We should have arrived back at the loop header, and hence we don't want to be in an imacro
      * here and the opcode should be either JSOP_LOOP, or in case this loop was blacklisted in the
      * meantime JSOP_NOP.
      */
     JS_ASSERT((*cx->fp->regs->pc == JSOP_LOOP || *cx->fp->regs->pc == JSOP_NOP) && !cx->fp->imacpc);
 
-    JS_ASSERT(!tm->needFlush);
-
     bool stable;
     LIns* exitIns;
     Fragment* peer;
     VMSideExit* exit;
     VMFragment* peer_root;
     Fragmento* fragmento = tm->fragmento;
 
     exitIns = snapshot(UNSTABLE_LOOP_EXIT);
@@ -3097,92 +3094,48 @@ nanojit::LirNameMap::formatGuard(LIns *i
 #endif
 
 void
 nanojit::Fragment::onDestroy()
 {
     delete (TreeInfo *)vmprivate;
 }
 
-static JS_REQUIRES_STACK void
-FlushJITCache(JSContext* cx)
-{
-    if (!TRACING_ENABLED(cx))
-        return;
-    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-    debug_only_v(printf("Flushing cache.\n");)
-    if (tm->recorder)
-        js_AbortRecording(cx, "flush cache");
-    TraceRecorder* tr;
-    while ((tr = tm->abortStack) != NULL) {
-        tr->removeFragmentoReferences();
-        tr->deepAbort();
-        tr->popAbortStack();
-    }
-    Fragmento* fragmento = tm->fragmento;
-    if (fragmento) {
-        if (tm->prohibitFlush) {
-            debug_only_v(printf("Deferring fragmento flush due to deep bail.\n");)
-            tm->needFlush = JS_TRUE;
-            return;
-        }
-
-        fragmento->clearFrags();
-#ifdef DEBUG
-        JS_ASSERT(fragmento->labels);
-        fragmento->labels->clear();
-#endif
-        tm->lirbuf->rewind();
-        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
-            VMFragment* f = tm->vmfragments[i];
-            while (f) {
-                VMFragment* next = f->next;
-                fragmento->clearFragment(f);
-                f = next;
-            }
-            tm->vmfragments[i] = NULL;
-        }
-        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
-            tm->globalStates[i].globalShape = -1;
-            tm->globalStates[i].globalSlots->clear();
-        }
-    }
-    tm->needFlush = JS_FALSE;
-}
-
 static JS_REQUIRES_STACK bool
 js_DeleteRecorder(JSContext* cx)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
     /* Aborting and completing a trace end up here. */
     delete tm->recorder;
     tm->recorder = NULL;
 
     /*
      * If we ran out of memory, flush the code cache.
      */
     if (JS_TRACE_MONITOR(cx).fragmento->assm()->error() == OutOMem
         || js_OverfullFragmento(tm->fragmento, MAX_MEM_IN_MAIN_FRAGMENTO)) {
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         return false;
     }
 
     return true;
 }
 
 /**
  * Checks whether the shape of the global object has changed.
  */
 static inline bool
 js_CheckGlobalObjectShape(JSContext* cx, JSTraceMonitor* tm, JSObject* globalObj,
                           uint32 *shape=NULL, SlotList** slots=NULL)
 {
-    if (tm->needFlush)
+    if (tm->needFlush) {
+        tm->needFlush = JS_FALSE;
         return false;
+    }
 
     uint32 globalShape = OBJ_SHAPE(globalObj);
 
     if (tm->recorder) {
         VMFragment* root = (VMFragment*)tm->recorder->getFragment()->root;
         TreeInfo* ti = tm->recorder->getTreeInfo();
         /* Check the global shape matches the recorder's treeinfo's shape. */
         if (globalObj != root->globalObj || globalShape != root->globalShape) {
@@ -3229,17 +3182,17 @@ js_CheckGlobalObjectShape(JSContext* cx,
 static JS_REQUIRES_STACK bool
 js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
                  unsigned stackSlots, unsigned ngslots, uint8* typeMap,
                  VMSideExit* expectedInnerExit, jsbytecode* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(f->root != f || !cx->fp->imacpc);
 
-    if (JS_TRACE_MONITOR(cx).needFlush)
+    if (JS_TRACE_MONITOR(cx).prohibitRecording)
         return false;
 
     /* start recording if no exception during construction */
     tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
                                            stackSlots, ngslots, typeMap,
                                            expectedInnerExit, outer);
 
     if (cx->throwing) {
@@ -3456,71 +3409,68 @@ js_SynthesizeFrame(JSContext* cx, const 
 JS_REQUIRES_STACK bool
 js_RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
               JSObject* globalObj, uint32 globalShape, SlotList* globalSlots)
 {
     JS_ASSERT(f->root == f);
 
     /* Make sure the global type map didn't change on us. */
     if (!js_CheckGlobalObjectShape(cx, tm, globalObj)) {
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         return false;
     }
 
     AUDIT(recorderStarted);
 
     /* Try to find an unused peer fragment, or allocate a new one. */
     while (f->code() && f->peer)
         f = f->peer;
     if (f->code())
         f = getAnchor(&JS_TRACE_MONITOR(cx), f->root->ip, globalObj, globalShape);
 
     if (!f) {
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         return false;
     }
 
     f->root = f;
     f->lirbuf = tm->lirbuf;
 
     if (f->lirbuf->outOMem() ||
         js_OverfullFragmento(tm->fragmento, MAX_MEM_IN_MAIN_FRAGMENTO)) {
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         debug_only_v(printf("Out of memory recording new tree, flushing cache.\n");)
         return false;
     }
 
     JS_ASSERT(!f->code() && !f->vmprivate);
 
     /* setup the VM-private treeInfo structure for this fragment */
     TreeInfo* ti = new (&gc) TreeInfo(f, globalSlots);
 
     /* capture the coerced type of each active slot in the type map */
     ti->typeMap.captureTypes(cx, *globalSlots, 0/*callDepth*/);
     ti->nStackTypes = ti->typeMap.length() - globalSlots->length();
 
-#ifdef DEBUG
-    /*
-     * Check for duplicate entry type maps.  This is always wrong and hints at
-     * trace explosion since we are trying to stabilize something without
-     * properly connecting peer edges.
-     */
+    /* Check for duplicate entry type maps.  This is always wrong and hints at trace explosion
+       since we are trying to stabilize something without properly connecting peer edges. */
+    #ifdef DEBUG
     TreeInfo* ti_other;
     for (Fragment* peer = getLoop(tm, f->root->ip, globalObj, globalShape); peer != NULL;
          peer = peer->peer) {
         if (!peer->code() || peer == f)
             continue;
         ti_other = (TreeInfo*)peer->vmprivate;
         JS_ASSERT(ti_other);
         JS_ASSERT(!ti->typeMap.matches(ti_other->typeMap));
     }
     ti->treeFileName = cx->fp->script->filename;
     ti->treeLineNumber = js_FramePCToLineNumber(cx, cx->fp);
     ti->treePCOffset = FramePCOffset(cx->fp);
-#endif
+    #endif
 
     /* determine the native frame layout at the entry point */
     unsigned entryNativeStackSlots = ti->nStackTypes;
     JS_ASSERT(entryNativeStackSlots == js_NativeStackSlots(cx, 0/*callDepth*/));
     ti->nativeStackBase = (entryNativeStackSlots -
             (cx->fp->regs->sp - StackBase(cx->fp))) * sizeof(double);
     ti->maxNativeStackSlots = entryNativeStackSlots;
     ti->maxCallDepth = 0;
@@ -3546,17 +3496,16 @@ isSlotUndemotable(JSContext* cx, TreeInf
     uint16* gslots = ti->globalSlots->data();
     return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
 
 JS_REQUIRES_STACK static bool
 js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer)
 {
     JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-    JS_ASSERT(!tm->needFlush);
     VMFragment* from = (VMFragment*)exit->from->root;
     TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
 
     JS_ASSERT(exit->from->root->code());
 
     /* Make sure any doubles are not accidentally undemoted */
     uint8* m = getStackTypeMap(exit);
     for (unsigned i = 0; i < exit->numStackSlots; i++) {
@@ -3645,18 +3594,16 @@ js_AttemptToStabilizeTree(JSContext* cx,
     VMFragment* root = (VMFragment*)from->root;
     return js_RecordTree(cx, tm, from->first, outer, root->globalObj, root->globalShape,
                          from_ti->globalSlots);
 }
 
 static JS_REQUIRES_STACK bool
 js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer)
 {
-    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-    JS_ASSERT(!tm->needFlush);
     Fragment* f = anchor->from->root;
     JS_ASSERT(f->vmprivate);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
 
     /* Don't grow trees above a certain size to avoid code explosion due to tail duplication. */
     if (ti->branchCount >= MAX_BRANCHES)
         return false;
 
@@ -3792,17 +3739,17 @@ js_RecordLoopEdge(JSContext* cx, TraceRe
 
         jsbytecode* outer = (jsbytecode*)tm->recorder->getFragment()->root->ip;
         js_AbortRecording(cx, "No compatible inner tree");
 
         f = empty;
         if (!f) {
             f = getAnchor(tm, cx->fp->regs->pc, globalObj, globalShape);
             if (!f) {
-                FlushJITCache(cx);
+                js_FlushJITCache(cx);
                 return false;
             }
         }
         return js_RecordTree(cx, tm, f, outer, globalObj, globalShape, globalSlots);
     }
 
     r->prepareTreeCall(f);
     VMSideExit* innermostNestedGuard = NULL;
@@ -4119,20 +4066,16 @@ js_ExecuteTree(JSContext* cx, Fragment* 
     SIMULATE_FASTCALL(rec, state, NULL, u.func);
 #else
     rec = u.func(state, NULL);
 #endif
     VMSideExit* lr = (VMSideExit*)rec->exit;
 
     AUDIT(traceTriggered);
 
-#ifdef DEBUG
-    cx->interpState = NULL;
-#endif
-
     JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
     tm->onTrace = false;
     LeaveTree(*state, lr);
     return state->innermost;
 }
 
 static JS_FORCES_STACK void
 LeaveTree(InterpState& state, VMSideExit* lr)
@@ -4216,19 +4159,17 @@ LeaveTree(InterpState& state, VMSideExit
              * but we have it now. Box it.
              */
             uint8* typeMap = getStackTypeMap(innermost);
             NativeToValue(cx,
                           cx->fp->regs->sp[-1],
                           typeMap[innermost->numStackSlots - 1],
                           (jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
         }
-        JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
-        if (tm->prohibitFlush && --tm->prohibitFlush == 0 && tm->needFlush)
-            FlushJITCache(cx);
+        JS_TRACE_MONITOR(cx).prohibitRecording = false;
         return;
     }
 
     JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
     while (callstack < rp) {
         /* Synthesize a stack frame and write out the values in it using the type map pointer
            on the native call stack. */
         js_SynthesizeFrame(cx, **callstack);
@@ -4392,30 +4333,30 @@ js_MonitorLoopEdge(JSContext* cx, uintN&
     }
 
     /* Make sure the shape of the global object still matches (this might flush the JIT cache). */
     JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
     uint32 globalShape = -1;
     SlotList* globalSlots = NULL;
 
     if (!js_CheckGlobalObjectShape(cx, tm, globalObj, &globalShape, &globalSlots))
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
 
     /* Do not enter the JIT code with a pending operation callback. */
     if (cx->operationCallbackFlag)
         return false;
     
     jsbytecode* pc = cx->fp->regs->pc;
 
     Fragment* f = getLoop(tm, pc, globalObj, globalShape);
     if (!f)
         f = getAnchor(tm, pc, globalObj, globalShape);
 
     if (!f) {
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         return false;
     }
 
     /* If we have no code in the anchor and no peers, we definitively won't be able to
        activate any trees so, start compiling. */
     if (!f->code() && !f->peer) {
     record:
         if (++f->hits() < HOTLOOP)
@@ -4520,17 +4461,17 @@ TraceRecorder::monitorRecording(JSContex
         js_AbortRecording(cx, "error during recording");
         return JSMRS_STOP;
     }
 
     if (tr->lirbuf->outOMem() || 
         js_OverfullFragmento(JS_TRACE_MONITOR(cx).fragmento, 
                              MAX_MEM_IN_MAIN_FRAGMENTO)) {
         js_AbortRecording(cx, "no more LIR memory");
-        FlushJITCache(cx);
+        js_FlushJITCache(cx);
         return JSMRS_STOP;
     }
 
     if (flag)
         return JSMRS_CONTINUE;
 
     goto stop_recording;
 
@@ -4948,27 +4889,65 @@ js_OverfullFragmento(Fragmento *frago, s
      * and only report outOMem to us when there is literally no OS memory
      * left. Merely purging our cache when we hit our highwater mark is
      * handled by the (few) callers of this function.
      *
      */
     return (frago->_stats.pages > (maxsz >> NJ_LOG2_PAGE_SIZE));
 }
 
+JS_REQUIRES_STACK void
+js_FlushJITCache(JSContext* cx)
+{
+    if (!TRACING_ENABLED(cx))
+        return;
+    debug_only_v(printf("Flushing cache.\n");)
+    JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
+    if (tm->recorder)
+        js_AbortRecording(cx, "flush cache");
+    TraceRecorder* tr;
+    while ((tr = tm->abortStack) != NULL) {
+        tr->removeFragmentoReferences();
+        tr->deepAbort();
+        tr->popAbortStack();
+    }
+    Fragmento* fragmento = tm->fragmento;
+    if (fragmento) {
+        fragmento->clearFrags();
+#ifdef DEBUG
+        JS_ASSERT(fragmento->labels);
+        fragmento->labels->clear();
+#endif
+        tm->lirbuf->rewind();
+        for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
+            VMFragment* f = tm->vmfragments[i];
+            while(f) {
+                VMFragment* next = f->next;
+                fragmento->clearFragment(f);
+                f = next;
+            }
+            tm->vmfragments[i] = NULL;
+        }
+        for (size_t i = 0; i < MONITOR_N_GLOBAL_STATES; ++i) {
+            tm->globalStates[i].globalShape = -1;
+            tm->globalStates[i].globalSlots->clear();
+        }
+    }
+}
+
 JS_FORCES_STACK JS_FRIEND_API(void)
 js_DeepBail(JSContext *cx)
 {
     JS_ASSERT(JS_ON_TRACE(cx));
 
     /* It's a bug if a non-FAIL_STATUS builtin gets here. */
     JS_ASSERT(cx->bailExit);
 
     JS_TRACE_MONITOR(cx).onTrace = false;
-    JS_TRACE_MONITOR(cx).prohibitFlush++;
-    debug_only_v(printf("Deep bail.\n");)
+    JS_TRACE_MONITOR(cx).prohibitRecording = true;
     LeaveTree(*cx->interpState, cx->bailExit);
     cx->bailExit = NULL;
     cx->interpState->builtinStatus |= JSBUILTIN_BAILED;
 }
 
 JS_REQUIRES_STACK jsval&
 TraceRecorder::argval(unsigned n) const
 {
--- a/js/src/jstracer.h
+++ b/js/src/jstracer.h
@@ -658,16 +658,19 @@ js_FinishJIT(JSTraceMonitor *tm);
 
 extern void
 js_PurgeScriptFragments(JSContext* cx, JSScript* script);
 
 extern bool
 js_OverfullFragmento(nanojit::Fragmento *frago, size_t maxsz);
 
 extern void
+js_FlushJITCache(JSContext* cx);
+
+extern void
 js_PurgeJITOracle();
 
 extern JSObject *
 js_GetBuiltinFunction(JSContext *cx, uintN index);
 
 #else  /* !JS_TRACER */
 
 #define TRACE_0(x)              ((void)0)
--- a/js/src/trace-test.js
+++ b/js/src/trace-test.js
@@ -4951,34 +4951,16 @@ function testDeepPropertyShadowing()
     var tree = {__proto__: {__proto__: {parent: null}}};
     h(tree);
     h(tree);
     tree.parent = {};
     assertEq(h(tree), 2);
 }
 test(testDeepPropertyShadowing);
 
-// Complicated whitebox test for bug 487845.
-function testGlobalShapeChangeAfterDeepBail() {
-    function f(name) {
-        this[name] = 1;  // may change global shape
-        for (var i = 0; i < 4; i++)
-            ; // MonitorLoopEdge eventually triggers assertion
-    }
-
-    // When i==3, deep-bail, then change global shape enough times to exhaust
-    // the array of GlobalStates.
-    var arr = [[], [], [], ["bug0", "bug1", "bug2", "bug3", "bug4"]];
-    for (var i = 0; i < arr.length; i++)
-        arr[i].forEach(f);
-}
-test(testGlobalShapeChangeAfterDeepBail);
-for (let i = 0; i < 5; i++)
-    delete this["bug" + i];
-
 /*****************************************************************************
  *                                                                           *
  *  _____ _   _  _____ ______ _____ _______                                  *
  * |_   _| \ | |/ ____|  ____|  __ \__   __|                                 *
  *   | | |  \| | (___ | |__  | |__) | | |                                    *
  *   | | | . ` |\___ \|  __| |  _  /  | |                                    *
  *  _| |_| |\  |____) | |____| | \ \  | |                                    *
  * |_____|_| \_|_____/|______|_|  \_\ |_|                                    *