Keep the interpreter stack synced for GC scanning, bug 781657. r=billm
author Brian Hackett <bhackett1024@gmail.com>
Tue, 23 Oct 2012 09:20:56 -0700
changeset 111294 9a5191dfae8d0f5577b248ca79fbdd46e49473f8
parent 111293 ca391c7bceb811856bf3014049775722084033e9
child 111295 15b7770dcfcc671a80af452df736813eb54c56c5
push id 93
push user nmatsakis@mozilla.com
push date Wed, 31 Oct 2012 21:26:57 +0000
reviewers billm
bugs 781657
milestone 19.0a1
js/src/jsgc.cpp
js/src/methodjit/Compiler.cpp
js/src/methodjit/FrameState-inl.h
js/src/methodjit/FrameState.cpp
js/src/vm/Stack.cpp
js/src/vm/Stack.h
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2598,17 +2598,17 @@ MarkRuntime(JSTracer *trc, bool useSaved
     }
 
 #ifdef JS_METHODJIT
     /* We need to expand inline frames before stack scanning. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         mjit::ExpandInlineFrames(c);
 #endif
 
-    rt->stackSpace.markAndClobber(trc);
+    rt->stackSpace.mark(trc);
     rt->debugScopes->mark(trc);
 
 #ifdef JS_ION
     ion::MarkIonActivations(rt, trc);
 #endif
 
     for (CompartmentsIter c(rt); !c.done(); c.next())
         c->mark(trc);
@@ -3846,23 +3846,16 @@ BeginSweepPhase(JSRuntime *rt)
     Debugger::sweepAll(&fop);
 
     PartitionCompartments partition(rt);
     partition.partition();
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
 
-        /*
-         * Eliminate any garbage values from the VM stack that may have been
-         * left by the JIT in between incremental GC slices. We need to do this
-         * before discarding analysis data during JSCompartment::sweep.
-         */
-        rt->stackSpace.markAndClobber(NULL);
-
         bool releaseTypes = ReleaseObservedTypes(rt);
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             gcstats::AutoSCC scc(rt->gcStats, partition.getSCC(c));
             if (c->isCollecting())
                 c->sweep(&fop, releaseTypes);
             else
                 c->sweepCrossCompartmentWrappers();
         }
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -1287,27 +1287,17 @@ mjit::Compiler::ensureDoubleArguments()
 }
 
 void
 mjit::Compiler::markUndefinedLocal(uint32_t offset, uint32_t i)
 {
     uint32_t depth = ssa.getFrame(a->inlineIndex).depth;
     uint32_t slot = LocalSlot(script_, i);
     Address local(JSFrameReg, sizeof(StackFrame) + (depth + i) * sizeof(Value));
-    if (!cx->typeInferenceEnabled() || !analysis->trackSlot(slot)) {
-        masm.storeValue(UndefinedValue(), local);
-    } else {
-        Lifetime *lifetime = analysis->liveness(slot).live(offset);
-        if (lifetime)
-            masm.storeValue(UndefinedValue(), local);
-#ifdef DEBUG
-        else
-            masm.storeValue(ObjectValueCrashOnTouch(), local);
-#endif
-    }
+    masm.storeValue(UndefinedValue(), local);
 }
 
 void
 mjit::Compiler::markUndefinedLocals()
 {
     /*
      * Set locals to undefined. Skip locals which aren't closed and are known
      * to be defined before used,
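
markUndefinedLocal now writes a real UndefinedValue into every fixed local unconditionally: with the stack scanned exactly at any GC point, even slots the liveness analysis considers dead must hold well-formed Values, so the old path that skipped tracked live slots (and poisoned dead ones with ObjectValueCrashOnTouch in DEBUG builds) is gone. A minimal self-contained sketch of the resulting invariant, using stand-in types rather than the real Value and MacroAssembler (the tag constant below is illustrative, not the engine's encoding):

    #include <cstdint>
    #include <vector>

    // Stand-in for a boxed Value; the tag constant is illustrative only.
    struct Value { uint64_t bits; };
    static Value UndefinedValue() { return Value{0xFFF9ULL << 48}; }

    // What the compiled prologue now guarantees: every fixed local holds a
    // well-formed Value before the first GC point, whether or not it is live.
    static void markUndefinedLocals(std::vector<Value> &fixedLocals) {
        for (Value &local : fixedLocals)
            local = UndefinedValue();   // no liveness check, no debug poison
    }

    int main() {
        std::vector<Value> locals(8);
        markUndefinedLocals(locals);    // all eight slots are now scannable
    }

As the truncated comment in markUndefinedLocals notes, the caller still skips locals that aren't closed over and are known to be defined before use; only the per-slot liveness special case is removed.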
--- a/js/src/methodjit/FrameState-inl.h
+++ b/js/src/methodjit/FrameState-inl.h
@@ -933,18 +933,16 @@ inline void
 FrameState::fakeSync(FrameEntry *fe)
 {
     /*
      * If a frame entry's value will no longer be used, we can mark it as being
      * synced without actually performing the sync: the value is not observed.
      */
     if (!fe->data.synced())
         fe->data.sync();
-    if (!fe->type.synced())
-        fe->type.sync();
 }
 
 inline void
 FrameState::forgetType(FrameEntry *fe)
 {
     /*
      * The type may have been forgotten with an intervening storeLocal in the
      * presence of eval or closed variables. For defense in depth and to make
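
The removed lines narrow fakeSync to the data word. With the stack now scanned exactly, a slot's type tag must genuinely be written to memory, since the scanner trusts whatever tag it finds there when deciding whether the payload is a GC pointer. A minimal illustration of the hazard, assuming a hypothetical split tag/payload encoding rather than the real jsval layout:

    #include <cstdint>
    #include <cstdio>

    // Hypothetical split tag/payload encoding; an exact scan trusts the tag
    // found in the stack slot when deciding whether to trace the payload.
    enum Tag : uint32_t { TAG_INT32, TAG_OBJECT };
    struct Slot { Tag tag; uintptr_t payload; };

    static void scanSlot(const Slot &s) {
        if (s.tag == TAG_OBJECT)
            std::printf("trace object at %p\n", (void *)s.payload);
        // An int32 payload is never dereferenced, so stale bits are harmless;
        // only the data word of such entries may safely be fake-synced.
    }

    int main() {
        Slot dead = { TAG_INT32, 0xdeadbeef };  // stale payload, honest tag
        scanSlot(dead);                         // safe: nothing is traced
    }

This is presumably also why evictDeadEntries below can assert that the evicted register holds DATA: entries with a known primitive type never keep their type word in a register.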
--- a/js/src/methodjit/FrameState.cpp
+++ b/js/src/methodjit/FrameState.cpp
@@ -307,21 +307,18 @@ FrameState::bestEvictReg(uint32_t mask, 
         }
 
-        /*
-         * All entries still in registers should have a lifetime, except 'this'
-         * in constructors which are not accessed later on.
-         */
+        /* Entries still in registers may be dead here and have no lifetime. */
         Lifetime *lifetime = variableLive(fe, a->PC);
 
+        /* Evict variables whose value is no longer live. */
         if (!lifetime) {
-            JS_ASSERT(isConstructorThis(fe));
             fallback = reg;
             fallbackOffset = a->script->length;
-            JaegerSpew(JSpew_Regalloc, "    %s is 'this' in a constructor\n", reg.name());
+            JaegerSpew(JSpew_Regalloc, "    %s is dead\n", reg.name());
             continue;
         }
 
         /*
          * Evict variables which are only live in future loop iterations, and are
          * not carried around the loop in a register.
          */
         if (lifetime->loopTail && (!loop || !loop->carriesLoopReg(fe))) {
@@ -348,16 +345,31 @@ FrameState::bestEvictReg(uint32_t mask, 
     }
 
     JS_ASSERT(fallback.isSet());
 
     JaegerSpew(JSpew_Regalloc, "result %s\n", fallback.name());
     return fallback;
 }
 
+/*
+ * Whether we can pretend the payload for a given entry is synced, provided
+ * that the value in the entry is dead. The contents of dead slots can still
+ * be observed during stack scanning, so payloads which might hold GC things
+ * (objects, strings) or be misdecoded as one (doubles, magic) must be kept.
+ */
+static inline bool
+CanFakeSync(FrameEntry *fe)
+{
+    return fe->isNotType(JSVAL_TYPE_OBJECT)
+        && fe->isNotType(JSVAL_TYPE_STRING)
+        && fe->isNotType(JSVAL_TYPE_DOUBLE)
+        && fe->isNotType(JSVAL_TYPE_MAGIC);
+}
+
 void
 FrameState::evictDeadEntries(bool includePinned)
 {
     for (uint32_t i = 0; i < Registers::TotalAnyRegisters; i++) {
         AnyRegisterID reg = AnyRegisterID::fromRaw(i);
 
         /* Follow along with the same filters as bestEvictReg. */
 
@@ -369,35 +384,27 @@ FrameState::evictDeadEntries(bool includ
             continue;
 
         if (fe == a->callee_ || isConstructorThis(fe) ||
             fe >= a->spBase || fe->isCopied() || (a->parent && fe < a->locals)) {
             continue;
         }
 
         Lifetime *lifetime = variableLive(fe, a->PC);
-        if (lifetime)
+        if (lifetime || !CanFakeSync(fe))
             continue;
 
-        /*
-         * If we are about to fake sync for an entry with known type, reset
-         * that type. We don't want to regard it as correctly synced later.
-         */
-        if (!fe->type.synced() && fe->isTypeKnown())
-            fe->type.setMemory();
+        JS_ASSERT(regstate(reg).type() == RematInfo::DATA);
 
         /*
          * Mark the entry as synced to avoid emitting a store, we don't need
          * to keep this value around.
          */
         fakeSync(fe);
-        if (regstate(reg).type() == RematInfo::DATA)
-            fe->data.setMemory();
-        else
-            fe->type.setMemory();
+        fe->data.setMemory();
         forgetReg(reg);
     }
 }
 
 AnyRegisterID
 FrameState::evictSomeReg(uint32_t mask)
 {
     JS_ASSERT(!freeRegs.hasRegInMask(mask));
@@ -714,27 +721,27 @@ FrameState::syncForAllocation(RegisterAl
             forgetAllRegs(fe);
             fe->resetSynced();
             continue;
         }
 
         /* Force syncs for locals which are dead at the current PC. */
         if (isLocal(fe) && !fe->copied && !a->analysis->slotEscapes(entrySlot(fe))) {
             Lifetime *lifetime = a->analysis->liveness(entrySlot(fe)).live(a->PC - a->script->code);
-            if (!lifetime)
+            if (!lifetime && CanFakeSync(fe))
                 fakeSync(fe);
         }
 
         /* If returning from a script, fake syncs for dead locals in the immediate parent. */
         if (inlineReturn && fe >= a->parent->locals &&
             fe - a->parent->locals < a->parent->script->nfixed &&
             !a->parent->analysis->slotEscapes(frameSlot(a->parent, fe))) {
             const LifetimeVariable &var = a->parent->analysis->liveness(frameSlot(a->parent, fe));
             Lifetime *lifetime = var.live(a->parent->PC - a->parent->script->code);
-            if (!lifetime)
+            if (!lifetime && CanFakeSync(fe))
                 fakeSync(fe);
         }
 
         if (!fe->isCopy() && alloc->hasAnyReg(fe - entries)) {
             /* Types are always synced, except for known doubles. */
             if (!fe->isType(JSVAL_TYPE_DOUBLE))
                 syncType(fe);
         } else {
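
Since FrameEntry::isNotType only holds when an entry's type is statically known, the blacklist above is equivalent to a whitelist of the four primitive types whose stale payloads an exact scan can never trace or misdecode (assuming isNotType's usual known-type semantics in the method JIT). A self-contained model with a hypothetical type enum, not the real JSVAL_TYPE_* values:

    #include <cassert>

    // Model of CanFakeSync as the equivalent whitelist. TYPE_UNKNOWN stands
    // for an entry whose type is not statically known: isNotType() is false
    // for it, so the blacklist in the patch rejects it as well.
    enum ValueType {
        TYPE_UNDEFINED, TYPE_NULL, TYPE_BOOLEAN, TYPE_INT32,
        TYPE_DOUBLE, TYPE_STRING, TYPE_OBJECT, TYPE_MAGIC, TYPE_UNKNOWN
    };

    static bool CanFakeSyncModel(ValueType t) {
        return t == TYPE_UNDEFINED || t == TYPE_NULL ||
               t == TYPE_BOOLEAN   || t == TYPE_INT32;
    }

    int main() {
        assert(CanFakeSyncModel(TYPE_INT32));     // stale bits never traced
        assert(!CanFakeSyncModel(TYPE_OBJECT));   // stale pointer would be traced
        assert(!CanFakeSyncModel(TYPE_DOUBLE));   // stale bits may not be a valid double
        assert(!CanFakeSyncModel(TYPE_UNKNOWN));  // unknown type: assume the worst
    }

Writing it as a whitelist makes the design choice visible: fake syncing is an optimization reserved for provably inert bit patterns, and everything else, including entries of unknown type, pays for a real store.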
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -628,91 +628,24 @@ StackSpace::containingSegment(const Stac
         if (s->contains(target))
             return *s;
     }
     JS_NOT_REACHED("frame not in stack space");
     return *(StackSegment *)NULL;
 }
 
 void
-StackSpace::markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
+StackSpace::markFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd)
 {
-    AutoAssertNoGC nogc;
     Value *slotsBegin = fp->slots();
-
-    /* If it's a scripted frame, we should have a pc. */
-    JS_ASSERT(pc);
-
-    RawScript script = fp->script();
-    if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
-        if (trc)
-            gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
-        return;
-    }
-
-    /*
-     * If the JIT ran a lifetime analysis, then it may have left garbage in the
-     * slots considered not live. We need to avoid marking them. Additionally,
-     * in case the analysis information is thrown out later, we overwrite these
-     * dead slots with valid values so that future GCs won't crash. Analysis
-     * results are thrown away during the sweeping phase, so we always have at
-     * least one GC to do this.
-     */
-    JSRuntime *rt = script->compartment()->rt;
-    analyze::AutoEnterAnalysis aea(script->compartment());
-    analyze::ScriptAnalysis *analysis = script->analysis();
-    uint32_t offset = pc - script->code;
-    Value *fixedEnd = slotsBegin + script->nfixed;
-    for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
-        uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);
-
-        /* Will this slot be synced by the JIT? */
-        if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset)) {
-            if (trc)
-                gc::MarkValueRoot(trc, vp, "vm_stack");
-        } else if (!trc || script->compartment()->isDiscardingJitCode(trc)) {
-            /*
-             * If we're throwing away analysis information, we need to replace
-             * non-live Values with ones that can safely be marked in later
-             * collections.
-             */
-            if (vp->isDouble()) {
-                *vp = DoubleValue(0.0);
-            } else {
-                /*
-                 * It's possible that *vp may not be a valid Value. For example,
-                 * it may be tagged as a NullValue but the low bits may be
-                 * nonzero so that isNull() returns false. This can cause
-                 * problems later on when marking the value. Extracting the type
-                 * in this way and then overwriting the value circumvents the
-                 * problem.
-                 */
-                JSValueType type = vp->extractNonDoubleType();
-                if (type == JSVAL_TYPE_INT32)
-                    *vp = Int32Value(0);
-                else if (type == JSVAL_TYPE_UNDEFINED)
-                    *vp = UndefinedValue();
-                else if (type == JSVAL_TYPE_BOOLEAN)
-                    *vp = BooleanValue(false);
-                else if (type == JSVAL_TYPE_STRING)
-                    *vp = StringValue(rt->atomState.null);
-                else if (type == JSVAL_TYPE_NULL)
-                    *vp = NullValue();
-                else if (type == JSVAL_TYPE_OBJECT)
-                    *vp = ObjectValue(fp->scopeChain()->global());
-            }
-        }
-    }
-
-    if (trc)
-        gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
+    gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
 }
 
 void
-StackSpace::markAndClobber(JSTracer *trc)
+StackSpace::mark(JSTracer *trc)
 {
     /* NB: this depends on the continuity of segments in memory. */
     Value *nextSegEnd = firstUnused();
     for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
         /*
          * A segment describes a linear region of memory that contains a stack
          * of native and interpreted calls. For marking purposes, though, we
          * only need to distinguish between frames and values and mark
@@ -721,28 +654,26 @@ StackSpace::markAndClobber(JSTracer *trc
          * calls. Thus, marking can view the stack as the regex:
          *   (segment slots (frame slots)*)*
          * which gets marked in reverse order.
          */
         Value *slotsEnd = nextSegEnd;
         jsbytecode *pc = seg->maybepc();
         for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
             /* Mark from fp->slots() to slotsEnd. */
-            markAndClobberFrame(trc, fp, slotsEnd, pc);
+            markFrame(trc, fp, slotsEnd);
 
-            if (trc)
-                fp->mark(trc);
+            fp->mark(trc);
             slotsEnd = (Value *)fp;
 
             InlinedSite *site;
             pc = fp->prevpc(&site);
             JS_ASSERT_IF(fp->prev(), !site);
         }
-        if (trc)
-            gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
+        gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
         nextSegEnd = (Value *)seg;
     }
 }
 
 void
 StackSpace::markActiveCompartments()
 {
     for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
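
With clobbering gone, mark() is exactly the reverse walk the regex comment describes: each frame's slots run from fp->slots() up to the base of the next younger frame (or the segment's end), and whatever precedes the oldest frame belongs to the segment itself. A simplified, self-contained model of that traversal (stand-in types; in the real code the older range ends at the frame header address (Value *)fp, here each frame is reduced to a slot offset):

    #include <cstdio>
    #include <vector>

    // Simplified model of StackSpace::mark's reverse walk (stand-in types,
    // not the real SpiderMonkey layout). A segment owns one contiguous slot
    // array; a frame is just the index of its first slot in that array.
    struct Value { int bits; };
    struct Frame { size_t slotsBegin; };

    struct Segment {
        std::vector<Value> values;  // all slots in this segment
        std::vector<Frame> frames;  // oldest first, ascending slotsBegin
    };

    static void markRange(size_t begin, size_t end) {
        std::printf("mark slots [%zu, %zu)\n", begin, end);
    }

    // One segment is the regex fragment "segment slots (frame slots)*",
    // visited youngest-frame first, as in the patched mark().
    static void markSegment(const Segment &seg) {
        size_t slotsEnd = seg.values.size();
        for (size_t i = seg.frames.size(); i-- > 0; ) {
            markRange(seg.frames[i].slotsBegin, slotsEnd);  // this frame's slots
            slotsEnd = seg.frames[i].slotsBegin;            // older slots end here
        }
        markRange(0, slotsEnd);  // slots pushed before any frame was entered
    }

    int main() {
        Segment seg;
        seg.values.resize(10);
        seg.frames = { {2}, {6} };  // two frames, at slot offsets 2 and 6
        markSegment(seg);           // marks [6,10), then [2,6), then [0,2)
    }

Compared with markAndClobberFrame, the walk needs neither the pc nor analysis liveness, which is what lets the GC-facing signature shrink to (trc, fp, slotsEnd).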
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1377,17 +1377,17 @@ class StackSpace
                                         Value *from, ptrdiff_t nvals) const;
 
     StackSegment &findContainingSegment(const StackFrame *target) const;
 
     bool containsFast(StackFrame *fp) {
         return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
     }
 
-    void markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+    void markFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd);
 
   public:
     StackSpace();
     bool init();
     ~StackSpace();
 
     /*
      * Maximum supported value of arguments.length. This bounds the maximum
@@ -1430,17 +1430,17 @@ class StackSpace
      * most script->nslots deep). getStackLimit ensures that the returned limit
      * does indeed have this required space and reports an error and returns
      * NULL if this reserve space cannot be allocated.
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, unsigned nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
-    void markAndClobber(JSTracer *trc);
+    void mark(JSTracer *trc);
 
     /* Called during GC: sets active flag on compartments with active frames. */
     void markActiveCompartments();
 
     /*
      * On Windows, report the committed size; on *nix, we report the resident
      * size (which means that if part of the stack is swapped to disk, we say
      * it's shrunk).