Bug 782337 - Stack clobbering (r=bhackett,a=lsblakk) FIREFOX_BETA_16_BASE
author: Bill McCloskey <wmccloskey@mozilla.com>
Thu, 16 Aug 2012 14:03:18 -0700
changeset 102526 179a8a17a542f15af0ec993bf7f572c9f854f4f3
parent 102525 d22251230d354c5f363b6f9efd0e9053b3e0ec1e
child 102527 3317e776a8fa1820c48672af6776b86bbf4215a1
push id: unknown
push user: unknown
push date: unknown
reviewers: bhackett, lsblakk
bugs: 782337
milestone: 16.0a2
Bug 782337 - Stack clobbering (r=bhackett,a=lsblakk)
js/src/jsgc.cpp
js/src/vm/Stack.cpp
js/src/vm/Stack.h
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2463,17 +2463,17 @@ MarkRuntime(JSTracer *trc, bool useSaved
     }
 
 #ifdef JS_METHODJIT
     /* We need to expand inline frames before stack scanning. */
     for (CompartmentsIter c(rt); !c.done(); c.next())
         mjit::ExpandInlineFrames(c);
 #endif
 
-    rt->stackSpace.mark(trc);
+    rt->stackSpace.markAndClobber(trc);
     rt->debugScopes->mark(trc);
 
     /* The embedding can register additional roots here. */
     if (JSTraceDataOp op = rt->gcBlackRootsTraceOp)
         (*op)(trc, rt->gcBlackRootsData);
 
     /* During GC, this buffers up the gray roots and doesn't mark them. */
     if (JSTraceDataOp op = rt->gcGrayRootsTraceOp) {
@@ -3431,16 +3431,23 @@ SweepPhase(JSRuntime *rt, JSGCInvocation
     WatchpointMap::sweepAll(rt);
 
     /* Detach unreachable debuggers and global objects from each other. */
     Debugger::sweepAll(&fop);
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
 
+        /*
+         * Eliminate any garbage values from the VM stack that may have been
+         * left by the JIT in between incremental GC slices. We need to do this
+         * before discarding analysis data during JSCompartment::sweep.
+         */
+        rt->stackSpace.markAndClobber(NULL);
+
         bool releaseTypes = ReleaseObservedTypes(rt);
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             if (c->isCollecting())
                 c->sweep(&fop, releaseTypes);
             else
                 c->sweepCrossCompartmentWrappers();
         }
     }
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -609,54 +609,58 @@ StackSpace::containingSegment(const Stac
         if (s->contains(target))
             return *s;
     }
     JS_NOT_REACHED("frame not in stack space");
     return *(StackSegment *)NULL;
 }
 
 void
-StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
+StackSpace::markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
 {
     Value *slotsBegin = fp->slots();
 
     if (!fp->isScriptFrame()) {
         JS_ASSERT(fp->isDummyFrame());
-        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
+        if (trc)
+            gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
         return;
     }
 
     /* If it's a scripted frame, we should have a pc. */
     JS_ASSERT(pc);
 
     JSScript *script = fp->script();
     if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
-        gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
+        if (trc)
+            gc::MarkValueRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
         return;
     }
 
     /*
      * If the JIT ran a lifetime analysis, then it may have left garbage in the
      * slots considered not live. We need to avoid marking them. Additionally,
      * in case the analysis information is thrown out later, we overwrite these
      * dead slots with valid values so that future GCs won't crash. Analysis
      * results are thrown away during the sweeping phase, so we always have at
      * least one GC to do this.
      */
+    JSRuntime *rt = script->compartment()->rt;
     analyze::AutoEnterAnalysis aea(script->compartment());
     analyze::ScriptAnalysis *analysis = script->analysis();
     uint32_t offset = pc - script->code;
     Value *fixedEnd = slotsBegin + script->nfixed;
     for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
         uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);
 
         /* Will this slot be synced by the JIT? */
         if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset)) {
-            gc::MarkValueRoot(trc, vp, "vm_stack");
-        } else if (script->compartment()->isDiscardingJitCode(trc)) {
+            if (trc)
+                gc::MarkValueRoot(trc, vp, "vm_stack");
+        } else if (!trc || script->compartment()->isDiscardingJitCode(trc)) {
             /*
              * If we're throwing away analysis information, we need to replace
              * non-live Values with ones that can safely be marked in later
              * collections.
              */
             if (vp->isDouble()) {
                 *vp = DoubleValue(0.0);
             } else {
@@ -671,36 +675,32 @@ StackSpace::markFrameValues(JSTracer *tr
                 JSValueType type = vp->extractNonDoubleType();
                 if (type == JSVAL_TYPE_INT32)
                     *vp = Int32Value(0);
                 else if (type == JSVAL_TYPE_UNDEFINED)
                     *vp = UndefinedValue();
                 else if (type == JSVAL_TYPE_BOOLEAN)
                     *vp = BooleanValue(false);
                 else if (type == JSVAL_TYPE_STRING)
-                    *vp = StringValue(trc->runtime->atomState.nullAtom);
+                    *vp = StringValue(rt->atomState.nullAtom);
                 else if (type == JSVAL_TYPE_NULL)
                     *vp = NullValue();
                 else if (type == JSVAL_TYPE_OBJECT)
                     *vp = ObjectValue(fp->scopeChain()->global());
             }
         }
     }
 
-    gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
+    if (trc)
+        gc::MarkValueRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
 }
 
 void
-StackSpace::mark(JSTracer *trc)
+StackSpace::markAndClobber(JSTracer *trc)
 {
-    /*
-     * JIT code can leave values in an incoherent (i.e., unsafe for precise
-     * marking) state, hence MarkStackRangeConservatively.
-     */
-
     /* NB: this depends on the continuity of segments in memory. */
     Value *nextSegEnd = firstUnused();
     for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
         /*
          * A segment describes a linear region of memory that contains a stack
          * of native and interpreted calls. For marking purposes, though, we
          * only need to distinguish between frames and values and mark
          * accordingly. Since native calls only push values on the stack, we
@@ -708,26 +708,28 @@ StackSpace::mark(JSTracer *trc)
          * calls. Thus, marking can view the stack as the regex:
          *   (segment slots (frame slots)*)*
          * which gets marked in reverse order.
          */
         Value *slotsEnd = nextSegEnd;
         jsbytecode *pc = seg->maybepc();
         for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
             /* Mark from fp->slots() to slotsEnd. */
-            markFrameValues(trc, fp, slotsEnd, pc);
+            markAndClobberFrame(trc, fp, slotsEnd, pc);
 
-            fp->mark(trc);
+            if (trc)
+                fp->mark(trc);
             slotsEnd = (Value *)fp;
 
             InlinedSite *site;
             pc = fp->prevpc(&site);
             JS_ASSERT_IF(fp->prev(), !site);
         }
-        gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
+        if (trc)
+            gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
         nextSegEnd = (Value *)seg;
     }
 }
 
 void
 StackSpace::markActiveCompartments()
 {
     for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1355,16 +1355,18 @@ class StackSpace
                                         JSCompartment *dest) const;
 
     StackSegment &findContainingSegment(const StackFrame *target) const;
 
     bool containsFast(StackFrame *fp) {
         return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
     }
 
+    void markAndClobberFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+
   public:
     StackSpace();
     bool init();
     ~StackSpace();
 
     /*
      * Maximum supported value of arguments.length. This bounds the maximum
      * number of arguments that can be supplied to Function.prototype.apply.
@@ -1406,18 +1408,17 @@ class StackSpace
      * most script->nslots deep). getStackLimit ensures that the returned limit
      * does indeed have this required space and reports an error and returns
      * NULL if this reserve space cannot be allocated.
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, unsigned nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
-    void mark(JSTracer *trc);
-    void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+    void markAndClobber(JSTracer *trc);
 
     /* Called during GC: sets active flag on compartments with active frames. */
     void markActiveCompartments();
 
     /* We only report the committed size;  uncommitted size is uninteresting. */
     JS_FRIEND_API(size_t) sizeOfCommitted();
 
 #ifdef DEBUG