Back out 5f623a11c6cb (bug 713226), 1ed8ccf96402 (bug 721579), 32af27f89c49 (bug 722028), 1300d282fd22 (bug 716067), dc0f6ad7eff3 (bug 723313), 0d2ab3f2e9b9 (bug 723773) for talos crashes
author: Phil Ringnalda <philringnalda@gmail.com>
date: Fri, 10 Feb 2012 19:47:48 -0800
changeset: 89536 71f5bf4df2f60a730733073a15e6766d929e9516
parent: 89535 dfe18a0ece029b1b69a3838677b45c1d0b7b6dd2
child: 89537 8716f10ab139b8e5045ce7acc3f024c12c2f79b3
push id: unknown
push user: unknown
push date: unknown
bugs: 713226, 721579, 722028, 716067, 723313, 723773
milestone: 13.0a1
backs out: 5f623a11c6cb7aff1be6e50321bd900ec8d6db05
js/src/jit-test/tests/basic/bug713226.js
js/src/jit-test/tests/basic/bug722028.js
js/src/jsanalyze.cpp
js/src/jsapi.cpp
js/src/jscntxt.h
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsiter.cpp
js/src/methodjit/Compiler.cpp
js/src/vm/Stack.cpp
js/src/vm/Stack.h
js/xpconnect/src/XPCInlines.h
js/xpconnect/src/XPCWrappedNative.cpp
js/xpconnect/src/XPCWrappedNativeScope.cpp
js/xpconnect/src/dombindings.cpp
js/xpconnect/src/xpcprivate.h
js/xpconnect/wrappers/WrapperFactory.cpp
deleted file mode 100644
--- a/js/src/jit-test/tests/basic/bug713226.js
+++ /dev/null
@@ -1,21 +0,0 @@
-// |jit-test| mjitalways;
-gczeal(4);
-var optionNames = options().split(',');
-  for (var i = 0; i < optionNames.length; i++)
-    var optionName = optionNames[i];
-      options(optionName);
-evaluate("\
-function addDebug(g, id) {\
-    var debuggerGlobal = newGlobal('new-compartment');\
-    debuggerGlobal.debuggee = g;\
-    debuggerGlobal.id = id;\
-    debuggerGlobal.print = function (s) { (g) += s; };\
-    debuggerGlobal.eval('var dbg = new Debugger(debuggee);dbg.onDebuggerStatement = function () { print(id); debugger; };');\
-    return debuggerGlobal;\
-}\
-var base = newGlobal('new-compartment');\
-var top = base;\
-for (var i = 0; i < 8; i++ )\
-    top = addDebug(top, i);\
-base.eval('debugger;');\
-");
deleted file mode 100644
--- a/js/src/jit-test/tests/basic/bug722028.js
+++ /dev/null
@@ -1,13 +0,0 @@
-
-gczeal(4);
-var BUGNUMBER = 668024;
-var summary =
-print(BUGNUMBER + ": " + summary);
-var arr = [0, 1, 2, 3, 4, 5, , 7];
-var seen = [];
-for (var p in arr) {
-    if (seen.indexOf(unescape) >= 0) {}
-    arr.splice(2, 3);
-  seen.push(p);
-}
-
--- a/js/src/jsanalyze.cpp
+++ b/js/src/jsanalyze.cpp
@@ -935,25 +935,22 @@ ScriptAnalysis::killVariable(JSContext *
      */
     var.lifetime->start = offset;
     var.lifetime->write = true;
 
     if (var.ensured) {
         /*
          * The variable is live even before the write, due to an enclosing try
          * block. We need to split the lifetime to indicate there was a write.
-         * We set the new interval's savedEnd to 0, since it will always be
-         * adjacent to the old interval, so it never needs to be extended.
          */
-        var.lifetime = cx->typeLifoAlloc().new_<Lifetime>(start, 0, var.lifetime);
+        var.lifetime = cx->typeLifoAlloc().new_<Lifetime>(start, offset, var.lifetime);
         if (!var.lifetime) {
             setOOM(cx);
             return;
         }
-        var.lifetime->end = offset;
     } else {
         var.saved = var.lifetime;
         var.savedEnd = 0;
         var.lifetime = NULL;
 
         saved[savedCount++] = &var;
     }
 }
@@ -971,53 +968,35 @@ ScriptAnalysis::extendVariable(JSContext
          */
         JS_ASSERT(var.lifetime->start < start);
         return;
     }
 
     var.lifetime->start = start;
 
     /*
-     * Consider this code:
-     *
-     *   while (...) { (#1)
-     *       use x;    (#2)
-     *       ...
-     *       x = ...;  (#3)
-     *       ...
-     *   }             (#4)
-     *
-     * Just before analyzing the while statement, there would be a live range
-     * from #1..#2 and a "point range" at #3. The job of extendVariable is to
-     * create a new live range from #3..#4.
-     *
-     * However, more extensions may be required if the definition of x is
-     * conditional. Consider the following.
+     * When walking backwards through loop bodies, we don't know which vars
+     * are live at the loop's backedge. We save the endpoints for lifetime
+     * segments which we *would* use if the variables were live at the backedge
+     * and extend the variable with new lifetimes if we find the variable is
+     * indeed live at the head of the loop.
      *
-     *   while (...) {     (#1)
-     *       use x;        (#2)
-     *       ...
-     *       if (...)      (#5)
-     *           x = ...;  (#3)
-     *       ...
-     *   }                 (#4)
+     * while (...) {
+     *   if (x #1) { ... }
+     *   ...
+     *   if (... #2) { x = 0; #3}
+     * }
      *
-     * Assume that x is not used after the loop. Then, before extendVariable is
-     * run, the live ranges would be the same as before (#1..#2 and #3..#3). We
-     * still need to create a range from #3..#4. But, since the assignment at #3
-     * may never run, we also need to create a range from #2..#3. This is done
-     * as follows.
-     *
-     * Each time we create a Lifetime, we store the start of the most recently
-     * seen sequence of conditional code in the Lifetime's savedEnd field. So,
-     * when creating the Lifetime at #2, we set the Lifetime's savedEnd to
-     * #5. (The start of the most recent conditional is cached in each
-     * variable's savedEnd field.) Consequently, extendVariable is able to
-     * create a new interval from #2..#5 using the savedEnd field of the
-     * existing #1..#2 interval.
+     * If x is not live after the loop, we treat it as dead in the walk and
+     * make a point lifetime for the write at #3. At the beginning of that
+     * basic block (#2), we save the loop endpoint; if we knew x was live in
+     * the next iteration then a new lifetime would be made here. At #1 we
+     * mark x live again, make a segment between the head of the loop and #1,
+     * and then extend x with loop tail lifetimes from #1 to #2, and from #3
+     * to the back edge.
      */
 
     Lifetime *segment = var.lifetime;
     while (segment && segment->start < end) {
         uint32_t savedEnd = segment->savedEnd;
         if (!segment->next || segment->next->start >= end) {
             /*
              * savedEnd is only set for variables killed in the middle of the
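
Editor's note: the restored extendVariable comment packs the whole backward walk into one example. A worked trace of that example follows; the bytecode offsets and the [start, end] segment notation are illustrative assumptions, not values from the patch.

    // Offsets for the loop in the comment: 10 = loop head, 20 = the read of
    // x at #1, 40 = the head of the conditional block at #2, 44 = the write
    // at #3, 50 = the back edge.
    //
    // Walking the body backwards, with x assumed dead after the loop:
    //   at 44 (#3): x is dead, so the write gets a point lifetime [44, 44];
    //   at 40 (#2): the loop endpoint is saved on that segment -- if x turns
    //               out to be live around the backedge, [44, 50] is needed;
    //   at 20 (#1): the read marks x live, producing a segment back to the
    //               loop head, [10, 20].
    // Since x is now known live at the loop head, extendVariable fills in
    // the saved pieces: [20, 40] from #1 to #2, and [44, 50] from #3 to the
    // back edge -- the "loop tail lifetimes" the comment describes.
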
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -715,16 +715,18 @@ JSRuntime::JSRuntime()
 # ifdef DEBUG
     checkRequestDepth(0),
 # endif
 #endif
     gcSystemAvailableChunkListHead(NULL),
     gcUserAvailableChunkListHead(NULL),
     gcKeepAtoms(0),
     gcBytes(0),
+    gcTriggerBytes(0),
+    gcLastBytes(0),
     gcMaxBytes(0),
     gcMaxMallocBytes(0),
     gcNumArenasFreeCommitted(0),
     gcNumber(0),
     gcIncrementalTracer(NULL),
     gcVerifyData(NULL),
     gcChunkAllocationSinceLastGC(false),
     gcNextFullGCTime(0),
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -279,16 +279,18 @@ struct JSRuntime : js::RuntimeFriendFiel
     js::gc::Chunk       *gcSystemAvailableChunkListHead;
     js::gc::Chunk       *gcUserAvailableChunkListHead;
     js::gc::ChunkPool   gcChunkPool;
 
     js::RootedValueMap  gcRootsHash;
     js::GCLocks         gcLocksHash;
     jsrefcount          gcKeepAtoms;
     size_t              gcBytes;
+    size_t              gcTriggerBytes;
+    size_t              gcLastBytes;
     size_t              gcMaxBytes;
     size_t              gcMaxMallocBytes;
 
     /*
      * Number of the committed arenas in all GC chunks including empty chunks.
      * The counter is volatile as it is read without the GC lock, see comments
      * in MaybeGC.
      */
@@ -533,16 +535,19 @@ struct JSRuntime : js::RuntimeFriendFiel
 
     JSRuntime();
     ~JSRuntime();
 
     bool init(uint32_t maxbytes);
 
     JSRuntime *thisFromCtor() { return this; }
 
+    void setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind);
+    void reduceGCTriggerBytes(size_t amount);
+
     /*
      * Call the system malloc while checking for GC memory pressure and
      * reporting OOM error when cx is not null. We will not GC from here.
      */
     void* malloc_(size_t bytes, JSContext *cx = NULL) {
         updateMallocCounter(cx, bytes);
         void *p = ::js_malloc(bytes);
         return JS_LIKELY(!!p) ? p : onOutOfMemory(NULL, bytes, cx);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -779,18 +779,20 @@ Chunk::releaseArena(ArenaHeader *aheader
     if (rt->gcHelperThread.sweeping())
         maybeLock.lock(rt);
 #endif
 
     Probes::resizeHeap(comp, rt->gcBytes, rt->gcBytes - ArenaSize);
     JS_ASSERT(rt->gcBytes >= ArenaSize);
     JS_ASSERT(comp->gcBytes >= ArenaSize);
 #ifdef JS_THREADSAFE
-    if (rt->gcHelperThread.sweeping())
+    if (rt->gcHelperThread.sweeping()) {
+        rt->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
         comp->reduceGCTriggerBytes(GC_HEAP_GROWTH_FACTOR * ArenaSize);
+    }
 #endif
     rt->gcBytes -= ArenaSize;
     comp->gcBytes -= ArenaSize;
 
     aheader->setAsNotAllocated();
     addArenaToFreeList(rt, aheader);
 
     if (info.numArenasFree == 1) {
@@ -886,16 +888,22 @@ js_InitGC(JSRuntime *rt, uint32_t maxbyt
 
     /*
      * Separate gcMaxMallocBytes from gcMaxBytes but initialize to maxbytes
      * for default backward API compatibility.
      */
     rt->gcMaxBytes = maxbytes;
     rt->setGCMaxMallocBytes(maxbytes);
 
+    /*
+     * The assigned value prevents GC from running when GC memory is too low
+     * (during JS engine start).
+     */
+    rt->setGCLastBytes(8192, GC_NORMAL);
+
     rt->gcJitReleaseTime = PRMJ_Now() + JIT_SCRIPT_RELEASE_TYPES_INTERVAL;
     return true;
 }
 
 namespace js {
 
 inline bool
 InFreeList(ArenaHeader *aheader, uintptr_t addr)
@@ -1343,28 +1351,46 @@ js_MapGCRoots(JSRuntime *rt, JSGCRootMap
         if (mapflags & JS_MAP_GCROOT_STOP)
             break;
     }
 
     return ct;
 }
 
 void
+JSRuntime::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
+{
+    gcLastBytes = lastBytes;
+
+    size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
+    float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
+    gcTriggerBytes = size_t(Min(float(gcMaxBytes), trigger));
+}
+
+void
+JSRuntime::reduceGCTriggerBytes(size_t amount) {
+    JS_ASSERT(amount > 0);
+    JS_ASSERT(gcTriggerBytes - amount >= 0);
+    if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
+        return;
+    gcTriggerBytes -= amount;
+}
+
+void
 JSCompartment::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
 {
     gcLastBytes = lastBytes;
 
     size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, GC_ALLOCATION_THRESHOLD);
     float trigger = float(base) * GC_HEAP_GROWTH_FACTOR;
     gcTriggerBytes = size_t(Min(float(rt->gcMaxBytes), trigger));
 }
 
 void
-JSCompartment::reduceGCTriggerBytes(size_t amount)
-{
+JSCompartment::reduceGCTriggerBytes(size_t amount) {
     JS_ASSERT(amount > 0);
     JS_ASSERT(gcTriggerBytes - amount >= 0);
     if (gcTriggerBytes - amount < GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR)
         return;
     gcTriggerBytes -= amount;
 }
 
 namespace js {
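
Editor's note: for a feel of the numbers, here is the restored trigger computation worked through once. GC_HEAP_GROWTH_FACTOR and GC_ALLOCATION_THRESHOLD are defined in jsgc.h and do not appear in this patch; the values below (3.0 and 30 MB) are assumptions for illustration.

    // JSRuntime::setGCLastBytes(50 MB, GC_NORMAL) with gcMaxBytes = 512 MB:
    //   base           = Max(50 MB, GC_ALLOCATION_THRESHOLD) = 50 MB
    //   trigger        = 50 MB * GC_HEAP_GROWTH_FACTOR       = 150 MB
    //   gcTriggerBytes = Min(512 MB, 150 MB)                 = 150 MB
    // GC_SHRINK skips the Max(), so a shrinking GC on a small heap keeps a
    // proportionally small trigger. reduceGCTriggerBytes() then ratchets the
    // trigger down as the background sweeper returns arenas (see the
    // Chunk::releaseArena hunk above), but refuses to drop below
    // GC_ALLOCATION_THRESHOLD * GC_HEAP_GROWTH_FACTOR.
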
@@ -1861,16 +1887,36 @@ gc_root_traversal(JSTracer *trc, const R
 static void
 gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
 {
     JS_ASSERT(entry.value >= 1);
     MarkRootGCThing(trc, entry.key, "locked object");
 }
 
 void
+js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
+{
+    MarkRoot(trc, &fp->scopeChain(), "scope chain");
+    if (fp->isDummyFrame())
+        return;
+    if (fp->hasArgsObj())
+        MarkRoot(trc, &fp->argsObj(), "arguments");
+    if (fp->isFunctionFrame()) {
+        MarkRoot(trc, fp->fun(), "fun");
+        if (fp->isEvalFrame()) {
+            MarkRoot(trc, fp->script(), "eval script");
+        }
+    } else {
+        MarkRoot(trc, fp->script(), "script");
+    }
+    fp->script()->compartment()->active = true;
+    MarkRoot(trc, fp->returnValue(), "rval");
+}
+
+void
 AutoIdArray::trace(JSTracer *trc)
 {
     JS_ASSERT(tag == IDARRAY);
     gc::MarkIdRange(trc, idArray->vector, idArray->vector + idArray->length,
                     "JSAutoIdArray.idArray");
 }
 
 void
@@ -2122,16 +2168,22 @@ TriggerCompartmentGC(JSCompartment *comp
 
     if (rt->gcIsNeeded) {
         /* If we need to GC more than one compartment, run a full GC. */
         if (rt->gcTriggerCompartment != comp)
             rt->gcTriggerCompartment = NULL;
         return;
     }
 
+    if (rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)) {
+        /* If we're using significantly more than our quota, do a full GC. */
+        TriggerGC(rt, reason);
+        return;
+    }
+
     /*
      * Trigger the GC when it is safe to call an operation callback on any
      * thread.
      */
     rt->gcIsNeeded = true;
     rt->gcTriggerCompartment = comp;
     rt->gcTriggerReason = reason;
     comp->rt->triggerOperationCallback();
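
Editor's note: the escalation test restored above is easier to read as a ratio; restated with hypothetical numbers.

    //   rt->gcBytes > 8192 && rt->gcBytes >= 3 * (rt->gcTriggerBytes / 2)
    // is roughly gcBytes >= 1.5 * gcTriggerBytes, written with the division
    // first (presumably so the multiply cannot overflow size_t). With
    // gcTriggerBytes = 150 MB, a compartment GC request is upgraded to a
    // full GC once the runtime passes 3 * 75 MB = 225 MB. The 8192-byte
    // floor echoes the startup value fed to setGCLastBytes() in js_InitGC
    // above, keeping a barely-warmed-up runtime off this path.
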
@@ -2909,16 +2961,17 @@ GCCycle(JSContext *cx, JSCompartment *co
     if (cx->gcBackgroundFree) {
         JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
         cx->gcBackgroundFree = NULL;
         rt->gcHelperThread.startBackgroundSweep(cx, gckind == GC_SHRINK);
     }
 #endif
 
     rt->gcMarkAndSweep = false;
+    rt->setGCLastBytes(rt->gcBytes, gckind);
     rt->gcCurrentCompartment = NULL;
 
     for (CompartmentsIter c(rt); !c.done(); c.next())
         c->setGCLastBytes(c->gcBytes, gckind);
 }
 
 void
 js_GC(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind, gcreason::Reason reason)
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1366,16 +1366,19 @@ extern void
 js_UnlockGCThingRT(JSRuntime *rt, void *thing);
 
 extern JS_FRIEND_API(bool)
 IsAboutToBeFinalized(const js::gc::Cell *thing);
 
 extern bool
 IsAboutToBeFinalized(const js::Value &value);
 
+extern void
+js_TraceStackFrame(JSTracer *trc, js::StackFrame *fp);
+
 extern bool
 js_IsAddressableGCThing(JSRuntime *rt, uintptr_t w, js::gc::AllocKind *thingKind, void **thing);
 
 namespace js {
 
 extern void
 TraceRuntime(JSTracer *trc);
 
--- a/js/src/jsiter.cpp
+++ b/js/src/jsiter.cpp
@@ -1024,22 +1024,16 @@ SuppressDeletedPropertyHelper(JSContext 
                      * If it is the next property to be enumerated, just skip it.
                      */
                     if (idp == props_cursor) {
                         ni->incCursor();
                     } else {
                         for (HeapPtr<JSFlatString> *p = idp; p + 1 != props_end; p++)
                             *p = *(p + 1);
                         ni->props_end = ni->end() - 1;
-
-                        /*
-                         * Invoke the write barrier on this element, since it's
-                         * no longer going to be marked.
-                         */
-                        ni->props_end->HeapPtr<JSFlatString>::~HeapPtr<JSFlatString>();
                     }
 
                     /* Don't reuse modified native iterators. */
                     ni->flags |= JSITER_UNREUSABLE;
 
                     if (predicate.matchesAtMostOne())
                         break;
                 }
@@ -1380,17 +1374,17 @@ MarkGenerator(JSTracer *trc, JSGenerator
     /*
      * Currently, generators are not mjitted. Still, (overflow) args can be
      * pushed by the mjit and need to be conservatively marked. Technically, the
      * formal args and generator slots are safe for exact marking, but since the
      * plan is to eventually mjit generators, it makes sense to future-proof
      * this code and save someone an hour later.
      */
     MarkStackRangeConservatively(trc, gen->floatingStack, fp->formalArgsEnd());
-    fp->mark(trc);
+    js_TraceStackFrame(trc, fp);
     MarkStackRangeConservatively(trc, fp->slots(), gen->regs.sp);
 }
 
 static void
 GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
 {
     JSCompartment *comp = cx->compartment;
     if (comp->needsBarrier())
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -5622,28 +5622,26 @@ mjit::Compiler::jsop_setprop(PropertyNam
                 bumpPropCounter(PC, OpcodeCounts::PROP_DEFINITE);
             return true;
         }
     }
 
     if (script->pcCounters)
         bumpPropCounter(PC, OpcodeCounts::PROP_OTHER);
 
-    JSOp op = JSOp(*PC);
-
 #ifdef JSGC_INCREMENTAL_MJ
-    /* Write barrier. We only have type information for JSOP_SETPROP. */
-    if (cx->compartment->needsBarrier() &&
-        (!types || op != JSOP_SETPROP || types->propertyNeedsBarrier(cx, id)))
-    {
+    /* Write barrier. */
+    if (cx->compartment->needsBarrier() && (!types || types->propertyNeedsBarrier(cx, id))) {
         jsop_setprop_slow(name);
         return true;
     }
 #endif
 
+    JSOp op = JSOp(*PC);
+
     ic::PICInfo::Kind kind = (op == JSOP_SETMETHOD)
                              ? ic::PICInfo::SETMETHOD
                              : ic::PICInfo::SET;
     PICGenInfo pic(kind, op);
     pic.name = name;
 
     if (monitored(PC)) {
         pic.typeMonitored = true;
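
Editor's note: side by side, the two barrier guards in this hunk differ only in how far they trust type information. Restated as plain booleans (writing needsBarrier for cx->compartment->needsBarrier(); no new behavior, just the conditions from the diff):

    // Restored (this backout): under incremental GC, take the slow path
    // whenever types cannot rule out a barrier, for every set opcode:
    //   needsBarrier && (!types || types->propertyNeedsBarrier(cx, id))
    //
    // Backed out: additionally distrust type info for anything but
    // JSOP_SETPROP (e.g. JSOP_SETMETHOD), always slow-pathing those:
    //   needsBarrier && (!types || op != JSOP_SETPROP
    //                           || types->propertyNeedsBarrier(cx, id))
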
--- a/js/src/vm/Stack.cpp
+++ b/js/src/vm/Stack.cpp
@@ -206,41 +206,16 @@ StackFrame::pcQuadratic(const ContextSta
         return regs.pc;
     }
 
     if (!next)
         next = seg.computeNextFrame(this);
     return next->prevpc(pinlined);
 }
 
-void
-StackFrame::mark(JSTracer *trc)
-{
-    /*
-     * Normally we would use MarkRoot here, except that generators also take
-     * this path. However, generators use a special write barrier when the stack
-     * frame is copied to the floating frame. Therefore, no barrier is needed.
-     */
-    gc::MarkObjectUnbarriered(trc, &scopeChain(), "scope chain");
-    if (isDummyFrame())
-        return;
-    if (hasArgsObj())
-        gc::MarkObjectUnbarriered(trc, &argsObj(), "arguments");
-    if (isFunctionFrame()) {
-        gc::MarkObjectUnbarriered(trc, fun(), "fun");
-        if (isEvalFrame())
-            gc::MarkScriptUnbarriered(trc, script(), "eval script");
-    } else {
-        gc::MarkScriptUnbarriered(trc, script(), "script");
-    }
-    if (IS_GC_MARKING_TRACER(trc))
-        script()->compartment()->active = true;
-    gc::MarkValueUnbarriered(trc, returnValue(), "rval");
-}
-
 /*****************************************************************************/
 
 bool
 StackSegment::contains(const StackFrame *fp) const
 {
     /* NB: this depends on the continuity of segments in memory. */
     return (Value *)fp >= slotsBegin() && (Value *)fp <= (Value *)maybefp();
 }
@@ -402,61 +377,16 @@ StackSpace::containingSegment(const Stac
         if (s->contains(target))
             return *s;
     }
     JS_NOT_REACHED("frame not in stack space");
     return *(StackSegment *)NULL;
 }
 
 void
-StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
-{
-    Value *slotsBegin = fp->slots();
-
-    if (!fp->isScriptFrame()) {
-        JS_ASSERT(fp->isDummyFrame());
-        gc::MarkRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
-        return;
-    }
-
-    /* If it's a scripted frame, we should have a pc. */
-    JS_ASSERT(pc);
-
-    JSScript *script = fp->script();
-    if (!script->hasAnalysis() || !script->analysis()->ranLifetimes()) {
-        gc::MarkRootRange(trc, slotsBegin, slotsEnd, "vm_stack");
-        return;
-    }
-
-    /*
-     * If the JIT ran a lifetime analysis, then it may have left garbage in the
-     * slots considered not live. We need to avoid marking them. Additionally,
-     * in case the analysis information is thrown out later, we overwrite these
-     * dead slots with valid values so that future GCs won't crash. Analysis
-     * results are thrown away during the sweeping phase, so we always have at
-     * least one GC to do this.
-     */
-    analyze::AutoEnterAnalysis aea(script->compartment());
-    analyze::ScriptAnalysis *analysis = script->analysis();
-    uint32_t offset = pc - script->code;
-    Value *fixedEnd = slotsBegin + script->nfixed;
-    for (Value *vp = slotsBegin; vp < fixedEnd; vp++) {
-        uint32_t slot = analyze::LocalSlot(script, vp - slotsBegin);
-
-        /* Will this slot be synced by the JIT? */
-        if (!analysis->trackSlot(slot) || analysis->liveness(slot).live(offset))
-            gc::MarkRoot(trc, *vp, "vm_stack");
-        else
-            *vp = UndefinedValue();
-    }
-
-    gc::MarkRootRange(trc, fixedEnd, slotsEnd, "vm_stack");
-}
-
-void
 StackSpace::mark(JSTracer *trc)
 {
     /*
      * JIT code can leave values in an incoherent (i.e., unsafe for precise
      * marking) state, hence MarkStackRangeConservatively.
      */
 
     /* NB: this depends on the continuity of segments in memory. */
@@ -466,31 +396,25 @@ StackSpace::mark(JSTracer *trc)
          * A segment describes a linear region of memory that contains a stack
          * of native and interpreted calls. For marking purposes, though, we
          * only need to distinguish between frames and values and mark
          * accordingly. Since native calls only push values on the stack, we
          * can effectively lump them together and just iterate over interpreted
          * calls. Thus, marking can view the stack as the regex:
          *   (segment slots (frame slots)*)*
          * which gets marked in reverse order.
+         *
          */
         Value *slotsEnd = nextSegEnd;
-        jsbytecode *pc = seg->maybepc();
         for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
-            /* Mark from fp->slots() to slotsEnd. */
-            markFrameSlots(trc, fp, slotsEnd, pc);
-
-            fp->mark(trc);
+            MarkStackRangeConservatively(trc, fp->slots(), slotsEnd);
+            js_TraceStackFrame(trc, fp);
             slotsEnd = (Value *)fp;
-
-            JSInlinedSite *site;
-            pc = fp->prevpc(&site);
-            JS_ASSERT_IF(fp->prev(), !site);
         }
-        gc::MarkRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
+        MarkStackRangeConservatively(trc, seg->slotsBegin(), slotsEnd);
         nextSegEnd = (Value *)seg;
     }
 }
 
 JS_FRIEND_API(bool)
 StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals,
                             JSCompartment *dest) const
 {
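
Editor's note: the marking loop restored in StackSpace::mark walks the "(segment slots (frame slots)*)*" layout from the comment, top of stack first. A sketch of one segment, low addresses first (illustrative only):

    //   seg->slotsBegin()   [segment slots]          <- native-call values
    //   fpOuter             [StackFrame][its slots]
    //   fpInner             [StackFrame][its slots]
    //   nextSegEnd                                   <- end of used space
    //
    // Starting from fpInner = seg->maybefp(), each iteration conservatively
    // marks [fp->slots(), slotsEnd), traces the frame itself via
    // js_TraceStackFrame(), then moves slotsEnd back to (Value *)fp.
    // Whatever remains before the outermost frame is the segment's own slot
    // range, [seg->slotsBegin(), slotsEnd).
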
--- a/js/src/vm/Stack.h
+++ b/js/src/vm/Stack.h
@@ -1,9 +1,9 @@
-/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
  * vim: set ts=4 sw=4 et tw=79 ft=cpp:
  *
  * ***** BEGIN LICENSE BLOCK *****
  * Version: MPL 1.1/GPL 2.0/LGPL 2.1
  *
  * The contents of this file are subject to the Mozilla Public License Version
  * 1.1 (the "License"); you may not use this file except in compliance with
  * the License. You may obtain a copy of the License at
@@ -1188,19 +1188,16 @@ class StackFrame
 
 #ifdef JS_METHODJIT
     mjit::JITScript *jit() {
         return script()->getJIT(isConstructing());
     }
 #endif
 
     void methodjitStaticAsserts();
-
-  public:
-    void mark(JSTracer *trc);
 };
 
 static const size_t VALUES_PER_STACK_FRAME = sizeof(StackFrame) / sizeof(Value);
 
 static inline uintN
 ToReportFlags(InitialFrameFlags initial)
 {
     return uintN(initial & StackFrame::CONSTRUCTING);
@@ -1363,20 +1360,16 @@ class StackSegment
     StackFrame *fp() const {
         return regs_->fp();
     }
 
     StackFrame *maybefp() const {
         return regs_ ? regs_->fp() : NULL;
     }
 
-    jsbytecode *maybepc() const {
-        return regs_ ? regs_->pc : NULL;
-    }
-
     CallArgsList &calls() const {
         JS_ASSERT(calls_);
         return *calls_;
     }
 
     CallArgsList *maybeCalls() const {
         return calls_;
     }
@@ -1537,17 +1530,16 @@ class StackSpace
      * does indeed have this required space and reports an error and returns
      * NULL if this reserve space cannot be allocated.
      */
     inline Value *getStackLimit(JSContext *cx, MaybeReportError report);
     bool tryBumpLimit(JSContext *cx, Value *from, uintN nvals, Value **limit);
 
     /* Called during GC: mark segments, frames, and slots under firstUnused. */
     void mark(JSTracer *trc);
-    void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
 
     /* We only report the committed size;  uncommitted size is uninteresting. */
     JS_FRIEND_API(size_t) sizeOfCommitted();
 };
 
 /*****************************************************************************/
 
 class ContextStack
--- a/js/xpconnect/src/XPCInlines.h
+++ b/js/xpconnect/src/XPCInlines.h
@@ -577,39 +577,31 @@ inline void XPCNativeSet::ASSERT_NotMark
     for (int i = (int) mInterfaceCount; i > 0; i--, pp++)
         NS_ASSERTION(!(*pp)->IsMarked(), "bad");
 }
 #endif
 
 /***************************************************************************/
 
 inline
-JSObject* XPCWrappedNativeTearOff::GetJSObjectPreserveColor() const
+JSObject* XPCWrappedNativeTearOff::GetJSObject() const
 {
     return mJSObject;
 }
 
 inline
-JSObject* XPCWrappedNativeTearOff::GetJSObject()
-{
-    JSObject *obj = GetJSObjectPreserveColor();
-    xpc_UnmarkGrayObject(obj);
-    return obj;
-}
-
-inline
 void XPCWrappedNativeTearOff::SetJSObject(JSObject*  JSObj)
 {
         mJSObject = JSObj;
 }
 
 inline
 XPCWrappedNativeTearOff::~XPCWrappedNativeTearOff()
 {
-    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObjectPreserveColor()), "tearoff not empty in dtor");
+    NS_ASSERTION(!(GetInterface()||GetNative()||GetJSObject()), "tearoff not empty in dtor");
 }
 
 /***************************************************************************/
 
 inline JSBool
 XPCWrappedNative::HasInterfaceNoQI(const nsIID& iid)
 {
     return nsnull != GetSet()->FindInterfaceWithIID(iid);
@@ -624,17 +616,17 @@ XPCWrappedNative::SweepTearOffs()
         for (int i = XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK; i > 0; i--, to++) {
             JSBool marked = to->IsMarked();
             to->Unmark();
             if (marked)
                 continue;
 
             // If this tearoff does not have a live dedicated JSObject,
             // then let's recycle it.
-            if (!to->GetJSObjectPreserveColor()) {
+            if (!to->GetJSObject()) {
                 nsISupports* obj = to->GetNative();
                 if (obj) {
                     obj->Release();
                     to->SetNative(nsnull);
                 }
                 to->SetInterface(nsnull);
             }
         }
--- a/js/xpconnect/src/XPCWrappedNative.cpp
+++ b/js/xpconnect/src/XPCWrappedNative.cpp
@@ -166,17 +166,17 @@ XPCWrappedNative::NoteTearoffs(nsCycleCo
     // finalized yet we'll note the edge between the JS object and the native
     // (see nsXPConnect::Traverse), but if their JS object has been finalized
     // then the tearoff is only reachable through the XPCWrappedNative, so we
     // record an edge here.
     XPCWrappedNativeTearOffChunk* chunk;
     for (chunk = &mFirstChunk; chunk; chunk = chunk->mNextChunk) {
         XPCWrappedNativeTearOff* to = chunk->mTearOffs;
         for (int i = XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK-1; i >= 0; i--, to++) {
-            JSObject* jso = to->GetJSObjectPreserveColor();
+            JSObject* jso = to->GetJSObject();
             if (!jso) {
                 NS_CYCLE_COLLECTION_NOTE_EDGE_NAME(cb, "tearoff's mNative");
                 cb.NoteXPCOMChild(to->GetNative());
             }
         }
     }
 }
 
@@ -1254,17 +1254,17 @@ XPCWrappedNative::FlatJSObjectFinalized(
     // This will keep them from trying to access their pointers to the
     // dying tearoff object. We can safely assume that those remaining
     // JSObjects are about to be finalized too.
 
     XPCWrappedNativeTearOffChunk* chunk;
     for (chunk = &mFirstChunk; chunk; chunk = chunk->mNextChunk) {
         XPCWrappedNativeTearOff* to = chunk->mTearOffs;
         for (int i = XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK-1; i >= 0; i--, to++) {
-            JSObject* jso = to->GetJSObjectPreserveColor();
+            JSObject* jso = to->GetJSObject();
             if (jso) {
                 NS_ASSERTION(JS_IsAboutToBeFinalized(jso), "bad!");
                 JS_SetPrivate(jso, nsnull);
                 to->JSObjectFinalized();
             }
 
             // We also need to release any native pointers held...
             nsISupports* obj = to->GetNative();
@@ -1358,18 +1358,18 @@ XPCWrappedNative::SystemIsBeingShutDown(
     }
 
     // cleanup the tearoffs...
 
     XPCWrappedNativeTearOffChunk* chunk;
     for (chunk = &mFirstChunk; chunk; chunk = chunk->mNextChunk) {
         XPCWrappedNativeTearOff* to = chunk->mTearOffs;
         for (int i = XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK-1; i >= 0; i--, to++) {
-            if (JSObject *jso = to->GetJSObjectPreserveColor()) {
-                JS_SetPrivate(jso, nsnull);
+            if (to->GetJSObject()) {
+                JS_SetPrivate(to->GetJSObject(), nsnull);
                 to->SetJSObject(nsnull);
             }
             // We leak the tearoff mNative
             // (for the same reason we leak mIdentity - see above).
             to->SetNative(nsnull);
             to->SetInterface(nsnull);
         }
     }
@@ -1772,17 +1772,17 @@ XPCWrappedNative::FindTearOff(XPCCallCon
          lastChunk = chunk, chunk = chunk->mNextChunk) {
         to = chunk->mTearOffs;
         XPCWrappedNativeTearOff* const end = chunk->mTearOffs +
             XPC_WRAPPED_NATIVE_TEAROFFS_PER_CHUNK;
         for (to = chunk->mTearOffs;
              to < end;
              to++) {
             if (to->GetInterface() == aInterface) {
-                if (needJSObject && !to->GetJSObjectPreserveColor()) {
+                if (needJSObject && !to->GetJSObject()) {
                     AutoMarkingWrappedNativeTearOffPtr tearoff(ccx, to);
                     JSBool ok = InitTearOffJSObject(ccx, to);
                     // During shutdown, we don't sweep tearoffs.  So make sure
                     // to unmark manually in case the auto-marker marked us.
                     // We shouldn't ever be getting here _during_ our
                     // Mark/Sweep cycle, so this should be safe.
                     to->Unmark();
                     if (!ok) {
--- a/js/xpconnect/src/XPCWrappedNativeScope.cpp
+++ b/js/xpconnect/src/XPCWrappedNativeScope.cpp
@@ -322,18 +322,16 @@ XPCWrappedNativeScope::GetPrototypeNoHel
         mPrototypeNoHelper =
             xpc_NewSystemInheritingJSObject(ccx,
                                             js::Jsvalify(&XPC_WN_NoHelper_Proto_JSClass),
                                             mPrototypeJSObject,
                                             false, mGlobalJSObject);
 
         NS_ASSERTION(mPrototypeNoHelper,
                      "Failed to create prototype for wrappers w/o a helper");
-    } else {
-        xpc_UnmarkGrayObject(mPrototypeNoHelper);
     }
 
     return mPrototypeNoHelper;
 }
 
 static JSDHashOperator
 WrappedNativeJSGCThingTracer(JSDHashTable *table, JSDHashEntryHdr *hdr,
                              uint32_t number, void *arg)
--- a/js/xpconnect/src/dombindings.cpp
+++ b/js/xpconnect/src/dombindings.cpp
@@ -486,20 +486,18 @@ template<class LC>
 JSObject *
 ListBase<LC>::getPrototype(JSContext *cx, XPCWrappedNativeScope *scope)
 {
     nsDataHashtable<nsDepCharHashKey, JSObject*> &cache =
         scope->GetCachedDOMPrototypes();
 
     JSObject *interfacePrototype;
     if (cache.IsInitialized()) {
-        if (cache.Get(sInterfaceClass.name, &interfacePrototype)) {
-            xpc_UnmarkGrayObject(interfacePrototype);
+        if (cache.Get(sInterfaceClass.name, &interfacePrototype))
             return interfacePrototype;
-        }
     } else if (!cache.Init()) {
         return NULL;
     }
 
     JSObject* proto = Base::getPrototype(cx, scope);
     if (!proto)
         return NULL;
 
--- a/js/xpconnect/src/xpcprivate.h
+++ b/js/xpconnect/src/xpcprivate.h
@@ -2377,18 +2377,17 @@ class XPCWrappedNativeTearOff
 public:
     JSBool IsAvailable() const {return mInterface == nsnull;}
     JSBool IsReserved()  const {return mInterface == (XPCNativeInterface*)1;}
     JSBool IsValid()     const {return !IsAvailable() && !IsReserved();}
     void   SetReserved()       {mInterface = (XPCNativeInterface*)1;}
 
     XPCNativeInterface* GetInterface() const {return mInterface;}
     nsISupports*        GetNative()    const {return mNative;}
-    JSObject*           GetJSObject();
-    JSObject*           GetJSObjectPreserveColor() const;
+    JSObject*           GetJSObject()  const;
     void SetInterface(XPCNativeInterface*  Interface) {mInterface = Interface;}
     void SetNative(nsISupports*  Native)              {mNative = Native;}
     void SetJSObject(JSObject*  JSObj);
 
     void JSObjectFinalized() {SetJSObject(nsnull);}
 
     XPCWrappedNativeTearOff()
         : mInterface(nsnull), mNative(nsnull), mJSObject(nsnull) {}
--- a/js/xpconnect/wrappers/WrapperFactory.cpp
+++ b/js/xpconnect/wrappers/WrapperFactory.cpp
@@ -98,20 +98,18 @@ WrapperFactory::WaiveXray(JSContext *cx,
     // the .wrappedJSObject also wraps the outer window.
     obj = GetCurrentOuter(cx, obj);
 
     {
         // See if we already have a waiver wrapper for this object.
         CompartmentPrivate *priv =
             (CompartmentPrivate *)JS_GetCompartmentPrivate(cx, js::GetObjectCompartment(obj));
         JSObject *wobj = nsnull;
-        if (priv && priv->waiverWrapperMap) {
+        if (priv && priv->waiverWrapperMap)
             wobj = priv->waiverWrapperMap->Find(obj);
-            xpc_UnmarkGrayObject(wobj);
-        }
 
         // No wrapper yet, make one.
         if (!wobj) {
             JSObject *proto = js::GetObjectProto(obj);
             if (proto && !(proto = WaiveXray(cx, proto)))
                 return nsnull;
 
             JSAutoEnterCompartment ac;