Bug 739899 - Eliminate the distinction between full and compartment GCs (r=igor)
author: Bill McCloskey <wmccloskey@mozilla.com>
Mon, 02 Apr 2012 18:29:11 -0700
changeset 91132 94199cf080a3a26d1d600445a1ed2c198e1b4b20
parent 91131 d41b23cd23596a204e0a5338045a9bc729963d8d
child 91133 94efe1ec3367688a9871c67da042e59750b6543b
push id: 667
push user: tim.taubert@gmx.de
push date: Tue, 10 Apr 2012 10:56:50 +0000
reviewers: igor
bugs: 739899
milestone: 14.0a1
Bug 739899 - Eliminate the distinction between full and compartment GCs (r=igor)
js/src/gc/Statistics.cpp
js/src/gc/Statistics.h
js/src/jsapi.cpp
js/src/jsatom.cpp
js/src/jsatom.h
js/src/jscntxt.cpp
js/src/jscntxt.h
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgc.h
js/src/jsgcmark.cpp
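
In short, callers no longer pass a `full` boolean into GC/GCSlice; they schedule the compartments they want collected beforehand. A minimal sketch of the two calling patterns this patch introduces (illustrative only, using the js::PrepareForFullGC / js::PrepareCompartmentForGC / js::GC declarations from the jsgc.h hunk below; not part of the patch itself):

    // Full GC: schedule every compartment, then collect.
    void RunFullGC(JSContext *cx)
    {
        js::PrepareForFullGC(cx->runtime);            // schedules every compartment
        js::GC(cx, js::GC_NORMAL, js::gcreason::API);
    }

    // Compartment GC: schedule only the compartment(s) of interest.
    void RunCompartmentGC(JSContext *cx, JSCompartment *comp)
    {
        JS_ASSERT(comp != cx->runtime->atomsCompartment);  // atoms cannot be collected alone
        js::PrepareCompartmentForGC(comp);
        js::GC(cx, js::GC_NORMAL, js::gcreason::API);
    }
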
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -323,17 +323,18 @@ Statistics::formatData(StatisticsSeriali
 
     double mmu20 = computeMMU(20 * PRMJ_USEC_PER_MSEC);
     double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
 
     ss.beginObject(NULL);
     if (ss.isJSON())
         ss.appendNumber("Timestamp", "%llu", "", (unsigned long long)timestamp);
     ss.appendNumber("Total Time", "%.1f", "ms", t(total));
-    ss.appendString("Type", wasFullGC ? "global" : "compartment");
+    ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
+    ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
     ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
     ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
     if (slices.length() > 1 || ss.isJSON())
         ss.appendNumber("Max Pause", "%.1f", "ms", t(longest));
     else
         ss.appendString("Reason", ExplainReason(slices[0].reason));
     if (nonincrementalReason || ss.isJSON()) {
         ss.appendString("Nonincremental Reason",
@@ -393,17 +394,18 @@ Statistics::formatJSON(uint64_t timestam
     return ss.finishJSString();
 }
 
 Statistics::Statistics(JSRuntime *rt)
   : runtime(rt),
     startupTime(PRMJ_Now()),
     fp(NULL),
     fullFormat(false),
-    wasFullGC(false),
+    collectedCount(0),
+    compartmentCount(0),
     nonincrementalReason(NULL)
 {
     PodArrayZero(phaseTotals);
     PodArrayZero(counts);
 
     char *env = getenv("MOZ_GCTIMER");
     if (!env || strcmp(env, "none") == 0) {
         fp = NULL;
@@ -487,46 +489,48 @@ Statistics::endGC()
 {
     Probes::GCEnd();
     crash::SnapshotGCStack();
 
     for (int i = 0; i < PHASE_LIMIT; i++)
         phaseTotals[i] += phaseTimes[i];
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback) {
-        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, wasFullGC ? 0 : 1);
+        (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == compartmentCount ? 0 : 1);
         (*cb)(JS_TELEMETRY_GC_MS, t(gcDuration()));
         (*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
         (*cb)(JS_TELEMETRY_GC_SWEEP_MS, t(phaseTimes[PHASE_SWEEP]));
         (*cb)(JS_TELEMETRY_GC_NON_INCREMENTAL, !!nonincrementalReason);
         (*cb)(JS_TELEMETRY_GC_INCREMENTAL_DISABLED, !runtime->gcIncrementalEnabled);
 
         double mmu50 = computeMMU(50 * PRMJ_USEC_PER_MSEC);
         (*cb)(JS_TELEMETRY_GC_MMU_50, mmu50 * 100);
     }
 
     if (fp)
         printStats();
 }
 
 void
-Statistics::beginSlice(bool full, gcreason::Reason reason)
+Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason)
 {
-    wasFullGC = full;
+    this->collectedCount = collectedCount;
+    this->compartmentCount = compartmentCount;
 
     bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (first)
         beginGC();
 
     SliceData data(reason, PRMJ_Now());
     (void) slices.append(data); /* Ignore any OOMs here. */
 
     if (JSAccumulateTelemetryDataCallback cb = runtime->telemetryCallback)
         (*cb)(JS_TELEMETRY_GC_REASON, reason);
 
+    bool wasFullGC = collectedCount == compartmentCount;
     if (GCSliceCallback cb = runtime->gcSliceCallback)
         (*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
 }
 
 void
 Statistics::endSlice()
 {
     slices.back().end = PRMJ_Now();
@@ -535,16 +539,17 @@ Statistics::endSlice()
         (*cb)(JS_TELEMETRY_GC_SLICE_MS, t(slices.back().end - slices.back().start));
         (*cb)(JS_TELEMETRY_GC_RESET, !!slices.back().resetReason);
     }
 
     bool last = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
     if (last)
         endGC();
 
+    bool wasFullGC = collectedCount == compartmentCount;
     if (GCSliceCallback cb = runtime->gcSliceCallback) {
         if (last)
             (*cb)(runtime, GC_CYCLE_END, GCDescription(!wasFullGC));
         else
             (*cb)(runtime, GC_SLICE_END, GCDescription(!wasFullGC));
     }
 
     /* Do this after the slice callback since it uses these values. */
--- a/js/src/gc/Statistics.h
+++ b/js/src/gc/Statistics.h
@@ -89,17 +89,17 @@ class StatisticsSerializer;
 
 struct Statistics {
     Statistics(JSRuntime *rt);
     ~Statistics();
 
     void beginPhase(Phase phase);
     void endPhase(Phase phase);
 
-    void beginSlice(bool full, gcreason::Reason reason);
+    void beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason);
     void endSlice();
 
     void reset(const char *reason) { slices.back().resetReason = reason; }
     void nonincremental(const char *reason) { nonincrementalReason = reason; }
 
     void count(Stat s) {
         JS_ASSERT(s < STAT_LIMIT);
         counts[s]++;
@@ -111,17 +111,18 @@ struct Statistics {
   private:
     JSRuntime *runtime;
 
     int64_t startupTime;
 
     FILE *fp;
     bool fullFormat;
 
-    bool wasFullGC;
+    int collectedCount;
+    int compartmentCount;
     const char *nonincrementalReason;
 
     struct SliceData {
         SliceData(gcreason::Reason reason, int64_t start)
           : reason(reason), resetReason(NULL), start(start)
         {
             PodArrayZero(phaseTimes);
         }
@@ -157,19 +158,23 @@ struct Statistics {
     int64_t gcDuration();
     void printStats();
     bool formatData(StatisticsSerializer &ss, uint64_t timestamp);
 
     double computeMMU(int64_t resolution);
 };
 
 struct AutoGCSlice {
-    AutoGCSlice(Statistics &stats, bool full, gcreason::Reason reason
+    AutoGCSlice(Statistics &stats, int collectedCount, int compartmentCount, gcreason::Reason reason
                 JS_GUARD_OBJECT_NOTIFIER_PARAM)
-      : stats(stats) { JS_GUARD_OBJECT_NOTIFIER_INIT; stats.beginSlice(full, reason); }
+      : stats(stats)
+    {
+        JS_GUARD_OBJECT_NOTIFIER_INIT;
+        stats.beginSlice(collectedCount, compartmentCount, reason);
+    }
     ~AutoGCSlice() { stats.endSlice(); }
 
     Statistics &stats;
     JS_DECL_USE_GUARD_OBJECT_NOTIFIER
 };
 
 struct AutoPhase {
     AutoPhase(Statistics &stats, Phase phase JS_GUARD_OBJECT_NOTIFIER_PARAM)
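
On the statistics side, the single wasFullGC flag becomes a pair of counters filled in at the start of each slice; whether a cycle was a full GC is then derived rather than stored. A hedged sketch of how the collector gathers and hands over the counters (it mirrors the Collect() hunk in jsgc.cpp later in this patch; rt, reason, and gcstats as in that function):

    int compartmentCount = 0, collectedCount = 0;
    for (js::CompartmentsIter c(rt); !c.done(); c.next()) {
        compartmentCount++;
        if (c->isGCScheduled())
            collectedCount++;
    }
    gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);

    // A cycle counts as a full GC exactly when every compartment was scheduled:
    bool wasFullGC = (collectedCount == compartmentCount);
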
--- a/js/src/jsapi.cpp
+++ b/js/src/jsapi.cpp
@@ -725,28 +725,24 @@ JSRuntime::JSRuntime()
     gcMaxMallocBytes(0),
     gcNumArenasFreeCommitted(0),
     gcVerifyData(NULL),
     gcChunkAllocationSinceLastGC(false),
     gcNextFullGCTime(0),
     gcJitReleaseTime(0),
     gcMode(JSGC_MODE_GLOBAL),
     gcIsNeeded(0),
-    gcFullIsNeeded(0),
     gcWeakMapList(NULL),
     gcStats(thisFromCtor()),
     gcNumber(0),
     gcStartNumber(0),
     gcTriggerReason(gcreason::NO_REASON),
-    gcIsFull(false),
     gcStrictCompartmentChecking(false),
     gcIncrementalState(gc::NO_INCREMENTAL),
-    gcCompartmentCreated(false),
     gcLastMarkSlice(false),
-    gcIncrementalIsFull(false),
     gcInterFrameGC(0),
     gcSliceBudget(SliceBudget::Unlimited),
     gcIncrementalEnabled(true),
     gcPoke(false),
     gcRunning(false),
 #ifdef JS_GC_ZEAL
     gcZeal_(0),
     gcZealFrequency(0),
@@ -2869,19 +2865,20 @@ JS_CompartmentGC(JSContext *cx, JSCompar
 {
     AssertNoGC(cx);
 
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
     if (comp) {
         PrepareCompartmentForGC(comp);
-        GC(cx, false, GC_NORMAL, gcreason::API);
+        GC(cx, GC_NORMAL, gcreason::API);
     } else {
-        GC(cx, true, GC_NORMAL, gcreason::API);
+        PrepareForFullGC(cx->runtime);
+        GC(cx, GC_NORMAL, gcreason::API);
     }
 }
 
 JS_PUBLIC_API(void)
 JS_GC(JSContext *cx)
 {
     JS_CompartmentGC(cx, NULL);
 }
--- a/js/src/jsatom.cpp
+++ b/js/src/jsatom.cpp
@@ -219,17 +219,17 @@ js_FinishAtomState(JSRuntime *rt)
     }
 
     FreeOp fop(rt, false, false);
     for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront())
         r.front().asPtr()->finalize(&fop);
 }
 
 bool
-js_InitCommonAtoms(JSContext *cx)
+js::InitCommonAtoms(JSContext *cx)
 {
     JSAtomState *state = &cx->runtime->atomState;
     JSAtom **atoms = state->commonAtomsStart();
     for (size_t i = 0; i < ArrayLength(js_common_atom_names); i++, atoms++) {
         JSAtom *atom = js_Atomize(cx, js_common_atom_names[i], strlen(js_common_atom_names[i]),
                                   InternAtom);
         if (!atom)
             return false;
@@ -237,29 +237,29 @@ js_InitCommonAtoms(JSContext *cx)
     }
 
     state->clearLazyAtoms();
     cx->runtime->emptyString = state->emptyAtom;
     return true;
 }
 
 void
-js_FinishCommonAtoms(JSContext *cx)
+js::FinishCommonAtoms(JSRuntime *rt)
 {
-    cx->runtime->emptyString = NULL;
-    cx->runtime->atomState.junkAtoms();
+    rt->emptyString = NULL;
+    rt->atomState.junkAtoms();
 }
 
 void
-js_TraceAtomState(JSTracer *trc)
+js::MarkAtomState(JSTracer *trc, bool markAll)
 {
     JSRuntime *rt = trc->runtime;
     JSAtomState *state = &rt->atomState;
 
-    if (rt->gcKeepAtoms) {
+    if (markAll) {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             JSAtom *tmp = r.front().asPtr();
             MarkStringRoot(trc, &tmp, "locked_atom");
             JS_ASSERT(tmp == r.front().asPtr());
         }
     } else {
         for (AtomSet::Range r = state->atoms.all(); !r.empty(); r.popFront()) {
             AtomStateEntry entry = r.front();
@@ -269,17 +269,17 @@ js_TraceAtomState(JSTracer *trc)
             JSAtom *tmp = entry.asPtr();
             MarkStringRoot(trc, &tmp, "interned_atom");
             JS_ASSERT(tmp == entry.asPtr());
         }
     }
 }
 
 void
-js_SweepAtomState(JSRuntime *rt)
+js::SweepAtomState(JSRuntime *rt)
 {
     JSAtomState *state = &rt->atomState;
 
     for (AtomSet::Enum e(state->atoms); !e.empty(); e.popFront()) {
         AtomStateEntry entry = e.front();
 
         if (entry.isTagged()) {
             /* Pinned or interned key cannot be finalized. */
--- a/js/src/jsatom.h
+++ b/js/src/jsatom.h
@@ -403,29 +403,29 @@ js_InitAtomState(JSRuntime *rt);
  */
 extern void
 js_FinishAtomState(JSRuntime *rt);
 
 /*
  * Atom tracing and garbage collection hooks.
  */
 
-extern void
-js_TraceAtomState(JSTracer *trc);
+namespace js {
 
 extern void
-js_SweepAtomState(JSRuntime *rt);
+MarkAtomState(JSTracer *trc, bool markAll);
+
+extern void
+SweepAtomState(JSRuntime *rt);
 
 extern bool
-js_InitCommonAtoms(JSContext *cx);
+InitCommonAtoms(JSContext *cx);
 
 extern void
-js_FinishCommonAtoms(JSContext *cx);
-
-namespace js {
+FinishCommonAtoms(JSRuntime *rt);
 
 /* N.B. must correspond to boolean tagging behavior. */
 enum InternBehavior
 {
     DoNotInternAtom = false,
     InternAtom = true
 };
 
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -202,17 +202,17 @@ js_NewContext(JSRuntime *rt, size_t stac
      * as well as "first".
      */
     if (first) {
 #ifdef JS_THREADSAFE
         JS_BeginRequest(cx);
 #endif
         bool ok = rt->staticStrings.init(cx);
         if (ok)
-            ok = js_InitCommonAtoms(cx);
+            ok = InitCommonAtoms(cx);
 
 #ifdef JS_THREADSAFE
         JS_EndRequest(cx);
 #endif
         if (!ok) {
             js_DestroyContext(cx, JSDCM_NEW_FAILED);
             return NULL;
         }
@@ -265,27 +265,29 @@ js_DestroyContext(JSContext *cx, JSDestr
         /*
          * Dump remaining type inference results first. This printing
          * depends on atoms still existing.
          */
         for (CompartmentsIter c(rt); !c.done(); c.next())
             c->types.print(cx, false);
 
         /* Unpin all common atoms before final GC. */
-        js_FinishCommonAtoms(cx);
+        FinishCommonAtoms(cx->runtime);
 
         /* Clear debugging state to remove GC roots. */
         for (CompartmentsIter c(rt); !c.done(); c.next())
             c->clearTraps(cx);
         JS_ClearAllWatchPoints(cx);
 
-        GC(cx, true, GC_NORMAL, gcreason::LAST_CONTEXT);
+        PrepareForFullGC(rt);
+        GC(cx, GC_NORMAL, gcreason::LAST_CONTEXT);
     } else if (mode == JSDCM_FORCE_GC) {
         JS_ASSERT(!rt->gcRunning);
-        GC(cx, true, GC_NORMAL, gcreason::DESTROY_CONTEXT);
+        PrepareForFullGC(rt);
+        GC(cx, GC_NORMAL, gcreason::DESTROY_CONTEXT);
     } else if (mode == JSDCM_MAYBE_GC) {
         JS_ASSERT(!rt->gcRunning);
         JS_MaybeGC(cx);
     }
 
 #ifdef JS_THREADSAFE
     {
         AutoLockGC lock(rt);
@@ -878,17 +880,17 @@ js_InvokeOperationCallback(JSContext *cx
     /*
      * Reset the callback counter first, then run GC and yield. If another
      * thread is racing us here we will accumulate another callback request
      * which will be serviced at the next opportunity.
      */
     JS_ATOMIC_SET(&rt->interrupt, 0);
 
     if (rt->gcIsNeeded)
-        GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, rt->gcTriggerReason);
+        GCSlice(cx, GC_NORMAL, rt->gcTriggerReason);
 
 #ifdef JS_THREADSAFE
     /*
      * We automatically yield the current context every time the operation
      * callback is hit since we might be called as a result of an impending
      * GC on another thread, which would deadlock if we do not yield.
      * Operation callbacks are supposed to happen rarely (seconds, not
      * milliseconds) so it is acceptable to yield at every callback.
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -347,54 +347,44 @@ struct JSRuntime : js::RuntimeFriendFiel
     JSGCMode            gcMode;
 
     /*
      * These flags must be kept separate so that a thread requesting a
      * compartment GC doesn't cancel another thread's concurrent request for a
      * full GC.
      */
     volatile uintptr_t  gcIsNeeded;
-    volatile uintptr_t  gcFullIsNeeded;
 
     js::WeakMapBase     *gcWeakMapList;
     js::gcstats::Statistics gcStats;
 
     /* Incremented on every GC slice. */
     uint64_t            gcNumber;
 
     /* The gcNumber at the time of the most recent GC's first slice. */
     uint64_t            gcStartNumber;
 
     /* The reason that an interrupt-triggered GC should be called. */
     js::gcreason::Reason gcTriggerReason;
 
-    /* Is the currently running GC a full GC or a compartmental GC? */
-    bool                gcIsFull;
-
     /*
      * If this is true, all marked objects must belong to a compartment being
      * GCed. This is used to look for compartment bugs.
      */
     bool                gcStrictCompartmentChecking;
 
     /*
      * The current incremental GC phase. During non-incremental GC, this is
      * always NO_INCREMENTAL.
      */
     js::gc::State       gcIncrementalState;
 
-    /* Indicates that a new compartment was created during incremental GC. */
-    bool                gcCompartmentCreated;
-
     /* Indicates that the last incremental slice exhausted the mark stack. */
     bool                gcLastMarkSlice;
 
-    /* Is there a full incremental GC in progress. */
-    bool                gcIncrementalIsFull;
-
     /*
      * Indicates that a GC slice has taken place in the middle of an animation
      * frame, rather than at the beginning. In this case, the next slice will be
      * delayed so that we don't get back-to-back slices.
      */
     volatile uintptr_t  gcInterFrameGC;
 
     /* Default budget for incremental GC slice. See SliceBudget in jsgc.h. */
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -129,39 +129,42 @@ JS_NewObjectWithUniqueType(JSContext *cx
     if (!obj || !obj->setSingletonType(cx))
         return NULL;
     return obj;
 }
 
 JS_FRIEND_API(void)
 js::GCForReason(JSContext *cx, gcreason::Reason reason)
 {
-    GC(cx, true, GC_NORMAL, reason);
+    PrepareForFullGC(cx->runtime);
+    GC(cx, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::CompartmentGCForReason(JSContext *cx, JSCompartment *comp, gcreason::Reason reason)
 {
     /* We cannot GC the atoms compartment alone; use a full GC instead. */
     JS_ASSERT(comp != cx->runtime->atomsCompartment);
 
     PrepareCompartmentForGC(comp);
-    GC(cx, false, GC_NORMAL, reason);
+    GC(cx, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 js::ShrinkingGC(JSContext *cx, gcreason::Reason reason)
 {
-    GC(cx, true, GC_SHRINK, reason);
+    PrepareForFullGC(cx->runtime);
+    GC(cx, GC_SHRINK, reason);
 }
 
 JS_FRIEND_API(void)
 js::IncrementalGC(JSContext *cx, gcreason::Reason reason)
 {
-    GCSlice(cx, true, GC_NORMAL, reason);
+    PrepareForFullGC(cx->runtime);
+    GCSlice(cx, GC_NORMAL, reason);
 }
 
 JS_FRIEND_API(void)
 JS_ShrinkGCBuffers(JSRuntime *rt)
 {
     ShrinkGCBuffers(rt);
 }
 
@@ -748,26 +751,27 @@ NotifyDidPaint(JSContext *cx)
     JSRuntime *rt = cx->runtime;
 
     if (rt->gcZeal() == gc::ZealFrameVerifierValue) {
         gc::VerifyBarriers(cx);
         return;
     }
 
     if (rt->gcZeal() == gc::ZealFrameGCValue) {
-        GCSlice(cx, true, GC_NORMAL, gcreason::REFRESH_FRAME);
+        PrepareForFullGC(rt);
+        GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
         return;
     }
 
     if (rt->gcIncrementalState != gc::NO_INCREMENTAL && !rt->gcInterFrameGC) {
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             if (c->needsBarrier())
                 PrepareCompartmentForGC(c);
         }
-        GCSlice(cx, rt->gcIncrementalIsFull, GC_NORMAL, gcreason::REFRESH_FRAME);
+        GCSlice(cx, GC_NORMAL, gcreason::REFRESH_FRAME);
     }
 
     rt->gcInterFrameGC = false;
 }
 
 extern JS_FRIEND_API(bool)
 IsIncrementalGCEnabled(JSRuntime *rt)
 {
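
For embedders going through the friend API, the entry points keep their signatures; the compartment-scheduling step now happens inside the helpers. Illustrative call sites (an assumption for context, not taken from the patch):

    js::GCForReason(cx, js::gcreason::API);                    // full GC
    js::ShrinkingGC(cx, js::gcreason::API);                    // full GC, releases empty chunks
    js::CompartmentGCForReason(cx, comp, js::gcreason::API);   // collect just |comp|
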
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1637,39 +1637,39 @@ ArenaLists::finalizeShapes(FreeOp *fop)
 
 void
 ArenaLists::finalizeScripts(FreeOp *fop)
 {
     finalizeNow(fop, FINALIZE_SCRIPT);
 }
 
 static void
-RunLastDitchGC(JSContext *cx, gcreason::Reason reason, bool full)
+RunLastDitchGC(JSContext *cx, gcreason::Reason reason)
 {
     JSRuntime *rt = cx->runtime;
 
     /* The last ditch GC preserves all atoms. */
     AutoKeepAtoms keep(rt);
-    GC(cx, full, GC_NORMAL, reason);
+    GC(cx, GC_NORMAL, reason);
 }
 
 /* static */ void *
 ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
 {
     JS_ASSERT(cx->compartment->arenas.freeLists[thingKind].isEmpty());
 
     JSCompartment *comp = cx->compartment;
     JSRuntime *rt = comp->rt;
     JS_ASSERT(!rt->gcRunning);
 
     bool runGC = rt->gcIncrementalState != NO_INCREMENTAL && comp->gcBytes > comp->gcTriggerBytes;
     for (;;) {
         if (JS_UNLIKELY(runGC)) {
             PrepareCompartmentForGC(comp);
-            RunLastDitchGC(cx, gcreason::LAST_DITCH, rt->gcFullIsNeeded);
+            RunLastDitchGC(cx, gcreason::LAST_DITCH);
 
             /*
              * The JSGC_END callback can legitimately allocate new GC
              * things and populate the free list. If that happens, just
              * return that list head.
              */
             size_t thingSize = Arena::thingSize(thingKind);
             if (void *thing = comp->arenas.allocateFromFreeList(thingKind, thingSize))
@@ -2235,17 +2235,17 @@ AutoGCRooter::traceAll(JSTracer *trc)
 namespace js {
 
 static void
 MarkRuntime(JSTracer *trc, bool useSavedRoots = false)
 {
     JSRuntime *rt = trc->runtime;
     JS_ASSERT(trc->callback != GCMarker::GrayCallback);
 
-    if (IS_GC_MARKING_TRACER(trc) && !rt->gcIsFull) {
+    if (IS_GC_MARKING_TRACER(trc)) {
         for (CompartmentsIter c(rt); !c.done(); c.next()) {
             if (!c->isCollecting())
                 c->markCrossCompartmentWrappers(trc);
         }
         Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
     }
 
     AutoGCRooter::traceAll(trc);
@@ -2260,17 +2260,30 @@ MarkRuntime(JSTracer *trc, bool useSaved
         gc_lock_traversal(r.front(), trc);
 
     if (rt->scriptAndCountsVector) {
         ScriptAndCountsVector &vec = *rt->scriptAndCountsVector;
         for (size_t i = 0; i < vec.length(); i++)
             MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
     }
 
-    js_TraceAtomState(trc);
+    /*
+     * Atoms are not in the cross-compartment map. So if there are any
+     * compartments that are not being collected, we are not allowed to collect
+     * atoms. Otherwise, the non-collected compartments could contain pointers
+     * to atoms that we would miss.
+     */
+    bool isFullGC = true;
+    if (IS_GC_MARKING_TRACER(trc)) {
+        for (CompartmentsIter c(rt); !c.done(); c.next()) {
+            if (!c->isCollecting())
+                isFullGC = false;
+        }
+    }
+    MarkAtomState(trc, rt->gcKeepAtoms || !isFullGC);
     rt->staticStrings.trace(trc);
 
     for (ContextIter acx(rt); !acx.done(); acx.next())
         acx->mark(trc);
 
     /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         if (IS_GC_MARKING_TRACER(trc) && !c->isCollecting())
@@ -2317,105 +2330,108 @@ MarkRuntime(JSTracer *trc, bool useSaved
             (*op)(trc, rt->gcGrayRootsData);
             gcmarker->endBufferingGrayRoots();
         } else {
             (*op)(trc, rt->gcGrayRootsData);
         }
     }
 }
 
+static void
+TriggerOperationCallback(JSRuntime *rt, gcreason::Reason reason)
+{
+    if (rt->gcIsNeeded)
+        return;
+
+    rt->gcIsNeeded = true;
+    rt->gcTriggerReason = reason;
+    rt->triggerOperationCallback();
+}
+
 void
 TriggerGC(JSRuntime *rt, gcreason::Reason reason)
 {
     JS_ASSERT(rt->onOwnerThread());
 
-    if (rt->gcRunning || rt->gcIsNeeded)
+    if (rt->gcRunning)
         return;
 
-    /* Trigger the GC when it is safe to call an operation callback. */
-    rt->gcIsNeeded = true;
-    rt->gcFullIsNeeded = true;
-    rt->gcTriggerReason = reason;
-    rt->triggerOperationCallback();
+    PrepareForFullGC(rt);
+    TriggerOperationCallback(rt, reason);
 }
 
 void
 TriggerCompartmentGC(JSCompartment *comp, gcreason::Reason reason)
 {
     JSRuntime *rt = comp->rt;
-    JS_ASSERT(!rt->gcRunning);
+    JS_ASSERT(rt->onOwnerThread());
+
+    if (rt->gcRunning)
+        return;
 
     if (rt->gcZeal() == ZealAllocValue) {
         TriggerGC(rt, reason);
         return;
     }
 
     if (comp == rt->atomsCompartment) {
         /* We can't do a compartmental GC of the default compartment. */
         TriggerGC(rt, reason);
         return;
     }
 
     PrepareCompartmentForGC(comp);
-
-    if (rt->gcIsNeeded)
-        return;
-
-    /*
-     * Trigger the GC when it is safe to call an operation callback on any
-     * thread.
-     */
-    rt->gcIsNeeded = true;
-    rt->gcTriggerReason = reason;
-    rt->triggerOperationCallback();
+    TriggerOperationCallback(rt, reason);
 }
 
 void
 MaybeGC(JSContext *cx)
 {
     JSRuntime *rt = cx->runtime;
     JS_ASSERT(rt->onOwnerThread());
 
     if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
-        GC(cx, true, GC_NORMAL, gcreason::MAYBEGC);
+        PrepareForFullGC(rt);
+        GC(cx, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     JSCompartment *comp = cx->compartment;
     if (rt->gcIsNeeded) {
-        GCSlice(cx, rt->gcFullIsNeeded, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     if (comp->gcBytes > 8192 &&
         comp->gcBytes >= 3 * (comp->gcTriggerBytes / 4) &&
         rt->gcIncrementalState == NO_INCREMENTAL)
     {
         PrepareCompartmentForGC(comp);
-        GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     if (comp->gcMallocAndFreeBytes > comp->gcTriggerMallocAndFreeBytes) {
         PrepareCompartmentForGC(comp);
-        GCSlice(cx, false, GC_NORMAL, gcreason::MAYBEGC);
+        GCSlice(cx, GC_NORMAL, gcreason::MAYBEGC);
         return;
     }
 
     /*
      * Access to the counters and, on 32 bit, setting gcNextFullGCTime below
      * is not atomic and a race condition could trigger or suppress the GC. We
      * tolerate this.
      */
     int64_t now = PRMJ_Now();
     if (rt->gcNextFullGCTime && rt->gcNextFullGCTime <= now) {
         if (rt->gcChunkAllocationSinceLastGC ||
             rt->gcNumArenasFreeCommitted > FreeCommittedArenasThreshold)
         {
-            GCSlice(cx, true, GC_SHRINK, gcreason::MAYBEGC);
+            PrepareForFullGC(rt);
+            GCSlice(cx, GC_SHRINK, gcreason::MAYBEGC);
         } else {
             rt->gcNextFullGCTime = now + GC_IDLE_FULL_SPAN;
         }
     }
 }
 
 static void
 DecommitArenasFromAvailableList(JSRuntime *rt, Chunk **availableListHeadp)
@@ -2804,16 +2820,23 @@ GCHelperThread::doSweep()
         shrinkFlag = false;
         ExpireChunksAndArenas(rt, true);
     }
 }
 
 #endif /* JS_THREADSAFE */
 
 void
+PrepareForFullGC(JSRuntime *rt)
+{
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        c->scheduleGC();
+}
+
+void
 PrepareCompartmentForGC(JSCompartment *comp)
 {
     comp->scheduleGC();
 }
 
 } /* namespace js */
 
 static bool
@@ -2840,17 +2863,17 @@ SweepCompartments(FreeOp *fop, JSGCInvoc
     JSCompartment **end = rt->compartments.end();
     JSCompartment **write = read;
     JS_ASSERT(rt->compartments.length() >= 1);
     JS_ASSERT(*rt->compartments.begin() == rt->atomsCompartment);
 
     while (read < end) {
         JSCompartment *compartment = *read++;
 
-        if (!compartment->hold &&
+        if (!compartment->hold && compartment->isCollecting() &&
             (compartment->arenas.arenaListsAreEmpty() || !rt->hasContexts()))
         {
             compartment->arenas.checkEmptyFreeLists();
             if (callback)
                 callback(fop, compartment);
             if (compartment->principals)
                 JS_DropPrincipals(rt, compartment->principals);
             fop->delete_(compartment);
@@ -2973,21 +2996,19 @@ EndMarkPhase(JSContext *cx)
 
 #ifdef DEBUG
     if (rt->gcIncrementalState != NO_INCREMENTAL)
         ValidateIncrementalMarking(cx);
 #endif
 
 #ifdef DEBUG
     /* Make sure that we didn't mark an object in another compartment */
-    if (!rt->gcIsFull) {
-        for (CompartmentsIter c(rt); !c.done(); c.next()) {
-            JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
-                         c->arenas.checkArenaListAllUnmarked());
-        }
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        JS_ASSERT_IF(!c->isCollecting() && c != rt->atomsCompartment,
+                     c->arenas.checkArenaListAllUnmarked());
     }
 #endif
 }
 
 #ifdef DEBUG
 static void
 ValidateIncrementalMarking(JSContext *cx)
 {
@@ -3124,28 +3145,28 @@ SweepPhase(JSContext *cx, JSGCInvocation
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_FINALIZE_START);
         if (rt->gcFinalizeCallback)
             rt->gcFinalizeCallback(&fop, JSFINALIZE_START);
     }
 
     /* Finalize unreachable (key,value) pairs in all weak maps. */
     WeakMapBase::sweepAll(&rt->gcMarker);
 
-    js_SweepAtomState(rt);
+    SweepAtomState(rt);
 
     /* Collect watch points associated with unreachable objects. */
     WatchpointMap::sweepAll(rt);
 
     /* Detach unreachable debuggers and global objects from each other. */
     Debugger::sweepAll(&fop);
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_COMPARTMENTS);
 
-        bool releaseTypes = rt->gcIsFull && ReleaseObservedTypes(rt);
+        bool releaseTypes = ReleaseObservedTypes(rt);
         for (GCCompartmentsIter c(rt); !c.done(); c.next())
             c->sweep(&fop, releaseTypes);
     }
 
     {
         gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_OBJECT);
 
         /*
@@ -3189,18 +3210,17 @@ SweepPhase(JSContext *cx, JSGCInvocation
          */
         for (GCCompartmentsIter c(rt); !c.done(); c.next())
             SweepScriptFilenames(c);
 
         /*
          * This removes compartments from rt->compartment, so we do it last to make
          * sure we don't miss sweeping any compartments.
          */
-        if (rt->gcIsFull)
-            SweepCompartments(&fop, gckind);
+        SweepCompartments(&fop, gckind);
 
 #ifndef JS_THREADSAFE
         /*
          * Destroy arenas after we finished the sweeping so finalizers can safely
          * use IsAboutToBeFinalized().
          * This is done on the GCHelperThread if JS_THREADSAFE is defined.
          */
         ExpireChunksAndArenas(rt, gckind == GC_SHRINK);
@@ -3255,17 +3275,17 @@ class AutoHeapSession {
   private:
     AutoHeapSession(const AutoHeapSession&) MOZ_DELETE;
     void operator=(const AutoHeapSession&) MOZ_DELETE;
 };
 
 /* ...while this class is to be used only for garbage collection. */
 class AutoGCSession : AutoHeapSession {
   public:
-    explicit AutoGCSession(JSRuntime *rt, bool full);
+    explicit AutoGCSession(JSRuntime *rt);
     ~AutoGCSession();
 };
 
 /* Start a new heap session. */
 AutoHeapSession::AutoHeapSession(JSRuntime *rt)
   : runtime(rt)
 {
     JS_ASSERT(!rt->noGCOrAllocationCheck);
@@ -3274,62 +3294,58 @@ AutoHeapSession::AutoHeapSession(JSRunti
 }
 
 AutoHeapSession::~AutoHeapSession()
 {
     JS_ASSERT(runtime->gcRunning);
     runtime->gcRunning = false;
 }
 
-AutoGCSession::AutoGCSession(JSRuntime *rt, bool full)
+AutoGCSession::AutoGCSession(JSRuntime *rt)
   : AutoHeapSession(rt)
 {
-    rt->gcIsFull = full;
     DebugOnly<bool> any = false;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (full || c->isGCScheduled()) {
+        if (c->isGCScheduled()) {
             c->setCollecting(true);
             any = true;
         }
     }
     JS_ASSERT(any);
 
     runtime->gcIsNeeded = false;
-    runtime->gcFullIsNeeded = false;
     runtime->gcInterFrameGC = true;
 
     runtime->gcNumber++;
 
     runtime->resetGCMallocBytes();
 
     /* Clear gcMallocBytes for all compartments */
     for (CompartmentsIter c(runtime); !c.done(); c.next())
         c->resetGCMallocBytes();
 }
 
 AutoGCSession::~AutoGCSession()
 {
-    runtime->gcIsFull = false;
     for (GCCompartmentsIter c(runtime); !c.done(); c.next())
         c->setCollecting(false);
 
     runtime->gcNextFullGCTime = PRMJ_Now() + GC_IDLE_FULL_SPAN;
     runtime->gcChunkAllocationSinceLastGC = false;
 }
 
 static void
 ResetIncrementalGC(JSRuntime *rt, const char *reason)
 {
     if (rt->gcIncrementalState == NO_INCREMENTAL)
         return;
 
     for (CompartmentsIter c(rt); !c.done(); c.next())
         c->needsBarrier_ = false;
 
-    rt->gcIncrementalIsFull = false;
     rt->gcMarker.reset();
     rt->gcMarker.stop();
     rt->gcIncrementalState = NO_INCREMENTAL;
 
     JS_ASSERT(!rt->gcStrictCompartmentChecking);
 
     rt->gcStats.reset(reason);
 }
@@ -3370,17 +3386,16 @@ AutoGCSlice::~AutoGCSlice()
     JSRuntime *rt = context->runtime;
 
     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
         if (rt->gcIncrementalState == MARK) {
             c->needsBarrier_ = true;
             c->arenas.prepareForIncrementalGC(rt);
         } else {
             JS_ASSERT(rt->gcIncrementalState == NO_INCREMENTAL);
-
             c->needsBarrier_ = false;
         }
     }
 }
 
 class AutoCopyFreeListToArenas {
     JSRuntime *rt;
 
@@ -3403,18 +3418,16 @@ IncrementalGCSlice(JSContext *cx, int64_
     JSRuntime *rt = cx->runtime;
 
     AutoUnlockGC unlock(rt);
     AutoGCSlice slice(cx);
 
     gc::State initialState = rt->gcIncrementalState;
 
     if (rt->gcIncrementalState == NO_INCREMENTAL) {
-        JS_ASSERT(!rt->gcIncrementalIsFull);
-        rt->gcIncrementalIsFull = rt->gcIsFull;
         rt->gcIncrementalState = MARK_ROOTS;
         rt->gcLastMarkSlice = false;
     }
 
     if (rt->gcIncrementalState == MARK_ROOTS) {
         rt->gcMarker.start(rt);
         JS_ASSERT(IS_GC_MARKING_TRACER(&rt->gcMarker));
 
@@ -3450,18 +3463,16 @@ IncrementalGCSlice(JSContext *cx, int64_
     if (rt->gcIncrementalState == SWEEP) {
         EndMarkPhase(cx);
         SweepPhase(cx, gckind);
 
         rt->gcMarker.stop();
 
         /* JIT code was already discarded during sweeping. */
 
-        rt->gcIncrementalIsFull = false;
-
         rt->gcIncrementalState = NO_INCREMENTAL;
     }
 }
 
 class IncrementalSafety
 {
     const char *reason_;
 
@@ -3482,21 +3493,16 @@ class IncrementalSafety
         JS_ASSERT(reason_);
         return reason_;
     }
 };
 
 static IncrementalSafety
 IsIncrementalGCSafe(JSRuntime *rt)
 {
-    if (rt->gcCompartmentCreated) {
-        rt->gcCompartmentCreated = false;
-        return IncrementalSafety::Unsafe("compartment created");
-    }
-
     if (rt->gcKeepAtoms)
         return IncrementalSafety::Unsafe("gcKeepAtoms set");
 
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         if (c->activeAnalysis)
             return IncrementalSafety::Unsafe("activeAnalysis set");
     }
 
@@ -3543,32 +3549,34 @@ BudgetIncrementalGC(JSRuntime *rt, int64
             return;
         }
     }
 }
 
 /*
  * GC, repeatedly if necessary, until we think we have not created any new
  * garbage. We disable inlining to ensure that the bottom of the stack with
- * possible GC roots recorded in js_GC excludes any pointers we use during the
- * marking implementation.
+ * possible GC roots recorded in MarkRuntime excludes any pointers we use during
+ * the marking implementation.
  */
 static JS_NEVER_INLINE void
-GCCycle(JSContext *cx, bool full, int64_t budget, JSGCInvocationKind gckind)
+GCCycle(JSContext *cx, int64_t budget, JSGCInvocationKind gckind)
 {
     JSRuntime *rt = cx->runtime;
 
-    JS_ASSERT_IF(!full, !rt->atomsCompartment->isCollecting());
-    JS_ASSERT_IF(!full, rt->gcMode != JSGC_MODE_GLOBAL);
+#ifdef DEBUG
+    for (CompartmentsIter c(rt); !c.done(); c.next())
+        JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, c->isGCScheduled());
+#endif
 
     /* Recursive GC is no-op. */
     if (rt->gcRunning)
         return;
 
-    AutoGCSession gcsession(rt, full);
+    AutoGCSession gcsession(rt);
 
     /* Don't GC if we are reporting an OOM. */
     if (rt->inOOMReport)
         return;
 
 #ifdef JS_THREADSAFE
     /*
      * As we about to purge caches and clear the mark bits we must wait for
@@ -3626,18 +3634,17 @@ IsDeterministicGCReason(gcreason::Reason
     if (reason == gcreason::MAYBEGC)
         return false;
 
     return true;
 }
 #endif
 
 static void
-Collect(JSContext *cx, bool full, int64_t budget,
-        JSGCInvocationKind gckind, gcreason::Reason reason)
+Collect(JSContext *cx, int64_t budget, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
     JSRuntime *rt = cx->runtime;
     JS_AbortIfWrongThread(rt);
 
 #ifdef JS_GC_ZEAL
     if (rt->gcDeterministicOnly && !IsDeterministicGCReason(reason))
         return;
 #endif
@@ -3662,41 +3669,49 @@ Collect(JSContext *cx, bool full, int64_
             if (restart)
                 StartVerifyBarriers(cx);
         }
     } av(cx, restartVerify);
 #endif
 
     RecordNativeStackTopForGC(rt);
 
-    if (rt->gcMode == JSGC_MODE_GLOBAL)
-        full = true;
-
-    /* This is a heuristic to avoid resets. */
-    if (rt->gcIncrementalState != NO_INCREMENTAL && rt->gcIncrementalIsFull)
-        full = true;
-
-    gcstats::AutoGCSlice agc(rt->gcStats, full, reason);
+    int compartmentCount = 0;
+    int collectedCount = 0;
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        if (rt->gcMode == JSGC_MODE_GLOBAL)
+            c->scheduleGC();
+
+        /* This is a heuristic to avoid resets. */
+        if (rt->gcIncrementalState != NO_INCREMENTAL && c->needsBarrier())
+            c->scheduleGC();
+
+        compartmentCount++;
+        if (c->isGCScheduled())
+            collectedCount++;
+    }
+
+    gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
 
     do {
         /*
          * Let the API user decide to defer a GC if it wants to (unless this
          * is the last context). Invoke the callback regardless.
          */
         if (rt->gcIncrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_BEGIN);
             if (JSGCCallback callback = rt->gcCallback)
                 callback(rt, JSGC_BEGIN);
         }
 
         {
             /* Lock out other GC allocator and collector invocations. */
             AutoLockGC lock(rt);
             rt->gcPoke = false;
-            GCCycle(cx, full, budget, gckind);
+            GCCycle(cx, budget, gckind);
         }
 
         if (rt->gcIncrementalState == NO_INCREMENTAL) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_GC_END);
             if (JSGCCallback callback = rt->gcCallback)
                 callback(rt, JSGC_END);
         }
 
@@ -3705,31 +3720,32 @@ Collect(JSContext *cx, bool full, int64_
          * stop creating garbage.
          */
     } while (!rt->hasContexts() && rt->gcPoke);
 }
 
 namespace js {
 
 void
-GC(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
+GC(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
-    Collect(cx, full, SliceBudget::Unlimited, gckind, reason);
+    Collect(cx, SliceBudget::Unlimited, gckind, reason);
 }
 
 void
-GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, gcreason::Reason reason)
+GCSlice(JSContext *cx, JSGCInvocationKind gckind, gcreason::Reason reason)
 {
-    Collect(cx, full, cx->runtime->gcSliceBudget, gckind, reason);
+    Collect(cx, cx->runtime->gcSliceBudget, gckind, reason);
 }
 
 void
 GCDebugSlice(JSContext *cx, int64_t objCount)
 {
-    Collect(cx, NULL, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
+    PrepareForFullGC(cx->runtime);
+    Collect(cx, SliceBudget::WorkBudget(objCount), GC_NORMAL, gcreason::API);
 }
 
 void
 ShrinkGCBuffers(JSRuntime *rt)
 {
     AutoLockGC lock(rt);
     JS_ASSERT(!rt->gcRunning);
 #ifndef JS_THREADSAFE
@@ -3897,25 +3913,16 @@ NewCompartment(JSContext *cx, JSPrincipa
         compartment->setGCLastBytes(8192, 8192, GC_NORMAL);
 
         /*
          * Before reporting the OOM condition, |lock| needs to be cleaned up,
          * hence the scoping.
          */
         {
             AutoLockGC lock(rt);
-
-            /*
-             * If we're in the middle of an incremental GC, we cancel
-             * it. Otherwise we might fail the mark the newly created
-             * compartment fully.
-             */
-            if (rt->gcIncrementalState == MARK)
-                rt->gcCompartmentCreated = true;
-
             if (rt->compartments.append(compartment))
                 return compartment;
         }
 
         js_ReportOutOfMemory(cx);
     }
     Foreground::delete_(compartment);
     return NULL;
@@ -3928,17 +3935,19 @@ RunDebugGC(JSContext *cx)
     JSRuntime *rt = cx->runtime;
 
     /*
      * If rt->gcDebugCompartmentGC is true, only GC the current
      * compartment. But don't GC the atoms compartment.
      */
     if (rt->gcDebugCompartmentGC)
         PrepareCompartmentForGC(cx->compartment);
-    RunLastDitchGC(cx, gcreason::DEBUG_GC, !rt->gcDebugCompartmentGC);
+    else
+        PrepareForFullGC(cx->runtime);
+    RunLastDitchGC(cx, gcreason::DEBUG_GC);
 #endif
 }
 
 void
 SetDeterministicGC(JSContext *cx, bool enabled)
 {
 #ifdef JS_GC_ZEAL
     JSRuntime *rt = cx->runtime;
@@ -4316,27 +4325,31 @@ EndVerifyBarriers(JSContext *cx)
     AutoCopyFreeListToArenas copy(rt);
     RecordNativeStackTopForGC(rt);
 
     VerifyTracer *trc = (VerifyTracer *)rt->gcVerifyData;
 
     if (!trc)
         return;
 
+    /* We need to disable barriers before tracing, which may invoke barriers. */
+    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        /* Don't verify if a new compartment was created. */
+        if (!c->needsBarrier_)
+            return;
+        c->needsBarrier_ = false;
+    }
+
     /*
      * We need to bump gcNumber so that the methodjit knows that jitcode has
      * been discarded.
      */
     JS_ASSERT(trc->number == rt->gcNumber);
     rt->gcNumber++;
 
-    /* We need to disable barriers before tracing, which may invoke barriers. */
-    for (CompartmentsIter c(rt); !c.done(); c.next())
-        c->needsBarrier_ = false;
-
     for (CompartmentsIter c(rt); !c.done(); c.next())
         c->discardJitCode(rt->defaultFreeOp());
 
     rt->gcVerifyData = NULL;
     rt->gcIncrementalState = NO_INCREMENTAL;
 
     JS_TracerInit(trc, rt, MarkFromAutorooter);
 
--- a/js/src/jsgc.h
+++ b/js/src/jsgc.h
@@ -1379,35 +1379,37 @@ TriggerCompartmentGC(JSCompartment *comp
 
 extern void
 MaybeGC(JSContext *cx);
 
 extern void
 ShrinkGCBuffers(JSRuntime *rt);
 
 extern void
+PrepareForFullGC(JSRuntime *rt);
+
+extern void
 PrepareCompartmentForGC(JSCompartment *comp);
 
 /*
  * Kinds of js_GC invocation.
  */
 typedef enum JSGCInvocationKind {
     /* Normal invocation. */
     GC_NORMAL           = 0,
 
     /* Minimize GC triggers and release empty GC chunks right away. */
     GC_SHRINK             = 1
 } JSGCInvocationKind;
 
-/* Pass NULL for |comp| to get a full GC. */
 extern void
-GC(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+GC(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
 
 extern void
-GCSlice(JSContext *cx, bool full, JSGCInvocationKind gckind, js::gcreason::Reason reason);
+GCSlice(JSContext *cx, JSGCInvocationKind gckind, js::gcreason::Reason reason);
 
 extern void
 GCDebugSlice(JSContext *cx, int64_t objCount);
 
 } /* namespace js */
 
 namespace js {
 
--- a/js/src/jsgcmark.cpp
+++ b/js/src/jsgcmark.cpp
@@ -78,17 +78,16 @@ CheckMarkedThing(JSTracer *trc, T *thing
     JS_ASSERT(trc);
     JS_ASSERT(thing);
     JS_ASSERT(thing->compartment());
     JS_ASSERT(thing->compartment()->rt == trc->runtime);
     JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
 
     DebugOnly<JSRuntime *> rt = trc->runtime;
 
-    JS_ASSERT_IF(rt->gcIsFull, IS_GC_MARKING_TRACER(trc));
     JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
 
     JS_ASSERT(thing->isAligned());
 
     JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
                  thing->compartment()->isCollecting() ||
                  thing->compartment() == rt->atomsCompartment);
 }