Bug 779183 - GC: Incremental sweeping of atoms table part 1 - Add per-compartment mark/sweep state r=billm
author Jon Coppeard <jcoppeard@mozilla.com>
date Wed, 22 Aug 2012 10:45:37 +0100
changeset 105057 271c3965015e4396bc27f82bdd5e2572d4adb58d
parent 105056 e8bf3b589c2d88707b9e58bb2a1a6d5050c42326
child 105058 a8785c8a603a156de71d42cc11d861fd95c409e3
push id 55
push user shu@rfrn.org
push date Thu, 30 Aug 2012 01:33:09 +0000
reviewers billm
bugs 779183
milestone 17.0a1
js/src/jscntxt.h
js/src/jscompartment.h
js/src/jsgc.cpp
--- a/js/src/jscntxt.h
+++ b/js/src/jscntxt.h
@@ -561,18 +561,18 @@ struct JSRuntime : js::RuntimeFriendFiel
      * If this is 0, all cross-compartment proxies must be registered in the
      * wrapper map. This checking must be disabled temporarily while creating
      * new wrappers. When non-zero, this records the recursion depth of wrapper
      * creation.
      */
     uintptr_t           gcDisableStrictProxyCheckingCount;
 
     /*
-     * The current incremental GC phase. During non-incremental GC, this is
-     * always NO_INCREMENTAL.
+     * The current incremental GC phase. This is also used internally in
+     * non-incremental GC.
      */
     js::gc::State       gcIncrementalState;
 
     /* Indicates that the last incremental slice exhausted the mark stack. */
     bool                gcLastMarkSlice;
 
     /* Whether any sweeping will take place in the separate GC helper thread. */
     bool                gcSweepOnBackgroundThread;
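For reference, gcIncrementalState takes its values from the js::gc::State enum in jsgc.h. Only NO_INCREMENTAL, MARK, and SWEEP are confirmed by the hunks in this patch; the other enumerators in this sketch are assumptions about the surrounding definition.

    // Sketch of the phase enum behind gcIncrementalState (defined in
    // jsgc.h; not part of this patch). MARK and SWEEP appear in the
    // jsgc.cpp hunks below; MARK_ROOTS and INVALID are assumed.
    namespace js { namespace gc {
    enum State {
        NO_INCREMENTAL,  // no incremental collection in progress
        MARK_ROOTS,      // marking roots (assumed)
        MARK,            // draining the mark stack
        SWEEP,           // sweeping compartments and arenas
        INVALID          // assumed sentinel
    };
    } /* gc */ } /* js */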
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -165,22 +165,24 @@ struct JSCompartment
 
     void setNeedsBarrier(bool needs);
 
     js::GCMarker *barrierTracer() {
         JS_ASSERT(needsBarrier_);
         return &rt->gcMarker;
     }
 
-  private:
+  public:
     enum CompartmentGCState {
         NoGC,
-        Collecting
+        Mark,
+        Sweep
     };
 
+  private:
     bool                         gcScheduled;
     CompartmentGCState           gcState;
     bool                         gcPreserveCode;
 
   public:
     bool isCollecting() const {
         if (rt->isHeapCollecting()) {
             return gcState != NoGC;
@@ -196,19 +198,19 @@ struct JSCompartment
     /*
      * If this returns true, all object tracing must be done with a GC marking
      * tracer.
      */
     bool requireGCTracer() const {
         return rt->isHeapCollecting() && gcState != NoGC;
     }
 
-    void setCollecting(bool collecting) {
+    void setGCState(CompartmentGCState state) {
         JS_ASSERT(rt->isHeapBusy());
-        gcState = collecting ? Collecting : NoGC;
+        gcState = state;
     }
 
     void scheduleGC() {
         JS_ASSERT(!rt->isHeapBusy());
         gcScheduled = true;
     }
 
     void unscheduleGC() {
@@ -222,18 +224,22 @@ struct JSCompartment
     void setPreservingCode(bool preserving) {
         gcPreserveCode = preserving;
     }
 
     bool wasGCStarted() const {
         return gcState != NoGC;
     }
 
+    bool isGCMarking() {
+        return gcState == Mark;
+    }
+
     bool isGCSweeping() {
-        return gcState != NoGC && rt->gcIncrementalState == js::gc::SWEEP;
+        return gcState == Sweep;
     }
 
     size_t                       gcBytes;
     size_t                       gcTriggerBytes;
     size_t                       gcMaxMallocBytes;
     double                       gcHeapGrowthFactor;
     JSCompartment                *gcNextCompartment;
 
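The header change above amounts to a small per-compartment state machine: NoGC, Mark, and Sweep replace the old two-value Collecting flag, whose sweep phase had to be inferred from the runtime-wide gcIncrementalState. A hypothetical caller (not part of the patch) shows how the new queries are meant to be used:

    // Hypothetical caller: per-compartment state lets code ask a specific
    // compartment where it is in the GC cycle instead of combining a
    // boolean flag with rt->gcIncrementalState.
    void VisitCompartment(JSCompartment *comp)
    {
        if (comp->isGCMarking()) {
            // All tracing must go through the GC marking tracer.
        } else if (comp->isGCSweeping()) {
            // Finalizers may run; references to dead cells are dropped.
        } else {
            // NoGC: this compartment is not being collected.
        }
    }

Note the design choice visible in the hunk: the enum becomes public so callers can name states such as JSCompartment::Mark, but the gcState field stays private, so transitions still go through setGCState() under its isHeapBusy() assertion.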
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -3237,17 +3237,17 @@ BeginMarkPhase(JSRuntime *rt)
         JS_ASSERT(!c->isCollecting());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
             JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
 
         /* Set up which compartments will be collected. */
         if (c->isGCScheduled()) {
             any = true;
             if (c.get() != rt->atomsCompartment)
-                c->setCollecting(true);
+                c->setGCState(JSCompartment::Mark);
         } else {
             rt->gcIsFull = false;
         }
 
         c->setPreservingCode(ShouldPreserveJITCode(c, currentTime));
     }
 
     /* Check that at least one compartment is scheduled for collection. */
@@ -3255,17 +3255,17 @@ BeginMarkPhase(JSRuntime *rt)
 
     /*
      * Atoms are not in the cross-compartment map. So if there are any
      * compartments that are not being collected, we are not allowed to collect
      * atoms. Otherwise, the non-collected compartments could contain pointers
      * to atoms that we would miss.
      */
     if (rt->atomsCompartment->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms)
-        rt->atomsCompartment->setCollecting(true);
+        rt->atomsCompartment->setGCState(JSCompartment::Mark);
 
     /*
      * At the end of each incremental slice, we call prepareForIncrementalGC,
      * which marks objects in all arenas that we're currently allocating
      * into. This can cause leaks if unreachable objects are in these
      * arenas. This purge call ensures that we only mark arenas that have had
      * allocations after the incremental GC started.
      */
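The reasoning behind the atoms restriction can be pictured concretely (illustrative comment, not part of the patch):

    // Why a full GC is required: atoms are shared across compartments but
    // are not tracked by the cross-compartment wrapper map, so a skipped
    // compartment may hold an edge the collector never sees:
    //
    //   uncollected compartment           atoms compartment
    //   JSString *name  ------------->    "propertyName" (atom)
    //
    // Sweeping atoms while any compartment is skipped could free an atom
    // out from under such a reference; hence the gcIsFull requirement.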
@@ -3673,17 +3673,19 @@ BeginSweepPhase(JSRuntime *rt)
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
 
     /*
      * Although there is a runtime-wide gcIsFull flag, it is set in
      * BeginMarkPhase. More compartments may have been created since then.
      */
     bool isFull = true;
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
-        if (!c->isCollecting())
+        if (c->isCollecting())
+            c->setGCState(JSCompartment::Sweep);
+        else
             isFull = false;
     }
     JS_ASSERT_IF(isFull, rt->gcIsFull);
 
 #ifdef JS_THREADSAFE
     rt->gcSweepOnBackgroundThread = rt->hasContexts();
 #endif
 
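This loop is what advances every collected compartment from Mark to Sweep, letting the new isGCSweeping() drop its dependence on rt->gcIncrementalState. A sketch of the resulting invariant, assuming GCCompartmentsIter visits exactly the collecting compartments (as its use in AutoGCSlice below suggests):

    // Invariant after the loop above (debug-only sketch): every
    // compartment being collected now reports the Sweep state.
    #ifdef DEBUG
    for (GCCompartmentsIter c(rt); !c.done(); c.next())
        JS_ASSERT(c->isGCSweeping());
    #endif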
@@ -3871,17 +3873,17 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
     while (ArenaHeader *arena = rt->gcArenasAllocatedDuringSweep) {
         rt->gcArenasAllocatedDuringSweep = arena->getNextAllocDuringSweep();
         arena->unsetAllocDuringSweep();
     }
 
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         c->setGCLastBytes(c->gcBytes, c->gcMallocAndFreeBytes, gckind);
         if (c->wasGCStarted())
-            c->setCollecting(false);
+            c->setGCState(JSCompartment::NoGC);
 
         JS_ASSERT(!c->isCollecting());
         JS_ASSERT(!c->wasGCStarted());
         for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
             JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
                          !rt->gcSweepOnBackgroundThread,
                          !c->arenas.arenaListsToSweep[i]);
         }
@@ -3980,17 +3982,17 @@ ResetIncrementalGC(JSRuntime *rt, const 
         rt->gcHelperThread.waitBackgroundSweepOrAllocEnd();
         return;
     }
 
     JS_ASSERT(rt->gcIncrementalState == MARK);
 
     for (CompartmentsIter c(rt); !c.done(); c.next()) {
         c->setNeedsBarrier(false);
-        c->setCollecting(false);
+        c->setGCState(JSCompartment::NoGC);
         JS_ASSERT(!c->gcNextCompartment);
         for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i)
             JS_ASSERT(!c->arenas.arenaListsToSweep[i]);
     }
 
     rt->gcMarker.reset();
     rt->gcMarker.stop();
 
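Taken together, the jsgc.cpp hunks give each compartment a simple lifecycle, summarized here (a summary, not code from the patch):

    // Per-compartment GC state transitions after this patch:
    //
    //   BeginMarkPhase:      NoGC  -> Mark   (scheduled compartments only)
    //   BeginSweepPhase:     Mark  -> Sweep  (every collected compartment)
    //   EndSweepPhase:       Sweep -> NoGC
    //   ResetIncrementalGC:  Mark  -> NoGC   (incremental GC abandoned)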
@@ -4018,34 +4020,33 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
      * there are stack frames active for any of its scripts. Normally this flag
      * is set at the beginning of the mark phase. During incremental GC, we also
      * set it at the start of every phase.
      */
     rt->stackSpace.markActiveCompartments();
 
     for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
         /* Clear this early so we don't do any write barriers during GC. */
-        if (rt->gcIncrementalState == MARK) {
+        if (c->isGCMarking()) {
             JS_ASSERT(c->needsBarrier());
             c->setNeedsBarrier(false);
         } else {
             JS_ASSERT(!c->needsBarrier());
         }
     }
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     for (GCCompartmentsIter c(runtime); !c.done(); c.next()) {
-        if (runtime->gcIncrementalState == MARK) {
+        if (c->isGCMarking()) {
             c->setNeedsBarrier(true);
             c->arenas.prepareForIncrementalGC(runtime);
         } else {
-            JS_ASSERT(runtime->gcIncrementalState == NO_INCREMENTAL ||
-                      runtime->gcIncrementalState == SWEEP);
+            JS_ASSERT(c->isGCSweeping());
             c->setNeedsBarrier(false);
         }
     }
 }
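With per-compartment state, AutoGCSlice toggles write barriers compartment by compartment instead of keying on the runtime-wide phase. A hypothetical helper condensing the destructor's rule:

    // Condensed slice-boundary rule: between incremental slices, barriers
    // stay enabled only for compartments that are still marking; a
    // sweeping compartment needs no barriers because everything reachable
    // in it has already been marked.
    static void SetBarrierStateAtSliceEnd(JSRuntime *rt, JSCompartment *c)
    {
        if (c->isGCMarking()) {
            c->setNeedsBarrier(true);
            c->arenas.prepareForIncrementalGC(rt);
        } else {
            JS_ASSERT(c->isGCSweeping());
            c->setNeedsBarrier(false);
        }
    }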
 
 class AutoCopyFreeListToArenas {
     JSRuntime *rt;