Bug 650161 - Fix compacting GC after parallel sweeping changes r=terrence
author      Jon Coppeard <jcoppeard@mozilla.com>
date        Fri, 03 Oct 2014 10:04:18 +0100
changeset   208557 b8b6ded7370d84e3e367d5dfb028e1d92744c4e6
parent      208556 7c26c6d5b2fb433fcc674d9939ed087843d78730
child       208558 978df8aa78baf8bc8e73c30d8bf6619bc82dc929
push id     1
push user   root
push date   Mon, 20 Oct 2014 17:29:22 +0000
reviewers   terrence
bugs        650161
milestone   35.0a1
js/src/gc/GCInternals.h
js/src/gc/GCRuntime.h
js/src/jsgc.cpp
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -136,17 +136,16 @@ void
 CheckHashTablesAfterMovingGC(JSRuntime *rt);
 #endif
 
 #ifdef JSGC_COMPACTING
 struct MovingTracer : JSTracer {
     MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapKeysValues) {}
 
     static void Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind);
-    static void Sweep(JSTracer *jstrc);
     static bool IsMovingTracer(JSTracer *trc) {
         return trc->callback == Visit;
     }
 };
 #endif
 
 
 } /* namespace gc */
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -527,16 +527,17 @@ class GCRuntime
     void sweepZones(FreeOp *fop, bool lastGC);
     void decommitArenasFromAvailableList(Chunk **availableListHeadp);
     void decommitArenas();
     void expireChunksAndArenas(bool shouldShrink);
     void sweepBackgroundThings(bool onBackgroundThread);
     void assertBackgroundSweepingFinished();
     bool shouldCompact();
 #ifdef JSGC_COMPACTING
+    void sweepZoneAfterCompacting(Zone *zone);
     void compactPhase();
     ArenaHeader *relocateArenas();
     void updatePointersToRelocatedCells();
     void releaseRelocatedArenas(ArenaHeader *relocatedList);
 #endif
     void finishCollection();
 
     void computeNonIncrementalMarkingForValidation();
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2112,17 +2112,16 @@ ArenaContainsGlobal(ArenaHeader *arena)
 
 static bool
 CanRelocateArena(ArenaHeader *arena)
 {
     /*
      * We can't currently move global objects because their address can be baked
      * into compiled code so we skip relocation of any area containing one.
      */
-    JSRuntime *rt = arena->zone->runtimeFromMainThread();
     return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena);
 }
 
 static bool
 ShouldRelocateArena(ArenaHeader *arena)
 {
 #ifdef JS_GC_ZEAL
     if (arena->zone->runtimeFromMainThread()->gc.zeal() == ZealCompactValue)
@@ -2338,48 +2337,45 @@ MovingTracer::Visit(JSTracer *jstrc, voi
 
     if (IsForwarded(thing)) {
         Cell *dst = Forwarded(thing);
         *thingp = dst;
     }
 }
 
 void
-MovingTracer::Sweep(JSTracer *jstrc)
-{
-    JSRuntime *rt = jstrc->runtime();
+GCRuntime::sweepZoneAfterCompacting(Zone *zone)
+{
     FreeOp *fop = rt->defaultFreeOp();
-
-    WatchpointMap::sweepAll(rt);
-
-    Debugger::sweepAll(fop);
-
-    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
-        if (zone->isCollecting()) {
-            bool oom = false;
-            zone->sweep(fop, false, &oom);
-            MOZ_ASSERT(!oom);
-
-            for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
-                c->sweep(fop, false);
-            }
-        } else {
-            /* Update cross compartment wrappers into moved zones. */
-            for (CompartmentsInZoneIter c(zone); !c.done(); c.next())
-                c->sweepCrossCompartmentWrappers();
+    if (zone->isCollecting()) {
+        zone->discardJitCode(fop);
+        zone->sweepAnalysis(fop, rt->gc.releaseObservedTypes && !zone->isPreservingCode());
+        zone->sweepBreakpoints(fop);
+
+        for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
+            c->sweepInnerViews();
+            c->sweepCrossCompartmentWrappers();
+            c->sweepBaseShapeTable();
+            c->sweepInitialShapeTable();
+            c->sweepTypeObjectTables();
+            c->sweepRegExps();
+            c->sweepCallsiteClones();
+            c->sweepSavedStacks();
+            c->sweepGlobalObject(fop);
+            c->sweepSelfHostingScriptSource();
+            c->sweepDebugScopes();
+            c->sweepJitCompartment(fop);
+            c->sweepWeakMaps();
+            c->sweepNativeIterators();
         }
-    }
-
-    /* Type inference may put more blocks here to free. */
-    rt->freeLifoAlloc.freeAll();
-
-    /* Clear runtime caches that can contain cell pointers. */
-    // TODO: Should possibly just call PurgeRuntime() here.
-    rt->newObjectCache.purge();
-    rt->nativeIterCache.purge();
+    } else {
+        /* Update cross compartment wrappers into moved zones. */
+        for (CompartmentsInZoneIter c(zone); !c.done(); c.next())
+            c->sweepCrossCompartmentWrappers();
+    }
 }
 
 /*
  * Update the internal pointers in a single cell.
  */
 static void
 UpdateCellPointers(MovingTracer *trc, Cell *cell, JSGCTraceKind traceKind) {
     if (traceKind == JSTRACE_OBJECT) {
@@ -2454,17 +2450,30 @@ GCRuntime::updatePointersToRelocatedCell
             c->watchpointMap->markAll(&trc);
     }
 
     // Mark all gray roots, making sure we call the trace callback to get the
     // current set.
     if (JSTraceDataOp op = grayRootTracer.op)
         (*op)(&trc, grayRootTracer.data);
 
-    MovingTracer::Sweep(&trc);
+    // Sweep everything to fix up weak pointers
+    WatchpointMap::sweepAll(rt);
+    Debugger::sweepAll(rt->defaultFreeOp());
+
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
+        rt->gc.sweepZoneAfterCompacting(zone);
+
+    // Type inference may put more blocks here to free.
+    rt->freeLifoAlloc.freeAll();
+
+    // Clear runtime caches that can contain cell pointers.
+    // TODO: Should possibly just call PurgeRuntime() here.
+    rt->newObjectCache.purge();
+    rt->nativeIterCache.purge();
 
     // Call callbacks to get the rest of the system to fixup other untraced pointers.
     callWeakPointerCallbacks();
 }
 
 void
 GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
 {
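
For orientation, here is a minimal, self-contained sketch (plain C++, not SpiderMonkey code) of the control flow this patch sets up: the runtime-wide fixups stay in the pointer-update phase, while the per-zone work moves into a sweepZoneAfterCompacting() helper called once per zone. All names below (MiniRuntime, MiniZone, MiniCompartment) are hypothetical stand-ins for the real js::gc types, and the sweep calls just print what the real code would do.

#include <cstdio>
#include <vector>

struct MiniCompartment {
    void sweepCrossCompartmentWrappers() { std::puts("  sweep cross-compartment wrappers"); }
    void sweepWeakTables()               { std::puts("  sweep weak tables"); }
};

struct MiniZone {
    bool isCollecting;
    std::vector<MiniCompartment> compartments;
    void discardJitCode() { std::puts("  discard jit code"); }
};

struct MiniRuntime {
    std::vector<MiniZone> zones;

    // Per-zone sweeping after compacting: a collecting zone sweeps every structure
    // that may still hold pointers into relocated arenas; a non-collecting zone only
    // needs its cross-compartment wrappers updated.
    void sweepZoneAfterCompacting(MiniZone &zone) {
        if (zone.isCollecting) {
            zone.discardJitCode();
            for (MiniCompartment &c : zone.compartments) {
                c.sweepCrossCompartmentWrappers();
                c.sweepWeakTables();
            }
        } else {
            for (MiniCompartment &c : zone.compartments)
                c.sweepCrossCompartmentWrappers();
        }
    }

    // Pointer-update phase: runtime-wide tables are swept once, each zone is swept
    // in turn, and runtime caches that can hold cell pointers are purged afterwards.
    void updatePointersToRelocatedCells() {
        std::puts("sweep runtime-wide tables (watchpoint maps, debuggers)");
        for (MiniZone &zone : zones)
            sweepZoneAfterCompacting(zone);
        std::puts("free type-inference blocks; purge new-object and iterator caches");
    }
};

int main() {
    MiniRuntime rt;
    rt.zones = { MiniZone{true,  {MiniCompartment{}, MiniCompartment{}}},
                 MiniZone{false, {MiniCompartment{}}} };
    rt.updatePointersToRelocatedCells();
    return 0;
}

The actual patch additionally threads a FreeOp through these calls and keeps everything behind JSGC_COMPACTING; the sketch omits those details.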