Bug 928050 - Specify whether to iterate over atoms zone (r=bhackett)
author Bill McCloskey <wmccloskey@mozilla.com>
Sun, 10 Nov 2013 22:48:58 -0800
changeset 168962 442211bce621f621b19feca73192755fd3798c99
parent 168961 372dddb6113d607865fba1a54a10c6343929a5a3
child 168963 cf1ca47f2830d77c2871dc4ce82b14dfb2ce020b
push id 3224
push user lsblakk@mozilla.com
push date Tue, 04 Feb 2014 01:06:49 +0000
reviewers bhackett
bugs 928050
milestone 28.0a1
Bug 928050 - Specify whether to iterate over atoms zone (r=bhackett)
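The patch threads a new ZoneSelector argument through the GC's zone and compartment iterators. A minimal sketch of the resulting calling convention, using the names introduced in the diff below (the per-zone work is a hypothetical placeholder):

    // SkipAtoms is the safe default: the atoms zone may be in use by
    // exclusive (worker) threads, so most iteration should not touch it.
    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
        doPerZoneWork(zone);   // hypothetical per-zone callback

    // WithAtoms includes the atoms zone; callers such as TraceRuntime
    // pair it with the exclusive access lock.
    AutoLockForExclusiveAccess lock(rt);
    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
        doPerZoneWork(zone);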
js/src/gc/GCInternals.h
js/src/gc/Iteration.cpp
js/src/gc/Nursery.cpp
js/src/gc/RootMarking.cpp
js/src/gc/Verifier.cpp
js/src/gc/Zone.h
js/src/jit/BaselineJIT.cpp
js/src/jscntxt.cpp
js/src/jsfriendapi.cpp
js/src/jsgc.cpp
js/src/jsgcinlines.h
js/src/jswatchpoint.cpp
js/src/jsweakmap.cpp
js/src/jswrapper.cpp
js/src/vm/Debugger.cpp
js/src/vm/MemoryMetrics.cpp
js/src/vm/OldDebugAPI.cpp
js/src/vm/Runtime.cpp
--- a/js/src/gc/GCInternals.h
+++ b/js/src/gc/GCInternals.h
@@ -21,19 +21,20 @@ void
 MarkRuntime(JSTracer *trc, bool useSavedRoots = false);
 
 void
 BufferGrayRoots(GCMarker *gcmarker);
 
 class AutoCopyFreeListToArenas
 {
     JSRuntime *runtime;
+    ZoneSelector selector;
 
   public:
-    AutoCopyFreeListToArenas(JSRuntime *rt);
+    AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector);
     ~AutoCopyFreeListToArenas();
 };
 
 struct AutoFinishGC
 {
     AutoFinishGC(JSRuntime *rt);
 };
 
@@ -59,17 +60,17 @@ class AutoTraceSession
 
 struct AutoPrepareForTracing
 {
     AutoFinishGC finish;
     AutoPauseWorkersForTracing pause;
     AutoTraceSession session;
     AutoCopyFreeListToArenas copy;
 
-    AutoPrepareForTracing(JSRuntime *rt);
+    AutoPrepareForTracing(JSRuntime *rt, ZoneSelector selector);
 };
 
 class IncrementalSafety
 {
     const char *reason_;
 
     IncrementalSafety(const char *reason) : reason_(reason) {}
 
--- a/js/src/gc/Iteration.cpp
+++ b/js/src/gc/Iteration.cpp
@@ -6,27 +6,29 @@
 
 #include "jscompartment.h"
 #include "jsgc.h"
 
 #include "gc/GCInternals.h"
 #include "js/HashTable.h"
 #include "vm/Runtime.h"
 
+#include "jscntxtinlines.h"
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 
 void
 js::TraceRuntime(JSTracer *trc)
 {
     JS_ASSERT(!IS_GC_MARKING_TRACER(trc));
 
-    AutoPrepareForTracing prep(trc->runtime);
+    AutoLockForExclusiveAccess lock(trc->runtime);
+    AutoPrepareForTracing prep(trc->runtime, WithAtoms);
     MarkRuntime(trc);
 }
 
 static void
 IterateCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                JSIterateCompartmentCallback compartmentCallback,
                                IterateArenaCallback arenaCallback,
                                IterateCellCallback cellCallback)
@@ -49,86 +51,89 @@ IterateCompartmentsArenasCells(JSRuntime
 
 void
 js::IterateZonesCompartmentsArenasCells(JSRuntime *rt, void *data,
                                         IterateZoneCallback zoneCallback,
                                         JSIterateCompartmentCallback compartmentCallback,
                                         IterateArenaCallback arenaCallback,
                                         IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt);
+    AutoLockForExclusiveAccess lock(rt);
+    AutoPrepareForTracing prep(rt, WithAtoms);
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         (*zoneCallback)(rt, data, zone);
         IterateCompartmentsArenasCells(rt, zone, data,
                                        compartmentCallback, arenaCallback, cellCallback);
     }
 }
 
 void
 js::IterateZoneCompartmentsArenasCells(JSRuntime *rt, Zone *zone, void *data,
                                        IterateZoneCallback zoneCallback,
                                        JSIterateCompartmentCallback compartmentCallback,
                                        IterateArenaCallback arenaCallback,
                                        IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt);
+    AutoLockForExclusiveAccess lock(rt);
+    AutoPrepareForTracing prep(rt, WithAtoms);
 
     (*zoneCallback)(rt, data, zone);
     IterateCompartmentsArenasCells(rt, zone, data,
                                    compartmentCallback, arenaCallback, cellCallback);
 }
 
 void
 js::IterateChunks(JSRuntime *rt, void *data, IterateChunkCallback chunkCallback)
 {
-    AutoPrepareForTracing prep(rt);
+    AutoPrepareForTracing prep(rt, SkipAtoms);
 
     for (js::GCChunkSet::Range r = rt->gcChunkSet.all(); !r.empty(); r.popFront())
         chunkCallback(rt, data, r.front());
 }
 
 void
 js::IterateScripts(JSRuntime *rt, JSCompartment *compartment,
                    void *data, IterateScriptCallback scriptCallback)
 {
-    AutoPrepareForTracing prep(rt);
+    AutoPrepareForTracing prep(rt, SkipAtoms);
 
     if (compartment) {
         for (CellIterUnderGC i(compartment->zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
             if (script->compartment() == compartment)
                 scriptCallback(rt, data, script);
         }
     } else {
-        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
             for (CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next())
                 scriptCallback(rt, data, i.get<JSScript>());
         }
     }
 }
 
 void
 js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
 {
-    AutoPrepareForTracing prep(zone->runtimeFromMainThread());
+    AutoPrepareForTracing prep(zone->runtimeFromMainThread(), SkipAtoms);
 
     for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
         for (CellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
             JSObject *obj = i.get<JSObject>();
             if (obj->isMarked(GRAY))
                 cellCallback(data, obj);
         }
     }
 }
 
 JS_PUBLIC_API(void)
 JS_IterateCompartments(JSRuntime *rt, void *data,
                        JSIterateCompartmentCallback compartmentCallback)
 {
     JS_ASSERT(!rt->isHeapBusy());
 
+    AutoLockForExclusiveAccess lock(rt);
     AutoPauseWorkersForTracing pause(rt);
     AutoTraceSession session(rt);
 
-    for (CompartmentsIter c(rt); !c.done(); c.next())
+    for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
         (*compartmentCallback)(rt, data, c);
 }
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -597,17 +597,17 @@ js::Nursery::collect(JSRuntime *rt, JS::
 
     rt->gcHelperThread.waitBackgroundSweepEnd();
 
     /* Move objects pointed to by roots from the nursery to the major heap. */
     MinorCollectionTracer trc(rt, this);
     rt->gcStoreBuffer.mark(&trc); // This must happen first.
     MarkRuntime(&trc);
     Debugger::markAll(&trc);
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+    for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
         comp->markAllCrossCompartmentWrappers(&trc);
         comp->markAllInitialShapeTableEntries(&trc);
     }
     rt->newObjectCache.clearNurseryObjects(rt);
 
     /*
      * Most of the work is done here. This loop iterates over objects that have
      * been moved to the major heap. If these objects have any outgoing pointers
--- a/js/src/gc/RootMarking.cpp
+++ b/js/src/gc/RootMarking.cpp
@@ -659,17 +659,17 @@ void
 js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
 {
     JSRuntime *rt = trc->runtime;
     JS_ASSERT(trc->callback != GCMarker::GrayCallback);
 
     JS_ASSERT(!rt->mainThread.suppressGC);
 
     if (IS_GC_MARKING_TRACER(trc)) {
-        for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
             if (!c->zone()->isCollecting())
                 c->markCrossCompartmentWrappers(trc);
         }
         Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
     }
 
     AutoGCRooter::traceAll(trc);
 
@@ -716,17 +716,17 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
 #ifdef JS_ION
         jit::JitRuntime::Mark(trc);
 #endif
     }
 
     for (ContextIter acx(rt); !acx.done(); acx.next())
         acx->mark(trc);
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         if (IS_GC_MARKING_TRACER(trc) && !zone->isCollecting())
             continue;
 
         if (IS_GC_MARKING_TRACER(trc) && zone->isPreservingCode()) {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_MARK_TYPES);
             zone->markTypes(trc);
         }
 
@@ -738,17 +738,17 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
                     MarkScriptRoot(trc, &script, "profilingScripts");
                     JS_ASSERT(script == i.get<JSScript>());
                 }
             }
         }
     }
 
     /* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         if (trc->runtime->isHeapMinorCollecting())
             c->globalWriteBarriered = false;
 
         if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
             continue;
 
         /* During a GC, these are treated as weak pointers. */
         if (!IS_GC_MARKING_TRACER(trc)) {
@@ -768,17 +768,17 @@ js::gc::MarkRuntime(JSTracer *trc, bool 
 #endif
 
     if (!rt->isHeapMinorCollecting()) {
         /*
          * All JSCompartment::mark does is mark the globals for compartments
          * which have been entered. Globals aren't nursery allocated so there's
          * no need to do this for minor GCs.
          */
-        for (CompartmentsIter c(rt); !c.done(); c.next())
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
             c->mark(trc);
 
         /*
          * The embedding can register additional roots here.
          *
          * We don't need to trace these in a minor GC because all pointers into
          * the nursery should be in the store buffer, and we want to avoid the
          * time taken to trace all these roots.
--- a/js/src/gc/Verifier.cpp
+++ b/js/src/gc/Verifier.cpp
@@ -11,16 +11,17 @@
 #include "jscntxt.h"
 #include "jsgc.h"
 #include "jsprf.h"
 
 #include "gc/GCInternals.h"
 #include "gc/Zone.h"
 #include "js/HashTable.h"
 
+#include "jscntxtinlines.h"
 #include "jsgcinlines.h"
 
 using namespace js;
 using namespace js::gc;
 using namespace mozilla;
 
 #if defined(DEBUG) && defined(JS_GC_ZEAL) && defined(JSGC_ROOT_ANALYSIS) && !defined(JS_THREADSAFE)
 #  if JS_STACK_GROWTH_DIRECTION > 0
@@ -230,23 +231,23 @@ JS::CheckStackRoots(JSContext *cx)
     if (cx->compartment()->activeAnalysis)
         return;
 
     if (rt->mainThread.suppressGC)
         return;
 
     // Can switch to the atoms compartment during analysis.
     if (IsAtomsCompartment(cx->compartment())) {
-        for (CompartmentsIter c(rt); !c.done(); c.next()) {
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
             if (c.get()->activeAnalysis)
                 return;
         }
     }
 
-    AutoCopyFreeListToArenas copy(rt);
+    AutoCopyFreeListToArenas copy(rt, WithAtoms);
 
     ConservativeGCData *cgcd = &rt->conservativeGC;
     cgcd->recordStackTop();
 
     JS_ASSERT(cgcd->hasStackToScan());
     uintptr_t *stackMin, *stackEnd;
     stackMin = cgcd->nativeStackTop + 1;
     stackEnd = reinterpret_cast<uintptr_t *>(rt->nativeStackBase);
@@ -442,17 +443,18 @@ NextNode(VerifyNode *node)
 void
 gc::StartVerifyPreBarriers(JSRuntime *rt)
 {
     if (rt->gcVerifyPreData || rt->gcIncrementalState != NO_INCREMENTAL)
         return;
 
     MinorGC(rt, JS::gcreason::API);
 
-    AutoPrepareForTracing prep(rt);
+    AutoLockForExclusiveAccess lock(rt);
+    AutoPrepareForTracing prep(rt, WithAtoms);
 
     if (!IsIncrementalGCSafe(rt))
         return;
 
     for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
         r.front()->bitmap.clear();
 
     VerifyPreTracer *trc = js_new<VerifyPreTracer>();
@@ -505,17 +507,17 @@ gc::StartVerifyPreBarriers(JSRuntime *rt
         node = NextNode(node);
     }
 
     rt->gcVerifyPreData = trc;
     rt->gcIncrementalState = MARK;
     rt->gcMarker.start();
 
     rt->setNeedsBarrier(true);
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         PurgeJITCaches(zone);
         zone->setNeedsBarrier(true, Zone::UpdateIon);
         zone->allocator.arenas.purge();
     }
 
     return;
 
 oom:
@@ -570,27 +572,27 @@ AssertMarkedOrAllocated(const EdgeValue 
     JS_snprintf(msgbuf, sizeof(msgbuf), "[barrier verifier] Unmarked edge: %s", label);
     MOZ_ReportAssertionFailure(msgbuf, __FILE__, __LINE__);
     MOZ_CRASH();
 }
 
 void
 gc::EndVerifyPreBarriers(JSRuntime *rt)
 {
-    AutoPrepareForTracing prep(rt);
+    AutoPrepareForTracing prep(rt, SkipAtoms);
 
     VerifyPreTracer *trc = (VerifyPreTracer *)rt->gcVerifyPreData;
 
     if (!trc)
         return;
 
     bool compartmentCreated = false;
 
     /* We need to disable barriers before tracing, which may invoke barriers. */
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (!zone->needsBarrier())
             compartmentCreated = true;
 
         zone->setNeedsBarrier(false, Zone::UpdateIon);
         PurgeJITCaches(zone);
     }
     rt->setNeedsBarrier(false);
 
@@ -735,17 +737,17 @@ PostVerifierVisitEdge(JSTracer *jstrc, v
 }
 #endif
 
 void
 js::gc::EndVerifyPostBarriers(JSRuntime *rt)
 {
 #ifdef JSGC_GENERATIONAL
     VerifyPostTracer::EdgeSet edges;
-    AutoPrepareForTracing prep(rt);
+    AutoPrepareForTracing prep(rt, SkipAtoms);
 
     VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData;
 
     /* Visit every entry in the store buffer and put the edges in a hash set. */
     JS_TracerInit(trc, rt, PostVerifierCollectStoreBufferEdges);
     if (!edges.init())
         goto oom;
     trc->edges = &edges;
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -314,31 +314,49 @@ struct Zone : public JS::shadow::Zone,
   private:
     void sweepBreakpoints(js::FreeOp *fop);
 };
 
 } /* namespace JS */
 
 namespace js {
 
+/*
+ * Using the atoms zone without holding the exclusive access lock is dangerous
+ * because worker threads may be using it simultaneously. Therefore, it's
+ * better to skip the atoms zone when iterating over zones. If you need to
+ * iterate over the atoms zone, consider taking the exclusive access lock first.
+ */
+enum ZoneSelector {
+    WithAtoms,
+    SkipAtoms
+};
+
 class ZonesIter {
   private:
     JS::Zone **it, **end;
 
   public:
-    ZonesIter(JSRuntime *rt) {
+    ZonesIter(JSRuntime *rt, ZoneSelector selector) {
         it = rt->zones.begin();
         end = rt->zones.end();
+
+        if (selector == SkipAtoms) {
+            JS_ASSERT(rt->isAtomsZone(*it));
+            it++;
+        }
     }
 
     bool done() const { return it == end; }
 
     void next() {
         JS_ASSERT(!done());
-        it++;
+        do {
+            it++;
+        } while (!done() && (*it)->usedByExclusiveThread);
     }
 
     JS::Zone *get() const {
         JS_ASSERT(!done());
         return *it;
     }
 
     operator JS::Zone *() const { return get(); }
@@ -378,18 +396,25 @@ class CompartmentsIterT
   private:
     ZonesIterT zone;
     mozilla::Maybe<CompartmentsInZoneIter> comp;
 
   public:
     CompartmentsIterT(JSRuntime *rt)
       : zone(rt)
     {
-        JS_ASSERT(!zone.done());
-        comp.construct(zone);
+        if (!zone.done())
+            comp.construct(zone);
+    }
+
+    CompartmentsIterT(JSRuntime *rt, ZoneSelector selector)
+      : zone(rt, selector)
+    {
+        if (!zone.done())
+            comp.construct(zone);
     }
 
     bool done() const { return zone.done(); }
 
     void next() {
         JS_ASSERT(!done());
         JS_ASSERT(!comp.ref().done());
         comp.ref().next();
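With SkipAtoms, a ZonesIter can be done immediately — a runtime whose only zone is the atoms zone has nothing left to visit — which is why the CompartmentsIterT constructor above now constructs its compartment sub-iterator conditionally instead of asserting !zone.done(). A sketch of compartment iteration under the new selector (the visit function is a hypothetical placeholder):

    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
        visitCompartment(c);   // hypothetical per-compartment work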
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -899,17 +899,17 @@ jit::AddSizeOfBaselineData(JSScript *scr
 {
     if (script->hasBaselineScript())
         script->baselineScript()->addSizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
 }
 
 void
 jit::ToggleBaselineSPS(JSRuntime *runtime, bool enable)
 {
-    for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
+    for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
         for (gc::CellIter i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
             if (!script->hasBaselineScript())
                 continue;
             script->baselineScript()->toggleSPS(enable);
         }
     }
 }
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -255,17 +255,17 @@ js::DestroyContext(JSContext *cx, Destro
 
     cx->remove();
     bool last = !rt->hasContexts();
     if (last) {
         /*
          * Dump remaining type inference results while we still have a context.
          * This printing depends on atoms still existing.
          */
-        for (CompartmentsIter c(rt); !c.done(); c.next())
+        for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
             c->types.print(cx, false);
     }
     if (mode == DCM_FORCE_GC) {
         JS_ASSERT(!rt->isHeapBusy());
         JS::PrepareForFullGC(rt);
         GC(rt, GC_NORMAL, JS::gcreason::DESTROY_CONTEXT);
     }
     js_delete_poison(cx);
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -150,36 +150,36 @@ JS_FRIEND_API(void)
 JS::PrepareZoneForGC(Zone *zone)
 {
     zone->scheduleGC();
 }
 
 JS_FRIEND_API(void)
 JS::PrepareForFullGC(JSRuntime *rt)
 {
-    for (ZonesIter zone(rt); !zone.done(); zone.next())
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
         zone->scheduleGC();
 }
 
 JS_FRIEND_API(void)
 JS::PrepareForIncrementalGC(JSRuntime *rt)
 {
     if (!JS::IsIncrementalGCInProgress(rt))
         return;
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->wasGCStarted())
             PrepareZoneForGC(zone);
     }
 }
 
 JS_FRIEND_API(bool)
 JS::IsGCScheduled(JSRuntime *rt)
 {
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCScheduled())
             return true;
     }
 
     return false;
 }
 
 JS_FRIEND_API(void)
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1005,17 +1005,17 @@ js_FinishGC(JSRuntime *rt)
     rt->gcHelperThread.finish();
 
 #ifdef JS_GC_ZEAL
     /* Free memory associated with GC verification. */
     FinishVerifier(rt);
 #endif
 
     /* Delete all remaining zones. */
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
             js_delete(comp.get());
         js_delete(zone.get());
     }
 
     rt->zones.clear();
 
     rt->gcSystemAvailableChunkListHead = nullptr;
@@ -1903,17 +1903,17 @@ GCMarker::GrayCallback(JSTracer *trc, vo
     GCMarker *gcmarker = static_cast<GCMarker *>(trc);
     gcmarker->appendGrayRoot(*thingp, kind);
 }
 
 size_t
 GCMarker::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t size = stack.sizeOfExcludingThis(mallocSizeOf);
-    for (ZonesIter zone(runtime); !zone.done(); zone.next())
+    for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next())
         size += zone->gcGrayRoots.sizeOfExcludingThis(mallocSizeOf);
     return size;
 }
 
 void
 js::SetMarkStackLimit(JSRuntime *rt, size_t limit)
 {
     JS_ASSERT(!rt->isHeapBusy());
@@ -2203,17 +2203,17 @@ SweepBackgroundThings(JSRuntime* rt, boo
     rt->gcSweepingZones = nullptr;
 }
 
 #ifdef JS_THREADSAFE
 static void
 AssertBackgroundSweepingFinished(JSRuntime *rt)
 {
     JS_ASSERT(!rt->gcSweepingZones);
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i) {
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
             JS_ASSERT(zone->allocator.arenas.doneBackgroundFinalize(AllocKind(i)));
         }
     }
 }
 
 unsigned
@@ -2766,17 +2766,17 @@ static void
 CheckForCompartmentMismatches(JSRuntime *rt)
 {
     if (rt->gcDisableStrictProxyCheckingCount)
         return;
 
     CompartmentCheckTracer trc;
     JS_TracerInit(&trc, rt, CheckCompartmentCallback);
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         trc.zone = zone;
         for (size_t thingKind = 0; thingKind < FINALIZE_LAST; thingKind++) {
             for (CellIterUnderGC i(zone, AllocKind(thingKind)); !i.done(); i.next()) {
                 trc.src = i.getCell();
                 trc.srcKind = MapAllocToTraceKind(AllocKind(thingKind));
                 trc.compartment = CompartmentOfCell(trc.src, trc.srcKind);
                 JS_TraceChildren(&trc, trc.src, trc.srcKind);
             }
@@ -2793,17 +2793,17 @@ BeginMarkPhase(JSRuntime *rt)
 #ifdef DEBUG
     if (rt->gcFullCompartmentChecks)
         CheckForCompartmentMismatches(rt);
 #endif
 
     rt->gcIsFull = true;
     bool any = false;
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         /* Assert that zone state is as we expect */
         JS_ASSERT(!zone->isCollecting());
         JS_ASSERT(!zone->compartments.empty());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
 
         /* Set up which zones will be collected. */
         if (zone->isGCScheduled()) {
@@ -2815,17 +2815,17 @@ BeginMarkPhase(JSRuntime *rt)
             rt->gcIsFull = false;
         }
 
         zone->scheduledForDestruction = false;
         zone->maybeAlive = zone->hold;
         zone->setPreservingCode(false);
     }
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next()) {
         JS_ASSERT(!c->gcLiveArrayBuffers);
         c->marked = false;
         if (ShouldPreserveJITCode(c, currentTime))
             c->zone()->setPreservingCode(true);
     }
 
     /*
      * Atoms are not in the cross-compartment map. So if there are any
@@ -2948,17 +2948,17 @@ BeginMarkPhase(JSRuntime *rt)
      * dead zone are difficult to avoid. We detect such cases (via the
      * gcObjectsMarkedInDeadCompartment counter) and redo any ongoing GCs after
      * the JS_TransplantObject function has finished. This ensures that the dead
      * zones will be cleaned up. See AutoMarkInDeadZone and
      * AutoMaybeTouchDeadZones for details.
      */
 
     /* Set the maybeAlive flag based on cross-compartment edges. */
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             Cell *dst = e.front().key.wrapped;
             dst->tenuredZone()->maybeAlive = true;
         }
     }
 
     /*
      * For black roots, code in gc/Marking.cpp will already have set maybeAlive
@@ -3288,31 +3288,31 @@ FinishMarkingValidation(JSRuntime *rt)
 #endif
 }
 
 static void
 AssertNeedsBarrierFlagsConsistent(JSRuntime *rt)
 {
 #ifdef DEBUG
     bool anyNeedsBarrier = false;
-    for (ZonesIter zone(rt); !zone.done(); zone.next())
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
         anyNeedsBarrier |= zone->needsBarrier();
     JS_ASSERT(rt->needsBarrier() == anyNeedsBarrier);
 #endif
 }
 
 static void
 DropStringWrappers(JSRuntime *rt)
 {
     /*
      * String "wrappers" are dropped on GC because their presence would require
      * us to sweep the wrappers in all compartments every time we sweep a
      * compartment group.
      */
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key.kind == CrossCompartmentKey::StringWrapper)
                 e.removeFront();
         }
     }
 }
 
 /*
@@ -3851,17 +3851,17 @@ BeginSweepPhase(JSRuntime *rt, bool last
 
     gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP);
 
 #ifdef JS_THREADSAFE
     rt->gcSweepOnBackgroundThread = !lastGC && rt->useHelperThreads();
 #endif
 
 #ifdef DEBUG
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         JS_ASSERT(!c->gcIncomingGrayPointers);
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key.kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value.get().toObject());
         }
     }
 #endif
 
@@ -3961,32 +3961,32 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
     JS_ASSERT(rt->gcMarker.isDrained());
     rt->gcMarker.stop();
 
     /*
      * Recalculate whether GC was full or not as this may have changed due to
      * newly created zones.  Can only change from full to not full.
      */
     if (rt->gcIsFull) {
-        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
             if (!zone->isCollecting()) {
                 rt->gcIsFull = false;
                 break;
             }
         }
     }
 
     /*
      * If we found any black->gray edges during marking, we completely clear the
     * mark bits of all uncollected zones, or if a reset has occurred, zones that
      * will no longer be collected. This is safe, although it may
      * prevent the cycle collector from collecting some dead objects.
      */
     if (rt->gcFoundBlackGrayEdges) {
-        for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+        for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
             if (!zone->isCollecting())
                 zone->allocator.arenas.unmarkAll();
         }
     }
 
 #ifdef DEBUG
     PropertyTree::dumpShapes(rt);
 #endif
@@ -4052,17 +4052,17 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
 
         rt->freeLifoAlloc.freeAll();
 
         /* Ensure the compartments get swept if it's the last GC. */
         if (lastGC)
             SweepZones(&fop, lastGC);
     }
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         zone->setGCLastBytes(zone->gcBytes, gckind);
         if (zone->isCollecting()) {
             JS_ASSERT(zone->isGCFinished());
             zone->setGCState(Zone::NoGC);
         }
 
 #ifdef DEBUG
         JS_ASSERT(!zone->isCollecting());
@@ -4072,17 +4072,17 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocat
             JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
                          !rt->gcSweepOnBackgroundThread,
                          !zone->allocator.arenas.arenaListsToSweep[i]);
         }
 #endif
     }
 
 #ifdef DEBUG
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         JS_ASSERT(!c->gcIncomingGrayPointers);
         JS_ASSERT(!c->gcLiveArrayBuffers);
 
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             if (e.front().key.kind != CrossCompartmentKey::StringWrapper)
                 AssertNotOnGrayList(&e.front().value.get().toObject());
         }
     }
@@ -4160,53 +4160,73 @@ AutoGCSession::~AutoGCSession()
     runtime->gcChunkAllocationSinceLastGC = false;
 
 #ifdef JS_GC_ZEAL
     /* Keeping these around after a GC is dangerous. */
     runtime->gcSelectedForMarking.clearAndFree();
 #endif
 
     /* Clear gcMallocBytes for all compartments */
-    for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
+    for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         zone->resetGCMallocBytes();
         zone->unscheduleGC();
     }
 
     runtime->resetGCMallocBytes();
 }
 
-AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt)
-  : runtime(rt)
-{
-    for (ZonesIter zone(rt); !zone.done(); zone.next())
+AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt, ZoneSelector selector)
+  : runtime(rt),
+    selector(selector)
+{
+    for (ZonesIter zone(rt, selector); !zone.done(); zone.next())
         zone->allocator.arenas.copyFreeListsToArenas();
 }
 
 AutoCopyFreeListToArenas::~AutoCopyFreeListToArenas()
 {
-    for (ZonesIter zone(runtime); !zone.done(); zone.next())
+    for (ZonesIter zone(runtime, selector); !zone.done(); zone.next())
         zone->allocator.arenas.clearFreeListsInArenas();
 }
 
+class AutoCopyFreeListToArenasForGC
+{
+    JSRuntime *runtime;
+
+  public:
+    AutoCopyFreeListToArenasForGC(JSRuntime *rt) : runtime(rt) {
+        for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
+            //if (zone->canCollect())
+                zone->allocator.arenas.copyFreeListsToArenas();
+        }
+    }
+    ~AutoCopyFreeListToArenasForGC() {
+        for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
+            //if (zone->canCollect())
+                zone->allocator.arenas.clearFreeListsInArenas();
+        }
+    }
+};
+
 static void
 IncrementalCollectSlice(JSRuntime *rt,
                         int64_t budget,
                         JS::gcreason::Reason gcReason,
                         JSGCInvocationKind gcKind);
 
 static void
 ResetIncrementalGC(JSRuntime *rt, const char *reason)
 {
     switch (rt->gcIncrementalState) {
       case NO_INCREMENTAL:
         return;
 
       case MARK: {
         /* Cancel any ongoing marking. */
-        AutoCopyFreeListToArenas copy(rt);
+        AutoCopyFreeListToArenasForGC copy(rt);
 
         rt->gcMarker.reset();
         rt->gcMarker.stop();
 
         for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
             ArrayBufferObject::resetArrayBufferList(c);
             ResetGrayList(c);
         }
@@ -4224,17 +4244,17 @@ ResetIncrementalGC(JSRuntime *rt, const 
         JS_ASSERT(!rt->gcStrictCompartmentChecking);
 
         break;
       }
 
       case SWEEP:
         rt->gcMarker.reset();
 
-        for (ZonesIter zone(rt); !zone.done(); zone.next())
+        for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next())
             zone->scheduledForDestruction = false;
 
         /* Finish sweeping the current zone group, then abort. */
         rt->gcAbortSweepAfterCurrentGroup = true;
         IncrementalCollectSlice(rt, SliceBudget::Unlimited, JS::gcreason::RESET, GC_NORMAL);
 
         {
             gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
@@ -4244,20 +4264,20 @@ ResetIncrementalGC(JSRuntime *rt, const 
 
       default:
         MOZ_ASSUME_UNREACHABLE("Invalid incremental GC state");
     }
 
     rt->gcStats.reset(reason);
 
 #ifdef DEBUG
-    for (CompartmentsIter c(rt); !c.done(); c.next())
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next())
         JS_ASSERT(!c->gcLiveArrayBuffers);
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         JS_ASSERT(!zone->needsBarrier());
         for (unsigned i = 0; i < FINALIZE_LIMIT; ++i)
             JS_ASSERT(!zone->allocator.arenas.arenaListsToSweep[i]);
     }
 #endif
 }
 
 namespace {
@@ -4302,17 +4322,17 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
     rt->setNeedsBarrier(false);
     AssertNeedsBarrierFlagsConsistent(rt);
 }
 
 AutoGCSlice::~AutoGCSlice()
 {
     /* We can't use GCZonesIter if this is the end of the last slice. */
     bool haveBarriers = false;
-    for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
+    for (ZonesIter zone(runtime, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCMarking()) {
             zone->setNeedsBarrier(true, Zone::UpdateIon);
             zone->allocator.arenas.prepareForIncrementalGC(runtime);
             haveBarriers = true;
         } else {
             zone->setNeedsBarrier(false, Zone::UpdateIon);
         }
     }
@@ -4334,17 +4354,17 @@ PushZealSelectedObjects(JSRuntime *rt)
 }
 
 static void
 IncrementalCollectSlice(JSRuntime *rt,
                         int64_t budget,
                         JS::gcreason::Reason reason,
                         JSGCInvocationKind gckind)
 {
-    AutoCopyFreeListToArenas copy(rt);
+    AutoCopyFreeListToArenasForGC copy(rt);
     AutoGCSlice slice(rt);
 
     bool lastGC = (reason == JS::gcreason::DESTROY_RUNTIME);
 
     gc::State initialState = rt->gcIncrementalState;
 
     int zeal = 0;
 #ifdef JS_GC_ZEAL
@@ -4499,17 +4519,17 @@ BudgetIncrementalGC(JSRuntime *rt, int64
     }
 
     if (rt->isTooMuchMalloc()) {
         *budget = SliceBudget::Unlimited;
         rt->gcStats.nonincremental("malloc bytes trigger");
     }
 
     bool reset = false;
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->gcBytes >= zone->gcTriggerBytes) {
             *budget = SliceBudget::Unlimited;
             rt->gcStats.nonincremental("allocation trigger");
         }
 
         if (rt->gcIncrementalState != NO_INCREMENTAL &&
             zone->isGCScheduled() != zone->wasGCStarted())
         {
@@ -4677,30 +4697,30 @@ Collect(JSRuntime *rt, bool incremental,
      */
     AutoDisableStoreBuffer adsb(rt);
 
     RecordNativeStackTopForGC(rt);
 
     int zoneCount = 0;
     int compartmentCount = 0;
     int collectedCount = 0;
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (rt->gcMode == JSGC_MODE_GLOBAL)
             zone->scheduleGC();
 
         /* This is a heuristic to avoid resets. */
         if (rt->gcIncrementalState != NO_INCREMENTAL && zone->needsBarrier())
             zone->scheduleGC();
 
         zoneCount++;
         if (zone->isGCScheduled())
             collectedCount++;
     }
 
-    for (CompartmentsIter c(rt); !c.done(); c.next())
+    for (CompartmentsIter c(rt, WithAtoms); !c.done(); c.next())
         compartmentCount++;
 
     rt->gcShouldCleanUpEverything = ShouldCleanUpEverything(rt, reason, gckind);
 
     gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, zoneCount, compartmentCount, reason);
 
     bool repeat = false;
 
@@ -4762,17 +4782,17 @@ void
 js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, JS::gcreason::Reason reason)
 {
     Collect(rt, true, SliceBudget::Unlimited, gckind, reason);
 }
 
 static bool
 ZonesSelected(JSRuntime *rt)
 {
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isGCScheduled())
             return true;
     }
     return false;
 }
 
 void
 js::GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount)
@@ -4831,21 +4851,21 @@ AutoFinishGC::AutoFinishGC(JSRuntime *rt
     if (JS::IsIncrementalGCInProgress(rt)) {
         JS::PrepareForIncrementalGC(rt);
         JS::FinishIncrementalGC(rt, JS::gcreason::API);
     }
 
     gc::FinishBackgroundFinalize(rt);
 }
 
-AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime *rt)
+AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime *rt, ZoneSelector selector)
   : finish(rt),
     pause(rt),
     session(rt),
-    copy(rt)
+    copy(rt, selector)
 {
     RecordNativeStackTopForGC(rt);
 }
 
 JSCompartment *
 js::NewCompartment(JSContext *cx, Zone *zone, JSPrincipals *principals,
                    const JS::CompartmentOptions &options)
 {
@@ -4891,17 +4911,17 @@ js::NewCompartment(JSContext *cx, Zone *
     zoneHolder.forget();
     return compartment.forget();
 }
 
 void
 gc::MergeCompartments(JSCompartment *source, JSCompartment *target)
 {
     JSRuntime *rt = source->runtimeFromMainThread();
-    AutoPrepareForTracing prepare(rt);
+    AutoPrepareForTracing prepare(rt, SkipAtoms);
 
     // Cleanup tables and other state in the source compartment that will be
     // meaningless after merging into the target compartment.
 
     source->clearTables();
 
     // Fixup compartment pointers in source to refer to target.
 
@@ -5026,17 +5046,17 @@ void PreventGCDuringInteractiveDebug()
 }
 
 #endif
 
 void
 js::ReleaseAllJITCode(FreeOp *fop)
 {
 #ifdef JS_ION
-    for (ZonesIter zone(fop->runtime()); !zone.done(); zone.next()) {
+    for (ZonesIter zone(fop->runtime(), SkipAtoms); !zone.done(); zone.next()) {
 
 # ifdef DEBUG
         /* Assert no baseline scripts are marked as active. */
         for (CellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
             JS_ASSERT_IF(script->hasBaselineScript(), !script->baselineScript()->active());
         }
 # endif
@@ -5054,17 +5074,17 @@ js::ReleaseAllJITCode(FreeOp *fop)
              * Discard baseline script if it's not marked as active. Note that
              * this also resets the active flag.
              */
             jit::FinishDiscardBaselineScript(fop, script);
         }
     }
 
     /* Sweep now invalidated compiler outputs from each compartment. */
-    for (CompartmentsIter comp(fop->runtime()); !comp.done(); comp.next())
+    for (CompartmentsIter comp(fop->runtime(), SkipAtoms); !comp.done(); comp.next())
         comp->types.clearCompilerOutputs(fop);
 #endif
 }
 
 /*
  * There are three possible PCCount profiling states:
  *
  * 1. None: Neither scripts nor the runtime have count information.
@@ -5129,17 +5149,17 @@ js::StopPCCountProfiling(JSContext *cx)
     JS_ASSERT(!rt->scriptAndCountsVector);
 
     ReleaseAllJITCode(rt->defaultFreeOp());
 
     ScriptAndCountsVector *vec = cx->new_<ScriptAndCountsVector>(SystemAllocPolicy());
     if (!vec)
         return;
 
-    for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
         for (CellIter i(zone, FINALIZE_SCRIPT); !i.done(); i.next()) {
             JSScript *script = i.get<JSScript>();
             if (script->hasScriptCounts && script->types) {
                 ScriptAndCounts sac;
                 sac.script = script;
                 sac.scriptCounts.set(script->releaseScriptCounts());
                 if (!vec->append(sac))
                     sac.scriptCounts.destroy(rt->defaultFreeOp());
--- a/js/src/jsgcinlines.h
+++ b/js/src/jsgcinlines.h
@@ -301,17 +301,17 @@ class CellIter : public CellIterImpl
 };
 
 class GCZonesIter
 {
   private:
     ZonesIter zone;
 
   public:
-    GCZonesIter(JSRuntime *rt) : zone(rt) {
+    GCZonesIter(JSRuntime *rt) : zone(rt, WithAtoms) {
         if (!zone->isCollecting())
             next();
     }
 
     bool done() const { return zone.done(); }
 
     void next() {
         JS_ASSERT(!done());
--- a/js/src/jswatchpoint.cpp
+++ b/js/src/jswatchpoint.cpp
@@ -224,17 +224,17 @@ WatchpointMap::sweep()
         }
     }
 }
 
 void
 WatchpointMap::traceAll(WeakMapTracer *trc)
 {
     JSRuntime *rt = trc->runtime;
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+    for (CompartmentsIter comp(rt, SkipAtoms); !comp.done(); comp.next()) {
         if (WatchpointMap *wpmap = comp->watchpointMap)
             wpmap->trace(trc);
     }
 }
 
 void
 WatchpointMap::trace(WeakMapTracer *trc)
 {
--- a/js/src/jsweakmap.cpp
+++ b/js/src/jsweakmap.cpp
@@ -51,17 +51,17 @@ WeakMapBase::sweepCompartment(JSCompartm
     for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
         m->sweep();
 }
 
 void
 WeakMapBase::traceAllMappings(WeakMapTracer *tracer)
 {
     JSRuntime *rt = tracer->runtime;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
             m->traceMappings(tracer);
     }
 }
 
 void
 WeakMapBase::resetCompartmentWeakMapList(JSCompartment *c)
 {
@@ -435,8 +435,9 @@ js_InitWeakMapClass(JSContext *cx, Handl
 
     if (!DefinePropertiesAndBrand(cx, weakMapProto, nullptr, weak_map_methods))
         return nullptr;
 
     if (!DefineConstructorAndPrototype(cx, global, JSProto_WeakMap, ctor, weakMapProto))
         return nullptr;
     return weakMapProto;
 }
+
--- a/js/src/jswrapper.cpp
+++ b/js/src/jswrapper.cpp
@@ -880,17 +880,17 @@ js::NukeCrossCompartmentWrappers(JSConte
                                  js::NukeReferencesToWindow nukeReferencesToWindow)
 {
     CHECK_REQUEST(cx);
     JSRuntime *rt = cx->runtime();
 
     // Iterate through scopes looking for system cross compartment wrappers
     // that point to an object that shares a global with obj.
 
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         if (!sourceFilter.match(c))
             continue;
 
         // Iterate the wrappers looking for anything interesting.
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             // Some cross-compartment wrappers are for strings.  We're not
             // interested in those.
             const CrossCompartmentKey &k = e.front().key;
@@ -987,17 +987,17 @@ js::RemapAllWrappersForObject(JSContext 
 {
     RootedValue origv(cx, ObjectValue(*oldTargetArg));
     RootedObject newTarget(cx, newTargetArg);
 
     AutoWrapperVector toTransplant(cx);
     if (!toTransplant.reserve(cx->runtime()->numCompartments))
         return false;
 
-    for (CompartmentsIter c(cx->runtime()); !c.done(); c.next()) {
+    for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         if (WrapperMap::Ptr wp = c->lookupWrapper(origv)) {
             // We found a wrapper. Remember and root it.
             toTransplant.infallibleAppend(WrapperValue(wp));
         }
     }
 
     for (WrapperValue *begin = toTransplant.begin(), *end = toTransplant.end();
          begin != end; ++begin)
@@ -1012,17 +1012,17 @@ js::RemapAllWrappersForObject(JSContext 
 JS_FRIEND_API(bool)
 js::RecomputeWrappers(JSContext *cx, const CompartmentFilter &sourceFilter,
                       const CompartmentFilter &targetFilter)
 {
     AutoMaybeTouchDeadZones agc(cx);
 
     AutoWrapperVector toRecompute(cx);
 
-    for (CompartmentsIter c(cx->runtime()); !c.done(); c.next()) {
+    for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         // Filter by source compartment.
         if (!sourceFilter.match(c))
             continue;
 
         // Iterate over the wrappers, filtering appropriately.
         for (JSCompartment::WrapperEnum e(c); !e.empty(); e.popFront()) {
             // Filter out non-objects.
             const CrossCompartmentKey &k = e.front().key;
--- a/js/src/vm/Debugger.cpp
+++ b/js/src/vm/Debugger.cpp
@@ -1443,17 +1443,17 @@ Debugger::markAllIteratively(GCMarker *t
 {
     bool markedAny = false;
 
     /*
      * Find all Debugger objects in danger of GC. This code is a little
      * convoluted since the easiest way to find them is via their debuggees.
      */
     JSRuntime *rt = trc->runtime;
-    for (CompartmentsIter c(rt); !c.done(); c.next()) {
+    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
         GlobalObjectSet &debuggees = c->getDebuggees();
         for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
             GlobalObject *global = e.front();
             if (!IsObjectMarked(&global))
                 continue;
             else if (global != e.front())
                 e.rekeyFront(global);
 
@@ -1945,17 +1945,17 @@ Debugger::addDebuggee(JSContext *cx, uns
     return true;
 }
 
 bool
 Debugger::addAllGlobalsAsDebuggees(JSContext *cx, unsigned argc, Value *vp)
 {
     THIS_DEBUGGER(cx, argc, vp, "addAllGlobalsAsDebuggees", args, dbg);
     AutoDebugModeGC dmgc(cx->runtime());
-    for (CompartmentsIter c(cx->runtime()); !c.done(); c.next()) {
+    for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         if (c == dbg->object->compartment() || c->options().invisibleToDebugger)
             continue;
         c->zone()->scheduledForDestruction = false;
         GlobalObject *global = c->maybeGlobal();
         if (global) {
             Rooted<GlobalObject*> rg(cx, global);
             if (!dbg->addDebuggeeGlobal(cx, rg, dmgc))
                 return false;
@@ -2624,17 +2624,17 @@ bool
 Debugger::findAllGlobals(JSContext *cx, unsigned argc, Value *vp)
 {
     THIS_DEBUGGER(cx, argc, vp, "findAllGlobals", args, dbg);
 
     RootedObject result(cx, NewDenseEmptyArray(cx));
     if (!result)
         return false;
 
-    for (CompartmentsIter c(cx->runtime()); !c.done(); c.next()) {
+    for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         c->zone()->scheduledForDestruction = false;
 
         GlobalObject *global = c->maybeGlobal();
         if (global) {
             /*
              * We pulled |global| out of nowhere, so it's possible that it was
              * marked gray by XPConnect. Since we're now exposing it to JS code,
              * we need to mark it black.
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -482,17 +482,17 @@ JS::CollectRuntimeStats(JSRuntime *rt, R
 #ifdef DEBUG
     // Check that the in-arena measurements look ok.
     size_t totalArenaSize = rtStats->zTotals.gcHeapArenaAdmin +
                             rtStats->zTotals.unusedGCThings +
                             rtStats->gcHeapGCThings;
     JS_ASSERT(totalArenaSize % gc::ArenaSize == 0);
 #endif
 
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next())
+    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next())
         comp->compartmentStats = nullptr;
 
     size_t numDirtyChunks =
         (rtStats->gcHeapChunkTotal - rtStats->gcHeapUnusedChunks) / gc::ChunkSize;
     size_t perChunkAdmin =
         sizeof(gc::Chunk) - (sizeof(gc::Arena) * gc::ArenasPerChunk);
     rtStats->gcHeapChunkAdmin = numDirtyChunks * perChunkAdmin;
 
@@ -507,28 +507,28 @@ JS::CollectRuntimeStats(JSRuntime *rt, R
                                   rtStats->gcHeapGCThings;
     return true;
 }
 
 JS_PUBLIC_API(size_t)
 JS::SystemCompartmentCount(JSRuntime *rt)
 {
     size_t n = 0;
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next()) {
         if (comp->isSystem)
             ++n;
     }
     return n;
 }
 
 JS_PUBLIC_API(size_t)
 JS::UserCompartmentCount(JSRuntime *rt)
 {
     size_t n = 0;
-    for (CompartmentsIter comp(rt); !comp.done(); comp.next()) {
+    for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next()) {
         if (!comp->isSystem)
             ++n;
     }
     return n;
 }
 
 JS_PUBLIC_API(size_t)
 JS::PeakSizeOfTemporary(const JSRuntime *rt)
--- a/js/src/vm/OldDebugAPI.cpp
+++ b/js/src/vm/OldDebugAPI.cpp
@@ -167,17 +167,17 @@ js::DebugExceptionUnwind(JSContext *cx, 
     return status;
 }
 
 JS_FRIEND_API(bool)
 JS_SetDebugModeForAllCompartments(JSContext *cx, bool debug)
 {
     AutoDebugModeGC dmgc(cx->runtime());
 
-    for (CompartmentsIter c(cx->runtime()); !c.done(); c.next()) {
+    for (CompartmentsIter c(cx->runtime(), SkipAtoms); !c.done(); c.next()) {
         // Ignore special compartments (atoms, JSD compartments)
         if (c->principals) {
             if (!c->setDebugModeFromC(cx, !!debug, dmgc))
                 return false;
         }
     }
     return true;
 }
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -403,30 +403,30 @@ JSRuntime::init(uint32_t maxbytes)
 JSRuntime::~JSRuntime()
 {
     JS_ASSERT(!isHeapBusy());
 
     /* Free source hook early, as its destructor may want to delete roots. */
     sourceHook = nullptr;
 
     /* Off thread compilation and parsing depend on atoms still existing. */
-    for (CompartmentsIter comp(this); !comp.done(); comp.next())
+    for (CompartmentsIter comp(this, SkipAtoms); !comp.done(); comp.next())
         CancelOffThreadIonCompile(comp, nullptr);
     WaitForOffThreadParsingToFinish(this);
 
 #ifdef JS_WORKER_THREADS
     if (workerThreadState)
         workerThreadState->cleanup(this);
 #endif
 
     /* Poison common names before final GC. */
     FinishCommonNames(this);
 
     /* Clear debugging state to remove GC roots. */
-    for (CompartmentsIter comp(this); !comp.done(); comp.next()) {
+    for (CompartmentsIter comp(this, SkipAtoms); !comp.done(); comp.next()) {
         comp->clearTraps(defaultFreeOp());
         if (WatchpointMap *wpmap = comp->watchpointMap)
             wpmap->clear();
     }
 
     /* Clear the statics table to remove GC roots. */
     staticStrings.finish();
 
@@ -708,17 +708,17 @@ void
 JSRuntime::setGCMaxMallocBytes(size_t value)
 {
     /*
      * For compatibility treat any value that exceeds PTRDIFF_T_MAX to
      * mean that value.
      */
     gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
     resetGCMallocBytes();
-    for (ZonesIter zone(this); !zone.done(); zone.next())
+    for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next())
         zone->setGCMaxMallocBytes(value);
 }
 
 void
 JSRuntime::updateMallocCounter(size_t nbytes)
 {
     updateMallocCounter(nullptr, nbytes);
 }