Bug 1163059 - Add a more convenient wrapper for isAtomsZone; r=sfink
author Terrence Cole <terrence@mozilla.com>
Thu, 07 May 2015 10:17:49 -0700
changeset 243375 6bc3e88e0107e6b7d653919d943449a42da3b6be
parent 243374 49678aa590e25ac6b76e365adf81041ca790d85d
child 243376 70c63c8546e37c369faf303912197b2208d23419
push id 28738
push user cbook@mozilla.com
push date Tue, 12 May 2015 14:11:31 +0000
treeherder mozilla-central@bedce1b405a3
reviewers sfink
bugs 1163059
milestone 40.0a1
js/src/gc/Marking.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jit/VMFunctions.cpp
js/src/jscntxtinlines.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsgc.cpp
js/src/jsobj.cpp
js/src/vm/Runtime.h
js/src/vm/SelfHosting.cpp
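
For context, a minimal sketch (not part of the changeset) of the call pattern this patch converts, assuming a Zone* named zone obtained from a live GC thing:

    // Before: callers had to reach for a JSRuntime* just to identify the atoms zone.
    JSRuntime* rt = zone->runtimeFromAnyThread();
    bool inAtomsZone = rt->isAtomsZone(zone);

    // After: the new Zone::isAtomsZone() wrapper performs the runtime lookup itself
    // (it forwards to runtimeFromAnyThread()->isAtomsZone(this); see gc/Zone.h below).
    bool inAtomsZoneNow = zone->isAtomsZone();
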
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -202,20 +202,20 @@ js::CheckTracedThing(JSTracer* trc, T th
      */
     bool isGcMarkingTracer = trc->isMarkingTracer();
 
     MOZ_ASSERT_IF(zone->requireGCTracer(), isGcMarkingTracer || IsBufferingGrayRoots(trc));
 
     if (isGcMarkingTracer) {
         GCMarker* gcMarker = static_cast<GCMarker*>(trc);
         MOZ_ASSERT_IF(gcMarker->shouldCheckCompartments(),
-                      zone->isCollecting() || rt->isAtomsZone(zone));
+                      zone->isCollecting() || zone->isAtomsZone());
 
         MOZ_ASSERT_IF(gcMarker->markColor() == GRAY,
-                      !zone->isGCMarkingBlack() || rt->isAtomsZone(zone));
+                      !zone->isGCMarkingBlack() || zone->isAtomsZone());
 
         MOZ_ASSERT(!(zone->isGCSweeping() || zone->isGCFinished() || zone->isGCCompacting()));
     }
 
     /*
      * Try to assert that the thing is allocated.  This is complicated by the
      * fact that allocated things may still contain the poison pattern if that
      * part has not been overwritten, and that the free span list head in the
@@ -310,28 +310,26 @@ AssertZoneIsMarking(Cell* thing)
     MOZ_ASSERT(TenuredCell::fromPointer(thing)->zone()->isGCMarking());
 }
 
 static void
 AssertZoneIsMarking(JSString* str)
 {
 #ifdef DEBUG
     Zone* zone = TenuredCell::fromPointer(str)->zone();
-    JSRuntime* rt = str->runtimeFromMainThread();
-    MOZ_ASSERT(zone->isGCMarking() || rt->isAtomsZone(zone));
+    MOZ_ASSERT(zone->isGCMarking() || zone->isAtomsZone());
 #endif
 }
 
 static void
 AssertZoneIsMarking(JS::Symbol* sym)
 {
 #ifdef DEBUG
     Zone* zone = TenuredCell::fromPointer(sym)->zone();
-    JSRuntime* rt = sym->runtimeFromMainThread();
-    MOZ_ASSERT(zone->isGCMarking() || rt->isAtomsZone(zone));
+    MOZ_ASSERT(zone->isGCMarking() || zone->isAtomsZone());
 #endif
 }
 
 static void
 AssertRootMarkingPhase(JSTracer* trc)
 {
     MOZ_ASSERT_IF(trc->isMarkingTracer(),
                   trc->runtime()->gc.state() == NO_INCREMENTAL ||
@@ -748,17 +746,17 @@ GCMarker::traverse(AccessorShape* thing)
 }
 } // namespace js
 
 template <typename S, typename T>
 void
 js::GCMarker::traverseEdge(S source, T target)
 {
     MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target),
-                  runtime()->isAtomsZone(target->zone()) || target->zone() == source->zone());
+                  target->zone()->isAtomsZone() || target->zone() == source->zone());
     traverse(target);
 }
 
 namespace js {
 // Special-case JSObject->JSObject edges to check the compartment too.
 template <>
 void
 GCMarker::traverseEdge(JSObject* source, JSObject* target)
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -68,19 +68,17 @@ bool Zone::init(bool isSystemArg)
 void
 Zone::setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit)
 {
     if (updateJit == UpdateJit && needs != jitUsingBarriers_) {
         jit::ToggleBarriers(this, needs);
         jitUsingBarriers_ = needs;
     }
 
-    if (needs && runtimeFromMainThread()->isAtomsZone(this))
-        MOZ_ASSERT(!runtimeFromMainThread()->exclusiveThreadsPresent());
-
+    MOZ_ASSERT_IF(needs && isAtomsZone(), !runtimeFromMainThread()->exclusiveThreadsPresent());
     MOZ_ASSERT_IF(needs, canCollect());
     needsIncrementalBarrier_ = needs;
 }
 
 void
 Zone::resetGCMallocBytes()
 {
     gcMallocBytes = ptrdiff_t(gcMaxMallocBytes);
@@ -244,17 +242,17 @@ Zone::hasMarkedCompartments()
 
 bool
 Zone::canCollect()
 {
     // Zones cannot be collected while in use by other threads.
     if (usedByExclusiveThread)
         return false;
     JSRuntime* rt = runtimeFromAnyThread();
-    if (rt->isAtomsZone(this) && rt->exclusiveThreadsPresent())
+    if (isAtomsZone() && rt->exclusiveThreadsPresent())
         return false;
     return true;
 }
 
 void
 Zone::notifyObservingDebuggers()
 {
     for (CompartmentsInZoneIter comps(this); !comps.done(); comps.next()) {
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -217,16 +217,19 @@ struct Zone : public JS::shadow::Zone,
 
     enum ShouldUpdateJit { DontUpdateJit, UpdateJit };
     void setNeedsIncrementalBarrier(bool needs, ShouldUpdateJit updateJit);
     const bool* addressOfNeedsIncrementalBarrier() const { return &needsIncrementalBarrier_; }
 
     js::jit::JitZone* getJitZone(JSContext* cx) { return jitZone_ ? jitZone_ : createJitZone(cx); }
     js::jit::JitZone* jitZone() { return jitZone_; }
 
+    bool isAtomsZone() const { return runtimeFromAnyThread()->isAtomsZone(this); }
+    bool isSelfHostingZone() const { return runtimeFromAnyThread()->isSelfHostingZone(this); }
+
 #ifdef DEBUG
     // For testing purposes, return the index of the zone group which this zone
     // was swept in in the last GC.
     unsigned lastZoneGroupIndex() { return gcLastZoneGroupIndex; }
 #endif
 
   private:
     void sweepBreakpoints(js::FreeOp* fop);
--- a/js/src/jit/VMFunctions.cpp
+++ b/js/src/jit/VMFunctions.cpp
@@ -1193,21 +1193,20 @@ AssertValidStringPtr(JSContext* cx, JSSt
 #ifdef DEBUG
     // We can't closely inspect strings from another runtime.
     if (str->runtimeFromAnyThread() != cx->runtime()) {
         MOZ_ASSERT(str->isPermanentAtom());
         return;
     }
 
     if (str->isAtom())
-        MOZ_ASSERT(cx->runtime()->isAtomsZone(str->zone()));
+        MOZ_ASSERT(str->zone()->isAtomsZone());
     else
         MOZ_ASSERT(str->zone() == cx->zone());
 
-    MOZ_ASSERT(str->runtimeFromMainThread() == cx->runtime());
     MOZ_ASSERT(str->isAligned());
     MOZ_ASSERT(str->length() <= JSString::MAX_LENGTH);
 
     gc::AllocKind kind = str->getAllocKind();
     if (str->isFatInline())
         MOZ_ASSERT(kind == gc::AllocKind::FAT_INLINE_STRING);
     else if (str->isExternal())
         MOZ_ASSERT(kind == gc::AllocKind::EXTERNAL_STRING);
@@ -1217,22 +1216,22 @@ AssertValidStringPtr(JSContext* cx, JSSt
         MOZ_ASSERT(kind == gc::AllocKind::STRING);
 #endif
 }
 
 void
 AssertValidSymbolPtr(JSContext* cx, JS::Symbol* sym)
 {
     // We can't closely inspect symbols from another runtime.
-    if (sym->runtimeFromAnyThread() != cx->runtime())
+    if (sym->runtimeFromAnyThread() != cx->runtime()) {
+        MOZ_ASSERT(sym->isWellKnownSymbol());
         return;
+    }
 
-    MOZ_ASSERT(cx->runtime()->isAtomsZone(sym->zone()));
-
-    MOZ_ASSERT(sym->runtimeFromMainThread() == cx->runtime());
+    MOZ_ASSERT(sym->zone()->isAtomsZone());
     MOZ_ASSERT(sym->isAligned());
     if (JSString* desc = sym->description()) {
         MOZ_ASSERT(desc->isAtom());
         AssertValidStringPtr(cx, desc);
     }
 
     MOZ_ASSERT(sym->getAllocKind() == gc::AllocKind::SYMBOL);
 }
--- a/js/src/jscntxtinlines.h
+++ b/js/src/jscntxtinlines.h
@@ -424,17 +424,17 @@ js::ExclusiveContext::setCompartment(JSC
                   !comp->zone()->usedByExclusiveThread);
 
     // Only one thread can be in the atoms compartment at a time.
     MOZ_ASSERT_IF(runtime_->isAtomsCompartment(comp),
                   runtime_->currentThreadHasExclusiveAccess());
 
     // Make sure that the atoms compartment has its own zone.
     MOZ_ASSERT_IF(comp && !runtime_->isAtomsCompartment(comp),
-                  !runtime_->isAtomsZone(comp->zone()));
+                  !comp->zone()->isAtomsZone());
 
     // Both the current and the new compartment should be properly marked as
     // entered at this point.
     MOZ_ASSERT_IF(compartment_, compartment_->hasBeenEntered());
     MOZ_ASSERT_IF(comp, comp->hasBeenEntered());
 
     compartment_ = comp;
     zone_ = comp ? comp->zone() : nullptr;
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -335,18 +335,17 @@ JSCompartment::wrap(JSContext* cx, Mutab
 
     /* If the string is already in this compartment, we are done. */
     JSString* str = strp;
     if (str->zoneFromAnyThread() == zone())
         return true;
 
     /* If the string is an atom, we don't have to copy. */
     if (str->isAtom()) {
-        MOZ_ASSERT(str->isPermanentAtom() ||
-                   cx->runtime()->isAtomsZone(str->zone()));
+        MOZ_ASSERT(str->isPermanentAtom() || str->zone()->isAtomsZone());
         return true;
     }
 
     /* Check the cache. */
     RootedValue key(cx, StringValue(str));
     if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(CrossCompartmentKey(key))) {
         strp.set(p->value().get().toString());
         return true;
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -599,17 +599,17 @@ struct JSCompartment
 
     void reportTelemetry();
 
   public:
     void addTelemetry(const char* filename, DeprecatedLanguageExtension e);
 };
 
 inline bool
-JSRuntime::isAtomsZone(JS::Zone* zone)
+JSRuntime::isAtomsZone(const JS::Zone* zone) const
 {
     return zone == atomsCompartment_->zone();
 }
 
 namespace js {
 
 // We only set the maybeAlive flag for objects and scripts. It's assumed that,
 // if a compartment is alive, then it will have at least some live object or
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -1867,19 +1867,19 @@ AutoDisableCompactingGC::AutoDisableComp
 }
 
 AutoDisableCompactingGC::~AutoDisableCompactingGC()
 {
     gc.enableCompactingGC();
 }
 
 static bool
-CanRelocateZone(JSRuntime* rt, Zone* zone)
-{
-    return !rt->isAtomsZone(zone) && !rt->isSelfHostingZone(zone);
+CanRelocateZone(Zone* zone)
+{
+    return !zone->isAtomsZone() && !zone->isSelfHostingZone();
 }
 
 static bool
 CanRelocateAllocKind(AllocKind kind)
 {
     return IsObjectAllocKind(kind);
 }
 
@@ -2177,17 +2177,17 @@ ArenaLists::relocateArenas(ArenaHeader*&
 }
 
 bool
 GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, SliceBudget& sliceBudget)
 {
     gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_MOVE);
 
     MOZ_ASSERT(!zone->isPreservingCode());
-    MOZ_ASSERT(CanRelocateZone(rt, zone));
+    MOZ_ASSERT(CanRelocateZone(zone));
 
     jit::StopAllOffThreadCompilations(zone);
 
     if (!zone->arenas.relocateArenas(relocatedArenasToRelease, reason, sliceBudget, stats))
         return false;
 
 #ifdef DEBUG
     // Check that we did as much compaction as we should have. There
@@ -3099,32 +3099,32 @@ GCRuntime::maybeAllocTriggerZoneGC(Zone*
     }
 }
 
 bool
 GCRuntime::triggerZoneGC(Zone* zone, JS::gcreason::Reason reason)
 {
     /* Zones in use by a thread with an exclusive context can't be collected. */
     if (!CurrentThreadCanAccessRuntime(rt)) {
-        MOZ_ASSERT(zone->usedByExclusiveThread || rt->isAtomsZone(zone));
+        MOZ_ASSERT(zone->usedByExclusiveThread || zone->isAtomsZone());
         return false;
     }
 
     /* GC is already running. */
     if (rt->isHeapCollecting())
         return false;
 
 #ifdef JS_GC_ZEAL
     if (zealMode == ZealAllocValue) {
         triggerGC(reason);
         return true;
     }
 #endif
 
-    if (rt->isAtomsZone(zone)) {
+    if (zone->isAtomsZone()) {
         /* We can't do a zone GC of the atoms compartment. */
         if (rt->keepAtoms()) {
             /* Skip GC and retrigger later, since atoms zone won't be collected
              * if keepAtoms is true. */
             fullGCForAtomsRequested_ = true;
             return false;
         }
         triggerGC(reason);
@@ -3605,17 +3605,17 @@ GCRuntime::sweepZones(FreeOp* fop, bool 
 
     JSZoneCallback callback = rt->destroyZoneCallback;
 
     /* Skip the atomsCompartment zone. */
     Zone** read = zones.begin() + 1;
     Zone** end = zones.end();
     Zone** write = read;
     MOZ_ASSERT(zones.length() >= 1);
-    MOZ_ASSERT(rt->isAtomsZone(zones[0]));
+    MOZ_ASSERT(zones[0]->isAtomsZone());
 
     while (read < end) {
         Zone* zone = *read++;
 
         if (zone->wasGCStarted()) {
             if ((!zone->isQueuedForBackgroundSweep() &&
                  zone->arenas.arenaListsAreEmpty() &&
                  !zone->hasMarkedCompartments()) || destroyingRuntime)
@@ -3742,22 +3742,20 @@ CompartmentOfCell(Cell* thing, JSGCTrace
 
 static void
 CheckCompartmentCallback(JS::CallbackTracer* trcArg, void** thingp, JSGCTraceKind kind)
 {
     CompartmentCheckTracer* trc = static_cast<CompartmentCheckTracer*>(trcArg);
     TenuredCell* thing = TenuredCell::fromPointer(*thingp);
 
     JSCompartment* comp = CompartmentOfCell(thing, kind);
-    if (comp && trc->compartment) {
+    if (comp && trc->compartment)
         CheckCompartment(trc, comp, thing, kind);
-    } else {
-        MOZ_ASSERT(thing->zone() == trc->zone ||
-                   trc->runtime()->isAtomsZone(thing->zone()));
-    }
+    else
+        MOZ_ASSERT(thing->zone() == trc->zone || thing->zone()->isAtomsZone());
 }
 
 void
 GCRuntime::checkForCompartmentMismatches()
 {
     if (disableStrictProxyCheckingCount)
         return;
 
@@ -3811,17 +3809,17 @@ GCRuntime::beginMarkPhase(JS::gcreason::
 #ifdef DEBUG
         for (auto i : AllAllocKinds()) { // Braces needed to appease MSVC 2013.
             MOZ_ASSERT(!zone->arenas.arenaListsToSweep[i]);
         }
 #endif
 
         /* Set up which zones will be collected. */
         if (zone->isGCScheduled()) {
-            if (!rt->isAtomsZone(zone)) {
+            if (!zone->isAtomsZone()) {
                 any = true;
                 zone->setGCState(Zone::Mark);
             }
         } else {
             isFull = false;
         }
 
         zone->setPreservingCode(false);
@@ -4888,17 +4886,17 @@ GCRuntime::beginSweepingZoneGroup()
     for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
         /* Set the GC state to sweeping. */
         MOZ_ASSERT(zone->isGCMarking());
         zone->setGCState(Zone::Sweep);
 
         /* Purge the ArenaLists before sweeping. */
         zone->arenas.purge();
 
-        if (rt->isAtomsZone(zone))
+        if (zone->isAtomsZone())
             sweepingAtoms = true;
 
         if (rt->sweepZoneCallback)
             rt->sweepZoneCallback(zone);
 
         zone->gcLastZoneGroupIndex = zoneGroupIndex;
     }
 
@@ -5426,17 +5424,17 @@ GCRuntime::beginCompactPhase()
         if (isBackgroundSweeping())
             return NotFinished;
     } else {
         waitBackgroundSweepEnd();
     }
 
     MOZ_ASSERT(zonesToMaybeCompact.isEmpty());
     for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
-        if (CanRelocateZone(rt, zone))
+        if (CanRelocateZone(zone))
             zonesToMaybeCompact.append(zone);
     }
 
     MOZ_ASSERT(!relocatedArenasToRelease);
     startedCompacting = true;
     return Finished;
 }
 
--- a/js/src/jsobj.cpp
+++ b/js/src/jsobj.cpp
@@ -2006,17 +2006,17 @@ js::CloneObjectLiteral(JSContext* cx, Ha
     RootedId id(cx);
     RootedValue value(cx);
     for (size_t i = 0; i < length; i++) {
         // The only markable values in copy on write arrays are atoms, which
         // can be freely copied between compartments.
         value = srcArray->getDenseElement(i);
         MOZ_ASSERT_IF(value.isMarkable(),
                       value.toGCThing()->isTenured() &&
-                      cx->runtime()->isAtomsZone(value.toGCThing()->asTenured().zoneFromAnyThread()));
+                      value.toGCThing()->asTenured().zoneFromAnyThread()->isAtomsZone());
 
         id = INT_TO_JSID(i);
         if (!DefineProperty(cx, res, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
             return nullptr;
     }
 
     if (!ObjectElements::MakeElementsCopyOnWrite(cx, res))
         return nullptr;
--- a/js/src/vm/Runtime.h
+++ b/js/src/vm/Runtime.h
@@ -918,18 +918,18 @@ struct JSRuntime : public JS::shadow::Ru
     //-------------------------------------------------------------------------
 
     bool initSelfHosting(JSContext* cx);
     void finishSelfHosting();
     void markSelfHostingGlobal(JSTracer* trc);
     bool isSelfHostingGlobal(JSObject* global) {
         return global == selfHostingGlobal_;
     }
-    bool isSelfHostingCompartment(JSCompartment* comp);
-    bool isSelfHostingZone(JS::Zone* zone);
+    bool isSelfHostingCompartment(JSCompartment* comp) const;
+    bool isSelfHostingZone(const JS::Zone* zone) const;
     bool cloneSelfHostedFunctionScript(JSContext* cx, js::Handle<js::PropertyName*> name,
                                        js::Handle<JSFunction*> targetFun);
     bool cloneSelfHostedValue(JSContext* cx, js::Handle<js::PropertyName*> name,
                               js::MutableHandleValue vp);
 
     //-------------------------------------------------------------------------
     // Locale information
     //-------------------------------------------------------------------------
@@ -1264,17 +1264,17 @@ struct JSRuntime : public JS::shadow::Ru
         return atomsCompartment_;
     }
 
     bool isAtomsCompartment(JSCompartment* comp) {
         return comp == atomsCompartment_;
     }
 
     // The atoms compartment is the only one in its zone.
-    inline bool isAtomsZone(JS::Zone* zone);
+    inline bool isAtomsZone(const JS::Zone* zone) const;
 
     bool activeGCInAtomsZone();
 
     js::SymbolRegistry& symbolRegistry() {
         MOZ_ASSERT(currentThreadHasExclusiveAccess());
         return symbolRegistry_;
     }
 
--- a/js/src/vm/SelfHosting.cpp
+++ b/js/src/vm/SelfHosting.cpp
@@ -1629,23 +1629,23 @@ JSRuntime::finishSelfHosting()
 void
 JSRuntime::markSelfHostingGlobal(JSTracer* trc)
 {
     if (selfHostingGlobal_ && !parentRuntime)
         TraceRoot(trc, &selfHostingGlobal_, "self-hosting global");
 }
 
 bool
-JSRuntime::isSelfHostingCompartment(JSCompartment* comp)
+JSRuntime::isSelfHostingCompartment(JSCompartment* comp) const
 {
     return selfHostingGlobal_->compartment() == comp;
 }
 
 bool
-JSRuntime::isSelfHostingZone(JS::Zone* zone)
+JSRuntime::isSelfHostingZone(const JS::Zone* zone) const
 {
     return selfHostingGlobal_ && selfHostingGlobal_->zoneFromAnyThread() == zone;
 }
 
 static bool
 CloneValue(JSContext* cx, HandleValue selfHostedValue, MutableHandleValue vp);
 
 static bool