Bug 1132502 (part 1) - Abort if compartmentStats is null during memory reporting. r=jandem.
author      Nicholas Nethercote <nnethercote@mozilla.com>
date        Fri, 15 Apr 2016 12:03:39 +1000
changeset   331375  223a5febb34f8dfdb326d15e4736c82d51a43811
parent      331374  d9e341cca8e53923f8f3a45845753f7d3fd0ef50
child       331376  b9d0760ce84c3683f192e5e8d9181576793f50c1
push id     6048
push user   kmoir@mozilla.com
push date   Mon, 06 Jun 2016 19:02:08 +0000
reviewers   jandem
bugs        1132502
milestone   48.0a1
We have inconclusive evidence that compartmentStats is sometimes nullptr during memory reporting, which would be bad. This patch makes us abort in that case. It also changes some pointers to references to make the expected non-nullness clearer.
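The core of the patch is replacing the public raw `compartmentStats` pointer with a private field behind a release-asserting accessor that returns a reference. Below is a minimal standalone sketch of that pattern, not the actual SpiderMonkey code: it uses hypothetical stand-in types (`Stats`, `Compartment`) and plain `assert` in place of `MOZ_RELEASE_ASSERT`/`MOZ_ASSERT` so it compiles on its own (note that `assert` compiles out under NDEBUG, whereas `MOZ_RELEASE_ASSERT` fires in release builds too, which is the point of the patch):

    #include <cassert>
    #include <cstddef>
    #include <cstdio>

    struct Stats { size_t objectsGCHeap = 0; };  // stand-in for JS::CompartmentStats

    class Compartment {                          // stand-in for JSCompartment
        Stats* stats_ = nullptr;                 // only non-null during memory reporting

      public:
        // Returning a reference documents that callers must never see null;
        // the assert makes a stale or missing pointer fail fast rather than
        // letting callers dereference null (the patch uses MOZ_RELEASE_ASSERT
        // here so the check survives into release builds).
        Stats& stats() {
            assert(stats_);
            return *stats_;
        }
        void setStats(Stats* newStats) {
            assert(!stats_ && newStats);         // no double-set, no null
            stats_ = newStats;
        }
        void nullStats() {
            assert(stats_);                      // no double-clear
            stats_ = nullptr;
        }
    };

    int main() {
        Compartment comp;
        Stats s;
        comp.setStats(&s);                 // reporting begins
        comp.stats().objectsGCHeap += 64;  // measurement goes through the reference
        comp.nullStats();                  // reporting ends
        std::printf("objectsGCHeap = %zu\n", s.objectsGCHeap);
        return 0;
    }

Passing `CompartmentStats&` rather than `CompartmentStats*` through helpers such as `AddClassInfo` then encodes the non-null invariant in the type system, so call sites cannot silently reintroduce a null.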
js/public/MemoryMetrics.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/vm/MemoryMetrics.cpp
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -732,16 +732,18 @@ struct CompartmentStats
         allClasses(other.allClasses),
         notableClasses(mozilla::Move(other.notableClasses)),
         isTotals(other.isTotals)
     {
         other.allClasses = nullptr;
         MOZ_ASSERT(!other.isTotals);
     }
 
+    CompartmentStats(const CompartmentStats&) = delete; // disallow copying
+
     ~CompartmentStats() {
         // |allClasses| is usually deleted and set to nullptr before this
         // destructor runs. But there are failure cases due to OOMs that may
         // prevent that, so it doesn't hurt to try again here.
         js_delete(allClasses);
     }
 
     bool initClasses(JSRuntime* rt);
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -75,17 +75,17 @@ JSCompartment::JSCompartment(Zone* zone,
     nonSyntacticLexicalScopes_(nullptr),
     gcIncomingGrayPointers(nullptr),
     debugModeBits(0),
     watchpointMap(nullptr),
     scriptCountsMap(nullptr),
     debugScriptMap(nullptr),
     debugScopes(nullptr),
     enumerators(nullptr),
-    compartmentStats(nullptr),
+    compartmentStats_(nullptr),
     scheduledForDestruction(false),
     maybeAlive(true),
     jitCompartment_(nullptr),
     mappedArgumentsTemplate_(nullptr),
     unmappedArgumentsTemplate_(nullptr),
     lcovOutput()
 {
     PodArrayZero(sawDeprecatedLanguageExtension);
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -750,18 +750,38 @@ struct JSCompartment
     js::DebugScopes* debugScopes;
 
     /*
      * List of potentially active iterators that may need deleted property
      * suppression.
      */
     js::NativeIterator* enumerators;
 
+  private:
     /* Used by memory reporters and invalid otherwise. */
-    void*              compartmentStats;
+    JS::CompartmentStats* compartmentStats_;
+
+  public:
+    // This should only be called when it is non-null, i.e. during memory
+    // reporting.
+    JS::CompartmentStats& compartmentStats() {
+        // We use MOZ_RELEASE_ASSERT here because in bug 1132502 there was some
+        // (inconclusive) evidence that compartmentStats_ can be nullptr
+        // unexpectedly.
+        MOZ_RELEASE_ASSERT(compartmentStats_);
+        return *compartmentStats_;
+    }
+    void nullCompartmentStats() {
+        MOZ_ASSERT(compartmentStats_);
+        compartmentStats_ = nullptr;
+    }
+    void setCompartmentStats(JS::CompartmentStats* newStats) {
+        MOZ_ASSERT(!compartmentStats_ && newStats);
+        compartmentStats_ = newStats;
+    }
 
     // These flags help us to discover if a compartment that shouldn't be alive
     // manages to outlive a GC.
     bool scheduledForDestruction;
     bool maybeAlive;
 
   private:
     js::jit::JitCompartment* jitCompartment_;
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -324,17 +324,17 @@ StatsCompartmentCallback(JSRuntime* rt, 
 
     // CollectRuntimeStats reserves enough space.
     MOZ_ALWAYS_TRUE(rtStats->compartmentStatsVector.growBy(1));
     CompartmentStats& cStats = rtStats->compartmentStatsVector.back();
     if (!cStats.initClasses(rt))
         MOZ_CRASH("oom");
     rtStats->initExtraCompartmentStats(compartment, &cStats);
 
-    compartment->compartmentStats = &cStats;
+    compartment->setCompartmentStats(&cStats);
 
     // Measure the compartment object itself, and things hanging off it.
     compartment->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                         &cStats.typeInferenceAllocationSiteTables,
                                         &cStats.typeInferenceArrayTypeTables,
                                         &cStats.typeInferenceObjectTypeTables,
                                         &cStats.compartmentObject,
                                         &cStats.compartmentTables,
@@ -362,42 +362,35 @@ StatsArenaCallback(JSRuntime* rt, void* 
 
     // We don't call the callback on unused things.  So we compute the
     // unused space like this:  arenaUnused = maxArenaUnused - arenaUsed.
     // We do this by setting arenaUnused to maxArenaUnused here, and then
     // subtracting thingSize for every used cell, in StatsCellCallback().
     rtStats->currZoneStats->unusedGCThings.addToKind(traceKind, allocationSpace);
 }
 
-static CompartmentStats*
-GetCompartmentStats(JSCompartment* comp)
-{
-    return static_cast<CompartmentStats*>(comp->compartmentStats);
-}
-
 // FineGrained is used for normal memory reporting.  CoarseGrained is used by
 // AddSizeOfTab(), which aggregates all the measurements into a handful of
 // high-level numbers, which means that fine-grained reporting would be a waste
 // of effort.
 enum Granularity {
     FineGrained,
     CoarseGrained
 };
 
 static void
-AddClassInfo(Granularity granularity, CompartmentStats* cStats, const char* className,
+AddClassInfo(Granularity granularity, CompartmentStats& cStats, const char* className,
              JS::ClassInfo& info)
 {
     if (granularity == FineGrained) {
         if (!className)
             className = "<no class name>";
-        CompartmentStats::ClassesHashMap::AddPtr p =
-            cStats->allClasses->lookupForAdd(className);
+        CompartmentStats::ClassesHashMap::AddPtr p = cStats.allClasses->lookupForAdd(className);
         if (!p) {
-            bool ok = cStats->allClasses->add(p, className, info);
+            bool ok = cStats.allClasses->add(p, className, info);
             // Ignore failure -- we just won't record the
             // object/shape/base-shape as notable.
             (void)ok;
         } else {
             p->value().add(info);
         }
     }
 }
@@ -411,44 +404,44 @@ StatsCellCallback(JSRuntime* rt, void* d
                   size_t thingSize)
 {
     StatsClosure* closure = static_cast<StatsClosure*>(data);
     RuntimeStats* rtStats = closure->rtStats;
     ZoneStats* zStats = rtStats->currZoneStats;
     switch (traceKind) {
       case JS::TraceKind::Object: {
         JSObject* obj = static_cast<JSObject*>(thing);
-        CompartmentStats* cStats = GetCompartmentStats(obj->compartment());
+        CompartmentStats& cStats = obj->compartment()->compartmentStats();
         JS::ClassInfo info;        // This zeroes all the sizes.
         info.objectsGCHeap += thingSize;
         obj->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);
 
-        cStats->classInfo.add(info);
+        cStats.classInfo.add(info);
 
         const Class* clasp = obj->getClass();
         const char* className = clasp->name;
         AddClassInfo(granularity, cStats, className, info);
 
         if (ObjectPrivateVisitor* opv = closure->opv) {
             nsISupports* iface;
             if (opv->getISupports_(obj, &iface) && iface)
-                cStats->objectsPrivate += opv->sizeOfIncludingThis(iface);
+                cStats.objectsPrivate += opv->sizeOfIncludingThis(iface);
         }
         break;
       }
 
       case JS::TraceKind::Script: {
         JSScript* script = static_cast<JSScript*>(thing);
-        CompartmentStats* cStats = GetCompartmentStats(script->compartment());
-        cStats->scriptsGCHeap += thingSize;
-        cStats->scriptsMallocHeapData += script->sizeOfData(rtStats->mallocSizeOf_);
-        cStats->typeInferenceTypeScripts += script->sizeOfTypeScript(rtStats->mallocSizeOf_);
-        jit::AddSizeOfBaselineData(script, rtStats->mallocSizeOf_, &cStats->baselineData,
-                                   &cStats->baselineStubsFallback);
-        cStats->ionData += jit::SizeOfIonData(script, rtStats->mallocSizeOf_);
+        CompartmentStats& cStats = script->compartment()->compartmentStats();
+        cStats.scriptsGCHeap += thingSize;
+        cStats.scriptsMallocHeapData += script->sizeOfData(rtStats->mallocSizeOf_);
+        cStats.typeInferenceTypeScripts += script->sizeOfTypeScript(rtStats->mallocSizeOf_);
+        jit::AddSizeOfBaselineData(script, rtStats->mallocSizeOf_, &cStats.baselineData,
+                                   &cStats.baselineStubsFallback);
+        cStats.ionData += jit::SizeOfIonData(script, rtStats->mallocSizeOf_);
 
         ScriptSource* ss = script->scriptSource();
         SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
         if (!entry) {
             bool ok = closure->seenSources.add(entry, ss);
             (void)ok; // Not much to be done on failure.
 
             JS::ScriptSourceInfo info;  // This zeroes all the sizes.
@@ -509,23 +502,23 @@ StatsCellCallback(JSRuntime* rt, void* d
       }
 
       case JS::TraceKind::Symbol:
         zStats->symbolsGCHeap += thingSize;
         break;
 
       case JS::TraceKind::BaseShape: {
         BaseShape* base = static_cast<BaseShape*>(thing);
-        CompartmentStats* cStats = GetCompartmentStats(base->compartment());
+        CompartmentStats& cStats = base->compartment()->compartmentStats();
 
         JS::ClassInfo info;        // This zeroes all the sizes.
         info.shapesGCHeapBase += thingSize;
         // No malloc-heap measurements.
 
-        cStats->classInfo.add(info);
+        cStats.classInfo.add(info);
 
         const Class* clasp = base->clasp();
         const char* className = clasp->name;
         AddClassInfo(granularity, cStats, className, info);
         break;
       }
 
       case JS::TraceKind::JitCode: {
@@ -538,24 +531,24 @@ StatsCellCallback(JSRuntime* rt, void* d
         LazyScript* lazy = static_cast<LazyScript*>(thing);
         zStats->lazyScriptsGCHeap += thingSize;
         zStats->lazyScriptsMallocHeap += lazy->sizeOfExcludingThis(rtStats->mallocSizeOf_);
         break;
       }
 
       case JS::TraceKind::Shape: {
         Shape* shape = static_cast<Shape*>(thing);
-        CompartmentStats* cStats = GetCompartmentStats(shape->compartment());
+        CompartmentStats& cStats = shape->compartment()->compartmentStats();
         JS::ClassInfo info;        // This zeroes all the sizes.
         if (shape->inDictionary())
             info.shapesGCHeapDict += thingSize;
         else
             info.shapesGCHeapTree += thingSize;
         shape->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);
-        cStats->classInfo.add(info);
+        cStats.classInfo.add(info);
 
         const BaseShape* base = shape->base();
         const Class* clasp = base->clasp();
         const char* className = clasp->name;
         AddClassInfo(granularity, cStats, className, info);
         break;
       }
 
@@ -774,17 +767,17 @@ CollectRuntimeStatsHelper(JSRuntime* rt,
     // Check that the in-arena measurements look ok.
     size_t totalArenaSize = rtStats->zTotals.gcHeapArenaAdmin +
                             rtStats->zTotals.unusedGCThings.totalSize() +
                             rtStats->gcHeapGCThings;
     MOZ_ASSERT(totalArenaSize % gc::ArenaSize == 0);
 #endif
 
     for (CompartmentsIter comp(rt, WithAtoms); !comp.done(); comp.next())
-        comp->compartmentStats = nullptr;
+        comp->nullCompartmentStats();
 
     size_t numDirtyChunks =
         (rtStats->gcHeapChunkTotal - rtStats->gcHeapUnusedChunks) / gc::ChunkSize;
     size_t perChunkAdmin =
         sizeof(gc::Chunk) - (sizeof(gc::Arena) * gc::ArenasPerChunk);
     rtStats->gcHeapChunkAdmin = numDirtyChunks * perChunkAdmin;
 
     // |gcHeapUnusedArenas| is the only thing left.  Compute it in terms of
@@ -879,17 +872,17 @@ AddSizeOfTab(JSRuntime* rt, HandleObject
 
     MOZ_ASSERT(rtStats.zoneStatsVector.length() == 1);
     rtStats.zTotals.addSizes(rtStats.zoneStatsVector[0]);
 
     for (size_t i = 0; i < rtStats.compartmentStatsVector.length(); i++)
         rtStats.cTotals.addSizes(rtStats.compartmentStatsVector[i]);
 
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
-        comp->compartmentStats = nullptr;
+        comp->nullCompartmentStats();
 
     rtStats.zTotals.addToTabSizes(sizes);
     rtStats.cTotals.addToTabSizes(sizes);
 
     return true;
 }
 
 JS_PUBLIC_API(bool)