Bug 1517409 - (part 2) Rename HeapUsage to HeapSize r=jonco
author Paul Bone <pbone@mozilla.com>
Fri, 11 Jan 2019 16:16:14 +1100
changeset 514396 6c747052bceb38167dfc798a7d92fd6269fde95f
parent 514395 92b72891266c5ab96c3c62fb0792f82a8f57e859
child 514397 4b29deaac54a9d52b32d2ecdd000c4da06f6ac86
push id 1953
push user ffxbld-merge
push date Mon, 11 Mar 2019 12:10:20 +0000
treeherder mozilla-release@9c35dcbaa899
reviewers jonco
bugs 1517409
milestone 66.0a1
Bug 1517409 - (part 2) Rename HeapUsage to HeapSize r=jonco
js/src/builtin/TestingFunctions.cpp
js/src/gc/Allocator.cpp
js/src/gc/GC.cpp
js/src/gc/GCRuntime.h
js/src/gc/Heap.h
js/src/gc/Nursery.cpp
js/src/gc/Statistics.cpp
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jsfriendapi.cpp
--- a/js/src/builtin/TestingFunctions.cpp
+++ b/js/src/builtin/TestingFunctions.cpp
@@ -413,32 +413,32 @@ static bool GC(JSContext* cx, unsigned a
     if (arg.isString()) {
       if (!JS_StringEqualsAscii(cx, arg.toString(), "shrinking", &shrinking)) {
         return false;
       }
     }
   }
 
 #ifndef JS_MORE_DETERMINISTIC
-  size_t preBytes = cx->runtime()->gc.usage.gcBytes();
+  size_t preBytes = cx->runtime()->gc.heapSize.gcBytes();
 #endif
 
   if (zone) {
     PrepareForDebugGC(cx->runtime());
   } else {
     JS::PrepareForFullGC(cx);
   }
 
   JSGCInvocationKind gckind = shrinking ? GC_SHRINK : GC_NORMAL;
   JS::NonIncrementalGC(cx, gckind, JS::gcreason::API);
 
   char buf[256] = {'\0'};
 #ifndef JS_MORE_DETERMINISTIC
   SprintfLiteral(buf, "before %zu, after %zu\n", preBytes,
-                 cx->runtime()->gc.usage.gcBytes());
+                 cx->runtime()->gc.heapSize.gcBytes());
 #endif
   return ReturnStringCopy(cx, args, buf);
 }
 
 static bool MinorGC(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   if (args.get(0) == BooleanValue(true)) {
     cx->runtime()->gc.storeBuffer().setAboutToOverflow(
--- a/js/src/gc/Allocator.cpp
+++ b/js/src/gc/Allocator.cpp
@@ -344,17 +344,17 @@ bool GCRuntime::gcIfNeededAtAllocation(J
   if (cx->hasAnyPendingInterrupt()) {
     gcIfRequested();
   }
 
   // If we have grown past our GC heap threshold while in the middle of
   // an incremental GC, we're growing faster than we're GCing, so stop
   // the world and do a full, non-incremental GC right now, if possible.
   if (isIncrementalGCInProgress() &&
-      cx->zone()->usage.gcBytes() > cx->zone()->threshold.gcTriggerBytes()) {
+      cx->zone()->zoneSize.gcBytes() > cx->zone()->threshold.gcTriggerBytes()) {
     PrepareZoneForGC(cx->zone());
     gc(GC_NORMAL, JS::gcreason::INCREMENTAL_TOO_SLOW);
   }
 
   return true;
 }
 
 template <typename T>
@@ -562,21 +562,21 @@ bool GCRuntime::wantBackgroundAllocation
 
 Arena* GCRuntime::allocateArena(Chunk* chunk, Zone* zone, AllocKind thingKind,
                                 ShouldCheckThresholds checkThresholds,
                                 const AutoLockGC& lock) {
   MOZ_ASSERT(chunk->hasAvailableArenas());
 
   // Fail the allocation if we are over our heap size limits.
   if ((checkThresholds != ShouldCheckThresholds::DontCheckThresholds) &&
-      (usage.gcBytes() >= tunables.gcMaxBytes()))
+      (heapSize.gcBytes() >= tunables.gcMaxBytes()))
     return nullptr;
 
   Arena* arena = chunk->allocateArena(rt, zone, thingKind, lock);
-  zone->usage.addGCArena();
+  zone->zoneSize.addGCArena();
 
   // Trigger an incremental slice if needed.
   if (checkThresholds != ShouldCheckThresholds::DontCheckThresholds) {
     maybeAllocTriggerZoneGC(zone, lock);
   }
 
   return arena;
 }
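
The allocateArena hunk above keeps the same two checks under the new names: the runtime-wide heapSize is compared against the hard gcMaxBytes cap before an arena is handed out, and the per-zone zoneSize then feeds maybeAllocTriggerZoneGC. A minimal compilable sketch of the first check, using hypothetical standalone parameters instead of the real GCRuntime members:

// Sketch only: the hard-cap test that makes arena allocation fail once the
// runtime-wide GC heap reaches gcMaxBytes. runtimeGCBytes and gcMaxBytes are
// hypothetical stand-ins for gc.heapSize.gcBytes() and tunables.gcMaxBytes().
#include <cstddef>

bool canAllocateArena(bool checkThresholds, size_t runtimeGCBytes,
                      size_t gcMaxBytes) {
  // Past the hard limit: refuse, so allocateArena() returns nullptr and the
  // caller reports OOM or falls back to triggering a GC.
  return !(checkThresholds && runtimeGCBytes >= gcMaxBytes);
}

On success the real code calls zone->zoneSize.addGCArena(), which (per the HeapSize rollup shown in the Heap.h diff below) bumps both the zone's counter and the runtime's heapSize.
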
--- a/js/src/gc/GC.cpp
+++ b/js/src/gc/GC.cpp
@@ -887,27 +887,27 @@ void Chunk::updateChunkListAfterFree(JSR
     rt->gc.availableChunks(lock).remove(this);
     decommitAllArenas();
     MOZ_ASSERT(info.numArenasFreeCommitted == 0);
     rt->gc.recycleChunk(this, lock);
   }
 }
 
 void GCRuntime::releaseArena(Arena* arena, const AutoLockGC& lock) {
-  arena->zone->usage.removeGCArena();
+  arena->zone->zoneSize.removeGCArena();
   arena->chunk()->releaseArena(rt, arena, lock);
 }
 
 GCRuntime::GCRuntime(JSRuntime* rt)
     : rt(rt),
       systemZone(nullptr),
       atomsZone(nullptr),
       stats_(rt),
       marker(rt),
-      usage(nullptr),
+      heapSize(nullptr),
       rootsHash(256),
       nextCellUniqueId_(LargestTaggedNullCellPointer +
                         1),  // Ensure disjoint from null tagged pointers.
       numArenasFreeCommitted(0),
       verifyPreData(nullptr),
       chunkAllocationSinceLastGC(false),
       lastGCTime(ReallyNow()),
       mode(TuningDefaults::Mode),
@@ -1374,17 +1374,17 @@ bool GCRuntime::setParameter(JSGCParamKe
     case JSGC_COMPACTING_ENABLED:
       compactingEnabled = value != 0;
       break;
     default:
       if (!tunables.setParameter(key, value, lock)) {
         return false;
       }
       for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        zone->threshold.updateAfterGC(zone->usage.gcBytes(), GC_NORMAL,
+        zone->threshold.updateAfterGC(zone->zoneSize.gcBytes(), GC_NORMAL,
                                       tunables, schedulingState, lock);
       }
   }
 
   return true;
 }
 
 bool GCSchedulingTunables::setParameter(JSGCParamKey key, uint32_t value,
@@ -1602,17 +1602,17 @@ void GCRuntime::resetParameter(JSGCParam
       mode = TuningDefaults::Mode;
       break;
     case JSGC_COMPACTING_ENABLED:
       compactingEnabled = TuningDefaults::CompactingEnabled;
       break;
     default:
       tunables.resetParameter(key, lock);
       for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        zone->threshold.updateAfterGC(zone->usage.gcBytes(), GC_NORMAL,
+        zone->threshold.updateAfterGC(zone->zoneSize.gcBytes(), GC_NORMAL,
                                       tunables, schedulingState, lock);
       }
   }
 }
 
 void GCSchedulingTunables::resetParameter(JSGCParamKey key,
                                           const AutoLockGC& lock) {
   switch (key) {
@@ -1680,17 +1680,17 @@ void GCSchedulingTunables::resetParamete
 
 uint32_t GCRuntime::getParameter(JSGCParamKey key, const AutoLockGC& lock) {
   switch (key) {
     case JSGC_MAX_BYTES:
       return uint32_t(tunables.gcMaxBytes());
     case JSGC_MAX_MALLOC_BYTES:
       return mallocCounter.maxBytes();
     case JSGC_BYTES:
-      return uint32_t(usage.gcBytes());
+      return uint32_t(heapSize.gcBytes());
     case JSGC_MODE:
       return uint32_t(mode);
     case JSGC_UNUSED_CHUNKS:
       return uint32_t(emptyChunks(lock).count());
     case JSGC_TOTAL_CHUNKS:
       return uint32_t(fullChunks(lock).count() + availableChunks(lock).count() +
                       emptyChunks(lock).count());
     case JSGC_SLICE_TIME_BUDGET:
@@ -3275,17 +3275,17 @@ void GCRuntime::maybeAllocTriggerZoneGC(
   if (!CurrentThreadCanAccessRuntime(rt)) {
     // Zones in use by a helper thread can't be collected.
     MOZ_ASSERT(zone->usedByHelperThread() || zone->isAtomsZone());
     return;
   }
 
   MOZ_ASSERT(!JS::RuntimeHeapIsCollecting());
 
-  size_t usedBytes = zone->usage.gcBytes();
+  size_t usedBytes = zone->zoneSize.gcBytes();
   size_t thresholdBytes = zone->threshold.gcTriggerBytes();
 
   if (usedBytes >= thresholdBytes) {
     // The threshold has been surpassed, immediately trigger a GC, which
     // will be done non-incrementally.
     triggerZoneGC(zone, JS::gcreason::ALLOC_TRIGGER, usedBytes, thresholdBytes);
     return;
   }
@@ -3373,17 +3373,17 @@ void GCRuntime::maybeGC(Zone* zone) {
 #endif
 
   if (gcIfRequested()) {
     return;
   }
 
   float threshold = zone->threshold.eagerAllocTrigger(
       schedulingState.inHighFrequencyGCMode());
-  float usedBytes = zone->usage.gcBytes();
+  float usedBytes = zone->zoneSize.gcBytes();
   if (usedBytes > 1024 * 1024 && usedBytes >= threshold &&
       !isIncrementalGCInProgress() && !isBackgroundSweeping()) {
     stats().recordTrigger(usedBytes, threshold);
     PrepareZoneForGC(zone);
     startGC(GC_NORMAL, JS::gcreason::EAGER_ALLOC_TRIGGER);
   }
 }
 
@@ -5777,17 +5777,17 @@ IncrementalProgress GCRuntime::endSweepi
 
   /* Free LIFO blocks on a background thread if possible. */
   startBackgroundFree();
 
   /* Update the GC state for zones we have swept. */
   for (SweepGroupZonesIter zone(rt); !zone.done(); zone.next()) {
     AutoLockGC lock(rt);
     zone->changeGCState(Zone::Sweep, Zone::Finished);
-    zone->threshold.updateAfterGC(zone->usage.gcBytes(), invocationKind,
+    zone->threshold.updateAfterGC(zone->zoneSize.gcBytes(), invocationKind,
                                   tunables, schedulingState, lock);
     zone->updateAllGCMallocCountersOnGCEnd(lock);
     zone->arenas.unmarkPreMarkedFreeCells();
   }
 
   /*
    * Start background thread to sweep zones if required, sweeping the atoms
    * zone last if present.
@@ -7253,17 +7253,17 @@ GCRuntime::IncrementalResult GCRuntime::
   }
 
   AbortReason resetReason = AbortReason::None;
   for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
     if (!zone->canCollect()) {
       continue;
     }
 
-    if (zone->usage.gcBytes() >= zone->threshold.gcTriggerBytes()) {
+    if (zone->zoneSize.gcBytes() >= zone->threshold.gcTriggerBytes()) {
       CheckZoneIsScheduled(zone, reason, "GC bytes");
       budget.makeUnlimited();
       stats().nonincremental(AbortReason::GCBytesTrigger);
       if (zone->wasGCStarted() && zone->gcState() > Zone::Sweep) {
         resetReason = AbortReason::GCBytesTrigger;
       }
     }
 
@@ -7309,17 +7309,17 @@ class AutoScheduleZonesForGC {
       // To avoid resets, continue to collect any zones that were being
       // collected in a previous slice.
       if (gc->isIncrementalGCInProgress() && zone->wasGCStarted()) {
         zone->scheduleGC();
       }
 
       // This is a heuristic to reduce the total number of collections.
       bool inHighFrequencyMode = gc->schedulingState.inHighFrequencyGCMode();
-      if (zone->usage.gcBytes() >=
+      if (zone->zoneSize.gcBytes() >=
           zone->threshold.eagerAllocTrigger(inHighFrequencyMode)) {
         zone->scheduleGC();
       }
 
       // This ensures we collect zones that have reached the malloc limit.
       if (zone->shouldTriggerGCForTooMuchMalloc()) {
         zone->scheduleGC();
       }
@@ -8131,17 +8131,17 @@ void GCRuntime::mergeRealms(Realm* sourc
     MOZ_ASSERT(r.get() == source);
   }
 
   // Merge the allocator, stats and UIDs in source's zone into target's zone.
   target->zone()->arenas.adoptArenas(&source->zone()->arenas,
                                      targetZoneIsCollecting);
   target->zone()->addTenuredAllocsSinceMinorGC(
       source->zone()->getAndResetTenuredAllocsSinceMinorGC());
-  target->zone()->usage.adopt(source->zone()->usage);
+  target->zone()->zoneSize.adopt(source->zone()->zoneSize);
   target->zone()->adoptUniqueIds(source->zone());
   target->zone()->adoptMallocBytes(source->zone());
 
   // Merge other info in source's zone into target's zone.
   target->zone()->types.typeLifoAlloc().transferFrom(
       &source->zone()->types.typeLifoAlloc());
   MOZ_RELEASE_ASSERT(source->zone()->types.sweepTypeLifoAlloc.ref().isEmpty());
 
@@ -8718,17 +8718,17 @@ uint64_t js::gc::NextCellUniqueId(JSRunt
 }
 
 namespace js {
 namespace gc {
 namespace MemInfo {
 
 static bool GCBytesGetter(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
-  args.rval().setNumber(double(cx->runtime()->gc.usage.gcBytes()));
+  args.rval().setNumber(double(cx->runtime()->gc.heapSize.gcBytes()));
   return true;
 }
 
 static bool GCMaxBytesGetter(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   args.rval().setNumber(double(cx->runtime()->gc.tunables.gcMaxBytes()));
   return true;
 }
@@ -8767,17 +8767,17 @@ static bool MajorGCCountGetter(JSContext
 static bool MinorGCCountGetter(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   args.rval().setNumber(double(cx->runtime()->gc.minorGCCount()));
   return true;
 }
 
 static bool ZoneGCBytesGetter(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
-  args.rval().setNumber(double(cx->zone()->usage.gcBytes()));
+  args.rval().setNumber(double(cx->zone()->zoneSize.gcBytes()));
   return true;
 }
 
 static bool ZoneGCTriggerBytesGetter(JSContext* cx, unsigned argc, Value* vp) {
   CallArgs args = CallArgsFromVp(argc, vp);
   args.rval().setNumber(double(cx->zone()->threshold.gcTriggerBytes()));
   return true;
 }
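
All of the GC.cpp hunks above feed one scheduling question: how a zone's GC heap bytes (now zoneSize.gcBytes()) compare with the zone's trigger thresholds. A simplified, compilable sketch of that decision, with hypothetical names and an illustrative eager factor rather than the real GCSchedulingTunables values:

// Sketch of the allocation-trigger decisions under assumed, simplified inputs.
#include <cstddef>
#include <cstdio>

enum class TriggerKind { None, Eager, HardTrigger };

// Mirrors the shape of the maybeAllocTriggerZoneGC / maybeGC checks:
// at or past the trigger bytes -> collect now; past the eager trigger and
// over 1 MiB of heap -> start a normal GC early to smooth out pauses.
TriggerKind classifyTrigger(size_t usedBytes, size_t triggerBytes,
                            double eagerFactor /* illustrative, e.g. 0.9 */) {
  if (usedBytes >= triggerBytes) {
    return TriggerKind::HardTrigger;
  }
  double eagerThreshold = triggerBytes * eagerFactor;
  if (usedBytes > 1024 * 1024 && usedBytes >= eagerThreshold) {
    return TriggerKind::Eager;
  }
  return TriggerKind::None;
}

int main() {
  // usedBytes would come from zone->zoneSize.gcBytes() after this patch.
  std::printf("%d\n", static_cast<int>(classifyTrigger(30u << 20, 32u << 20, 0.9)));
  return 0;
}

In the real code the hard-trigger path reports JS::gcreason::ALLOC_TRIGGER and the eager path starts a normal GC with EAGER_ALLOC_TRIGGER; the sketch only classifies the comparison.
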
--- a/js/src/gc/GCRuntime.h
+++ b/js/src/gc/GCRuntime.h
@@ -709,18 +709,18 @@ class GCRuntime {
 
  public:
   gcstats::Statistics& stats() { return stats_.ref(); }
 
   GCMarker marker;
 
   Vector<JS::GCCellPtr, 0, SystemAllocPolicy> unmarkGrayStack;
 
-  /* Track heap usage for this runtime. */
-  HeapUsage usage;
+  /* Track heap size for this runtime. */
+  HeapSize heapSize;
 
   /* GC scheduling state and parameters. */
   GCSchedulingTunables tunables;
   GCSchedulingState schedulingState;
 
   // State used for managing atom mark bitmaps in each zone.
   AtomMarkingRuntime atomMarking;
 
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -837,36 +837,36 @@ static_assert(
     js::gc::ChunkStoreBufferOffset ==
         offsetof(Chunk, trailer) + offsetof(ChunkTrailer, storeBuffer),
     "The hardcoded API storeBuffer offset must match the actual offset.");
 
 /*
  * Tracks the used sizes for owned heap data and automatically maintains the
  * memory usage relationship between GCRuntime and Zones.
  */
-class HeapUsage {
+class HeapSize {
   /*
    * A heap usage that contains our parent's heap usage, or null if this is
    * the top-level usage container.
    */
-  HeapUsage* const parent_;
+  HeapSize* const parent_;
 
   /*
    * The approximate number of bytes in use on the GC heap, to the nearest
    * ArenaSize. This does not include any malloc data. It also does not
    * include not-actively-used addresses that are still reserved at the OS
    * level for GC usage. It is atomic because it is updated by both the active
    * and GC helper threads.
    */
   mozilla::Atomic<size_t, mozilla::ReleaseAcquire,
                   mozilla::recordreplay::Behavior::DontPreserve>
       gcBytes_;
 
  public:
-  explicit HeapUsage(HeapUsage* parent) : parent_(parent), gcBytes_(0) {}
+  explicit HeapSize(HeapSize* parent) : parent_(parent), gcBytes_(0) {}
 
   size_t gcBytes() const { return gcBytes_; }
 
   void addGCArena() {
     gcBytes_ += ArenaSize;
     if (parent_) {
       parent_->addGCArena();
     }
@@ -875,17 +875,17 @@ class HeapUsage {
     MOZ_ASSERT(gcBytes_ >= ArenaSize);
     gcBytes_ -= ArenaSize;
     if (parent_) {
       parent_->removeGCArena();
     }
   }
 
   /* Pair to adoptArenas. Adopts the attendant usage statistics. */
-  void adopt(HeapUsage& other) {
+  void adopt(HeapSize& other) {
     gcBytes_ += other.gcBytes_;
     other.gcBytes_ = 0;
   }
 };
 
 inline void Arena::checkAddress() const {
   mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
   MOZ_ASSERT(addr);
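
The Heap.h hunk above is the heart of the rename: the class is now HeapSize, but its parent-chaining behaviour is unchanged. A self-contained toy (hypothetical names, plain std::atomic instead of mozilla::Atomic) showing how a zone-level counter forwards every arena-sized update to the runtime-level counter, so GCRuntime::heapSize always equals the sum of the zones' zoneSize values:

// Sketch only: mirrors HeapSize's parent rollup with plain std::atomic,
// not the mozilla::Atomic/recordreplay machinery used in Heap.h.
#include <atomic>
#include <cassert>
#include <cstddef>
#include <cstdio>

static constexpr size_t kArenaSize = 4096;  // stand-in for js::gc::ArenaSize

class ToyHeapSize {
  ToyHeapSize* const parent_;       // runtime-level counter, or nullptr at the top
  std::atomic<size_t> gcBytes_{0};  // bytes in use, to the nearest arena

 public:
  explicit ToyHeapSize(ToyHeapSize* parent) : parent_(parent) {}

  size_t gcBytes() const { return gcBytes_.load(); }

  void addGCArena() {
    gcBytes_ += kArenaSize;
    if (parent_) parent_->addGCArena();  // roll the increment up to the runtime
  }

  void removeGCArena() {
    assert(gcBytes_ >= kArenaSize);
    gcBytes_ -= kArenaSize;
    if (parent_) parent_->removeGCArena();
  }
};

int main() {
  ToyHeapSize runtimeHeapSize(nullptr);    // GCRuntime::heapSize analogue
  ToyHeapSize zoneSize(&runtimeHeapSize);  // Zone::zoneSize analogue
  zoneSize.addGCArena();
  zoneSize.addGCArena();
  zoneSize.removeGCArena();
  std::printf("zone=%zu runtime=%zu\n", zoneSize.gcBytes(),
              runtimeHeapSize.gcBytes());
  return 0;
}
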
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -823,17 +823,17 @@ void js::Nursery::collect(JS::gcreason::
   stats().setStat(gcstats::STAT_NURSERY_STRING_REALMS_DISABLED,
                   numNurseryStringRealmsDisabled);
   stats().setStat(gcstats::STAT_STRINGS_TENURED, numStringsTenured);
   endProfile(ProfileKey::Pretenure);
 
   // We ignore gcMaxBytes when allocating for minor collection. However, if we
   // overflowed, we disable the nursery. The next time we allocate, we'll fail
   // because gcBytes >= gcMaxBytes.
-  if (rt->gc.usage.gcBytes() >= tunables().gcMaxBytes()) {
+  if (rt->gc.heapSize.gcBytes() >= tunables().gcMaxBytes()) {
     disable();
   }
   // Disable the nursery if the user changed the configuration setting.  The
   // nursery can only be re-enabled by resetting the configuration and
   // restarting firefox.
   if (chunkCountLimit_ == 0) {
     disable();
   }
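
The Nursery.cpp hunk relies on gcMaxBytes not being enforced during minor collection; the cap is only checked afterwards. A tiny sketch of that post-collection guard, with hypothetical parameters standing in for gc.heapSize.gcBytes() and tunables().gcMaxBytes():

#include <cstddef>

// Sketch: if tenuring during a minor GC pushed the runtime GC heap past the
// hard cap, turn the nursery off so the next tenured allocation hits the
// gcBytes >= gcMaxBytes failure path instead of growing further.
void maybeDisableNurseryAfterMinorGC(size_t runtimeGCBytes, size_t gcMaxBytes,
                                     bool& nurseryEnabled) {
  if (runtimeGCBytes >= gcMaxBytes) {
    nurseryEnabled = false;
  }
}
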
--- a/js/src/gc/Statistics.cpp
+++ b/js/src/gc/Statistics.cpp
@@ -965,17 +965,17 @@ void Statistics::printStats() {
 }
 
 void Statistics::beginGC(JSGCInvocationKind kind) {
   slices_.clearAndFree();
   sccTimes.clearAndFree();
   gckind = kind;
   nonincrementalReason_ = gc::AbortReason::None;
 
-  preBytes = runtime->gc.usage.gcBytes();
+  preBytes = runtime->gc.heapSize.gcBytes();
   startingMajorGCNumber = runtime->gc.majorGCCount();
   startingSliceNumber = runtime->gc.gcNumber();
 }
 
 void Statistics::endGC() {
   TimeDuration sccTotal, sccLongest;
   sccDurations(&sccTotal, &sccLongest);
 
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -47,17 +47,17 @@ JS::Zone::Zone(JSRuntime* rt)
       gcWeakKeys_(this, SystemAllocPolicy(), rt->randomHashCodeScrambler()),
       typeDescrObjects_(this, this),
       markedAtoms_(this),
       atomCache_(this),
       externalStringCache_(this),
       functionToStringCache_(this),
       keepAtomsCount(this, 0),
       purgeAtomsDeferred(this, 0),
-      usage(&rt->gc.usage),
+      zoneSize(&rt->gc.heapSize),
       threshold(),
       gcDelayBytes(0),
       tenuredStrings(this, 0),
       allocNurseryStrings(this, true),
       propertyTree_(this, this),
       baseShapes_(this, this),
       initialShapes_(this, this),
       nurseryShapes_(this),
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -539,18 +539,18 @@ class Zone : public JS::shadow::Zone,
   js::ExternalStringCache& externalStringCache() {
     return externalStringCache_.ref();
   };
 
   js::FunctionToStringCache& functionToStringCache() {
     return functionToStringCache_.ref();
   }
 
-  // Track heap usage under this Zone.
-  js::gc::HeapUsage usage;
+  // Track heap size under this Zone.
+  js::gc::HeapSize zoneSize;
 
   // Thresholds used to trigger GC.
   js::gc::ZoneHeapThreshold threshold;
 
   // Amount of data to allocate before triggering a new incremental slice for
   // the current GC.
   js::UnprotectedData<size_t> gcDelayBytes;
 
--- a/js/src/jsfriendapi.cpp
+++ b/js/src/jsfriendapi.cpp
@@ -1410,10 +1410,10 @@ JS_FRIEND_API JS::Value js::MaybeGetScri
   if (!object->is<ScriptSourceObject>()) {
     return UndefinedValue();
   }
 
   return object->as<ScriptSourceObject>().canonicalPrivate();
 }
 
 JS_FRIEND_API uint64_t js::GetGCHeapUsageForObjectZone(JSObject* obj) {
-  return obj->zone()->usage.gcBytes();
+  return obj->zone()->zoneSize.gcBytes();
 }