Bug 1479954 - Rename Hash{Set,Map}::sizeOf{In,Ex}cludingThis(). r=luke
author: Nicholas Nethercote <nnethercote@mozilla.com>
Wed, 01 Aug 2018 09:57:52 +1000
changeset 429715 b50724cff788dfdea5f595c7ddc213d4491555c0
parent 429714 6f0153c4213030704e6d8e5433a0e1df0462b8dc
child 429716 591d08c5c22b1286579a216f0dc2c462cb5679fc
push id: 105976
push user: nnethercote@mozilla.com
push date: Thu, 02 Aug 2018 01:55:00 +0000
treeherder: mozilla-inbound@3c7c754ce8fd [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: luke
bugs: 1479954
milestone: 63.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1479954 - Rename Hash{Set,Map}::sizeOf{In,Ex}cludingThis(). r=luke In PLDHashTable the equivalent functions have a "Shallow" prefix, which makes it clear that they don't measure things hanging off the table. This patch makes mozilla::Hash{Set,Map} do likewise. MozReview-Commit-ID: 3kwCJynhW7d
js/public/GCHashTable.h
js/src/builtin/intl/SharedIntlData.cpp
js/src/ds/Bitmap.cpp
js/src/gc/Nursery.h
js/src/gc/NurseryAwareHashMap.h
js/src/gc/StoreBuffer.h
js/src/gc/WeakMap.cpp
js/src/jit/Ion.cpp
js/src/vm/ArrayBufferObject.cpp
js/src/vm/AtomsTable.h
js/src/vm/Compartment.h
js/src/vm/JSAtom.cpp
js/src/vm/JSScript.cpp
js/src/vm/ObjectGroup.cpp
js/src/vm/Realm.cpp
js/src/vm/Runtime.cpp
js/src/vm/SavedStacks.cpp
js/src/vm/Shape.cpp
js/src/vm/Shape.h
js/src/vm/SharedImmutableStringsCache.h
js/src/vm/TraceLogging.cpp
js/src/vm/TypeInference.cpp
js/xpconnect/src/XPCMaps.cpp
memory/replace/dmd/DMD.cpp
mfbt/HashTable.h
--- a/js/public/GCHashTable.h
+++ b/js/public/GCHashTable.h
@@ -516,17 +516,17 @@ class WeakCache<GCHashMap<Key, Value, Ha
     bool has(const Lookup& l) const {
         return lookup(l).found();
     }
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
         return map.sizeOfExcludingThis(mallocSizeOf);
     }
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return mallocSizeOf(this) + map.sizeOfExcludingThis(mallocSizeOf);
+        return mallocSizeOf(this) + map.shallowSizeOfExcludingThis(mallocSizeOf);
     }
 
     bool init(uint32_t len = 16) {
         MOZ_ASSERT(!needsBarrier);
         return map.init(len);
     }
 
     void clear() {
@@ -725,20 +725,20 @@ class WeakCache<GCHashSet<T, HashPolicy,
         return set.capacity();
     }
 
     bool has(const Lookup& l) const {
         return lookup(l).found();
     }
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return set.sizeOfExcludingThis(mallocSizeOf);
+        return set.shallowSizeOfExcludingThis(mallocSizeOf);
     }
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return mallocSizeOf(this) + set.sizeOfExcludingThis(mallocSizeOf);
+        return mallocSizeOf(this) + set.shallowSizeOfExcludingThis(mallocSizeOf);
     }
 
     bool init(uint32_t len = 16) {
         MOZ_ASSERT(!needsBarrier);
         return set.init(len);
     }
 
     void clear() {
--- a/js/src/builtin/intl/SharedIntlData.cpp
+++ b/js/src/builtin/intl/SharedIntlData.cpp
@@ -409,13 +409,13 @@ js::intl::SharedIntlData::trace(JSTracer
         ianaLinksCanonicalizedDifferentlyByICU.trace(trc);
         upperCaseFirstLocales.trace(trc);
     }
 }
 
 size_t
 js::intl::SharedIntlData::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
-    return availableTimeZones.sizeOfExcludingThis(mallocSizeOf) +
-           ianaZonesTreatedAsLinksByICU.sizeOfExcludingThis(mallocSizeOf) +
-           ianaLinksCanonicalizedDifferentlyByICU.sizeOfExcludingThis(mallocSizeOf) +
-           upperCaseFirstLocales.sizeOfExcludingThis(mallocSizeOf);
+    return availableTimeZones.shallowSizeOfExcludingThis(mallocSizeOf) +
+           ianaZonesTreatedAsLinksByICU.shallowSizeOfExcludingThis(mallocSizeOf) +
+           ianaLinksCanonicalizedDifferentlyByICU.shallowSizeOfExcludingThis(mallocSizeOf) +
+           upperCaseFirstLocales.shallowSizeOfExcludingThis(mallocSizeOf);
 }
--- a/js/src/ds/Bitmap.cpp
+++ b/js/src/ds/Bitmap.cpp
@@ -16,17 +16,17 @@ SparseBitmap::~SparseBitmap()
         for (Data::Range r(data.all()); !r.empty(); r.popFront())
             js_delete(r.front().value());
     }
 }
 
 size_t
 SparseBitmap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
-    size_t size = data.sizeOfExcludingThis(mallocSizeOf);
+    size_t size = data.shallowSizeOfExcludingThis(mallocSizeOf);
     for (Data::Range r(data.all()); !r.empty(); r.popFront())
         size += mallocSizeOf(r.front().value());
     return size;
 }
 
 SparseBitmap::BitBlock&
 SparseBitmap::createBlock(Data::AddPtr p, size_t blockId)
 {
--- a/js/src/gc/Nursery.h
+++ b/js/src/gc/Nursery.h
@@ -288,17 +288,17 @@ class Nursery
         return allocatedChunkCount() * gc::ChunkSize;
     }
     size_t sizeOfMallocedBuffers(mozilla::MallocSizeOf mallocSizeOf) const {
         if (!mallocedBuffers.initialized())
             return 0;
         size_t total = 0;
         for (MallocedBuffersSet::Range r = mallocedBuffers.all(); !r.empty(); r.popFront())
             total += mallocSizeOf(r.front());
-        total += mallocedBuffers.sizeOfExcludingThis(mallocSizeOf);
+        total += mallocedBuffers.shallowSizeOfExcludingThis(mallocSizeOf);
         return total;
     }
 
     // The number of bytes from the start position to the end of the nursery.
     // pass maxChunkCount(), allocatedChunkCount() or chunkCountLimit()
     // to calculate the nursery size, current lazy-allocated size or nursery
     // limit respectively.
     size_t spaceToEnd(unsigned chunkCount) const;
--- a/js/src/gc/NurseryAwareHashMap.h
+++ b/js/src/gc/NurseryAwareHashMap.h
@@ -96,21 +96,21 @@ class NurseryAwareHashMap
     bool empty() const { return map.empty(); }
     Ptr lookup(const Lookup& l) const { return map.lookup(l); }
     void remove(Ptr p) { map.remove(p); }
     Range all() const { return map.all(); }
     struct Enum : public MapType::Enum {
         explicit Enum(NurseryAwareHashMap& namap) : MapType::Enum(namap.map) {}
     };
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return map.sizeOfExcludingThis(mallocSizeOf) +
+        return map.shallowSizeOfExcludingThis(mallocSizeOf) +
                nurseryEntries.sizeOfExcludingThis(mallocSizeOf);
     }
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return map.sizeOfIncludingThis(mallocSizeOf) +
+        return map.shallowSizeOfIncludingThis(mallocSizeOf) +
                nurseryEntries.sizeOfIncludingThis(mallocSizeOf);
     }
 
     MOZ_MUST_USE bool put(const Key& k, const Value& v) {
         auto p = map.lookupForAdd(k);
         if (p) {
             if (!JS::GCPolicy<Key>::isTenured(k) || !JS::GCPolicy<Value>::isTenured(v)) {
                 if (!nurseryEntries.append(k))
--- a/js/src/gc/StoreBuffer.h
+++ b/js/src/gc/StoreBuffer.h
@@ -133,17 +133,17 @@ class StoreBuffer
             sinkStore(owner);
             return stores_.has(v);
         }
 
         /* Trace the source of all edges in the store buffer. */
         void trace(StoreBuffer* owner, TenuringTracer& mover);
 
         size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
-            return stores_.sizeOfExcludingThis(mallocSizeOf);
+            return stores_.shallowSizeOfExcludingThis(mallocSizeOf);
         }
 
         bool isEmpty() const {
             return last_ == T() && (!stores_.initialized() || stores_.empty());
         }
 
       private:
         MonoTypeBuffer(const MonoTypeBuffer& other) = delete;
--- a/js/src/gc/WeakMap.cpp
+++ b/js/src/gc/WeakMap.cpp
@@ -199,17 +199,17 @@ ObjectWeakMap::trace(JSTracer* trc)
 {
     map.trace(trc);
 }
 
 size_t
 ObjectWeakMap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
     MOZ_ASSERT(map.initialized());
-    return map.sizeOfExcludingThis(mallocSizeOf);
+    return map.shallowSizeOfExcludingThis(mallocSizeOf);
 }
 
 #ifdef JSGC_HASH_TABLE_CHECKS
 void
 ObjectWeakMap::checkAfterMovingGC()
 {
     MOZ_ASSERT(map.initialized());
     for (ObjectValueMap::Range r = map.all(); !r.empty(); r.popFront()) {
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -645,29 +645,29 @@ JitZone::sweep()
     baselineCacheIRStubCodes_.sweep();
 }
 
 size_t
 JitRealm::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t n = mallocSizeOf(this);
     if (stubCodes_)
-        n += stubCodes_->sizeOfIncludingThis(mallocSizeOf);
+        n += stubCodes_->shallowSizeOfIncludingThis(mallocSizeOf);
     return n;
 }
 
 void
 JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                 size_t* jitZone,
                                 size_t* baselineStubsOptimized,
                                 size_t* cachedCFG) const
 {
     *jitZone += mallocSizeOf(this);
-    *jitZone += baselineCacheIRStubCodes_.sizeOfExcludingThis(mallocSizeOf);
-    *jitZone += ionCacheIRStubInfoSet_.sizeOfExcludingThis(mallocSizeOf);
+    *jitZone += baselineCacheIRStubCodes_.shallowSizeOfExcludingThis(mallocSizeOf);
+    *jitZone += ionCacheIRStubInfoSet_.shallowSizeOfExcludingThis(mallocSizeOf);
 
     *baselineStubsOptimized += optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
     *cachedCFG += cfgSpace_.sizeOfExcludingThis(mallocSizeOf);
 }
 
 TrampolinePtr
 JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
 {
--- a/js/src/vm/ArrayBufferObject.cpp
+++ b/js/src/vm/ArrayBufferObject.cpp
@@ -1631,17 +1631,17 @@ InnerViewTable::sizeOfExcludingThis(mozi
     if (!map.initialized())
         return 0;
 
     size_t vectorSize = 0;
     for (Map::Enum e(map); !e.empty(); e.popFront())
         vectorSize += e.front().value().sizeOfExcludingThis(mallocSizeOf);
 
     return vectorSize
-         + map.sizeOfExcludingThis(mallocSizeOf)
+         + map.shallowSizeOfExcludingThis(mallocSizeOf)
          + nurseryKeys.sizeOfExcludingThis(mallocSizeOf);
 }
 
 /*
  * ArrayBufferViewObject
  */
 
 /*
--- a/js/src/vm/AtomsTable.h
+++ b/js/src/vm/AtomsTable.h
@@ -99,17 +99,17 @@ class FrozenAtomSet
     // This constructor takes ownership of the passed-in AtomSet.
     explicit FrozenAtomSet(AtomSet* set) { mSet = set; }
 
     ~FrozenAtomSet() { js_delete(mSet); }
 
     MOZ_ALWAYS_INLINE AtomSet::Ptr readonlyThreadsafeLookup(const AtomSet::Lookup& l) const;
 
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
-        return mSet->sizeOfIncludingThis(mallocSizeOf);
+        return mSet->shallowSizeOfIncludingThis(mallocSizeOf);
     }
 
     typedef AtomSet::Range Range;
 
     AtomSet::Range all() const { return mSet->all(); }
 };
 
 class AtomsTable
--- a/js/src/vm/Compartment.h
+++ b/js/src/vm/Compartment.h
@@ -308,23 +308,23 @@ class WrapperMap
             InnerMap m;
             if (!m.init(InitialInnerMapSize) || !map.add(p, c, std::move(m)))
                 return false;
         }
         return p->value().put(k, v);
     }
 
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) {
-        size_t size = map.sizeOfExcludingThis(mallocSizeOf);
+        size_t size = map.shallowSizeOfExcludingThis(mallocSizeOf);
         for (OuterMap::Enum e(map); !e.empty(); e.popFront())
             size += e.front().value().sizeOfExcludingThis(mallocSizeOf);
         return size;
     }
     size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) {
-        size_t size = map.sizeOfIncludingThis(mallocSizeOf);
+        size_t size = map.shallowSizeOfIncludingThis(mallocSizeOf);
         for (OuterMap::Enum e(map); !e.empty(); e.popFront())
             size += e.front().value().sizeOfIncludingThis(mallocSizeOf);
         return size;
     }
 
     bool hasNurseryAllocatedWrapperEntries(const CompartmentFilter& f) {
         for (OuterMap::Enum e(map); !e.empty(); e.popFront()) {
             JS::Compartment* c = e.front().key();
--- a/js/src/vm/JSAtom.cpp
+++ b/js/src/vm/JSAtom.cpp
@@ -556,17 +556,17 @@ AtomsTable::sweepIncrementally(SweepIter
 }
 
 size_t
 AtomsTable::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t size = sizeof(AtomsTable);
     for (size_t i = 0; i < PartitionCount; i++) {
         size += sizeof(Partition);
-        size += partitions[i]->atoms.sizeOfExcludingThis(mallocSizeOf);
+        size += partitions[i]->atoms.shallowSizeOfExcludingThis(mallocSizeOf);
     }
     return size;
 }
 
 bool
 JSRuntime::initMainAtomsTables(JSContext* cx)
 {
     MOZ_ASSERT(!parentRuntime);
--- a/js/src/vm/JSScript.cpp
+++ b/js/src/vm/JSScript.cpp
@@ -1570,17 +1570,17 @@ UncompressedSourceCache::purge()
     map_.reset();
 }
 
 size_t
 UncompressedSourceCache::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
     size_t n = 0;
     if (map_ && !map_->empty()) {
-        n += map_->sizeOfIncludingThis(mallocSizeOf);
+        n += map_->shallowSizeOfIncludingThis(mallocSizeOf);
         for (Map::Range r = map_->all(); !r.empty(); r.popFront())
             n += mallocSizeOf(r.front().value().get());
     }
     return n;
 }
 
 const char16_t*
 ScriptSource::chunkChars(JSContext* cx, UncompressedSourceCache::AutoHoldEntry& holder,
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -1742,20 +1742,20 @@ ObjectGroupRealm::addSizeOfExcludingThis
                                                size_t* arrayObjectGroupTables,
                                                size_t* plainObjectGroupTables,
                                                size_t* realmTables)
 {
     if (allocationSiteTable)
         *allocationSiteTables += allocationSiteTable->sizeOfIncludingThis(mallocSizeOf);
 
     if (arrayObjectTable)
-        *arrayObjectGroupTables += arrayObjectTable->sizeOfIncludingThis(mallocSizeOf);
+        *arrayObjectGroupTables += arrayObjectTable->shallowSizeOfIncludingThis(mallocSizeOf);
 
     if (plainObjectTable) {
-        *plainObjectGroupTables += plainObjectTable->sizeOfIncludingThis(mallocSizeOf);
+        *plainObjectGroupTables += plainObjectTable->shallowSizeOfIncludingThis(mallocSizeOf);
 
         for (PlainObjectTable::Enum e(*plainObjectTable);
              !e.empty();
              e.popFront())
         {
             const PlainObjectKey& key = e.front().key();
             const PlainObjectEntry& value = e.front().value();
 
--- a/js/src/vm/Realm.cpp
+++ b/js/src/vm/Realm.cpp
@@ -922,23 +922,23 @@ Realm::addSizeOfIncludingThis(mozilla::M
 
     objects_.addSizeOfExcludingThis(mallocSizeOf,
                                     innerViewsArg,
                                     lazyArrayBuffersArg,
                                     objectMetadataTablesArg,
                                     nonSyntacticLexicalEnvironmentsArg);
 
     *savedStacksSet += savedStacks_.sizeOfExcludingThis(mallocSizeOf);
-    *varNamesSet += varNames_.sizeOfExcludingThis(mallocSizeOf);
+    *varNamesSet += varNames_.shallowSizeOfExcludingThis(mallocSizeOf);
 
     if (jitRealm_)
         *jitRealm += jitRealm_->sizeOfIncludingThis(mallocSizeOf);
 
     if (scriptCountsMap) {
-        *scriptCountsMapArg += scriptCountsMap->sizeOfIncludingThis(mallocSizeOf);
+        *scriptCountsMapArg += scriptCountsMap->shallowSizeOfIncludingThis(mallocSizeOf);
         for (auto r = scriptCountsMap->all(); !r.empty(); r.popFront())
             *scriptCountsMapArg += r.front().value()->sizeOfIncludingThis(mallocSizeOf);
     }
 }
 
 mozilla::HashCodeScrambler
 Realm::randomHashCodeScrambler()
 {
--- a/js/src/vm/Runtime.cpp
+++ b/js/src/vm/Runtime.cpp
@@ -389,17 +389,17 @@ JSRuntime::addSizeOfIncludingThis(mozill
         rtSizes->sharedImmutableStringsCache +=
             sharedImmutableStrings_->sizeOfExcludingThis(mallocSizeOf);
     }
 
     rtSizes->sharedIntlData += sharedIntlData.ref().sizeOfExcludingThis(mallocSizeOf);
 
     {
         AutoLockScriptData lock(this);
-        rtSizes->scriptData += scriptDataTable(lock).sizeOfExcludingThis(mallocSizeOf);
+        rtSizes->scriptData += scriptDataTable(lock).shallowSizeOfExcludingThis(mallocSizeOf);
         for (ScriptDataTable::Range r = scriptDataTable(lock).all(); !r.empty(); r.popFront())
             rtSizes->scriptData += mallocSizeOf(r.front());
     }
 
     if (jitRuntime_) {
         jitRuntime_->execAlloc().addSizeOfCode(&rtSizes->code);
 
         // Sizes of the IonBuilders we are holding for lazy linking
--- a/js/src/vm/SavedStacks.cpp
+++ b/js/src/vm/SavedStacks.cpp
@@ -1302,18 +1302,18 @@ void
 SavedStacks::clear()
 {
     frames.clear();
 }
 
 size_t
 SavedStacks::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
-    return frames.sizeOfExcludingThis(mallocSizeOf) +
-           pcLocationMap.sizeOfExcludingThis(mallocSizeOf);
+    return frames.shallowSizeOfExcludingThis(mallocSizeOf) +
+           pcLocationMap.shallowSizeOfExcludingThis(mallocSizeOf);
 }
 
 // Given that we have captured a stack frame with the given principals and
 // source, return true if the requested `StackCapture` has been satisfied and
 // stack walking can halt. Return false otherwise (and stack walking and frame
 // capturing should continue).
 static inline bool
 captureIsSatisfied(JSContext* cx, JSPrincipals* principals, const JSAtom* source,
--- a/js/src/vm/Shape.cpp
+++ b/js/src/vm/Shape.cpp
@@ -2288,17 +2288,17 @@ JS::ubi::Concrete<js::Shape>::size(mozil
 {
     Size size = js::gc::Arena::thingSize(get().asTenured().getAllocKind());
 
     AutoCheckCannotGC nogc;
     if (ShapeTable* table = get().maybeTable(nogc))
         size += table->sizeOfIncludingThis(mallocSizeOf);
 
     if (!get().inDictionary() && get().kids.isHash())
-        size += get().kids.toHash()->sizeOfIncludingThis(mallocSizeOf);
+        size += get().kids.toHash()->shallowSizeOfIncludingThis(mallocSizeOf);
 
     return size;
 }
 
 JS::ubi::Node::Size
 JS::ubi::Concrete<js::BaseShape>::size(mozilla::MallocSizeOf mallocSizeOf) const
 {
     return js::gc::Arena::thingSize(get().asTenured().getAllocKind());
--- a/js/src/vm/Shape.h
+++ b/js/src/vm/Shape.h
@@ -845,17 +845,18 @@ class Shape : public gc::TenuredCell
         if (ShapeTable* table = maybeTable(nogc)) {
             if (inDictionary())
                 info->shapesMallocHeapDictTables += table->sizeOfIncludingThis(mallocSizeOf);
             else
                 info->shapesMallocHeapTreeTables += table->sizeOfIncludingThis(mallocSizeOf);
         }
 
         if (!inDictionary() && kids.isHash())
-            info->shapesMallocHeapTreeKids += kids.toHash()->sizeOfIncludingThis(mallocSizeOf);
+            info->shapesMallocHeapTreeKids +=
+                kids.toHash()->shallowSizeOfIncludingThis(mallocSizeOf);
     }
 
     bool isAccessorShape() const {
         MOZ_ASSERT_IF(immutableFlags & ACCESSOR_SHAPE,
                       getAllocKind() == gc::AllocKind::ACCESSOR_SHAPE);
         return immutableFlags & ACCESSOR_SHAPE;
     }
     AccessorShape& asAccessorShape() const {
--- a/js/src/vm/SharedImmutableStringsCache.h
+++ b/js/src/vm/SharedImmutableStringsCache.h
@@ -138,17 +138,17 @@ class SharedImmutableStringsCache
         MOZ_ASSERT(inner_);
         size_t n = mallocSizeOf(inner_);
 
         auto locked = inner_->lock();
         if (!locked->set.initialized())
             return n;
 
         // Size of the table.
-        n += locked->set.sizeOfExcludingThis(mallocSizeOf);
+        n += locked->set.shallowSizeOfExcludingThis(mallocSizeOf);
 
         // Sizes of the strings and their boxes.
         for (auto r = locked->set.all(); !r.empty(); r.popFront()) {
             n += mallocSizeOf(r.front().get());
             if (const char* chars = r.front()->chars())
                 n += mallocSizeOf(chars);
         }
 
--- a/js/src/vm/TraceLogging.cpp
+++ b/js/src/vm/TraceLogging.cpp
@@ -350,19 +350,19 @@ TraceLoggerThreadState::maybeEventText(u
 size_t
 TraceLoggerThreadState::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
 {
     LockGuard<Mutex> guard(lock);
 
     // Do not count threadLoggers since they are counted by JSContext::traceLogger.
 
     size_t size = 0;
-    size += pointerMap.sizeOfExcludingThis(mallocSizeOf);
+    size += pointerMap.shallowSizeOfExcludingThis(mallocSizeOf);
     if (textIdPayloads.initialized()) {
-        size += textIdPayloads.sizeOfExcludingThis(mallocSizeOf);
+        size += textIdPayloads.shallowSizeOfExcludingThis(mallocSizeOf);
         for (TextIdHashMap::Range r = textIdPayloads.all(); !r.empty(); r.popFront())
             r.front().value()->sizeOfIncludingThis(mallocSizeOf);
     }
     return size;
 }
 
 bool
 TraceLoggerThread::textIdIsScriptEvent(uint32_t id)
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -4645,17 +4645,17 @@ Zone::addSizeOfIncludingThis(mozilla::Ma
                              size_t* compartmentObjects,
                              size_t* crossCompartmentWrappersTables,
                              size_t* compartmentsPrivateData)
 {
     *typePool += types.typeLifoAlloc().sizeOfExcludingThis(mallocSizeOf);
     *regexpZone += regExps.sizeOfExcludingThis(mallocSizeOf);
     if (jitZone_)
         jitZone_->addSizeOfIncludingThis(mallocSizeOf, jitZone, baselineStubsOptimized, cachedCFG);
-    *uniqueIdMap += uniqueIds().sizeOfExcludingThis(mallocSizeOf);
+    *uniqueIdMap += uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf);
     *shapeTables += baseShapes().sizeOfExcludingThis(mallocSizeOf)
                   + initialShapes().sizeOfExcludingThis(mallocSizeOf);
     *atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);
 
     for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
         comp->addSizeOfIncludingThis(mallocSizeOf,
                                      compartmentObjects,
                                      crossCompartmentWrappersTables,
--- a/js/xpconnect/src/XPCMaps.cpp
+++ b/js/xpconnect/src/XPCMaps.cpp
@@ -83,17 +83,17 @@ JSObject2WrappedJSMap::ShutdownMarker()
         wrapper->SystemIsBeingShutDown();
     }
 }
 
 size_t
 JSObject2WrappedJSMap::SizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t n = mallocSizeOf(this);
-    n += mTable.sizeOfExcludingThis(mallocSizeOf);
+    n += mTable.shallowSizeOfExcludingThis(mallocSizeOf);
     return n;
 }
 
 size_t
 JSObject2WrappedJSMap::SizeOfWrappedJS(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t n = 0;
     for (Map::Range r = mTable.all(); !r.empty(); r.popFront())
--- a/memory/replace/dmd/DMD.cpp
+++ b/memory/replace/dmd/DMD.cpp
@@ -601,17 +601,17 @@ public:
     MOZ_ALWAYS_TRUE(mSet.add(p, newString));
     return newString;
   }
 
   size_t
   SizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
   {
     size_t n = 0;
-    n += mSet.sizeOfExcludingThis(aMallocSizeOf);
+    n += mSet.shallowSizeOfExcludingThis(aMallocSizeOf);
     for (auto r = mSet.all(); !r.empty(); r.popFront()) {
       n += aMallocSizeOf(r.front());
     }
     return n;
   }
 
 private:
   struct StringHasher
@@ -1667,21 +1667,23 @@ SizeOfInternal(Sizes* aSizes)
     if (usedStackTraces.has(st)) {
       aSizes->mStackTracesUsed += MallocSizeOf(st);
     } else {
       aSizes->mStackTracesUnused += MallocSizeOf(st);
     }
   }
 
   aSizes->mStackTraceTable =
-    gStackTraceTable->sizeOfIncludingThis(MallocSizeOf);
+    gStackTraceTable->shallowSizeOfIncludingThis(MallocSizeOf);
 
-  aSizes->mLiveBlockTable = gLiveBlockTable->sizeOfIncludingThis(MallocSizeOf);
+  aSizes->mLiveBlockTable =
+    gLiveBlockTable->shallowSizeOfIncludingThis(MallocSizeOf);
 
-  aSizes->mDeadBlockTable = gDeadBlockTable->sizeOfIncludingThis(MallocSizeOf);
+  aSizes->mDeadBlockTable =
+    gDeadBlockTable->shallowSizeOfIncludingThis(MallocSizeOf);
 }
 
 void
 DMDFuncs::SizeOf(Sizes* aSizes)
 {
   aSizes->Clear();
 
   AutoBlockIntercepts block(Thread::Fetch());
@@ -1727,17 +1729,17 @@ public:
     } else {
       id = p->value();
     }
     return Base32(id);
   }
 
   size_t sizeOfExcludingThis(mozilla::MallocSizeOf aMallocSizeOf) const
   {
-    return mIdMap.sizeOfExcludingThis(aMallocSizeOf);
+    return mIdMap.shallowSizeOfExcludingThis(aMallocSizeOf);
   }
 
 private:
   // This function converts an integer to base-32. We use base-32 values for
   // indexing into the traceTable and the frameTable, for the following reasons.
   //
   // - Base-32 gives more compact indices than base-16.
   //
@@ -2042,19 +2044,20 @@ AnalyzeImpl(UniquePtr<JSONWriteFunc> aWr
       Show(gDeadBlockTable->count(),    buf3, kBufLen));
 
     StatusMsg("    }\n");
     StatusMsg("    Data structures that are destroyed after Dump() ends {\n");
 
     StatusMsg("      Location service:      %10s bytes\n",
       Show(locService->SizeOfIncludingThis(MallocSizeOf), buf1, kBufLen));
     StatusMsg("      Used stack traces set: %10s bytes\n",
-      Show(usedStackTraces.sizeOfExcludingThis(MallocSizeOf), buf1, kBufLen));
+      Show(usedStackTraces.shallowSizeOfExcludingThis(MallocSizeOf), buf1,
+           kBufLen));
     StatusMsg("      Used PCs set:          %10s bytes\n",
-      Show(usedPcs.sizeOfExcludingThis(MallocSizeOf), buf1, kBufLen));
+      Show(usedPcs.shallowSizeOfExcludingThis(MallocSizeOf), buf1, kBufLen));
     StatusMsg("      Pointer ID map:        %10s bytes\n",
       Show(iscSize, buf1, kBufLen));
 
     StatusMsg("    }\n");
     StatusMsg("    Counts {\n");
 
     size_t hits   = locService->NumCacheHits();
     size_t misses = locService->NumCacheMisses();
--- a/mfbt/HashTable.h
+++ b/mfbt/HashTable.h
@@ -272,25 +272,27 @@ public:
 
   // Number of live elements in the map.
   uint32_t count() const { return mImpl.count(); }
 
   // Total number of allocation in the dynamic table. Note: resize will
   // happen well before count() == capacity().
   size_t capacity() const { return mImpl.capacity(); }
 
-  // Don't just call |mImpl.sizeOfExcludingThis()| because there's no
-  // guarantee that |mImpl| is the first field in HashMap.
-  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+  // Measure the size of the HashMap's entry storage. If the entries contain
+  // pointers to other heap blocks, you must iterate over the table and measure
+  // them separately; hence the "shallow" prefix.
+  size_t shallowSizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
   {
-    return mImpl.sizeOfExcludingThis(aMallocSizeOf);
+    return mImpl.shallowSizeOfExcludingThis(aMallocSizeOf);
   }
-  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+  size_t shallowSizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
   {
-    return aMallocSizeOf(this) + mImpl.sizeOfExcludingThis(aMallocSizeOf);
+    return aMallocSizeOf(this) +
+           mImpl.shallowSizeOfExcludingThis(aMallocSizeOf);
   }
 
   Generation generation() const { return mImpl.generation(); }
 
   /************************************************** Shorthand operations */
 
   bool has(const Lookup& aLookup) const
   {
@@ -559,25 +561,27 @@ public:
 
   // Number of live elements in the map.
   uint32_t count() const { return mImpl.count(); }
 
   // Total number of allocation in the dynamic table. Note: resize will
   // happen well before count() == capacity().
   size_t capacity() const { return mImpl.capacity(); }
 
-  // Don't just call |mImpl.sizeOfExcludingThis()| because there's no
-  // guarantee that |mImpl| is the first field in HashSet.
-  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+  // Measure the size of the HashSet's entry storage. If the entries contain
+  // pointers to other heap blocks, you must iterate over the table and measure
+  // them separately; hence the "shallow" prefix.
+  size_t shallowSizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
   {
-    return mImpl.sizeOfExcludingThis(aMallocSizeOf);
+    return mImpl.shallowSizeOfExcludingThis(aMallocSizeOf);
   }
-  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+  size_t shallowSizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
   {
-    return aMallocSizeOf(this) + mImpl.sizeOfExcludingThis(aMallocSizeOf);
+    return aMallocSizeOf(this) +
+           mImpl.shallowSizeOfExcludingThis(aMallocSizeOf);
   }
 
   Generation generation() const { return mImpl.generation(); }
 
   /************************************************** Shorthand operations */
 
   bool has(const Lookup& aLookup) const
   {
@@ -1987,24 +1991,24 @@ public:
   }
 
   Generation generation() const
   {
     MOZ_ASSERT(mTable);
     return Generation(mGen);
   }
 
-  size_t sizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
+  size_t shallowSizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
   {
     return aMallocSizeOf(mTable);
   }
 
-  size_t sizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
+  size_t shallowSizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const
   {
-    return aMallocSizeOf(this) + sizeOfExcludingThis(aMallocSizeOf);
+    return aMallocSizeOf(this) + shallowSizeOfExcludingThis(aMallocSizeOf);
   }
 
   MOZ_ALWAYS_INLINE Ptr lookup(const Lookup& aLookup) const
   {
     ReentrancyGuard g(*this);
     if (!HasHash<HashPolicy>(aLookup)) {
       return Ptr();
     }