Bug 1358047 - Move Baseline CacheIR code map from JitCompartment to JitZone. r=djvj
author: Jan de Mooij <jdemooij@mozilla.com>
date: Mon, 24 Apr 2017 12:41:04 +0200
changeset: 354557 ccdbdd6e79e3114d88f972953f462555fc3083d1
parent: 354556 84ef33cfa71a00408525c041fcbfeee6e0fc7d9a
child: 354563 44e3d204f259dee6727107e099ac35544dde94b0
push id: 89489
push user: jandemooij@gmail.com
push date: Mon, 24 Apr 2017 10:41:27 +0000
reviewers: djvj
bugs: 1358047
milestone: 55.0a1
files:
js/public/MemoryMetrics.h
js/src/gc/Zone.cpp
js/src/gc/Zone.h
js/src/jit/BaselineCacheIRCompiler.cpp
js/src/jit/Ion.cpp
js/src/jit/JitCompartment.h
js/src/jsgc.cpp
js/src/vm/MemoryMetrics.cpp
js/src/vm/TypeInference.cpp
js/xpconnect/src/XPCJSRuntime.cpp
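
A zone contains many compartments, and before this change each compartment kept its own Baseline CacheIR stub code map, so identical stubs were compiled once per compartment. Moving the map to JitZone lets every compartment in a zone share one table. A minimal standalone sketch of that ownership shape (hypothetical Zone/Compartment/StubCache types, not SpiderMonkey code):

    #include <string>
    #include <unordered_map>
    #include <vector>

    struct StubCode {};  // stands in for jit::JitCode

    struct StubCache {   // stands in for the BaselineCacheIRStubCodeMap
        std::unordered_map<std::string, StubCode> map;
    };

    struct Compartment;

    struct Zone {
        StubCache stubCache;                  // one cache, zone-wide (this patch)
        std::vector<Compartment*> compartments;
    };

    struct Compartment {
        Zone* zone;
        // Before the patch each Compartment held its own StubCache; now every
        // compartment in a zone hits the same table.
        StubCache& stubCache() { return zone->stubCache; }
    };
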
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -644,16 +644,17 @@ struct ZoneStats
     macro(Other,   GCHeapUsed,  jitCodesGCHeap) \
     macro(Other,   GCHeapUsed,  objectGroupsGCHeap) \
     macro(Other,   MallocHeap,  objectGroupsMallocHeap) \
     macro(Other,   GCHeapUsed,  scopesGCHeap) \
     macro(Other,   MallocHeap,  scopesMallocHeap) \
     macro(Other,   GCHeapUsed,  regExpSharedsGCHeap) \
     macro(Other,   MallocHeap,  regExpSharedsMallocHeap) \
     macro(Other,   MallocHeap,  typePool) \
+    macro(Other,   MallocHeap,  jitZone) \
     macro(Other,   MallocHeap,  baselineStubsOptimized) \
     macro(Other,   MallocHeap,  cachedCFG) \
     macro(Other,   MallocHeap,  uniqueIdMap) \
     macro(Other,   MallocHeap,  shapeTables)
 
     ZoneStats()
       : FOR_EACH_SIZE(ZERO_SIZE)
         unusedGCThings(),
--- a/js/src/gc/Zone.cpp
+++ b/js/src/gc/Zone.cpp
@@ -275,17 +275,21 @@ Zone::gcNumber()
 js::jit::JitZone*
 Zone::createJitZone(JSContext* cx)
 {
     MOZ_ASSERT(!jitZone_);
 
     if (!cx->runtime()->getJitRuntime(cx))
         return nullptr;
 
-    jitZone_ = cx->new_<js::jit::JitZone>();
+    UniquePtr<jit::JitZone> jitZone(cx->new_<js::jit::JitZone>());
+    if (!jitZone || !jitZone->init(cx))
+        return nullptr;
+
+    jitZone_ = jitZone.release();
     return jitZone_;
 }
 
 bool
 Zone::hasMarkedCompartments()
 {
     for (CompartmentsInZoneIter comp(this); !comp.done(); comp.next()) {
         if (comp->marked)
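
Zone::createJitZone() now uses fallible two-phase construction: the JitZone is held in a UniquePtr while its fallible init() runs (the new hash table allocation), and ownership is released to the raw jitZone_ member only on success, so a failed init cannot leak the allocation. A standalone sketch of the pattern with simplified stand-in types (not the real Zone/JitZone):

    #include <memory>
    #include <new>

    struct JitZoneLike {
        bool init() { return true; }   // fallible part (hash table allocation)
    };

    struct ZoneLike {
        JitZoneLike* jitZone_ = nullptr;

        JitZoneLike* createJitZone() {
            std::unique_ptr<JitZoneLike> jz(new (std::nothrow) JitZoneLike());
            if (!jz || !jz->init())
                return nullptr;        // unique_ptr frees a half-built zone
            jitZone_ = jz.release();   // success: hand ownership to the member
            return jitZone_;
        }
    };

The nothrow allocation mirrors SpiderMonkey's fallible-allocation style, where cx->new_ returns null on OOM rather than throwing.
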
--- a/js/src/gc/Zone.h
+++ b/js/src/gc/Zone.h
@@ -173,16 +173,17 @@ struct Zone : public JS::shadow::Zone,
     }
 
     void findOutgoingEdges(js::gc::ZoneComponentFinder& finder);
 
     void discardJitCode(js::FreeOp* fop, bool discardBaselineCode = true);
 
     void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                 size_t* typePool,
+                                size_t* jitZone,
                                 size_t* baselineStubsOptimized,
                                 size_t* cachedCFG,
                                 size_t* uniqueIdMap,
                                 size_t* shapeTables,
                                 size_t* atomsMarkBitmaps);
 
     // Iterate over all cells in the zone. See the definition of ZoneCellIter
     // in jsgcinlines.h for the possible arguments and documentation.
--- a/js/src/jit/BaselineCacheIRCompiler.cpp
+++ b/js/src/jit/BaselineCacheIRCompiler.cpp
@@ -1939,43 +1939,44 @@ jit::AttachBaselineCacheIRStub(JSContext
         break;
       case CacheKind::SetProp:
       case CacheKind::SetElem:
         stubDataOffset = sizeof(ICCacheIR_Updated);
         stubKind = CacheIRStubKind::Updated;
         break;
     }
 
-    JitCompartment* jitCompartment = cx->compartment()->jitCompartment();
+    JitZone* jitZone = cx->zone()->jitZone();
 
     // Check if we already have JitCode for this stub.
     CacheIRStubInfo* stubInfo;
     CacheIRStubKey::Lookup lookup(kind, engine, writer.codeStart(), writer.codeLength());
-    JitCode* code = jitCompartment->getCacheIRStubCode(lookup, &stubInfo);
+    JitCode* code = jitZone->getBaselineCacheIRStubCode(lookup, &stubInfo);
     if (!code) {
         // We have to generate stub code.
         JitContext jctx(cx, nullptr);
         BaselineCacheIRCompiler comp(cx, writer, engine, stubDataOffset);
         if (!comp.init(kind))
             return nullptr;
 
         code = comp.compile();
         if (!code)
             return nullptr;
 
-        // Allocate the shared CacheIRStubInfo. Note that the putCacheIRStubCode
-        // call below will transfer ownership to the stub code HashMap, so we
-        // don't have to worry about freeing it below.
+        // Allocate the shared CacheIRStubInfo. Note that the
+        // putBaselineCacheIRStubCode call below will transfer ownership
+        // to the stub code HashMap, so we don't have to worry about freeing
+        // it below.
         MOZ_ASSERT(!stubInfo);
         stubInfo = CacheIRStubInfo::New(kind, engine, comp.makesGCCalls(), stubDataOffset, writer);
         if (!stubInfo)
             return nullptr;
 
         CacheIRStubKey key(stubInfo);
-        if (!jitCompartment->putCacheIRStubCode(lookup, key, code))
+        if (!jitZone->putBaselineCacheIRStubCode(lookup, key, code))
             return nullptr;
     }
 
     MOZ_ASSERT(code);
     MOZ_ASSERT(stubInfo);
     MOZ_ASSERT(stubInfo->stubDataSize() == writer.stubDataSize());
 
     // Ensure we don't attach duplicate stubs. This can happen if a stub failed
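
The attach path is unchanged apart from the container it consults: look the (kind, engine, CacheIR bytes) key up in the zone's map, and only on a miss compile new stub code and insert it, transferring ownership of the freshly allocated CacheIRStubInfo to the table. A standalone sketch of that lookup-or-compile flow, with hypothetical types in place of the CacheIR machinery:

    #include <memory>
    #include <string>
    #include <unordered_map>

    struct StubInfo {};                        // stands in for CacheIRStubInfo
    struct StubCode {};                        // stands in for jit::JitCode

    struct Entry {
        std::unique_ptr<StubInfo> info;        // table owns the shared stub info
        StubCode* code;
    };

    struct ZoneStubCache {
        std::unordered_map<std::string, Entry> map;  // key = serialized CacheIR

        StubCode* lookupOrCompile(const std::string& key, StubInfo** infoOut) {
            auto it = map.find(key);
            if (it != map.end()) {             // hit: reuse the shared stub
                *infoOut = it->second.info.get();
                return it->second.code;
            }
            auto* code = new StubCode();       // miss: "compile" new stub code
            auto info = std::make_unique<StubInfo>();
            *infoOut = info.get();
            // Ownership of info moves into the table, as in
            // putBaselineCacheIRStubCode above.
            map.emplace(key, Entry{std::move(info), code});
            return code;
        }
    };
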
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -419,66 +419,66 @@ JitZoneGroup::patchIonBackedges(JSContex
 
 JitZoneGroup::JitZoneGroup(ZoneGroup* group)
   : backedgeTarget_(group, BackedgeLoopHeader),
     backedgeList_(group)
 {}
 
 JitCompartment::JitCompartment()
   : stubCodes_(nullptr),
-    cacheIRStubCodes_(nullptr),
     stringConcatStub_(nullptr),
     regExpMatcherStub_(nullptr),
     regExpSearcherStub_(nullptr),
     regExpTesterStub_(nullptr)
 {
 }
 
 JitCompartment::~JitCompartment()
 {
     js_delete(stubCodes_);
-    js_delete(cacheIRStubCodes_);
 }
 
 bool
 JitCompartment::initialize(JSContext* cx)
 {
     stubCodes_ = cx->new_<ICStubCodeMap>(cx->runtime());
     if (!stubCodes_)
         return false;
 
     if (!stubCodes_->init()) {
         ReportOutOfMemory(cx);
         return false;
     }
 
-    cacheIRStubCodes_ = cx->new_<CacheIRStubCodeMap>(cx->runtime());
-    if (!cacheIRStubCodes_)
-        return false;
-
-    if (!cacheIRStubCodes_->init()) {
-        ReportOutOfMemory(cx);
-        return false;
-    }
-
     return true;
 }
 
 bool
 JitCompartment::ensureIonStubsExist(JSContext* cx)
 {
     if (!stringConcatStub_) {
         stringConcatStub_ = generateStringConcatStub(cx);
         if (!stringConcatStub_)
             return false;
     }
 
     return true;
 }
 
+bool
+JitZone::init(JSContext* cx)
+{
+    if (!baselineCacheIRStubCodes_.init()) {
+        ReportOutOfMemory(cx);
+        return false;
+    }
+
+    return true;
+}
+
 void
 jit::FinishOffThreadBuilder(JSRuntime* runtime, IonBuilder* builder,
                             const AutoLockHelperThreadState& locked)
 {
     // Clean the references to the pending IonBuilder, if we just finished it.
     if (builder->script()->baselineScript()->hasPendingIonBuilder() &&
         builder->script()->baselineScript()->pendingIonBuilder() == builder)
     {
@@ -644,17 +644,16 @@ JitRuntime::SweepJitcodeGlobalTable(JSRu
 
 void
 JitCompartment::sweep(FreeOp* fop, JSCompartment* compartment)
 {
     // Any outstanding compilations should have been cancelled by the GC.
     MOZ_ASSERT(!HasOffThreadIonCompile(compartment));
 
     stubCodes_->sweep();
-    cacheIRStubCodes_->sweep();
 
     // If the sweep removed a bailout Fallback stub, nullptr the corresponding return addr.
     for (auto& it : bailoutReturnStubInfo_) {
         if (!stubCodes_->lookup(it.key))
            it = BailoutReturnStubInfo();
     }
 
     JSRuntime* rt = fop->runtime();
@@ -672,48 +671,71 @@ JitCompartment::sweep(FreeOp* fop, JSCom
 
     for (ReadBarrieredObject& obj : simdTemplateObjects_) {
         if (obj && IsAboutToBeFinalized(&obj))
             obj.set(nullptr);
     }
 }
 
 void
+JitZone::sweep(FreeOp* fop)
+{
+    baselineCacheIRStubCodes_.sweep();
+}
+
+void
 JitCompartment::toggleBarriers(bool enabled)
 {
     // Toggle barriers in compartment wide stubs that have patchable pre barriers.
     if (regExpMatcherStub_)
         regExpMatcherStub_->togglePreBarriers(enabled, Reprotect);
     if (regExpSearcherStub_)
         regExpSearcherStub_->togglePreBarriers(enabled, Reprotect);
     if (regExpTesterStub_)
         regExpTesterStub_->togglePreBarriers(enabled, Reprotect);
 
     // Toggle barriers in baseline IC stubs.
     for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
         JitCode* code = *e.front().value().unsafeGet();
         code->togglePreBarriers(enabled, Reprotect);
     }
-    for (CacheIRStubCodeMap::Enum e(*cacheIRStubCodes_); !e.empty(); e.popFront()) {
+}
+
+void
+JitZone::toggleBarriers(bool enabled)
+{
+    for (BaselineCacheIRStubCodeMap::Enum e(baselineCacheIRStubCodes_); !e.empty(); e.popFront()) {
         JitCode* code = *e.front().value().unsafeGet();
         code->togglePreBarriers(enabled, Reprotect);
     }
 }
 
 size_t
 JitCompartment::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const
 {
     size_t n = mallocSizeOf(this);
     if (stubCodes_)
         n += stubCodes_->sizeOfIncludingThis(mallocSizeOf);
-    if (cacheIRStubCodes_)
-        n += cacheIRStubCodes_->sizeOfIncludingThis(mallocSizeOf);
     return n;
 }
 
+void
+JitZone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
+                                size_t* jitZone,
+                                size_t* baselineStubsOptimized,
+                                size_t* cachedCFG) const
+{
+    *jitZone += mallocSizeOf(this);
+    *jitZone += baselineCacheIRStubCodes_.sizeOfExcludingThis(mallocSizeOf);
+    *jitZone += ionCacheIRStubInfoSet_.sizeOfExcludingThis(mallocSizeOf);
+
+    *baselineStubsOptimized += optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
+    *cachedCFG += cfgSpace_.sizeOfExcludingThis(mallocSizeOf);
+}
+
 JitCode*
 JitRuntime::getBailoutTable(const FrameSizeClass& frameClass) const
 {
     MOZ_ASSERT(frameClass != FrameSizeClass::None());
     return bailoutTables_.ref()[frameClass.classId()];
 }
 
 JitCode*
@@ -1350,16 +1372,19 @@ jit::ToggleBarriers(JS::Zone* zone, bool
 
     for (auto script = zone->cellIter<JSScript>(); !script.done(); script.next()) {
         if (script->hasIonScript())
             script->ionScript()->toggleBarriers(needs);
         if (script->hasBaselineScript())
             script->baselineScript()->toggleBarriers(needs);
     }
 
+    if (JitZone* jitZone = zone->jitZone())
+        jitZone->toggleBarriers(needs);
+
     for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
         if (comp->jitCompartment())
             comp->jitCompartment()->toggleBarriers(needs);
     }
 }
 
 namespace js {
 namespace jit {
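
With the map on the zone, barrier toggling splits accordingly: jit::ToggleBarriers asks the JitZone to walk the shared stub-code table once, while each JitCompartment toggles only its own stubs. A minimal sketch of the zone-level walk, with a hypothetical Code type and plain map in place of the GC-aware ones:

    #include <string>
    #include <unordered_map>

    struct Code {
        bool preBarriers = false;
        void togglePreBarriers(bool on) { preBarriers = on; }
    };

    struct JitZoneSketch {
        std::unordered_map<std::string, Code*> stubCodes;

        void toggleBarriers(bool enabled) {
            // One pass over the zone-wide table, instead of one pass per
            // compartment as before the patch.
            for (auto& entry : stubCodes)
                entry.second->togglePreBarriers(enabled);
        }
    };
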
--- a/js/src/jit/JitCompartment.h
+++ b/js/src/jit/JitCompartment.h
@@ -388,35 +388,79 @@ struct CacheIRStubKey : public DefaultHa
     explicit CacheIRStubKey(CacheIRStubInfo* info) : stubInfo(info) {}
     CacheIRStubKey(CacheIRStubKey&& other) : stubInfo(Move(other.stubInfo)) { }
 
     void operator=(CacheIRStubKey&& other) {
         stubInfo = Move(other.stubInfo);
     }
 };
 
+template<typename Key>
+struct IcStubCodeMapGCPolicy
+{
+    static bool needsSweep(Key*, ReadBarrieredJitCode* value) {
+        return IsAboutToBeFinalized(value);
+    }
+};
+
 class JitZone
 {
     // Allocated space for optimized baseline stubs.
     OptimizedICStubSpace optimizedStubSpace_;
     // Allocated space for cached cfg.
     CFGSpace cfgSpace_;
 
     // Set of CacheIRStubInfo instances used by Ion stubs in this Zone.
     using IonCacheIRStubInfoSet = HashSet<CacheIRStubKey, CacheIRStubKey, SystemAllocPolicy>;
     IonCacheIRStubInfoSet ionCacheIRStubInfoSet_;
 
+    // Map CacheIRStubKey to shared JitCode objects.
+    using BaselineCacheIRStubCodeMap = GCHashMap<CacheIRStubKey,
+                                                 ReadBarrieredJitCode,
+                                                 CacheIRStubKey,
+                                                 SystemAllocPolicy,
+                                                 IcStubCodeMapGCPolicy<CacheIRStubKey>>;
+    BaselineCacheIRStubCodeMap baselineCacheIRStubCodes_;
+
   public:
+    MOZ_MUST_USE bool init(JSContext* cx);
+    void toggleBarriers(bool enabled);
+    void sweep(FreeOp* fop);
+
+    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
+                                size_t* jitZone,
+                                size_t* baselineStubsOptimized,
+                                size_t* cachedCFG) const;
+
     OptimizedICStubSpace* optimizedStubSpace() {
         return &optimizedStubSpace_;
     }
     CFGSpace* cfgSpace() {
         return &cfgSpace_;
     }
 
+    JitCode* getBaselineCacheIRStubCode(const CacheIRStubKey::Lookup& key,
+                                        CacheIRStubInfo** stubInfo) {
+        auto p = baselineCacheIRStubCodes_.lookup(key);
+        if (p) {
+            *stubInfo = p->key().stubInfo.get();
+            return p->value();
+        }
+        *stubInfo = nullptr;
+        return nullptr;
+    }
+    MOZ_MUST_USE bool putBaselineCacheIRStubCode(const CacheIRStubKey::Lookup& lookup,
+                                                 CacheIRStubKey& key,
+                                                 JitCode* stubCode)
+    {
+        auto p = baselineCacheIRStubCodes_.lookupForAdd(lookup);
+        MOZ_ASSERT(!p);
+        return baselineCacheIRStubCodes_.add(p, Move(key), stubCode);
+    }
+
     CacheIRStubInfo* getIonCacheIRStubInfo(const CacheIRStubKey::Lookup& key) {
         if (!ionCacheIRStubInfoSet_.initialized())
             return nullptr;
         IonCacheIRStubInfoSet::Ptr p = ionCacheIRStubInfoSet_.lookup(key);
         return p ? p->stubInfo.get() : nullptr;
     }
     MOZ_MUST_USE bool putIonCacheIRStubInfo(const CacheIRStubKey::Lookup& lookup,
                                             CacheIRStubKey& key)
@@ -439,39 +483,24 @@ enum class BailoutReturnStub {
     New,
     Count
 };
 
 class JitCompartment
 {
     friend class JitActivation;
 
-    template<typename Key>
-    struct IcStubCodeMapGCPolicy {
-        static bool needsSweep(Key*, ReadBarrieredJitCode* value) {
-            return IsAboutToBeFinalized(value);
-        }
-    };
-
     // Map ICStub keys to ICStub shared code objects.
     using ICStubCodeMap = GCHashMap<uint32_t,
                                     ReadBarrieredJitCode,
                                     DefaultHasher<uint32_t>,
                                     RuntimeAllocPolicy,
                                     IcStubCodeMapGCPolicy<uint32_t>>;
     ICStubCodeMap* stubCodes_;
 
-    // Map ICStub keys to ICStub shared code objects.
-    using CacheIRStubCodeMap = GCHashMap<CacheIRStubKey,
-                                         ReadBarrieredJitCode,
-                                         CacheIRStubKey,
-                                         RuntimeAllocPolicy,
-                                         IcStubCodeMapGCPolicy<CacheIRStubKey>>;
-    CacheIRStubCodeMap* cacheIRStubCodes_;
-
     // Keep track of offset into various baseline stubs' code at return
     // point from called script.
     struct BailoutReturnStubInfo
     {
         void* addr;
         uint32_t key;
 
         BailoutReturnStubInfo() : addr(nullptr), key(0) { }
@@ -533,44 +562,27 @@ class JitCompartment
     MOZ_MUST_USE bool putStubCode(JSContext* cx, uint32_t key, Handle<JitCode*> stubCode) {
         MOZ_ASSERT(stubCode);
         if (!stubCodes_->putNew(key, stubCode.get())) {
             ReportOutOfMemory(cx);
             return false;
         }
         return true;
     }
-    JitCode* getCacheIRStubCode(const CacheIRStubKey::Lookup& key, CacheIRStubInfo** stubInfo) {
-        CacheIRStubCodeMap::Ptr p = cacheIRStubCodes_->lookup(key);
-        if (p) {
-            *stubInfo = p->key().stubInfo.get();
-            return p->value();
-        }
-        *stubInfo = nullptr;
-        return nullptr;
-    }
-    MOZ_MUST_USE bool putCacheIRStubCode(const CacheIRStubKey::Lookup& lookup, CacheIRStubKey& key,
-                                         JitCode* stubCode)
-    {
-        CacheIRStubCodeMap::AddPtr p = cacheIRStubCodes_->lookupForAdd(lookup);
-        MOZ_ASSERT(!p);
-        return cacheIRStubCodes_->add(p, Move(key), stubCode);
-    }
     void initBailoutReturnAddr(void* addr, uint32_t key, BailoutReturnStub kind) {
         MOZ_ASSERT(bailoutReturnStubInfo_[kind].addr == nullptr);
         bailoutReturnStubInfo_[kind] = BailoutReturnStubInfo { addr, key };
     }
     void* bailoutReturnAddr(BailoutReturnStub kind) {
         MOZ_ASSERT(bailoutReturnStubInfo_[kind].addr);
         return bailoutReturnStubInfo_[kind].addr;
     }
 
     void toggleBarriers(bool enabled);
 
-  public:
     JitCompartment();
     ~JitCompartment();
 
     MOZ_MUST_USE bool initialize(JSContext* cx);
 
     // Initialize code stubs only used by Ion, not Baseline.
     MOZ_MUST_USE bool ensureIonStubsExist(JSContext* cx);
 
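
IcStubCodeMapGCPolicy is hoisted out of JitCompartment so both the compartment's ICStubCodeMap and the zone's new BaselineCacheIRStubCodeMap can share it. It gives the maps weak-value semantics: during sweeping, any entry whose JitCode is about to be finalized is dropped from the table. A standalone sketch of that sweep, with a hypothetical marked flag standing in for IsAboutToBeFinalized:

    #include <string>
    #include <unordered_map>

    struct CodeObj { bool marked = true; };

    struct WeakCodeMap {
        std::unordered_map<std::string, CodeObj*> map;

        void sweep() {
            for (auto it = map.begin(); it != map.end(); ) {
                if (!it->second->marked)   // value is about to be finalized
                    it = map.erase(it);    // drop the entry, not the code
                else
                    ++it;
            }
        }
    };
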
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -2179,16 +2179,19 @@ GCRuntime::sweepZoneAfterCompacting(Zone
     MOZ_ASSERT(zone->isCollecting());
     FreeOp* fop = rt->defaultFreeOp();
     sweepTypesAfterCompacting(zone);
     zone->sweepBreakpoints(fop);
     zone->sweepWeakMaps();
     for (auto* cache : zone->weakCaches())
         cache->sweep();
 
+    if (jit::JitZone* jitZone = zone->jitZone())
+        jitZone->sweep(fop);
+
     for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
         c->objectGroups.sweep(fop);
         c->sweepRegExps();
         c->sweepSavedStacks();
         c->sweepTemplateLiteralMap();
         c->sweepVarNames();
         c->sweepGlobalObject(fop);
         c->sweepSelfHostingScriptSource();
@@ -5210,18 +5213,21 @@ GCRuntime::beginSweepingSweepGroup(AutoL
 
             for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
                 c->sweepGlobalObject(&fop);
                 c->sweepDebugEnvironments();
                 c->sweepJitCompartment(&fop);
                 c->sweepTemplateObjects();
             }
 
-            for (GCSweepGroupIter zone(rt); !zone.done(); zone.next())
+            for (GCSweepGroupIter zone(rt); !zone.done(); zone.next()) {
                 zone->sweepWeakMaps();
+                if (jit::JitZone* jitZone = zone->jitZone())
+                    jitZone->sweep(&fop);
+            }
 
             // Bug 1071218: the following two methods have not yet been
             // refactored to work on a single zone-group at once.
 
             // Collect watch points associated with unreachable objects.
             WatchpointMap::sweepAll(rt);
 
             // Detach unreachable debuggers and global objects from each other.
--- a/js/src/vm/MemoryMetrics.cpp
+++ b/js/src/vm/MemoryMetrics.cpp
@@ -316,16 +316,17 @@ StatsZoneCallback(JSRuntime* rt, void* d
     ZoneStats& zStats = rtStats->zoneStatsVector.back();
     if (!zStats.initStrings(rt))
         MOZ_CRASH("oom");
     rtStats->initExtraZoneStats(zone, &zStats);
     rtStats->currZoneStats = &zStats;
 
     zone->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                  &zStats.typePool,
+                                 &zStats.jitZone,
                                  &zStats.baselineStubsOptimized,
                                  &zStats.cachedCFG,
                                  &zStats.uniqueIdMap,
                                  &zStats.shapeTables,
                                  &rtStats->runtime.atomsMarkBitmaps);
 }
 
 static void
--- a/js/src/vm/TypeInference.cpp
+++ b/js/src/vm/TypeInference.cpp
@@ -4458,30 +4458,26 @@ void
 TypeScript::destroy()
 {
     js_delete(this);
 }
 
 void
 Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                              size_t* typePool,
+                             size_t* jitZone,
                              size_t* baselineStubsOptimized,
                              size_t* cachedCFG,
                              size_t* uniqueIdMap,
                              size_t* shapeTables,
                              size_t* atomsMarkBitmaps)
 {
     *typePool += types.typeLifoAlloc().sizeOfExcludingThis(mallocSizeOf);
-    if (jitZone()) {
-        // These functions return pointers to struct that are embedded within
-        // JitZone, which is why we use sizeOfExcludingThis().
-        *baselineStubsOptimized +=
-            jitZone()->optimizedStubSpace()->sizeOfExcludingThis(mallocSizeOf);
-        *cachedCFG += jitZone()->cfgSpace()->sizeOfExcludingThis(mallocSizeOf);
-    }
+    if (jitZone_)
+        jitZone_->addSizeOfIncludingThis(mallocSizeOf, jitZone, baselineStubsOptimized, cachedCFG);
     *uniqueIdMap += uniqueIds().sizeOfExcludingThis(mallocSizeOf);
     *shapeTables += baseShapes().sizeOfExcludingThis(mallocSizeOf)
                   + initialShapes().sizeOfExcludingThis(mallocSizeOf);
     *atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);
 }
 
 TypeZone::TypeZone(Zone* zone)
   : zone_(zone),
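
The memory accounting follows the data: Zone::addSizeOfIncludingThis now delegates to JitZone::addSizeOfIncludingThis, which counts the JitZone struct and its tables under the new jit-zone bucket, while the embedded stub and CFG spaces keep their own buckets via sizeOfExcludingThis (they are members of JitZone, so the struct's own bytes are already counted, as the removed comment above noted). A standalone sketch of that including/excluding split, using a hypothetical mallocSizeOf callback:

    #include <cstddef>
    #include <vector>

    using MallocSizeOf = size_t (*)(const void*);

    struct StubSpace {
        std::vector<char> storage;
        // Excluding "this": the space is embedded in the JitZone, whose own
        // bytes the caller already counted.
        size_t sizeOfExcludingThis(MallocSizeOf mso) const {
            return mso(storage.data());
        }
    };

    struct JitZoneSketch {
        StubSpace optimizedStubSpace;

        void addSizeOf(MallocSizeOf mso, size_t* jitZone,
                       size_t* baselineStubsOptimized) const {
            *jitZone += mso(this);   // including "this": JitZone is heap-allocated
            *baselineStubsOptimized +=
                optimizedStubSpace.sizeOfExcludingThis(mso);
        }
    };
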
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -1528,16 +1528,20 @@ ReportZoneStats(const JS::ZoneStats& zSt
     ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("regexp-shareds/malloc-heap"),
         zStats.regExpSharedsMallocHeap,
         "Shared compiled regexp data.");
 
     ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("type-pool"),
         zStats.typePool,
         "Type sets and related data.");
 
+    ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("jit-zone"),
+        zStats.jitZone,
+        "The JIT zone.");
+
     ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("baseline/optimized-stubs"),
         zStats.baselineStubsOptimized,
         "The Baseline JIT's optimized IC stubs (excluding code).");
 
     ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("jit-cached-cfg"),
         zStats.cachedCFG,
         "The cached CFG to construct Ion code out of it.");