Bug 848374 - Have a single allocator per compartment for optimized stubs. r=djvj,njn
author Jan de Mooij <jdemooij@mozilla.com>
Thu, 07 Mar 2013 11:42:24 +0100
changeset 124300 e696d62133a47b74432d2ab616277b413e4a033d
parent 124299 5b2f463cf9c271d23c4f741dd0a99aefe49fd4bc
child 124301 250482fc5b039cb40409287dd268a03f986460e0
push id 1603
push user jandemooij@gmail.com
push date Thu, 07 Mar 2013 10:42:57 +0000
reviewers djvj, njn
bugs 848374
milestone 22.0a1
Bug 848374 - Have a single allocator per compartment for optimized stubs. r=djvj,njn
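For readers skimming the diff, here is a minimal, self-contained sketch of the allocator split this patch makes: a shared ICStubSpace base over a bump-style allocator, an OptimizedICStubSpace owned by the IonCompartment (purged wholesale when JIT code is discarded), and a FallbackICStubSpace owned by each BaselineScript. The BumpAlloc class below is only a stand-in for SpiderMonkey's js::LifoAlloc; the chunk sizes follow the new header, and everything else is simplified.

#include <cstddef>
#include <cstdlib>
#include <vector>

// Stand-in for js::LifoAlloc: tracks allocations so they can all be
// released or stolen at once. (Illustrative only, not the real allocator.)
class BumpAlloc {
    std::vector<void *> chunks_;
  public:
    explicit BumpAlloc(size_t /*chunkSize*/) {}
    void *alloc(size_t size) { void *p = malloc(size); chunks_.push_back(p); return p; }
    void freeAll() { for (void *p : chunks_) free(p); chunks_.clear(); }
    void steal(BumpAlloc *other) {
        chunks_.insert(chunks_.end(), other->chunks_.begin(), other->chunks_.end());
        other->chunks_.clear();
    }
};

// Shared base: allocation policy and storage for IC stub data.
struct ICStubSpace {
  protected:
    BumpAlloc allocator_;
    explicit ICStubSpace(size_t chunkSize) : allocator_(chunkSize) {}
  public:
    void *alloc(size_t size) { return allocator_.alloc(size); }
};

// One per IonCompartment; freed wholesale when the compartment discards JIT code.
struct OptimizedICStubSpace : public ICStubSpace {
    OptimizedICStubSpace() : ICStubSpace(4 * 1024) {}
    void free() { allocator_.freeAll(); }
};

// One per BaselineScript; adopted from the compiler's temporary space and
// destroyed only with the BaselineScript itself.
struct FallbackICStubSpace : public ICStubSpace {
    FallbackICStubSpace() : ICStubSpace(256) {}
    void adoptFrom(FallbackICStubSpace *other) { allocator_.steal(&other->allocator_); }
};

The real classes live in js/src/ion/IonCompartment.h (see that hunk below). ICStubCompiler::getStubSpace() in BaselineIC.h now routes stubs that can make calls to the script's fallback space and everything else to the compartment-wide optimized space, and JSCompartment::discardJitCode() frees the optimized space in one shot. The memory reporters split the old baseline-stubs measurement into baseline-fallback-stubs (per script) and baseline-optimized-stubs (per compartment) accordingly.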
js/public/MemoryMetrics.h
js/src/ion/BaselineIC.h
js/src/ion/BaselineJIT.cpp
js/src/ion/BaselineJIT.h
js/src/ion/IonCompartment.h
js/src/ion/shared/BaselineCompiler-shared.h
js/src/jscompartment.cpp
js/src/jscompartment.h
js/src/jsmemorymetrics.cpp
js/xpconnect/src/XPCJSRuntime.cpp
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -165,17 +165,18 @@ struct CompartmentStats
       , stringCharsNonHuge(0)
       , shapesExtraTreeTables(0)
       , shapesExtraDictTables(0)
       , shapesExtraTreeShapeKids(0)
       , shapesCompartmentTables(0)
       , scriptData(0)
       , jaegerData(0)
       , baselineData(0)
-      , baselineStubs(0)
+      , baselineFallbackStubs(0)
+      , baselineOptimizedStubs(0)
       , ionData(0)
       , compartmentObject(0)
       , crossCompartmentWrappersTable(0)
       , regexpCompartment(0)
       , debuggeesSet(0)
       , typeInference()
       , hugeStrings()
     {}
@@ -203,17 +204,18 @@ struct CompartmentStats
       , stringCharsNonHuge(other.stringCharsNonHuge)
       , shapesExtraTreeTables(other.shapesExtraTreeTables)
       , shapesExtraDictTables(other.shapesExtraDictTables)
       , shapesExtraTreeShapeKids(other.shapesExtraTreeShapeKids)
       , shapesCompartmentTables(other.shapesCompartmentTables)
       , scriptData(other.scriptData)
       , jaegerData(other.jaegerData)
       , baselineData(other.baselineData)
-      , baselineStubs(other.baselineStubs)
+      , baselineFallbackStubs(other.baselineFallbackStubs)
+      , baselineOptimizedStubs(other.baselineOptimizedStubs)
       , ionData(other.ionData)
       , compartmentObject(other.compartmentObject)
       , crossCompartmentWrappersTable(other.crossCompartmentWrappersTable)
       , regexpCompartment(other.regexpCompartment)
       , debuggeesSet(other.debuggeesSet)
       , typeInference(other.typeInference)
     {
       hugeStrings.append(other.hugeStrings);
@@ -247,17 +249,18 @@ struct CompartmentStats
     size_t stringCharsNonHuge;
     size_t shapesExtraTreeTables;
     size_t shapesExtraDictTables;
     size_t shapesExtraTreeShapeKids;
     size_t shapesCompartmentTables;
     size_t scriptData;
     size_t jaegerData;
     size_t baselineData;
-    size_t baselineStubs;
+    size_t baselineFallbackStubs;
+    size_t baselineOptimizedStubs;
     size_t ionData;
     size_t compartmentObject;
     size_t crossCompartmentWrappersTable;
     size_t regexpCompartment;
     size_t debuggeesSet;
 
     TypeInferenceSizes typeInference;
     js::Vector<HugeStringInfo, 0, js::SystemAllocPolicy> hugeStrings;
@@ -289,17 +292,18 @@ struct CompartmentStats
         ADD(stringCharsNonHuge);
         ADD(shapesExtraTreeTables);
         ADD(shapesExtraDictTables);
         ADD(shapesExtraTreeShapeKids);
         ADD(shapesCompartmentTables);
         ADD(scriptData);
         ADD(jaegerData);
         ADD(baselineData);
-        ADD(baselineStubs);
+        ADD(baselineFallbackStubs);
+        ADD(baselineOptimizedStubs);
         ADD(ionData);
         ADD(compartmentObject);
         ADD(crossCompartmentWrappersTable);
         ADD(regexpCompartment);
         ADD(debuggeesSet);
 
         #undef ADD
 
--- a/js/src/ion/BaselineIC.h
+++ b/js/src/ion/BaselineIC.h
@@ -888,19 +888,19 @@ class ICStubCompiler
 
         return regs;
     }
 
   public:
     virtual ICStub *getStub(ICStubSpace *space) = 0;
 
     ICStubSpace *getStubSpace(JSScript *script) {
-        return ICStub::CanMakeCalls(kind)
-            ? script->baselineScript()->fallbackStubSpace()
-            : script->baselineScript()->optimizedStubSpace();
+        if (ICStub::CanMakeCalls(kind))
+            return script->baselineScript()->fallbackStubSpace();
+        return script->compartment()->ionCompartment()->optimizedStubSpace();
     }
 };
 
 // Base class for stub compilers that can generate multiple stubcodes.
 // These compilers need access to the JSOp they are compiling for.
 class ICMultiStubCompiler : public ICStubCompiler
 {
   protected:
--- a/js/src/ion/BaselineJIT.cpp
+++ b/js/src/ion/BaselineJIT.cpp
@@ -30,17 +30,16 @@ PCMappingSlotInfo::ToSlotLocation(const 
     }
     JS_ASSERT(stackVal->kind() != StackValue::Stack);
     return SlotIgnore;
 }
 
 BaselineScript::BaselineScript(uint32_t prologueOffset)
   : method_(NULL),
     fallbackStubSpace_(),
-    optimizedStubSpace_(),
     prologueOffset_(prologueOffset),
     flags_(0)
 { }
 
 static const size_t BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 1 << 12; //XXX
 
 // XXX copied from Ion.cpp
 class AutoDestroyAllocator
@@ -504,17 +503,17 @@ BaselineScript::copyICEntries(HandleScri
         if (realEntry.firstStub()->isTableSwitch()) {
             ICTableSwitch *stub = realEntry.firstStub()->toTableSwitch();
             stub->fixupJumpTable(script, this);
         }
     }
 }
 
 void
-BaselineScript::adoptFallbackStubs(ICStubSpace *stubSpace)
+BaselineScript::adoptFallbackStubs(FallbackICStubSpace *stubSpace)
 {
     fallbackStubSpace_.adoptFrom(stubSpace);
 }
 
 void
 BaselineScript::copyPCMappingEntries(const CompactBufferWriter &entries)
 {
     JS_ASSERT(entries.length() > 0);
@@ -675,18 +674,16 @@ BaselineScript::purgeOptimizedStubs(Zone
 
         ICStub *stub = entry.firstStub();
         while (stub->next()) {
             JS_ASSERT(stub->allocatedInFallbackSpace());
             stub = stub->next();
         }
     }
 #endif
-
-    optimizedStubSpace_.free();
 }
 
 void
 ion::FinishDiscardBaselineScript(FreeOp *fop, UnrootedScript script)
 {
     if (!script->hasBaselineScript())
         return;
 
@@ -711,16 +708,16 @@ ion::IonCompartment::toggleBaselineStubB
     for (ICStubCodeMap::Enum e(*stubCodes_); !e.empty(); e.popFront()) {
         IonCode *code = *e.front().value.unsafeGet();
         code->togglePreBarriers(enabled);
     }
 }
 
 void
 ion::SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data,
-                        size_t *stubs)
+                        size_t *fallbackStubs)
 {
     *data = 0;
-    *stubs = 0;
+    *fallbackStubs = 0;
 
     if (script->hasBaselineScript())
-        script->baseline->sizeOfIncludingThis(mallocSizeOf, data, stubs);
+        script->baseline->sizeOfIncludingThis(mallocSizeOf, data, fallbackStubs);
 }
--- a/js/src/ion/BaselineJIT.h
+++ b/js/src/ion/BaselineJIT.h
@@ -19,45 +19,16 @@
 
 namespace js {
 namespace ion {
 
 class StackValue;
 struct ICEntry;
 class ICStub;
 
-// ICStubSpace is an abstraction for allocation policy and storage for stub data.
-struct ICStubSpace
-{
-  private:
-    const static size_t STUB_DEFAULT_CHUNK_SIZE = 256;
-    LifoAlloc allocator_;
-
-  public:
-    inline ICStubSpace()
-      : allocator_(STUB_DEFAULT_CHUNK_SIZE) {}
-
-    inline void *alloc(size_t size) {
-        return allocator_.alloc(size);
-    }
-
-    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
-
-    inline void adoptFrom(ICStubSpace *other) {
-        allocator_.steal(&(other->allocator_));
-    }
-
-    void free() {
-        allocator_.freeAll();
-    }
-    size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
-        return allocator_.sizeOfExcludingThis(mallocSizeOf);
-    }
-};
-
 class PCMappingSlotInfo
 {
     uint8_t slotInfo_;
 
   public:
     // SlotInfo encoding:
     //  Bits 0 & 1: number of slots at top of stack which are unsynced.
     //  Bits 2 & 3: SlotLocation of top slot value (only relevant if numUnsynced > 0).
@@ -127,20 +98,17 @@ struct BaselineScript
   public:
     static const uint32_t MAX_JSSCRIPT_LENGTH = 0x0fffffffu;
 
   private:
     // Code pointer containing the actual method.
     HeapPtr<IonCode> method_;
 
     // Allocated space for fallback stubs.
-    ICStubSpace fallbackStubSpace_;
-
-    // Allocated space for optimized stubs.
-    ICStubSpace optimizedStubSpace_;
+    FallbackICStubSpace fallbackStubSpace_;
 
     // Native code offset right before the scope chain is initialized.
     uint32_t prologueOffset_;
 
   public:
     enum Flag {
         // Flag set by JSScript::argumentsOptimizationFailed. Similar to
         // JSScript::needsArgsObj_, but can be read from JIT code.
@@ -176,23 +144,23 @@ struct BaselineScript
     static void Destroy(FreeOp *fop, BaselineScript *script);
 
     void purgeOptimizedStubs(Zone *zone);
 
     static inline size_t offsetOfMethod() {
         return offsetof(BaselineScript, method_);
     }
 
-    void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *data, size_t *stubs) const {
+    void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *data,
+                             size_t *fallbackStubs) const {
         *data = mallocSizeOf(this);
 
         // data already includes the ICStubSpace itself, so use
         // sizeOfExcludingThis.
-        *stubs = fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf) +
-            optimizedStubSpace_.sizeOfExcludingThis(mallocSizeOf);
+        *fallbackStubs = fallbackStubSpace_.sizeOfExcludingThis(mallocSizeOf);
     }
 
     bool active() const {
         return flags_ & ACTIVE;
     }
     void setActive() {
         flags_ |= ACTIVE;
     }
@@ -215,24 +183,20 @@ struct BaselineScript
         return (ICEntry *)(reinterpret_cast<uint8_t *>(this) + icEntriesOffset_);
     }
     PCMappingIndexEntry *pcMappingIndexEntryList() {
         return (PCMappingIndexEntry *)(reinterpret_cast<uint8_t *>(this) + pcMappingIndexOffset_);
     }
     uint8_t *pcMappingData() {
         return reinterpret_cast<uint8_t *>(this) + pcMappingOffset_;
     }
-    ICStubSpace *fallbackStubSpace() {
+    FallbackICStubSpace *fallbackStubSpace() {
         return &fallbackStubSpace_;
     }
 
-    ICStubSpace *optimizedStubSpace() {
-        return &optimizedStubSpace_;
-    }
-
     IonCode *method() const {
         return method_;
     }
     void setMethod(IonCode *code) {
         JS_ASSERT(!method_);
         method_ = code;
     }
 
@@ -247,17 +211,17 @@ struct BaselineScript
     ICEntry &icEntryFromReturnAddress(uint8_t *returnAddr);
     uint8_t *returnAddressForIC(const ICEntry &ent);
 
     size_t numICEntries() const {
         return icEntries_;
     }
 
     void copyICEntries(HandleScript script, const ICEntry *entries, MacroAssembler &masm);
-    void adoptFallbackStubs(ICStubSpace *stubSpace);
+    void adoptFallbackStubs(FallbackICStubSpace *stubSpace);
 
     PCMappingIndexEntry &pcMappingIndexEntry(size_t index);
     CompactBufferReader pcMappingReader(size_t indexEntry);
 
     size_t numPCMappingIndexEntries() const {
         return pcMappingIndexEntries_;
     }
 
@@ -289,17 +253,18 @@ EnterBaselineMethod(JSContext *cx, Stack
 
 IonExecStatus
 EnterBaselineAtBranch(JSContext *cx, StackFrame *fp, jsbytecode *pc);
 
 void
 FinishDiscardBaselineScript(FreeOp *fop, UnrootedScript script);
 
 void
-SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data, size_t *stubs);
+SizeOfBaselineData(JSScript *script, JSMallocSizeOfFun mallocSizeOf, size_t *data,
+                   size_t *fallbackStubs);
 
 struct BaselineBailoutInfo
 {
     // Pointer into the current C stack, where overwriting will start.
     uint8_t *incomingStack;
 
     // The top and bottom heapspace addresses of the reconstructed stack
     // which will be copied to the bottom.
--- a/js/src/ion/IonCompartment.h
+++ b/js/src/ion/IonCompartment.h
@@ -28,16 +28,76 @@ typedef void (*EnterIonCode)(void *code,
                              CalleeToken calleeToken, JSObject *scopeChain,
                              size_t numStackValues, Value *vp);
 
 class IonActivation;
 class IonBuilder;
 
 typedef Vector<IonBuilder*, 0, SystemAllocPolicy> OffThreadCompilationVector;
 
+// ICStubSpace is an abstraction for allocation policy and storage for stub data.
+// There are two kinds of stubs: optimized stubs and fallback stubs (the latter
+// also includes stubs that can make non-tail calls that can GC).
+//
+// Optimized stubs are allocated per-compartment and are always purged when
+// JIT-code is discarded. Fallback stubs are allocated per BaselineScript and
+// are only destroyed when the BaselineScript is destroyed.
+struct ICStubSpace
+{
+  protected:
+    LifoAlloc allocator_;
+
+    explicit ICStubSpace(size_t chunkSize)
+      : allocator_(chunkSize)
+    {}
+
+  public:
+    inline void *alloc(size_t size) {
+        return allocator_.alloc(size);
+    }
+
+    JS_DECLARE_NEW_METHODS(allocate, alloc, inline)
+
+    size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
+        return allocator_.sizeOfExcludingThis(mallocSizeOf);
+    }
+};
+
+// Space for optimized stubs. Every IonCompartment has a single
+// OptimizedICStubSpace.
+struct OptimizedICStubSpace : public ICStubSpace
+{
+    const static size_t STUB_DEFAULT_CHUNK_SIZE = 4 * 1024;
+
+  public:
+    OptimizedICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    void free() {
+        allocator_.freeAll();
+    }
+};
+
+// Space for fallback stubs. Every BaselineScript has a
+// FallbackICStubSpace.
+struct FallbackICStubSpace : public ICStubSpace
+{
+    const static size_t STUB_DEFAULT_CHUNK_SIZE = 256;
+
+  public:
+    FallbackICStubSpace()
+      : ICStubSpace(STUB_DEFAULT_CHUNK_SIZE)
+    {}
+
+    inline void adoptFrom(FallbackICStubSpace *other) {
+        allocator_.steal(&(other->allocator_));
+    }
+};
+
 class IonRuntime
 {
     friend class IonCompartment;
 
     // Executable allocator.
     JSC::ExecutableAllocator *execAlloc_;
 
     // Trampoline for entering JIT code. Contains OSR prologue.
@@ -126,16 +186,19 @@ class IonCompartment
     // Map ICStub keys to ICStub shared code objects.
     typedef WeakValueCache<uint32_t, ReadBarriered<IonCode> > ICStubCodeMap;
     ICStubCodeMap *stubCodes_;
 
     // Keep track of offset into baseline ICCall_Scripted stub's code at return
     // point from called script.
     void *baselineCallReturnAddr_;
 
+    // Allocated space for optimized baseline stubs.
+    OptimizedICStubSpace optimizedStubSpace_;
+
   public:
     IonCode *getVMWrapper(const VMFunction &f);
 
     OffThreadCompilationVector &finishedOffThreadCompilations() {
         return finishedOffThreadCompilations_;
     }
 
     IonCode *getStubCode(uint32_t key) {
@@ -216,16 +279,19 @@ class IonCompartment
 
     AutoFlushCache *flusher() {
         return flusher_;
     }
     void setFlusher(AutoFlushCache *fl) {
         if (!flusher_ || !fl)
             flusher_ = fl;
     }
+    OptimizedICStubSpace *optimizedStubSpace() {
+        return &optimizedStubSpace_;
+    }
 };
 
 class BailoutClosure;
 
 class IonActivation
 {
   private:
     JSContext *cx_;
--- a/js/src/ion/shared/BaselineCompiler-shared.h
+++ b/js/src/ion/shared/BaselineCompiler-shared.h
@@ -24,17 +24,17 @@ class BaselineCompilerShared
     RootedScript script;
     jsbytecode *pc;
     MacroAssembler masm;
     bool ionCompileable_;
     bool debugMode_;
 
     FrameInfo frame;
 
-    ICStubSpace stubSpace_;
+    FallbackICStubSpace stubSpace_;
     js::Vector<ICEntry, 16, SystemAllocPolicy> icEntries_;
 
     // Stores the native code offset for a bytecode pc.
     struct PCMappingEntry
     {
         uint32_t pcOffset;
         uint32_t nativeOffset;
         PCMappingSlotInfo slotInfo;
--- a/js/src/jscompartment.cpp
+++ b/js/src/jscompartment.cpp
@@ -629,16 +629,22 @@ JSCompartment::discardJitCode(FreeOp *fo
             /*
              * Use counts for scripts are reset on GC. After discarding code we
              * need to let it warm back up to get information such as which
              * opcodes are setting array holes or accessing getter properties.
              */
             script->resetUseCount();
         }
 
+#ifdef JS_ION
+        /* Free optimized baseline stubs. */
+        if (ionCompartment())
+            ionCompartment()->optimizedStubSpace()->free();
+#endif
+
         types.sweepCompilerOutputs(fop, discardConstraints);
     }
 
 #endif /* JS_METHODJIT */
 }
 
 void
 JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
@@ -1011,26 +1017,29 @@ JSCompartment::sweepBreakpoints(FreeOp *
         }
     }
 }
 
 void
 JSCompartment::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compartmentObject,
                                    TypeInferenceSizes *tiSizes, size_t *shapesCompartmentTables,
                                    size_t *crossCompartmentWrappersArg, size_t *regexpCompartment,
-                                   size_t *debuggeesSet)
+                                   size_t *debuggeesSet, size_t *baselineOptimizedStubs)
 {
     *compartmentObject = mallocSizeOf(this);
     sizeOfTypeInferenceData(tiSizes, mallocSizeOf);
     *shapesCompartmentTables = baseShapes.sizeOfExcludingThis(mallocSizeOf)
                              + initialShapes.sizeOfExcludingThis(mallocSizeOf)
                              + newTypeObjects.sizeOfExcludingThis(mallocSizeOf)
                              + lazyTypeObjects.sizeOfExcludingThis(mallocSizeOf);
     *crossCompartmentWrappersArg = crossCompartmentWrappers.sizeOfExcludingThis(mallocSizeOf);
     *regexpCompartment = regExps.sizeOfExcludingThis(mallocSizeOf);
     *debuggeesSet = debuggees.sizeOfExcludingThis(mallocSizeOf);
+    *baselineOptimizedStubs = ionCompartment()
+        ? ionCompartment()->optimizedStubSpace()->sizeOfExcludingThis(mallocSizeOf)
+        : 0;
 }
 
 void
 JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
 {
     zone()->allocator.arenas.adoptArenas(rt, &workerAllocator->arenas);
 }
--- a/js/src/jscompartment.h
+++ b/js/src/jscompartment.h
@@ -359,17 +359,18 @@ struct JSCompartment : private JS::shado
 
   private:
     void sizeOfTypeInferenceData(JS::TypeInferenceSizes *stats, JSMallocSizeOfFun mallocSizeOf);
 
   public:
     void sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, size_t *compartmentObject,
                              JS::TypeInferenceSizes *tiSizes,
                              size_t *shapesCompartmentTables, size_t *crossCompartmentWrappers,
-                             size_t *regexpCompartment, size_t *debuggeesSet);
+                             size_t *regexpCompartment, size_t *debuggeesSet,
+                             size_t *baselineOptimizedStubs);
 
     /*
      * Shared scope property tree, and arena-pool for allocating its nodes.
      */
     js::PropertyTree             propertyTree;
 
     /* Set of all unowned base shapes in the compartment. */
     js::BaseShapeSet             baseShapes;
--- a/js/src/jsmemorymetrics.cpp
+++ b/js/src/jsmemorymetrics.cpp
@@ -91,17 +91,18 @@ StatsCompartmentCallback(JSRuntime *rt, 
 
     // Measure the compartment object itself, and things hanging off it.
     compartment->sizeOfIncludingThis(rtStats->mallocSizeOf_,
                                      &cStats.compartmentObject,
                                      &cStats.typeInference,
                                      &cStats.shapesCompartmentTables,
                                      &cStats.crossCompartmentWrappersTable,
                                      &cStats.regexpCompartment,
-                                     &cStats.debuggeesSet);
+                                     &cStats.debuggeesSet,
+                                     &cStats.baselineOptimizedStubs);
 }
 
 static void
 StatsChunkCallback(JSRuntime *rt, void *data, gc::Chunk *chunk)
 {
     RuntimeStats *rtStats = static_cast<RuntimeStats *>(data);
     for (size_t i = 0; i < gc::ArenasPerChunk; i++)
         if (chunk->decommittedArenas.get(i))
@@ -213,20 +214,21 @@ StatsCellCallback(JSRuntime *rt, void *d
     case JSTRACE_SCRIPT:
     {
         JSScript *script = static_cast<JSScript *>(thing);
         cStats->gcHeapScripts += thingSize;
         cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf_);
 #ifdef JS_METHODJIT
         cStats->jaegerData += script->sizeOfJitScripts(rtStats->mallocSizeOf_);
 # ifdef JS_ION
-        size_t baselineData = 0, baselineStubs = 0;
-        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData, &baselineStubs);
+        size_t baselineData = 0, baselineFallbackStubs = 0;
+        ion::SizeOfBaselineData(script, rtStats->mallocSizeOf_, &baselineData,
+                                &baselineFallbackStubs);
         cStats->baselineData += baselineData;
-        cStats->baselineStubs += baselineStubs;
+        cStats->baselineFallbackStubs += baselineFallbackStubs;
         cStats->ionData += ion::SizeOfIonData(script, rtStats->mallocSizeOf_);
 # endif
 #endif
 
         ScriptSource *ss = script->scriptSource();
         SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
         if (!entry) {
             closure->seenSources.add(entry, ss); // Not much to be done on failure.
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -1711,19 +1711,23 @@ ReportCompartmentStats(const JS::Compart
                   "Memory used by the JaegerMonkey JIT for compilation data: "
                   "JITScripts, native maps, and inline cache structs.");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-data"),
                   cStats.baselineData,
                   "Memory used by the Baseline JIT for compilation data: "
                   "BaselineScripts.");
 
-    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-stubs"),
-                  cStats.baselineStubs,
-                  "Memory used by Baseline IC stubs (excluding code).");
+    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-fallback-stubs"),
+                  cStats.baselineFallbackStubs,
+                  "Memory used by Baseline fallback IC stubs (excluding code).");
+
+    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("baseline-optimized-stubs"),
+                  cStats.baselineOptimizedStubs,
+                  "Memory used by Baseline optimized IC stubs (excluding code).");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("ion-data"),
                   cStats.ionData,
                   "Memory used by the IonMonkey JIT for compilation data: "
                   "IonScripts.");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("compartment-object"),
                   cStats.compartmentObject,