Bug 747202 - Separate and clean up JaegerMonkey's and IonMonkey's memory reporting. r=dvander.
author       Nicholas Nethercote <nnethercote@mozilla.com>
date         Wed, 19 Sep 2012 18:16:49 -0700
changeset    107678 fc78ad0511323a15c03f3c0cabcdd2db415b7dc7
parent       107677 debc0d341e7d1823da2d38db0ae54ffdb5791a2d
child        107679 74db71024d857293f3485afb69d3c3bf15fb40b2
push id      82
push user    shu@rfrn.org
push date    Fri, 05 Oct 2012 13:20:22 +0000
reviewers    dvander
bugs         747202
milestone    18.0a1
js/public/MemoryMetrics.h
js/src/assembler/jit/ExecutableAllocator.cpp
js/src/assembler/jit/ExecutableAllocator.h
js/src/ion/IonCode.h
js/src/ion/IonLinker.h
js/src/jscntxt.cpp
js/src/jsmemorymetrics.cpp
js/src/methodjit/BaseCompiler.h
js/src/methodjit/Compiler.cpp
js/src/methodjit/MonoIC.cpp
js/src/methodjit/PolyIC.cpp
js/src/methodjit/StubCompiler.cpp
js/src/methodjit/TrampolineCompiler.cpp
js/xpconnect/src/XPCJSRuntime.cpp
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -44,35 +44,37 @@ struct TypeInferenceSizes
 struct RuntimeSizes
 {
     RuntimeSizes()
       : object(0)
       , atomsTable(0)
       , contexts(0)
       , dtoa(0)
       , temporary(0)
-      , mjitCode(0)
+      , jaegerCode(0)
+      , ionCode(0)
       , regexpCode(0)
-      , unusedCodeMemory(0)
+      , unusedCode(0)
       , stackCommitted(0)
       , gcMarker(0)
       , mathCache(0)
       , scriptFilenames(0)
       , scriptSources(0)
       , compartmentObjects(0)
     {}
 
     size_t object;
     size_t atomsTable;
     size_t contexts;
     size_t dtoa;
     size_t temporary;
-    size_t mjitCode;
+    size_t jaegerCode;
+    size_t ionCode;
     size_t regexpCode;
-    size_t unusedCodeMemory;
+    size_t unusedCode;
     size_t stackCommitted;
     size_t gcMarker;
     size_t mathCache;
     size_t scriptFilenames;
     size_t scriptSources;
 
     // This is the exception to the "RuntimeSizes doesn't measure things within
     // compartments" rule.  We combine the sizes of all the JSCompartment
@@ -99,31 +101,33 @@ struct CompartmentStats
     size_t gcHeapObjectsNonFunction;
     size_t gcHeapObjectsFunction;
     size_t gcHeapStrings;
     size_t gcHeapShapesTree;
     size_t gcHeapShapesDict;
     size_t gcHeapShapesBase;
     size_t gcHeapScripts;
     size_t gcHeapTypeObjects;
+    size_t gcHeapIonCodes;
 #if JS_HAS_XML_SUPPORT
     size_t gcHeapXML;
 #endif
 
     size_t objectSlots;
     size_t objectElements;
     size_t objectMisc;
     size_t objectPrivate;
     size_t stringChars;
     size_t shapesExtraTreeTables;
     size_t shapesExtraDictTables;
     size_t shapesExtraTreeShapeKids;
     size_t shapesCompartmentTables;
     size_t scriptData;
-    size_t mjitData;
+    size_t jaegerData;
+    size_t ionData;
     size_t crossCompartmentWrappers;
 
     TypeInferenceSizes typeInferenceSizes;
 
     // Add cStats's numbers to this object's numbers.
     void add(CompartmentStats &cStats) {
         #define ADD(x)  this->x += cStats.x
 
@@ -133,31 +137,33 @@ struct CompartmentStats
         ADD(gcHeapObjectsNonFunction);
         ADD(gcHeapObjectsFunction);
         ADD(gcHeapStrings);
         ADD(gcHeapShapesTree);
         ADD(gcHeapShapesDict);
         ADD(gcHeapShapesBase);
         ADD(gcHeapScripts);
         ADD(gcHeapTypeObjects);
+        ADD(gcHeapIonCodes);
     #if JS_HAS_XML_SUPPORT
         ADD(gcHeapXML);
     #endif
 
         ADD(objectSlots);
         ADD(objectElements);
         ADD(objectMisc);
         ADD(objectPrivate);
         ADD(stringChars);
         ADD(shapesExtraTreeTables);
         ADD(shapesExtraDictTables);
         ADD(shapesExtraTreeShapeKids);
         ADD(shapesCompartmentTables);
         ADD(scriptData);
-        ADD(mjitData);
+        ADD(jaegerData);
+        ADD(ionData);
         ADD(crossCompartmentWrappers);
 
         #undef ADD
 
         typeInferenceSizes.add(cStats.typeInferenceSizes);
     }
 
     // The size of all the live things in the GC heap.
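
(Illustrative sketch, not part of the patch: the ADD-macro aggregation used by CompartmentStats::add() above, reduced to just the renamed jaegerData/ionData fields. MiniCompartmentStats is a hypothetical stand-in invented here for illustration, not SpiderMonkey code.)

    #include <stddef.h>
    #include <stdio.h>

    struct MiniCompartmentStats
    {
        size_t jaegerData;
        size_t ionData;

        MiniCompartmentStats() : jaegerData(0), ionData(0) {}

        // Add cStats's numbers to this object's numbers, following the
        // ADD-macro pattern of CompartmentStats::add().
        void add(const MiniCompartmentStats &cStats) {
            #define ADD(x) this->x += cStats.x
            ADD(jaegerData);
            ADD(ionData);
            #undef ADD
        }
    };

    int main()
    {
        MiniCompartmentStats total, a, b;
        a.jaegerData = 1024; a.ionData = 512;
        b.jaegerData = 2048; b.ionData = 0;
        total.add(a);
        total.add(b);
        printf("jaeger-data: %zu bytes, ion-data: %zu bytes\n",
               total.jaegerData, total.ionData);
        return 0;
    }
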
--- a/js/src/assembler/jit/ExecutableAllocator.cpp
+++ b/js/src/assembler/jit/ExecutableAllocator.cpp
@@ -35,27 +35,30 @@ size_t ExecutableAllocator::pageSize = 0
 size_t ExecutableAllocator::largeAllocSize = 0;
 
 ExecutablePool::~ExecutablePool()
 {
     m_allocator->releasePoolPages(this);
 }
 
 void
-ExecutableAllocator::sizeOfCode(size_t *method, size_t *regexp, size_t *unused) const
+ExecutableAllocator::sizeOfCode(size_t *jaeger, size_t *ion, size_t *regexp, size_t *unused) const
 {
-    *method = 0;
+    *jaeger = 0;
+    *ion    = 0;
     *regexp = 0;
     *unused = 0;
 
     if (m_pools.initialized()) {
         for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
             ExecutablePool* pool = r.front();
-            *method += pool->m_mjitCodeMethod;
-            *regexp += pool->m_mjitCodeRegexp;
-            *unused += pool->m_allocation.size - pool->m_mjitCodeMethod - pool->m_mjitCodeRegexp;
+            *jaeger += pool->m_jaegerCodeBytes;
+            *ion    += pool->m_ionCodeBytes;
+            *regexp += pool->m_regexpCodeBytes;
+            *unused += pool->m_allocation.size - pool->m_jaegerCodeBytes - pool->m_ionCodeBytes
+                                               - pool->m_regexpCodeBytes;
         }
     }
 }
 
 }
 
 #endif // HAVE(ASSEMBLER)
--- a/js/src/assembler/jit/ExecutableAllocator.h
+++ b/js/src/assembler/jit/ExecutableAllocator.h
@@ -77,19 +77,19 @@ extern  "C" void sync_instruction_memory
 #if ENABLE_ASSEMBLER
 
 //#define DEBUG_STRESS_JSC_ALLOCATOR
 
 namespace JSC {
 
   class ExecutableAllocator;
 
-  enum CodeKind { METHOD_CODE, REGEXP_CODE };
+  enum CodeKind { JAEGER_CODE, ION_CODE, REGEXP_CODE };
 
-  // These are reference-counted. A new one starts with a count of 1. 
+  // These are reference-counted. A new one starts with a count of 1.
   class ExecutablePool {
 
     friend class ExecutableAllocator;
 private:
     struct Allocation {
         char* pages;
         size_t size;
 #if WTF_OS_SYMBIAN
@@ -99,41 +99,43 @@ private:
 
     ExecutableAllocator* m_allocator;
     char* m_freePtr;
     char* m_end;
     Allocation m_allocation;
 
     // Reference count for automatic reclamation.
     unsigned m_refCount;
- 
+
     // Number of bytes currently used for Method and Regexp JIT code.
-    size_t m_mjitCodeMethod;
-    size_t m_mjitCodeRegexp;
+    size_t m_jaegerCodeBytes;
+    size_t m_ionCodeBytes;
+    size_t m_regexpCodeBytes;
 
 public:
     // Flag for downstream use, whether to try to release references to this pool.
     bool m_destroy;
 
     // GC number in which the m_destroy flag was most recently set. Used downstream to
     // remember whether m_destroy was computed for the currently active GC.
     size_t m_gcNumber;
 
     void release(bool willDestroy = false)
-    { 
+    {
         JS_ASSERT(m_refCount != 0);
         // XXX: disabled, see bug 654820.
         //JS_ASSERT_IF(willDestroy, m_refCount == 1);
         if (--m_refCount == 0)
             js_delete(this);
     }
 
     ExecutablePool(ExecutableAllocator* allocator, Allocation a)
       : m_allocator(allocator), m_freePtr(a.pages), m_end(m_freePtr + a.size), m_allocation(a),
-        m_refCount(1), m_mjitCodeMethod(0), m_mjitCodeRegexp(0), m_destroy(false), m_gcNumber(0)
+        m_refCount(1), m_jaegerCodeBytes(0), m_ionCodeBytes(0), m_regexpCodeBytes(0),
+        m_destroy(false), m_gcNumber(0)
     { }
 
     ~ExecutablePool();
 
 private:
     // It should be impossible for us to roll over, because only small
     // pools have multiple holders, and they have one holder per chunk
     // of generated code, and they only hold 16KB or so of code.
@@ -144,25 +146,26 @@ private:
     }
 
     void* alloc(size_t n, CodeKind kind)
     {
         JS_ASSERT(n <= available());
         void *result = m_freePtr;
         m_freePtr += n;
 
-        if ( kind == REGEXP_CODE )
-            m_mjitCodeRegexp += n;
-        else
-            m_mjitCodeMethod += n;
-
+        switch (kind) {
+          case JAEGER_CODE: m_jaegerCodeBytes += n;          break;
+          case ION_CODE:    m_ionCodeBytes    += n;          break;
+          case REGEXP_CODE: m_regexpCodeBytes += n;          break;
+          default:          JS_NOT_REACHED("bad code kind"); break;
+        }
         return result;
     }
-    
-    size_t available() const { 
+
+    size_t available() const {
         JS_ASSERT(m_end >= m_freePtr);
         return m_end - m_freePtr;
     }
 };
 
 enum AllocationBehavior
 {
     AllocationCanRandomize,
@@ -238,17 +241,17 @@ public:
         JS_ASSERT(pool->m_allocation.pages);
         if (destroyCallback)
             destroyCallback(pool->m_allocation.pages, pool->m_allocation.size);
         systemRelease(pool->m_allocation);
         JS_ASSERT(m_pools.initialized());
         m_pools.remove(m_pools.lookup(pool));   // this asserts if |pool| is not in m_pools
     }
 
-    void sizeOfCode(size_t *method, size_t *regexp, size_t *unused) const;
+    void sizeOfCode(size_t *jaeger, size_t *ion, size_t *regexp, size_t *unused) const;
 
     void setDestroyCallback(DestroyCallback destroyCallback) {
         this->destroyCallback = destroyCallback;
     }
 
     void setRandomize(bool enabled) {
         allocBehavior = enabled ? AllocationCanRandomize : AllocationDeterministic;
     }
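
(Illustrative sketch, not part of the patch: the per-CodeKind byte accounting that ExecutablePool::alloc() and ExecutableAllocator::sizeOfCode() implement above, reduced to a standalone class. MiniPool, its fixed capacity, and the byte counts are simplifications assumed for illustration only.)

    #include <stddef.h>
    #include <stdio.h>

    enum CodeKind { JAEGER_CODE, ION_CODE, REGEXP_CODE };

    class MiniPool
    {
        size_t capacity_;
        size_t jaegerBytes_;
        size_t ionBytes_;
        size_t regexpBytes_;

      public:
        explicit MiniPool(size_t capacity)
          : capacity_(capacity), jaegerBytes_(0), ionBytes_(0), regexpBytes_(0)
        {}

        // Record an allocation of n bytes against the given code kind,
        // mirroring the switch added to ExecutablePool::alloc().
        void alloc(size_t n, CodeKind kind) {
            switch (kind) {
              case JAEGER_CODE: jaegerBytes_ += n; break;
              case ION_CODE:    ionBytes_    += n; break;
              case REGEXP_CODE: regexpBytes_ += n; break;
            }
        }

        // Report sizes through out-parameters, like ExecutableAllocator::sizeOfCode();
        // unused space is whatever remains of the pool's allocation.
        void sizeOfCode(size_t *jaeger, size_t *ion, size_t *regexp, size_t *unused) const {
            *jaeger = jaegerBytes_;
            *ion    = ionBytes_;
            *regexp = regexpBytes_;
            *unused = capacity_ - jaegerBytes_ - ionBytes_ - regexpBytes_;
        }
    };

    int main()
    {
        MiniPool pool(16384);
        pool.alloc(4096, JAEGER_CODE);
        pool.alloc(2048, ION_CODE);
        size_t jaeger, ion, regexp, unused;
        pool.sizeOfCode(&jaeger, &ion, &regexp, &unused);
        printf("jaeger=%zu ion=%zu regexp=%zu unused=%zu\n", jaeger, ion, regexp, unused);
        return 0;
    }
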
--- a/js/src/ion/IonCode.h
+++ b/js/src/ion/IonCode.h
@@ -75,19 +75,16 @@ class IonCode : public gc::Cell
 
   public:
     uint8 *raw() const {
         return code_;
     }
     size_t instructionsSize() const {
         return insnSize_;
     }
-    size_t bufferSize() const {
-        return bufferSize_;
-    }
     void trace(JSTracer *trc);
     void finalize(FreeOp *fop);
     void setInvalidated() {
         invalidated_ = true;
     }
 
     // If this IonCode object has been, effectively, corrupted due to
     // invalidation patching, then we have to remember this so we don't try and
@@ -331,18 +328,18 @@ struct IonScript
     }
     JSScript *getScript(size_t i) const {
         JS_ASSERT(i < scriptEntries_);
         return scriptList()[i];
     }
     size_t scriptEntries() const {
         return scriptEntries_;
     }
-    size_t size() const {
-        return scriptList_ + scriptEntries_ * sizeof(JSScript *);
+    size_t sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf) const {
+        return mallocSizeOf(this);
     }
     HeapValue &getConstant(size_t index) {
         JS_ASSERT(index < numConstants());
         return constants()[index];
     }
     size_t numConstants() const {
         return constantEntries_;
     }
--- a/js/src/ion/IonLinker.h
+++ b/js/src/ion/IonLinker.h
@@ -35,17 +35,17 @@ class Linker
         if (masm.oom())
             return fail(cx);
 
         JSC::ExecutablePool *pool;
         size_t bytesNeeded = masm.bytesNeeded() + sizeof(IonCode *) + CodeAlignment;
         if (bytesNeeded >= MAX_BUFFER_SIZE)
             return fail(cx);
 
-        uint8 *result = (uint8 *)comp->execAlloc()->alloc(bytesNeeded, &pool, JSC::METHOD_CODE);
+        uint8 *result = (uint8 *)comp->execAlloc()->alloc(bytesNeeded, &pool, JSC::ION_CODE);
         if (!result)
             return fail(cx);
 
         // The IonCode pointer will be stored right before the code buffer.
         uint8 *codeStart = result + sizeof(IonCode *);
 
         // Bump the code up to a nice alignment.
         codeStart = (uint8 *)AlignBytes((uintptr_t)codeStart, CodeAlignment);
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -110,61 +110,65 @@ struct CallbackData
 
 void CompartmentCallback(JSRuntime *rt, void *vdata, JSCompartment *compartment)
 {
     CallbackData *data = (CallbackData *) vdata;
     data->n += data->mallocSizeOf(compartment);
 }
 
 void
-JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, RuntimeSizes *runtime)
+JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, RuntimeSizes *rtSizes)
 {
-    runtime->object = mallocSizeOf(this);
+    rtSizes->object = mallocSizeOf(this);
 
-    runtime->atomsTable = atoms.sizeOfExcludingThis(mallocSizeOf);
+    rtSizes->atomsTable = atoms.sizeOfExcludingThis(mallocSizeOf);
 
-    runtime->contexts = 0;
+    rtSizes->contexts = 0;
     for (ContextIter acx(this); !acx.done(); acx.next())
-        runtime->contexts += acx->sizeOfIncludingThis(mallocSizeOf);
+        rtSizes->contexts += acx->sizeOfIncludingThis(mallocSizeOf);
 
-    runtime->dtoa = mallocSizeOf(dtoaState);
+    rtSizes->dtoa = mallocSizeOf(dtoaState);
 
-    runtime->temporary = tempLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
+    rtSizes->temporary = tempLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
 
-    if (execAlloc_)
-        execAlloc_->sizeOfCode(&runtime->mjitCode, &runtime->regexpCode,
-                               &runtime->unusedCodeMemory);
-    else
-        runtime->mjitCode = runtime->regexpCode = runtime->unusedCodeMemory = 0;
-
-    runtime->stackCommitted = stackSpace.sizeOfCommitted();
+    if (execAlloc_) {
+        execAlloc_->sizeOfCode(&rtSizes->jaegerCode, &rtSizes->ionCode, &rtSizes->regexpCode,
+                               &rtSizes->unusedCode);
+    } else {
+        rtSizes->jaegerCode = 0;
+        rtSizes->ionCode    = 0;
+        rtSizes->regexpCode = 0;
+        rtSizes->unusedCode = 0;
+    }
 
-    runtime->gcMarker = gcMarker.sizeOfExcludingThis(mallocSizeOf);
+    rtSizes->stackCommitted = stackSpace.sizeOfCommitted();
 
-    runtime->mathCache = mathCache_ ? mathCache_->sizeOfIncludingThis(mallocSizeOf) : 0;
+    rtSizes->gcMarker = gcMarker.sizeOfExcludingThis(mallocSizeOf);
+
+    rtSizes->mathCache = mathCache_ ? mathCache_->sizeOfIncludingThis(mallocSizeOf) : 0;
 
-    runtime->scriptFilenames = scriptFilenameTable.sizeOfExcludingThis(mallocSizeOf);
+    rtSizes->scriptFilenames = scriptFilenameTable.sizeOfExcludingThis(mallocSizeOf);
     for (ScriptFilenameTable::Range r = scriptFilenameTable.all(); !r.empty(); r.popFront())
-        runtime->scriptFilenames += mallocSizeOf(r.front());
+        rtSizes->scriptFilenames += mallocSizeOf(r.front());
 
-    runtime->compartmentObjects = 0;
+    rtSizes->compartmentObjects = 0;
     CallbackData data(mallocSizeOf);
     JS_IterateCompartments(this, &data, CompartmentCallback);
-    runtime->compartmentObjects = data.n;
+    rtSizes->compartmentObjects = data.n;
 }
 
 size_t
 JSRuntime::sizeOfExplicitNonHeap()
 {
     if (!execAlloc_)
         return 0;
 
-    size_t mjitCode, regexpCode, unusedCodeMemory;
-    execAlloc_->sizeOfCode(&mjitCode, &regexpCode, &unusedCodeMemory);
-    return mjitCode + regexpCode + unusedCodeMemory + stackSpace.sizeOfCommitted();
+    size_t jaegerCode, ionCode, regexpCode, unusedCode;
+    execAlloc_->sizeOfCode(&jaegerCode, &ionCode, &regexpCode, &unusedCode);
+    return jaegerCode + ionCode + regexpCode + unusedCode + stackSpace.sizeOfCommitted();
 }
 
 void
 JSRuntime::triggerOperationCallback()
 {
     /*
      * Invalidate ionTop to trigger its over-recursion check. Note this must be
      * set before interrupt, to avoid racing with js_InvokeOperationCallback,
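
(Illustrative sketch, not part of the patch: how JSRuntime::sizeOfExplicitNonHeap() above consumes the four-way sizeOfCode() breakdown. MiniAllocator and its placeholder byte counts are assumptions for illustration only.)

    #include <stddef.h>
    #include <stdio.h>

    // Hypothetical stand-in for the executable allocator measured above.
    struct MiniAllocator {
        void sizeOfCode(size_t *jaeger, size_t *ion, size_t *regexp, size_t *unused) const {
            *jaeger = 4096; *ion = 2048; *regexp = 1024; *unused = 512;  // placeholder figures
        }
    };

    // Mirrors the JSRuntime::sizeOfExplicitNonHeap() pattern: report 0 if no
    // executable allocator was ever created, otherwise sum the four code
    // categories plus the committed portion of the stack.
    size_t
    SizeOfExplicitNonHeap(const MiniAllocator *execAlloc, size_t stackCommitted)
    {
        if (!execAlloc)
            return 0;

        size_t jaegerCode, ionCode, regexpCode, unusedCode;
        execAlloc->sizeOfCode(&jaegerCode, &ionCode, &regexpCode, &unusedCode);
        return jaegerCode + ionCode + regexpCode + unusedCode + stackCommitted;
    }

    int main()
    {
        MiniAllocator alloc;
        printf("explicit non-heap: %zu bytes\n", SizeOfExplicitNonHeap(&alloc, 32768));
        return 0;
    }
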
--- a/js/src/jsmemorymetrics.cpp
+++ b/js/src/jsmemorymetrics.cpp
@@ -48,16 +48,17 @@ CompartmentStats::gcHeapThingsSize()
     n += gcHeapObjectsNonFunction;
     n += gcHeapObjectsFunction;
     n += gcHeapStrings;
     n += gcHeapShapesTree;
     n += gcHeapShapesDict;
     n += gcHeapShapesBase;
     n += gcHeapScripts;
     n += gcHeapTypeObjects;
+    n += gcHeapIonCodes;
 #if JS_HAS_XML_SUPPORT
     n += gcHeapXML;
 #endif
 
 #ifdef DEBUG
     size_t n2 = n;
     n2 += gcHeapArenaAdmin;
     n2 += gcHeapUnusedGcThings;
@@ -177,38 +178,37 @@ StatsCellCallback(JSRuntime *rt, void *d
         break;
     }
     case JSTRACE_SCRIPT:
     {
         JSScript *script = static_cast<JSScript *>(thing);
         cStats->gcHeapScripts += thingSize;
         cStats->scriptData += script->sizeOfData(rtStats->mallocSizeOf);
 #ifdef JS_METHODJIT
-        cStats->mjitData += script->sizeOfJitScripts(rtStats->mallocSizeOf);
+        cStats->jaegerData += script->sizeOfJitScripts(rtStats->mallocSizeOf);
 # ifdef JS_ION
         if (script->hasIonScript())
-            cStats->mjitData += script->ion->size();
+            cStats->ionData += script->ion->sizeOfIncludingThis(rtStats->mallocSizeOf);
 # endif
 #endif
 
         ScriptSource *ss = script->scriptSource();
         SourceSet::AddPtr entry = closure->seenSources.lookupForAdd(ss);
         if (!entry) {
             closure->seenSources.add(entry, ss); // Not much to be done on failure.
             rtStats->runtime.scriptSources += ss->sizeOfIncludingThis(rtStats->mallocSizeOf);
         }
         break;
     }
     case JSTRACE_IONCODE:
     {
 #ifdef JS_METHODJIT
 # ifdef JS_ION
-        ion::IonCode *code = static_cast<ion::IonCode *>(thing);
-        cStats->gcHeapScripts += thingSize;
-        cStats->mjitData += code->bufferSize();
+        cStats->gcHeapIonCodes += thingSize;
+        // The code for a script is counted in ExecutableAllocator::sizeOfCode().
 # endif
 #endif
         break;
     }
     case JSTRACE_TYPE_OBJECT:
     {
         types::TypeObject *obj = static_cast<types::TypeObject *>(thing);
         cStats->gcHeapTypeObjects += thingSize;
--- a/js/src/methodjit/BaseCompiler.h
+++ b/js/src/methodjit/BaseCompiler.h
@@ -165,17 +165,17 @@ class NativeStubLinker : public LinkerHe
   public:
 #ifdef JS_CPU_X64
     typedef JSC::MacroAssembler::DataLabelPtr FinalJump;
 #else
     typedef JSC::MacroAssembler::Jump FinalJump;
 #endif
 
     NativeStubLinker(Assembler &masm, JITChunk *chunk, jsbytecode *pc, FinalJump done)
-        : LinkerHelper(masm, JSC::METHOD_CODE), chunk(chunk), pc(pc), done(done)
+        : LinkerHelper(masm, JSC::JAEGER_CODE), chunk(chunk), pc(pc), done(done)
     {}
 
     bool init(JSContext *cx);
 
     void patchJump(JSC::CodeLocationLabel target) {
 #ifdef JS_CPU_X64
         patch(done, target);
 #else
--- a/js/src/methodjit/Compiler.cpp
+++ b/js/src/methodjit/Compiler.cpp
@@ -922,17 +922,17 @@ MakeJITScript(JSContext *cx, JSScript *s
     sps.setPushed(script);
     for (unsigned i = 0; i < jit->nedges; i++) {
         pc = script->code + jitEdges[i].target;
         jitEdges[i].shimLabel = (void *) masm.distanceOf(masm.label());
         masm.move(JSC::MacroAssembler::ImmPtr(&jitEdges[i]), Registers::ArgReg1);
         masm.fallibleVMCall(true, JS_FUNC_TO_DATA_PTR(void *, stubs::CrossChunkShim),
                             pc, NULL, script->nfixed + analysis->getCode(pc).stackDepth);
     }
-    LinkerHelper linker(masm, JSC::METHOD_CODE);
+    LinkerHelper linker(masm, JSC::JAEGER_CODE);
     JSC::ExecutablePool *ep = linker.init(cx);
     if (!ep)
         return NULL;
     jit->shimPool = ep;
 
     masm.finalize(linker);
     uint8_t *shimCode = (uint8_t *) linker.finalizeCodeAddendum().executableAddress();
 
@@ -1393,28 +1393,28 @@ mjit::Compiler::finishThisUp()
                       jumpTableEdges.length() * sizeof(void *);
 
     Vector<ChunkJumpTableEdge> chunkJumps(cx);
     if (!chunkJumps.reserve(jumpTableEdges.length()))
         return Compile_Error;
 
     JSC::ExecutableAllocator &execAlloc = cx->runtime->execAlloc();
     JSC::ExecutablePool *execPool;
-    uint8_t *result = (uint8_t *)execAlloc.alloc(codeSize, &execPool, JSC::METHOD_CODE);
+    uint8_t *result = (uint8_t *)execAlloc.alloc(codeSize, &execPool, JSC::JAEGER_CODE);
     if (!result) {
         js_ReportOutOfMemory(cx);
         return Compile_Error;
     }
     JS_ASSERT(execPool);
     JSC::ExecutableAllocator::makeWritable(result, codeSize);
     masm.executableCopy(result);
     stubcc.masm.executableCopy(result + masm.size());
 
-    JSC::LinkBuffer fullCode(result, codeSize, JSC::METHOD_CODE);
-    JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::METHOD_CODE);
+    JSC::LinkBuffer fullCode(result, codeSize, JSC::JAEGER_CODE);
+    JSC::LinkBuffer stubCode(result + masm.size(), stubcc.size(), JSC::JAEGER_CODE);
 
     JS_ASSERT(!loop);
 
     size_t nNmapLive = loopEntries.length();
     for (size_t i = outerChunk.begin; i < outerChunk.end; i++) {
         Bytecode *opinfo = analysis->maybeCode(i);
         if (opinfo && opinfo->safePoint)
             nNmapLive++;
--- a/js/src/methodjit/MonoIC.cpp
+++ b/js/src/methodjit/MonoIC.cpp
@@ -168,17 +168,17 @@ ic::SetGlobalName(VMFrame &f, ic::SetGlo
 }
 
 class EqualityICLinker : public LinkerHelper
 {
     VMFrame &f;
 
   public:
     EqualityICLinker(Assembler &masm, VMFrame &f)
-        : LinkerHelper(masm, JSC::METHOD_CODE), f(f)
+        : LinkerHelper(masm, JSC::JAEGER_CODE), f(f)
     { }
 
     bool init(JSContext *cx) {
         JSC::ExecutablePool *pool = LinkerHelper::init(cx);
         if (!pool)
             return false;
         JS_ASSERT(!f.regs.inlined());
         if (!f.chunk()->execPools.append(pool)) {
@@ -907,17 +907,17 @@ class CallCompiler : public BaseCompiler
 
         /* Get nmap[ARITY], set argc, call. */
         if (ic.frameSize.isStatic())
             masm.move(Imm32(ic.frameSize.staticArgc()), JSParamReg_Argc);
         else
             masm.load32(FrameAddress(VMFrame::offsetOfDynamicArgc()), JSParamReg_Argc);
         masm.jump(t0);
 
-        LinkerHelper linker(masm, JSC::METHOD_CODE);
+        LinkerHelper linker(masm, JSC::JAEGER_CODE);
         JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ScriptStub);
         if (!ep)
             return false;
 
         if (!linker.verifyRange(f.chunk())) {
             disable();
             return true;
         }
@@ -928,17 +928,17 @@ class CallCompiler : public BaseCompiler
 
         linker.link(notCompiled, ic.nativeRejoin());
         JSC::CodeLocationLabel cs = linker.finalize(f);
 
         JaegerSpew(JSpew_PICs, "generated CALL stub %p (%lu bytes)\n", cs.executableAddress(),
                    (unsigned long) masm.size());
 
         if (f.regs.inlined()) {
-            JSC::LinkBuffer code((uint8_t *) cs.executableAddress(), masm.size(), JSC::METHOD_CODE);
+            JSC::LinkBuffer code((uint8_t *) cs.executableAddress(), masm.size(), JSC::JAEGER_CODE);
             code.patch(inlined, f.regs.inlined());
         }
 
         Repatcher repatch(f.chunk());
         repatch.relink(ic.lastOolJump(), cs);
 
         return true;
     }
@@ -992,17 +992,17 @@ class CallCompiler : public BaseCompiler
         Jump claspGuard = masm.testObjClass(Assembler::NotEqual, ic.funObjReg, t0, &FunctionClass);
 
         /* Guard that it's the same script. */
         Address scriptAddr(ic.funObjReg, JSFunction::offsetOfNativeOrScript());
         Jump funGuard = masm.branchPtr(Assembler::NotEqual, scriptAddr,
                                        ImmPtr(obj->toFunction()->script()));
         Jump done = masm.jump();
 
-        LinkerHelper linker(masm, JSC::METHOD_CODE);
+        LinkerHelper linker(masm, JSC::JAEGER_CODE);
         JSC::ExecutablePool *ep = poolForSize(linker, CallICInfo::Pool_ClosureStub);
         if (!ep)
             return false;
 
         ic.hasJsFunCheck = true;
 
         if (!linker.verifyRange(f.chunk())) {
             disable();
@@ -1437,17 +1437,17 @@ ic::GenerateArgumentCheckStub(VMFrame &f
         types::TypeSet *types = types::TypeScript::ArgTypes(script, i);
         Address address(JSFrameReg, StackFrame::offsetOfFormalArg(fun, i));
         if (!masm.generateTypeCheck(f.cx, address, types, &mismatches))
             return;
     }
 
     Jump done = masm.jump();
 
-    LinkerHelper linker(masm, JSC::METHOD_CODE);
+    LinkerHelper linker(masm, JSC::JAEGER_CODE);
     JSC::ExecutablePool *ep = linker.init(f.cx);
     if (!ep)
         return;
     jit->argsCheckPool = ep;
 
     if (!linker.verifyRange(f.chunk())) {
         jit->resetArgsCheck();
         return;
--- a/js/src/methodjit/PolyIC.cpp
+++ b/js/src/methodjit/PolyIC.cpp
@@ -41,17 +41,17 @@ static const uint32_t INLINE_PATH_LENGTH
 // This guarantees correct OOM and refcount handling for buffers while they
 // are instantiated and rooted.
 class PICLinker : public LinkerHelper
 {
     ic::BasePolyIC &ic;
 
   public:
     PICLinker(Assembler &masm, ic::BasePolyIC &ic)
-      : LinkerHelper(masm, JSC::METHOD_CODE), ic(ic)
+      : LinkerHelper(masm, JSC::JAEGER_CODE), ic(ic)
     { }
 
     bool init(JSContext *cx) {
         JSC::ExecutablePool *pool = LinkerHelper::init(cx);
         if (!pool)
             return false;
         if (!ic.addPool(cx, pool)) {
             markVerified();
@@ -2790,17 +2790,17 @@ SetElementIC::attachHoleStub(VMFrame &f,
         masm.storeValue(vr, slot);
     }
 
     Jump done = masm.jump();
 
     JS_ASSERT(!execPool);
     JS_ASSERT(!inlineHoleGuardPatched);
 
-    LinkerHelper buffer(masm, JSC::METHOD_CODE);
+    LinkerHelper buffer(masm, JSC::JAEGER_CODE);
     execPool = buffer.init(cx);
     if (!execPool)
         return error(cx);
 
     if (!buffer.verifyRange(f.chunk()))
         return disable(f, "code memory is out of range");
 
     // Patch all guards.
@@ -2879,17 +2879,17 @@ SetElementIC::attachTypedArray(VMFrame &
     }
 
     Jump done = masm.jump();
 
     // The stub does not rely on any pointers or numbers that could be ruined
     // by a GC or shape regenerated GC. We let this stub live for the lifetime
     // of the script.
     JS_ASSERT(!execPool);
-    LinkerHelper buffer(masm, JSC::METHOD_CODE);
+    LinkerHelper buffer(masm, JSC::JAEGER_CODE);
     execPool = buffer.init(cx);
     if (!execPool)
         return error(cx);
 
     if (!buffer.verifyRange(f.chunk()))
         return disable(f, "code memory is out of range");
 
     // Note that the out-of-bounds path simply does nothing.
--- a/js/src/methodjit/StubCompiler.cpp
+++ b/js/src/methodjit/StubCompiler.cpp
@@ -172,18 +172,18 @@ StubCompiler::emitStubCall(void *ptr, Re
 
     cc.addCallSite(site);
     return cl;
 }
 
 void
 StubCompiler::fixCrossJumps(uint8_t *ncode, size_t offset, size_t total)
 {
-    JSC::LinkBuffer fast(ncode, total, JSC::METHOD_CODE);
-    JSC::LinkBuffer slow(ncode + offset, total - offset, JSC::METHOD_CODE);
+    JSC::LinkBuffer fast(ncode, total, JSC::JAEGER_CODE);
+    JSC::LinkBuffer slow(ncode + offset, total - offset, JSC::JAEGER_CODE);
 
     for (size_t i = 0; i < exits.length(); i++)
         fast.link(exits[i].from, slow.locationOf(exits[i].to));
 
     for (size_t i = 0; i < scriptJoins.length(); i++) {
         const CrossJumpInScript &cj = scriptJoins[i];
         slow.link(cj.from, fast.locationOf(cc.labelOf(cj.pc, cj.inlineIndex)));
     }
--- a/js/src/methodjit/TrampolineCompiler.cpp
+++ b/js/src/methodjit/TrampolineCompiler.cpp
@@ -59,17 +59,17 @@ TrampolineCompiler::compileTrampoline(Tr
 {
     Assembler masm;
 
     Label entry = masm.label();
     CHECK_RESULT(generator(masm));
     JS_ASSERT(entry.isSet());
 
     bool ok;
-    JSC::LinkBuffer buffer(&masm, execAlloc, poolp, &ok, JSC::METHOD_CODE);
+    JSC::LinkBuffer buffer(&masm, execAlloc, poolp, &ok, JSC::JAEGER_CODE);
     if (!ok)
         return false;
     masm.finalize(buffer);
     uint8_t *result = (uint8_t*)buffer.finalizeCodeAddendum().dataLocation();
     *where = JS_DATA_TO_FUNC_PTR(Trampolines::TrampolinePtr, result + masm.distanceOf(entry));
 
     return true;
 }
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -1421,16 +1421,22 @@ ReportCompartmentStats(const JS::Compart
                      "Memory on the garbage-collected JavaScript "
                      "heap that collates data common to many shapes.");
 
     CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/type-objects"),
                      cStats.gcHeapTypeObjects,
                      "Memory on the garbage-collected JavaScript "
                      "heap that holds type inference information.");
 
+    CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/ion-codes"),
+                     cStats.gcHeapIonCodes,
+                     "Memory on the garbage-collected JavaScript "
+                     "heap that holds references to executable code pools "
+                     "used by IonMonkey.");
+
 #if JS_HAS_XML_SUPPORT
     CREPORT_GC_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("gc-heap/xml"),
                      cStats.gcHeapXML,
                      "Memory on the garbage-collected JavaScript "
                      "heap that holds E4X XML objects.");
 #endif
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("objects/slots"),
@@ -1489,21 +1495,25 @@ ReportCompartmentStats(const JS::Compart
                   "Memory used by compartment-wide tables storing shape "
                   "information for use during object construction.");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("script-data"),
                   cStats.scriptData,
                   "Memory allocated for JSScript bytecode and various "
                   "variable-length tables.");
 
-    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("mjit-data"),
-                  cStats.mjitData,
-                  "Memory used by the method JIT for "
-                  "compilation data: JITScripts, native maps, and inline "
-                  "cache structs.");
+    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("jaeger-data"),
+                  cStats.jaegerData,
+                  "Memory used by the JaegerMonkey JIT for compilation data: "
+                  "JITScripts, native maps, and inline cache structs.");
+
+    CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("ion-data"),
+                  cStats.ionData,
+                  "Memory used by the IonMonkey JIT for compilation data: "
+                  "IonScripts.");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("cross-compartment-wrappers"),
                   cStats.crossCompartmentWrappers,
                   "Memory used by cross-compartment wrappers.");
 
     CREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("type-inference/script-main"),
                   cStats.typeInferenceSizes.scripts,
                   "Memory used during type inference to store type sets of "
@@ -1592,28 +1602,33 @@ ReportJSRuntimeExplicitTreeStats(const J
                   "Memory used by DtoaState, which is used for converting "
                   "strings to numbers and vice versa.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/temporary"),
                   nsIMemoryReporter::KIND_HEAP, rtStats.runtime.temporary,
                   "Memory held transiently in JSRuntime and used during "
                   "compilation.  It mostly holds parse nodes.");
 
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/mjit-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.mjitCode,
-                  "Memory used by the method JIT to hold the runtime's "
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/jaeger-code"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.jaegerCode,
+                  "Memory used by the JaegerMonkey JIT to hold the runtime's "
+                  "generated code.");
+
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/ion-code"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.ionCode,
+                  "Memory used by the IonMonkey JIT to hold the runtime's "
                   "generated code.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/regexp-code"),
                   nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpCode,
                   "Memory used by the regexp JIT to hold generated code.");
 
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/unused-code-memory"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.unusedCodeMemory,
-                  "Memory allocated by the method and/or regexp JIT to hold the "
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/unused-code"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.unusedCode,
+                  "Memory allocated by one of the JITs to hold the "
                   "runtime's code, but which is currently unused.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/stack-committed"),
                   nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.stackCommitted,
                   "Memory used for the JS call stack.  This is the committed "
                   "portion of the stack; the uncommitted portion is not "
                   "measured because it hardly costs anything.");