Bug 850156 - Use separate memory reporters for Ion and baseline code. r=njn,dvander
author: Jan de Mooij <jdemooij@mozilla.com>
Wed, 20 Mar 2013 11:24:17 +0100
changeset 125586 40b366dc7fad2dc222bbf2b5e2ec9005905c6fb1
parent 125585 279b55d18083d83fe609fd628b701bc9e89cff4a
child 125587 74b7f2c42ca41a51987f8a0fe875167678938f1f
push id: 25011
push user: jandemooij@gmail.com
push date: Wed, 20 Mar 2013 10:47:14 +0000
treeherder: mozilla-inbound@40b366dc7fad [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: njn, dvander
bugs: 850156
milestone: 22.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 850156 - Use separate memory reporters for Ion and baseline code. r=njn,dvander
js/public/MemoryMetrics.h
js/src/assembler/jit/ExecutableAllocator.cpp
js/src/assembler/jit/ExecutableAllocator.h
js/src/ion/CodeGenerator.cpp
js/src/ion/IonCaches.cpp
js/src/ion/IonLinker.h
js/src/ion/arm/Trampoline-arm.cpp
js/src/ion/x64/Trampoline-x64.cpp
js/src/ion/x86/Trampoline-x86.cpp
js/src/jscntxt.cpp
js/xpconnect/src/XPCJSRuntime.cpp
--- a/js/public/MemoryMetrics.h
+++ b/js/public/MemoryMetrics.h
@@ -80,16 +80,30 @@ struct TypeInferenceSizes
         this->analysisPool         += sizes.analysisPool;
         this->pendingArrays        += sizes.pendingArrays;
         this->allocationSiteTables += sizes.allocationSiteTables;
         this->arrayTypeTables      += sizes.arrayTypeTables;
         this->objectTypeTables     += sizes.objectTypeTables;
     }
 };
 
+// Data for tracking JIT-code memory usage.
+struct CodeSizes
+{
+    size_t jaeger;
+    size_t ion;
+    size_t asmJS;
+    size_t baseline;
+    size_t regexp;
+    size_t other;
+    size_t unused;
+
+    CodeSizes() { memset(this, 0, sizeof(CodeSizes)); }
+};
+
 // Holds data about a huge string (one which uses more HugeStringInfo::MinSize
 // bytes of memory), so we can report it individually.
 struct HugeStringInfo
 {
     HugeStringInfo() : length(0), size(0) { memset(&buffer, 0, sizeof(buffer)); }
 
     // A string needs to take up this many bytes of storage before we consider
     // it to be "huge".
@@ -113,27 +127,24 @@ struct RuntimeSizes
 {
     RuntimeSizes() { memset(this, 0, sizeof(RuntimeSizes)); }
 
     size_t object;
     size_t atomsTable;
     size_t contexts;
     size_t dtoa;
     size_t temporary;
-    size_t jaegerCode;
-    size_t ionCode;
-    size_t asmJSCode;
-    size_t regexpCode;
-    size_t unusedCode;
     size_t regexpData;
     size_t stack;
     size_t gcMarker;
     size_t mathCache;
     size_t scriptData;
     size_t scriptSources;
+
+    CodeSizes code;
 };
 
 struct ZoneStats
 {
     ZoneStats()
       : extra1(0),
         gcHeapArenaAdmin(0),
         gcHeapUnusedGcThings(0),
--- a/js/src/assembler/jit/ExecutableAllocator.cpp
+++ b/js/src/assembler/jit/ExecutableAllocator.cpp
@@ -1,9 +1,11 @@
-/*
+/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
+ * vim: set ts=8 sw=4 et tw=99:
+ *
  * Copyright (C) 2008 Apple Inc. All rights reserved.
  *
  * Redistribution and use in source and binary forms, with or without
  * modification, are permitted provided that the following conditions
  * are met:
  * 1. Redistributions of source code must retain the above copyright
  *    notice, this list of conditions and the following disclaimer.
  * 2. Redistributions in binary form must reproduce the above copyright
@@ -20,47 +22,51 @@
  * PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
  * OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
  * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
  */
 
 #include "ExecutableAllocator.h"
 
+#include "js/MemoryMetrics.h"
+
 #if ENABLE_ASSEMBLER
 
 #include "prmjtime.h"
 
 namespace JSC {
 
 size_t ExecutableAllocator::pageSize = 0;
 size_t ExecutableAllocator::largeAllocSize = 0;
 
 ExecutablePool::~ExecutablePool()
 {
     m_allocator->releasePoolPages(this);
 }
 
 void
-ExecutableAllocator::sizeOfCode(size_t *jaeger, size_t *ion, size_t *asmJS, size_t *regexp, size_t *unused) const
+ExecutableAllocator::sizeOfCode(JS::CodeSizes *sizes) const
 {
-    *jaeger = 0;
-    *ion    = 0;
-    *asmJS  = 0;
-    *regexp = 0;
-    *unused = 0;
+    *sizes = JS::CodeSizes();
 
     if (m_pools.initialized()) {
         for (ExecPoolHashSet::Range r = m_pools.all(); !r.empty(); r.popFront()) {
             ExecutablePool* pool = r.front();
-            *jaeger += pool->m_jaegerCodeBytes;
-            *ion    += pool->m_ionCodeBytes;
-            *asmJS  += pool->m_asmJSCodeBytes;
-            *regexp += pool->m_regexpCodeBytes;
-            *unused += pool->m_allocation.size - pool->m_jaegerCodeBytes - pool->m_ionCodeBytes
-                                               - pool->m_asmJSCodeBytes - pool->m_regexpCodeBytes;
+            sizes->jaeger   += pool->m_jaegerCodeBytes;
+            sizes->ion      += pool->m_ionCodeBytes;
+            sizes->baseline += pool->m_baselineCodeBytes;
+            sizes->asmJS    += pool->m_asmJSCodeBytes;
+            sizes->regexp   += pool->m_regexpCodeBytes;
+            sizes->other    += pool->m_otherCodeBytes;
+            sizes->unused   += pool->m_allocation.size - pool->m_jaegerCodeBytes
+                                                       - pool->m_ionCodeBytes
+                                                       - pool->m_baselineCodeBytes
+                                                       - pool->m_asmJSCodeBytes
+                                                       - pool->m_regexpCodeBytes
+                                                       - pool->m_otherCodeBytes;
         }
     }
 }
 
 }
 
 #endif // HAVE(ASSEMBLER)
--- a/js/src/assembler/jit/ExecutableAllocator.h
+++ b/js/src/assembler/jit/ExecutableAllocator.h
@@ -73,21 +73,25 @@ extern  "C" void sync_instruction_memory
 #else
 #define INITIAL_PROTECTION_FLAGS (PROT_READ | PROT_WRITE | PROT_EXEC)
 #endif
 
 #if ENABLE_ASSEMBLER
 
 //#define DEBUG_STRESS_JSC_ALLOCATOR
 
+namespace JS {
+    struct CodeSizes;
+}
+
 namespace JSC {
 
   class ExecutableAllocator;
 
-  enum CodeKind { JAEGER_CODE, ION_CODE, REGEXP_CODE, ASMJS_CODE };
+  enum CodeKind { JAEGER_CODE, ION_CODE, BASELINE_CODE, REGEXP_CODE, ASMJS_CODE, OTHER_CODE };
 
   // These are reference-counted. A new one starts with a count of 1.
   class ExecutablePool {
 
     friend class ExecutableAllocator;
 private:
     struct Allocation {
         char* pages;
@@ -103,18 +107,20 @@ private:
     Allocation m_allocation;
 
     // Reference count for automatic reclamation.
     unsigned m_refCount;
 
     // Number of bytes currently used for Method and Regexp JIT code.
     size_t m_jaegerCodeBytes;
     size_t m_ionCodeBytes;
+    size_t m_baselineCodeBytes;
     size_t m_asmJSCodeBytes;
     size_t m_regexpCodeBytes;
+    size_t m_otherCodeBytes;
 
 public:
     // Flag for downstream use, whether to try to release references to this pool.
     bool m_destroy;
 
     // GC number in which the m_destroy flag was most recently set. Used downstream to
     // remember whether m_destroy was computed for the currently active GC.
     size_t m_gcNumber;
@@ -125,17 +131,18 @@ public:
         // XXX: disabled, see bug 654820.
         //JS_ASSERT_IF(willDestroy, m_refCount == 1);
         if (--m_refCount == 0)
             js_delete(this);
     }
 
     ExecutablePool(ExecutableAllocator* allocator, Allocation a)
       : m_allocator(allocator), m_freePtr(a.pages), m_end(m_freePtr + a.size), m_allocation(a),
-        m_refCount(1), m_jaegerCodeBytes(0), m_ionCodeBytes(0), m_asmJSCodeBytes(0), m_regexpCodeBytes(0),
+        m_refCount(1), m_jaegerCodeBytes(0), m_ionCodeBytes(0), m_baselineCodeBytes(0),
+        m_asmJSCodeBytes(0), m_regexpCodeBytes(0), m_otherCodeBytes(0),
         m_destroy(false), m_gcNumber(0)
     { }
 
     ~ExecutablePool();
 
 private:
     // It should be impossible for us to roll over, because only small
     // pools have multiple holders, and they have one holder per chunk
@@ -148,21 +155,23 @@ private:
 
     void* alloc(size_t n, CodeKind kind)
     {
         JS_ASSERT(n <= available());
         void *result = m_freePtr;
         m_freePtr += n;
 
         switch (kind) {
-          case JAEGER_CODE: m_jaegerCodeBytes += n;          break;
-          case ION_CODE:    m_ionCodeBytes    += n;          break;
-          case ASMJS_CODE:  m_asmJSCodeBytes  += n;          break;
-          case REGEXP_CODE: m_regexpCodeBytes += n;          break;
-          default:          JS_NOT_REACHED("bad code kind"); break;
+          case JAEGER_CODE:   m_jaegerCodeBytes   += n;        break;
+          case ION_CODE:      m_ionCodeBytes      += n;        break;
+          case BASELINE_CODE: m_baselineCodeBytes += n;        break;
+          case ASMJS_CODE:    m_asmJSCodeBytes    += n;        break;
+          case REGEXP_CODE:   m_regexpCodeBytes   += n;        break;
+          case OTHER_CODE:    m_otherCodeBytes    += n;        break;
+          default:            JS_NOT_REACHED("bad code kind"); break;
         }
         return result;
     }
 
     size_t available() const {
         JS_ASSERT(m_end >= m_freePtr);
         return m_end - m_freePtr;
     }
@@ -250,17 +259,17 @@ public:
         JS_ASSERT(pool->m_allocation.pages);
         if (destroyCallback)
             destroyCallback(pool->m_allocation.pages, pool->m_allocation.size);
         systemRelease(pool->m_allocation);
         JS_ASSERT(m_pools.initialized());
         m_pools.remove(m_pools.lookup(pool));   // this asserts if |pool| is not in m_pools
     }
 
-    void sizeOfCode(size_t *jaeger, size_t *ion, size_t *asmJS, size_t *regexp, size_t *unused) const;
+    void sizeOfCode(JS::CodeSizes *sizes) const;
 
     void setDestroyCallback(DestroyCallback destroyCallback) {
         this->destroyCallback = destroyCallback;
     }
 
     void setRandomize(bool enabled) {
         allocBehavior = enabled ? AllocationCanRandomize : AllocationDeterministic;
     }
--- a/js/src/ion/CodeGenerator.cpp
+++ b/js/src/ion/CodeGenerator.cpp
@@ -4479,17 +4479,17 @@ CodeGenerator::generate()
 }
 
 bool
 CodeGenerator::link()
 {
     JSContext *cx = GetIonContext()->cx;
 
     Linker linker(masm);
-    IonCode *code = linker.newCode(cx);
+    IonCode *code = linker.newCode(cx, JSC::ION_CODE);
     if (!code)
         return false;
 
     // We encode safepoints after the OSI-point offsets have been determined.
     encodeSafepoints();
 
     RootedScript script(cx, gen->info().script());
     ExecutionMode executionMode = gen->info().executionMode();
--- a/js/src/ion/IonCaches.cpp
+++ b/js/src/ion/IonCaches.cpp
@@ -92,17 +92,17 @@ IonCache::CacheName(IonCache::Kind kind)
     };
     return names[kind];
 }
 
 IonCache::LinkStatus
 IonCache::linkCode(JSContext *cx, MacroAssembler &masm, IonScript *ion, IonCode **code)
 {
     Linker linker(masm);
-    *code = linker.newCode(cx);
+    *code = linker.newCode(cx, JSC::ION_CODE);
     if (!code)
         return LINK_ERROR;
 
     if (ion->invalidated())
         return CACHE_FLUSHED;
 
     return LINK_GOOD;
 }
--- a/js/src/ion/IonLinker.h
+++ b/js/src/ion/IonLinker.h
@@ -24,27 +24,30 @@ class Linker
 {
     MacroAssembler &masm;
 
     IonCode *fail(JSContext *cx) {
         js_ReportOutOfMemory(cx);
         return NULL;
     }
 
-    IonCode *newCode(JSContext *cx, IonCompartment *comp) {
+    IonCode *newCode(JSContext *cx, IonCompartment *comp, JSC::CodeKind kind) {
+        JS_ASSERT(kind == JSC::ION_CODE ||
+                  kind == JSC::BASELINE_CODE ||
+                  kind == JSC::OTHER_CODE);
         gc::AutoSuppressGC suppressGC(cx);
         if (masm.oom())
             return fail(cx);
 
         JSC::ExecutablePool *pool;
         size_t bytesNeeded = masm.bytesNeeded() + sizeof(IonCode *) + CodeAlignment;
         if (bytesNeeded >= MAX_BUFFER_SIZE)
             return fail(cx);
 
-        uint8_t *result = (uint8_t *)comp->execAlloc()->alloc(bytesNeeded, &pool, JSC::ION_CODE);
+        uint8_t *result = (uint8_t *)comp->execAlloc()->alloc(bytesNeeded, &pool, kind);
         if (!result)
             return fail(cx);
 
         // The IonCode pointer will be stored right before the code buffer.
         uint8_t *codeStart = result + sizeof(IonCode *);
 
         // Bump the code up to a nice alignment.
         codeStart = (uint8_t *)AlignBytes((uintptr_t)codeStart, CodeAlignment);
@@ -62,18 +65,18 @@ class Linker
 
   public:
     Linker(MacroAssembler &masm)
       : masm(masm)
     {
         masm.finish();
     }
 
-    IonCode *newCode(JSContext *cx) {
-        return newCode(cx, cx->compartment->ionCompartment());
+    IonCode *newCode(JSContext *cx, JSC::CodeKind kind) {
+        return newCode(cx, cx->compartment->ionCompartment(), kind);
     }
 };
 
 } // namespace ion
 } // namespace js
 
 #endif // jsion_linker_h__
 
--- a/js/src/ion/arm/Trampoline-arm.cpp
+++ b/js/src/ion/arm/Trampoline-arm.cpp
@@ -191,17 +191,17 @@ IonRuntime::generateEnterJIT(JSContext *
 
     // Get rid of the bogus r0 push.
     aasm->as_add(sp, sp, Imm8(4));
 
     // Restore non-volatile registers and return.
     GenerateReturn(masm, JS_TRUE);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateInvalidator(JSContext *cx)
 {
     // See large comment in x86's IonRuntime::generateInvalidator.
     MacroAssembler masm(cx);
     //masm.as_bkpt();
@@ -237,17 +237,17 @@ IonRuntime::generateInvalidator(JSContex
     // Remove the return address, the IonScript, the register state
     // (InvaliationBailoutStack) and the space that was allocated for the return value
     masm.ma_add(sp, Imm32(sizeof(InvalidationBailoutStack) + sizeOfRetval), sp);
     // remove the space that this frame was using before the bailout
     // (computed by InvalidationBailout)
     masm.ma_add(sp, r1, sp);
     masm.generateBailoutTail(r1);
     Linker linker(masm);
-    IonCode *code = linker.newCode(cx);
+    IonCode *code = linker.newCode(cx, JSC::OTHER_CODE);
     IonSpew(IonSpew_Invalidate, "   invalidation thunk created at %p", (void *) code->raw());
     return code;
 }
 
 IonCode *
 IonRuntime::generateArgumentsRectifier(JSContext *cx)
 {
     MacroAssembler masm(cx);
@@ -334,17 +334,17 @@ IonRuntime::generateArgumentsRectifier(J
     // sizeDescriptor
     // return address
 
     // Discard pushed arguments.
     masm.ma_alu(sp, lsr(r4, FRAMESIZE_SHIFT), sp, op_add);
 
     masm.ret();
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 static void
 GenerateBailoutThunk(MacroAssembler &masm, uint32_t frameClass)
 {
     // the stack should look like:
     // [IonFrame]
     // bailoutFrame.registersnapshot
@@ -442,27 +442,27 @@ IonRuntime::generateBailoutTable(JSConte
     Label bailout;
     for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++)
         masm.ma_bl(&bailout);
     masm.bind(&bailout);
 
     GenerateBailoutThunk(masm, frameClass);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateBailoutHandler(JSContext *cx)
 {
     MacroAssembler masm(cx);
     GenerateBailoutThunk(masm, NO_FRAME_SIZE_CLASS_ID);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
 {
     typedef MoveResolver::MoveOperand MoveOperand;
 
     JS_ASSERT(functionWrappers_);
@@ -591,17 +591,17 @@ IonRuntime::generateVMWrapper(JSContext 
     }
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(IonExitFrameLayout) + f.explicitStackSlots() * sizeof(void *)));
 
     masm.bind(&exception);
     masm.handleException();
 
     Linker linker(masm);
-    IonCode *wrapper = linker.newCode(cx);
+    IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
     if (!wrapper)
         return NULL;
 
     // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
     // use relookupOrAdd instead of add.
     if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
         return NULL;
 
@@ -629,11 +629,11 @@ IonRuntime::generatePreBarrier(JSContext
         JS_ASSERT(type == MIRType_Shape);
         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
     }
 
     masm.PopRegsInMask(save);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
--- a/js/src/ion/x64/Trampoline-x64.cpp
+++ b/js/src/ion/x64/Trampoline-x64.cpp
@@ -171,17 +171,17 @@ IonRuntime::generateEnterJIT(JSContext *
     masm.pop(r12);
     masm.pop(rbx);
 
     // Restore frame pointer and return.
     masm.pop(rbp);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateInvalidator(JSContext *cx)
 {
     AutoIonContextAlloc aica(cx);
     MacroAssembler masm(cx);
 
@@ -213,17 +213,17 @@ IonRuntime::generateInvalidator(JSContex
     masm.pop(rbx); // Get the frameSize outparam.
 
     // Pop the machine state and the dead frame.
     masm.lea(Operand(rsp, rbx, TimesOne, sizeof(InvalidationBailoutStack)), rsp);
 
     masm.generateBailoutTail(rdx);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateArgumentsRectifier(JSContext *cx)
 {
     // Do not erase the frame pointer in this function.
 
     MacroAssembler masm(cx);
@@ -299,17 +299,17 @@ IonRuntime::generateArgumentsRectifier(J
     masm.shrq(Imm32(FRAMESIZE_SHIFT), r9);
     masm.pop(r11);            // Discard calleeToken.
     masm.pop(r11);            // Discard numActualArgs.
     masm.addq(r9, rsp);       // Discard pushed arguments.
 
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 static void
 GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass)
 {
     // Push registers such that we can access them from [base + code].
     masm.reserveStack(Registers::Total * sizeof(void *));
     for (uint32_t i = 0; i < Registers::Total; i++)
@@ -354,17 +354,17 @@ IonRuntime::generateBailoutTable(JSConte
 IonCode *
 IonRuntime::generateBailoutHandler(JSContext *cx)
 {
     MacroAssembler masm;
 
     GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
 {
     typedef MoveResolver::MoveOperand MoveOperand;
 
     JS_ASSERT(!StackKeptAligned);
@@ -499,17 +499,17 @@ IonRuntime::generateVMWrapper(JSContext 
     }
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(IonExitFrameLayout) + f.explicitStackSlots() * sizeof(void *)));
 
     masm.bind(&exception);
     masm.handleException();
 
     Linker linker(masm);
-    IonCode *wrapper = linker.newCode(cx);
+    IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
     if (!wrapper)
         return NULL;
 
     // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
     // use relookupOrAdd instead of add.
     if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
         return NULL;
 
@@ -537,11 +537,11 @@ IonRuntime::generatePreBarrier(JSContext
         JS_ASSERT(type == MIRType_Shape);
         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
     }
 
     masm.PopRegsInMask(regs);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
--- a/js/src/ion/x86/Trampoline-x86.cpp
+++ b/js/src/ion/x86/Trampoline-x86.cpp
@@ -152,17 +152,17 @@ IonRuntime::generateEnterJIT(JSContext *
     masm.pop(esi);
     masm.pop(ebx);
 
     // Restore old stack frame pointer
     masm.pop(ebp);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateInvalidator(JSContext *cx)
 {
     AutoIonContextAlloc aica(cx);
     MacroAssembler masm(cx);
 
@@ -200,17 +200,17 @@ IonRuntime::generateInvalidator(JSContex
     masm.pop(ebx); // Get the frameSize outparam.
 
     // Pop the machine state and the dead frame.
     masm.lea(Operand(esp, ebx, TimesOne, sizeof(InvalidationBailoutStack)), esp);
 
     masm.generateBailoutTail(edx);
 
     Linker linker(masm);
-    IonCode *code = linker.newCode(cx);
+    IonCode *code = linker.newCode(cx, JSC::OTHER_CODE);
     IonSpew(IonSpew_Invalidate, "   invalidation thunk created at %p", (void *) code->raw());
     return code;
 }
 
 IonCode *
 IonRuntime::generateArgumentsRectifier(JSContext *cx)
 {
     MacroAssembler masm(cx);
@@ -296,17 +296,17 @@ IonRuntime::generateArgumentsRectifier(J
     // Discard pushed arguments, but not the pushed frame pointer.
     BaseIndex unwind = BaseIndex(esp, ebx, TimesOne, -int32_t(sizeof(void*)));
     masm.lea(Operand(unwind), esp);
 
     masm.pop(FramePointer);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 static void
 GenerateBailoutThunk(JSContext *cx, MacroAssembler &masm, uint32_t frameClass)
 {
     // Push registers such that we can access them from [base + code].
     masm.reserveStack(Registers::Total * sizeof(void *));
     for (uint32_t i = 0; i < Registers::Total; i++)
@@ -364,28 +364,28 @@ IonRuntime::generateBailoutTable(JSConte
     Label bailout;
     for (size_t i = 0; i < BAILOUT_TABLE_SIZE; i++)
         masm.call(&bailout);
     masm.bind(&bailout);
 
     GenerateBailoutThunk(cx, masm, frameClass);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateBailoutHandler(JSContext *cx)
 {
     MacroAssembler masm;
 
     GenerateBailoutThunk(cx, masm, NO_FRAME_SIZE_CLASS_ID);
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
 IonCode *
 IonRuntime::generateVMWrapper(JSContext *cx, const VMFunction &f)
 {
     typedef MoveResolver::MoveOperand MoveOperand;
 
     JS_ASSERT(!StackKeptAligned);
@@ -525,17 +525,17 @@ IonRuntime::generateVMWrapper(JSContext 
     }
     masm.leaveExitFrame();
     masm.retn(Imm32(sizeof(IonExitFrameLayout) + f.explicitStackSlots() * sizeof(void *)));
 
     masm.bind(&exception);
     masm.handleException();
 
     Linker linker(masm);
-    IonCode *wrapper = linker.newCode(cx);
+    IonCode *wrapper = linker.newCode(cx, JSC::OTHER_CODE);
     if (!wrapper)
         return NULL;
 
     // linker.newCode may trigger a GC and sweep functionWrappers_ so we have to
     // use relookupOrAdd instead of add.
     if (!functionWrappers_->relookupOrAdd(p, &f, wrapper))
         return NULL;
 
@@ -564,11 +564,11 @@ IonRuntime::generatePreBarrier(JSContext
         JS_ASSERT(type == MIRType_Shape);
         masm.callWithABI(JS_FUNC_TO_DATA_PTR(void *, MarkShapeFromIon));
     }
 
     masm.PopRegsInMask(save);
     masm.ret();
 
     Linker linker(masm);
-    return linker.newCode(cx);
+    return linker.newCode(cx, JSC::OTHER_CODE);
 }
 
--- a/js/src/jscntxt.cpp
+++ b/js/src/jscntxt.cpp
@@ -119,26 +119,19 @@ JSRuntime::sizeOfIncludingThis(JSMallocS
     rtSizes->contexts = 0;
     for (ContextIter acx(this); !acx.done(); acx.next())
         rtSizes->contexts += acx->sizeOfIncludingThis(mallocSizeOf);
 
     rtSizes->dtoa = mallocSizeOf(dtoaState);
 
     rtSizes->temporary = tempLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
 
-    if (execAlloc_) {
-        execAlloc_->sizeOfCode(&rtSizes->jaegerCode, &rtSizes->ionCode, &rtSizes->asmJSCode,
-                               &rtSizes->regexpCode, &rtSizes->unusedCode);
-    } else {
-        rtSizes->jaegerCode = 0;
-        rtSizes->ionCode    = 0;
-        rtSizes->asmJSCode  = 0;
-        rtSizes->regexpCode = 0;
-        rtSizes->unusedCode = 0;
-    }
+    rtSizes->code = JS::CodeSizes();
+    if (execAlloc_)
+        execAlloc_->sizeOfCode(&rtSizes->code);
 
     rtSizes->regexpData = bumpAlloc_ ? bumpAlloc_->sizeOfNonHeapData() : 0;
 
     rtSizes->stack = stackSpace.sizeOf();
 
     rtSizes->gcMarker = gcMarker.sizeOfExcludingThis(mallocSizeOf);
 
     rtSizes->mathCache = mathCache_ ? mathCache_->sizeOfIncludingThis(mallocSizeOf) : 0;
@@ -149,19 +142,20 @@ JSRuntime::sizeOfIncludingThis(JSMallocS
 }
 
 size_t
 JSRuntime::sizeOfExplicitNonHeap()
 {
     size_t n = stackSpace.sizeOf();
 
     if (execAlloc_) {
-        size_t jaegerCode, ionCode, asmJSCode, regexpCode, unusedCode;
-        execAlloc_->sizeOfCode(&jaegerCode, &ionCode, &asmJSCode, &regexpCode, &unusedCode);
-        n += jaegerCode + ionCode + asmJSCode + regexpCode + unusedCode;
+        JS::CodeSizes sizes;
+        execAlloc_->sizeOfCode(&sizes);
+        n += sizes.jaeger + sizes.ion + sizes.baseline + sizes.asmJS +
+            sizes.regexp + sizes.other + sizes.unused;
     }
 
     if (bumpAlloc_)
         n += bumpAlloc_->sizeOfNonHeapData();
 
     return n;
 }
 
--- a/js/xpconnect/src/XPCJSRuntime.cpp
+++ b/js/xpconnect/src/XPCJSRuntime.cpp
@@ -1945,38 +1945,45 @@ ReportJSRuntimeExplicitTreeStats(const J
                   "Memory used by DtoaState, which is used for converting "
                   "strings to numbers and vice versa.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/temporary"),
                   nsIMemoryReporter::KIND_HEAP, rtStats.runtime.temporary,
                   "Memory held transiently in JSRuntime and used during "
                   "compilation.  It mostly holds parse nodes.");
 
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/jaeger-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.jaegerCode,
-                  "Memory used by the JaegerMonkey JIT to hold the runtime's "
-                  "generated code.");
-
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/ion-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.ionCode,
-                  "Memory used by the IonMonkey JIT to hold the runtime's "
-                  "generated code.");
-
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/asm.js-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.asmJSCode,
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/jaeger"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.jaeger,
+                  "Memory used by the JaegerMonkey JIT to hold generated code.");
+
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/ion"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.ion,
+                  "Memory used by the IonMonkey JIT to hold generated code.");
+
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/baseline"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.baseline,
+                  "Memory used by the Baseline JIT to hold generated code.");
+
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/asm.js"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.asmJS,
                   "Memory used by AOT-compiled asm.js code.");
 
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/regexp-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpCode,
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/regexp"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.regexp,
                   "Memory used by the regexp JIT to hold generated code.");
 
-    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/unused-code"),
-                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.unusedCode,
-                  "Memory allocated by one of the JITs to hold the "
-                  "runtime's code, but which is currently unused.");
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/other"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.other,
+                  "Memory used by the JITs to hold generated code for "
+                  "wrappers and trampolines.");
+
+    RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/code/unused"),
+                  nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.code.unused,
+                  "Memory allocated by one of the JITs to hold code, "
+                  "but which is currently unused.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/regexp-data"),
                   nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpData,
                   "Memory used by the regexp JIT to hold data.");
 
     RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/stack"),
                   nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.stack,
                   "Memory used for the JS call stack.  This is the committed "