Bug 1085740 - Reduce heap churn caused by TempAllocator. r=jandem.
author: Nicholas Nethercote <nnethercote@mozilla.com>
Tue, 21 Oct 2014 15:26:28 -0700
changeset 211625 352cdd69b5c6f605606ba1d6eea8a638d37442e7
parent 211624 ef5d07a500fdb98ce847a637bc50f65174a599cc
child 211626 5d7e28f80b98707eb584a843e3ea9a5d51618462
push id: 50750
push user: nnethercote@mozilla.com
push date: Wed, 22 Oct 2014 01:12:13 +0000
treeherder: mozilla-inbound@352cdd69b5c6
reviewers: jandem
bugs: 1085740
milestone: 36.0a1
js/src/asmjs/AsmJSValidate.cpp
js/src/jit/BaselineJIT.cpp
js/src/jit/Ion.cpp
js/src/jit/IonAllocPolicy.h
js/src/jit/IonAnalysis.cpp
--- a/js/src/asmjs/AsmJSValidate.cpp
+++ b/js/src/asmjs/AsmJSValidate.cpp
@@ -8476,17 +8476,17 @@ GenerateStubs(ModuleCompiler &m)
 
     return true;
 }
 
 static bool
 FinishModule(ModuleCompiler &m,
              ScopedJSDeletePtr<AsmJSModule> *module)
 {
-    LifoAlloc lifo(LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
+    LifoAlloc lifo(TempAllocator::PreferredLifoChunkSize);
     TempAllocator alloc(&lifo);
     IonContext ionContext(m.cx(), &alloc);
 
     m.masm().resetForNewCodeGenerator(alloc);
 
     if (!GenerateStubs(m))
         return false;
 
--- a/js/src/jit/BaselineJIT.cpp
+++ b/js/src/jit/BaselineJIT.cpp
@@ -50,17 +50,16 @@ BaselineScript::BaselineScript(uint32_t 
 #ifdef DEBUG
     spsOn_(false),
 #endif
     spsPushToggleOffset_(spsPushToggleOffset),
     postDebugPrologueOffset_(postDebugPrologueOffset),
     flags_(0)
 { }
 
-static const size_t BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4096;
 static const unsigned BASELINE_MAX_ARGS_LENGTH = 20000;
 
 static bool
 CheckFrame(InterpreterFrame *fp)
 {
     if (fp->isGeneratorFrame()) {
         JitSpew(JitSpew_BaselineAbort, "generator frame");
         return false;
@@ -207,20 +206,20 @@ jit::EnterBaselineAtBranch(JSContext *cx
 }
 
 MethodStatus
 jit::BaselineCompile(JSContext *cx, JSScript *script)
 {
     MOZ_ASSERT(!script->hasBaselineScript());
     MOZ_ASSERT(script->canBaselineCompile());
     MOZ_ASSERT(IsBaselineEnabled(cx));
-    LifoAlloc alloc(BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
 
     script->ensureNonLazyCanonicalFunction(cx);
 
+    LifoAlloc alloc(TempAllocator::PreferredLifoChunkSize);
     TempAllocator *temp = alloc.new_<TempAllocator>(&alloc);
     if (!temp)
         return Method_Error;
 
     IonContext ictx(cx, temp);
 
     BaselineCompiler compiler(cx, *temp, script);
     if (!compiler.init())
--- a/js/src/jit/Ion.cpp
+++ b/js/src/jit/Ion.cpp
@@ -1933,18 +1933,16 @@ AttachFinishedCompilations(JSContext *cx
                 cx->clearPendingException();
             }
         }
 
         FinishOffThreadBuilder(cx, builder);
     }
 }
 
-static const size_t BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 1 << 12;
-
 static inline bool
 OffThreadCompilationAvailable(JSContext *cx)
 {
     // Even if off thread compilation is enabled, compilation must still occur
     // on the main thread in some cases.
     //
     // Require cpuCount > 1 so that Ion compilation jobs and main-thread
     // execution are not competing for the same resources.
@@ -1999,17 +1997,17 @@ IonCompile(JSContext *cx, JSScript *scri
     MOZ_ASSERT(optimizationLevel > Optimization_DontCompile);
 
     // Make sure the script's canonical function isn't lazy. We can't de-lazify
     // it in a helper thread.
     script->ensureNonLazyCanonicalFunction(cx);
 
     TrackPropertiesForSingletonScopes(cx, script, baselineFrame);
 
-    LifoAlloc *alloc = cx->new_<LifoAlloc>(BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
+    LifoAlloc *alloc = cx->new_<LifoAlloc>(TempAllocator::PreferredLifoChunkSize);
     if (!alloc)
         return AbortReason_Alloc;
 
     ScopedJSDeletePtr<LifoAlloc> autoDelete(alloc);
 
     TempAllocator *temp = alloc->new_<TempAllocator>(alloc);
     if (!temp)
         return AbortReason_Alloc;
@@ -3372,8 +3370,13 @@ jit::JitSupportsFloatingPoint()
     return js::jit::MacroAssembler::SupportsFloatingPoint();
 }
 
 bool
 jit::JitSupportsSimd()
 {
     return js::jit::MacroAssembler::SupportsSimd();
 }
+
+// If you change these, please also change the comment in TempAllocator.
+/* static */ const size_t TempAllocator::BallastSize            = 16 * 1024;
+/* static */ const size_t TempAllocator::PreferredLifoChunkSize = 32 * 1024;
+
--- a/js/src/jit/IonAllocPolicy.h
+++ b/js/src/jit/IonAllocPolicy.h
@@ -19,16 +19,23 @@
 namespace js {
 namespace jit {
 
 class TempAllocator
 {
     LifoAllocScope lifoScope_;
 
   public:
+    // Most infallible Ion allocations are small, so we use a ballast of 16
+    // KiB. And with a ballast of 16 KiB, a chunk size of 32 KiB works well,
+    // because TempAllocators with a peak allocation size of less than 16 KiB
+    // (which is most of them) only have to allocate a single chunk.
+    static const size_t BallastSize;            // 16 KiB
+    static const size_t PreferredLifoChunkSize; // 32 KiB
+
     explicit TempAllocator(LifoAlloc *lifoAlloc)
       : lifoScope_(lifoAlloc)
     { }
 
     void *allocateInfallible(size_t bytes)
     {
         return lifoScope_.alloc().allocInfallible(bytes);
     }
@@ -53,19 +60,17 @@ class TempAllocator
     }
 
     LifoAlloc *lifoAlloc()
     {
         return &lifoScope_.alloc();
     }
 
     bool ensureBallast() {
-        // Most infallible Ion allocations are small, so we use a ballast of
-        // ~16K for now.
-        return lifoScope_.alloc().ensureUnusedApproximate(16 * 1024);
+        return lifoScope_.alloc().ensureUnusedApproximate(BallastSize);
     }
 };
 
 class IonAllocPolicy
 {
     TempAllocator &alloc_;
 
   public:
--- a/js/src/jit/IonAnalysis.cpp
+++ b/js/src/jit/IonAnalysis.cpp
@@ -2864,18 +2864,17 @@ jit::AnalyzeNewScriptDefiniteProperties(
         return true;
 
     static const uint32_t MAX_SCRIPT_SIZE = 2000;
     if (script->length() > MAX_SCRIPT_SIZE)
         return true;
 
     Vector<PropertyName *> accessedProperties(cx);
 
-    LifoAlloc alloc(types::TypeZone::TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
-
+    LifoAlloc alloc(TempAllocator::PreferredLifoChunkSize);
     TempAllocator temp(&alloc);
     IonContext ictx(cx, &temp);
 
     if (!cx->compartment()->ensureJitCompartmentExists(cx))
         return false;
 
     if (!script->hasBaselineScript()) {
         MethodStatus status = BaselineCompile(cx, script);
@@ -3104,18 +3103,17 @@ jit::AnalyzeArgumentsUsage(JSContext *cx
 
     static const uint32_t MAX_SCRIPT_SIZE = 10000;
     if (script->length() > MAX_SCRIPT_SIZE)
         return true;
 
     if (!script->ensureHasTypes(cx))
         return false;
 
-    LifoAlloc alloc(types::TypeZone::TYPE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE);
-
+    LifoAlloc alloc(TempAllocator::PreferredLifoChunkSize);
     TempAllocator temp(&alloc);
     IonContext ictx(cx, &temp);
 
     if (!cx->compartment()->ensureJitCompartmentExists(cx))
         return false;
 
     MIRGraph graph(&temp);
     InlineScriptTree *inlineScriptTree = InlineScriptTree::New(&temp, nullptr, nullptr, script);
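The new constants encode the sizing argument from the comment added to TempAllocator: with a 16 KiB ballast and 32 KiB primary chunks, any TempAllocator whose peak usage stays below 16 KiB needs exactly one chunk. The following is a minimal standalone sketch of that chunking scheme, assuming a toy bump allocator (ToyLifoAlloc is hypothetical and not the real js::LifoAlloc API); it is an illustration of the idea, not SpiderMonkey code.

    // Toy model: 32 KiB chunks (cf. PreferredLifoChunkSize) and a 16 KiB
    // ballast check (cf. ensureBallast) before a batch of "infallible"
    // bump allocations.
    #include <cstddef>
    #include <cstdio>
    #include <vector>

    class ToyLifoAlloc {
        static const size_t ChunkSize = 32 * 1024;  // analogous to PreferredLifoChunkSize
        static const size_t Ballast   = 16 * 1024;  // analogous to BallastSize
        std::vector<std::vector<char>> chunks_;
        size_t used_ = 0;                           // bytes used in the current chunk

      public:
        // Fallible step: guarantee at least Ballast bytes of headroom.
        // (A real allocator would report OOM if the new chunk couldn't be obtained.)
        bool ensureBallast() {
            if (chunks_.empty() || ChunkSize - used_ < Ballast) {
                chunks_.push_back(std::vector<char>(ChunkSize));
                used_ = 0;
            }
            return true;
        }

        // "Infallible" bump allocation, valid only while within the ballast
        // reserved by the preceding ensureBallast() call.
        void *allocInfallible(size_t bytes) {
            void *p = chunks_.back().data() + used_;
            used_ += bytes;
            return p;
        }

        size_t chunkCount() const { return chunks_.size(); }
    };

    int main() {
        ToyLifoAlloc alloc;
        alloc.ensureBallast();
        for (int i = 0; i < 100; i++)   // ~12.8 KiB total, under the 16 KiB ballast
            alloc.allocInfallible(128);
        std::printf("chunks allocated: %zu\n", alloc.chunkCount());  // prints 1
        return 0;
    }

In this model a compilation that peaks below 16 KiB triggers a single chunk allocation, whereas the old 4 KiB primary chunks (BASELINE_LIFO_ALLOC_PRIMARY_CHUNK_SIZE, BUILDER_LIFO_ALLOC_PRIMARY_CHUNK_SIZE) forced several smaller heap allocations for the same workload, which is the churn the patch aims to reduce.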