Bug 1123237 - Part 3. Monitoring allocation and gc events in nursery and tenured heaps. r=terrence
author     Kan-Ru Chen <kanru@kanru.info>
date       Fri, 08 May 2015 11:13:51 +0800
changeset  295330 3763bf7faffd3bd88f24abcdb2082fda5a9ff85a
parent     295329 cab4a53058bde6f60238412251543688e133ef7b
child      295331 86316ff02d2bf864340a4fc97b1c243b08a89677
push id    5245
push user  raliiev@mozilla.com
push date  Thu, 29 Oct 2015 11:30:51 +0000
reviewers  terrence
bugs       1123237
milestone  43.0a1
Bug 1123237 - Part 3. Monitoring allocation and gc events in nursery and tenured heaps. r=terrence

Based on patch from Ting-Yuan Huang <laszio.bugzilla@gmail.com>
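The hunks below call into a MemProfiler interface that is not defined in this part of the series. For orientation, here is a minimal sketch of that interface as the call sites below imply it; the parameter types are inferred from the arguments passed (JSObject* arguments convert implicitly to void*) and are assumptions, not the actual declarations.

    // Hypothetical sketch of the MemProfiler hooks this patch calls.
    // Signatures are inferred from the call sites in the hunks below;
    // the real declarations are not part of this patch.
    #include <cstddef>

    struct JSRuntime;

    class MemProfiler
    {
      public:
        // Cheap query that guards the optional pass in Arena::finalize.
        static bool enabled();

        // Allocation sampling, one call per cell handed out.
        static void SampleNursery(void* thing, size_t size);   // js::Nursery::allocate
        static void SampleTenured(void* thing, size_t size);   // FreeList allocation

        // A nursery cell survived a minor GC and now lives at dst.
        static void MoveNurseryToTenured(void* src, void* dst);

        // Liveness and end-of-collection notifications.
        static void MarkTenured(void* thing);          // per marked cell in Arena::finalize
        static void MarkTenuredStart(JSRuntime* rt);   // GCRuntime::beginMarkPhase
        static void SweepNursery(JSRuntime* rt);       // js::Nursery::sweep
        static void SweepTenured(JSRuntime* rt);       // GCRuntime::finishCollection
    };
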
js/src/gc/Heap.h
js/src/gc/Marking.cpp
js/src/gc/Nursery.cpp
js/src/jsgc.cpp
--- a/js/src/gc/Heap.h
+++ b/js/src/gc/Heap.h
@@ -566,16 +566,17 @@ class FreeList
             // may be empty).
             setHead(reinterpret_cast<FreeSpan*>(thing));
         } else {
             // The free list head is empty.
             return nullptr;
         }
         head.checkSpan(thingSize);
         JS_EXTRA_POISON(reinterpret_cast<void*>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
+        MemProfiler::SampleTenured(reinterpret_cast<void*>(thing), thingSize);
         return reinterpret_cast<TenuredCell*>(thing);
     }
 };
 
 /* Every arena has a header. */
 struct ArenaHeader
 {
     friend struct FreeLists;
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -2030,28 +2030,28 @@ js::TenuringTracer::moveToTenured(JSObje
     if (!t) {
         zone->arenas.checkEmptyFreeList(dstKind);
         AutoMaybeStartBackgroundAllocation maybeStartBackgroundAllocation;
         t = zone->arenas.allocateFromArena(zone, dstKind, maybeStartBackgroundAllocation);
         if (!t)
             CrashAtUnhandlableOOM("Failed to allocate object while tenuring.");
     }
     JSObject* dst = reinterpret_cast<JSObject*>(t);
-
     tenuredSize += moveObjectToTenured(dst, src, dstKind);
 
     RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
     overlay->forwardTo(dst);
     insertIntoFixupList(overlay);
 
     if (MOZ_UNLIKELY(zone->hasDebuggers())) {
         zone->enqueueForPromotionToTenuredLogging(*dst);
     }
 
     TracePromoteToTenured(src, dst);
+    MemProfiler::MoveNurseryToTenured(src, dst);
     return dst;
 }
 
 void
 js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
 {
     for (RelocationOverlay* p = mover.head; p; p = p->next()) {
         JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
--- a/js/src/gc/Nursery.cpp
+++ b/js/src/gc/Nursery.cpp
@@ -230,16 +230,17 @@ js::Nursery::allocate(size_t size)
             return nullptr;
         setCurrentChunk(currentChunk_ + 1);
     }
 
     void* thing = (void*)position();
     position_ = position() + size;
 
     JS_EXTRA_POISON(thing, JS_ALLOCATED_NURSERY_PATTERN, size);
+    MemProfiler::SampleNursery(reinterpret_cast<void*>(thing), size);
     return thing;
 }
 
 void*
 js::Nursery::allocateBuffer(Zone* zone, uint32_t nbytes)
 {
     MOZ_ASSERT(nbytes > 0);
 
@@ -673,16 +674,17 @@ js::Nursery::sweep()
             chunk(i).trailer.runtime = runtime();
         }
 #endif
         setCurrentChunk(0);
     }
 
     /* Set current start position for isEmpty checks. */
     currentStart_ = position();
+    MemProfiler::SweepNursery(runtime());
 }
 
 void
 js::Nursery::growAllocableSpace()
 {
 #ifdef JS_GC_ZEAL
     MOZ_ASSERT_IF(runtime()->gcZeal() == ZealGenerationalGCValue,
                   numActiveChunks_ == numNurseryChunks_);
--- a/js/src/jsgc.cpp
+++ b/js/src/jsgc.cpp
@@ -482,16 +482,24 @@ Arena::finalize(FreeOp* fop, AllocKind t
     uintptr_t firstThing = thingsStart(thingKind);
     uintptr_t firstThingOrSuccessorOfLastMarkedThing = firstThing;
     uintptr_t lastThing = thingsEnd() - thingSize;
 
     FreeSpan newListHead;
     FreeSpan* newListTail = &newListHead;
     size_t nmarked = 0;
 
+    if (MOZ_UNLIKELY(MemProfiler::enabled())) {
+        for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
+            T* t = i.get<T>();
+            if (t->asTenured().isMarked())
+                MemProfiler::MarkTenured(reinterpret_cast<void*>(t));
+        }
+    }
+
     for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
         T* t = i.get<T>();
         if (t->asTenured().isMarked()) {
             uintptr_t thing = reinterpret_cast<uintptr_t>(t);
             if (thing != firstThingOrSuccessorOfLastMarkedThing) {
                 // We just finished passing over one or more free things,
                 // so record a new FreeSpan.
                 newListTail->initBoundsUnchecked(firstThingOrSuccessorOfLastMarkedThing,
@@ -3874,16 +3882,17 @@ GCRuntime::beginMarkPhase(JS::gcreason::
      * arenas. This purge call ensures that we only mark arenas that have had
      * allocations after the incremental GC started.
      */
     if (isIncremental) {
         for (GCZonesIter zone(rt); !zone.done(); zone.next())
             zone->arenas.purge();
     }
 
+    MemProfiler::MarkTenuredStart(rt);
     marker.start();
     GCMarker* gcmarker = &marker;
 
     /* For non-incremental GC the following sweep discards the jit code. */
     if (isIncremental) {
         for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
             gcstats::AutoPhase ap(stats, gcstats::PHASE_MARK_DISCARD_CODE);
             zone->discardJitCode(rt->defaultFreeOp());
@@ -5530,16 +5539,17 @@ GCRuntime::endCompactPhase(JS::gcreason:
 }
 
 void
 GCRuntime::finishCollection(JS::gcreason::Reason reason)
 {
     MOZ_ASSERT(marker.isDrained());
     marker.stop();
     clearBufferedGrayRoots();
+    MemProfiler::SweepTenured(rt);
 
     uint64_t currentTime = PRMJ_Now();
     schedulingState.updateHighFrequencyMode(lastGCTime, currentTime, tunables);
 
     for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
         if (zone->isCollecting()) {
             MOZ_ASSERT(zone->isGCFinished());
             zone->setGCState(Zone::NoGC);
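
Read end to end, the patch establishes a fixed event order over a collection cycle: allocations are sampled as they happen, a minor GC reports tenured survivors and then a nursery sweep, and a major GC brackets per-cell mark events with start and sweep notifications. The self-contained sketch below replays that order with stub hook bodies; the stubs and the driver are illustrative assumptions, not SpiderMonkey code.

    // Toy stand-in that replays the event order this patch produces.
    #include <cstdio>
    #include <cstddef>

    struct JSRuntime {};

    struct MemProfiler {
        static void SampleNursery(void*, size_t)       { puts("sample nursery alloc"); }
        static void SampleTenured(void*, size_t)       { puts("sample tenured alloc"); }
        static void MoveNurseryToTenured(void*, void*) { puts("minor GC: cell tenured"); }
        static void SweepNursery(JSRuntime*)           { puts("minor GC: sweep done"); }
        static void MarkTenuredStart(JSRuntime*)       { puts("major GC: mark start"); }
        static void MarkTenured(void*)                 { puts("major GC: cell is live"); }
        static void SweepTenured(JSRuntime*)           { puts("major GC: collection done"); }
    };

    int main() {
        JSRuntime rt;
        char a, b;
        // Mutator running: each allocation is sampled at the allocation site.
        MemProfiler::SampleNursery(&a, 16);         // js::Nursery::allocate
        MemProfiler::SampleTenured(&b, 32);         // FreeList allocation in Heap.h
        // Minor GC: survivors are reported as moves, then the nursery is reset.
        MemProfiler::MoveNurseryToTenured(&a, &b);  // TenuringTracer::moveToTenured
        MemProfiler::SweepNursery(&rt);             // js::Nursery::sweep
        // Major GC: a start event, one mark event per live cell, one end event.
        MemProfiler::MarkTenuredStart(&rt);         // GCRuntime::beginMarkPhase
        MemProfiler::MarkTenured(&b);               // Arena::finalize, when enabled()
        MemProfiler::SweepTenured(&rt);             // GCRuntime::finishCollection
    }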