Bug 1444976 - Trace some initial real-time media stuff. r=pehrsons
☠☠ backed out by 73ac46f2c72b ☠☠
author: Paul Adenot <paul@paul.cx>
date: Tue, 20 Mar 2018 18:23:26 +0100
changeset 467814 5a183343f3076337083fee652bc79614e80b2a40
parent 467813 390ce943cda8bbb0c6ddfa6e7bfc92f415433b43
child 467815 73ac46f2c72b27d192c4f974f0177fc83bf5c673
push id: 1728
push user: jlund@mozilla.com
push date: Mon, 18 Jun 2018 21:12:27 +0000
treeherder: mozilla-release@c296fde26f5f [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: pehrsons
bugs: 1444976
milestone: 61.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1444976 - Trace some initial real-time media stuff. r=pehrsons Tracing is very cheap, we should trace a bunch of stuff. MozReview-Commit-ID: IhOyyRPXfg9
dom/media/GraphDriver.cpp
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraphImpl.h
dom/media/TrackUnionStream.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
--- a/dom/media/GraphDriver.cpp
+++ b/dom/media/GraphDriver.cpp
@@ -1,19 +1,21 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include <MediaStreamGraphImpl.h>
 #include "mozilla/dom/AudioContext.h"
 #include "mozilla/SharedThreadPool.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/Unused.h"
 #include "CubebUtils.h"
+#include "Tracing.h"
 
 #ifdef MOZ_WEBRTC
 #include "webrtc/MediaEngineWebRTC.h"
 #endif
 
 #ifdef XP_MACOSX
 #include <sys/sysctl.h>
 #endif
@@ -876,16 +878,19 @@ AudioCallbackDriver::AutoInCallback::Aut
 AudioCallbackDriver::AutoInCallback::~AutoInCallback() {
   mDriver->mInCallback = false;
 }
 
 long
 AudioCallbackDriver::DataCallback(const AudioDataValue* aInputBuffer,
                                   AudioDataValue* aOutputBuffer, long aFrames)
 {
+   TRACE_AUDIO_CALLBACK_BUDGET(GraphImpl()->TraceLogger(), aFrames, mSampleRate);
+   TRACE_AUDIO_CALLBACK(GraphImpl()->TraceLogger());
+
   // Don't add the callback until we're inited and ready
   if (!mAddedMixer) {
     mGraphImpl->mMixer.AddCallback(this);
     mAddedMixer = true;
   }
 
 #ifdef DEBUG
   // DebugOnly<> doesn't work here... it forces an initialization that will cause
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -27,16 +27,17 @@
 #include "mozilla/media/MediaUtils.h"
 #include <algorithm>
 #include "GeckoProfiler.h"
 #include "VideoFrameContainer.h"
 #include "mozilla/AbstractThread.h"
 #include "mozilla/Unused.h"
 #include "mtransport/runnable_utils.h"
 #include "VideoUtils.h"
+#include "Tracing.h"
 
 #include "webaudio/blink/DenormalDisabler.h"
 #include "webaudio/blink/HRTFDatabaseLoader.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::media;
@@ -63,16 +64,18 @@ enum SourceMediaStream::TrackCommands : 
 static nsDataHashtable<nsUint32HashKey, MediaStreamGraphImpl*> gGraphs;
 
 MediaStreamGraphImpl::~MediaStreamGraphImpl()
 {
   MOZ_ASSERT(mStreams.IsEmpty() && mSuspendedStreams.IsEmpty(),
              "All streams should have been destroyed by messages from the main thread");
   LOG(LogLevel::Debug, ("MediaStreamGraph %p destroyed", this));
   LOG(LogLevel::Debug, ("MediaStreamGraphImpl::~MediaStreamGraphImpl"));
+
+  mTraceLogger.Stop();
 }
 
 void
 MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream)
 {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
   aStream->mTracksStartTime = mProcessedTime;
 
@@ -1121,16 +1124,17 @@ MediaStreamGraphImpl::RunMessageAfterPro
   // Only one block is used for messages from the graph thread.
   MOZ_ASSERT(mFrontMessageQueue.Length() == 1);
   mFrontMessageQueue[0].mMessages.AppendElement(Move(aMessage));
 }
 
 void
 MediaStreamGraphImpl::RunMessagesInQueue()
 {
+  TRACE_AUDIO_CALLBACK(mTraceLogger);
   MOZ_ASSERT(OnGraphThread());
   // Calculate independent action times for each batch of messages (each
   // batch corresponding to an event loop task). This isolates the performance
   // of different scripts to some extent.
   for (uint32_t i = 0; i < mFrontMessageQueue.Length(); ++i) {
     nsTArray<UniquePtr<ControlMessage>>& messages = mFrontMessageQueue[i].mMessages;
 
     for (uint32_t j = 0; j < messages.Length(); ++j) {
@@ -1138,16 +1142,17 @@ MediaStreamGraphImpl::RunMessagesInQueue
     }
   }
   mFrontMessageQueue.Clear();
 }
 
 void
 MediaStreamGraphImpl::UpdateGraph(GraphTime aEndBlockingDecisions)
 {
+  TRACE_AUDIO_CALLBACK(mTraceLogger);
   MOZ_ASSERT(OnGraphThread());
   MOZ_ASSERT(aEndBlockingDecisions >= mProcessedTime);
   // The next state computed time can be the same as the previous: it
   // means the driver would have been blocking indefinitly, but the graph has
   // been woken up right after having been to sleep.
   MOZ_ASSERT(aEndBlockingDecisions >= mStateComputedTime);
 
   UpdateStreamOrder();
@@ -1233,16 +1238,17 @@ MediaStreamGraphImpl::UpdateGraph(GraphT
       aEndBlockingDecisions == mStateComputedTime) {
     EnsureNextIteration();
   }
 }
 
 void
 MediaStreamGraphImpl::Process()
 {
+  TRACE_AUDIO_CALLBACK(mTraceLogger);
   MOZ_ASSERT(OnGraphThread());
   // Play stream contents.
   bool allBlockedForever = true;
   // True when we've done ProcessInput for all processed streams.
   bool doneAllProducing = false;
   // This is the number of frame that are written to the AudioStreams, for
   // this cycle.
   StreamTime ticksPlayed = 0;
@@ -1337,16 +1343,17 @@ MediaStreamGraphImpl::UpdateMainThreadSt
 
   SwapMessageQueues();
   return true;
 }
 
 bool
 MediaStreamGraphImpl::OneIteration(GraphTime aStateEnd)
 {
+  TRACE_AUDIO_CALLBACK(mTraceLogger);
   // Changes to LIFECYCLE_RUNNING occur before starting or reviving the graph
   // thread, and so the monitor need not be held to check mLifecycleState.
   // LIFECYCLE_THREAD_NOT_STARTED is possible when shutting down offline
   // graphs that have not started.
   MOZ_DIAGNOSTIC_ASSERT(mLifecycleState <= LIFECYCLE_RUNNING);
   MOZ_ASSERT(OnGraphThread());
   WebCore::DenormalDisabler disabler;
 
@@ -1543,16 +1550,17 @@ public:
                                                bool aSourceIsMSG)
     : Runnable("MediaStreamGraphStableStateRunnable")
     , mGraph(aGraph)
     , mSourceIsMSG(aSourceIsMSG)
   {
   }
   NS_IMETHOD Run() override
   {
+    TRACE(static_cast<MediaStreamGraphImpl*>(mGraph)->TraceLogger());
     if (mGraph) {
       mGraph->RunInStableState(mSourceIsMSG);
     }
     return NS_OK;
   }
 private:
   RefPtr<MediaStreamGraphImpl> mGraph;
   bool mSourceIsMSG;
@@ -2764,16 +2772,17 @@ SourceMediaStream::SetPullEnabled(bool a
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aEnabled));
 }
 
 bool
 SourceMediaStream::PullNewData(
   StreamTime aDesiredUpToTime,
   nsTArray<RefPtr<SourceMediaStream::NotifyPullPromise>>& aPromises)
 {
+  TRACE_AUDIO_CALLBACK(GraphImpl()->TraceLogger());
   MutexAutoLock lock(mMutex);
   if (!mPullEnabled || mFinished) {
     return false;
   }
   // Compute how much stream time we'll need assuming we don't block
   // the stream at all.
   StreamTime t = GraphTimeToStreamTime(aDesiredUpToTime);
   StreamTime current = mTracks.GetEnd();
@@ -3600,29 +3609,34 @@ MediaStreamGraphImpl::MediaStreamGraphIm
   , mDetectedNotRunning(false)
   , mPostedRunInStableState(false)
   , mRealtime(aDriverRequested != OFFLINE_THREAD_DRIVER)
   , mNonRealtimeProcessing(false)
   , mStreamOrderDirty(false)
   , mLatencyLog(AsyncLatencyLogger::Get())
   , mAbstractMainThread(aMainThread)
   , mThreadPool(GetMediaThreadPool(MediaThreadType::MSG_CONTROL))
+  , mTraceLogger("MSGTracing")
   , mSelfRef(this)
   , mOutputChannels(std::min<uint32_t>(8, CubebUtils::MaxNumberOfChannels()))
 #ifdef DEBUG
   , mCanRunMessagesSynchronously(false)
 #endif
 {
   if (mRealtime) {
     if (aDriverRequested == AUDIO_THREAD_DRIVER) {
       AudioCallbackDriver* driver = new AudioCallbackDriver(this);
       mDriver = driver;
     } else {
       mDriver = new SystemClockDriver(this);
     }
+
+    // This is a noop if the logger has not been enabled.
+    mTraceLogger.Start();
+    mTraceLogger.Log("[");
   } else {
     mDriver = new OfflineClockDriver(this, MEDIA_GRAPH_TARGET_PERIOD_MS);
   }
 
   mLastMainThreadUpdate = TimeStamp::Now();
 
   RegisterWeakAsyncMemoryReporter(this);
 }
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -18,16 +18,17 @@
 #include "mozilla/UniquePtr.h"
 #include "mozilla/WeakPtr.h"
 #include "nsDataHashtable.h"
 #include "nsIMemoryReporter.h"
 #include "nsINamed.h"
 #include "nsIRunnable.h"
 #include "nsIThread.h"
 #include "nsITimer.h"
+#include "AsyncLogger.h"
 
 namespace mozilla {
 
 namespace media {
 class ShutdownTicket;
 }
 
 template <typename T>
@@ -466,16 +467,20 @@ public:
 #ifdef DEBUG
     if (!OnGraphThreadOrNotRunning()) {
       mMonitor.AssertCurrentThreadOwns();
     }
 #endif
     return mDriver;
   }
 
+  AsyncLogger& TraceLogger() {
+    return mTraceLogger;
+  }
+
   /**
    * Effectively set the new driver, while we are switching.
    * It is only safe to call this at the very end of an iteration, when there
    * has been a SwitchAtNextIteration call during the iteration. The driver
    * should return and pass the control to the new driver shortly after.
    * We can also switch from Revive() (on MainThread). Monitor must be held.
    */
   void SetCurrentDriver(GraphDriver* aDriver)
@@ -815,16 +820,17 @@ public:
   bool mStreamOrderDirty;
   /**
    * Hold a ref to the Latency logger
    */
   RefPtr<AsyncLatencyLogger> mLatencyLog;
   AudioMixer mMixer;
   const RefPtr<AbstractThread> mAbstractMainThread;
   RefPtr<SharedThreadPool> mThreadPool;
+  AsyncLogger mTraceLogger;
 
   // used to limit graph shutdown time
   // Only accessed on the main thread.
   nsCOMPtr<nsITimer> mShutdownTimer;
 
 private:
   virtual ~MediaStreamGraphImpl();
 
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -63,16 +63,17 @@ TrackUnionStream::TrackUnionStream()
         EndTrack(i);
         mTrackMap.RemoveElementAt(i);
       }
     }
     ProcessedMediaStream::RemoveInput(aPort);
   }
   void TrackUnionStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
   {
+    TRACE(GraphImpl()->TraceLogger());
     if (IsFinishedOnGraphThread()) {
       return;
     }
     AutoTArray<bool,8> mappedTracksFinished;
     AutoTArray<bool,8> mappedTracksWithMatchingInputTracks;
     for (uint32_t i = 0; i < mTrackMap.Length(); ++i) {
       mappedTracksFinished.AppendElement(true);
       mappedTracksWithMatchingInputTracks.AppendElement(false);
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -12,16 +12,17 @@
 #include "AudioConverter.h"
 #include "MediaManager.h"
 #include "MediaStreamGraphImpl.h"
 #include "MediaTrackConstraints.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/ErrorNames.h"
 #include "mtransport/runnable_utils.h"
 #include "nsAutoPtr.h"
+#include "Tracing.h"
 
 // scoped_ptr.h uses FF
 #ifdef FF
 #undef FF
 #endif
 #include "webrtc/modules/audio_device/opensl/single_rw_fifo.h"
 #include "webrtc/voice_engine/voice_engine_defines.h"
 #include "webrtc/modules/audio_processing/include/audio_processing.h"
@@ -1186,16 +1187,17 @@ MediaEngineWebRTCMicrophoneSource::Inser
 // Note this can be called back after ::Shutdown()
 void
 MediaEngineWebRTCMicrophoneSource::NotifyInputData(MediaStreamGraph* aGraph,
                                                    const AudioDataValue* aBuffer,
                                                    size_t aFrames,
                                                    TrackRate aRate,
                                                    uint32_t aChannels)
 {
+  TRACE_AUDIO_CALLBACK(static_cast<MediaStreamGraphImpl*>(aGraph)->TraceLogger());
   // If some processing is necessary, packetize and insert in the WebRTC.org
   // code. Otherwise, directly insert the mic data in the MSG, bypassing all processing.
   if (PassThrough()) {
     InsertInGraph<AudioDataValue>(aBuffer, aFrames, aChannels);
   } else {
     PacketizeAndProcess(aGraph, aBuffer, aFrames, aRate, aChannels);
   }
 }
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -45,16 +45,17 @@
 #include "nsThreadUtils.h"
 #include "nspr.h"
 #include "runnable_utils.h"
 #include "srtp.h"
 #include "transportflow.h"
 #include "transportlayer.h"
 #include "transportlayerdtls.h"
 #include "transportlayerice.h"
+#include "Tracing.h"
 
 #include "webrtc/base/bind.h"
 #include "webrtc/base/keep_ref_until_done.h"
 #include "webrtc/common_types.h"
 #include "webrtc/common_video/include/i420_buffer_pool.h"
 #include "webrtc/common_video/include/video_frame_buffer.h"
 #include "webrtc/common_video/libyuv/include/webrtc_libyuv.h"
 
@@ -1926,33 +1927,36 @@ MediaPipelineTransmit::PipelineListener:
     LOGTAG,
     "MediaPipeline::NotifyRealtimeTrackData() listener=%p, offset=%" PRId64
     ", duration=%" PRId64,
     this,
     aOffset,
     aMedia.GetDuration());
 
   if (aMedia.GetType() == MediaSegment::VIDEO) {
+    TRACE_COMMENT(static_cast<MediaStreamGraphImpl*>(aGraph)->TraceLogger(), "Video");
     // We have to call the upstream NotifyRealtimeTrackData and
     // MediaStreamVideoSink will route them to SetCurrentFrames.
     MediaStreamVideoSink::NotifyRealtimeTrackData(aGraph, aOffset, aMedia);
     return;
   }
-
+  TRACE_COMMENT(static_cast<MediaStreamGraphImpl*>(aGraph)->TraceLogger(), "Audio");
   NewData(aMedia, aGraph->GraphRate());
 }
 
 void
 MediaPipelineTransmit::PipelineListener::NotifyQueuedChanges(
   MediaStreamGraph* aGraph,
   StreamTime aOffset,
   const MediaSegment& aQueuedMedia)
 {
   CSFLogDebug(LOGTAG, "MediaPipeline::NotifyQueuedChanges()");
 
+  TRACE_COMMENT(static_cast<MediaStreamGraphImpl*>(aGraph)->TraceLogger(), "Audio");
+
   if (aQueuedMedia.GetType() == MediaSegment::VIDEO) {
     // We always get video from SetCurrentFrames().
     return;
   }
 
   if (mDirectConnect) {
     // ignore non-direct data if we're also getting direct data
     return;
@@ -2017,16 +2021,17 @@ MediaPipelineTransmit::PipelineListener:
 
     const AudioSegment* audio = static_cast<const AudioSegment*>(&aMedia);
     for (AudioSegment::ConstChunkIterator iter(*audio); !iter.IsEnded();
          iter.Next()) {
       mAudioProcessing->QueueAudioChunk(aRate, *iter, mEnabled);
     }
   } else {
     const VideoSegment* video = static_cast<const VideoSegment*>(&aMedia);
+
     for (VideoSegment::ConstChunkIterator iter(*video); !iter.IsEnded();
          iter.Next()) {
       mConverter->QueueVideoChunk(*iter, !mEnabled);
     }
   }
 }
 
 void
@@ -2229,16 +2234,17 @@ private:
   ~PipelineListener()
   {
     NS_ReleaseOnMainThreadSystemGroup("MediaPipeline::mConduit",
                                       mConduit.forget());
   }
 
   void NotifyPullImpl(StreamTime aDesiredTime)
   {
+    TRACE(mSource->GraphImpl()->TraceLogger());
     uint32_t samplesPer10ms = mRate / 100;
 
     // mSource's rate is not necessarily the same as the graph rate, since there
     // are sample-rate constraints on the inbound audio: only 16, 32, 44.1 and
     // 48kHz are supported. The audio frames we get here is going to be
     // resampled when inserted into the graph.
     TrackTicks desired = mSource->TimeToTicksRoundUp(mRate, aDesiredTime);
     TrackTicks framesNeeded = desired - mPlayedTicks;