Bug 1182737. Part 3 - make start/stop playback of DecodedStream more consistent with that of AudioSink.
author JW Wang <jwwang@mozilla.com>
Sat, 11 Jul 2015 16:41:39 +0800
changeset 252681 ed6f4b96c7fe93488e287e49a4cd98ceee17395c
parent 252680 24352a7d9b7321456c5fd109c07e7b6d87a6333d
child 252682 b917532e46cf7db2c75f3b3a31b7af48d7f5d6fc
push id 62213
push user jwwang@mozilla.com
push date Tue, 14 Jul 2015 02:59:29 +0000
treeherder mozilla-inbound@ed6f4b96c7fe
bugs 1182737
milestone 42.0a1
Bug 1182737. Part 3 - make start/stop playback of DecodedStream more consistent with that of AudioSink.
dom/media/DecodedStream.cpp
dom/media/DecodedStream.h
dom/media/MediaDecoderStateMachine.cpp
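
The patch below moves the playback start time and the MediaInfo into DecodedStream itself, so the state machine starts and stops it the same way it starts and stops AudioSink. The following standalone sketch illustrates the resulting lifecycle; it is a simplified illustration only, using std::optional and std::mutex in place of mozilla::Maybe and the ReentrantMonitor, and the class and member names are invented for the example.

    #include <cstdint>
    #include <mutex>
    #include <optional>

    struct MediaInfoStub {
      bool hasAudio = true;
      uint32_t audioRate = 44100;
    };

    class DecodedStreamSketch {
    public:
      // Mirrors MDSM::StartAudioThread: latch the start time and media info once.
      void StartPlayback(int64_t aStartTime, const MediaInfoStub& aInfo) {
        std::lock_guard<std::mutex> lock(mMutex);
        if (!mStartTime) {
          mStartTime = aStartTime;
          mInfo = aInfo;
        }
      }

      // Mirrors MDSM::StopAudioThread: clear the start time so data is no
      // longer accepted until playback is started again.
      void StopPlayback() {
        std::lock_guard<std::mutex> lock(mMutex);
        mStartTime.reset();
      }

      // The real SendData() asserts mStartTime.isSome(); the sketch just refuses.
      bool SendData(double aVolume) {
        std::lock_guard<std::mutex> lock(mMutex);
        if (!mStartTime) {
          return false;
        }
        (void)aVolume; // Would scale the audio segment before appending it.
        // ... append audio/video segments to the SourceMediaStream here ...
        return true;
      }

    private:
      std::mutex mMutex;                  // Stand-in for the ReentrantMonitor.
      std::optional<int64_t> mStartTime;  // Microseconds; empty means "stopped".
      MediaInfoStub mInfo;
    };
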
--- a/dom/media/DecodedStream.cpp
+++ b/dom/media/DecodedStream.cpp
@@ -5,17 +5,16 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "DecodedStream.h"
 #include "MediaStreamGraph.h"
 #include "AudioSegment.h"
 #include "VideoSegment.h"
 #include "MediaQueue.h"
 #include "MediaData.h"
-#include "MediaInfo.h"
 #include "SharedBuffer.h"
 #include "VideoUtils.h"
 
 namespace mozilla {
 
 class DecodedStreamGraphListener : public MediaStreamListener {
   typedef MediaStreamListener::MediaStreamGraphEvent MediaStreamGraphEvent;
 public:
@@ -191,16 +190,32 @@ DecodedStream::DecodedStream(MediaQueue<
   , mPlaying(false)
   , mAudioQueue(aAudioQueue)
   , mVideoQueue(aVideoQueue)
 {
   //
 }
 
 void
+DecodedStream::StartPlayback(int64_t aStartTime, const MediaInfo& aInfo)
+{
+  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
+  if (mStartTime.isNothing()) {
+    mStartTime.emplace(aStartTime);
+    mInfo = aInfo;
+  }
+}
+
+void DecodedStream::StopPlayback()
+{
+  ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
+  mStartTime.reset();
+}
+
+void
 DecodedStream::DestroyData()
 {
   MOZ_ASSERT(NS_IsMainThread());
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
 
   // Avoid the redundant blocking to output stream.
   if (!mData) {
     return;
@@ -342,40 +357,40 @@ DecodedStream::SetPlaying(bool aPlaying)
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   mPlaying = aPlaying;
   if (mData) {
     mData->SetPlaying(aPlaying);
   }
 }
 
 void
-DecodedStream::InitTracks(int64_t aStartTime, const MediaInfo& aInfo)
+DecodedStream::InitTracks()
 {
   GetReentrantMonitor().AssertCurrentThreadIn();
 
   if (mData->mStreamInitialized) {
     return;
   }
 
   SourceMediaStream* sourceStream = mData->mStream;
 
-  if (aInfo.HasAudio()) {
-    TrackID audioTrackId = aInfo.mAudio.mTrackId;
+  if (mInfo.HasAudio()) {
+    TrackID audioTrackId = mInfo.mAudio.mTrackId;
     AudioSegment* audio = new AudioSegment();
-    sourceStream->AddAudioTrack(audioTrackId, aInfo.mAudio.mRate, 0, audio,
+    sourceStream->AddAudioTrack(audioTrackId, mInfo.mAudio.mRate, 0, audio,
                                 SourceMediaStream::ADDTRACK_QUEUED);
-    mData->mNextAudioTime = aStartTime;
+    mData->mNextAudioTime = mStartTime.ref();
   }
 
-  if (aInfo.HasVideo()) {
-    TrackID videoTrackId = aInfo.mVideo.mTrackId;
+  if (mInfo.HasVideo()) {
+    TrackID videoTrackId = mInfo.mVideo.mTrackId;
     VideoSegment* video = new VideoSegment();
     sourceStream->AddTrack(videoTrackId, 0, video,
                            SourceMediaStream::ADDTRACK_QUEUED);
-    mData->mNextVideoTime = aStartTime;
+    mData->mNextVideoTime = mStartTime.ref();
   }
 
   sourceStream->FinishAddTracks();
   mData->mStreamInitialized = true;
 }
 
 static void
 SendStreamAudio(DecodedStreamData* aStream, int64_t aStartTime,
@@ -420,37 +435,35 @@ SendStreamAudio(DecodedStreamData* aStre
   aOutput->AppendFrames(buffer.forget(), channels, framesToWrite);
   aStream->mAudioFramesWritten += framesToWrite;
   aOutput->ApplyVolume(aVolume);
 
   aStream->mNextAudioTime = aAudio->GetEndTime();
 }
 
 void
-DecodedStream::SendAudio(int64_t aStartTime,
-                         const MediaInfo& aInfo,
-                         double aVolume, bool aIsSameOrigin)
+DecodedStream::SendAudio(double aVolume, bool aIsSameOrigin)
 {
   GetReentrantMonitor().AssertCurrentThreadIn();
 
-  if (!aInfo.HasAudio()) {
+  if (!mInfo.HasAudio()) {
     return;
   }
 
   AudioSegment output;
-  uint32_t rate = aInfo.mAudio.mRate;
+  uint32_t rate = mInfo.mAudio.mRate;
   nsAutoTArray<nsRefPtr<AudioData>,10> audio;
-  TrackID audioTrackId = aInfo.mAudio.mTrackId;
+  TrackID audioTrackId = mInfo.mAudio.mTrackId;
   SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the AudioData because AudioData
   // is ref-counted.
   mAudioQueue.GetElementsAfter(mData->mNextAudioTime, &audio);
   for (uint32_t i = 0; i < audio.Length(); ++i) {
-    SendStreamAudio(mData.get(), aStartTime, audio[i], &output, rate, aVolume);
+    SendStreamAudio(mData.get(), mStartTime.ref(), audio[i], &output, rate, aVolume);
   }
 
   if (!aIsSameOrigin) {
     output.ReplaceWithDisabled();
   }
 
   // |mNextAudioTime| is updated as we process each audio sample in
   // SendStreamAudio(). This is consistent with how |mNextVideoTime|
@@ -487,28 +500,26 @@ ZeroDurationAtLastChunk(VideoSegment& aI
   // If the start time is equal to the duration of aInput, means the last video
   // frame's duration is zero.
   StreamTime lastVideoStratTime;
   aInput.GetLastFrame(&lastVideoStratTime);
   return lastVideoStratTime == aInput.GetDuration();
 }
 
 void
-DecodedStream::SendVideo(int64_t aStartTime,
-                         const MediaInfo& aInfo,
-                         bool aIsSameOrigin)
+DecodedStream::SendVideo(bool aIsSameOrigin)
 {
   GetReentrantMonitor().AssertCurrentThreadIn();
 
-  if (!aInfo.HasVideo()) {
+  if (!mInfo.HasVideo()) {
     return;
   }
 
   VideoSegment output;
-  TrackID videoTrackId = aInfo.mVideo.mTrackId;
+  TrackID videoTrackId = mInfo.mVideo.mTrackId;
   nsAutoTArray<nsRefPtr<VideoData>, 10> video;
   SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the VideoData because VideoData
   // is ref-counted.
   mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);
 
   for (uint32_t i = 0; i < video.Length(); ++i) {
@@ -567,67 +578,68 @@ DecodedStream::SendVideo(int64_t aStartT
       sourceStream->AppendToTrack(videoTrackId, &endSegment);
     }
     sourceStream->EndTrack(videoTrackId);
     mData->mHaveSentFinishVideo = true;
   }
 }
 
 void
-DecodedStream::AdvanceTracks(int64_t aStartTime, const MediaInfo& aInfo)
+DecodedStream::AdvanceTracks()
 {
   GetReentrantMonitor().AssertCurrentThreadIn();
 
   StreamTime endPosition = 0;
 
-  if (aInfo.HasAudio()) {
+  if (mInfo.HasAudio()) {
     StreamTime audioEnd = mData->mStream->TicksToTimeRoundDown(
-        aInfo.mAudio.mRate, mData->mAudioFramesWritten);
+        mInfo.mAudio.mRate, mData->mAudioFramesWritten);
     endPosition = std::max(endPosition, audioEnd);
   }
 
-  if (aInfo.HasVideo()) {
+  if (mInfo.HasVideo()) {
     StreamTime videoEnd = mData->mStream->MicrosecondsToStreamTimeRoundDown(
-        mData->mNextVideoTime - aStartTime);
+        mData->mNextVideoTime - mStartTime.ref());
     endPosition = std::max(endPosition, videoEnd);
   }
 
   if (!mData->mHaveSentFinish) {
     mData->mStream->AdvanceKnownTracksTime(endPosition);
   }
 }
 
 bool
-DecodedStream::SendData(int64_t aStartTime,
-                        const MediaInfo& aInfo,
-                        double aVolume, bool aIsSameOrigin)
+DecodedStream::SendData(double aVolume, bool aIsSameOrigin)
 {
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
+  MOZ_ASSERT(mStartTime.isSome(), "Must be called after StartPlayback()");
 
-  InitTracks(aStartTime, aInfo);
-  SendAudio(aStartTime, aInfo, aVolume, aIsSameOrigin);
-  SendVideo(aStartTime, aInfo, aIsSameOrigin);
-  AdvanceTracks(aStartTime, aInfo);
+  InitTracks();
+  SendAudio(aVolume, aIsSameOrigin);
+  SendVideo(aIsSameOrigin);
+  AdvanceTracks();
 
-  bool finished = (!aInfo.HasAudio() || mAudioQueue.IsFinished()) &&
-                  (!aInfo.HasVideo() || mVideoQueue.IsFinished());
+  bool finished = (!mInfo.HasAudio() || mAudioQueue.IsFinished()) &&
+                  (!mInfo.HasVideo() || mVideoQueue.IsFinished());
 
   if (finished && !mData->mHaveSentFinish) {
     mData->mHaveSentFinish = true;
     mData->mStream->Finish();
   }
 
   return finished;
 }
 
 CheckedInt64
-DecodedStream::AudioEndTime(int64_t aStartTime, uint32_t aRate) const
+DecodedStream::AudioEndTime() const
 {
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
-  return aStartTime + FramesToUsecs(mData->mAudioFramesWritten, aRate);
+  MOZ_ASSERT(mStartTime.isSome(), "Must be called after StartPlayback()");
+  return mStartTime.ref() +
+         FramesToUsecs(mData->mAudioFramesWritten, mInfo.mAudio.mRate);
 }
 
 int64_t
 DecodedStream::GetPosition() const
 {
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   return mData->GetPosition();
 }
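
With the start time and sample rate now owned by DecodedStream, AudioEndTime() reduces to mStartTime plus the written audio frames converted to microseconds. Below is a rough sketch of that conversion, under the assumption that FramesToUsecs (from VideoUtils.h) performs a checked frames * 1,000,000 / rate; the helper name FramesToUsecsSketch is invented for illustration.

    #include <cstdint>
    #include <optional>

    // Returns std::nullopt on overflow, a negative frame count, or a zero
    // rate, mirroring the "checked" behaviour of the real helper.
    std::optional<int64_t> FramesToUsecsSketch(int64_t aFrames, uint32_t aRate) {
      if (aRate == 0 || aFrames < 0) {
        return std::nullopt;
      }
      if (aFrames > INT64_MAX / 1000000) {
        return std::nullopt; // aFrames * 1000000 would overflow int64_t.
      }
      return (aFrames * 1000000) / aRate;
    }

    // Example: with mStartTime = 500000 us and 44100 frames written at
    // 44100 Hz, AudioEndTime() would be 500000 + 1000000 = 1500000 us.
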
--- a/dom/media/DecodedStream.h
+++ b/dom/media/DecodedStream.h
@@ -4,27 +4,28 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef DecodedStream_h_
 #define DecodedStream_h_
 
 #include "nsRefPtr.h"
 #include "nsTArray.h"
+#include "MediaInfo.h"
 
 #include "mozilla/UniquePtr.h"
 #include "mozilla/gfx/Point.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/ReentrantMonitor.h"
+#include "mozilla/Maybe.h"
 
 namespace mozilla {
 
 class AudioData;
 class VideoData;
-class MediaInfo;
 class AudioSegment;
 class MediaStream;
 class MediaInputPort;
 class SourceMediaStream;
 class ProcessedMediaStream;
 class DecodedStream;
 class DecodedStreamGraphListener;
 class OutputStreamListener;
@@ -93,62 +94,64 @@ public:
   nsRefPtr<OutputStreamListener> mListener;
 };
 
 class DecodedStream {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecodedStream);
 public:
   DecodedStream(MediaQueue<AudioData>& aAudioQueue,
                 MediaQueue<VideoData>& aVideoQueue);
+
+  // Mimic MDSM::StartAudioThread.
+  // Must be called before any calls to SendData().
+  void StartPlayback(int64_t aStartTime, const MediaInfo& aInfo);
+  // Mimic MDSM::StopAudioThread.
+  void StopPlayback();
+
   void DestroyData();
   void RecreateData();
   void Connect(ProcessedMediaStream* aStream, bool aFinishWhenEnded);
   void Remove(MediaStream* aStream);
   void SetPlaying(bool aPlaying);
-  CheckedInt64 AudioEndTime(int64_t aStartTime, uint32_t aRate) const;
+  CheckedInt64 AudioEndTime() const;
   int64_t GetPosition() const;
   bool IsFinished() const;
 
   // Return true if stream is finished.
-  bool SendData(int64_t aStartTime,
-                const MediaInfo& aInfo,
-                double aVolume, bool aIsSameOrigin);
+  bool SendData(double aVolume, bool aIsSameOrigin);
 
 protected:
   virtual ~DecodedStream() {}
 
 private:
   ReentrantMonitor& GetReentrantMonitor() const;
   void RecreateData(MediaStreamGraph* aGraph);
   void Connect(OutputStreamData* aStream);
   nsTArray<OutputStreamData>& OutputStreams();
-  void InitTracks(int64_t aStartTime, const MediaInfo& aInfo);
-  void AdvanceTracks(int64_t aStartTime, const MediaInfo& aInfo);
-
-  void SendAudio(int64_t aStartTime,
-                 const MediaInfo& aInfo,
-                 double aVolume, bool aIsSameOrigin);
-
-  void SendVideo(int64_t aStartTime,
-                 const MediaInfo& aInfo,
-                 bool aIsSameOrigin);
+  void InitTracks();
+  void AdvanceTracks();
+  void SendAudio(double aVolume, bool aIsSameOrigin);
+  void SendVideo(bool aIsSameOrigin);
 
   UniquePtr<DecodedStreamData> mData;
   // Data about MediaStreams that are being fed by the decoder.
   nsTArray<OutputStreamData> mOutputStreams;
 
   // TODO: This is a temp solution to get rid of decoder monitor on the main
   // thread in MDSM::AddOutputStream and MDSM::RecreateDecodedStream as
   // required by bug 1146482. DecodedStream needs to release monitor before
   // calling back into MDSM functions in order to prevent deadlocks.
   //
   // Please move all capture-stream related code from MDSM into DecodedStream
   // and apply "dispatch + mirroring" to get rid of this monitor in the future.
   mutable ReentrantMonitor mMonitor;
 
   bool mPlaying;
+  Maybe<int64_t> mStartTime;
+  MediaInfo mInfo;
+
   MediaQueue<AudioData>& mAudioQueue;
   MediaQueue<VideoData>& mVideoQueue;
 };
 
 } // namespace mozilla
 
 #endif // DecodedStream_h_
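
Because mInfo is now stored by value, the header pulls in MediaInfo.h and drops the old forward declaration: a reference or pointer parameter only needs a forward declaration, while a by-value member needs the complete type. A minimal illustration with invented names:

    // Forward declarations are enough when a type is only passed by reference:
    class Widget;
    void Paint(const Widget& aWidget);

    // ...but storing a member by value needs the full definition, which is
    // why DecodedStream.h now includes MediaInfo.h:
    // class Holder {
    //   Widget mWidget;  // error: incomplete type without the definition
    // };
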
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -366,22 +366,20 @@ int64_t MediaDecoderStateMachine::GetDec
 }
 
 void MediaDecoderStateMachine::SendStreamData()
 {
   MOZ_ASSERT(OnTaskQueue());
   AssertCurrentThreadInMonitor();
   MOZ_ASSERT(!mAudioSink, "Should've been stopped in RunStateMachine()");
 
-  bool finished = mDecodedStream->SendData(
-      mStreamStartTime, mInfo, mVolume, mDecoder->IsSameOriginMedia());
+  bool finished = mDecodedStream->SendData(mVolume, mDecoder->IsSameOriginMedia());
 
   if (mInfo.HasAudio()) {
-    CheckedInt64 playedUsecs = mDecodedStream->AudioEndTime(
-        mStreamStartTime, mInfo.mAudio.mRate);
+    CheckedInt64 playedUsecs = mDecodedStream->AudioEndTime();
     if (playedUsecs.isValid()) {
       OnAudioEndTimeUpdate(playedUsecs.value());
     }
   }
 
   const auto clockTime = GetClock();
   while (true) {
     const AudioData* a = AudioQueue().PeekFront();
@@ -1089,16 +1087,22 @@ void MediaDecoderStateMachine::MaybeStar
 
   mDecoder->DispatchPlaybackStarted();
   SetPlayStartTime(TimeStamp::Now());
   MOZ_ASSERT(IsPlaying());
 
   nsresult rv = StartAudioThread();
   NS_ENSURE_SUCCESS_VOID(rv);
 
+  // Tell DecodedStream to start playback with specified start time and media
+  // info. This is consistent with how we create AudioSink in StartAudioThread().
+  if (mAudioCaptured) {
+    mDecodedStream->StartPlayback(GetMediaTime(), mInfo);
+  }
+
   mDecoder->GetReentrantMonitor().NotifyAll();
   DispatchDecodeTasksIfNeeded();
 }
 
 void MediaDecoderStateMachine::UpdatePlaybackPositionInternal(int64_t aTime)
 {
   MOZ_ASSERT(OnTaskQueue());
   SAMPLE_LOG("UpdatePlaybackPositionInternal(%lld)", aTime);
@@ -2416,16 +2420,17 @@ nsresult MediaDecoderStateMachine::RunSt
       if (mState != DECODER_STATE_COMPLETED) {
         // While we're presenting a frame we can change state. Whatever changed
         // our state should have scheduled another state machine run.
         NS_ASSERTION(IsStateMachineScheduled(), "Must have timer scheduled");
         return NS_OK;
       }
 
       StopAudioThread();
+      mDecodedStream->StopPlayback();
 
       if (mPlayState == MediaDecoder::PLAY_STATE_PLAYING &&
           !mSentPlaybackEndedEvent)
       {
         int64_t clockTime = std::max(mAudioEndTime, mVideoFrameEndTime);
         clockTime = std::max(int64_t(0), std::max(clockTime, Duration().ToMicroseconds()));
         UpdatePlaybackPosition(clockTime);
 
@@ -2457,16 +2462,17 @@ MediaDecoderStateMachine::Reset()
              mState == DECODER_STATE_SEEKING ||
              mState == DECODER_STATE_DORMANT ||
              mState == DECODER_STATE_DECODING_NONE);
 
   // Stop the audio thread. Otherwise, AudioSink might be accessing AudioQueue
   // outside of the decoder monitor while we are clearing the queue and causes
   // crash for no samples to be popped.
   StopAudioThread();
+  mDecodedStream->StopPlayback();
 
   mVideoFrameEndTime = -1;
   mDecodedVideoEndTime = -1;
   mStreamStartTime = 0;
   mAudioEndTime = -1;
   mDecodedAudioEndTime = -1;
   mAudioCompleted = false;
   AudioQueue().Reset();
@@ -3136,16 +3142,21 @@ void MediaDecoderStateMachine::DispatchA
     if (!self->mAudioCaptured) {
       // Stop the audio sink if it's running.
       self->StopAudioThread();
       self->mStreamStartTime = self->GetMediaTime();
       // Reset mAudioEndTime which will be updated as we send audio data to
       // stream. Otherwise it will remain -1 if we don't have audio.
       self->mAudioEndTime = -1;
       self->mAudioCaptured = true;
+      // Start DecodedStream if we are already playing. Otherwise it will be
+      // handled in MaybeStartPlayback().
+      if (self->IsPlaying()) {
+        self->mDecodedStream->StartPlayback(self->GetMediaTime(), self->mInfo);
+      }
       self->ScheduleStateMachine();
     }
   });
   TaskQueue()->Dispatch(r.forget());
 }
 
 void MediaDecoderStateMachine::AddOutputStream(ProcessedMediaStream* aStream,
                                                bool aFinishWhenEnded)
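
Taken together, the MediaDecoderStateMachine changes make the capture path mirror the AudioSink path: DecodedStream playback is started alongside StartAudioThread() when audio is captured, and stopped wherever StopAudioThread() is called. A condensed, self-contained sketch of that symmetry follows; the stub members and helper names are placeholders, not the real MDSM API.

    #include <cstdint>

    struct StateMachineSketch {
      bool mAudioCaptured = false;

      void MaybeStartPlayback() {
        StartAudioThread();                 // AudioSink path, as before.
        if (mAudioCaptured) {
          // New: the DecodedStream is started with the same clock position
          // and media info, right next to the AudioSink start.
          StartDecodedStream(GetMediaTime());
        }
      }

      void StopPlaybackSinks() {
        StopAudioThread();                  // Wherever the AudioSink stops...
        StopDecodedStream();                // ...the DecodedStream stops too.
      }

      // Stubs standing in for the real members and helpers.
      void StartAudioThread() {}
      void StopAudioThread() {}
      void StartDecodedStream(int64_t) {}
      void StopDecodedStream() {}
      int64_t GetMediaTime() { return 0; }
    };
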