Bug 1189506. Use mProcessedTime in some places instead of passing aFrom. r=karlt
author: Robert O'Callahan <robert@ocallahan.org>
Wed, 16 Sep 2015 16:23:14 +1200
changeset 295522 aecf803829055e5ac24ffec3754de86a04512232
parent 295521 fe7a41713e1db49dd50ff1ab8a6c5138910a38c4
child 295523 07ddd70da2c9ddcb1fb79ea12acd6c8838dfc662
push id: 5245
push user: raliiev@mozilla.com
push date: Thu, 29 Oct 2015 11:30:51 +0000
treeherdermozilla-beta@dac831dc1bd0 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: karlt
bugs: 1189506
milestone43.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1189506. Use mProcessedTime in some places instead of passing aFrom. r=karlt
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraphImpl.h
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -587,18 +587,17 @@ MediaStreamGraphImpl::NotifyHasCurrentDa
       MediaStreamListener* l = aStream->mListeners[j];
       l->NotifyHasCurrentData(this);
     }
     aStream->mNotifiedHasCurrentData = true;
   }
 }
 
 void
-MediaStreamGraphImpl::CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime,
-                                                  MediaStream* aStream)
+MediaStreamGraphImpl::CreateOrDestroyAudioStreams(MediaStream* aStream)
 {
   MOZ_ASSERT(mRealtime, "Should only attempt to create audio streams in real-time mode");
 
   if (aStream->mAudioOutputs.IsEmpty()) {
     aStream->mAudioOutputStreams.Clear();
     return;
   }
 
@@ -619,17 +618,17 @@ MediaStreamGraphImpl::CreateOrDestroyAud
         break;
       }
     }
     if (i < audioOutputStreamsFound.Length()) {
       audioOutputStreamsFound[i] = true;
     } else {
       MediaStream::AudioOutputStream* audioOutputStream =
         aStream->mAudioOutputStreams.AppendElement();
-      audioOutputStream->mAudioPlaybackStartTime = aAudioOutputStartTime;
+      audioOutputStream->mAudioPlaybackStartTime = mProcessedTime;
       audioOutputStream->mBlockedAudioTime = 0;
       audioOutputStream->mLastTickWritten = 0;
       audioOutputStream->mTrackID = tracks->GetID();
 
       if (!CurrentDriver()->AsAudioCallbackDriver() &&
           !CurrentDriver()->Switching()) {
         MonitorAutoLock mon(mMonitor);
         if (mLifecycleState == LIFECYCLE_RUNNING) {
@@ -644,18 +643,17 @@ MediaStreamGraphImpl::CreateOrDestroyAud
   for (int32_t i = audioOutputStreamsFound.Length() - 1; i >= 0; --i) {
     if (!audioOutputStreamsFound[i]) {
       aStream->mAudioOutputStreams.RemoveElementAt(i);
     }
   }
 }
 
 StreamTime
-MediaStreamGraphImpl::PlayAudio(MediaStream* aStream,
-                                GraphTime aFrom, GraphTime aTo)
+MediaStreamGraphImpl::PlayAudio(MediaStream* aStream, GraphTime aTo)
 {
   MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");
 
   float volume = 0.0f;
   for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
     volume += aStream->mAudioOutputs[i].mVolume;
   }
 
@@ -664,24 +662,24 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
   for (uint32_t i = 0; i < aStream->mAudioOutputStreams.Length(); ++i) {
     ticksWritten = 0;
 
     MediaStream::AudioOutputStream& audioOutput = aStream->mAudioOutputStreams[i];
     StreamBuffer::Track* track = aStream->mBuffer.FindTrack(audioOutput.mTrackID);
     AudioSegment* audio = track->Get<AudioSegment>();
     AudioSegment output;
 
-    StreamTime offset = GraphTimeToStreamTimeWithBlocking(aStream, aFrom);
+    StreamTime offset = GraphTimeToStreamTimeWithBlocking(aStream, mProcessedTime);
 
     // We don't update aStream->mBufferStartTime here to account for time spent
     // blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
     // the blocked period has completed. But we do need to make sure we play
     // from the right offsets in the stream buffer, even if we've already
     // written silence for some amount of blocked time after the current time.
-    GraphTime t = aFrom;
+    GraphTime t = mProcessedTime;
     while (t < aTo) {
       bool blocked = t >= aStream->mStartBlocking;
       GraphTime end = blocked ? aTo : aStream->mStartBlocking;
       NS_ASSERTION(end <= aTo, "mStartBlocking is wrong!");
 
       // Check how many ticks of sound we can provide if we are blocked some
       // time in the middle of this cycle.
       StreamTime toWrite = end - t;
@@ -1080,17 +1078,17 @@ MediaStreamGraphImpl::UpdateGraph(GraphT
   // computed in next loop.
   if (ensureNextIteration ||
       aEndBlockingDecisions == mStateComputedTime) {
     EnsureNextIteration();
   }
 }
 
 void
-MediaStreamGraphImpl::Process(GraphTime aFrom, GraphTime aTo)
+MediaStreamGraphImpl::Process(GraphTime aTo)
 {
   // Play stream contents.
   bool allBlockedForever = true;
   // True when we've done ProcessInput for all processed streams.
   bool doneAllProducing = false;
   // This is the number of frame that are written to the AudioStreams, for
   // this cycle.
   StreamTime ticksPlayed = 0;
@@ -1112,42 +1110,43 @@ MediaStreamGraphImpl::Process(GraphTime 
             if (nextStream) {
               MOZ_ASSERT(n->SampleRate() == nextStream->SampleRate(),
                          "All AudioNodeStreams in the graph must have the same sampling rate");
             }
           }
 #endif
           // Since an AudioNodeStream is present, go ahead and
           // produce audio block by block for all the rest of the streams.
-          ProduceDataForStreamsBlockByBlock(i, n->SampleRate(), aFrom, aTo);
+          ProduceDataForStreamsBlockByBlock(i, n->SampleRate(),
+              mProcessedTime, aTo);
           doneAllProducing = true;
         } else {
-          ps->ProcessInput(aFrom, aTo, ProcessedMediaStream::ALLOW_FINISH);
+          ps->ProcessInput(mProcessedTime, aTo, ProcessedMediaStream::ALLOW_FINISH);
           NS_WARN_IF_FALSE(stream->mBuffer.GetEnd() >=
                            GraphTimeToStreamTimeWithBlocking(stream, aTo),
                            "Stream did not produce enough data");
         }
       }
     }
     NotifyHasCurrentData(stream);
     // Only playback audio and video in real-time mode
     if (mRealtime) {
-      CreateOrDestroyAudioStreams(aFrom, stream);
+      CreateOrDestroyAudioStreams(stream);
       if (CurrentDriver()->AsAudioCallbackDriver()) {
-        StreamTime ticksPlayedForThisStream = PlayAudio(stream, aFrom, aTo);
+        StreamTime ticksPlayedForThisStream = PlayAudio(stream, aTo);
         if (!ticksPlayed) {
           ticksPlayed = ticksPlayedForThisStream;
         } else {
           MOZ_ASSERT(!ticksPlayedForThisStream || ticksPlayedForThisStream == ticksPlayed,
               "Each stream should have the same number of frame.");
         }
       }
       PlayVideo(stream);
     }
-    if (stream->mStartBlocking > aFrom) {
+    if (stream->mStartBlocking > mProcessedTime) {
       allBlockedForever = false;
     }
   }
 
   if (CurrentDriver()->AsAudioCallbackDriver() && ticksPlayed) {
     mMixer.FinishMixing();
   }
 
@@ -1215,27 +1214,27 @@ MediaStreamGraphImpl::UpdateMainThreadSt
   return true;
 }
 
 bool
 MediaStreamGraphImpl::OneIteration(GraphTime aStateEnd)
 {
   MaybeProduceMemoryReport();
 
-  GraphTime stateFrom = mStateComputedTime;
   GraphTime stateEnd = std::min(aStateEnd, mEndTime);
   UpdateGraph(stateEnd);
 
   mStateComputedTime = stateEnd;
 
-  Process(stateFrom, stateEnd);
-
+  Process(stateEnd);
+
+  GraphTime oldProcessedTime = mProcessedTime;
   mProcessedTime = stateEnd;
 
-  UpdateCurrentTimeForStreams(stateFrom);
+  UpdateCurrentTimeForStreams(oldProcessedTime);
 
   return UpdateMainThreadState();
 }
 
 void
 MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate)
 {
   mMonitor.AssertCurrentThreadOwns();
--- a/dom/media/MediaStreamGraphImpl.h
+++ b/dom/media/MediaStreamGraphImpl.h
@@ -233,17 +233,17 @@ public:
   void SwapMessageQueues()
   {
     mMonitor.AssertCurrentThreadOwns();
     mFrontMessageQueue.SwapElements(mBackMessageQueue);
   }
   /**
    * Do all the processing and play the audio and video, ffrom aFrom to aTo.
    */
-  void Process(GraphTime aFrom, GraphTime aTo);
+  void Process(GraphTime aTo);
   /**
    * Update the consumption state of aStream to reflect whether its data
    * is needed or not.
    */
   void UpdateConsumptionState(SourceMediaStream* aStream);
   /**
    * Extract any state updates pending in aStream, and apply them.
    */
@@ -335,22 +335,22 @@ public:
   /**
    * Call NotifyHaveCurrentData on aStream's listeners.
    */
   void NotifyHasCurrentData(MediaStream* aStream);
   /**
    * If aStream needs an audio stream but doesn't have one, create it.
    * If aStream doesn't need an audio stream but has one, destroy it.
    */
-  void CreateOrDestroyAudioStreams(GraphTime aAudioOutputStartTime, MediaStream* aStream);
+  void CreateOrDestroyAudioStreams(MediaStream* aStream);
   /**
    * Queue audio (mix of stream audio and silence for blocked intervals)
    * to the audio output stream. Returns the number of frames played.
    */
-  StreamTime PlayAudio(MediaStream* aStream, GraphTime aFrom, GraphTime aTo);
+  StreamTime PlayAudio(MediaStream* aStream, GraphTime aTo);
   /**
    * Set the correct current video frame for stream aStream.
    */
   void PlayVideo(MediaStream* aStream);
   /**
    * No more data will be forthcoming for aStream. The stream will end
    * at the current buffer end point. The StreamBuffer's tracks must be
    * explicitly set to finished by the caller.