Bug 1061046. Part 6: Remove MediaDecoderStateMachine's USECS_PER_S video rate and use the graph rate instead. r=karlt
author: Robert O'Callahan <robert@ocallahan.org>
Thu, 18 Sep 2014 11:50:01 +1200
changeset 240909 ed2ea9cfc90c5d66df6fd922d844639667306ebe
parent 240908 af358b9f4c392dac9fc828b52df2b32a06675553
child 240910 293cf7053cc66f26e5e3e833362ced46219c8602
push id: 4311
push user: raliiev@mozilla.com
push date: Mon, 12 Jan 2015 19:37:41 +0000
treeherder: mozilla-beta@150c9fed433b [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: karlt
bugs: 1061046
milestone: 36.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1061046. Part 6: Remove MediaDecoderStateMachine's USECS_PER_S video rate and use the graph rate instead. r=karlt
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaStreamGraph.h
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -336,28 +336,32 @@ void MediaDecoderStateMachine::SendStrea
   }
   aOutput->AppendFrames(buffer.forget(), channels, aAudio->mFrames);
   VERBOSE_LOG("writing %d frames of data to MediaStream for AudioData at %lld",
               aAudio->mFrames - int32_t(offset), aAudio->mTime);
   aStream->mAudioFramesWritten += aAudio->mFrames - int32_t(offset);
   aOutput->ApplyVolume(mVolume);
 }
 
-static void WriteVideoToMediaStream(layers::Image* aImage,
-                                    int64_t aDuration,
+static void WriteVideoToMediaStream(MediaStream* aStream,
+                                    layers::Image* aImage,
+                                    int64_t aEndMicroseconds,
+                                    int64_t aStartMicroseconds,
                                     const IntSize& aIntrinsicSize,
                                     VideoSegment* aOutput)
 {
   nsRefPtr<layers::Image> image = aImage;
-  aOutput->AppendFrame(image.forget(), aDuration, aIntrinsicSize);
+  StreamTime duration =
+      aStream->MicrosecondsToStreamTimeRoundDown(aEndMicroseconds) -
+      aStream->MicrosecondsToStreamTimeRoundDown(aStartMicroseconds);
+  aOutput->AppendFrame(image.forget(), duration, aIntrinsicSize);
 }
 
 static const TrackID TRACK_AUDIO = 1;
 static const TrackID TRACK_VIDEO = 2;
-static const TrackRate RATE_VIDEO = USECS_PER_S;
 
 void MediaDecoderStateMachine::SendStreamData()
 {
   NS_ASSERTION(OnDecodeThread() || OnStateMachineThread(),
                "Should be on decode thread or state machine thread");
   AssertCurrentThreadInMonitor();
   MOZ_ASSERT(mState != DECODER_STATE_DECODING_NONE);
 
@@ -390,17 +394,18 @@ void MediaDecoderStateMachine::SendStrea
       if (mInfo.HasAudio()) {
         AudioSegment* audio = new AudioSegment();
         mediaStream->AddTrack(TRACK_AUDIO, mInfo.mAudio.mRate, 0, audio);
         stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_AUDIO,
             GetStateMachineThread(), GetWakeDecoderRunnable());
       }
       if (mInfo.HasVideo()) {
         VideoSegment* video = new VideoSegment();
-        mediaStream->AddTrack(TRACK_VIDEO, RATE_VIDEO, 0, video);
+        mediaStream->AddTrack(TRACK_VIDEO,
+            mediaStream->GraphRate(), 0, video);
         stream->mStream->DispatchWhenNotEnoughBuffered(TRACK_VIDEO,
             GetStateMachineThread(), GetWakeDecoderRunnable());
       }
       stream->mStreamInitialized = true;
     }
 
     if (mInfo.HasAudio()) {
       nsAutoTArray<nsRefPtr<AudioData>,10> audio;
@@ -432,26 +437,26 @@ void MediaDecoderStateMachine::SendStrea
       VideoSegment output;
       for (uint32_t i = 0; i < video.Length(); ++i) {
         VideoData* v = video[i];
         if (stream->mNextVideoTime < v->mTime) {
           VERBOSE_LOG("writing last video to MediaStream %p for %lldus",
                       mediaStream, v->mTime - stream->mNextVideoTime);
           // Write last video frame to catch up. mLastVideoImage can be null here
           // which is fine, it just means there's no video.
-          WriteVideoToMediaStream(stream->mLastVideoImage,
-            v->mTime - stream->mNextVideoTime, stream->mLastVideoImageDisplaySize,
+          WriteVideoToMediaStream(mediaStream, stream->mLastVideoImage,
+            v->mTime, stream->mNextVideoTime, stream->mLastVideoImageDisplaySize,
               &output);
           stream->mNextVideoTime = v->mTime;
         }
         if (stream->mNextVideoTime < v->GetEndTime()) {
           VERBOSE_LOG("writing video frame %lldus to MediaStream %p for %lldus",
                       v->mTime, mediaStream, v->GetEndTime() - stream->mNextVideoTime);
-          WriteVideoToMediaStream(v->mImage,
-              v->GetEndTime() - stream->mNextVideoTime, v->mDisplay,
+          WriteVideoToMediaStream(mediaStream, v->mImage,
+              v->GetEndTime(), stream->mNextVideoTime, v->mDisplay,
               &output);
           stream->mNextVideoTime = v->GetEndTime();
           stream->mLastVideoImage = v->mImage;
           stream->mLastVideoImageDisplaySize = v->mDisplay;
         } else {
           VERBOSE_LOG("skipping writing video frame %lldus (end %lldus) to MediaStream",
                       v->mTime, v->GetEndTime());
         }
@@ -459,17 +464,18 @@ void MediaDecoderStateMachine::SendStrea
       if (output.GetDuration() > 0) {
         mediaStream->AppendToTrack(TRACK_VIDEO, &output);
       }
       if (VideoQueue().IsFinished() && !stream->mHaveSentFinishVideo) {
         mediaStream->EndTrack(TRACK_VIDEO);
         stream->mHaveSentFinishVideo = true;
       }
       endPosition = std::max(endPosition,
-          mediaStream->TicksToTimeRoundDown(RATE_VIDEO, stream->mNextVideoTime - stream->mInitialTime));
+          mediaStream->MicrosecondsToStreamTimeRoundDown(
+              stream->mNextVideoTime - stream->mInitialTime));
     }
 
     if (!stream->mHaveSentFinish) {
       stream->mStream->AdvanceKnownTracksTime(endPosition);
     }
 
     if (finished && !stream->mHaveSentFinish) {
       stream->mHaveSentFinish = true;
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -490,16 +490,20 @@ public:
   double StreamTimeToSeconds(StreamTime aTime)
   {
     return TrackTicksToSeconds(mBuffer.GraphRate(), aTime);
   }
   int64_t StreamTimeToMicroseconds(StreamTime aTime)
   {
     return TimeToTicksRoundDown(1000000, aTime);
   }
+  StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds) {
+    return (aMicroseconds*mBuffer.GraphRate())/1000000;
+  }
+
   TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
   {
     return RateConvertTicksRoundUp(aRate, mBuffer.GraphRate(), aTime);
   }
   TrackTicks TimeToTicksRoundDown(TrackRate aRate, StreamTime aTime)
   {
     return RateConvertTicksRoundDown(aRate, mBuffer.GraphRate(), aTime);
   }