Bug 1061046. Part 13: Remove rate-conversion functions from Track. r=karlt
authorRobert O'Callahan <robert@ocallahan.org>
Thu, 18 Sep 2014 17:13:13 +1200
changeset 216602 eeda13df89b49a9bdb9410c12e50d0a5e8aedbe8
parent 216601 3565d8ea6d61bc53b3ee0b65940068adb40d6c2a
child 216603 098a4f0cd0ae3aa30bb0c3bed4a04c1d1ceca129
push id unknown
push user unknown
push date unknown
reviewers karlt
bugs 1061046
milestone 36.0a1
Bug 1061046. Part 13: Remove rate-conversion functions from Track. r=karlt Also removes some other unnecessary rate conversions.
dom/media/MediaSegment.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/StreamBuffer.cpp
dom/media/StreamBuffer.h
dom/media/TrackUnionStream.cpp
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -29,18 +29,17 @@ const TrackRate TRACK_RATE_MAX = 1 << TR
  * audio sample rate). We want to make sure that multiplying TrackTicks by
  * a TrackRate doesn't overflow, so we set its max accordingly.
  */
 typedef int64_t TrackTicks;
 const int64_t TRACK_TICKS_MAX = INT64_MAX >> TRACK_RATE_MAX_BITS;
 
 /**
  * We represent media times in 64-bit audio frame counts or ticks.
- * All audio tracks in a MediaStreamGraph have the same sample rate and all
- * streams in the graph measure time using ticks at the same audio rate.
+ * All tracks in a MediaStreamGraph have the same rate.
  */
 typedef int64_t MediaTime;
 const int64_t MEDIA_TIME_MAX = TRACK_TICKS_MAX;
 
 /**
  * A MediaSegment is a chunk of media data sequential in time. Different
  * types of data have different subclasses of MediaSegment, all inheriting
  * from MediaSegmentBase.
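
For reference, a minimal standalone sketch of the overflow bound described in the comment above; the concrete value of TRACK_RATE_MAX_BITS is assumed here purely for illustration:

#include <cstdint>
#include <limits>

// Assumed value for this sketch; the real constant is defined in MediaSegment.h.
constexpr int kTrackRateMaxBits = 20;
constexpr int64_t kTrackRateMax  = int64_t(1) << kTrackRateMaxBits;
constexpr int64_t kTrackTicksMax = std::numeric_limits<int64_t>::max() >> kTrackRateMaxBits;

// (INT64_MAX >> N) * (1 << N) <= INT64_MAX, so multiplying TrackTicks by a
// TrackRate cannot overflow int64_t even at the maximum values.
static_assert(kTrackTicksMax <= std::numeric_limits<int64_t>::max() / kTrackRateMax,
              "max ticks times max rate stays within int64_t");
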
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -257,17 +257,17 @@ MediaStreamGraphImpl::UpdateBufferSuffic
         // This track will end, so no point in firing not-enough-data
         // callbacks.
         continue;
       }
       StreamBuffer::Track* track = aStream->mBuffer.FindTrack(data->mID);
       // Note that track->IsEnded() must be false, otherwise we would have
       // removed the track from mUpdateTracks already.
       NS_ASSERTION(!track->IsEnded(), "What is this track doing here?");
-      data->mHaveEnough = track->GetEndTimeRoundDown() >= desiredEnd;
+      data->mHaveEnough = track->GetEnd() >= desiredEnd;
       if (!data->mHaveEnough) {
         runnables.MoveElementsFrom(data->mDispatchWhenNotEnough);
       }
     }
   }
 
   for (uint32_t i = 0; i < runnables.Length(); ++i) {
     runnables[i].mTarget->Dispatch(runnables[i].mRunnable, 0);
@@ -979,17 +979,17 @@ MediaStreamGraphImpl::PlayAudio(MediaStr
     AudioSegment* audio = track->Get<AudioSegment>();
     AudioSegment output;
     MOZ_ASSERT(track->GetRate() == mSampleRate);
 
     // offset and audioOutput.mLastTickWritten can differ by at most one sample,
     // because of the rounding issue. We track that to ensure we don't skip a
     // sample. One sample may be played twice, but this should not happen
     // again during an unblocked sequence of track samples.
-    TrackTicks offset = track->TimeToTicksRoundDown(GraphTimeToStreamTime(aStream, aFrom));
+    TrackTicks offset = GraphTimeToStreamTime(aStream, aFrom);
     if (audioOutput.mLastTickWritten &&
         audioOutput.mLastTickWritten != offset) {
       // If there is a global underrun of the MSG, this property won't hold, and
       // we reset the sample count tracking.
       if (offset - audioOutput.mLastTickWritten == 1) {
         offset = audioOutput.mLastTickWritten;
       }
     }
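
The rounding comment above can be read as a small self-contained rule; a sketch (helper name hypothetical) of the same snap-back logic:

#include <cstdint>

typedef int64_t TrackTicks;

// If the newly computed write position lands exactly one tick past the last
// tick written, rewind to the last tick written: at worst one sample is
// played twice, but none is skipped. Any larger gap is treated as an
// underrun and the offset is left alone so the sample-count tracking resets.
TrackTicks SnapOffsetToLastWritten(TrackTicks aOffset, TrackTicks aLastTickWritten)
{
  if (aLastTickWritten && aOffset != aLastTickWritten &&
      aOffset - aLastTickWritten == 1) {
    return aLastTickWritten;
  }
  return aOffset;
}
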
@@ -1091,37 +1091,35 @@ MediaStreamGraphImpl::PlayVideo(MediaStr
                      MillisecondsToMediaTime(5), "Graph thread slowdown?");
     framePosition = CurrentDriver()->StateComputedTime();
   }
   MOZ_ASSERT(framePosition >= aStream->mBufferStartTime, "frame position before buffer?");
   StreamTime frameBufferTime = GraphTimeToStreamTime(aStream, framePosition);
 
   TrackTicks start;
   const VideoFrame* frame = nullptr;
-  StreamBuffer::Track* track;
   for (StreamBuffer::TrackIter tracks(aStream->GetStreamBuffer(), MediaSegment::VIDEO);
        !tracks.IsEnded(); tracks.Next()) {
     VideoSegment* segment = tracks->Get<VideoSegment>();
     TrackTicks thisStart;
     const VideoFrame* thisFrame =
-      segment->GetFrameAt(tracks->TimeToTicksRoundDown(frameBufferTime), &thisStart);
+        segment->GetFrameAt(frameBufferTime, &thisStart);
     if (thisFrame && thisFrame->GetImage()) {
       start = thisStart;
       frame = thisFrame;
-      track = tracks.get();
     }
   }
   if (!frame || *frame == aStream->mLastPlayedVideoFrame)
     return;
 
   STREAM_LOG(PR_LOG_DEBUG+1, ("MediaStream %p writing video frame %p (%dx%d)",
                               aStream, frame->GetImage(), frame->GetIntrinsicSize().width,
                               frame->GetIntrinsicSize().height));
   GraphTime startTime = StreamTimeToGraphTime(aStream,
-      track->TicksToTimeRoundDown(start), INCLUDE_TRAILING_BLOCKED_INTERVAL);
+      start, INCLUDE_TRAILING_BLOCKED_INTERVAL);
   TimeStamp targetTime = CurrentDriver()->GetCurrentTimeStamp() +
       TimeDuration::FromMilliseconds(double(startTime - IterationEnd()));
   for (uint32_t i = 0; i < aStream->mVideoOutputs.Length(); ++i) {
     VideoFrameContainer* output = aStream->mVideoOutputs[i];
 
     if (frame->GetForceBlack()) {
       nsRefPtr<Image> image =
         output->GetImageContainer()->CreateImage(ImageFormat::PLANAR_YCBCR);
@@ -2517,18 +2515,17 @@ SourceMediaStream::EndAllTrackAndFinish(
 TrackTicks
 SourceMediaStream::GetBufferedTicks(TrackID aID)
 {
   StreamBuffer::Track* track  = mBuffer.FindTrack(aID);
   if (track) {
     MediaSegment* segment = track->GetSegment();
     if (segment) {
       return segment->GetDuration() -
-        track->TimeToTicksRoundDown(
-          GraphTimeToStreamTime(GraphImpl()->CurrentDriver()->StateComputedTime()));
+          GraphTimeToStreamTime(GraphImpl()->CurrentDriver()->StateComputedTime());
     }
   }
   return 0;
 }
 
 void
 SourceMediaStream::RegisterForAudioMixing()
 {
@@ -3023,18 +3020,17 @@ MediaStreamGraph::StartNonRealtimeProces
   NS_ASSERTION(NS_IsMainThread(), "main thread only");
 
   MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(this);
   NS_ASSERTION(!graph->mRealtime, "non-realtime only");
 
   if (graph->mNonRealtimeProcessing)
     return;
 
-  graph->mEndTime = graph->IterationEnd() +
-    RateConvertTicksRoundUp(graph->GraphRate(), aRate, aTicksToProcess);
+  graph->mEndTime = graph->IterationEnd() + aTicksToProcess;
   graph->mNonRealtimeProcessing = true;
   graph->EnsureRunInStableState();
 }
 
 void
 ProcessedMediaStream::AddInput(MediaInputPort* aPort)
 {
   mInputs.AppendElement(aPort);
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -486,17 +486,17 @@ public:
   GraphTime GetStreamBufferStartTime() { return mBufferStartTime; }
 
   double StreamTimeToSeconds(StreamTime aTime)
   {
     return TrackTicksToSeconds(mBuffer.GraphRate(), aTime);
   }
   int64_t StreamTimeToMicroseconds(StreamTime aTime)
   {
-    return TimeToTicksRoundDown(1000000, aTime);
+    return (aTime*1000000)/mBuffer.GraphRate();
   }
   StreamTime MicrosecondsToStreamTimeRoundDown(int64_t aMicroseconds) {
     return (aMicroseconds*mBuffer.GraphRate())/1000000;
   }
 
   TrackTicks TimeToTicksRoundUp(TrackRate aRate, StreamTime aTime)
   {
     return RateConvertTicksRoundUp(aRate, mBuffer.GraphRate(), aTime);
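
The two microsecond helpers above now convert directly at the graph rate; a standalone sketch of the same arithmetic (names and typedef widths assumed for illustration), with a worked example at a 48000 Hz graph rate:

#include <cstdint>

typedef int64_t StreamTime;   // assumed width for this sketch
typedef int32_t TrackRate;    // assumed width for this sketch

// Same arithmetic as StreamTimeToMicroseconds above: graph-rate ticks to
// microseconds, truncating toward zero for non-negative times.
int64_t StreamTimeToMicrosecondsSketch(StreamTime aTime, TrackRate aGraphRate)
{
  return (aTime * 1000000) / aGraphRate;
}

// Same arithmetic as MicrosecondsToStreamTimeRoundDown above.
StreamTime MicrosecondsToStreamTimeSketch(int64_t aMicroseconds, TrackRate aGraphRate)
{
  return (aMicroseconds * aGraphRate) / 1000000;
}

// At a 48000 Hz graph rate, 480 ticks is 10 ms: 480*1000000/48000 == 10000,
// and 10000*48000/1000000 == 480, so this pair round-trips exactly here.
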
--- a/dom/media/StreamBuffer.cpp
+++ b/dom/media/StreamBuffer.cpp
@@ -22,30 +22,30 @@ StreamBuffer::DumpTrackInfo() const
 {
   STREAM_LOG(PR_LOG_ALWAYS, ("DumpTracks: mTracksKnownTime %lld", mTracksKnownTime));
   for (uint32_t i = 0; i < mTracks.Length(); ++i) {
     Track* track = mTracks[i];
     if (track->IsEnded()) {
       STREAM_LOG(PR_LOG_ALWAYS, ("Track[%d] %d: ended", i, track->GetID()));
     } else {
       STREAM_LOG(PR_LOG_ALWAYS, ("Track[%d] %d: %lld", i, track->GetID(),
-                                 track->GetEndTimeRoundDown()));
+                                 track->GetEnd()));
     }
   }
 }
 #endif
 
 StreamTime
 StreamBuffer::GetEnd() const
 {
   StreamTime t = mTracksKnownTime;
   for (uint32_t i = 0; i < mTracks.Length(); ++i) {
     Track* track = mTracks[i];
     if (!track->IsEnded()) {
-      t = std::min(t, track->GetEndTimeRoundDown());
+      t = std::min(t, track->GetEnd());
     }
   }
   return t;
 }
 
 StreamTime
 StreamBuffer::GetAllTracksEnd() const
 {
@@ -54,17 +54,17 @@ StreamBuffer::GetAllTracksEnd() const
     return STREAM_TIME_MAX;
   }
   StreamTime t = 0;
   for (uint32_t i = 0; i < mTracks.Length(); ++i) {
     Track* track = mTracks[i];
     if (!track->IsEnded()) {
       return STREAM_TIME_MAX;
     }
-    t = std::max(t, track->GetEndTimeRoundDown());
+    t = std::max(t, track->GetEnd());
   }
   return t;
 }
 
 StreamBuffer::Track*
 StreamBuffer::FindTrack(TrackID aID)
 {
   if (aID == TRACK_NONE)
@@ -86,19 +86,19 @@ StreamBuffer::ForgetUpTo(StreamTime aTim
   const StreamTime minChunkSize = 2400;
   if (aTime < mForgottenTime + minChunkSize) {
     return;
   }
   mForgottenTime = aTime;
 
   for (uint32_t i = 0; i < mTracks.Length(); ++i) {
     Track* track = mTracks[i];
-    if (track->IsEnded() && track->GetEndTimeRoundDown() <= aTime) {
+    if (track->IsEnded() && track->GetEnd() <= aTime) {
       mTracks.RemoveElementAt(i);
       --i;
       continue;
     }
-    TrackTicks forgetTo = std::min(track->GetEnd() - 1, track->TimeToTicksRoundDown(aTime));
+    TrackTicks forgetTo = std::min(track->GetEnd() - 1, aTime);
     track->ForgetUpTo(forgetTo);
   }
 }
 
 }
--- a/dom/media/StreamBuffer.h
+++ b/dom/media/StreamBuffer.h
@@ -46,16 +46,17 @@ inline TrackTicks RateConvertTicksRoundU
 
 inline TrackTicks SecondsToTicksRoundDown(TrackRate aRate, double aSeconds)
 {
   NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
   NS_ASSERTION(0 <= aSeconds && aSeconds <= TRACK_TICKS_MAX/TRACK_RATE_MAX,
                "Bad seconds");
   return aSeconds * aRate;
 }
+
 inline double TrackTicksToSeconds(TrackRate aRate, TrackTicks aTicks)
 {
   NS_ASSERTION(0 < aRate && aRate <= TRACK_RATE_MAX, "Bad rate");
   NS_ASSERTION(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks");
   return static_cast<double>(aTicks)/aRate;
 }
 
 /**
@@ -111,32 +112,16 @@ public:
       return nullptr;
     }
     MediaSegment* GetSegment() const { return mSegment; }
     TrackRate GetRate() const { return mRate; }
     TrackID GetID() const { return mID; }
     bool IsEnded() const { return mEnded; }
     TrackTicks GetStart() const { return mStart; }
     TrackTicks GetEnd() const { return mSegment->GetDuration(); }
-    StreamTime GetEndTimeRoundDown() const
-    {
-      return TicksToTimeRoundDown(mSegment->GetDuration());
-    }
-    StreamTime GetStartTimeRoundDown() const
-    {
-      return TicksToTimeRoundDown(mStart);
-    }
-    TrackTicks TimeToTicksRoundDown(StreamTime aTime) const
-    {
-      return RateConvertTicksRoundDown(mRate, mGraphRate, aTime);
-    }
-    StreamTime TicksToTimeRoundDown(TrackTicks aTicks) const
-    {
-      return RateConvertTicksRoundDown(mGraphRate, mRate, aTicks);
-    }
     MediaSegment::Type GetType() const { return mSegment->GetType(); }
 
     void SetEnded() { mEnded = true; }
     void AppendFrom(Track* aTrack)
     {
       NS_ASSERTION(!mEnded, "Can't append to ended track");
       NS_ASSERTION(aTrack->mID == mID, "IDs must match");
       NS_ASSERTION(aTrack->mStart == 0, "Source track must start at zero");
@@ -250,18 +235,17 @@ public:
     Track* track = new Track(aID, aRate, aStart, aSegment, GraphRate());
     mTracks.InsertElementSorted(track, CompareTracksByID());
 
     if (mTracksKnownTime == STREAM_TIME_MAX) {
       // There exists code like
       // http://mxr.mozilla.org/mozilla-central/source/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp?rev=96b197deb91e&mark=1292-1297#1292
       NS_WARNING("Adding track to StreamBuffer that should have no more tracks");
     } else {
-      NS_ASSERTION(track->TimeToTicksRoundDown(mTracksKnownTime) <= aStart,
-                   "Start time too early");
+      NS_ASSERTION(mTracksKnownTime <= aStart, "Start time too early");
     }
     return *track;
   }
   void AdvanceKnownTracksTime(StreamTime aKnownTime)
   {
     NS_ASSERTION(aKnownTime >= mTracksKnownTime, "Can't move tracks-known time earlier");
     mTracksKnownTime = aKnownTime;
   }
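
For context, a sketch of the rate conversion the removed Track helpers wrapped, assuming the usual round-down/round-up forms of the RateConvertTicks* helpers declared earlier in this header:

#include <cstdint>

typedef int64_t TrackTicks;
typedef int32_t TrackRate;  // assumed width for this sketch

TrackTicks RateConvertTicksRoundDownSketch(TrackRate aOutRate, TrackRate aInRate, TrackTicks aTicks)
{
  return (aTicks * aOutRate) / aInRate;
}

TrackTicks RateConvertTicksRoundUpSketch(TrackRate aOutRate, TrackRate aInRate, TrackTicks aTicks)
{
  return (aTicks * aOutRate + aInRate - 1) / aInRate;
}

// With every track running at the graph rate, aOutRate == aInRate and both
// forms reduce to aTicks, so Track::TimeToTicksRoundDown and
// Track::TicksToTimeRoundDown were identity conversions and could be removed.
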
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -252,17 +252,17 @@ TrackUnionStream::TrackUnionStream(DOMMe
     *aOutputTrackFinished = false;
     for (GraphTime t = aFrom; t < aTo; t = next) {
       MediaInputPort::InputInterval interval = map->mInputPort->GetNextInputInterval(t);
       interval.mEnd = std::min(interval.mEnd, aTo);
       StreamTime inputEnd = source->GraphTimeToStreamTime(interval.mEnd);
       TrackTicks inputTrackEndPoint = TRACK_TICKS_MAX;
 
       if (aInputTrack->IsEnded() &&
-          aInputTrack->GetEndTimeRoundDown() <= inputEnd) {
+          aInputTrack->GetEnd() <= inputEnd) {
         inputTrackEndPoint = aInputTrack->GetEnd();
         *aOutputTrackFinished = true;
       }
 
       if (interval.mStart >= interval.mEnd) {
         break;
       }
       next = interval.mEnd;