Bug 1091008 - Remove the aStartTime argument from MediaDecoderReader::GetBuffered. r=cpearce
☠☠ backed out by 43a51201545a ☠ ☠
author Bobby Holley <bobbyholley@gmail.com>
Wed, 05 Nov 2014 10:08:58 +0100
changeset 214039 9de4746aa59a3236c122fc9fba27bd4fe2f6c544
parent 214038 856016c0118ab665cf79f947e2ee03e207b1fa38
child 214040 966093bbc26ad7d904ccf89d6775a72e2045b005
push id 51401
push user bobbyholley@gmail.com
push date Wed, 05 Nov 2014 09:09:19 +0000
treeherder mozilla-inbound@21ddb8a58fea
reviewers cpearce
bugs 1091008
milestone 36.0a1
Bug 1091008 - Remove the aStartTime argument from MediaDecoderReader::GetBuffered. r=cpearce

We now have this stashed on the superclass.
dom/media/MediaDecoderReader.cpp
dom/media/MediaDecoderReader.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/fmp4/MP4Reader.cpp
dom/media/fmp4/MP4Reader.h
dom/media/gstreamer/GStreamerReader.cpp
dom/media/gstreamer/GStreamerReader.h
dom/media/gtest/TestMP4Reader.cpp
dom/media/mediasource/SourceBufferDecoder.cpp
dom/media/ogg/OggReader.cpp
dom/media/ogg/OggReader.h
dom/media/omx/RtspMediaCodecReader.h
dom/media/omx/RtspOmxReader.h
dom/media/raw/RawReader.cpp
dom/media/raw/RawReader.h
dom/media/wave/WaveReader.cpp
dom/media/wave/WaveReader.h
dom/media/webm/WebMReader.cpp
dom/media/webm/WebMReader.h
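
Before the diff, a brief illustration of the calling-convention change it makes mechanically across all readers: GetBuffered() previously received the start time as an explicit argument; after this patch each reader reads it from state kept on the MediaDecoderReader superclass (mStartTime, set via SetStartTime() once metadata has been decoded, -1 before that). The sketch below is illustrative only, with stand-in types rather than the actual Gecko classes:

// Sketch of the new calling convention; stand-in types, not Gecko code.
#include <cstdint>
#include <utility>
#include <vector>

struct TimeRangeSec { double start, end; };   // stand-in for dom::TimeRanges

class ReaderSketch {
public:
  // Old: nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
  // New: the reader consults mStartTime stashed on the superclass.
  std::vector<TimeRangeSec> GetBuffered() const {
    // mStartTime == -1 means metadata decode has not finished; the real
    // readers now assert on this instead of trusting a passed-in value.
    if (mStartTime == -1) {
      return {};
    }
    std::vector<TimeRangeSec> buffered;
    for (const auto& range : mCachedRangesUsecs) {
      buffered.push_back({(range.first  - mStartTime) / 1e6,
                          (range.second - mStartTime) / 1e6});
    }
    return buffered;
  }

  // Called once by the state machine after metadata is read, mirroring
  // MediaDecoderReader::SetStartTime() in the patch below.
  void SetStartTime(int64_t aStartTimeUsecs) { mStartTime = aStartTimeUsecs; }

protected:
  int64_t mStartTime = -1;                                      // microseconds
  std::vector<std::pair<int64_t, int64_t>> mCachedRangesUsecs;  // illustrative
};
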
--- a/dom/media/MediaDecoderReader.cpp
+++ b/dom/media/MediaDecoderReader.cpp
@@ -124,18 +124,17 @@ VideoData* MediaDecoderReader::DecodeToF
 void
 MediaDecoderReader::SetStartTime(int64_t aStartTime)
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   mStartTime = aStartTime;
 }
 
 nsresult
-MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered,
-                                int64_t aStartTime)
+MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered)
 {
   MediaResource* stream = mDecoder->GetResource();
   int64_t durationUs = 0;
   {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     durationUs = mDecoder->GetMediaDuration();
   }
   GetEstimatedBufferedTimeRanges(stream, durationUs, aBuffered);
--- a/dom/media/MediaDecoderReader.h
+++ b/dom/media/MediaDecoderReader.h
@@ -124,34 +124,31 @@ public:
   // Tell the reader that the data decoded are not for direct playback, so it
   // can accept more files, in particular those which have more channels than
   // available in the audio output.
   void SetIgnoreAudioOutputFormat()
   {
     mIgnoreAudioOutputFormat = true;
   }
 
-  // Populates aBuffered with the time ranges which are buffered. aStartTime
-  // must be the presentation time of the first frame in the media, e.g.
-  // the media time corresponding to playback time/position 0. This function
+  // Populates aBuffered with the time ranges which are buffered. This function
   // is called on the main, decode, and state machine threads.
   //
   // This base implementation in MediaDecoderReader estimates the time ranges
   // buffered by interpolating the cached byte ranges with the duration
   // of the media. Reader subclasses should override this method if they
   // can quickly calculate the buffered ranges more accurately.
   //
   // The primary advantage of this implementation in the reader base class
   // is that it's a fast approximation, which does not perform any I/O.
   //
   // The OggReader relies on this base implementation not performing I/O,
   // since in FirefoxOS we can't do I/O on the main thread, where this is
   // called.
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered,
-                               int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
 
   virtual int64_t ComputeStartTime(const VideoData* aVideo, const AudioData* aAudio);
 
   // Returns the number of bytes of memory allocated by structures/frames in
   // the video queue.
   size_t SizeOfVideoQueueInBytes() const;
 
   // Returns the number of bytes of memory allocated by structures/frames in
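
The header comment above describes what the base-class GetBuffered() does: estimate the buffered time ranges by interpolating the cached byte ranges against the media duration, without performing any I/O. A hypothetical sketch of that strategy follows; it is not the real GetEstimatedBufferedTimeRanges, and the types are stand-ins:

// Hypothetical estimate: map cached byte ranges linearly onto the timeline,
// assuming roughly constant bitrate. Fast and I/O-free, but approximate.
#include <cstdint>
#include <vector>

struct ByteRange    { int64_t start, end; };   // already-cached bytes
struct TimeRangeSec { double  start, end; };

std::vector<TimeRangeSec>
EstimateBufferedByInterpolation(const std::vector<ByteRange>& aCachedRanges,
                                int64_t aTotalBytes, int64_t aDurationUsecs)
{
  std::vector<TimeRangeSec> buffered;
  if (aTotalBytes <= 0 || aDurationUsecs <= 0) {
    return buffered;  // unknown size or duration: nothing to interpolate with
  }
  const double durationSec = aDurationUsecs / 1e6;
  for (const ByteRange& r : aCachedRanges) {
    buffered.push_back({durationSec * r.start / aTotalBytes,
                        durationSec * r.end   / aTotalBytes});
  }
  return buffered;
}
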
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -1489,18 +1489,21 @@ void MediaDecoderStateMachine::NotifyDat
   NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   mReader->NotifyDataArrived(aBuffer, aLength, aOffset);
 
   // While playing an unseekable stream of unknown duration, mEndTime is
   // updated (in AdvanceFrame()) as we play. But if data is being downloaded
   // faster than played, mEndTime won't reflect the end of playable data
   // since we haven't played the frame at the end of buffered data. So update
   // mEndTime here as new data is downloaded to prevent such a lag.
+  //
+  // Make sure to only do this if we have a start time, otherwise the reader
+  // doesn't know how to compute GetBuffered.
   nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  if (mDecoder->IsInfinite() &&
+  if (mDecoder->IsInfinite() && (mStartTime != -1) &&
       NS_SUCCEEDED(mDecoder->GetBuffered(buffered)))
   {
     uint32_t length = 0;
     buffered->GetLength(&length);
     if (length) {
       double end = 0;
       buffered->End(length - 1, &end);
       ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
@@ -2981,17 +2984,17 @@ void MediaDecoderStateMachine::StartBuff
               stats.mDownloadRate/1024, stats.mDownloadRateReliable ? "" : " (unreliable)");
 #endif
 }
 
 nsresult MediaDecoderStateMachine::GetBuffered(dom::TimeRanges* aBuffered) {
   MediaResource* resource = mDecoder->GetResource();
   NS_ENSURE_TRUE(resource, NS_ERROR_FAILURE);
   resource->Pin();
-  nsresult res = mReader->GetBuffered(aBuffered, mStartTime);
+  nsresult res = mReader->GetBuffered(aBuffered);
   resource->Unpin();
   return res;
 }
 
 void MediaDecoderStateMachine::SetPlayStartTime(const TimeStamp& aTimeStamp)
 {
   AssertCurrentThreadInMonitor();
   mPlayStartTime = aTimeStamp;
--- a/dom/media/fmp4/MP4Reader.cpp
+++ b/dom/media/fmp4/MP4Reader.cpp
@@ -816,35 +816,36 @@ MP4Reader::GetEvictionOffset(double aTim
   if (!mIndexReady) {
     return 0;
   }
 
   return mDemuxer->GetEvictionOffset(aTime * 1000000.0);
 }
 
 nsresult
-MP4Reader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+MP4Reader::GetBuffered(dom::TimeRanges* aBuffered)
 {
   MonitorAutoLock mon(mIndexMonitor);
   if (!mIndexReady) {
     return NS_OK;
   }
+  MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
 
   MediaResource* resource = mDecoder->GetResource();
   nsTArray<MediaByteRange> ranges;
   resource->Pin();
   nsresult rv = resource->GetCachedRanges(ranges);
   resource->Unpin();
 
   if (NS_SUCCEEDED(rv)) {
     nsTArray<Interval<Microseconds>> timeRanges;
     mDemuxer->ConvertByteRangesToTime(ranges, &timeRanges);
     for (size_t i = 0; i < timeRanges.Length(); i++) {
-      aBuffered->Add((timeRanges[i].start - aStartTime) / 1000000.0,
-                     (timeRanges[i].end - aStartTime) / 1000000.0);
+      aBuffered->Add((timeRanges[i].start - mStartTime) / 1000000.0,
+                     (timeRanges[i].end - mStartTime) / 1000000.0);
     }
   }
 
   return NS_OK;
 }
 
 bool MP4Reader::IsDormantNeeded()
 {
--- a/dom/media/fmp4/MP4Reader.h
+++ b/dom/media/fmp4/MP4Reader.h
@@ -52,18 +52,17 @@ public:
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
   virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength,
                                  int64_t aOffset) MOZ_OVERRIDE;
 
   virtual int64_t GetEvictionOffset(double aTime) MOZ_OVERRIDE;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered,
-                               int64_t aStartTime) MOZ_OVERRIDE;
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) MOZ_OVERRIDE;
 
   // For Media Resource Management
   virtual bool IsWaitingMediaResources() MOZ_OVERRIDE;
   virtual bool IsDormantNeeded() MOZ_OVERRIDE;
   virtual void ReleaseMediaResources() MOZ_OVERRIDE;
 
   virtual nsresult ResetDecode() MOZ_OVERRIDE;
 
--- a/dom/media/gstreamer/GStreamerReader.cpp
+++ b/dom/media/gstreamer/GStreamerReader.cpp
@@ -810,18 +810,17 @@ nsresult GStreamerReader::Seek(int64_t a
   GstMessage* message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
   gst_message_unref(message);
   LOG(PR_LOG_DEBUG, "seek completed");
 
   return NS_OK;
 }
 
-nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered,
-                                      int64_t aStartTime)
+nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered)
 {
   if (!mInfo.HasValidMedia()) {
     return NS_OK;
   }
 
 #if GST_VERSION_MAJOR == 0
   GstFormat format = GST_FORMAT_TIME;
 #endif
--- a/dom/media/gstreamer/GStreamerReader.h
+++ b/dom/media/gstreamer/GStreamerReader.h
@@ -47,17 +47,17 @@ public:
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                 int64_t aTimeThreshold);
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags);
   virtual nsresult Seek(int64_t aTime,
                         int64_t aStartTime,
                         int64_t aEndTime,
                         int64_t aCurrentTime);
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
 
   virtual void NotifyDataArrived(const char *aBuffer,
                                  uint32_t aLength,
                                  int64_t aOffset) MOZ_OVERRIDE;
 
   virtual bool HasAudio() {
     return mInfo.HasAudio();
   }
--- a/dom/media/gtest/TestMP4Reader.cpp
+++ b/dom/media/gtest/TestMP4Reader.cpp
@@ -31,16 +31,22 @@ public:
   {
     EXPECT_EQ(NS_OK, Preferences::SetBool(
                        "media.fragmented-mp4.use-blank-decoder", true));
 
     EXPECT_EQ(NS_OK, resource->Open(nullptr));
     decoder->SetResource(resource);
 
     reader->Init(nullptr);
+    {
+      // This needs to be done before invoking GetBuffered. This is normally
+      // done by MediaDecoderStateMachine.
+      ReentrantMonitorAutoEnter mon(decoder->GetReentrantMonitor());
+      reader->SetStartTime(0);
+    }
   }
 
   void Init() {
     nsCOMPtr<nsIThread> thread;
     nsresult rv = NS_NewThread(getter_AddRefs(thread),
                                NS_NewRunnableMethod(this, &TestBinding::ReadMetadata));
     EXPECT_EQ(NS_OK, rv);
     thread->Shutdown();
@@ -67,17 +73,17 @@ TEST(MP4Reader, BufferedRange)
 {
   nsRefPtr<TestBinding> b = new TestBinding();
   b->Init();
 
   // Video 3-4 sec, audio 2.986666-4.010666 sec
   b->resource->MockAddBufferedRange(248400, 327455);
 
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges, 0));
+  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
   EXPECT_EQ(1U, ranges->Length());
   double start = 0;
   EXPECT_EQ(NS_OK, ranges->Start(0, &start));
   EXPECT_NEAR(270000 / 90000.0, start, 0.000001);
   double end = 0;
   EXPECT_EQ(NS_OK, ranges->End(0, &end));
   EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
 }
@@ -88,17 +94,17 @@ TEST(MP4Reader, BufferedRangeMissingLast
   b->Init();
 
   // Dropping the last byte of the video
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(248400, 324912);
   b->resource->MockAddBufferedRange(324913, 327455);
 
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges, 0));
+  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
   EXPECT_EQ(1U, ranges->Length());
   double start = 0;
   EXPECT_EQ(NS_OK, ranges->Start(0, &start));
   EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
   double end = 0;
   EXPECT_EQ(NS_OK, ranges->End(0, &end));
   EXPECT_NEAR(357000 / 90000.0, end, 0.000001);
 }
@@ -109,17 +115,17 @@ TEST(MP4Reader, BufferedRangeSyncFrame)
   b->Init();
 
   // Check that missing the first byte at 2 seconds skips right through to 3
   // seconds because of a missing sync frame
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(146336, 327455);
 
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges, 0));
+  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
   EXPECT_EQ(1U, ranges->Length());
   double start = 0;
   EXPECT_EQ(NS_OK, ranges->Start(0, &start));
   EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
   double end = 0;
   EXPECT_EQ(NS_OK, ranges->End(0, &end));
   EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
 }
@@ -167,17 +173,17 @@ TEST(MP4Reader, CompositionOrder)
   b->resource->MockAddBufferedRange(9734, 10314);
   b->resource->MockAddBufferedRange(10314, 10895);
   b->resource->MockAddBufferedRange(11207, 11787);
   b->resource->MockAddBufferedRange(12035, 12616);
   b->resource->MockAddBufferedRange(12616, 13196);
   b->resource->MockAddBufferedRange(13220, 13901);
 
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges, 0));
+  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
   EXPECT_EQ(2U, ranges->Length());
 
   double start = 0;
   EXPECT_EQ(NS_OK, ranges->Start(0, &start));
   EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
   double end = 0;
   EXPECT_EQ(NS_OK, ranges->End(0, &end));
   EXPECT_NEAR(332.0 / 2500.0, end, 0.000001);
@@ -217,17 +223,17 @@ TEST(MP4Reader, Normalised)
   //     9 12035   581  8212      1014  Yes
   //    10 12616   580  9226      1015  Yes
   //    11 13220   581  10241     1014  Yes
 
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(48, 13901);
 
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges, 0));
+  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
   EXPECT_EQ(1U, ranges->Length());
 
   double start = 0;
   EXPECT_EQ(NS_OK, ranges->Start(0, &start));
   EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
   double end = 0;
   EXPECT_EQ(NS_OK, ranges->End(0, &end));
   EXPECT_NEAR(11255.0 / 44100.0, end, 0.000001);
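
A note on the expected values in the tests above: they divide media-time ticks by what appear to be the track timescales of the test files (90000 and 2500 for video, 44100 for audio) — an assumption read off the divisors, not stated elsewhere in the patch. A small worked example for the BufferedRange and Normalised expectations:

// Worked arithmetic behind the EXPECT_NEAR values above (assumed timescales).
#include <cassert>
#include <cmath>

int main() {
  const double videoStart = 270000.0 / 90000.0;  // 3.0 s, matches "Video 3-4 sec"
  const double videoEnd   = 360000.0 / 90000.0;  // 4.0 s
  const double audioEnd   = 11255.0 / 44100.0;   // ~0.255 s, end of Normalised test
  assert(std::fabs(videoStart - 3.0) < 1e-9);
  assert(std::fabs(videoEnd   - 4.0) < 1e-9);
  (void)audioEnd;
  return 0;
}
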
--- a/dom/media/mediasource/SourceBufferDecoder.cpp
+++ b/dom/media/mediasource/SourceBufferDecoder.cpp
@@ -205,18 +205,17 @@ SourceBufferDecoder::NotifyDataArrived(c
   // force parent decoder's state machine to recompute end time for
   // infinite length media.
   mParentDecoder->NotifyDataArrived(nullptr, 0, 0);
 }
 
 nsresult
 SourceBufferDecoder::GetBuffered(dom::TimeRanges* aBuffered)
 {
-  // XXX: Need mStartTime (from StateMachine) instead of passing 0.
-  return mReader->GetBuffered(aBuffered, 0);
+  return mReader->GetBuffered(aBuffered);
 }
 
 int64_t
 SourceBufferDecoder::ConvertToByteOffset(double aTime)
 {
   int64_t readerOffset = mReader->GetEvictionOffset(aTime);
   if (readerOffset >= 0) {
     return readerOffset;
--- a/dom/media/ogg/OggReader.cpp
+++ b/dom/media/ogg/OggReader.cpp
@@ -1876,25 +1876,26 @@ nsresult OggReader::SeekBisection(int64_
     NS_ASSERTION(endTime >= seekTarget, "End must be after seek target");
   }
 
   SEEK_LOG(PR_LOG_DEBUG, ("Seek complete in %d bisections.", hops));
 
   return NS_OK;
 }
 
-nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
 {
+  MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
   {
     mozilla::ReentrantMonitorAutoEnter mon(mMonitor);
     if (mIsChained)
       return NS_ERROR_FAILURE;
   }
 #ifdef OGG_ESTIMATE_BUFFERED
-  return MediaDecoderReader::GetBuffered(aBuffered, aStartTime);
+  return MediaDecoderReader::GetBuffered(aBuffered);
 #else
   // HasAudio and HasVideo are not used here as they take a lock and cause
   // a deadlock. Accessing mInfo doesn't require a lock - it doesn't change
   // after metadata is read.
   if (!mInfo.HasValidMedia()) {
     // No need to search through the file if there are no audio or video tracks
     return NS_OK;
   }
@@ -1914,17 +1915,17 @@ nsresult OggReader::GetBuffered(dom::Tim
     // Ensure the offsets are after the header pages.
     int64_t startOffset = ranges[index].mStart;
     int64_t endOffset = ranges[index].mEnd;
 
     // Because the granulepos time is actually the end time of the page,
     // we special-case (startOffset == 0) so that the first
     // buffered range always appears to be buffered from the media start
     // time, rather than from the end-time of the first page.
-    int64_t startTime = (startOffset == 0) ? aStartTime : -1;
+    int64_t startTime = (startOffset == 0) ? mStartTime : -1;
 
     // Find the start time of the range. Read pages until we find one with a
     // granulepos which we can convert into a timestamp to use as the time of
     // the start of the buffered range.
     ogg_sync_reset(&sync.mState);
     while (startTime == -1) {
       ogg_page page;
       int32_t discard;
@@ -1982,18 +1983,18 @@ nsresult OggReader::GetBuffered(dom::Tim
       }
     }
 
     if (startTime != -1) {
       // We were able to find a start time for that range, see if we can
       // find an end time.
       int64_t endTime = RangeEndTime(startOffset, endOffset, true);
       if (endTime != -1) {
-        aBuffered->Add((startTime - aStartTime) / static_cast<double>(USECS_PER_S),
-                       (endTime - aStartTime) / static_cast<double>(USECS_PER_S));
+        aBuffered->Add((startTime - mStartTime) / static_cast<double>(USECS_PER_S),
+                       (endTime - mStartTime) / static_cast<double>(USECS_PER_S));
       }
     }
   }
 
   return NS_OK;
 #endif
 }
 
--- a/dom/media/ogg/OggReader.h
+++ b/dom/media/ogg/OggReader.h
@@ -74,17 +74,17 @@ public:
 
   virtual bool HasVideo() {
     return mTheoraState != 0 && mTheoraState->mActive;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags);
   virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
 private:
   // TODO: DEPRECATED. This uses synchronous decoding.
   // Stores the presentation time of the first frame we'd be able to play if
   // we started playback at the current position. Returns the first video
   // frame, if we have video.
--- a/dom/media/omx/RtspMediaCodecReader.h
+++ b/dom/media/omx/RtspMediaCodecReader.h
@@ -43,18 +43,17 @@ public:
   // 1. Because the Rtsp stream is a/v separated. The buffered data in a/v
   // tracks are not consistent with time stamp.
   // For example: audio buffer: 1~2s, video buffer: 1.5~2.5s
   // 2. Since the Rtsp is a realtime streaming, the buffer we made for
   // RtspMediaResource is quite small. The small buffer implies the time ranges
   // we returned are not useful for the MediaDecodeStateMachine. Unlike the
   // ChannelMediaResource, it has a "cache" that can store the whole streaming
   // data so the |GetBuffered| function can retrieve useful time ranges.
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered,
-                               int64_t aStartTime) MOZ_OVERRIDE {
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) MOZ_OVERRIDE {
     return NS_OK;
   }
 
   virtual void SetIdle() MOZ_OVERRIDE;
 
   // Disptach a DecodeVideoFrameTask to decode video data.
   virtual void RequestVideoData(bool aSkipToNextKeyframe,
                                 int64_t aTimeThreshold) MOZ_OVERRIDE;
--- a/dom/media/omx/RtspOmxReader.h
+++ b/dom/media/omx/RtspOmxReader.h
@@ -53,18 +53,17 @@ public:
   // 1. Because the Rtsp stream is a/v separated. The buffered data in a/v
   // tracks are not consistent with time stamp.
   // For example: audio buffer: 1~2s, video buffer: 1.5~2.5s
   // 2. Since the Rtsp is a realtime streaming, the buffer we made for
   // RtspMediaResource is quite small. The small buffer implies the time ranges
   // we returned are not useful for the MediaDecodeStateMachine. Unlike the
   // ChannelMediaResource, it has a "cache" that can store the whole streaming
   // data so the |GetBuffered| function can retrieve useful time ranges.
-  virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered,
-                               int64_t aStartTime) MOZ_FINAL MOZ_OVERRIDE {
+  virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered) MOZ_FINAL MOZ_OVERRIDE {
     return NS_OK;
   }
 
   virtual void SetIdle() MOZ_OVERRIDE;
 
   virtual nsresult ReadMetadata(MediaInfo *aInfo, MetadataTags **aTags)
     MOZ_FINAL MOZ_OVERRIDE;
 
--- a/dom/media/raw/RawReader.cpp
+++ b/dom/media/raw/RawReader.cpp
@@ -279,12 +279,12 @@ nsresult RawReader::Seek(int64_t aTime, 
     } else {
       video.forget();
     }
   }
 
   return NS_OK;
 }
 
-nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered)
 {
   return NS_OK;
 }
--- a/dom/media/raw/RawReader.h
+++ b/dom/media/raw/RawReader.h
@@ -35,17 +35,17 @@ public:
   virtual bool HasVideo()
   {
     return true;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags);
   virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
 private:
   bool ReadFromResource(MediaResource *aResource, uint8_t *aBuf, uint32_t aLength);
 
   RawVideoHeader mMetadata;
   uint32_t mCurrentFrame;
--- a/dom/media/wave/WaveReader.cpp
+++ b/dom/media/wave/WaveReader.cpp
@@ -273,17 +273,17 @@ nsresult WaveReader::Seek(int64_t aTarge
   position += mWavePCMOffset;
   return mDecoder->GetResource()->Seek(nsISeekableStream::NS_SEEK_SET, position);
 }
 
 static double RoundToUsecs(double aSeconds) {
   return floor(aSeconds * USECS_PER_S) / USECS_PER_S;
 }
 
-nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered)
 {
   if (!mInfo.HasAudio()) {
     return NS_OK;
   }
   int64_t startOffset = mDecoder->GetResource()->GetNextCachedData(mWavePCMOffset);
   while (startOffset >= 0) {
     int64_t endOffset = mDecoder->GetResource()->GetCachedDataEnd(startOffset);
     // Bytes [startOffset..endOffset] are cached.
--- a/dom/media/wave/WaveReader.h
+++ b/dom/media/wave/WaveReader.h
@@ -39,17 +39,17 @@ public:
   virtual bool HasVideo()
   {
     return false;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags);
   virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime, int64_t aCurrentTime);
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
 
   // To seek in a buffered range, we just have to seek the stream.
   virtual bool IsSeekableInBufferedRanges() {
     return true;
   }
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
--- a/dom/media/webm/WebMReader.cpp
+++ b/dom/media/webm/WebMReader.cpp
@@ -1080,18 +1080,19 @@ nsresult WebMReader::Seek(int64_t aTarge
                        this, offset, r));
     if (r != 0) {
       return NS_ERROR_FAILURE;
     }
   }
   return NS_OK;
 }
 
-nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime)
+nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered)
 {
+  MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
   if (aBuffered->Length() != 0) {
     return NS_ERROR_FAILURE;
   }
 
   MediaResource* resource = mDecoder->GetResource();
 
   // Special case completely cached files.  This also handles local files.
   if (mContext && resource->IsDataCachedToEndOfResource(0)) {
@@ -1109,17 +1110,17 @@ nsresult WebMReader::GetBuffered(dom::Ti
   NS_ENSURE_SUCCESS(res, res);
 
   for (uint32_t index = 0; index < ranges.Length(); index++) {
     uint64_t start, end;
     bool rv = mBufferedState->CalculateBufferedForRange(ranges[index].mStart,
                                                         ranges[index].mEnd,
                                                         &start, &end);
     if (rv) {
-      int64_t startOffset = aStartTime * NS_PER_USEC;
+      int64_t startOffset = mStartTime * NS_PER_USEC;
       NS_ASSERTION(startOffset >= 0 && uint64_t(startOffset) <= start,
                    "startOffset negative or larger than start time");
       if (!(startOffset >= 0 && uint64_t(startOffset) <= start)) {
         startOffset = 0;
       }
       double startTime = (start - startOffset) / NS_PER_S;
       double endTime = (end - startOffset) / NS_PER_S;
       // If this range extends to the end of the file, the true end time
--- a/dom/media/webm/WebMReader.h
+++ b/dom/media/webm/WebMReader.h
@@ -131,17 +131,17 @@ public:
     NS_ASSERTION(mDecoder->OnDecodeThread(), "Should be on decode thread.");
     return mHasVideo;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags);
   virtual nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                         int64_t aCurrentTime);
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered, int64_t aStartTime);
+  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
   virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength,
                                  int64_t aOffset);
   virtual int64_t GetEvictionOffset(double aTime);
 
   virtual bool IsMediaSeekable() MOZ_OVERRIDE;
 
 protected:
   // Value passed to NextPacket to determine if we are reading a video or an