Bug 1163445: Part5. Replace dom::TimeRanges with TimeIntervals object. r=mattwoodrow
author Jean-Yves Avenard <jyavenard@mozilla.com>
Mon, 18 May 2015 16:15:47 +1000
changeset 244288 24a7f0fda98b2d68efaf5b07d1d56b575e89316a
parent 244287 0fe9422de17bd3204764ee347915480045641e06
child 244289 461be7d30fda2cb8889349aa426e121d4e466f47
push id 59889
push user jyavenard@mozilla.com
push date Mon, 18 May 2015 06:18:59 +0000
reviewers mattwoodrow
bugs 1163445
milestone 41.0a1
dom/html/HTMLMediaElement.cpp
dom/media/Intervals.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/MediaDecoderReader.cpp
dom/media/MediaDecoderReader.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaFormatReader.cpp
dom/media/MediaFormatReader.h
dom/media/TimeUnits.h
dom/media/VideoUtils.cpp
dom/media/VideoUtils.h
dom/media/android/AndroidMediaPluginHost.cpp
dom/media/android/AndroidMediaReader.cpp
dom/media/fmp4/MP4Reader.cpp
dom/media/fmp4/MP4Reader.h
dom/media/gstreamer/GStreamerReader-0.10.cpp
dom/media/gstreamer/GStreamerReader.cpp
dom/media/gstreamer/GStreamerReader.h
dom/media/gtest/TestMP4Reader.cpp
dom/media/mediasource/MediaSource.cpp
dom/media/mediasource/MediaSourceDecoder.cpp
dom/media/mediasource/MediaSourceDecoder.h
dom/media/mediasource/MediaSourceReader.cpp
dom/media/mediasource/MediaSourceReader.h
dom/media/mediasource/MediaSourceUtils.cpp
dom/media/mediasource/MediaSourceUtils.h
dom/media/mediasource/SourceBuffer.cpp
dom/media/mediasource/SourceBufferDecoder.cpp
dom/media/mediasource/SourceBufferDecoder.h
dom/media/mediasource/TrackBuffer.cpp
dom/media/mediasource/TrackBuffer.h
dom/media/ogg/OggReader.cpp
dom/media/ogg/OggReader.h
dom/media/omx/MediaOmxReader.cpp
dom/media/omx/RtspMediaCodecReader.h
dom/media/omx/RtspOmxReader.h
dom/media/raw/RawReader.cpp
dom/media/raw/RawReader.h
dom/media/wave/WaveReader.cpp
dom/media/wave/WaveReader.h
dom/media/webm/SoftwareWebMVideoDecoder.cpp
dom/media/webm/WebMBufferedParser.cpp
dom/media/webm/WebMReader.cpp
dom/media/webm/WebMReader.h
dom/media/wmf/WMFReader.cpp
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -1479,20 +1479,22 @@ HTMLMediaElement::Seek(double aTime,
   if (mReadyState == nsIDOMHTMLMediaElement::HAVE_NOTHING) {
     LOG(PR_LOG_DEBUG, ("%p SetCurrentTime(%f) failed: no source", this, aTime));
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
 
   // Clamp the seek target to inside the seekable ranges.
   nsRefPtr<dom::TimeRanges> seekable = new dom::TimeRanges();
-  if (NS_FAILED(mDecoder->GetSeekable(seekable))) {
+  media::TimeIntervals seekableIntervals = mDecoder->GetSeekable();
+  if (seekableIntervals.IsInvalid()) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return;
   }
+  seekableIntervals.ToTimeRanges(seekable);
   uint32_t length = 0;
   seekable->GetLength(&length);
   if (!length) {
     return;
   }
 
   // If the position we want to seek to is not in a seekable range, we seek
   // to the closest position in the seekable ranges instead. If two positions
@@ -1596,19 +1598,18 @@ NS_IMETHODIMP HTMLMediaElement::GetDurat
   return NS_OK;
 }
 
 already_AddRefed<TimeRanges>
 HTMLMediaElement::Seekable() const
 {
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
   if (mDecoder && mReadyState > nsIDOMHTMLMediaElement::HAVE_NOTHING) {
-    mDecoder->GetSeekable(ranges);
-  }
-  ranges->Normalize();
+    mDecoder->GetSeekable().ToTimeRanges(ranges);
+  }
   return ranges.forget();
 }
 
 /* readonly attribute nsIDOMHTMLTimeRanges seekable; */
 NS_IMETHODIMP HTMLMediaElement::GetSeekable(nsIDOMTimeRanges** aSeekable)
 {
   nsRefPtr<TimeRanges> ranges = Seekable();
   ranges.forget(aSeekable);
@@ -4164,22 +4165,22 @@ HTMLMediaElement::CopyInnerTo(Element* a
 }
 
 already_AddRefed<TimeRanges>
 HTMLMediaElement::Buffered() const
 {
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
   if (mReadyState > nsIDOMHTMLMediaElement::HAVE_NOTHING) {
     if (mDecoder) {
-      // If GetBuffered fails we ignore the error result and just return the
-      // time ranges we found up till the error.
-      mDecoder->GetBuffered(ranges);
+      media::TimeIntervals buffered = mDecoder->GetBuffered();
+      if (!buffered.IsInvalid()) {
+        buffered.ToTimeRanges(ranges);
+      }
     }
   }
-  ranges->Normalize();
   return ranges.forget();
 }
 
 nsresult HTMLMediaElement::GetBuffered(nsIDOMTimeRanges** aBuffered)
 {
   nsRefPtr<TimeRanges> ranges = Buffered();
   ranges.forget(aBuffered);
   return NS_OK;
--- a/dom/media/Intervals.h
+++ b/dom/media/Intervals.h
@@ -508,18 +508,18 @@ public:
     }
     return false;
   }
 
   // Shift all values by aOffset.
   void Shift(const T& aOffset)
   {
     for (auto& interval : mIntervals) {
-      interval.mStart += aOffset;
-      interval.mEnd += aOffset;
+      interval.mStart = interval.mStart + aOffset;
+      interval.mEnd = interval.mEnd + aOffset;
     }
   }
 
   void SetFuzz(const T& aFuzz) {
     for (auto& interval : mIntervals) {
       interval.SetFuzz(aFuzz);
     }
     Normalize();
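
Note on the Shift() change above: spelling the update as "interval.mStart = interval.mStart + aOffset" means the interval's element type only has to provide a binary operator+, not operator+=, which suits media::TimeUnit (a CheckedInt-backed value type). A minimal standalone sketch of that constraint; the Unit and Interval types here are invented for illustration, not the real Intervals.h classes:

    #include <cassert>
    #include <vector>

    // Toy stand-in for media::TimeUnit: it provides operator+ but deliberately
    // no operator+=, which is why Shift() spells out "x = x + aOffset".
    struct Unit {
      long long us;
      Unit operator+(const Unit& aOther) const { return Unit{us + aOther.us}; }
    };

    struct Interval {
      Unit mStart;
      Unit mEnd;
    };

    // Mirrors TimeIntervals::Shift(): move every interval by aOffset.
    static void Shift(std::vector<Interval>& aIntervals, const Unit& aOffset) {
      for (auto& interval : aIntervals) {
        interval.mStart = interval.mStart + aOffset;  // only operator+ required
        interval.mEnd = interval.mEnd + aOffset;
      }
    }

    int main() {
      std::vector<Interval> intervals{{Unit{0}, Unit{10}}, {Unit{20}, Unit{30}}};
      Shift(intervals, Unit{5});
      assert(intervals[0].mStart.us == 5 && intervals[1].mEnd.us == 35);
      return 0;
    }
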
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -7,17 +7,16 @@
 #include "MediaDecoder.h"
 #include "mozilla/FloatingPoint.h"
 #include "mozilla/MathAlgorithms.h"
 #include <limits>
 #include "nsIObserver.h"
 #include "nsTArray.h"
 #include "VideoUtils.h"
 #include "MediaDecoderStateMachine.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "ImageContainer.h"
 #include "MediaResource.h"
 #include "nsError.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/StaticPtr.h"
 #include "nsIMemoryReporter.h"
 #include "nsComponentManagerUtils.h"
 #include <algorithm>
@@ -1287,32 +1286,31 @@ MediaDecoder::IsTransportSeekable()
 
 bool MediaDecoder::IsMediaSeekable()
 {
   NS_ENSURE_TRUE(GetStateMachine(), false);
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   return mMediaSeekable;
 }
 
-nsresult MediaDecoder::GetSeekable(dom::TimeRanges* aSeekable)
+media::TimeIntervals MediaDecoder::GetSeekable()
 {
-  double initialTime = 0.0;
-
   // We can seek in buffered range if the media is seekable. Also, we can seek
   // in unbuffered ranges if the transport level is seekable (local file or the
   // server supports range requests, etc.)
   if (!IsMediaSeekable()) {
-    return NS_OK;
+    return media::TimeIntervals();
   } else if (!IsTransportSeekable()) {
-    return GetBuffered(aSeekable);
+    return GetBuffered();
   } else {
-    double end = IsInfinite() ? std::numeric_limits<double>::infinity()
-                              : initialTime + GetDuration();
-    aSeekable->Add(initialTime, end);
-    return NS_OK;
+    return media::TimeIntervals(
+      media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
+                          IsInfinite() ?
+                            media::TimeUnit::FromInfinity() :
+                            media::TimeUnit::FromSeconds(GetDuration())));
   }
 }
 
 void MediaDecoder::SetFragmentEndTime(double aTime)
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (mDecoderStateMachine) {
     ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
@@ -1448,19 +1446,19 @@ void MediaDecoder::Invalidate()
 {
   if (mVideoFrameContainer) {
     mVideoFrameContainer->Invalidate();
   }
 }
 
 // Constructs the time ranges representing what segments of the media
 // are buffered and playable.
-nsresult MediaDecoder::GetBuffered(dom::TimeRanges* aBuffered) {
-  NS_ENSURE_TRUE(mDecoderStateMachine && !mShuttingDown, NS_ERROR_FAILURE);
-  return mDecoderStateMachine->GetBuffered(aBuffered);
+media::TimeIntervals MediaDecoder::GetBuffered() {
+  NS_ENSURE_TRUE(mDecoderStateMachine && !mShuttingDown, media::TimeIntervals::Invalid());
+  return mDecoderStateMachine->GetBuffered();
 }
 
 size_t MediaDecoder::SizeOfVideoQueue() {
   if (mDecoderStateMachine) {
     return mDecoderStateMachine->SizeOfVideoQueue();
   }
   return 0;
 }
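
The GetSeekable() rewrite above keeps the old three-way decision but expresses it through return values instead of an nsresult out-parameter: a non-seekable media yields an empty media::TimeIntervals, a seekable media over a non-seekable transport yields GetBuffered(), and the fully seekable case yields a single interval from 0 to the duration (or to infinity for infinite streams). A simplified standalone model of that decision, using plain doubles rather than media::TimeIntervals; the function and parameter names are invented for this sketch:

    #include <cstdio>
    #include <limits>
    #include <utility>
    #include <vector>

    using Ranges = std::vector<std::pair<double, double>>;  // [start, end] in seconds

    // Simplified model of the decision in MediaDecoder::GetSeekable() above.
    static Ranges Seekable(bool aMediaSeekable, bool aTransportSeekable,
                           const Ranges& aBuffered, double aDuration,
                           bool aInfinite) {
      if (!aMediaSeekable) {
        return {};                      // nothing is seekable
      }
      if (!aTransportSeekable) {
        return aBuffered;               // can only seek inside buffered data
      }
      double end = aInfinite ? std::numeric_limits<double>::infinity() : aDuration;
      return {{0.0, end}};              // the whole resource is seekable
    }

    int main() {
      Ranges buffered{{2.0, 7.5}};
      // Seekable media over a transport without range requests: the seekable
      // ranges collapse to the buffered range [2, 7.5].
      Ranges s = Seekable(true, false, buffered, 10.0, false);
      std::printf("[%g, %g]\n", s[0].first, s[0].second);
      return 0;
    }
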
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -198,27 +198,22 @@ destroying the MediaDecoder object.
 #include "AbstractMediaDecoder.h"
 #include "DecodedStream.h"
 #include "StateMirroring.h"
 #include "StateWatching.h"
 #include "necko-config.h"
 #ifdef MOZ_EME
 #include "mozilla/CDMProxy.h"
 #endif
+#include "TimeUnits.h"
 
 class nsIStreamListener;
 class nsIPrincipal;
 
 namespace mozilla {
-namespace dom {
-class TimeRanges;
-}
-}
-
-namespace mozilla {
 
 class VideoFrameContainer;
 class MediaDecoderStateMachine;
 
 // GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
 // GetTickCount() and conflicts with MediaDecoder::GetCurrentTime implementation.
 #ifdef GetCurrentTime
 #undef GetCurrentTime
@@ -521,17 +516,17 @@ public:
   // Returns true if this media supports seeking. False for example for WebM
   // files without an index and chained ogg files.
   virtual bool IsMediaSeekable() final override;
   // Returns true if seeking is supported on a transport level (e.g. the server
   // supports range requests, we are playing a file, etc.).
   virtual bool IsTransportSeekable() override;
 
   // Return the time ranges that can be seeked into.
-  virtual nsresult GetSeekable(dom::TimeRanges* aSeekable);
+  virtual media::TimeIntervals GetSeekable();
 
   // Set the end time of the media resource. When playback reaches
   // this point the media pauses. aTime is in seconds.
   virtual void SetFragmentEndTime(double aTime);
 
   // Set the end time of the media. aTime is in microseconds.
   void SetMediaEndTime(int64_t aTime) final override;
 
@@ -579,17 +574,17 @@ public:
   // state.
   ReentrantMonitor& GetReentrantMonitor() override;
 
   // Returns true if the decoder is shut down
   bool IsShutdown() const final override;
 
   // Constructs the time ranges representing what segments of the media
   // are buffered and playable.
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
+  virtual media::TimeIntervals GetBuffered();
 
   // Returns the size, in bytes, of the heap memory used by the currently
   // queued decoded video and audio data.
   size_t SizeOfVideoQueue();
   size_t SizeOfAudioQueue();
 
   VideoFrameContainer* GetVideoFrameContainer() final override
   {
--- a/dom/media/MediaDecoderReader.cpp
+++ b/dom/media/MediaDecoderReader.cpp
@@ -142,27 +142,26 @@ VideoData* MediaDecoderReader::DecodeToF
 
 void
 MediaDecoderReader::SetStartTime(int64_t aStartTime)
 {
   mDecoder->GetReentrantMonitor().AssertCurrentThreadIn();
   mStartTime = aStartTime;
 }
 
-nsresult
-MediaDecoderReader::GetBuffered(mozilla::dom::TimeRanges* aBuffered)
+media::TimeIntervals
+MediaDecoderReader::GetBuffered()
 {
   AutoPinned<MediaResource> stream(mDecoder->GetResource());
   int64_t durationUs = 0;
   {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     durationUs = mDecoder->GetMediaDuration();
   }
-  GetEstimatedBufferedTimeRanges(stream, durationUs, aBuffered);
-  return NS_OK;
+  return GetEstimatedBufferedTimeRanges(stream, durationUs);
 }
 
 int64_t
 MediaDecoderReader::ComputeStartTime(const VideoData* aVideo, const AudioData* aAudio)
 {
   int64_t startTime = std::min<int64_t>(aAudio ? aAudio->mTime : INT64_MAX,
                                         aVideo ? aVideo->mTime : INT64_MAX);
   if (startTime == INT64_MAX) {
--- a/dom/media/MediaDecoderReader.h
+++ b/dom/media/MediaDecoderReader.h
@@ -7,23 +7,20 @@
 #define MediaDecoderReader_h_
 
 #include "AbstractMediaDecoder.h"
 #include "MediaInfo.h"
 #include "MediaData.h"
 #include "MediaPromise.h"
 #include "MediaQueue.h"
 #include "AudioCompactor.h"
+#include "TimeUnits.h"
 
 namespace mozilla {
 
-namespace dom {
-class TimeRanges;
-}
-
 class MediaDecoderReader;
 class SharedDecoderManager;
 
 struct WaitForDataRejectValue
 {
   enum Reason {
     SHUTDOWN,
     CANCELED
@@ -221,17 +218,17 @@ public:
   // can quickly calculate the buffered ranges more accurately.
   //
   // The primary advantage of this implementation in the reader base class
   // is that it's a fast approximation, which does not perform any I/O.
   //
   // The OggReader relies on this base implementation not performing I/O,
   // since in FirefoxOS we can't do I/O on the main thread, where this is
   // called.
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered);
+  virtual media::TimeIntervals GetBuffered();
 
   virtual int64_t ComputeStartTime(const VideoData* aVideo, const AudioData* aAudio);
 
   // The MediaDecoderStateMachine uses various heuristics that assume that
   // raw media data is arriving sequentially from a network channel. This
   // makes sense in the <video src="foo"> case, but not for more advanced use
   // cases like MSE.
   virtual bool UseBufferingHeuristics() { return true; }
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -16,17 +16,17 @@
 #include "MediaTimer.h"
 #include "AudioSink.h"
 #include "nsTArray.h"
 #include "MediaDecoder.h"
 #include "MediaDecoderReader.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/mozalloc.h"
 #include "VideoUtils.h"
-#include "mozilla/dom/TimeRanges.h"
+#include "TimeUnits.h"
 #include "nsDeque.h"
 #include "AudioSegment.h"
 #include "VideoSegment.h"
 #include "ImageContainer.h"
 #include "nsComponentManagerUtils.h"
 #include "nsITimer.h"
 #include "nsContentUtils.h"
 #include "MediaShutdownManager.h"
@@ -1708,27 +1708,23 @@ void MediaDecoderStateMachine::NotifyDat
   // While playing an unseekable stream of unknown duration, mEndTime is
   // updated (in AdvanceFrame()) as we play. But if data is being downloaded
   // faster than played, mEndTime won't reflect the end of playable data
   // since we haven't played the frame at the end of buffered data. So update
   // mEndTime here as new data is downloaded to prevent such a lag.
   //
   // Make sure to only do this if we have a start time, otherwise the reader
   // doesn't know how to compute GetBuffered.
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  if (mDecoder->IsInfinite() && (mStartTime != -1) &&
-      NS_SUCCEEDED(mDecoder->GetBuffered(buffered)))
-  {
-    uint32_t length = 0;
-    buffered->GetLength(&length);
-    if (length) {
-      double end = 0;
-      buffered->End(length - 1, &end);
+  media::TimeIntervals buffered{mDecoder->GetBuffered()};
+  if (mDecoder->IsInfinite() && (mStartTime != -1) && !buffered.IsInvalid()) {
+    bool exists;
+    media::TimeUnit end{buffered.GetEnd(&exists)};
+    if (exists) {
       ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-      mEndTime = std::max<int64_t>(mEndTime, end * USECS_PER_S);
+      mEndTime = std::max<int64_t>(mEndTime, end.ToMicroseconds());
     }
   }
 }
 
 nsRefPtr<MediaDecoder::SeekPromise>
 MediaDecoderStateMachine::Seek(SeekTarget aTarget)
 {
   MOZ_ASSERT(OnTaskQueue());
@@ -2144,36 +2140,40 @@ bool MediaDecoderStateMachine::HasLowUnd
 
   // If we don't have a duration, GetBuffered is probably not going to produce
   // a useful buffered range. Return false here so that we don't get stuck in
   // buffering mode for live streams.
   if (GetDuration() < 0) {
     return false;
   }
 
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  nsresult rv = mReader->GetBuffered(buffered.get());
-  NS_ENSURE_SUCCESS(rv, false);
+  media::TimeIntervals buffered{mReader->GetBuffered()};
+  if (buffered.IsInvalid()) {
+    return false;
+  }
 
   int64_t endOfDecodedVideoData = INT64_MAX;
   if (HasVideo() && !VideoQueue().AtEndOfStream()) {
     endOfDecodedVideoData = VideoQueue().Peek() ? VideoQueue().Peek()->GetEndTime() : mVideoFrameEndTime;
   }
   int64_t endOfDecodedAudioData = INT64_MAX;
   if (HasAudio() && !AudioQueue().AtEndOfStream()) {
     // mDecodedAudioEndTime could be -1 when no audio samples are decoded.
     // But that is fine since we consider ourself as low in decoded data when
     // we don't have any decoded audio samples at all.
     endOfDecodedAudioData = mDecodedAudioEndTime;
   }
   int64_t endOfDecodedData = std::min(endOfDecodedVideoData, endOfDecodedAudioData);
-
-  return endOfDecodedData != INT64_MAX &&
-         !buffered->Contains(static_cast<double>(endOfDecodedData) / USECS_PER_S,
-                             static_cast<double>(std::min(endOfDecodedData + aUsecs, GetDuration())) / USECS_PER_S);
+  if (GetDuration() < endOfDecodedData) {
+    // Our duration is not up to date. No point buffering.
+    return false;
+  }
+  media::TimeInterval interval(media::TimeUnit::FromMicroseconds(endOfDecodedData),
+                               media::TimeUnit::FromMicroseconds(std::min(endOfDecodedData + aUsecs, GetDuration())));
+  return endOfDecodedData != INT64_MAX && !buffered.Contains(interval);
 }
 
 void
 MediaDecoderStateMachine::DecodeError()
 {
   MOZ_ASSERT(OnTaskQueue());
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   if (IsShutdown()) {
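
The HasLowUndecodedData() change above replaces the two double-valued Contains() arguments with a single media::TimeInterval covering [endOfDecodedData, min(endOfDecodedData + aUsecs, duration)] in microseconds, and the new early return for a stale duration avoids building a backwards interval. The question asked of the buffered set is whether that whole interval sits inside one buffered range. A simplified standalone model of that containment test, using integer pairs instead of media::TimeIntervals and omitting the fuzz handling of the real class:

    #include <cassert>
    #include <cstdint>
    #include <utility>
    #include <vector>

    // [start, end] in microseconds; a simplified stand-in for media::TimeInterval.
    using Interval = std::pair<int64_t, int64_t>;

    // Simplified model of TimeIntervals::Contains(TimeInterval) as used above:
    // true only if the whole query interval lies inside a single buffered range.
    static bool Contains(const std::vector<Interval>& aBuffered,
                         const Interval& aQuery) {
      for (const auto& range : aBuffered) {
        if (range.first <= aQuery.first && aQuery.second <= range.second) {
          return true;
        }
      }
      return false;
    }

    int main() {
      std::vector<Interval> buffered{{0, 4000000}, {6000000, 9000000}};
      // Decoded up to 3.5 s and asking for 1 s of lookahead: [3.5 s, 4.5 s] is
      // not fully buffered, so HasLowUndecodedData() would report low data.
      assert(!Contains(buffered, {3500000, 4500000}));
      assert(Contains(buffered, {6500000, 7000000}));
      return 0;
    }
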
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -272,26 +272,26 @@ public:
 
   // Must be called with the decode monitor held.
   bool IsSeeking() const {
     MOZ_ASSERT(OnTaskQueue());
     AssertCurrentThreadInMonitor();
     return mState == DECODER_STATE_SEEKING;
   }
 
-  nsresult GetBuffered(dom::TimeRanges* aBuffered) {
+  media::TimeIntervals GetBuffered() {
     // It's possible for JS to query .buffered before we've determined the start
     // time from metadata, in which case the reader isn't ready to be asked this
     // question.
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     if (mStartTime < 0) {
-      return NS_OK;
+      return media::TimeIntervals();
     }
 
-    return mReader->GetBuffered(aBuffered);
+    return mReader->GetBuffered();
   }
 
   size_t SizeOfVideoQueue() {
     if (mReader) {
       return mReader->SizeOfVideoQueueInBytes();
     }
     return 0;
   }
--- a/dom/media/MediaFormatReader.cpp
+++ b/dom/media/MediaFormatReader.cpp
@@ -1,16 +1,15 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/HTMLMediaElement.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/Preferences.h"
 #include "nsPrintfCString.h"
 #include "nsSize.h"
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "MediaData.h"
 #include "MediaInfo.h"
 #include "MediaFormatReader.h"
@@ -1240,18 +1239,18 @@ MediaFormatReader::GetEvictionOffset(dou
   } else {
     MOZ_ASSERT(OnTaskQueue());
     audioOffset = HasAudio() ? mAudio.mTrackDemuxer->GetEvictionOffset(media::TimeUnit::FromSeconds(aTime)) : INT64_MAX;
     videoOffset = HasVideo() ? mVideo.mTrackDemuxer->GetEvictionOffset(media::TimeUnit::FromSeconds(aTime)) : INT64_MAX;
   }
   return std::min(audioOffset, videoOffset);
 }
 
-nsresult
-MediaFormatReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals
+MediaFormatReader::GetBuffered()
 {
   media::TimeIntervals videoti;
   media::TimeIntervals audioti;
 
   if (NS_IsMainThread()) {
     if (mVideoTrackDemuxer) {
       videoti = mVideoTrackDemuxer->GetBuffered();
     }
@@ -1265,24 +1264,24 @@ MediaFormatReader::GetBuffered(dom::Time
     }
     if (HasAudio()) {
       MonitorAutoLock lock(mAudio.mMonitor);
       audioti = mAudio.mTimeRanges;
     }
   }
   if (HasAudio() && HasVideo()) {
     videoti.Intersection(audioti);
-    videoti.ToTimeRanges(aBuffered);
+    return videoti;
   } else if (HasAudio()) {
-    audioti.ToTimeRanges(aBuffered);
+    return audioti;
   } else if (HasVideo()) {
-    videoti.ToTimeRanges(aBuffered);
+    return videoti;
   }
 
-  return NS_OK;
+  return media::TimeIntervals();
 }
 
 bool MediaFormatReader::IsDormantNeeded()
 {
 #if defined(READER_DORMANT_HEURISTIC)
   return mDormantEnabled;
 #else
   return false;
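
In the GetBuffered() rewrite above, when both audio and video are present the reported set is the intersection of the two tracks' buffered ranges, so a span only counts as buffered when both tracks have data for it; with neither track present an empty set is returned. A simplified standalone model of that intersection for already-normalized interval lists (the real TimeIntervals::Intersection() also handles fuzz); the types here are invented for the sketch:

    #include <algorithm>
    #include <cstdio>
    #include <utility>
    #include <vector>

    using Interval = std::pair<double, double>;  // [start, end] in seconds

    // Simplified model of TimeIntervals::Intersection() for normalized,
    // sorted interval lists.
    static std::vector<Interval> Intersection(const std::vector<Interval>& aA,
                                              const std::vector<Interval>& aB) {
      std::vector<Interval> result;
      for (const auto& a : aA) {
        for (const auto& b : aB) {
          double start = std::max(a.first, b.first);
          double end = std::min(a.second, b.second);
          if (start < end) {
            result.push_back({start, end});
          }
        }
      }
      return result;
    }

    int main() {
      std::vector<Interval> video{{0.0, 10.0}};
      std::vector<Interval> audio{{2.0, 12.0}};
      // With both tracks present, GetBuffered() reports only [2, 10]: the span
      // where audio and video data are both available.
      for (const auto& i : Intersection(video, audio)) {
        std::printf("[%g, %g]\n", i.first, i.second);
      }
      return 0;
    }
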
--- a/dom/media/MediaFormatReader.h
+++ b/dom/media/MediaFormatReader.h
@@ -12,20 +12,16 @@
 #include "mozilla/Monitor.h"
 #include "MediaDataDemuxer.h"
 #include "MediaDecoderReader.h"
 #include "MediaTaskQueue.h"
 #include "PlatformDecoderModule.h"
 
 namespace mozilla {
 
-namespace dom {
-class TimeRanges;
-}
-
 #if defined(MOZ_GONK_MEDIACODEC) || defined(XP_WIN) || defined(MOZ_APPLEMEDIA) || defined(MOZ_FFMPEG)
 #define READER_DORMANT_HEURISTIC
 #else
 #undef READER_DORMANT_HEURISTIC
 #endif
 
 class MediaFormatReader final : public MediaDecoderReader
 {
@@ -76,17 +72,17 @@ public:
     return mSeekable;
   }
 
   virtual int64_t GetEvictionOffset(double aTime) override;
   virtual void NotifyDataArrived(const char* aBuffer,
                                  uint32_t aLength,
                                  int64_t aOffset) override;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   // For Media Resource Management
   virtual void SetIdle() override;
   virtual bool IsDormantNeeded() override;
   virtual void ReleaseMediaResources() override;
   virtual void SetSharedDecoderManager(SharedDecoderManager* aManager)
     override;
 
--- a/dom/media/TimeUnits.h
+++ b/dom/media/TimeUnits.h
@@ -3,24 +3,32 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef TIME_UNITS_H
 #define TIME_UNITS_H
 
 #include "Intervals.h"
-#include "VideoUtils.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/FloatingPoint.h"
 #include "mozilla/dom/TimeRanges.h"
 
 namespace mozilla {
 namespace media {
 
+// Number of microseconds per second. 1e6.
+static const int64_t USECS_PER_S = 1000000;
+
+// Number of microseconds per millisecond.
+static const int64_t USECS_PER_MS = 1000;
+
+// Number of nanoseconds per second. 1e9.
+static const int64_t NSECS_PER_S = 1000000000;
+
 struct Microseconds {
   Microseconds()
     : mValue(0)
   {}
 
   explicit Microseconds(int64_t aValue)
     : mValue(aValue)
   {}
@@ -84,24 +92,32 @@ public:
   static TimeUnit FromMicroseconds(int64_t aValue) {
     return TimeUnit(aValue);
   }
 
   static TimeUnit FromMicroseconds(Microseconds aValue) {
     return TimeUnit(aValue.mValue);
   }
 
+  static TimeUnit FromNanoseconds(int64_t aValue) {
+    return TimeUnit(aValue / 1000);
+  }
+
   static TimeUnit FromInfinity() {
     return TimeUnit(INT64_MAX);
   }
 
   int64_t ToMicroseconds() const {
     return mValue.value();
   }
 
+  int64_t ToNanoseconds() const {
+    return mValue.value() * 1000;
+  }
+
   double ToSeconds() const {
     if (IsInfinite()) {
       return PositiveInfinity<double>();
     }
     return double(mValue.value()) / USECS_PER_S;
   }
 
   bool IsInfinite() const {
@@ -193,16 +209,27 @@ public:
   {}
   explicit TimeIntervals(const BaseType::ElemType& aOther)
     : BaseType(aOther)
   {}
   explicit TimeIntervals(BaseType::ElemType&& aOther)
     : BaseType(Move(aOther))
   {}
 
+  static TimeIntervals Invalid()
+  {
+    return TimeIntervals(TimeInterval(TimeUnit::FromMicroseconds(INT64_MIN),
+                                      TimeUnit::FromMicroseconds(INT64_MIN)));
+  }
+  bool IsInvalid()
+  {
+    return Length() == 1 && Start(0).ToMicroseconds() == INT64_MIN &&
+      End(0).ToMicroseconds() == INT64_MIN;
+  }
+
   TimeIntervals() = default;
 
   // Make TimeIntervals interchangeable with dom::TimeRanges.
   explicit TimeIntervals(dom::TimeRanges* aRanges)
   {
     for (uint32_t i = 0; i < aRanges->Length(); i++) {
       ErrorResult rv;
       *this +=
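
The TimeUnits.h additions above move USECS_PER_S, USECS_PER_MS and NSECS_PER_S here from VideoUtils.h, add nanosecond conversions (FromNanoseconds() truncates via integer division by 1000, ToNanoseconds() multiplies by 1000), and introduce TimeIntervals::Invalid(), a sentinel range with both endpoints at INT64_MIN that is distinct from an empty set and replaces the old failing-nsresult convention in callers such as MediaDecoder::GetBuffered(). A small standalone sketch of the conversion arithmetic; the Usecs struct is a toy stand-in, not the real media::TimeUnit:

    #include <cassert>
    #include <cstdint>

    // Toy model of the nanosecond conversions added to media::TimeUnit above:
    // the value is stored in microseconds, so FromNanoseconds() truncates via
    // integer division by 1000 and ToNanoseconds() multiplies by 1000.
    struct Usecs {
      int64_t mValue;
      static Usecs FromNanoseconds(int64_t aNs) { return Usecs{aNs / 1000}; }
      int64_t ToNanoseconds() const { return mValue * 1000; }
    };

    int main() {
      Usecs t = Usecs::FromNanoseconds(2500999);
      assert(t.mValue == 2500);             // sub-microsecond precision is dropped
      assert(t.ToNanoseconds() == 2500000);

      // TimeIntervals::Invalid() above is a separate concern: a sentinel range
      // [INT64_MIN, INT64_MIN], distinct from an empty set (no intervals at all).
      // Callers test IsInvalid() where the old code checked a failing nsresult.
      return 0;
    }
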
--- a/dom/media/VideoUtils.cpp
+++ b/dom/media/VideoUtils.cpp
@@ -1,15 +1,15 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "VideoUtils.h"
 #include "MediaResource.h"
-#include "mozilla/dom/TimeRanges.h"
+#include "TimeUnits.h"
 #include "nsMathUtils.h"
 #include "nsSize.h"
 #include "VorbisUtils.h"
 #include "ImageContainer.h"
 #include "SharedThreadPool.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/Base64.h"
 #include "mozilla/Telemetry.h"
@@ -65,54 +65,58 @@ void ScaleDisplayByAspectRatio(nsIntSize
 static int64_t BytesToTime(int64_t offset, int64_t length, int64_t durationUs) {
   NS_ASSERTION(length > 0, "Must have positive length");
   double r = double(offset) / double(length);
   if (r > 1.0)
     r = 1.0;
   return int64_t(double(durationUs) * r);
 }
 
-void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
-                                    int64_t aDurationUsecs,
-                                    mozilla::dom::TimeRanges* aOutBuffered)
+media::TimeIntervals GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
+                                                    int64_t aDurationUsecs)
 {
+  media::TimeIntervals buffered;
   // Nothing to cache if the media takes 0us to play.
-  if (aDurationUsecs <= 0 || !aStream || !aOutBuffered)
-    return;
+  if (aDurationUsecs <= 0 || !aStream)
+    return buffered;
 
   // Special case completely cached files.  This also handles local files.
   if (aStream->IsDataCachedToEndOfResource(0)) {
-    aOutBuffered->Add(0, double(aDurationUsecs) / USECS_PER_S);
-    return;
+    buffered +=
+      media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
+                          media::TimeUnit::FromMicroseconds(aDurationUsecs));
+    return buffered;
   }
 
   int64_t totalBytes = aStream->GetLength();
 
   // If we can't determine the total size, pretend that we have nothing
   // buffered. This will put us in a state of eternally-low-on-undecoded-data
   // which is not great, but about the best we can do.
   if (totalBytes <= 0)
-    return;
+    return buffered;
 
   int64_t startOffset = aStream->GetNextCachedData(0);
   while (startOffset >= 0) {
     int64_t endOffset = aStream->GetCachedDataEnd(startOffset);
     // Bytes [startOffset..endOffset] are cached.
     NS_ASSERTION(startOffset >= 0, "Integer underflow in GetBuffered");
     NS_ASSERTION(endOffset >= 0, "Integer underflow in GetBuffered");
 
     int64_t startUs = BytesToTime(startOffset, totalBytes, aDurationUsecs);
     int64_t endUs = BytesToTime(endOffset, totalBytes, aDurationUsecs);
     if (startUs != endUs) {
-      aOutBuffered->Add(double(startUs) / USECS_PER_S,
-                        double(endUs) / USECS_PER_S);
+      buffered +=
+        media::TimeInterval(media::TimeUnit::FromMicroseconds(startUs),
+                            media::TimeUnit::FromMicroseconds(endUs));
     }
     startOffset = aStream->GetNextCachedData(endOffset);
   }
-  return;
+  return buffered;
 }
 
 int DownmixAudioToStereo(mozilla::AudioDataValue* buffer,
                          int channels, uint32_t frames)
 {
   int outChannels;
   outChannels = 2;
 #ifdef MOZ_SAMPLE_TYPE_FLOAT32
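
GetEstimatedBufferedTimeRanges() above maps each cached byte range to time proportionally, using the same arithmetic as the static BytesToTime() helper earlier in this file: timeUs = durationUs * offset / totalBytes, clamped at the duration. A small standalone example of that estimate; the numbers are made up:

    #include <cstdint>
    #include <cstdio>

    // The proportional byte-to-time mapping used by GetEstimatedBufferedTimeRanges():
    // same arithmetic as the static BytesToTime() helper shown above.
    static int64_t BytesToTime(int64_t aOffset, int64_t aLength, int64_t aDurationUs) {
      double r = double(aOffset) / double(aLength);
      if (r > 1.0) {
        r = 1.0;
      }
      return int64_t(double(aDurationUs) * r);
    }

    int main() {
      // A 10 s resource of 1,000,000 bytes with bytes [250000, 750000) cached is
      // estimated as roughly [2.5 s, 7.5 s] buffered.
      const int64_t totalBytes = 1000000;
      const int64_t durationUs = 10000000;
      std::printf("[%lld us, %lld us]\n",
                  (long long)BytesToTime(250000, totalBytes, durationUs),
                  (long long)BytesToTime(750000, totalBytes, durationUs));
      return 0;
    }
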
--- a/dom/media/VideoUtils.h
+++ b/dom/media/VideoUtils.h
@@ -18,16 +18,17 @@
     defined(MOZ_ASAN)
 // For MEDIA_THREAD_STACK_SIZE
 #include "nsIThreadManager.h"
 #endif
 #include "nsThreadUtils.h"
 #include "prtime.h"
 #include "AudioSampleFormat.h"
 #include "mozilla/RefPtr.h"
+#include "TimeUnits.h"
 
 using mozilla::CheckedInt64;
 using mozilla::CheckedUint64;
 using mozilla::CheckedInt32;
 using mozilla::CheckedUint32;
 
 // This file contains stuff we'd rather put elsewhere, but which is
 // dependent on other changes which we don't want to wait for. We plan to
@@ -110,29 +111,24 @@ private:
 
 template<class T>
 void DeleteOnMainThread(nsAutoPtr<T>& aObject) {
   NS_DispatchToMainThread(new DeleteObjectTask<T>(aObject));
 }
 
 class MediaResource;
 
-namespace dom {
-class TimeRanges;
-}
-
 // Estimates the buffered ranges of a MediaResource using a simple
 // (byteOffset/length)*duration method. Probably inaccurate, but won't
 // do file I/O, and can be used when we don't have detailed knowledge
 // of the byte->time mapping of a resource. aDurationUsecs is the duration
 // of the media in microseconds. Estimated buffered ranges are stored in
 // aOutBuffered. Ranges are 0-normalized, i.e. in the range of (0,duration].
-void GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
-                                    int64_t aDurationUsecs,
-                                    mozilla::dom::TimeRanges* aOutBuffered);
+media::TimeIntervals GetEstimatedBufferedTimeRanges(mozilla::MediaResource* aStream,
+                                                    int64_t aDurationUsecs);
 
 // Converts from number of audio frames (aFrames) to microseconds, given
 // the specified audio rate (aRate). Stores result in aOutUsecs. Returns true
 // if the operation succeeded, or false if there was an integer overflow
 // while calculating the conversion.
 CheckedInt64 FramesToUsecs(int64_t aFrames, uint32_t aRate);
 
 // Converts from microseconds (aUsecs) to number of audio frames, given the
--- a/dom/media/android/AndroidMediaPluginHost.cpp
+++ b/dom/media/android/AndroidMediaPluginHost.cpp
@@ -1,15 +1,14 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "mozilla/Preferences.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "MediaResource.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "AndroidMediaPluginHost.h"
 #include "nsXPCOMStrings.h"
 #include "nsISeekableStream.h"
 #include "AndroidMediaReader.h"
 #include "nsIGfxInfo.h"
 #include "gfxCrashReporterUtils.h"
--- a/dom/media/android/AndroidMediaReader.cpp
+++ b/dom/media/android/AndroidMediaReader.cpp
@@ -1,16 +1,15 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "AndroidMediaReader.h"
 #include "mozilla/TimeStamp.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/gfx/Point.h"
 #include "MediaResource.h"
 #include "VideoUtils.h"
 #include "AndroidMediaDecoder.h"
 #include "AndroidMediaPluginHost.h"
 #include "MediaDecoderStateMachine.h"
 #include "ImageContainer.h"
 #include "AbstractMediaDecoder.h"
--- a/dom/media/fmp4/MP4Reader.cpp
+++ b/dom/media/fmp4/MP4Reader.cpp
@@ -13,17 +13,16 @@
 #include "nsSize.h"
 #include "VideoUtils.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "SharedThreadPool.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/Telemetry.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mp4_demuxer/AnnexB.h"
 #include "mp4_demuxer/H264.h"
 #include "SharedDecoderManager.h"
 #include "mp4_demuxer/MP4TrackDemuxer.h"
 #include <algorithm>
 
 #ifdef MOZ_EME
 #include "mozilla/CDMProxy.h"
@@ -1068,40 +1067,42 @@ MP4Reader::GetEvictionOffset(double aTim
   MonitorAutoLock mon(mDemuxerMonitor);
   if (!mIndexReady) {
     return 0;
   }
 
   return mDemuxer->GetEvictionOffset(aTime * 1000000.0);
 }
 
-nsresult
-MP4Reader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals
+MP4Reader::GetBuffered()
 {
   MonitorAutoLock mon(mDemuxerMonitor);
+  media::TimeIntervals buffered;
   if (!mIndexReady) {
-    return NS_OK;
+    return buffered;
   }
   UpdateIndex();
   MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
 
   AutoPinned<MediaResource> resource(mDecoder->GetResource());
   nsTArray<MediaByteRange> ranges;
   nsresult rv = resource->GetCachedRanges(ranges);
 
   if (NS_SUCCEEDED(rv)) {
     nsTArray<Interval<Microseconds>> timeRanges;
     mDemuxer->ConvertByteRangesToTime(ranges, &timeRanges);
     for (size_t i = 0; i < timeRanges.Length(); i++) {
-      aBuffered->Add((timeRanges[i].start - mStartTime) / 1000000.0,
-                     (timeRanges[i].end - mStartTime) / 1000000.0);
+      buffered += media::TimeInterval(
+        media::TimeUnit::FromMicroseconds(timeRanges[i].start - mStartTime),
+        media::TimeUnit::FromMicroseconds(timeRanges[i].end - mStartTime));
     }
   }
 
-  return NS_OK;
+  return buffered;
 }
 
 bool MP4Reader::IsDormantNeeded()
 {
 #if defined(MP4_READER_DORMANT_HEURISTIC)
   return mDormantEnabled;
 #else
   return false;
--- a/dom/media/fmp4/MP4Reader.h
+++ b/dom/media/fmp4/MP4Reader.h
@@ -14,20 +14,16 @@
 #include "demuxer/TrackDemuxer.h"
 #include "MediaTaskQueue.h"
 
 #include <deque>
 #include "mozilla/Monitor.h"
 
 namespace mozilla {
 
-namespace dom {
-class TimeRanges;
-}
-
 typedef std::deque<nsRefPtr<MediaRawData>> MediaSampleQueue;
 
 class MP4Stream;
 
 #if defined(MOZ_GONK_MEDIACODEC) || defined(XP_WIN) || defined(MOZ_APPLEMEDIA) || defined(MOZ_FFMPEG)
 #define MP4_READER_DORMANT_HEURISTIC
 #else
 #undef MP4_READER_DORMANT_HEURISTIC
@@ -63,17 +59,17 @@ public:
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
 
   virtual bool IsMediaSeekable() override;
 
   virtual int64_t GetEvictionOffset(double aTime) override;
   virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength, int64_t aOffset) override;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   // For Media Resource Management
   virtual void SetIdle() override;
   virtual bool IsDormantNeeded() override;
   virtual void ReleaseMediaResources() override;
   virtual void SetSharedDecoderManager(SharedDecoderManager* aManager)
     override;
 
--- a/dom/media/gstreamer/GStreamerReader-0.10.cpp
+++ b/dom/media/gstreamer/GStreamerReader-0.10.cpp
@@ -1,17 +1,16 @@
 #include "nsError.h"
 #include "MediaDecoderStateMachine.h"
 #include "AbstractMediaDecoder.h"
 #include "MediaResource.h"
 #include "GStreamerReader.h"
 #include "GStreamerMozVideoBuffer.h"
 #include "GStreamerFormatHelper.h"
 #include "VideoUtils.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/Endian.h"
 #include "mozilla/Preferences.h"
 
 using namespace mozilla;
 using mozilla::layers::PlanarYCbCrImage;
 using mozilla::layers::ImageContainer;
 
 GstFlowReturn GStreamerReader::AllocateVideoBufferCb(GstPad* aPad,
--- a/dom/media/gstreamer/GStreamerReader.cpp
+++ b/dom/media/gstreamer/GStreamerReader.cpp
@@ -10,17 +10,16 @@
 #include "AbstractMediaDecoder.h"
 #include "MediaResource.h"
 #include "GStreamerReader.h"
 #if GST_VERSION_MAJOR >= 1
 #include "GStreamerAllocator.h"
 #endif
 #include "GStreamerFormatHelper.h"
 #include "VideoUtils.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/Endian.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/unused.h"
 #include "GStreamerLoader.h"
 #include "gfx2DGlue.h"
 
 namespace mozilla {
 
@@ -863,20 +862,21 @@ GStreamerReader::Seek(int64_t aTarget, i
   GstMessage* message = gst_bus_timed_pop_filtered(mBus, GST_CLOCK_TIME_NONE,
                (GstMessageType)(GST_MESSAGE_ASYNC_DONE | GST_MESSAGE_ERROR));
   gst_message_unref(message);
   LOG(PR_LOG_DEBUG, "seek completed");
 
   return SeekPromise::CreateAndResolve(aTarget, __func__);
 }
 
-nsresult GStreamerReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals GStreamerReader::GetBuffered()
 {
+  media::TimeIntervals buffered;
   if (!mInfo.HasValidMedia()) {
-    return NS_OK;
+    return buffered;
   }
 
 #if GST_VERSION_MAJOR == 0
   GstFormat format = GST_FORMAT_TIME;
 #endif
   AutoPinned<MediaResource> resource(mDecoder->GetResource());
   nsTArray<MediaByteRange> ranges;
   resource->GetCachedRanges(ranges);
@@ -885,21 +885,22 @@ nsresult GStreamerReader::GetBuffered(do
     /* fast path for local or completely cached files */
     gint64 duration = 0;
 
     {
       ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
       duration = mDecoder->GetMediaDuration();
     }
 
-    double end = (double) duration / GST_MSECOND;
     LOG(PR_LOG_DEBUG, "complete range [0, %f] for [0, %li]",
-          end, GetDataLength());
-    aBuffered->Add(0, end);
-    return NS_OK;
+        (double) duration / GST_MSECOND, GetDataLength());
+    buffered +=
+      media::TimeInterval(media::TimeUnit::FromMicroseconds(0),
+                          media::TimeUnit::FromMicroseconds(duration));
+    return buffered;
   }
 
   for(uint32_t index = 0; index < ranges.Length(); index++) {
     int64_t startOffset = ranges[index].mStart;
     int64_t endOffset = ranges[index].mEnd;
     gint64 startTime, endTime;
 
 #if GST_VERSION_MAJOR >= 1
@@ -913,24 +914,26 @@ nsresult GStreamerReader::GetBuffered(do
     if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
       startOffset, &format, &startTime) || format != GST_FORMAT_TIME)
       continue;
     if (!gst_element_query_convert(GST_ELEMENT(mPlayBin), GST_FORMAT_BYTES,
       endOffset, &format, &endTime) || format != GST_FORMAT_TIME)
       continue;
 #endif
 
-    double start = (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND;
-    double end = (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND;
     LOG(PR_LOG_DEBUG, "adding range [%f, %f] for [%li %li] size %li",
-          start, end, startOffset, endOffset, GetDataLength());
-    aBuffered->Add(start, end);
+        (double) GST_TIME_AS_USECONDS (startTime) / GST_MSECOND,
+        (double) GST_TIME_AS_USECONDS (endTime) / GST_MSECOND,
+        startOffset, endOffset, GetDataLength());
+    buffered +=
+      media::TimeInterval(media::TimeUnit::FromMicroseconds(GST_TIME_AS_USECONDS(startTime)),
+                          media::TimeUnit::FromMicroseconds(GST_TIME_AS_USECONDS(endTime)));
   }
 
-  return NS_OK;
+  return buffered;
 }
 
 void GStreamerReader::ReadAndPushData(guint aLength)
 {
   MediaResource* resource = mDecoder->GetResource();
   NS_ASSERTION(resource, "Decoder has no media resource");
   int64_t offset1 = resource->Tell();
   unused << offset1;
--- a/dom/media/gstreamer/GStreamerReader.h
+++ b/dom/media/gstreamer/GStreamerReader.h
@@ -24,20 +24,16 @@
 #include "MP3FrameParser.h"
 #include "ImageContainer.h"
 #include "nsRect.h"
 
 struct GstURIDecodeBin;
 
 namespace mozilla {
 
-namespace dom {
-class TimeRanges;
-}
-
 class AbstractMediaDecoder;
 
 class GStreamerReader : public MediaDecoderReader
 {
   typedef gfx::IntRect IntRect;
 
 public:
   explicit GStreamerReader(AbstractMediaDecoder* aDecoder);
@@ -48,17 +44,17 @@ public:
   virtual nsresult ResetDecode() override;
   virtual bool DecodeAudioData() override;
   virtual bool DecodeVideoFrame(bool &aKeyframeSkip,
                                 int64_t aTimeThreshold) override;
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags) override;
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   virtual void NotifyDataArrived(const char *aBuffer,
                                  uint32_t aLength,
                                  int64_t aOffset) override;
 
   virtual bool HasAudio() override {
     return mInfo.HasAudio();
   }
--- a/dom/media/gtest/TestMP4Reader.cpp
+++ b/dom/media/gtest/TestMP4Reader.cpp
@@ -5,17 +5,17 @@
 
 #include "gtest/gtest.h"
 #include "MP4Reader.h"
 #include "MP4Decoder.h"
 #include "SharedThreadPool.h"
 #include "MockMediaResource.h"
 #include "MockMediaDecoderOwner.h"
 #include "mozilla/Preferences.h"
-#include "mozilla/dom/TimeRanges.h"
+#include "TimeUnits.h"
 
 using namespace mozilla;
 using namespace mozilla::dom;
 
 class TestBinding
 {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TestBinding);
@@ -82,67 +82,52 @@ private:
 TEST(MP4Reader, BufferedRange)
 {
   nsRefPtr<TestBinding> b = new TestBinding();
   b->Init();
 
   // Video 3-4 sec, audio 2.986666-4.010666 sec
   b->resource->MockAddBufferedRange(248400, 327455);
 
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
-  EXPECT_EQ(1U, ranges->Length());
-  double start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(0, &start));
-  EXPECT_NEAR(270000 / 90000.0, start, 0.000001);
-  double end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(0, &end));
-  EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
+  media::TimeIntervals ranges = b->reader->GetBuffered();
+  EXPECT_EQ(1U, ranges.Length());
+  EXPECT_NEAR(270000 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
+  EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
 }
 
 TEST(MP4Reader, BufferedRangeMissingLastByte)
 {
   nsRefPtr<TestBinding> b = new TestBinding();
   b->Init();
 
   // Dropping the last byte of the video
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(248400, 324912);
   b->resource->MockAddBufferedRange(324913, 327455);
 
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
-  EXPECT_EQ(1U, ranges->Length());
-  double start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(0, &start));
-  EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
-  double end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(0, &end));
-  EXPECT_NEAR(357000 / 90000.0, end, 0.000001);
+  media::TimeIntervals ranges = b->reader->GetBuffered();
+  EXPECT_EQ(1U, ranges.Length());
+  EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
+  EXPECT_NEAR(357000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
 }
 
 TEST(MP4Reader, BufferedRangeSyncFrame)
 {
   nsRefPtr<TestBinding> b = new TestBinding();
   b->Init();
 
   // Check that missing the first byte at 2 seconds skips right through to 3
   // seconds because of a missing sync frame
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(146336, 327455);
 
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
-  EXPECT_EQ(1U, ranges->Length());
-  double start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(0, &start));
-  EXPECT_NEAR(270000.0 / 90000.0, start, 0.000001);
-  double end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(0, &end));
-  EXPECT_NEAR(360000 / 90000.0, end, 0.000001);
+  media::TimeIntervals ranges = b->reader->GetBuffered();
+  EXPECT_EQ(1U, ranges.Length());
+  EXPECT_NEAR(270000.0 / 90000.0, ranges.Start(0).ToSeconds(), 0.000001);
+  EXPECT_NEAR(360000 / 90000.0, ranges.End(0).ToSeconds(), 0.000001);
 }
 
 TEST(MP4Reader, CompositionOrder)
 {
   nsRefPtr<TestBinding> b = new TestBinding("mediasource_test.mp4");
   b->Init();
 
   // The first 5 video samples of this file are:
@@ -182,33 +167,24 @@ TEST(MP4Reader, CompositionOrder)
   b->resource->MockAddBufferedRange(8962, 9563);
   b->resource->MockAddBufferedRange(9734, 10314);
   b->resource->MockAddBufferedRange(10314, 10895);
   b->resource->MockAddBufferedRange(11207, 11787);
   b->resource->MockAddBufferedRange(12035, 12616);
   b->resource->MockAddBufferedRange(12616, 13196);
   b->resource->MockAddBufferedRange(13220, 13901);
 
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
-  EXPECT_EQ(2U, ranges->Length());
+  media::TimeIntervals ranges = b->reader->GetBuffered();
+  EXPECT_EQ(2U, ranges.Length());
 
-  double start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(0, &start));
-  EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
-  double end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(0, &end));
-  EXPECT_NEAR(332.0 / 2500.0, end, 0.000001);
+  EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
+  EXPECT_NEAR(332.0 / 2500.0, ranges.End(0).ToSeconds(), 0.000001);
 
-  start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(1, &start));
-  EXPECT_NEAR(581.0 / 2500.0, start, 0.000001);
-  end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(1, &end));
-  EXPECT_NEAR(11255.0 / 44100.0, end, 0.000001);
+  EXPECT_NEAR(581.0 / 2500.0, ranges.Start(1).ToSeconds(), 0.000001);
+  EXPECT_NEAR(11255.0 / 44100.0, ranges.End(1).ToSeconds(), 0.000001);
 }
 
 TEST(MP4Reader, Normalised)
 {
   nsRefPtr<TestBinding> b = new TestBinding("mediasource_test.mp4");
   b->Init();
 
   // The first 5 video samples of this file are:
@@ -232,19 +208,14 @@ TEST(MP4Reader, Normalised)
   //     8 11207   580  7198      1014  Yes
   //     9 12035   581  8212      1014  Yes
   //    10 12616   580  9226      1015  Yes
   //    11 13220   581  10241     1014  Yes
 
   b->resource->MockClearBufferedRanges();
   b->resource->MockAddBufferedRange(48, 13901);
 
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  EXPECT_EQ(NS_OK, b->reader->GetBuffered(ranges));
-  EXPECT_EQ(1U, ranges->Length());
+  media::TimeIntervals ranges = b->reader->GetBuffered();
+  EXPECT_EQ(1U, ranges.Length());
 
-  double start = 0;
-  EXPECT_EQ(NS_OK, ranges->Start(0, &start));
-  EXPECT_NEAR(166.0 / 2500.0, start, 0.000001);
-  double end = 0;
-  EXPECT_EQ(NS_OK, ranges->End(0, &end));
-  EXPECT_NEAR(11255.0 / 44100.0, end, 0.000001);
+  EXPECT_NEAR(166.0 / 2500.0, ranges.Start(0).ToSeconds(), 0.000001);
+  EXPECT_NEAR(11255.0 / 44100.0, ranges.End(0).ToSeconds(), 0.000001);
 }
--- a/dom/media/mediasource/MediaSource.cpp
+++ b/dom/media/mediasource/MediaSource.cpp
@@ -11,17 +11,16 @@
 #include "MediaSourceUtils.h"
 #include "SourceBuffer.h"
 #include "SourceBufferList.h"
 #include "mozilla/ErrorResult.h"
 #include "mozilla/FloatingPoint.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/dom/BindingDeclarations.h"
 #include "mozilla/dom/HTMLMediaElement.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/mozalloc.h"
 #include "nsContentTypeParser.h"
 #include "nsContentUtils.h"
 #include "nsDebug.h"
 #include "nsError.h"
 #include "nsIEffectiveTLDService.h"
 #include "nsIRunnable.h"
 #include "nsIScriptObjectPrincipal.h"
--- a/dom/media/mediasource/MediaSourceDecoder.cpp
+++ b/dom/media/mediasource/MediaSourceDecoder.cpp
@@ -2,17 +2,16 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "MediaSourceDecoder.h"
 
 #include "prlog.h"
 #include "mozilla/dom/HTMLMediaElement.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaSource.h"
 #include "MediaSourceReader.h"
 #include "MediaSourceResource.h"
 #include "MediaSourceUtils.h"
 #include "SourceBufferDecoder.h"
 #include "VideoUtils.h"
 
@@ -58,36 +57,40 @@ MediaSourceDecoder::Load(nsIStreamListen
 
   nsresult rv = GetStateMachine()->Init(nullptr);
   NS_ENSURE_SUCCESS(rv, rv);
 
   SetStateMachineParameters();
   return ScheduleStateMachine();
 }
 
-nsresult
-MediaSourceDecoder::GetSeekable(dom::TimeRanges* aSeekable)
+media::TimeIntervals
+MediaSourceDecoder::GetSeekable()
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (!mMediaSource) {
-    return NS_ERROR_FAILURE;
+    NS_WARNING("MediaSource element isn't attached");
+    return media::TimeIntervals::Invalid();
   }
 
+  media::TimeIntervals seekable;
   double duration = mMediaSource->Duration();
   if (IsNaN(duration)) {
     // Return empty range.
   } else if (duration > 0 && mozilla::IsInfinite(duration)) {
-    nsRefPtr<dom::TimeRanges> bufferedRanges = new dom::TimeRanges();
-    mReader->GetBuffered(bufferedRanges);
-    aSeekable->Add(bufferedRanges->GetStartTime(), bufferedRanges->GetEndTime());
+    media::TimeIntervals buffered = mReader->GetBuffered();
+    if (buffered.Length()) {
+      seekable += media::TimeInterval(buffered.GetStart(), buffered.GetEnd());
+    }
   } else {
-    aSeekable->Add(0, duration);
+    seekable += media::TimeInterval(media::TimeUnit::FromSeconds(0),
+                                    media::TimeUnit::FromSeconds(duration));
   }
-  MSE_DEBUG("ranges=%s", DumpTimeRanges(aSeekable).get());
-  return NS_OK;
+  MSE_DEBUG("ranges=%s", DumpTimeRanges(seekable).get());
+  return seekable;
 }
 
 void
 MediaSourceDecoder::Shutdown()
 {
   MSE_DEBUG("Shutdown");
   // Detach first so that TrackBuffers are unused on the main thread when
   // shut down on the decode task queue.
@@ -331,32 +334,34 @@ MediaSourceDecoder::GetDuration()
 
 already_AddRefed<SourceBufferDecoder>
 MediaSourceDecoder::SelectDecoder(int64_t aTarget,
                                   int64_t aTolerance,
                                   const nsTArray<nsRefPtr<SourceBufferDecoder>>& aTrackDecoders)
 {
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
 
+  media::TimeUnit target{media::TimeUnit::FromMicroseconds(aTarget)};
+  media::TimeUnit tolerance{media::TimeUnit::FromMicroseconds(aTolerance + aTarget)};
+
+  // aTolerance gives a slight bias toward the start of a range only.
   // Consider decoders in order of newest to oldest, as a newer decoder
   // providing a given buffered range is expected to replace an older one.
   for (int32_t i = aTrackDecoders.Length() - 1; i >= 0; --i) {
     nsRefPtr<SourceBufferDecoder> newDecoder = aTrackDecoders[i];
 
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    newDecoder->GetBuffered(ranges);
-    if (ranges->Find(double(aTarget) / USECS_PER_S,
-                     double(aTolerance) / USECS_PER_S) == dom::TimeRanges::NoIndex) {
-      MSE_DEBUGV("SelectDecoder(%lld fuzz:%lld) newDecoder=%p (%d/%d) target not in ranges=%s",
-                 aTarget, aTolerance, newDecoder.get(), i+1,
-                 aTrackDecoders.Length(), DumpTimeRanges(ranges).get());
-      continue;
+    media::TimeIntervals ranges = newDecoder->GetBuffered();
+    for (uint32_t j = 0; j < ranges.Length(); j++) {
+      if (target < ranges.End(j) && tolerance >= ranges.Start(j)) {
+        return newDecoder.forget();
+      }
     }
-
-    return newDecoder.forget();
+    MSE_DEBUGV("SelectDecoder(%lld fuzz:%lld) newDecoder=%p (%d/%d) target not in ranges=%s",
+               aTarget, aTolerance, newDecoder.get(), i+1,
+               aTrackDecoders.Length(), DumpTimeRanges(ranges).get());
   }
 
   return nullptr;
 }
 
 #undef MSE_DEBUG
 #undef MSE_DEBUGV
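
The SelectDecoder() rewrite above replaces dom::TimeRanges::Find() with an explicit test per buffered range: a decoder matches when aTarget is before the range's end and aTarget + aTolerance reaches at least its start, so the fuzz only helps a target falling slightly before a range, never one past its end. A minimal standalone model of that test; the Matches() helper is invented for this sketch:

    #include <cassert>
    #include <cstdint>

    // Model of the range test in SelectDecoder() above: "tolerance" there is
    // aTarget + aTolerance, so the fuzz only extends the match toward the
    // start of a buffered range, not past its end.
    static bool Matches(int64_t aTarget, int64_t aTolerance,
                        int64_t aStart, int64_t aEnd) {
      return aTarget < aEnd && aTarget + aTolerance >= aStart;
    }

    int main() {
      // Buffered range [2,000,000 us, 4,000,000 us), fuzz of 125,000 us.
      assert(Matches(1900000, 125000, 2000000, 4000000));   // just before start: ok
      assert(!Matches(4050000, 125000, 2000000, 4000000));  // past the end: rejected
      assert(Matches(3000000, 125000, 2000000, 4000000));   // inside: ok
      return 0;
    }
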
 
--- a/dom/media/mediasource/MediaSourceDecoder.h
+++ b/dom/media/mediasource/MediaSourceDecoder.h
@@ -33,17 +33,17 @@ class MediaSource;
 class MediaSourceDecoder : public MediaDecoder
 {
 public:
   explicit MediaSourceDecoder(dom::HTMLMediaElement* aElement);
 
   virtual MediaDecoder* Clone() override;
   virtual MediaDecoderStateMachine* CreateStateMachine() override;
   virtual nsresult Load(nsIStreamListener**, MediaDecoder*) override;
-  virtual nsresult GetSeekable(dom::TimeRanges* aSeekable) override;
+  virtual media::TimeIntervals GetSeekable() override;
 
   virtual void Shutdown() override;
 
   static already_AddRefed<MediaResource> CreateResource(nsIPrincipal* aPrincipal = nullptr);
 
   void AttachMediaSource(dom::MediaSource* aMediaSource);
   void DetachMediaSource();
 
--- a/dom/media/mediasource/MediaSourceReader.cpp
+++ b/dom/media/mediasource/MediaSourceReader.cpp
@@ -2,17 +2,16 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "MediaSourceReader.h"
 
 #include <cmath>
 #include "prlog.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "DecoderTraits.h"
 #include "MediaDecoderOwner.h"
 #include "MediaFormatReader.h"
 #include "MediaSourceDecoder.h"
 #include "MediaSourceUtils.h"
 #include "SourceBufferDecoder.h"
 #include "TrackBuffer.h"
 #include "nsPrintfCString.h"
@@ -31,18 +30,16 @@ extern PRLogModuleInfo* GetMediaSourceLo
 
 // When a stream hits EOS it needs to decide what other stream to switch to. Due
 // to inaccuracies in determining buffer end frames (Bug 1065207) and rounding
 // issues we use a fuzz factor to determine the end time of this stream for
 // switching to the new stream. This value is based on the end of frame
 // default value used in Blink, kDefaultBufferDurationInMs.
 #define EOS_FUZZ_US 125000
 
-using mozilla::dom::TimeRanges;
-
 namespace mozilla {
 
 MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
   : MediaDecoderReader(aDecoder)
   , mLastAudioTime(0)
   , mLastVideoTime(0)
   , mPendingSeekTime(-1)
   , mWaitingForSeekData(false)
@@ -241,20 +238,19 @@ MediaSourceReader::OnAudioDecoded(AudioD
 // end time.
 // Reader switching is based on the buffered end time though so they can be
 // quite different. By using the EOS_FUZZ_US and the buffered end time we
 // attempt to account for this difference.
 static void
 AdjustEndTime(int64_t* aEndTime, SourceBufferDecoder* aDecoder)
 {
   if (aDecoder) {
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    aDecoder->GetBuffered(ranges);
-    if (ranges->Length() > 0) {
-      int64_t end = std::ceil(ranges->GetEndTime() * USECS_PER_S);
+    media::TimeIntervals ranges = aDecoder->GetBuffered();
+    if (ranges.Length()) {
+      int64_t end = ranges.GetEnd().ToMicroseconds();
       *aEndTime = std::max(*aEndTime, end);
     }
   }
 }
 
 void
 MediaSourceReader::OnAudioNotDecoded(NotDecodedReason aReason)
 {
@@ -596,19 +592,18 @@ MediaSourceReader::SwitchAudioSource(int
   if (newDecoder == mAudioSourceDecoder) {
     return SOURCE_EXISTING;
   }
   mAudioSourceDecoder = newDecoder;
   if (usedFuzz) {
     // A decoder buffered range is continuous. We would have failed the exact
     // search but succeeded the fuzzy one if our target was shortly before
     // start time.
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    newDecoder->GetBuffered(ranges);
-    int64_t startTime = ranges->GetStartTime() * USECS_PER_S;
+    media::TimeIntervals ranges = newDecoder->GetBuffered();
+    int64_t startTime = ranges.GetStart().ToMicroseconds();
     if (*aTarget < startTime) {
       *aTarget = startTime;
     }
   }
   MSE_DEBUGV("switched decoder to %p (fuzz:%d)",
              mAudioSourceDecoder.get(), usedFuzz);
   return SOURCE_NEW;
 }
@@ -642,19 +637,18 @@ MediaSourceReader::SwitchVideoSource(int
   if (newDecoder == mVideoSourceDecoder) {
     return SOURCE_EXISTING;
   }
   mVideoSourceDecoder = newDecoder;
   if (usedFuzz) {
     // A decoder buffered range is continuous. We would have failed the exact
     // search but succeeded the fuzzy one if our target was shortly before
     // start time.
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    newDecoder->GetBuffered(ranges);
-    int64_t startTime = ranges->GetStartTime() * USECS_PER_S;
+    media::TimeIntervals ranges = newDecoder->GetBuffered();
+    int64_t startTime = ranges.GetStart().ToMicroseconds();
     if (*aTarget < startTime) {
       *aTarget = startTime;
     }
   }
   MSE_DEBUGV("switched decoder to %p (fuzz:%d)",
              mVideoSourceDecoder.get(), usedFuzz);
   return SOURCE_NEW;
 }
@@ -998,78 +992,72 @@ MediaSourceReader::DoVideoSeek()
   GetVideoReader()->ResetDecode();
   mVideoSeekRequest.Begin(GetVideoReader()->Seek(GetReaderVideoTime(seekTime), 0)
                           ->RefableThen(GetTaskQueue(), __func__, this,
                                         &MediaSourceReader::OnVideoSeekCompleted,
                                         &MediaSourceReader::OnVideoSeekFailed));
   MSE_DEBUG("reader=%p", GetVideoReader());
 }
 
-nsresult
-MediaSourceReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals
+MediaSourceReader::GetBuffered()
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  MOZ_ASSERT(aBuffered->Length() == 0);
-  if (mTrackBuffers.IsEmpty()) {
-    return NS_OK;
+  media::TimeIntervals buffered;
+
+  media::TimeUnit highestEndTime;
+  nsTArray<media::TimeIntervals> activeRanges;
+  // Must set the capacity of the nsTArray first: bug #1164444
+  activeRanges.SetCapacity(mTrackBuffers.Length());
+
+  for (const auto& trackBuffer : mTrackBuffers) {
+    activeRanges.AppendElement(trackBuffer->Buffered());
+    highestEndTime = std::max(highestEndTime, activeRanges.LastElement().GetEnd());
   }
 
-  double highestEndTime = 0;
+  buffered +=
+    media::TimeInterval(media::TimeUnit::FromMicroseconds(0), highestEndTime);
 
-  nsTArray<nsRefPtr<TimeRanges>> activeRanges;
-  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
-    nsRefPtr<TimeRanges> r = new TimeRanges();
-    mTrackBuffers[i]->Buffered(r);
-    activeRanges.AppendElement(r);
-    highestEndTime = std::max(highestEndTime, activeRanges.LastElement()->GetEndTime());
-  }
-
-  TimeRanges* intersectionRanges = aBuffered;
-  intersectionRanges->Add(0, highestEndTime);
-
-  for (uint32_t i = 0; i < activeRanges.Length(); ++i) {
-    TimeRanges* sourceRanges = activeRanges[i];
-
-    if (IsEnded() && sourceRanges->GetEndTime() >= 0) {
+  for (auto& range : activeRanges) {
+    if (IsEnded() && range.Length()) {
       // Set the end time on the last range to highestEndTime by adding a
       // new range spanning the current end time to highestEndTime, which
       // Normalize() will then merge with the old last range.
-      sourceRanges->Add(sourceRanges->GetEndTime(), highestEndTime);
-      sourceRanges->Normalize();
+      range +=
+        media::TimeInterval(range.GetEnd(), highestEndTime);
     }
-
-    intersectionRanges->Intersection(sourceRanges);
+    buffered.Intersection(range);
   }
 
-  MSE_DEBUG("ranges=%s", DumpTimeRanges(intersectionRanges).get());
-  return NS_OK;
+  MSE_DEBUG("ranges=%s", DumpTimeRanges(buffered).get());
+  return buffered;
 }
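
The loop above is the TimeIntervals form of the old "intersect every track's ranges" logic: start from [0, highestEndTime], pad each ended track up to highestEndTime, then intersect. A rough sketch with hypothetical numbers, using only the operations this function relies on:

    #include "TimeUnits.h"
    using namespace mozilla;

    // Hypothetical: audio buffered over [0, 10)s, video over [0, 9.5)s.
    static media::TimeIntervals SketchBufferedIntersection(bool aEnded)
    {
      media::TimeUnit highestEnd = media::TimeUnit::FromSeconds(10);

      media::TimeIntervals audio;
      audio += media::TimeInterval(media::TimeUnit::FromSeconds(0), highestEnd);

      media::TimeIntervals video;
      video += media::TimeInterval(media::TimeUnit::FromSeconds(0),
                                   media::TimeUnit::FromSeconds(9.5));
      if (aEnded) {
        // Pad the last range up to the highest end time, as GetBuffered() does.
        video += media::TimeInterval(video.GetEnd(), highestEnd);
      }

      media::TimeIntervals buffered;
      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(0), highestEnd);
      buffered.Intersection(audio);
      buffered.Intersection(video);
      // Ended: [0, 10)s. Not ended: [0, 9.5)s, i.e. limited by the video track.
      return buffered;
    }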
 
 already_AddRefed<SourceBufferDecoder>
 MediaSourceReader::FirstDecoder(MediaData::Type aType)
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   TrackBuffer* trackBuffer =
     aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
   MOZ_ASSERT(trackBuffer);
   const nsTArray<nsRefPtr<SourceBufferDecoder>>& decoders = trackBuffer->Decoders();
   if (decoders.IsEmpty()) {
     return nullptr;
   }
 
   nsRefPtr<SourceBufferDecoder> firstDecoder;
-  double lowestStartTime = PositiveInfinity<double>();
+  media::TimeUnit lowestStartTime{media::TimeUnit::FromInfinity()};
 
   for (uint32_t i = 0; i < decoders.Length(); ++i) {
-    nsRefPtr<TimeRanges> r = new TimeRanges();
-    decoders[i]->GetBuffered(r);
-    double start = r->GetStartTime();
-    if (start < 0) {
+    media::TimeIntervals r = decoders[i]->GetBuffered();
+    if (!r.Length()) {
       continue;
     }
+    media::TimeUnit start = r.GetStart();
     if (start < lowestStartTime) {
       firstDecoder = decoders[i];
       lowestStartTime = start;
     }
   }
   return firstDecoder.forget();
 }
 
@@ -1219,31 +1207,29 @@ bool
 MediaSourceReader::IsNearEnd(MediaData::Type aType, int64_t aTime)
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   if (!mEnded) {
     return false;
   }
   TrackBuffer* trackBuffer =
     aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  trackBuffer->Buffered(buffered);
-  return aTime >= (buffered->GetEndTime() * USECS_PER_S - EOS_FUZZ_US);
+  media::TimeIntervals buffered = trackBuffer->Buffered();
+  return aTime >= buffered.GetEnd().ToMicroseconds() - EOS_FUZZ_US;
 }
 
 int64_t
 MediaSourceReader::LastSampleTime(MediaData::Type aType)
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
 
   TrackBuffer* trackBuffer =
-  aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  trackBuffer->Buffered(buffered);
-  return buffered->GetEndTime() * USECS_PER_S - 1;
+    aType == MediaData::AUDIO_DATA ? mAudioTrack : mVideoTrack;
+  media::TimeIntervals buffered = trackBuffer->Buffered();
+  return buffered.GetEnd().ToMicroseconds() - 1;
 }
 
 void
 MediaSourceReader::SetMediaSourceDuration(double aDuration)
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   mMediaSourceDuration = aDuration;
 }
@@ -1254,32 +1240,30 @@ MediaSourceReader::GetMozDebugReaderData
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
   nsAutoCString result;
   result += nsPrintfCString("Dumping data for reader %p:\n", this);
   if (mAudioTrack) {
     result += nsPrintfCString("\tDumping Audio Track Decoders: - mLastAudioTime: %f\n", double(mLastAudioTime) / USECS_PER_S);
     for (int32_t i = mAudioTrack->Decoders().Length() - 1; i >= 0; --i) {
       nsRefPtr<MediaDecoderReader> newReader = mAudioTrack->Decoders()[i]->GetReader();
 
-      nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-      mAudioTrack->Decoders()[i]->GetBuffered(ranges);
+      media::TimeIntervals ranges = mAudioTrack->Decoders()[i]->GetBuffered();
       result += nsPrintfCString("\t\tReader %d: %p ranges=%s active=%s size=%lld\n",
                                 i, newReader.get(), DumpTimeRanges(ranges).get(),
                                 newReader.get() == GetAudioReader() ? "true" : "false",
                                 mAudioTrack->Decoders()[i]->GetResource()->GetSize());
     }
   }
 
   if (mVideoTrack) {
     result += nsPrintfCString("\tDumping Video Track Decoders - mLastVideoTime: %f\n", double(mLastVideoTime) / USECS_PER_S);
     for (int32_t i = mVideoTrack->Decoders().Length() - 1; i >= 0; --i) {
       nsRefPtr<MediaDecoderReader> newReader = mVideoTrack->Decoders()[i]->GetReader();
 
-      nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-      mVideoTrack->Decoders()[i]->GetBuffered(ranges);
+      media::TimeIntervals ranges = mVideoTrack->Decoders()[i]->GetBuffered();
       result += nsPrintfCString("\t\tReader %d: %p ranges=%s active=%s size=%lld\n",
                                 i, newReader.get(), DumpTimeRanges(ranges).get(),
                                 newReader.get() == GetVideoReader() ? "true" : "false",
                                 mVideoTrack->Decoders()[i]->GetResource()->GetSize());
     }
   }
   aString += NS_ConvertUTF8toUTF16(result);
 }
--- a/dom/media/mediasource/MediaSourceReader.h
+++ b/dom/media/mediasource/MediaSourceReader.h
@@ -107,17 +107,17 @@ public:
   nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) override;
   void ReadUpdatedMetadata(MediaInfo* aInfo) override;
   nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
 
   nsresult ResetDecode() override;
 
   // Acquires the decoder monitor, and is thus callable on any thread.
-  nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  media::TimeIntervals GetBuffered() override;
 
   already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType,
                                                          int64_t aTimestampOffset /* microseconds */);
 
   void AddTrackBuffer(TrackBuffer* aTrackBuffer);
   void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
   void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer, const MediaInfo& aInfo);
 
--- a/dom/media/mediasource/MediaSourceUtils.cpp
+++ b/dom/media/mediasource/MediaSourceUtils.cpp
@@ -1,34 +1,34 @@
 /* -*- mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "MediaSourceUtils.h"
 
 #include "prlog.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "nsPrintfCString.h"
 
 namespace mozilla {
 
 nsCString
-DumpTimeRanges(dom::TimeRanges* aRanges)
+DumpTimeRanges(const media::TimeIntervals& aRanges)
 {
   nsCString dump;
 
   dump = "[";
 
-  for (uint32_t i = 0; i < aRanges->Length(); ++i) {
+  for (uint32_t i = 0; i < aRanges.Length(); ++i) {
     if (i > 0) {
       dump += ", ";
     }
-    ErrorResult dummy;
-    dump += nsPrintfCString("(%f, %f)", aRanges->Start(i, dummy), aRanges->End(i, dummy));
+    dump += nsPrintfCString("(%f, %f)",
+                            aRanges.Start(i).ToSeconds(),
+                            aRanges.End(i).ToSeconds());
   }
 
   dump += "]";
 
   return dump;
 }
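
For example, a set holding the (hypothetical) intervals [0, 1.5)s and [2, 3)s is dumped as:

    [(0.000000, 1.500000), (2.000000, 3.000000)]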
 
 } // namespace mozilla
--- a/dom/media/mediasource/MediaSourceUtils.h
+++ b/dom/media/mediasource/MediaSourceUtils.h
@@ -3,20 +3,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_MEDIASOURCEUTILS_H_
 #define MOZILLA_MEDIASOURCEUTILS_H_
 
 #include "nsString.h"
+#include "TimeUnits.h"
 
 namespace mozilla {
 
-namespace dom {
-  class TimeRanges;
-} // namespace dom
-
-nsCString DumpTimeRanges(dom::TimeRanges* aRanges);
+nsCString DumpTimeRanges(const media::TimeIntervals& aRanges);
 
 } // namespace mozilla
 
 #endif /* MOZILLA_MEDIASOURCEUTILS_H_ */
--- a/dom/media/mediasource/SourceBuffer.cpp
+++ b/dom/media/mediasource/SourceBuffer.cpp
@@ -135,28 +135,24 @@ SourceBuffer::SetTimestampOffset(double 
 already_AddRefed<TimeRanges>
 SourceBuffer::GetBuffered(ErrorResult& aRv)
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (!IsAttached()) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
-  nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  double highestEndTime = mTrackBuffer->Buffered(ranges);
-  if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended &&
-      highestEndTime > 0) {
-    // Set the end time on the last range to highestEndTime by adding a
-    // new range spanning the current end time to highestEndTime, which
-    // Normalize() will then merge with the old last range.
-    ranges->Add(ranges->GetEndTime(), highestEndTime);
-    ranges->Normalize();
-  }
+  // We only manage a single trackbuffer in our source buffer.
+  // As such, there's no need to adjust the end of the trackbuffers as per
+  // Step 4: http://w3c.github.io/media-source/index.html#widl-SourceBuffer-buffered
+  media::TimeIntervals ranges = mTrackBuffer->Buffered();
   MSE_DEBUGV("ranges=%s", DumpTimeRanges(ranges).get());
-  return ranges.forget();
+  nsRefPtr<dom::TimeRanges> tr = new dom::TimeRanges();
+  ranges.ToTimeRanges(tr);
+  return tr.forget();
 }
 
 void
 SourceBuffer::SetAppendWindowStart(double aAppendWindowStart, ErrorResult& aRv)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_API("SetAppendWindowStart(aAppendWindowStart=%f)", aAppendWindowStart);
   if (!IsAttached() || mUpdating) {
@@ -277,18 +273,18 @@ SourceBuffer::RangeRemoval(double aStart
   NS_DispatchToMainThread(task);
 }
 
 void
 SourceBuffer::DoRangeRemoval(double aStart, double aEnd)
 {
   MSE_DEBUG("DoRangeRemoval(%f, %f)", aStart, aEnd);
   if (mTrackBuffer && !IsInfinite(aStart)) {
-    mTrackBuffer->RangeRemoval(media::Microseconds::FromSeconds(aStart),
-                               media::Microseconds::FromSeconds(aEnd));
+    mTrackBuffer->RangeRemoval(media::TimeUnit::FromSeconds(aStart),
+                               media::TimeUnit::FromSeconds(aEnd));
   }
 }
 
 void
 SourceBuffer::Detach()
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_DEBUG("Detach");
--- a/dom/media/mediasource/SourceBufferDecoder.cpp
+++ b/dom/media/mediasource/SourceBufferDecoder.cpp
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaSourceUtils.h"
 #include "SourceBufferDecoder.h"
 #include "prlog.h"
 #include "AbstractMediaDecoder.h"
 #include "MediaDecoderReader.h"
-#include "mozilla/dom/TimeRanges.h"
 
 extern PRLogModuleInfo* GetMediaSourceLog();
 /* Polyfill __func__ on MSVC to pass to the log. */
 #ifdef _MSC_VER
 #define __func__ __FUNCTION__
 #endif
 
 #define MSE_DEBUG(arg, ...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG, ("SourceBufferDecoder(%p:%s)::%s: " arg, this, mResource->GetContentType().get(), __func__, ##__VA_ARGS__))
@@ -229,34 +228,33 @@ SourceBufferDecoder::NotifyDataArrived(c
 
   // XXX: Params make no sense to parent decoder as it relates to a
   // specific SourceBufferDecoder's data stream.  Pass bogus values here to
   // force parent decoder's state machine to recompute end time for
   // infinite length media.
   mParentDecoder->NotifyDataArrived(nullptr, 0, 0);
 }
 
-nsresult
-SourceBufferDecoder::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals
+SourceBufferDecoder::GetBuffered()
 {
-  nsresult rv = mReader->GetBuffered(aBuffered);
-  if (NS_FAILED(rv)) {
-    return rv;
+  media::TimeIntervals buffered = mReader->GetBuffered();
+  if (buffered.IsInvalid()) {
+    return buffered;
   }
 
   // Adjust buffered range according to timestamp offset.
-  aBuffered->Shift((double)mTimestampOffset / USECS_PER_S);
+  buffered.Shift(media::TimeUnit::FromMicroseconds(mTimestampOffset));
 
   if (!WasTrimmed()) {
-    return NS_OK;
+    return buffered;
   }
-  nsRefPtr<dom::TimeRanges> tr = new dom::TimeRanges();
-  tr->Add(0, mTrimmedOffset);
-  aBuffered->Intersection(tr);
-  return NS_OK;
+  media::TimeInterval filter(media::TimeUnit::FromSeconds(0),
+                             media::TimeUnit::FromSeconds(mTrimmedOffset));
+  return buffered.Intersection(filter);
 }
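
As a worked sketch of the two adjustments above (hypothetical values, calls limited to those used in this patch): a reader reporting [0, 4)s, a 2s timestamp offset and a trim at 5s yield [2, 5)s.

    #include "TimeUnits.h"
    using namespace mozilla;

    static media::TimeIntervals SketchOffsetAndTrim()
    {
      media::TimeIntervals buffered;
      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(0),
                                      media::TimeUnit::FromSeconds(4));

      // Apply a 2s timestamp offset: [0, 4)s -> [2, 6)s.
      buffered.Shift(media::TimeUnit::FromSeconds(2));

      // The decoder was trimmed at 5s, so clip against [0, 5)s: -> [2, 5)s.
      media::TimeInterval trimmed(media::TimeUnit::FromSeconds(0),
                                  media::TimeUnit::FromSeconds(5));
      return buffered.Intersection(trimmed);
    }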
 
 int64_t
 SourceBufferDecoder::ConvertToByteOffset(double aTime)
 {
   int64_t readerOffset =
     mReader->GetEvictionOffset(aTime - double(mTimestampOffset) / USECS_PER_S);
   if (readerOffset >= 0) {
--- a/dom/media/mediasource/SourceBufferDecoder.h
+++ b/dom/media/mediasource/SourceBufferDecoder.h
@@ -16,22 +16,16 @@
 #endif
 #include "mozilla/ReentrantMonitor.h"
 
 namespace mozilla {
 
 class MediaResource;
 class MediaDecoderReader;
 
-namespace dom {
-
-class TimeRanges;
-
-} // namespace dom
-
 class SourceBufferDecoder final : public AbstractMediaDecoder
 {
 public:
   // This class holds a weak pointer to MediaResource.  It's the responsibility
   // of the caller to manage the memory of the MediaResource object.
   SourceBufferDecoder(MediaResource* aResource, AbstractMediaDecoder* aParentDecoder,
                       int64_t aTimestampOffset /* microseconds */);
 
@@ -67,17 +61,17 @@ public:
   virtual bool HasInitializationData() final override;
 
   // SourceBufferResource specific interface below.
   int64_t GetTimestampOffset() const { return mTimestampOffset; }
   void SetTimestampOffset(int64_t aOffset)  { mTimestampOffset = aOffset; }
 
   // Warning: this mirrors GetBuffered in MediaDecoder, but this class's base is
   // AbstractMediaDecoder, which does not supply this interface.
-  nsresult GetBuffered(dom::TimeRanges* aBuffered);
+  media::TimeIntervals GetBuffered();
 
   void SetReader(MediaDecoderReader* aReader)
   {
     MOZ_ASSERT(!mReader);
     mReader = aReader;
   }
 
   MediaDecoderReader* GetReader() const
--- a/dom/media/mediasource/TrackBuffer.cpp
+++ b/dom/media/mediasource/TrackBuffer.cpp
@@ -268,34 +268,28 @@ TrackBuffer::AppendDataToCurrentResource
   return true;
 }
 
 class DecoderSorter
 {
 public:
   bool LessThan(SourceBufferDecoder* aFirst, SourceBufferDecoder* aSecond) const
   {
-    nsRefPtr<dom::TimeRanges> first = new dom::TimeRanges();
-    aFirst->GetBuffered(first);
+    media::TimeIntervals first = aFirst->GetBuffered();
+    media::TimeIntervals second = aSecond->GetBuffered();
 
-    nsRefPtr<dom::TimeRanges> second = new dom::TimeRanges();
-    aSecond->GetBuffered(second);
-
-    return first->GetStartTime() < second->GetStartTime();
+    return first.GetStart() < second.GetStart();
   }
 
   bool Equals(SourceBufferDecoder* aFirst, SourceBufferDecoder* aSecond) const
   {
-    nsRefPtr<dom::TimeRanges> first = new dom::TimeRanges();
-    aFirst->GetBuffered(first);
+    media::TimeIntervals first = aFirst->GetBuffered();
+    media::TimeIntervals second = aSecond->GetBuffered();
 
-    nsRefPtr<dom::TimeRanges> second = new dom::TimeRanges();
-    aSecond->GetBuffered(second);
-
-    return first->GetStartTime() == second->GetStartTime();
+    return first.GetStart() == second.GetStart();
   }
 };
 
 bool
 TrackBuffer::EvictData(double aPlaybackTime,
                        uint32_t aThreshold,
                        double* aBufferStartTime)
 {
@@ -315,42 +309,43 @@ TrackBuffer::EvictData(double aPlaybackT
 
   // Get a list of initialized decoders.
   nsTArray<SourceBufferDecoder*> decoders;
   decoders.AppendElements(mInitializedDecoders);
 
   // First try to evict data before the current play position, starting
   // with the oldest decoder.
   for (uint32_t i = 0; i < decoders.Length() && toEvict > 0; ++i) {
-    nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-    decoders[i]->GetBuffered(buffered);
+    media::TimeIntervals buffered = decoders[i]->GetBuffered();
 
     MSE_DEBUG("Step1. decoder=%u/%u threshold=%u toEvict=%lld",
               i, decoders.Length(), aThreshold, toEvict);
 
     // To ensure we don't evict data past the current playback position
     // we apply a threshold of a few seconds back and evict data up to
     // that point.
     if (aPlaybackTime > MSE_EVICT_THRESHOLD_TIME) {
-      double time = aPlaybackTime - MSE_EVICT_THRESHOLD_TIME;
+      media::TimeUnit time = media::TimeUnit::FromSeconds(aPlaybackTime) -
+        media::TimeUnit::FromSeconds(MSE_EVICT_THRESHOLD_TIME);
       bool isActive = decoders[i] == mCurrentDecoder ||
         mParentDecoder->IsActiveReader(decoders[i]->GetReader());
-      if (!isActive && buffered->GetEndTime() < time) {
+      if (!isActive && buffered.GetEnd() < time) {
         // The entire decoder is contained before our current playback time.
         // It can be fully evicted.
         MSE_DEBUG("evicting all bufferedEnd=%f "
                   "aPlaybackTime=%f time=%f, size=%lld",
-                  buffered->GetEndTime(), aPlaybackTime, time,
+                  buffered.GetEnd().ToSeconds(), aPlaybackTime, time.ToSeconds(),
                   decoders[i]->GetResource()->GetSize());
         toEvict -= decoders[i]->GetResource()->EvictAll();
       } else {
-        int64_t playbackOffset = decoders[i]->ConvertToByteOffset(time);
+        int64_t playbackOffset =
+          decoders[i]->ConvertToByteOffset(time.ToSeconds());
         MSE_DEBUG("evicting some bufferedEnd=%f "
                   "aPlaybackTime=%f time=%f, playbackOffset=%lld size=%lld",
-                  buffered->GetEndTime(), aPlaybackTime, time,
+                  buffered.GetEnd().ToSeconds(), aPlaybackTime, time.ToSeconds(),
                   playbackOffset, decoders[i]->GetResource()->GetSize());
         if (playbackOffset > 0) {
           toEvict -= decoders[i]->GetResource()->EvictData(playbackOffset,
                                                            playbackOffset);
         }
       }
     }
   }
@@ -360,23 +355,22 @@ TrackBuffer::EvictData(double aPlaybackT
     MSE_DEBUG("Step2. decoder=%u/%u threshold=%u toEvict=%lld",
               i, decoders.Length(), aThreshold, toEvict);
     if (mParentDecoder->IsActiveReader(decoders[i]->GetReader())) {
       break;
     }
     if (decoders[i] == mCurrentDecoder) {
       continue;
     }
-    nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-    decoders[i]->GetBuffered(buffered);
+    media::TimeIntervals buffered = decoders[i]->GetBuffered();
 
     // Remove data from older decoders than the current one.
     MSE_DEBUG("evicting all "
               "bufferedStart=%f bufferedEnd=%f aPlaybackTime=%f size=%lld",
-              buffered->GetStartTime(), buffered->GetEndTime(),
+              buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds(),
               aPlaybackTime, decoders[i]->GetResource()->GetSize());
     toEvict -= decoders[i]->GetResource()->EvictAll();
   }
 
   // Evict all data from future decoders, starting furthest away from
   // current playback position.
   // We will ignore the currently playing decoder and the one playing after that
   // in order to ensure we give enough time to the DASH player to re-buffer
@@ -390,52 +384,49 @@ TrackBuffer::EvictData(double aPlaybackT
     if (mParentDecoder->IsActiveReader(decoders[i]->GetReader())) {
       playingDecoder = decoders[i];
       break;
     }
   }
   // Find the next decoder we're likely going to play with.
   nsRefPtr<SourceBufferDecoder> nextPlayingDecoder = nullptr;
   if (playingDecoder) {
-    nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-    playingDecoder->GetBuffered(buffered);
+    media::TimeIntervals buffered = playingDecoder->GetBuffered();
     nextPlayingDecoder =
-      mParentDecoder->SelectDecoder(buffered->GetEndTime() * USECS_PER_S + 1,
+      mParentDecoder->SelectDecoder(buffered.GetEnd().ToMicroseconds() + 1,
                                     EOS_FUZZ_US,
                                     mInitializedDecoders);
   }
 
   // Sort decoders by their start times.
   decoders.Sort(DecoderSorter());
 
   for (int32_t i = int32_t(decoders.Length()) - 1; i >= 0 && toEvict > 0; --i) {
     MSE_DEBUG("Step3. decoder=%u/%u threshold=%u toEvict=%lld",
               i, decoders.Length(), aThreshold, toEvict);
     if (decoders[i] == playingDecoder || decoders[i] == nextPlayingDecoder ||
         decoders[i] == mCurrentDecoder) {
       continue;
     }
-    nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-    decoders[i]->GetBuffered(buffered);
+    media::TimeIntervals buffered = decoders[i]->GetBuffered();
 
     MSE_DEBUG("evicting all "
               "bufferedStart=%f bufferedEnd=%f aPlaybackTime=%f size=%lld",
-              buffered->GetStartTime(), buffered->GetEndTime(),
+              buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds(),
               aPlaybackTime, decoders[i]->GetResource()->GetSize());
     toEvict -= decoders[i]->GetResource()->EvictAll();
   }
 
   RemoveEmptyDecoders(decoders);
 
   bool evicted = toEvict < (totalSize - aThreshold);
   if (evicted) {
     if (playingDecoder) {
-      nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-      playingDecoder->GetBuffered(ranges);
-      *aBufferStartTime = std::max(0.0, ranges->GetStartTime());
+      media::TimeIntervals ranges = playingDecoder->GetBuffered();
+      *aBufferStartTime = std::max(0.0, ranges.GetStart().ToSeconds());
     } else {
       // We do not currently have data to play yet.
       // Avoid evicting any more data to minimize rebuffering time.
       *aBufferStartTime = 0.0;
     }
   }
 
   return evicted;
@@ -443,30 +434,28 @@ TrackBuffer::EvictData(double aPlaybackT
 
 void
 TrackBuffer::RemoveEmptyDecoders(nsTArray<mozilla::SourceBufferDecoder*>& aDecoders)
 {
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
 
   // Remove decoders that have no data in them
   for (uint32_t i = 0; i < aDecoders.Length(); ++i) {
-    nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-    aDecoders[i]->GetBuffered(buffered);
+    media::TimeIntervals buffered = aDecoders[i]->GetBuffered();
     MSE_DEBUG("maybe remove empty decoders=%d "
               "size=%lld start=%f end=%f",
               i, aDecoders[i]->GetResource()->GetSize(),
-              buffered->GetStartTime(), buffered->GetEndTime());
+              buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds());
     if (aDecoders[i] == mCurrentDecoder ||
         mParentDecoder->IsActiveReader(aDecoders[i]->GetReader())) {
       continue;
     }
 
-    if (aDecoders[i]->GetResource()->GetSize() == 0 ||
-        buffered->GetStartTime() < 0.0 ||
-        buffered->GetEndTime() < 0.0) {
+    if (aDecoders[i]->GetResource()->GetSize() == 0 || !buffered.Length() ||
+        buffered[0].IsEmpty()) {
       MSE_DEBUG("remove empty decoders=%d", i);
       RemoveDecoder(aDecoders[i]);
     }
   }
 }
 
 int64_t
 TrackBuffer::GetSize()
@@ -479,22 +468,21 @@ TrackBuffer::GetSize()
 }
 
 bool
 TrackBuffer::HasOnlyIncompleteMedia()
 {
   if (!mCurrentDecoder) {
     return false;
   }
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  mCurrentDecoder->GetBuffered(buffered);
+  media::TimeIntervals buffered = mCurrentDecoder->GetBuffered();
   MSE_DEBUG("mCurrentDecoder.size=%lld, start=%f end=%f",
             mCurrentDecoder->GetResource()->GetSize(),
-            buffered->GetStartTime(), buffered->GetEndTime());
-  return mCurrentDecoder->GetResource()->GetSize() && !buffered->Length();
+            buffered.GetStart().ToSeconds(), buffered.GetEnd().ToSeconds());
+  return mCurrentDecoder->GetResource()->GetSize() && !buffered.Length();
 }
 
 void
 TrackBuffer::EvictBefore(double aTime)
 {
   MOZ_ASSERT(NS_IsMainThread());
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
   for (uint32_t i = 0; i < mInitializedDecoders.Length(); ++i) {
@@ -502,33 +490,33 @@ TrackBuffer::EvictBefore(double aTime)
     if (endOffset > 0) {
       MSE_DEBUG("decoder=%u offset=%lld",
                 i, endOffset);
       mInitializedDecoders[i]->GetResource()->EvictBefore(endOffset);
     }
   }
 }
 
-double
-TrackBuffer::Buffered(dom::TimeRanges* aRanges)
+media::TimeIntervals
+TrackBuffer::Buffered()
 {
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
 
-  double highestEndTime = 0;
+  media::TimeIntervals buffered;
 
-  for (uint32_t i = 0; i < mInitializedDecoders.Length(); ++i) {
-    nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
-    mInitializedDecoders[i]->GetBuffered(r);
-    if (r->Length() > 0) {
-      highestEndTime = std::max(highestEndTime, r->GetEndTime());
-      aRanges->Union(r, double(mParser->GetRoundingError()) / USECS_PER_S);
-    }
+  for (auto& decoder : mInitializedDecoders) {
+    buffered += decoder->GetBuffered();
+  }
+  // mParser may not be initialized yet, and will only be so if we have a
+  // buffered range.
+  if (buffered.Length()) {
+    buffered.SetFuzz(media::TimeUnit::FromMicroseconds(mParser->GetRoundingError()));
   }
 
-  return highestEndTime;
+  return buffered;
 }
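
A sketch of what the fuzz buys us here, with hypothetical numbers, and assuming SetFuzz() lets ranges closer than the fuzz coalesce (which is what replacing the old Union(r, roundingError) call implies):

    #include "TimeUnits.h"
    using namespace mozilla;

    static media::TimeIntervals SketchFuzzMerge()
    {
      // Two decoders whose ranges are separated only by a parser rounding error.
      media::TimeIntervals buffered;
      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(0),
                                      media::TimeUnit::FromSeconds(4.9995));
      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(5.0),
                                      media::TimeUnit::FromSeconds(10.0));

      // Assumed behaviour: with a 1ms fuzz the two ranges are treated as one,
      // so Length() becomes 1 and the set spans roughly [0, 10)s.
      buffered.SetFuzz(media::TimeUnit::FromMicroseconds(1000));
      return buffered;
    }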
 
 already_AddRefed<SourceBufferDecoder>
 TrackBuffer::NewDecoder(int64_t aTimestampOffset)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mParentDecoder);
 
@@ -893,21 +881,21 @@ TrackBuffer::IsWaitingOnCDMResource()
 {
   return mIsWaitingOnCDM;
 }
 
 bool
 TrackBuffer::ContainsTime(int64_t aTime, int64_t aTolerance)
 {
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
-  for (uint32_t i = 0; i < mInitializedDecoders.Length(); ++i) {
-    nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
-    mInitializedDecoders[i]->GetBuffered(r);
-    if (r->Find(double(aTime) / USECS_PER_S,
-                double(aTolerance) / USECS_PER_S) != dom::TimeRanges::NoIndex) {
+  media::TimeUnit time{media::TimeUnit::FromMicroseconds(aTime)};
+  for (auto& decoder : mInitializedDecoders) {
+    media::TimeIntervals r = decoder->GetBuffered();
+    r.SetFuzz(media::TimeUnit::FromMicroseconds(aTolerance));
+    if (r.Contains(time)) {
       return true;
     }
   }
 
   return false;
 }
 
 void
@@ -1052,27 +1040,27 @@ TrackBuffer::RemoveDecoder(SourceBufferD
     MOZ_ASSERT(!mParentDecoder->IsActiveReader(aDecoder->GetReader()));
     mInitializedDecoders.RemoveElement(aDecoder);
     mDecoders.RemoveElement(aDecoder);
   }
   aDecoder->GetReader()->GetTaskQueue()->Dispatch(task);
 }
 
 bool
-TrackBuffer::RangeRemoval(media::Microseconds aStart,
-                          media::Microseconds aEnd)
+TrackBuffer::RangeRemoval(media::TimeUnit aStart,
+                          media::TimeUnit aEnd)
 {
   MOZ_ASSERT(NS_IsMainThread());
   ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
 
-  nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-  media::Microseconds bufferedEnd = media::Microseconds::FromSeconds(Buffered(buffered));
-  media::Microseconds bufferedStart = media::Microseconds::FromSeconds(buffered->GetStartTime());
+  media::TimeIntervals buffered = Buffered();
+  media::TimeUnit bufferedStart = buffered.GetStart();
+  media::TimeUnit bufferedEnd = buffered.GetEnd();
 
-  if (bufferedStart < media::Microseconds(0) || aStart > bufferedEnd || aEnd < bufferedStart) {
+  if (!buffered.Length() || aStart > bufferedEnd || aEnd < bufferedStart) {
     // Nothing to remove.
     return false;
   }
 
   if (aStart > bufferedStart && aEnd < bufferedEnd) {
     // TODO. We only handle trimming and removal from the start.
     NS_WARNING("RangeRemoval unsupported arguments. "
                "Can only handle trimming (trim left or trim right");
@@ -1080,42 +1068,41 @@ TrackBuffer::RangeRemoval(media::Microse
   }
 
   nsTArray<SourceBufferDecoder*> decoders;
   decoders.AppendElements(mInitializedDecoders);
 
   if (aStart <= bufferedStart && aEnd < bufferedEnd) {
     // Evict data from beginning.
     for (size_t i = 0; i < decoders.Length(); ++i) {
-      nsRefPtr<dom::TimeRanges> buffered = new dom::TimeRanges();
-      decoders[i]->GetBuffered(buffered);
-      if (media::Microseconds::FromSeconds(buffered->GetEndTime()) < aEnd) {
+      media::TimeIntervals buffered = decoders[i]->GetBuffered();
+      if (buffered.GetEnd() < aEnd) {
         // Can be fully removed.
         MSE_DEBUG("remove all bufferedEnd=%f size=%lld",
-                  buffered->GetEndTime(),
+                  buffered.GetEnd().ToSeconds(),
                   decoders[i]->GetResource()->GetSize());
         decoders[i]->GetResource()->EvictAll();
       } else {
         int64_t offset = decoders[i]->ConvertToByteOffset(aEnd.ToSeconds());
         MSE_DEBUG("removing some bufferedEnd=%f offset=%lld size=%lld",
-                  buffered->GetEndTime(), offset,
+                  buffered.GetEnd().ToSeconds(), offset,
                   decoders[i]->GetResource()->GetSize());
         if (offset > 0) {
           decoders[i]->GetResource()->EvictData(offset, offset);
         }
       }
     }
   } else {
     // Only trimming existing buffers.
     for (size_t i = 0; i < decoders.Length(); ++i) {
-      if (aStart <= media::Microseconds::FromSeconds(buffered->GetStartTime())) {
+      if (aStart <= buffered.GetStart()) {
         // It will be entirely emptied, can clear all data.
         decoders[i]->GetResource()->EvictAll();
       } else {
-        decoders[i]->Trim(aStart.mValue);
+        decoders[i]->Trim(aStart.ToMicroseconds());
       }
     }
   }
 
   RemoveEmptyDecoders(decoders);
 
   return true;
 }
--- a/dom/media/mediasource/TrackBuffer.h
+++ b/dom/media/mediasource/TrackBuffer.h
@@ -19,22 +19,16 @@
 #include "TimeUnits.h"
 
 namespace mozilla {
 
 class ContainerParser;
 class MediaSourceDecoder;
 class MediaLargeByteBuffer;
 
-namespace dom {
-
-class TimeRanges;
-
-} // namespace dom
-
 class TrackBuffer final {
 public:
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffer);
 
   TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType);
 
   nsRefPtr<ShutdownPromise> Shutdown();
 
@@ -51,20 +45,19 @@ public:
   // decoders buffered data after the eviction. Returns true if data was
   // evicted.
   bool EvictData(double aPlaybackTime, uint32_t aThreshold, double* aBufferStartTime);
 
   // Evicts data held in all the decoders SourceBufferResource from the start
   // of the buffer through to aTime.
   void EvictBefore(double aTime);
 
-  // Returns the highest end time of all of the buffered ranges in the
-  // decoders managed by this TrackBuffer, and returns the union of the
-  // decoders buffered ranges in aRanges. This may be called on any thread.
-  double Buffered(dom::TimeRanges* aRanges);
+  // Returns the union of the decoders' buffered ranges.
+  // This may be called on any thread.
+  media::TimeIntervals Buffered();
 
   // Mark the current decoder's resource as ended, clear mCurrentDecoder and
   // reset mLast{Start,End}Timestamp.  Main thread only.
   void DiscardCurrentDecoder();
   // Mark the current decoder's resource as ended.
   void EndCurrentDecoder();
 
   void Detach();
@@ -94,18 +87,18 @@ public:
   // TODO: Refactor to a cleaner interface between TrackBuffer and MediaSourceReader.
   const nsTArray<nsRefPtr<SourceBufferDecoder>>& Decoders();
 
   // Runs MSE range removal algorithm.
   // http://w3c.github.io/media-source/#sourcebuffer-coded-frame-removal
   // Implementation is only partial, we can only trim a buffer.
   // Returns true if data was evicted.
-  bool RangeRemoval(mozilla::media::Microseconds aStart,
-                    mozilla::media::Microseconds aEnd);
+  bool RangeRemoval(mozilla::media::TimeUnit aStart,
+                    mozilla::media::TimeUnit aEnd);
 
   // Abort any pending appendBuffer by rejecting any pending promises.
   void AbortAppendData();
 
   // Return the size used by all decoders managed by this TrackBuffer.
   int64_t GetSize();
 
   // Return true if we have a partial media segment being appended that is
--- a/dom/media/ogg/OggReader.cpp
+++ b/dom/media/ogg/OggReader.cpp
@@ -12,17 +12,16 @@
 #include "OggReader.h"
 #include "VideoUtils.h"
 #include "theora/theoradec.h"
 #include <algorithm>
 #include "opus/opus.h"
 extern "C" {
 #include "opus/opus_multistream.h"
 }
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/TimeStamp.h"
 #include "VorbisUtils.h"
 #include "MediaMetadataManager.h"
 #include "nsISeekableStream.h"
 #include "gfx2DGlue.h"
 
 using namespace mozilla::gfx;
 
@@ -1848,39 +1847,41 @@ nsresult OggReader::SeekBisection(int64_
     NS_ASSERTION(endTime >= seekTarget, "End must be after seek target");
   }
 
   SEEK_LOG(PR_LOG_DEBUG, ("Seek complete in %d bisections.", hops));
 
   return NS_OK;
 }
 
-nsresult OggReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals OggReader::GetBuffered()
 {
   MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
   {
     mozilla::ReentrantMonitorAutoEnter mon(mMonitor);
-    if (mIsChained)
-      return NS_ERROR_FAILURE;
+    if (mIsChained) {
+      return media::TimeIntervals::Invalid();
+    }
   }
 #ifdef OGG_ESTIMATE_BUFFERED
-  return MediaDecoderReader::GetBuffered(aBuffered);
+  return MediaDecoderReader::GetBuffered();
 #else
+  media::TimeIntervals buffered;
   // HasAudio and HasVideo are not used here as they take a lock and cause
   // a deadlock. Accessing mInfo doesn't require a lock - it doesn't change
   // after metadata is read.
   if (!mInfo.HasValidMedia()) {
     // No need to search through the file if there are no audio or video tracks
-    return NS_OK;
+    return buffered;
   }
 
   AutoPinned<MediaResource> resource(mDecoder->GetResource());
   nsTArray<MediaByteRange> ranges;
   nsresult res = resource->GetCachedRanges(ranges);
-  NS_ENSURE_SUCCESS(res, res);
+  NS_ENSURE_SUCCESS(res, media::TimeIntervals::Invalid());
 
   // Traverse across the buffered byte ranges, determining the time ranges
   // they contain. MediaResource::GetNextCachedData(offset) returns -1 when
   // offset is after the end of the media resource, or there's no more cached
   // data after the offset. This loop will run until we've checked every
   // buffered range in the media, in increasing order of offset.
   nsAutoOggSyncState sync;
   for (uint32_t index = 0; index < ranges.Length(); index++) {
@@ -1904,17 +1905,17 @@ nsresult OggReader::GetBuffered(dom::Tim
       PageSyncResult res = PageSync(resource,
                                     &sync.mState,
                                     true,
                                     startOffset,
                                     endOffset,
                                     &page,
                                     discard);
       if (res == PAGE_SYNC_ERROR) {
-        return NS_ERROR_FAILURE;
+        return media::TimeIntervals::Invalid();
       } else if (res == PAGE_SYNC_END_OF_RANGE) {
         // Hit the end of range without reading a page, give up trying to
         // find a start time for this buffered range, skip onto the next one.
         break;
       }
 
       int64_t granulepos = ogg_page_granulepos(&page);
       if (granulepos == -1) {
@@ -1944,32 +1945,33 @@ nsresult OggReader::GetBuffered(dom::Tim
         continue;
       }
       else {
         // Page is for a stream we don't know about (possibly a chained
        // ogg), so return early to abort finding any further ranges. This
         // prevents us searching through the rest of the media when we
         // may not be able to extract timestamps from it.
         SetChained(true);
-        return NS_OK;
+        return buffered;
       }
     }
 
     if (startTime != -1) {
       // We were able to find a start time for that range, see if we can
       // find an end time.
       int64_t endTime = RangeEndTime(startOffset, endOffset, true);
-      if (endTime != -1) {
-        aBuffered->Add((startTime - mStartTime) / static_cast<double>(USECS_PER_S),
-                       (endTime - mStartTime) / static_cast<double>(USECS_PER_S));
+      if (endTime > startTime) {
+        buffered += media::TimeInterval(
+           media::TimeUnit::FromMicroseconds(startTime - mStartTime),
+           media::TimeUnit::FromMicroseconds(endTime - mStartTime));
       }
     }
   }
 
-  return NS_OK;
+  return buffered;
 #endif
 }
 
 VideoData* OggReader::FindStartTime(int64_t& aOutStartTime)
 {
   MOZ_ASSERT(OnTaskQueue() || mDecoder->OnStateMachineTaskQueue());
 
   // Extract the start times of the bitstreams in order to calculate
--- a/dom/media/ogg/OggReader.h
+++ b/dom/media/ogg/OggReader.h
@@ -14,22 +14,16 @@
 #include <vorbis/codec.h>
 #endif
 #include "MediaDecoderReader.h"
 #include "OggCodecState.h"
 #include "VideoUtils.h"
 #include "mozilla/Monitor.h"
 
 namespace mozilla {
-namespace dom {
-class TimeRanges;
-}
-}
-
-namespace mozilla {
 
 // Thread safe container to store the codec information and the serial for each
 // streams.
 class OggCodecStore
 {
   public:
     OggCodecStore();
     void Add(uint32_t serial, OggCodecState* codecState);
@@ -72,17 +66,17 @@ public:
   virtual bool HasVideo() override {
     return mTheoraState != 0 && mTheoraState->mActive;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags) override;
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   virtual bool IsMediaSeekable() override;
 
 private:
   // TODO: DEPRECATED. This uses synchronous decoding.
   // Stores the presentation time of the first frame we'd be able to play if
   // we started playback at the current position. Returns the first video
   // frame, if we have video.
--- a/dom/media/omx/MediaOmxReader.cpp
+++ b/dom/media/omx/MediaOmxReader.cpp
@@ -3,17 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaOmxReader.h"
 
 #include "MediaDecoderStateMachine.h"
 #include "mozilla/TimeStamp.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "MediaResource.h"
 #include "VideoUtils.h"
 #include "MediaOmxDecoder.h"
 #include "AbstractMediaDecoder.h"
 #include "AudioChannelService.h"
 #include "OmxDecoder.h"
 #include "MPAPI.h"
 #include "gfx2DGlue.h"
--- a/dom/media/omx/RtspMediaCodecReader.h
+++ b/dom/media/omx/RtspMediaCodecReader.h
@@ -6,20 +6,16 @@
 
 #if !defined(RtspMediaCodecReader_h_)
 #define RtspMediaCodecReader_h_
 
 #include "MediaCodecReader.h"
 
 namespace mozilla {
 
-namespace dom {
-  class TimeRanges;
-}
-
 class AbstractMediaDecoder;
 class RtspMediaResource;
 
 /* RtspMediaCodecReader is a subclass of MediaCodecReader.
  * The major reason that RtspMediaCodecReader inherits from MediaCodecReader
  * is that we can reuse the same video/audio decoding logic.
  */
 class RtspMediaCodecReader final : public MediaCodecReader
@@ -43,18 +39,18 @@ public:
  // 1. The RTSP stream is a/v separated, so the buffered data in the audio
  // and video tracks is not consistent in time.
  // For example: audio buffer: 1~2s, video buffer: 1.5~2.5s.
  // 2. Since RTSP is realtime streaming, the buffer we keep for
  // RtspMediaResource is quite small. Such a small buffer means the time
  // ranges we could return are not useful to the MediaDecoderStateMachine.
  // Unlike ChannelMediaResource, we have no cache storing the whole stream
  // from which |GetBuffered| could retrieve useful time ranges.
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override {
-    return NS_ERROR_NOT_IMPLEMENTED;
+  virtual media::TimeIntervals GetBuffered() override {
+    return media::TimeIntervals::Invalid();
   }
 
   virtual void SetIdle() override;
 
  // Dispatch a DecodeVideoFrameTask to decode video data.
   virtual nsRefPtr<VideoDataPromise>
   RequestVideoData(bool aSkipToNextKeyframe,
                    int64_t aTimeThreshold) override;
--- a/dom/media/omx/RtspOmxReader.h
+++ b/dom/media/omx/RtspOmxReader.h
@@ -55,18 +55,18 @@ public:
  // 1. The RTSP stream is a/v separated, so the buffered data in the audio
  // and video tracks is not consistent in time.
  // For example: audio buffer: 1~2s, video buffer: 1.5~2.5s.
  // 2. Since RTSP is realtime streaming, the buffer we keep for
  // RtspMediaResource is quite small. Such a small buffer means the time
  // ranges we could return are not useful to the MediaDecoderStateMachine.
  // Unlike ChannelMediaResource, we have no cache storing the whole stream
  // from which |GetBuffered| could retrieve useful time ranges.
-  virtual nsresult GetBuffered(mozilla::dom::TimeRanges* aBuffered) final override {
-    return NS_ERROR_NOT_IMPLEMENTED;
+  virtual media::TimeIntervals GetBuffered() final override {
+    return media::TimeIntervals::Invalid();
   }
 
   virtual void SetIdle() override;
 
   virtual nsresult ReadMetadata(MediaInfo *aInfo, MetadataTags **aTags)
     final override;
 
 private:
--- a/dom/media/raw/RawReader.cpp
+++ b/dom/media/raw/RawReader.cpp
@@ -279,12 +279,12 @@ nsresult RawReader::SeekInternal(int64_t
     if (mVideoQueue.PeekFront() && mVideoQueue.PeekFront()->GetEndTime() < aTime) {
       nsRefPtr<VideoData> releaseMe = mVideoQueue.PopFront();
     }
   }
 
   return NS_OK;
 }
 
-nsresult RawReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals RawReader::GetBuffered()
 {
-  return NS_OK;
+  return media::TimeIntervals();
 }
--- a/dom/media/raw/RawReader.h
+++ b/dom/media/raw/RawReader.h
@@ -37,17 +37,17 @@ public:
     return true;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags) override;
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   virtual bool IsMediaSeekable() override;
 
 private:
   bool ReadFromResource(MediaResource *aResource, uint8_t *aBuf, uint32_t aLength);
 
   nsresult SeekInternal(int64_t aTime);
 
--- a/dom/media/wave/WaveReader.cpp
+++ b/dom/media/wave/WaveReader.cpp
@@ -2,17 +2,16 @@
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "nsError.h"
 #include "AbstractMediaDecoder.h"
 #include "MediaResource.h"
 #include "WaveReader.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "MediaDecoderStateMachine.h"
 #include "VideoUtils.h"
 #include "nsISeekableStream.h"
 
 #include <stdint.h>
 #include "mozilla/ArrayUtils.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/Endian.h"
@@ -270,41 +269,39 @@ WaveReader::Seek(int64_t aTarget, int64_
   nsresult res = mDecoder->GetResource()->Seek(nsISeekableStream::NS_SEEK_SET, position);
   if (NS_FAILED(res)) {
     return SeekPromise::CreateAndReject(res, __func__);
   } else {
     return SeekPromise::CreateAndResolve(aTarget, __func__);
   }
 }
 
-static double RoundToUsecs(double aSeconds) {
-  return floor(aSeconds * USECS_PER_S) / USECS_PER_S;
-}
-
-nsresult WaveReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals WaveReader::GetBuffered()
 {
   if (!mInfo.HasAudio()) {
-    return NS_OK;
+    return media::TimeIntervals();
   }
+  media::TimeIntervals buffered;
   AutoPinned<MediaResource> resource(mDecoder->GetResource());
   int64_t startOffset = resource->GetNextCachedData(mWavePCMOffset);
   while (startOffset >= 0) {
     int64_t endOffset = resource->GetCachedDataEnd(startOffset);
     // Bytes [startOffset..endOffset] are cached.
     NS_ASSERTION(startOffset >= mWavePCMOffset, "Integer underflow in GetBuffered");
     NS_ASSERTION(endOffset >= mWavePCMOffset, "Integer underflow in GetBuffered");
 
     // TimeUnit values carry microsecond precision, which matches the
     // precision of the currentTime and duration attributes on the media
     // element.
-    aBuffered->Add(RoundToUsecs(BytesToTime(startOffset - mWavePCMOffset)),
-                   RoundToUsecs(BytesToTime(endOffset - mWavePCMOffset)));
+    buffered += media::TimeInterval(
+      media::TimeUnit::FromSeconds(BytesToTime(startOffset - mWavePCMOffset)),
+      media::TimeUnit::FromSeconds(BytesToTime(endOffset - mWavePCMOffset)));
     startOffset = resource->GetNextCachedData(endOffset);
   }
-  return NS_OK;
+  return buffered;
 }
 
 bool
 WaveReader::ReadAll(char* aBuf, int64_t aSize, int64_t* aBytesRead)
 {
   uint32_t got = 0;
   if (aBytesRead) {
     *aBytesRead = 0;
--- a/dom/media/wave/WaveReader.h
+++ b/dom/media/wave/WaveReader.h
@@ -5,22 +5,16 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #if !defined(WaveReader_h_)
 #define WaveReader_h_
 
 #include "MediaDecoderReader.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 
 namespace mozilla {
-namespace dom {
-class TimeRanges;
-}
-}
-
-namespace mozilla {
 
 class WaveReader : public MediaDecoderReader
 {
 public:
   explicit WaveReader(AbstractMediaDecoder* aDecoder);
 
 protected:
   ~WaveReader();
@@ -41,17 +35,17 @@ public:
     return false;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags) override;
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
 
   virtual bool IsMediaSeekable() override;
 
 private:
   bool ReadAll(char* aBuf, int64_t aSize, int64_t* aBytesRead = nullptr);
   bool LoadRIFFChunk();
   bool GetNextChunk(uint32_t* aChunk, uint32_t* aChunkSize);
   bool LoadFormatChunk(uint32_t aChunkSize);
--- a/dom/media/webm/SoftwareWebMVideoDecoder.cpp
+++ b/dom/media/webm/SoftwareWebMVideoDecoder.cpp
@@ -3,19 +3,19 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "SoftwareWebMVideoDecoder.h"
 #include "AbstractMediaDecoder.h"
 #include "gfx2DGlue.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaResource.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "nsError.h"
 #include "OggReader.h"
+#include "TimeUnits.h"
 #include "VorbisUtils.h"
 #include "WebMBufferedParser.h"
 
 #include <algorithm>
 
 #define VPX_DONT_DEFINE_STDINT_TYPES
 #include "vpx/vp8dx.h"
 #include "vpx/vpx_decoder.h"
--- a/dom/media/webm/WebMBufferedParser.cpp
+++ b/dom/media/webm/WebMBufferedParser.cpp
@@ -1,17 +1,16 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "nsAlgorithm.h"
 #include "WebMBufferedParser.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "nsThreadUtils.h"
 #include <algorithm>
 
 namespace mozilla {
 
 static uint32_t
 VIntLength(unsigned char aFirstByte, uint32_t* aMask)
 {
--- a/dom/media/webm/WebMReader.cpp
+++ b/dom/media/webm/WebMReader.cpp
@@ -5,17 +5,16 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #include "nsError.h"
 #include "MediaDecoderStateMachine.h"
 #include "AbstractMediaDecoder.h"
 #include "MediaResource.h"
 #include "SoftwareWebMVideoDecoder.h"
 #include "WebMReader.h"
 #include "WebMBufferedParser.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "VorbisUtils.h"
 #include "gfx2DGlue.h"
 #include "Layers.h"
 #include "mozilla/Preferences.h"
 #include "SharedThreadPool.h"
 
 #include <algorithm>
 
@@ -1102,39 +1101,38 @@ nsresult WebMReader::SeekInternal(int64_
                        this, offset, r));
     if (r != 0) {
       return NS_ERROR_FAILURE;
     }
   }
   return NS_OK;
 }
 
-nsresult WebMReader::GetBuffered(dom::TimeRanges* aBuffered)
+media::TimeIntervals WebMReader::GetBuffered()
 {
   MOZ_ASSERT(mStartTime != -1, "Need to finish metadata decode first");
-  if (aBuffered->Length() != 0) {
-    return NS_ERROR_FAILURE;
-  }
-
   AutoPinned<MediaResource> resource(mDecoder->GetResource());
 
+  media::TimeIntervals buffered;
   // Special case completely cached files.  This also handles local files.
   if (mContext && resource->IsDataCachedToEndOfResource(0)) {
     uint64_t duration = 0;
     if (nestegg_duration(mContext, &duration) == 0) {
-      aBuffered->Add(0, duration / NS_PER_S);
-      return NS_OK;
+      buffered +=
+        media::TimeInterval(media::TimeUnit::FromSeconds(0),
+                            media::TimeUnit::FromSeconds(duration / NS_PER_S));
+      return buffered;
     }
   }
 
   // Either the file is not fully cached, or we couldn't find a duration in
   // the WebM bitstream.
   nsTArray<MediaByteRange> ranges;
   nsresult res = resource->GetCachedRanges(ranges);
-  NS_ENSURE_SUCCESS(res, res);
+  NS_ENSURE_SUCCESS(res, media::TimeIntervals::Invalid());
 
   for (uint32_t index = 0; index < ranges.Length(); index++) {
     uint64_t start, end;
     bool rv = mBufferedState->CalculateBufferedForRange(ranges[index].mStart,
                                                         ranges[index].mEnd,
                                                         &start, &end);
     if (rv) {
       int64_t startOffset = mStartTime * NS_PER_USEC;
@@ -1149,22 +1147,22 @@ nsresult WebMReader::GetBuffered(dom::Ti
       // is the file's duration.
       if (mContext &&
           resource->IsDataCachedToEndOfResource(ranges[index].mStart)) {
         uint64_t duration = 0;
         if (nestegg_duration(mContext, &duration) == 0) {
           endTime = duration / NS_PER_S;
         }
       }
-
-      aBuffered->Add(startTime, endTime);
+      buffered += media::TimeInterval(media::TimeUnit::FromSeconds(startTime),
+                                      media::TimeUnit::FromSeconds(endTime));
     }
   }
 
-  return NS_OK;
+  return buffered;
 }
 
 void WebMReader::NotifyDataArrived(const char* aBuffer, uint32_t aLength,
                                    int64_t aOffset)
 {
   mBufferedState->NotifyDataArrived(aBuffer, aLength, aOffset);
 }
 
--- a/dom/media/webm/WebMReader.h
+++ b/dom/media/webm/WebMReader.h
@@ -167,17 +167,17 @@ public:
     return mHasVideo;
   }
 
   virtual nsresult ReadMetadata(MediaInfo* aInfo,
                                 MetadataTags** aTags) override;
   virtual nsRefPtr<SeekPromise>
   Seek(int64_t aTime, int64_t aEndTime) override;
 
-  virtual nsresult GetBuffered(dom::TimeRanges* aBuffered) override;
+  virtual media::TimeIntervals GetBuffered() override;
   virtual void NotifyDataArrived(const char* aBuffer, uint32_t aLength,
                                  int64_t aOffset) override;
   virtual int64_t GetEvictionOffset(double aTime) override;
 
   virtual bool IsMediaSeekable() override;
 
   // Value passed to NextPacket to determine if we are reading a video or an
   // audio packet.
--- a/dom/media/wmf/WMFReader.cpp
+++ b/dom/media/wmf/WMFReader.cpp
@@ -5,17 +5,16 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "WMFReader.h"
 #include "WMFDecoder.h"
 #include "WMFUtils.h"
 #include "WMFByteStream.h"
 #include "WMFSourceReaderCallback.h"
 #include "mozilla/ArrayUtils.h"
-#include "mozilla/dom/TimeRanges.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/Preferences.h"
 #include "DXVA2Manager.h"
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "mozilla/layers/LayersTypes.h"
 #include "gfxWindowsPlatform.h"