Bug 1059058 - Introduce abstraction to manage mapping between SourceBuffers and SourceBufferDecoders for the MediaSourceReader. r=cajbir
author Matthew Gregan <kinetik@flim.org>
Thu, 04 Sep 2014 13:57:06 +1200
changeset 203497 8a4df73f1ab8c5e927a8f139953bda69f762ef28
parent 203496 c00ae6dd85ec681b380798d4eed472cc94f93476
child 203498 4f30572ae7d72c4bbf2a9d81596872e751f36b26
push id 27428
push user cbook@mozilla.com
push date Thu, 04 Sep 2014 13:00:04 +0000
reviewers cajbir
bugs 1059058
milestone 35.0a1
Bug 1059058 - Introduce abstraction to manage mapping between SourceBuffers and SourceBufferDecoders for the MediaSourceReader. r=cajbir
content/media/mediasource/MediaSourceDecoder.cpp
content/media/mediasource/MediaSourceDecoder.h
content/media/mediasource/MediaSourceReader.cpp
content/media/mediasource/MediaSourceReader.h
content/media/mediasource/SourceBuffer.cpp
content/media/mediasource/SourceBuffer.h
content/media/mediasource/SourceBufferDecoder.cpp
content/media/mediasource/SourceBufferDecoder.h
content/media/mediasource/TrackBuffer.cpp
content/media/mediasource/TrackBuffer.h
content/media/mediasource/moz.build
content/media/mediasource/test/mochitest.ini
content/media/mediasource/test/test_SplitAppend.html
content/media/mediasource/test/test_SplitAppendDelay.html
--- a/content/media/mediasource/MediaSourceDecoder.cpp
+++ b/content/media/mediasource/MediaSourceDecoder.cpp
@@ -128,16 +128,37 @@ MediaSourceDecoder::DetachMediaSource()
 already_AddRefed<SourceBufferDecoder>
 MediaSourceDecoder::CreateSubDecoder(const nsACString& aType)
 {
   MOZ_ASSERT(mReader);
   return mReader->CreateSubDecoder(aType);
 }
 
 void
+MediaSourceDecoder::AddTrackBuffer(TrackBuffer* aTrackBuffer)
+{
+  MOZ_ASSERT(mReader);
+  mReader->AddTrackBuffer(aTrackBuffer);
+}
+
+void
+MediaSourceDecoder::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
+{
+  MOZ_ASSERT(mReader);
+  mReader->RemoveTrackBuffer(aTrackBuffer);
+}
+
+void
+MediaSourceDecoder::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer, const MediaInfo& aInfo)
+{
+  MOZ_ASSERT(mReader);
+  mReader->OnTrackBufferConfigured(aTrackBuffer, aInfo);
+}
+
+void
 MediaSourceDecoder::Ended()
 {
   ReentrantMonitorAutoEnter mon(GetReentrantMonitor());
   mReader->Ended();
   mon.NotifyAll();
 }
 
 void
--- a/content/media/mediasource/MediaSourceDecoder.h
+++ b/content/media/mediasource/MediaSourceDecoder.h
@@ -15,16 +15,17 @@
 class nsIStreamListener;
 
 namespace mozilla {
 
 class MediaResource;
 class MediaDecoderStateMachine;
 class MediaSourceReader;
 class SourceBufferDecoder;
+class TrackBuffer;
 
 namespace dom {
 
 class HTMLMediaElement;
 class MediaSource;
 
 } // namespace dom
 
@@ -41,16 +42,19 @@ public:
   virtual void Shutdown() MOZ_OVERRIDE;
 
   static already_AddRefed<MediaResource> CreateResource();
 
   void AttachMediaSource(dom::MediaSource* aMediaSource);
   void DetachMediaSource();
 
   already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
+  void AddTrackBuffer(TrackBuffer* aTrackBuffer);
+  void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
+  void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer, const MediaInfo& aInfo);
 
   void Ended();
 
   void SetMediaSourceDuration(double aDuration);
 
   // Provide a mechanism for MediaSourceReader to block waiting on data from a SourceBuffer.
   void WaitForData();
 
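The three forwarding methods added above exist so that a TrackBuffer created by a SourceBuffer can register itself with the MediaSourceReader that consumes its decoders. A minimal standalone sketch of the resulting mapping, using hypothetical names and plain C++ containers in place of Gecko's nsTArray/nsRefPtr (not the real classes):

#include <memory>
#include <vector>

struct Decoder {};                                   // stands in for SourceBufferDecoder

struct Track {                                       // stands in for TrackBuffer
  std::vector<std::shared_ptr<Decoder>> decoders;    // one decoder per (re)initialization
  bool hasAudio = false;
  bool hasVideo = false;
};

struct Reader {                                      // stands in for MediaSourceReader
  std::vector<std::shared_ptr<Track>> trackBuffers;  // AddTrackBuffer/RemoveTrackBuffer
  std::shared_ptr<Track> audioTrack;                 // active tracks, set once configured
  std::shared_ptr<Track> videoTrack;

  void AddTrackBuffer(std::shared_ptr<Track> aTrack) {
    trackBuffers.push_back(std::move(aTrack));
  }

  // Mirrors OnTrackBufferConfigured: the first registered track buffer that
  // reports audio (or video) after reading metadata becomes the active source
  // for that stream type.
  void OnTrackBufferConfigured(const std::shared_ptr<Track>& aTrack) {
    if (aTrack->hasAudio && !audioTrack) { audioTrack = aTrack; }
    if (aTrack->hasVideo && !videoTrack) { videoTrack = aTrack; }
  }
};
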
--- a/content/media/mediasource/MediaSourceReader.cpp
+++ b/content/media/mediasource/MediaSourceReader.cpp
@@ -9,16 +9,17 @@
 #include "mozilla/dom/TimeRanges.h"
 #include "DecoderTraits.h"
 #include "MediaDataDecodedListener.h"
 #include "MediaDecoderOwner.h"
 #include "MediaSource.h"
 #include "MediaSourceDecoder.h"
 #include "MediaSourceUtils.h"
 #include "SourceBufferDecoder.h"
+#include "TrackBuffer.h"
 
 #ifdef MOZ_FMP4
 #include "MP4Decoder.h"
 #include "MP4Reader.h"
 #endif
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* GetMediaSourceLog();
@@ -32,47 +33,59 @@ extern PRLogModuleInfo* GetMediaSourceAP
 #define MSE_DEBUGV(...)
 #define MSE_API(...)
 #endif
 
 namespace mozilla {
 
 MediaSourceReader::MediaSourceReader(MediaSourceDecoder* aDecoder)
   : MediaDecoderReader(aDecoder)
+  , mLastAudioTime(-1)
+  , mLastVideoTime(-1)
   , mTimeThreshold(-1)
   , mDropAudioBeforeThreshold(false)
   , mDropVideoBeforeThreshold(false)
   , mEnded(false)
   , mAudioIsSeeking(false)
   , mVideoIsSeeking(false)
 {
 }
 
 bool
 MediaSourceReader::IsWaitingMediaResources()
 {
-  return mDecoders.IsEmpty() && mPendingDecoders.IsEmpty();
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
+    if (!mTrackBuffers[i]->IsReady()) {
+      return true;
+    }
+  }
+  return mTrackBuffers.IsEmpty();
 }
 
 void
 MediaSourceReader::RequestAudioData()
 {
+  MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData", this);
   if (!mAudioReader) {
     MSE_DEBUG("MediaSourceReader(%p)::RequestAudioData called with no audio reader", this);
-    MOZ_ASSERT(mPendingDecoders.IsEmpty());
     GetCallback()->OnDecodeError();
     return;
   }
-  SwitchReaders(SWITCH_OPTIONAL);
+  if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
+    MSE_DEBUGV("MediaSourceReader(%p)::RequestAudioData switching audio reader", this);
+  }
   mAudioReader->RequestAudioData();
 }
 
 void
 MediaSourceReader::OnAudioDecoded(AudioData* aSample)
 {
+  MSE_DEBUGV("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld mDuration=%lld d=%d",
+             this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
   if (mDropAudioBeforeThreshold) {
     if (aSample->mTime < mTimeThreshold) {
       MSE_DEBUG("MediaSourceReader(%p)::OnAudioDecoded mTime=%lld < mTimeThreshold=%lld",
                 this, aSample->mTime, mTimeThreshold);
       delete aSample;
       mAudioReader->RequestAudioData();
       return;
     }
@@ -81,266 +94,211 @@ MediaSourceReader::OnAudioDecoded(AudioD
 
   // If we are seeking we need to make sure the first sample decoded after
   // that seek has the mDiscontinuity field set to ensure the media decoder
   // state machine picks up that the seek is complete.
   if (mAudioIsSeeking) {
     mAudioIsSeeking = false;
     aSample->mDiscontinuity = true;
   }
+  mLastAudioTime = aSample->mTime + aSample->mDuration;
   GetCallback()->OnAudioDecoded(aSample);
 }
 
 void
 MediaSourceReader::OnAudioEOS()
 {
-  MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (readers=%u)",
-            this, mAudioReader.get(), mDecoders.Length());
-  if (SwitchReaders(SWITCH_FORCED)) {
+  MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p (decoders=%u)",
+            this, mAudioReader.get(), mAudioTrack->Decoders().Length());
+  if (SwitchAudioReader(double(mLastAudioTime) / USECS_PER_S)) {
     // Success! Resume decoding with next audio decoder.
     RequestAudioData();
   } else if (IsEnded()) {
     // End of stream.
-    MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (readers=%u)",
-              this, mAudioReader.get(), mDecoders.Length());
+    MSE_DEBUG("MediaSourceReader(%p)::OnAudioEOS reader=%p EOS (decoders=%u)",
+              this, mAudioReader.get(), mAudioTrack->Decoders().Length());
     GetCallback()->OnAudioEOS();
   }
 }
 
 void
 MediaSourceReader::RequestVideoData(bool aSkipToNextKeyframe, int64_t aTimeThreshold)
 {
+  MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData(%d, %lld)",
+             this, aSkipToNextKeyframe, aTimeThreshold);
   if (!mVideoReader) {
     MSE_DEBUG("MediaSourceReader(%p)::RequestVideoData called with no video reader", this);
-    MOZ_ASSERT(mPendingDecoders.IsEmpty());
     GetCallback()->OnDecodeError();
     return;
   }
-  mTimeThreshold = aTimeThreshold;
-  SwitchReaders(SWITCH_OPTIONAL);
+  if (aSkipToNextKeyframe) {
+    mTimeThreshold = aTimeThreshold;
+    mDropAudioBeforeThreshold = true;
+    mDropVideoBeforeThreshold = true;
+  }
+  if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
+    MSE_DEBUGV("MediaSourceReader(%p)::RequestVideoData switching video reader", this);
+  }
   mVideoReader->RequestVideoData(aSkipToNextKeyframe, aTimeThreshold);
 }
 
 void
 MediaSourceReader::OnVideoDecoded(VideoData* aSample)
 {
+  MSE_DEBUGV("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld mDuration=%lld d=%d",
+             this, aSample->mTime, aSample->mDuration, aSample->mDiscontinuity);
   if (mDropVideoBeforeThreshold) {
     if (aSample->mTime < mTimeThreshold) {
       MSE_DEBUG("MediaSourceReader(%p)::OnVideoDecoded mTime=%lld < mTimeThreshold=%lld",
                 this, aSample->mTime, mTimeThreshold);
       delete aSample;
-      mVideoReader->RequestVideoData(false, mTimeThreshold);
+      mVideoReader->RequestVideoData(false, 0);
       return;
     }
     mDropVideoBeforeThreshold = false;
   }
 
   // If we are seeking we need to make sure the first sample decoded after
   // that seek has the mDiscontinuity field set to ensure the media decoder
   // state machine picks up that the seek is complete.
   if (mVideoIsSeeking) {
     mVideoIsSeeking = false;
     aSample->mDiscontinuity = true;
   }
-
+  mLastVideoTime = aSample->mTime + aSample->mDuration;
   GetCallback()->OnVideoDecoded(aSample);
 }
 
 void
 MediaSourceReader::OnVideoEOS()
 {
   // End of stream. See if we can switch to another video decoder.
-  MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (readers=%u)",
-            this, mVideoReader.get(), mDecoders.Length());
-  if (SwitchReaders(SWITCH_FORCED)) {
+  MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p (decoders=%u)",
+            this, mVideoReader.get(), mVideoTrack->Decoders().Length());
+  if (SwitchVideoReader(double(mLastVideoTime) / USECS_PER_S)) {
     // Success! Resume decoding with next video decoder.
-    RequestVideoData(false, mTimeThreshold);
+    RequestVideoData(false, 0);
   } else if (IsEnded()) {
     // End of stream.
-    MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (readers=%u)",
-              this, mVideoReader.get(), mDecoders.Length());
+    MSE_DEBUG("MediaSourceReader(%p)::OnVideoEOS reader=%p EOS (decoders=%u)",
+              this, mVideoReader.get(), mVideoTrack->Decoders().Length());
     GetCallback()->OnVideoEOS();
   }
 }
 
 void
 MediaSourceReader::OnDecodeError()
 {
+  MSE_DEBUG("MediaSourceReader(%p)::OnDecodeError", this);
   GetCallback()->OnDecodeError();
 }
 
 void
 MediaSourceReader::Shutdown()
 {
   MediaDecoderReader::Shutdown();
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    mDecoders[i]->GetReader()->Shutdown();
+  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
+    mTrackBuffers[i]->Shutdown();
   }
+  mTrackBuffers.Clear();
+  mAudioTrack = nullptr;
+  mAudioReader = nullptr;
+  mVideoTrack = nullptr;
+  mVideoReader = nullptr;
 }
 
 void
 MediaSourceReader::BreakCycles()
 {
   MediaDecoderReader::BreakCycles();
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    mDecoders[i]->GetReader()->BreakCycles();
+  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
+    mTrackBuffers[i]->BreakCycles();
   }
+  mTrackBuffers.Clear();
+  mAudioTrack = nullptr;
+  mAudioReader = nullptr;
+  mVideoTrack = nullptr;
+  mVideoReader = nullptr;
 }
 
 bool
-MediaSourceReader::SwitchAudioReader(MediaDecoderReader* aTargetReader)
+MediaSourceReader::SwitchAudioReader(double aTarget)
 {
-  if (aTargetReader == mAudioReader) {
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  // XXX: Can't handle adding an audio track after ReadMetadata yet.
+  if (!mAudioTrack) {
     return false;
   }
-  if (mAudioReader) {
-    AudioInfo targetInfo = aTargetReader->GetMediaInfo().mAudio;
+  auto& decoders = mAudioTrack->Decoders();
+  for (uint32_t i = 0; i < decoders.Length(); ++i) {
+    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
+    decoders[i]->GetBuffered(ranges);
+
+    MediaDecoderReader* newReader = decoders[i]->GetReader();
+    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchAudioReader(%f) audioReader=%p reader=%p ranges=%s",
+               this, aTarget, mAudioReader.get(), newReader, DumpTimeRanges(ranges).get());
+
+    AudioInfo targetInfo = newReader->GetMediaInfo().mAudio;
     AudioInfo currentInfo = mAudioReader->GetMediaInfo().mAudio;
 
     // TODO: We can't handle switching audio formats yet.
     if (currentInfo.mRate != targetInfo.mRate ||
         currentInfo.mChannels != targetInfo.mChannels) {
-      return false;
-    }
-
-    mAudioReader->SetIdle();
-  }
-  mAudioReader = aTargetReader;
-  mDropAudioBeforeThreshold = true;
-  MSE_DEBUG("MediaDecoderReader(%p)::SwitchReaders(%p) switching audio reader",
-            this, mAudioReader.get());
-  return true;
-}
-
-bool
-MediaSourceReader::SwitchVideoReader(MediaDecoderReader* aTargetReader)
-{
-  if (aTargetReader == mVideoReader) {
-    return false;
-  }
-  if (mVideoReader) {
-    mVideoReader->SetIdle();
-  }
-  mVideoReader = aTargetReader;
-  mDropVideoBeforeThreshold = true;
-  MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%p) switching video reader",
-            this, mVideoReader.get());
-  return true;
-}
-
-bool
-MediaSourceReader::SwitchReaders(SwitchType aType)
-{
-  InitializePendingDecoders();
-
-  // This monitor must be held after the call to InitializePendingDecoders
-  // as that method also obtains the lock, and then attempts to exit it
-  // to call ReadMetadata on the readers. If we hold it before the call then
-  // it remains held during the ReadMetadata call causing a deadlock.
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-
-  bool didSwitch = false;
-  double decodeTarget = double(mTimeThreshold) / USECS_PER_S;
-
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    SourceBufferDecoder* decoder = mDecoders[i];
-    const MediaInfo& info = decoder->GetReader()->GetMediaInfo();
-
-    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
-    decoder->GetBuffered(ranges);
-
-    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchReaders(%d) decoder=%u (%p) discarded=%d"
-               " reader=%p audioReader=%p videoReader=%p"
-               " hasAudio=%d hasVideo=%d decodeTarget=%f ranges=%s",
-               this, aType, i, decoder, decoder->IsDiscarded(),
-               decoder->GetReader(), mAudioReader.get(), mVideoReader.get(),
-               info.HasAudio(), info.HasVideo(), decodeTarget,
-               DumpTimeRanges(ranges).get());
-
-    if (decoder->IsDiscarded()) {
       continue;
     }
 
-    if (aType == SWITCH_FORCED || ranges->Find(decodeTarget) != dom::TimeRanges::NoIndex) {
-      if (info.HasAudio()) {
-        didSwitch |= SwitchAudioReader(mDecoders[i]->GetReader());
+    if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
+      if (newReader->AudioQueue().AtEndOfStream()) {
+        continue;
       }
-      if (info.HasVideo()) {
-        didSwitch |= SwitchVideoReader(mDecoders[i]->GetReader());
+      if (mAudioReader) {
+        mAudioReader->SetIdle();
       }
+      mAudioReader = newReader;
+      MSE_DEBUG("MediaDecoderReader(%p)::SwitchAudioReader(%f) switching to audio reader %p",
+                this, aTarget, mAudioReader.get());
+      return true;
     }
   }
 
-  return didSwitch;
+  return false;
 }
 
-class ReleaseDecodersTask : public nsRunnable {
-public:
-  explicit ReleaseDecodersTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
-  {
-    mDecoders.SwapElements(aDecoders);
-  }
-
-  NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
-    mDecoders.Clear();
-    return NS_OK;
-  }
-
-private:
-  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
-};
-
-void
-MediaSourceReader::InitializePendingDecoders()
+bool
+MediaSourceReader::SwitchVideoReader(double aTarget)
 {
   ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
-  for (uint32_t i = 0; i < mPendingDecoders.Length(); ++i) {
-    nsRefPtr<SourceBufferDecoder> decoder = mPendingDecoders[i];
-    MediaDecoderReader* reader = decoder->GetReader();
-    MSE_DEBUG("MediaSourceReader(%p): Initializing subdecoder %p reader %p",
-              this, decoder.get(), reader);
-
-    MediaInfo mi;
-    nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
-    nsresult rv;
-    {
-      ReentrantMonitorAutoExit exitMon(mDecoder->GetReentrantMonitor());
-      rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
-    }
-    reader->SetIdle();
-    if (NS_FAILED(rv)) {
-      // XXX: Need to signal error back to owning SourceBuffer.
-      MSE_DEBUG("MediaSourceReader(%p): Reader %p failed to initialize rv=%x", this, reader, rv);
-      continue;
-    }
+  // XXX: Can't handle adding a video track after ReadMetadata yet.
+  if (!mVideoTrack) {
+    return false;
+  }
+  auto& decoders = mVideoTrack->Decoders();
+  for (uint32_t i = 0; i < decoders.Length(); ++i) {
+    nsRefPtr<dom::TimeRanges> ranges = new dom::TimeRanges();
+    decoders[i]->GetBuffered(ranges);
 
-    bool active = false;
-    if (mi.HasVideo() || mi.HasAudio()) {
-      MSE_DEBUG("MediaSourceReader(%p): Reader %p has video=%d audio=%d",
-                this, reader, mi.HasVideo(), mi.HasAudio());
-      if (mi.HasVideo()) {
-        MSE_DEBUG("MediaSourceReader(%p): Reader %p video resolution=%dx%d",
-                  this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
+    MediaDecoderReader* newReader = decoders[i]->GetReader();
+    MSE_DEBUGV("MediaDecoderReader(%p)::SwitchVideoReader(%f) videoReader=%p reader=%p ranges=%s",
+               this, aTarget, mVideoReader.get(), newReader, DumpTimeRanges(ranges).get());
+
+    if (ranges->Find(aTarget) != dom::TimeRanges::NoIndex) {
+      if (newReader->VideoQueue().AtEndOfStream()) {
+        continue;
       }
-      if (mi.HasAudio()) {
-        MSE_DEBUG("MediaSourceReader(%p): Reader %p audio sampleRate=%d channels=%d",
-                  this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
+      if (mVideoReader) {
+        mVideoReader->SetIdle();
       }
-      active = true;
-    }
-
-    if (active) {
-      mDecoders.AppendElement(decoder);
-    } else {
-      MSE_DEBUG("MediaSourceReader(%p): Reader %p not activated", this, reader);
+      mVideoReader = newReader;
+      MSE_DEBUG("MediaDecoderReader(%p)::SwitchVideoReader(%f) switching to video reader %p",
+                this, aTarget, mVideoReader.get());
+      return true;
     }
   }
-  NS_DispatchToMainThread(new ReleaseDecodersTask(mPendingDecoders));
-  MOZ_ASSERT(mPendingDecoders.IsEmpty());
-  mDecoder->NotifyWaitingForResourcesStatusChanged();
+
+  return false;
 }
 
 MediaDecoderReader*
 CreateReaderForType(const nsACString& aType, AbstractMediaDecoder* aDecoder)
 {
 #ifdef MOZ_FMP4
   // The MP4Reader that supports fragmented MP4 and uses
   // PlatformDecoderModules is hidden behind prefs for regular video
@@ -371,32 +329,61 @@ MediaSourceReader::CreateSubDecoder(cons
   // Set a callback on the subreader that forwards calls to this reader.
   // This reader will then forward them onto the state machine via this
   // reader's callback.
   RefPtr<MediaDataDecodedListener<MediaSourceReader>> callback =
     new MediaDataDecodedListener<MediaSourceReader>(this, GetTaskQueue());
   reader->SetCallback(callback);
   reader->SetTaskQueue(GetTaskQueue());
   reader->Init(nullptr);
-  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+
   MSE_DEBUG("MediaSourceReader(%p)::CreateSubDecoder subdecoder %p subreader %p",
             this, decoder.get(), reader.get());
   decoder->SetReader(reader);
-  mPendingDecoders.AppendElement(decoder);
-  RefPtr<nsIRunnable> task =
-    NS_NewRunnableMethod(this, &MediaSourceReader::InitializePendingDecoders);
-  if (NS_FAILED(GetTaskQueue()->Dispatch(task))) {
-    MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
-    return nullptr;
-  }
-  mDecoder->NotifyWaitingForResourcesStatusChanged();
   return decoder.forget();
 }
 
-namespace {
+void
+MediaSourceReader::AddTrackBuffer(TrackBuffer* aTrackBuffer)
+{
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  MSE_DEBUG("MediaSourceReader(%p)::AddTrackBuffer %p", this, aTrackBuffer);
+  mTrackBuffers.AppendElement(aTrackBuffer);
+}
+
+void
+MediaSourceReader::RemoveTrackBuffer(TrackBuffer* aTrackBuffer)
+{
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  MSE_DEBUG("MediaSourceReader(%p)::RemoveTrackBuffer %p", this, aTrackBuffer);
+  mTrackBuffers.RemoveElement(aTrackBuffer);
+  if (mAudioTrack == aTrackBuffer) {
+    mAudioTrack = nullptr;
+  }
+  if (mVideoTrack == aTrackBuffer) {
+    mVideoTrack = nullptr;
+  }
+}
+
+void
+MediaSourceReader::OnTrackBufferConfigured(TrackBuffer* aTrackBuffer, const MediaInfo& aInfo)
+{
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  MOZ_ASSERT(mTrackBuffers.Contains(aTrackBuffer));
+  if (aInfo.HasAudio() && !mAudioTrack) {
+    MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p audio", this, aTrackBuffer);
+    mAudioTrack = aTrackBuffer;
+  }
+  if (aInfo.HasVideo() && !mVideoTrack) {
+    MSE_DEBUG("MediaSourceReader(%p)::OnTrackBufferConfigured %p video", this, aTrackBuffer);
+    mVideoTrack = aTrackBuffer;
+  }
+  mDecoder->NotifyWaitingForResourcesStatusChanged();
+}
+
 class ChangeToHaveMetadata : public nsRunnable {
 public:
   explicit ChangeToHaveMetadata(AbstractMediaDecoder* aDecoder) :
     mDecoder(aDecoder)
   {
   }
 
   NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
@@ -405,117 +392,126 @@ public:
       owner->UpdateReadyStateForData(MediaDecoderOwner::NEXT_FRAME_WAIT_FOR_MSE_DATA);
     }
     return NS_OK;
   }
 
 private:
   nsRefPtr<AbstractMediaDecoder> mDecoder;
 };
-}
 
 bool
-MediaSourceReader::DecodersContainTime(double aTime)
+MediaSourceReader::TrackBuffersContainTime(double aTime)
 {
-  bool found = false;
-
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    if (!mDecoders[i]->IsDiscarded()) {
-      if (!mDecoders[i]->ContainsTime(aTime)) {
-        // No use to continue searching, one source buffer isn't ready yet
-        return false;
-      }
-      found = true;
-    }
+  ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
+  if (mAudioTrack && !mAudioTrack->ContainsTime(aTime)) {
+    return false;
   }
-  return found;
+  if (mVideoTrack && !mVideoTrack->ContainsTime(aTime)) {
+    return false;
+  }
+  return true;
 }
 
 nsresult
 MediaSourceReader::Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                         int64_t aCurrentTime)
 {
   MSE_DEBUG("MediaSourceReader(%p)::Seek(aTime=%lld, aStart=%lld, aEnd=%lld, aCurrent=%lld)",
             this, aTime, aStartTime, aEndTime, aCurrentTime);
+
+  ResetDecode();
+  for (uint32_t i = 0; i < mTrackBuffers.Length(); ++i) {
+    mTrackBuffers[i]->ResetDecode();
+  }
+
+  // Decoding discontinuity upon seek, reset last times to seek target.
+  mLastAudioTime = aTime;
+  mLastVideoTime = aTime;
+
   double target = static_cast<double>(aTime) / USECS_PER_S;
-  if (!DecodersContainTime(target)) {
+  if (!TrackBuffersContainTime(target)) {
     MSE_DEBUG("MediaSourceReader(%p)::Seek no active buffer contains target=%f", this, target);
     NS_DispatchToMainThread(new ChangeToHaveMetadata(mDecoder));
   }
 
   // Loop until we have the requested time range in the source buffers.
   // This is a workaround for our lack of async functionality in the
   // MediaDecoderStateMachine. Bug 979104 implements what we need and
-  // we'll remove this for an async approach based on that in bug XXXXXXX.
-  while (!DecodersContainTime(target) && !IsShutdown() && !IsEnded()) {
+  // we'll remove this for an async approach based on that in bug 1056441.
+  while (!TrackBuffersContainTime(target) && !IsShutdown() && !IsEnded()) {
     MSE_DEBUG("MediaSourceReader(%p)::Seek waiting for target=%f", this, target);
     static_cast<MediaSourceDecoder*>(mDecoder)->WaitForData();
-    SwitchReaders(SWITCH_FORCED);
   }
 
   if (IsShutdown()) {
     return NS_ERROR_FAILURE;
   }
 
-  ResetDecode();
-  if (mAudioReader) {
+  if (mAudioTrack) {
     mAudioIsSeeking = true;
+    DebugOnly<bool> ok = SwitchAudioReader(target);
+    MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mAudioReader->GetDecoder())->ContainsTime(target));
     nsresult rv = mAudioReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
     MSE_DEBUG("MediaSourceReader(%p)::Seek audio reader=%p rv=%x", this, mAudioReader.get(), rv);
     if (NS_FAILED(rv)) {
       return rv;
     }
   }
-  if (mVideoReader) {
+  if (mVideoTrack) {
     mVideoIsSeeking = true;
+    DebugOnly<bool> ok = SwitchVideoReader(target);
+    MOZ_ASSERT(ok && static_cast<SourceBufferDecoder*>(mVideoReader->GetDecoder())->ContainsTime(target));
     nsresult rv = mVideoReader->Seek(aTime, aStartTime, aEndTime, aCurrentTime);
     MSE_DEBUG("MediaSourceReader(%p)::Seek video reader=%p rv=%x", this, mVideoReader.get(), rv);
     if (NS_FAILED(rv)) {
       return rv;
     }
   }
   return NS_OK;
 }
 
 nsresult
 MediaSourceReader::ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags)
 {
-  InitializePendingDecoders();
-
-  MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata decoders=%u", this, mDecoders.Length());
+  MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata tracks=%u", this, mTrackBuffers.Length());
+  // ReadMetadata is called *before* checking IsWaitingMediaResources.
+  if (IsWaitingMediaResources()) {
+    return NS_OK;
+  }
+  if (!mAudioTrack && !mVideoTrack) {
+    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata missing track: mAudioTrack=%p mVideoTrack=%p",
+              this, mAudioTrack.get(), mVideoTrack.get());
+    return NS_ERROR_FAILURE;
+  }
 
-  // XXX: Make subdecoder setup async, so that use cases like bug 989888 can
-  // work.  This will require teaching the state machine about dynamic track
-  // changes (and multiple tracks).
-  // Shorter term, make this block until we've got at least one video track
-  // and lie about having an audio track, then resample/remix as necessary
-  // to match any audio track added later to fit the format we lied about
-  // now.  For now we just configure what we've got and cross our fingers.
   int64_t maxDuration = -1;
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    MediaDecoderReader* reader = mDecoders[i]->GetReader();
 
-    MediaInfo mi = reader->GetMediaInfo();
+  if (mAudioTrack) {
+    MOZ_ASSERT(mAudioTrack->IsReady());
+    mAudioReader = mAudioTrack->Decoders()[0]->GetReader();
 
-    if (mi.HasVideo() && !mInfo.HasVideo()) {
-      MOZ_ASSERT(!mVideoReader);
-      mVideoReader = reader;
-      mInfo.mVideo = mi.mVideo;
-      maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
-      MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
-                this, reader, maxDuration);
-    }
-    if (mi.HasAudio() && !mInfo.HasAudio()) {
-      MOZ_ASSERT(!mAudioReader);
-      mAudioReader = reader;
-      mInfo.mAudio = mi.mAudio;
-      maxDuration = std::max(maxDuration, mDecoders[i]->GetMediaDuration());
-      MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
-                this, reader, maxDuration);
-    }
+    const MediaInfo& info = mAudioReader->GetMediaInfo();
+    MOZ_ASSERT(info.HasAudio());
+    mInfo.mAudio = info.mAudio;
+    maxDuration = std::max(maxDuration, mAudioReader->GetDecoder()->GetMediaDuration());
+    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata audio reader=%p maxDuration=%lld",
+              this, mAudioReader.get(), maxDuration);
+  }
+
+  if (mVideoTrack) {
+    MOZ_ASSERT(mVideoTrack->IsReady());
+    mVideoReader = mVideoTrack->Decoders()[0]->GetReader();
+
+    const MediaInfo& info = mVideoReader->GetMediaInfo();
+    MOZ_ASSERT(info.HasVideo());
+    mInfo.mVideo = info.mVideo;
+    maxDuration = std::max(maxDuration, mVideoReader->GetDecoder()->GetMediaDuration());
+    MSE_DEBUG("MediaSourceReader(%p)::ReadMetadata video reader=%p maxDuration=%lld",
+              this, mVideoReader.get(), maxDuration);
   }
 
   if (maxDuration != -1) {
     ReentrantMonitorAutoEnter mon(mDecoder->GetReentrantMonitor());
     mDecoder->SetMediaDuration(maxDuration);
     nsRefPtr<nsIRunnable> task (
       NS_NewRunnableMethodWithArg<double>(static_cast<MediaSourceDecoder*>(mDecoder),
                                           &MediaSourceDecoder::SetMediaSourceDuration,
--- a/content/media/mediasource/MediaSourceReader.h
+++ b/content/media/mediasource/MediaSourceReader.h
@@ -14,16 +14,17 @@
 #include "nsString.h"
 #include "nsTArray.h"
 #include "MediaDecoderReader.h"
 
 namespace mozilla {
 
 class MediaSourceDecoder;
 class SourceBufferDecoder;
+class TrackBuffer;
 
 namespace dom {
 
 class MediaSource;
 
 } // namespace dom
 
 class MediaSourceReader : public MediaDecoderReader
@@ -65,61 +66,61 @@ public:
     return mInfo.HasAudio();
   }
 
   bool IsMediaSeekable() { return true; }
 
   nsresult ReadMetadata(MediaInfo* aInfo, MetadataTags** aTags) MOZ_OVERRIDE;
   nsresult Seek(int64_t aTime, int64_t aStartTime, int64_t aEndTime,
                 int64_t aCurrentTime) MOZ_OVERRIDE;
+
   already_AddRefed<SourceBufferDecoder> CreateSubDecoder(const nsACString& aType);
 
+  void AddTrackBuffer(TrackBuffer* aTrackBuffer);
+  void RemoveTrackBuffer(TrackBuffer* aTrackBuffer);
+  void OnTrackBufferConfigured(TrackBuffer* aTrackBuffer, const MediaInfo& aInfo);
+
   void Shutdown();
 
   virtual void BreakCycles();
 
-  void InitializePendingDecoders();
-
   bool IsShutdown()
   {
     ReentrantMonitorAutoEnter decoderMon(mDecoder->GetReentrantMonitor());
     return mDecoder->IsShutdown();
   }
 
-  // Return true if any of the active decoders contain data for the given time
-  bool DecodersContainTime(double aTime);
+  // Return true if all of the active tracks contain data for the specified time.
+  bool TrackBuffersContainTime(double aTime);
 
   // Mark the reader to indicate that EndOfStream has been called on our MediaSource
   void Ended();
 
   // Return true if the Ended method has been called
   bool IsEnded();
 
 private:
-  enum SwitchType {
-    SWITCH_OPTIONAL,
-    SWITCH_FORCED
-  };
+  bool SwitchAudioReader(double aTarget);
+  bool SwitchVideoReader(double aTarget);
 
-  bool SwitchReaders(SwitchType aType);
+  nsRefPtr<MediaDecoderReader> mAudioReader;
+  nsRefPtr<MediaDecoderReader> mVideoReader;
 
-  bool SwitchAudioReader(MediaDecoderReader* aTargetReader);
-  bool SwitchVideoReader(MediaDecoderReader* aTargetReader);
+  nsTArray<nsRefPtr<TrackBuffer>> mTrackBuffers;
+  nsRefPtr<TrackBuffer> mAudioTrack;
+  nsRefPtr<TrackBuffer> mVideoTrack;
 
   // These are read and written on the decode task queue threads.
+  int64_t mLastAudioTime;
+  int64_t mLastVideoTime;
+
   int64_t mTimeThreshold;
   bool mDropAudioBeforeThreshold;
   bool mDropVideoBeforeThreshold;
 
-  nsTArray<nsRefPtr<SourceBufferDecoder>> mPendingDecoders;
-  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
-
-  nsRefPtr<MediaDecoderReader> mAudioReader;
-  nsRefPtr<MediaDecoderReader> mVideoReader;
-
   bool mEnded;
 
   // For a seek to complete we need to send a sample with
   // the mDiscontinuity field set to true once we have the
   // first decoded sample. These flags are set during seeking
   // so we can detect when we have the first decoded sample
   // after a seek.
   bool mAudioIsSeeking;
--- a/content/media/mediasource/SourceBuffer.cpp
+++ b/content/media/mediasource/SourceBuffer.cpp
@@ -1,37 +1,33 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "SourceBuffer.h"
 
 #include "AsyncEventRunner.h"
-#include "DecoderTraits.h"
-#include "MediaDecoder.h"
-#include "MediaSourceDecoder.h"
 #include "MediaSourceUtils.h"
-#include "SourceBufferResource.h"
+#include "TrackBuffer.h"
+#include "VideoUtils.h"
+#include "WebMBufferedParser.h"
 #include "mozilla/Endian.h"
 #include "mozilla/ErrorResult.h"
 #include "mozilla/FloatingPoint.h"
+#include "mozilla/Preferences.h"
 #include "mozilla/dom/MediaSourceBinding.h"
 #include "mozilla/dom/TimeRanges.h"
 #include "mp4_demuxer/BufferStream.h"
 #include "mp4_demuxer/MoofParser.h"
 #include "nsError.h"
 #include "nsIEventTarget.h"
 #include "nsIRunnable.h"
 #include "nsThreadUtils.h"
 #include "prlog.h"
-#include "SourceBufferDecoder.h"
-#include "mozilla/Preferences.h"
-
-#include "WebMBufferedParser.h"
 
 struct JSContext;
 class JSObject;
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* GetMediaSourceLog();
 extern PRLogModuleInfo* GetMediaSourceAPILog();
 
@@ -330,29 +326,18 @@ SourceBuffer::SetTimestampOffset(double 
 already_AddRefed<TimeRanges>
 SourceBuffer::GetBuffered(ErrorResult& aRv)
 {
   MOZ_ASSERT(NS_IsMainThread());
   if (!IsAttached()) {
     aRv.Throw(NS_ERROR_DOM_INVALID_STATE_ERR);
     return nullptr;
   }
-  double highestEndTime = 0;
   nsRefPtr<TimeRanges> ranges = new TimeRanges();
-  // TODO: Need to adjust mDecoders so it only tracks active decoders.
-  // Once we have an abstraction for track buffers, this needs to report the
-  // intersection of buffered ranges within those track buffers.
-  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
-    nsRefPtr<TimeRanges> r = new TimeRanges();
-    mDecoders[i]->GetBuffered(r);
-    if (r->Length() > 0) {
-      highestEndTime = std::max(highestEndTime, r->GetEndTime());
-      ranges->Union(r);
-    }
-  }
+  double highestEndTime = mTrackBuffer->Buffered(ranges);
   if (mMediaSource->ReadyState() == MediaSourceReadyState::Ended) {
     // Set the end time on the last range to highestEndTime by adding a
     // new range spanning the current end time to highestEndTime, which
     // Normalize() will then merge with the old last range.
     ranges->Add(ranges->GetEndTime(), highestEndTime);
     ranges->Normalize();
   }
   MSE_DEBUGV("SourceBuffer(%p)::GetBuffered ranges=%s", this, DumpTimeRanges(ranges).get());
@@ -427,17 +412,17 @@ SourceBuffer::Abort(ErrorResult& aRv)
     // TODO: Abort segment parser loop, buffer append, and stream append loop algorithms.
     AbortUpdating();
   }
   // TODO: Run reset parser algorithm.
   mAppendWindowStart = 0;
   mAppendWindowEnd = PositiveInfinity<double>();
 
   MSE_DEBUG("SourceBuffer(%p)::Abort() Discarding decoder", this);
-  DiscardDecoder();
+  mTrackBuffer->DiscardDecoder();
 }
 
 void
 SourceBuffer::Remove(double aStart, double aEnd, ErrorResult& aRv)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_API("SourceBuffer(%p)::Remove(aStart=%f, aEnd=%f)", this, aStart, aEnd);
   if (!IsAttached()) {
@@ -459,55 +444,55 @@ SourceBuffer::Remove(double aStart, doub
   StopUpdating();
 }
 
 void
 SourceBuffer::Detach()
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_DEBUG("SourceBuffer(%p)::Detach", this);
-  Ended();
-  DiscardDecoder();
+  if (mTrackBuffer) {
+    mTrackBuffer->Detach();
+  }
+  mTrackBuffer = nullptr;
   mMediaSource = nullptr;
 }
 
 void
 SourceBuffer::Ended()
 {
   MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(IsAttached());
   MSE_DEBUG("SourceBuffer(%p)::Ended", this);
-  if (mDecoder) {
-    mDecoder->GetResource()->Ended();
-  }
+  mTrackBuffer->DiscardDecoder();
 }
 
 SourceBuffer::SourceBuffer(MediaSource* aMediaSource, const nsACString& aType)
   : DOMEventTargetHelper(aMediaSource->GetParentObject())
   , mMediaSource(aMediaSource)
   , mType(aType)
-  , mLastParsedTimestamp(UnspecifiedNaN<double>())
   , mAppendWindowStart(0)
   , mAppendWindowEnd(PositiveInfinity<double>())
   , mTimestampOffset(0)
   , mAppendMode(SourceBufferAppendMode::Segments)
   , mUpdating(false)
-  , mDecoderInitialized(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(aMediaSource);
   mParser = ContainerParser::CreateForMIMEType(aType);
-  MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Creating initial decoder, mParser=%p", this, mParser.get());
-  InitNewDecoder();
+  mTrackBuffer = new TrackBuffer(aMediaSource->GetDecoder(), aType);
+  MSE_DEBUG("SourceBuffer(%p)::SourceBuffer: Create mParser=%p mTrackBuffer=%p",
+            this, mParser.get(), mTrackBuffer.get());
 }
 
 SourceBuffer::~SourceBuffer()
 {
   MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(!mMediaSource);
   MSE_DEBUG("SourceBuffer(%p)::~SourceBuffer", this);
-  DiscardDecoder();
 }
 
 MediaSource*
 SourceBuffer::GetParentObject() const
 {
   return mMediaSource;
 }
 
@@ -528,47 +513,16 @@ SourceBuffer::DispatchSimpleEvent(const 
 void
 SourceBuffer::QueueAsyncSimpleEvent(const char* aName)
 {
   MSE_DEBUG("SourceBuffer(%p) Queuing event '%s'", this, aName);
   nsCOMPtr<nsIRunnable> event = new AsyncEventRunner<SourceBuffer>(this, aName);
   NS_DispatchToMainThread(event, NS_DISPATCH_NORMAL);
 }
 
-bool
-SourceBuffer::InitNewDecoder()
-{
-  MOZ_ASSERT(NS_IsMainThread());
-  MSE_DEBUG("SourceBuffer(%p)::InitNewDecoder", this);
-  MOZ_ASSERT(!mDecoder);
-  MediaSourceDecoder* parentDecoder = mMediaSource->GetDecoder();
-  nsRefPtr<SourceBufferDecoder> decoder = parentDecoder->CreateSubDecoder(mType);
-  if (!decoder) {
-    return false;
-  }
-  mDecoder = decoder;
-  mDecoderInitialized = false;
-  mDecoders.AppendElement(mDecoder);
-  return true;
-}
-
-void
-SourceBuffer::DiscardDecoder()
-{
-  MOZ_ASSERT(NS_IsMainThread());
-  MSE_DEBUG("SourceBuffer(%p)::DiscardDecoder mDecoder=%p", this, mDecoder.get());
-  if (mDecoder) {
-    mDecoder->SetDiscarded();
-  }
-  mDecoder = nullptr;
-  mDecoderInitialized = false;
-  // XXX: Parser reset may be required?
-  mLastParsedTimestamp = UnspecifiedNaN<double>();
-}
-
 void
 SourceBuffer::StartUpdating()
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(!mUpdating);
   mUpdating = true;
   QueueAsyncSimpleEvent("updatestart");
 }
@@ -605,80 +559,75 @@ SourceBuffer::AppendData(const uint8_t* 
     mMediaSource->SetReadyState(MediaSourceReadyState::Open);
   }
   // TODO: Run coded frame eviction algorithm.
   // TODO: Test buffer full flag.
   StartUpdating();
   // TODO: Run buffer append algorithm asynchronously (would call StopUpdating()).
   if (mParser->IsInitSegmentPresent(aData, aLength)) {
     MSE_DEBUG("SourceBuffer(%p)::AppendData: New initialization segment.", this);
-    if (mDecoderInitialized) {
-      // Existing decoder has been used, time for a new one.
-      DiscardDecoder();
-    }
-
-    // If we've got a decoder here, it's not initialized, so we can use it
-    // rather than creating a new one.
-    if (!mDecoder && !InitNewDecoder()) {
+    mTrackBuffer->DiscardDecoder();
+    if (!mTrackBuffer->NewDecoder()) {
       aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
       return;
     }
     MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
-    mDecoderInitialized = true;
-  } else if (!mDecoderInitialized) {
-    MSE_DEBUG("SourceBuffer(%p)::AppendData: Non-init segment appended during initialization.");
+  } else if (!mTrackBuffer->HasInitSegment()) {
+    MSE_DEBUG("SourceBuffer(%p)::AppendData: Non-init segment appended during initialization.", this);
     Optional<MediaSourceEndOfStreamError> decodeError(MediaSourceEndOfStreamError::Decode);
     ErrorResult dummy;
     mMediaSource->EndOfStream(decodeError, dummy);
     aRv.Throw(NS_ERROR_FAILURE);
     return;
   }
   double start, end;
   if (mParser->ParseStartAndEndTimestamps(aData, aLength, start, end)) {
+    double lastStart, lastEnd;
+    mTrackBuffer->LastTimestamp(lastStart, lastEnd);
     if (mParser->IsMediaSegmentPresent(aData, aLength) &&
-        (start < mLastParsedTimestamp || start - mLastParsedTimestamp > 0.1)) {
-      MSE_DEBUG("SourceBuffer(%p)::AppendData: Data (%f, %f) overlaps %f.",
-                this, start, end, mLastParsedTimestamp);
+        (start < lastEnd || start - lastEnd > 0.1)) {
+      MSE_DEBUG("SourceBuffer(%p)::AppendData: Data last=[%f, %f] overlaps [%f, %f]",
+                this, lastStart, lastEnd, start, end);
 
       // This data is earlier in the timeline than data we have already
       // processed, so we must create a new decoder to handle the decoding.
-      DiscardDecoder();
+      mTrackBuffer->DiscardDecoder();
 
       // If we've got a decoder here, it's not initialized, so we can use it
       // rather than creating a new one.
-      if (!InitNewDecoder()) {
+      if (!mTrackBuffer->NewDecoder()) {
         aRv.Throw(NS_ERROR_FAILURE); // XXX: Review error handling.
         return;
       }
       MSE_DEBUG("SourceBuffer(%p)::AppendData: Decoder marked as initialized.", this);
-      mDecoderInitialized = true;
       const nsTArray<uint8_t>& initData = mParser->InitData();
-      mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(initData.Elements()),
-                                  initData.Length(),
-                                  0);
-      mDecoder->GetResource()->AppendData(initData.Elements(), initData.Length());
+      mTrackBuffer->AppendData(initData.Elements(), initData.Length());
+      mTrackBuffer->SetLastStartTimestamp(start);
     }
-    mLastParsedTimestamp = end;
-    MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment start=%f end=%f", this, start, end);
+    mTrackBuffer->SetLastEndTimestamp(end);
+    MSE_DEBUG("SourceBuffer(%p)::AppendData: Segment last=[%f, %f] [%f, %f]",
+              this, lastStart, lastEnd, start, end);
   }
-  // XXX: For future reference: NDA call must run on the main thread.
-  mDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
-                              aLength,
-                              mDecoder->GetResource()->GetLength());
-  mDecoder->GetResource()->AppendData(aData, aLength);
+  if (!mTrackBuffer->AppendData(aData, aLength)) {
+    Optional<MediaSourceEndOfStreamError> decodeError(MediaSourceEndOfStreamError::Decode);
+    ErrorResult dummy;
+    mMediaSource->EndOfStream(decodeError, dummy);
+    aRv.Throw(NS_ERROR_FAILURE);
+    return;
+  }
 
   // Eviction uses a byte threshold. If the buffer is greater than the
   // number of bytes then data is evicted. The time range for this
   // eviction is reported back to the media source. It will then
   // evict data before that range across all SourceBuffers it knows
   // about.
   // TODO: Make the eviction threshold smaller for audio-only streams.
   // TODO: Drive evictions off memory pressure notifications.
   const uint32_t evict_threshold = 75 * (1 << 20);
-  bool evicted = mDecoder->GetResource()->EvictData(evict_threshold);
+  bool evicted = mTrackBuffer->EvictData(evict_threshold);
   if (evicted) {
     MSE_DEBUG("SourceBuffer(%p)::AppendData Evict; current buffered start=%f",
               this, GetBufferedStart());
 
     // We notify that we've evicted from the time range 0 through to
     // the current start point.
     mMediaSource->NotifyEvicted(0.0, GetBufferedStart());
   }
@@ -709,30 +658,23 @@ SourceBuffer::GetBufferedEnd()
   return ranges->Length() > 0 ? ranges->GetEndTime() : 0;
 }
 
 void
 SourceBuffer::Evict(double aStart, double aEnd)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MSE_DEBUG("SourceBuffer(%p)::Evict(aStart=%f, aEnd=%f)", this, aStart, aEnd);
-  if (!mDecoder) {
-    return;
-  }
   double currentTime = mMediaSource->GetDecoder()->GetCurrentTime();
   double evictTime = aEnd;
   const double safety_threshold = 5;
   if (currentTime + safety_threshold >= evictTime) {
     evictTime -= safety_threshold;
   }
-  int64_t endOffset = mDecoder->ConvertToByteOffset(evictTime);
-  if (endOffset > 0) {
-    mDecoder->GetResource()->EvictBefore(endOffset);
-  }
-  MSE_DEBUG("SourceBuffer(%p)::Evict offset=%lld", this, endOffset);
+  mTrackBuffer->EvictBefore(evictTime);
 }
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(SourceBuffer, DOMEventTargetHelper,
                                    mMediaSource)
 
 NS_IMPL_ADDREF_INHERITED(SourceBuffer, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(SourceBuffer, DOMEventTargetHelper)
 
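With mLastParsedTimestamp and the per-SourceBuffer decoder list removed, AppendData above only has to decide how to route each appended buffer to its TrackBuffer. A standalone sketch of that decision, with a hypothetical enum and function name; the 0.1 second gap threshold is the one used in the patch:

enum class AppendAction {
  StartNewDecoder,         // init segment: DiscardDecoder() then NewDecoder()
  StartNewDecoderWithInit, // discontinuous media segment: new decoder, replay cached init data
  AppendToCurrent          // contiguous media segment: TrackBuffer::AppendData()
};

AppendAction ClassifyAppend(bool aIsInitSegment, bool aIsMediaSegment,
                            double aSegmentStart, double aLastEndTimestamp)
{
  if (aIsInitSegment) {
    return AppendAction::StartNewDecoder;
  }
  // A media segment starting before, or more than 100ms after, the last
  // appended end timestamp is treated as a discontinuity.
  if (aIsMediaSegment &&
      (aSegmentStart < aLastEndTimestamp ||
       aSegmentStart - aLastEndTimestamp > 0.1)) {
    return AppendAction::StartNewDecoderWithInit;
  }
  return AppendAction::AppendToCurrent;
}

Because mLastEndTimestamp starts out as NaN, both comparisons are false for the first media segment after an init segment, so that segment is appended to the decoder that was just created.
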
--- a/content/media/mediasource/SourceBuffer.h
+++ b/content/media/mediasource/SourceBuffer.h
@@ -2,42 +2,40 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef mozilla_dom_SourceBuffer_h_
 #define mozilla_dom_SourceBuffer_h_
 
-#include "MediaDecoderReader.h"
 #include "MediaSource.h"
 #include "js/RootingAPI.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Attributes.h"
+#include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/dom/SourceBufferBinding.h"
 #include "mozilla/dom/TypedArray.h"
-#include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/mozalloc.h"
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
 #include "nsCycleCollectionNoteChild.h"
 #include "nsCycleCollectionParticipant.h"
 #include "nsISupports.h"
 #include "nsString.h"
 #include "nscore.h"
 
 class JSObject;
 struct JSContext;
 
 namespace mozilla {
 
 class ContainerParser;
 class ErrorResult;
-class SourceBufferResource;
-class SourceBufferDecoder;
+class TrackBuffer;
 template <typename T> class AsyncEventRunner;
 
 namespace dom {
 
 class TimeRanges;
 
 class SourceBuffer MOZ_FINAL : public DOMEventTargetHelper
 {
@@ -134,28 +132,23 @@ private:
   void AppendData(const uint8_t* aData, uint32_t aLength, ErrorResult& aRv);
 
   nsRefPtr<MediaSource> mMediaSource;
 
   const nsCString mType;
 
   nsAutoPtr<ContainerParser> mParser;
 
-  double mLastParsedTimestamp;
-
-  nsRefPtr<SourceBufferDecoder> mDecoder;
-  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
+  nsRefPtr<TrackBuffer> mTrackBuffer;
 
   double mAppendWindowStart;
   double mAppendWindowEnd;
 
   double mTimestampOffset;
 
   SourceBufferAppendMode mAppendMode;
   bool mUpdating;
-
-  bool mDecoderInitialized;
 };
 
 } // namespace dom
 
 } // namespace mozilla
 #endif /* mozilla_dom_SourceBuffer_h_ */
--- a/content/media/mediasource/SourceBufferDecoder.cpp
+++ b/content/media/mediasource/SourceBufferDecoder.cpp
@@ -33,17 +33,16 @@ class ImageContainer;
 NS_IMPL_ISUPPORTS0(SourceBufferDecoder)
 
 SourceBufferDecoder::SourceBufferDecoder(MediaResource* aResource,
                                          AbstractMediaDecoder* aParentDecoder)
   : mResource(aResource)
   , mParentDecoder(aParentDecoder)
   , mReader(nullptr)
   , mMediaDuration(-1)
-  , mDiscarded(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_COUNT_CTOR(SourceBufferDecoder);
 }
 
 SourceBufferDecoder::~SourceBufferDecoder()
 {
   MOZ_COUNT_DTOR(SourceBufferDecoder);
@@ -142,16 +141,20 @@ bool
 SourceBufferDecoder::OnStateMachineThread() const
 {
   return mParentDecoder->OnStateMachineThread();
 }
 
 bool
 SourceBufferDecoder::OnDecodeThread() const
 {
+  // During initialization we run on our TrackBuffer's task queue.
+  if (mTaskQueue) {
+    return mTaskQueue->IsCurrentThreadIn();
+  }
   return mParentDecoder->OnDecodeThread();
 }
 
 SourceBufferResource*
 SourceBufferDecoder::GetResource() const
 {
   return static_cast<SourceBufferResource*>(mResource.get());
 }
--- a/content/media/mediasource/SourceBufferDecoder.h
+++ b/content/media/mediasource/SourceBufferDecoder.h
@@ -3,19 +3,20 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_SOURCEBUFFERDECODER_H_
 #define MOZILLA_SOURCEBUFFERDECODER_H_
 
 #include "AbstractMediaDecoder.h"
+#include "MediaDecoderReader.h"
+#include "SourceBufferResource.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/ReentrantMonitor.h"
-#include "SourceBufferResource.h"
 
 namespace mozilla {
 
 class MediaResource;
 class MediaDecoderReader;
 
 namespace dom {
 
@@ -69,40 +70,37 @@ public:
     mReader = aReader;
   }
 
   MediaDecoderReader* GetReader()
   {
     return mReader;
   }
 
+  void SetTaskQueue(MediaTaskQueue* aTaskQueue)
+  {
+    MOZ_ASSERT((!mTaskQueue && aTaskQueue) || (mTaskQueue && !aTaskQueue));
+    mTaskQueue = aTaskQueue;
+  }
+
   // Given a time convert it into an approximate byte offset from the
   // cached data. Returns -1 if no such value is computable.
   int64_t ConvertToByteOffset(double aTime);
 
-  bool IsDiscarded()
-  {
-    return mDiscarded;
-  }
-
-  void SetDiscarded()
-  {
-    GetResource()->Ended();
-    mDiscarded = true;
-  }
-
   // Returns true if the data buffered by this decoder contains the given time.
   bool ContainsTime(double aTime);
 
 private:
   virtual ~SourceBufferDecoder();
 
+  // Our TrackBuffer's task queue, this is only non-null during initialization.
+  RefPtr<MediaTaskQueue> mTaskQueue;
+
   nsRefPtr<MediaResource> mResource;
 
   AbstractMediaDecoder* mParentDecoder;
   nsRefPtr<MediaDecoderReader> mReader;
   int64_t mMediaDuration;
-  bool mDiscarded;
 };
 
 } // namespace mozilla
 
 #endif /* MOZILLA_SOURCEBUFFERDECODER_H_ */
new file mode 100644
--- /dev/null
+++ b/content/media/mediasource/TrackBuffer.cpp
@@ -0,0 +1,339 @@
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "TrackBuffer.h"
+
+#include "MediaSourceDecoder.h"
+#include "SharedThreadPool.h"
+#include "MediaTaskQueue.h"
+#include "SourceBufferDecoder.h"
+#include "SourceBufferResource.h"
+#include "VideoUtils.h"
+#include "mozilla/FloatingPoint.h"
+#include "mozilla/dom/MediaSourceBinding.h"
+#include "mozilla/dom/TimeRanges.h"
+#include "nsError.h"
+#include "nsIRunnable.h"
+#include "nsThreadUtils.h"
+#include "prlog.h"
+
+struct JSContext;
+class JSObject;
+
+#ifdef PR_LOGGING
+extern PRLogModuleInfo* GetMediaSourceLog();
+extern PRLogModuleInfo* GetMediaSourceAPILog();
+
+#define MSE_DEBUG(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG, (__VA_ARGS__))
+#define MSE_DEBUGV(...) PR_LOG(GetMediaSourceLog(), PR_LOG_DEBUG+1, (__VA_ARGS__))
+#define MSE_API(...) PR_LOG(GetMediaSourceAPILog(), PR_LOG_DEBUG, (__VA_ARGS__))
+#else
+#define MSE_DEBUG(...)
+#define MSE_DEBUGV(...)
+#define MSE_API(...)
+#endif
+
+namespace mozilla {
+
+TrackBuffer::TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType)
+  : mParentDecoder(aParentDecoder)
+  , mType(aType)
+  , mLastStartTimestamp(0)
+  , mLastEndTimestamp(UnspecifiedNaN<double>())
+  , mHasInit(false)
+  , mHasAudio(false)
+  , mHasVideo(false)
+{
+  MOZ_COUNT_CTOR(TrackBuffer);
+  mTaskQueue = new MediaTaskQueue(GetMediaDecodeThreadPool());
+  aParentDecoder->AddTrackBuffer(this);
+}
+
+TrackBuffer::~TrackBuffer()
+{
+  MOZ_COUNT_DTOR(TrackBuffer);
+}
+
+class ReleaseDecoderTask : public nsRunnable {
+public:
+  explicit ReleaseDecoderTask(nsRefPtr<SourceBufferDecoder> aDecoder)
+  {
+    mDecoders.AppendElement(aDecoder);
+  }
+
+  explicit ReleaseDecoderTask(nsTArray<nsRefPtr<SourceBufferDecoder>>& aDecoders)
+  {
+    mDecoders.SwapElements(aDecoders);
+  }
+
+  NS_IMETHOD Run() MOZ_OVERRIDE MOZ_FINAL {
+    mDecoders.Clear();
+    return NS_OK;
+  }
+
+private:
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
+};
+
+void
+TrackBuffer::Shutdown()
+{
+  // Shutdown waits for any pending events, which may require the monitor,
+  // so we must not hold the monitor during this call.
+  mParentDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
+  mTaskQueue->Shutdown();
+  mTaskQueue = nullptr;
+
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  DiscardDecoder();
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    mDecoders[i]->GetReader()->Shutdown();
+  }
+  NS_DispatchToMainThread(new ReleaseDecoderTask(mDecoders));
+  MOZ_ASSERT(mDecoders.IsEmpty());
+  mParentDecoder = nullptr;
+}
+
+bool
+TrackBuffer::AppendData(const uint8_t* aData, uint32_t aLength)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  if (!mCurrentDecoder) {
+    return false;
+  }
+
+  SourceBufferResource* resource = mCurrentDecoder->GetResource();
+  // XXX: For future reference: NDA call must run on the main thread.
+  mCurrentDecoder->NotifyDataArrived(reinterpret_cast<const char*>(aData),
+                                     aLength, resource->GetLength());
+  resource->AppendData(aData, aLength);
+  return true;
+}
+
+bool
+TrackBuffer::EvictData(uint32_t aThreshold)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  // XXX Call EvictData on mDecoders?
+  return mCurrentDecoder->GetResource()->EvictData(aThreshold);
+}
+
+void
+TrackBuffer::EvictBefore(double aTime)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  // XXX Call EvictBefore on mDecoders?
+  int64_t endOffset = mCurrentDecoder->ConvertToByteOffset(aTime);
+  if (endOffset > 0) {
+    mCurrentDecoder->GetResource()->EvictBefore(endOffset);
+  }
+  MSE_DEBUG("TrackBuffer(%p)::EvictBefore offset=%lld", this, endOffset);
+}
+
+double
+TrackBuffer::Buffered(dom::TimeRanges* aRanges)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  // XXX check default if mDecoders empty?
+  double highestEndTime = 0;
+
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
+    mDecoders[i]->GetBuffered(r);
+    if (r->Length() > 0) {
+      highestEndTime = std::max(highestEndTime, r->GetEndTime());
+      aRanges->Union(r);
+    }
+  }
+
+  return highestEndTime;
+}
+
+bool
+TrackBuffer::NewDecoder()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(!mCurrentDecoder && mParentDecoder);
+
+  nsRefPtr<SourceBufferDecoder> decoder = mParentDecoder->CreateSubDecoder(mType);
+  if (!decoder) {
+    return false;
+  }
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  mCurrentDecoder = decoder;
+
+  mLastStartTimestamp = 0;
+  mLastEndTimestamp = UnspecifiedNaN<double>();
+  mHasInit = true;
+
+  return QueueInitializeDecoder(decoder);
+}
+
+bool
+TrackBuffer::QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
+{
+  RefPtr<nsIRunnable> task =
+    NS_NewRunnableMethodWithArg<nsRefPtr<SourceBufferDecoder>>(this,
+                                                               &TrackBuffer::InitializeDecoder,
+                                                               aDecoder);
+  aDecoder->SetTaskQueue(mTaskQueue);
+  if (NS_FAILED(mTaskQueue->Dispatch(task))) {
+    MSE_DEBUG("MediaSourceReader(%p): Failed to enqueue decoder initialization task", this);
+    return false;
+  }
+  return true;
+}
+
+void
+TrackBuffer::InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
+{
+  // ReadMetadata may block the thread waiting on data, so it must not be
+  // called with the monitor held.
+  mParentDecoder->GetReentrantMonitor().AssertNotCurrentThreadIn();
+
+  MediaDecoderReader* reader = aDecoder->GetReader();
+  MSE_DEBUG("TrackBuffer(%p): Initializing subdecoder %p reader %p",
+            this, aDecoder.get(), reader);
+
+  MediaInfo mi;
+  nsAutoPtr<MetadataTags> tags; // TODO: Handle metadata.
+  nsresult rv = reader->ReadMetadata(&mi, getter_Transfers(tags));
+  reader->SetIdle();
+  if (NS_FAILED(rv) || (!mi.HasVideo() && !mi.HasAudio())) {
+    // XXX: Need to signal error back to owning SourceBuffer.
+    MSE_DEBUG("TrackBuffer(%p): Reader %p failed to initialize rv=%x audio=%d video=%d",
+              this, reader, rv, mi.HasAudio(), mi.HasVideo());
+    aDecoder->SetTaskQueue(nullptr);
+    NS_DispatchToMainThread(new ReleaseDecoderTask(aDecoder));
+    return;
+  }
+
+  if (mi.HasVideo()) {
+    MSE_DEBUG("TrackBuffer(%p): Reader %p video resolution=%dx%d",
+              this, reader, mi.mVideo.mDisplay.width, mi.mVideo.mDisplay.height);
+  }
+  if (mi.HasAudio()) {
+    MSE_DEBUG("TrackBuffer(%p): Reader %p audio sampleRate=%d channels=%d",
+              this, reader, mi.mAudio.mRate, mi.mAudio.mChannels);
+  }
+
+  MSE_DEBUG("TrackBuffer(%p): Reader %p activated", this, reader);
+  RegisterDecoder(aDecoder);
+}
+
+void
+TrackBuffer::RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder)
+{
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  aDecoder->SetTaskQueue(nullptr);
+  const MediaInfo& info = aDecoder->GetReader()->GetMediaInfo();
+  // Initialize the track info since this is the first decoder.
+  if (mDecoders.IsEmpty()) {
+    mHasAudio = info.HasAudio();
+    mHasVideo = info.HasVideo();
+    mParentDecoder->OnTrackBufferConfigured(this, info);
+  } else if ((info.HasAudio() && !mHasAudio) || (info.HasVideo() && !mHasVideo)) {
+    MSE_DEBUG("TrackBuffer(%p)::RegisterDecoder with mismatched audio/video tracks", this);
+  }
+  mDecoders.AppendElement(aDecoder);
+}
+
+void
+TrackBuffer::DiscardDecoder()
+{
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  if (mCurrentDecoder) {
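+    // Signal that no more data will be appended to this decoder's resource.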
+    mCurrentDecoder->GetResource()->Ended();
+  }
+  mCurrentDecoder = nullptr;
+}
+
+void
+TrackBuffer::Detach()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  if (mCurrentDecoder) {
+    DiscardDecoder();
+  }
+}
+
+bool
+TrackBuffer::HasInitSegment()
+{
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  return mHasInit;
+}
+
+bool
+TrackBuffer::IsReady()
+{
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  MOZ_ASSERT((mHasAudio || mHasVideo) || mDecoders.IsEmpty());
+  return HasInitSegment() && (mHasAudio || mHasVideo);
+}
+
+void
+TrackBuffer::LastTimestamp(double& aStart, double& aEnd)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  aStart = mLastStartTimestamp;
+  aEnd = mLastEndTimestamp;
+}
+
+void
+TrackBuffer::SetLastStartTimestamp(double aStart)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  mLastStartTimestamp = aStart;
+}
+
+void
+TrackBuffer::SetLastEndTimestamp(double aEnd)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  mLastEndTimestamp = aEnd;
+}
+
+bool
+TrackBuffer::ContainsTime(double aTime)
+{
+  ReentrantMonitorAutoEnter mon(mParentDecoder->GetReentrantMonitor());
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    nsRefPtr<dom::TimeRanges> r = new dom::TimeRanges();
+    mDecoders[i]->GetBuffered(r);
+    if (r->Find(aTime) != dom::TimeRanges::NoIndex) {
+      return true;
+    }
+  }
+
+  return false;
+}
+
+void
+TrackBuffer::BreakCycles()
+{
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    mDecoders[i]->GetReader()->BreakCycles();
+  }
+  mDecoders.Clear();
+  mParentDecoder = nullptr;
+}
+
+void
+TrackBuffer::ResetDecode()
+{
+  for (uint32_t i = 0; i < mDecoders.Length(); ++i) {
+    mDecoders[i]->GetReader()->ResetDecode();
+  }
+}
+
+const nsTArray<nsRefPtr<SourceBufferDecoder>>&
+TrackBuffer::Decoders()
+{
+  // XXX assert OnDecodeThread
+  return mDecoders;
+}
+
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/content/media/mediasource/TrackBuffer.h
@@ -0,0 +1,131 @@
+/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
+/* vim: set ts=8 sts=2 et sw=2 tw=80: */
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this
+ * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MOZILLA_TRACKBUFFER_H_
+#define MOZILLA_TRACKBUFFER_H_
+
+#include "SourceBufferDecoder.h"
+#include "mozilla/Assertions.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/mozalloc.h"
+#include "nsCOMPtr.h"
+#include "nsString.h"
+#include "nscore.h"
+
+namespace mozilla {
+
+class MediaSourceDecoder;
+
+namespace dom {
+
+class TimeRanges;
+
+} // namespace dom
+
+class TrackBuffer MOZ_FINAL {
+public:
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(TrackBuffer);
+
+  TrackBuffer(MediaSourceDecoder* aParentDecoder, const nsACString& aType);
+
+  void Shutdown();
+
+  // Append data to the current decoder.  Also responsible for calling
+  // NotifyDataArrived on the decoder to keep buffered range computation up
+  // to date.  Returns false if the append failed.
+  bool AppendData(const uint8_t* aData, uint32_t aLength);
+  bool EvictData(uint32_t aThreshold);
+  void EvictBefore(double aTime);
+
+  // Returns the highest end time of all buffered ranges in the decoders
+  // managed by this TrackBuffer, and sets aRanges to the union of those
+  // decoders' buffered ranges.
+  double Buffered(dom::TimeRanges* aRanges);
+
+  // Create a new decoder, set mCurrentDecoder to the new decoder, and queue
+  // the decoder for initialization.  The decoder is not considered
+  // initialized until it is added to mDecoders.
+  bool NewDecoder();
+
+  // Mark the current decoder's resource as ended and clear mCurrentDecoder.
+  void DiscardDecoder();
+
+  void Detach();
+
+  // Returns true if an init segment has been appended.
+  bool HasInitSegment();
+
+  // Returns true iff HasInitSegment() and the decoder using that init
+  // segment has successfully initialized and set mHas{Audio,Video}.
+  bool IsReady();
+
+  // Query and update mLast{Start,End}Timestamp.
+  void LastTimestamp(double& aStart, double& aEnd);
+  void SetLastStartTimestamp(double aStart);
+  void SetLastEndTimestamp(double aEnd);
+
+  // Returns true if any of the decoders managed by this track buffer
+  // contain aTime in their buffered ranges.
+  bool ContainsTime(double aTime);
+
+  void BreakCycles();
+
+  // Call ResetDecode() on each decoder in mDecoders.
+  void ResetDecode();
+
+  // Returns a reference to mDecoders, used by MediaSourceReader to select
+  // decoders.
+  // TODO: Refactor to a cleaner interface between TrackBuffer and MediaSourceReader.
+  const nsTArray<nsRefPtr<SourceBufferDecoder>>& Decoders();
+
+private:
+  ~TrackBuffer();
+
+  // Queue execution of InitializeDecoder on mTaskQueue.
+  bool QueueInitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
+
+  // Runs decoder initialization including calling ReadMetadata.  Runs as an
+  // event on the decode thread pool.
+  void InitializeDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
+
+  // Adds a successfully initialized decoder to mDecoders and, if it is the
+  // first decoder initialized, initializes mHasAudio/mHasVideo.  Called
+  // from the decode thread pool.
+  void RegisterDecoder(nsRefPtr<SourceBufferDecoder> aDecoder);
+
+  // A task queue using the shared media thread pool.  Used exclusively to
+  // initialize (i.e. call ReadMetadata on) decoders as they are created via
+  // NewDecoder.
+  RefPtr<MediaTaskQueue> mTaskQueue;
+
+  // All of the initialized decoders managed by this TrackBuffer.  Access
+  // protected by mParentDecoder's monitor.
+  nsTArray<nsRefPtr<SourceBufferDecoder>> mDecoders;
+
+  // The decoder that the owning SourceBuffer is currently appending data to.
+  nsRefPtr<SourceBufferDecoder> mCurrentDecoder;
+
+  nsRefPtr<MediaSourceDecoder> mParentDecoder;
+  const nsCString mType;
+
+  // The last start and end timestamps added to the TrackBuffer via
+  // AppendData.  Accessed on the main thread only.
+  double mLastStartTimestamp;
+  double mLastEndTimestamp;
+
+  // Set when the initialization segment is first seen and cached (implied
+  // by new decoder creation).  Protected by mParentDecoder's monitor.
+  bool mHasInit;
+
+  // Set when the first decoder used by this TrackBuffer is initialized.
+  // Protected by mParentDecoder's monitor.
+  bool mHasAudio;
+  bool mHasVideo;
+};
+
+} // namespace mozilla
+#endif /* MOZILLA_TRACKBUFFER_H_ */
--- a/content/media/mediasource/moz.build
+++ b/content/media/mediasource/moz.build
@@ -20,13 +20,14 @@ UNIFIED_SOURCES += [
     'MediaSource.cpp',
     'MediaSourceDecoder.cpp',
     'MediaSourceReader.cpp',
     'MediaSourceUtils.cpp',
     'SourceBuffer.cpp',
     'SourceBufferDecoder.cpp',
     'SourceBufferList.cpp',
     'SourceBufferResource.cpp',
+    'TrackBuffer.cpp',
 ]
 
 FAIL_ON_WARNINGS = True
 
 FINAL_LIBRARY = 'xul'
--- a/content/media/mediasource/test/mochitest.ini
+++ b/content/media/mediasource/test/mochitest.ini
@@ -1,6 +1,12 @@
 [DEFAULT]
 skip-if = e10s
 support-files = seek.webm seek.webm^headers^
 
 [test_MediaSource.html]
 skip-if = buildapp == 'b2g' # b2g( ReferenceError: MediaSource is not defined)
+
+[test_SplitAppend.html]
+skip-if = buildapp == 'b2g' # b2g( ReferenceError: MediaSource is not defined)
+
+[test_SplitAppendDelay.html]
+skip-if = buildapp == 'b2g' # b2g( ReferenceError: MediaSource is not defined)
new file mode 100644
--- /dev/null
+++ b/content/media/mediasource/test/test_SplitAppend.html
@@ -0,0 +1,83 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test MediaSource playback with data appended in two separate appendBuffer calls</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+addLoadEvent(function() {
+  ok(!window.MediaSource, "MediaSource should be hidden behind a pref");
+  var accessThrows = false;
+  try {
+    new MediaSource();
+  } catch (e) {
+    accessThrows = true;
+  }
+  ok(accessThrows, "MediaSource should be hidden behind a pref");
+  SpecialPowers.pushPrefEnv({"set": [[ "media.mediasource.enabled", true ]]},
+                            function () {
+    SpecialPowers.setBoolPref("media.mediasource.enabled", true);
+    var ms = new MediaSource();
+    ok(ms, "Create a MediaSource object");
+    ok(ms instanceof EventTarget, "MediaSource must be an EventTarget");
+    is(ms.readyState, "closed", "New MediaSource must be in closed state");
+    // Force wrapper creation, tests for leaks.
+    ms.foo = null;
+    var o = URL.createObjectURL(ms);
+    ok(o, "Create an objectURL from the MediaSource");
+    var v = document.createElement("video");
+    v.preload = "auto";
+    document.body.appendChild(v);
+    v.src = o;
+    ms.addEventListener("sourceopen", function () {
+      ok(true, "Receive a sourceopen event");
+      is(ms.readyState, "open", "MediaSource must be in open state after sourceopen");
+      var sb = ms.addSourceBuffer("video/webm");
+      ok(sb, "Create a SourceBuffer");
+      is(ms.sourceBuffers.length, 1, "MediaSource.sourceBuffers is expected length");
+      is(ms.sourceBuffers[0], sb, "SourceBuffer in list matches our SourceBuffer");
+      fetch("seek.webm", function (blob) {
+        var r = new FileReader();
+        r.addEventListener("load", function (e) {
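+          // Split the data at byte 318 and append it in two pieces.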
+          sb.appendBuffer(new Uint8Array(e.target.result, 0, 318));
+          sb.appendBuffer(new Uint8Array(e.target.result, 318));
+          ms.endOfStream();
+          v.play();
+        });
+        r.readAsArrayBuffer(blob);
+      });
+    });
+    ms.addEventListener("sourceended", function () {
+      ok(true, "Receive a sourceended event");
+      is(ms.readyState, "ended", "MediaSource must be in ended state after sourceended");
+    });
+    v.addEventListener("ended", function () {
+      is(v.duration, 4, "Video has correct duration");
+      v.parentNode.removeChild(v);
+      SimpleTest.finish();
+    });
+  });
+});
+
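+// Fetch src via XHR as a Blob and pass it to cb; non-200 responses are ignored.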
+function fetch(src, cb) {
+  var xhr = new XMLHttpRequest();
+  xhr.open("GET", src, true);
+  xhr.responseType = "blob";
+  xhr.addEventListener("load", function (e) {
+    if (xhr.status != 200) {
+      return false;
+    }
+    cb(xhr.response);
+  });
+  xhr.send();
+}
+</script>
+</pre>
+</body>
+</html>
new file mode 100644
--- /dev/null
+++ b/content/media/mediasource/test/test_SplitAppendDelay.html
@@ -0,0 +1,85 @@
+<!DOCTYPE HTML>
+<html>
+<head>
+  <title>Test MediaSource playback with data appended in two appendBuffer calls separated by a delay</title>
+  <script type="text/javascript" src="/tests/SimpleTest/SimpleTest.js"></script>
+  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
+</head>
+<body>
+<pre id="test">
+<script class="testbody" type="text/javascript">
+
+SimpleTest.waitForExplicitFinish();
+
+addLoadEvent(function() {
+  ok(!window.MediaSource, "MediaSource should be hidden behind a pref");
+  var accessThrows = false;
+  try {
+    new MediaSource();
+  } catch (e) {
+    accessThrows = true;
+  }
+  ok(accessThrows, "MediaSource should be hidden behind a pref");
+  SpecialPowers.pushPrefEnv({"set": [[ "media.mediasource.enabled", true ]]},
+                            function () {
+    SpecialPowers.setBoolPref("media.mediasource.enabled", true);
+    var ms = new MediaSource();
+    ok(ms, "Create a MediaSource object");
+    ok(ms instanceof EventTarget, "MediaSource must be an EventTarget");
+    is(ms.readyState, "closed", "New MediaSource must be in closed state");
+    // Force wrapper creation, tests for leaks.
+    ms.foo = null;
+    var o = URL.createObjectURL(ms);
+    ok(o, "Create an objectURL from the MediaSource");
+    var v = document.createElement("video");
+    v.preload = "auto";
+    document.body.appendChild(v);
+    v.src = o;
+    ms.addEventListener("sourceopen", function () {
+      ok(true, "Receive a sourceopen event");
+      is(ms.readyState, "open", "MediaSource must be in open state after sourceopen");
+      var sb = ms.addSourceBuffer("video/webm");
+      ok(sb, "Create a SourceBuffer");
+      is(ms.sourceBuffers.length, 1, "MediaSource.sourceBuffers is expected length");
+      is(ms.sourceBuffers[0], sb, "SourceBuffer in list matches our SourceBuffer");
+      fetch("seek.webm", function (blob) {
+        var r = new FileReader();
+        r.addEventListener("load", function (e) {
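+          // Append the first 318 bytes now and the remainder after a one second delay.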
+          sb.appendBuffer(new Uint8Array(e.target.result, 0, 318));
+          window.setTimeout(function () {
+            sb.appendBuffer(new Uint8Array(e.target.result, 318));
+            ms.endOfStream();
+          }, 1000);
+          v.play();
+        });
+        r.readAsArrayBuffer(blob);
+      });
+    });
+    ms.addEventListener("sourceended", function () {
+      ok(true, "Receive a sourceended event");
+      is(ms.readyState, "ended", "MediaSource must be in ended state after sourceended");
+    });
+    v.addEventListener("ended", function () {
+      is(v.duration, 4, "Video has correct duration");
+      v.parentNode.removeChild(v);
+      SimpleTest.finish();
+    });
+  });
+});
+
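+// Fetch src via XHR as a Blob and pass it to cb; non-200 responses are ignored.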
+function fetch(src, cb) {
+  var xhr = new XMLHttpRequest();
+  xhr.open("GET", src, true);
+  xhr.responseType = "blob";
+  xhr.addEventListener("load", function (e) {
+    if (xhr.status != 200) {
+      return false;
+    }
+    cb(xhr.response);
+  });
+  xhr.send();
+}
+</script>
+</pre>
+</body>
+</html>