Bug 1423241 - Refactor DecodedStream. r=jya
author Andreas Pehrson <apehrson@mozilla.com>
Fri, 23 Nov 2018 15:02:03 +0000
changeset 507053 243a33803d16d50cfcc8cf0a5c4739a5ed08dc55
parent 507052 12f91d8a1f694cb3186dd32624faee8242812c87
child 507054 897c956c3cf3eb7ca15e246169c0863f9440b4b0
push id 1905
push user ffxbld-merge
push date Mon, 21 Jan 2019 12:33:13 +0000
treeherder mozilla-release@c2fca1944d8c
reviewers jya
bugs 1423241, 1172394
milestone 65.0a1
Bug 1423241 - Refactor DecodedStream. r=jya

This removes DecodedStream's use of MediaStreamListener in favor of MediaStreamTrackListener. The change has, however, rippled through to a fair amount of additional cleanup, described below.

It moves the MediaStreamTrack lifetime ownership for captured HTMLMediaElements from the media element to DecodedStream, where the MediaStreamGraph-side tracks are already created and ended today. This makes MediaStreamTrack creation explicit across the entire codebase and lets us remove the MediaStreamTrackSourceGetter class, along with DOMMediaStream's infrastructure for adding MediaStreamTracks after they have already been created in the graph.

With track ownership, and thus TrackID allocation ownership, now happening exclusively in DecodedStream for its output tracks, we also stop throwing away and recreating, on seek, the SourceMediaStream to which we feed data. This is one step closer to fixing bug 1172394 and to spec compliance of HTMLMediaElement.captureStream().

Differential Revision: https://phabricator.services.mozilla.com/D12273
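For readers skimming the diff, a minimal sketch of the capture path after this change, condensed from HTMLMediaElement::CaptureStreamInternal and FinishDecoderSetup below. CaptureStreamSketch is a hypothetical name used only for illustration; error handling, the audio-only capture case, and the mSrcStream path are omitted.

    // Sketch only (assumptions noted above), not part of the patch.
    already_AddRefed<DOMMediaStream> HTMLMediaElement::CaptureStreamSketch(
        MediaStreamGraph* aGraph) {
      OutputMediaStream* out = mOutputStreams.AppendElement();
      nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
      // No MediaStreamTrackSourceGetter any more; the stream is created bare.
      out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph);
      // Keep the stream active until capture ends (mozCaptureStream compat,
      // see DOMMediaStream::SetFinishedOnInactive in this patch).
      out->mStream->SetFinishedOnInactive(false);
      if (mDecoder) {
        out->mCapturingDecoder = true;
        // DecodedStream now owns track creation and TrackID allocation; the
        // element no longer pre-creates MediaStreamTracks for the decoder.
        mDecoder->AddOutputStream(out->mStream);
      }
      RefPtr<DOMMediaStream> result = out->mStream;
      return result.forget();
    }

The rest of the new MediaDecoder surface (SetOutputStreamCORSMode, SetNextOutputStreamTrackID, GetNextOutputStreamTrackID, RemoveOutputStream) is shown in the MediaDecoder.cpp and MediaDecoder.h hunks further down.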
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/CanvasCaptureMediaStream.cpp
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/MediaDecoderOwner.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaManager.cpp
dom/media/MediaStreamTrack.cpp
dom/media/mediasink/DecodedStream.cpp
dom/media/mediasink/DecodedStream.h
dom/media/mediasink/OutputStreamManager.cpp
dom/media/mediasink/OutputStreamManager.h
media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -589,149 +589,16 @@ NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
   HTMLMediaElement::StreamCaptureTrackSource)
 NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
 NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
                                    MediaStreamTrackSource,
                                    mElement,
                                    mCapturedTrackSource,
                                    mOwningStream)
 
-class HTMLMediaElement::DecoderCaptureTrackSource
-  : public MediaStreamTrackSource
-  , public DecoderPrincipalChangeObserver
-{
-public:
-  NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecoderCaptureTrackSource,
-                                           MediaStreamTrackSource)
-
-  explicit DecoderCaptureTrackSource(HTMLMediaElement* aElement)
-    : MediaStreamTrackSource(
-        nsCOMPtr<nsIPrincipal>(aElement->GetCurrentPrincipal()).get(),
-        nsString())
-    , mElement(aElement)
-  {
-    MOZ_ASSERT(mElement);
-    mElement->AddDecoderPrincipalChangeObserver(this);
-  }
-
-  void Destroy() override
-  {
-    if (mElement) {
-      DebugOnly<bool> res =
-        mElement->RemoveDecoderPrincipalChangeObserver(this);
-      NS_ASSERTION(res,
-                   "Removing decoder principal changed observer failed. "
-                   "Had it already been removed?");
-      mElement = nullptr;
-    }
-  }
-
-  MediaSourceEnum GetMediaSource() const override
-  {
-    return MediaSourceEnum::Other;
-  }
-
-  CORSMode GetCORSMode() const override
-  {
-    if (!mElement) {
-      MOZ_ASSERT(false, "Should always have an element if in use");
-      return CORS_NONE;
-    }
-
-    return mElement->GetCORSMode();
-  }
-
-  void Stop() override
-  {
-    // We don't notify the source that a track was stopped since it will keep
-    // producing tracks until the element ends. The decoder also needs the
-    // tracks it created to be live at the source since the decoder's clock is
-    // based on MediaStreams during capture.
-  }
-
-  void Disable() override {}
-
-  void Enable() override {}
-
-  void NotifyDecoderPrincipalChanged() override
-  {
-    nsCOMPtr<nsIPrincipal> newPrincipal = mElement->GetCurrentPrincipal();
-    if (nsContentUtils::CombineResourcePrincipals(&mPrincipal, newPrincipal)) {
-      PrincipalChanged();
-    }
-  }
-
-protected:
-  virtual ~DecoderCaptureTrackSource() = default;
-
-  RefPtr<HTMLMediaElement> mElement;
-};
-
-NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
-                         MediaStreamTrackSource)
-NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
-                          MediaStreamTrackSource)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
-  HTMLMediaElement::DecoderCaptureTrackSource)
-NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
-NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::DecoderCaptureTrackSource,
-                                   MediaStreamTrackSource,
-                                   mElement)
-
-class HTMLMediaElement::CaptureStreamTrackSourceGetter
-  : public MediaStreamTrackSourceGetter
-{
-public:
-  NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CaptureStreamTrackSourceGetter,
-                                           MediaStreamTrackSourceGetter)
-
-  explicit CaptureStreamTrackSourceGetter(HTMLMediaElement* aElement)
-    : MediaStreamTrackSourceGetter(false)
-    , mElement(aElement)
-  {
-  }
-
-  already_AddRefed<dom::MediaStreamTrackSource> GetMediaStreamTrackSource(
-    TrackID aInputTrackID) override
-  {
-    if (mElement && mElement->mSrcStream) {
-      NS_ERROR("Captured media element playing a stream adds tracks explicitly "
-               "on main thread.");
-      return nullptr;
-    }
-
-    // We can return a new source each time here, even for different streams,
-    // since the sources don't keep any internal state and all of them call
-    // through to the same HTMLMediaElement.
-    // If this changes (after implementing Stop()?) we'll have to ensure we
-    // return the same source for all requests to the same TrackID, and only
-    // have one getter.
-    return do_AddRef(new DecoderCaptureTrackSource(mElement));
-  }
-
-protected:
-  virtual ~CaptureStreamTrackSourceGetter() = default;
-
-  RefPtr<HTMLMediaElement> mElement;
-};
-
-NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
-                         MediaStreamTrackSourceGetter)
-NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::CaptureStreamTrackSourceGetter,
-                          MediaStreamTrackSourceGetter)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
-  HTMLMediaElement::CaptureStreamTrackSourceGetter)
-NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSourceGetter)
-NS_IMPL_CYCLE_COLLECTION_INHERITED(
-  HTMLMediaElement::CaptureStreamTrackSourceGetter,
-  MediaStreamTrackSourceGetter,
-  mElement)
-
 /**
  * There is a reference cycle involving this class: MediaLoadListener
  * holds a reference to the HTMLMediaElement, which holds a reference
  * to an nsIChannel, which holds a reference to this listener.
  * We break the reference cycle in OnStartRequest by clearing mElement.
  */
 class HTMLMediaElement::MediaLoadListener final
   : public nsIStreamListener
@@ -1722,18 +1589,16 @@ NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSrcAttrStream)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSourcePointer)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mLoadBlockedDoc)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSourceLoadCandidate)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAudioChannelWrapper)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mErrorSink->mError)
   for (uint32_t i = 0; i < tmp->mOutputStreams.Length(); ++i) {
     NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOutputStreams[i].mStream)
-    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOutputStreams[i].mTrackSourceGetter)
-    NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOutputStreams[i].mPreCreatedTracks)
   }
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPlayed);
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTextTrackManager)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mAudioTrackList)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoTrackList)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mMediaKeys)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mIncomingMediaKeys)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSelectedVideoStreamTrack)
@@ -1757,17 +1622,17 @@ NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_IN
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mLoadBlockedDoc)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mSourceLoadCandidate)
   if (tmp->mAudioChannelWrapper) {
     tmp->mAudioChannelWrapper->Shutdown();
   }
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mAudioChannelWrapper)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mErrorSink->mError)
   for (OutputMediaStream& s : tmp->mOutputStreams) {
-    s.mTrackSourceGetter->FinishOnNextInactive(s.mStream);
+    s.mStream->SetFinishedOnInactive(true);
   }
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOutputStreams)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPlayed)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mTextTrackManager)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mAudioTrackList)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoTrackList)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mMediaKeys)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mIncomingMediaKeys)
@@ -1974,23 +1839,19 @@ HTMLMediaElement::ShutdownDecoder()
 {
   RemoveMediaElementFromURITable();
   NS_ASSERTION(mDecoder, "Must have decoder to shut down");
 
   mWaitingForKeyListener.DisconnectIfExists();
   if (mMediaSource) {
     mMediaSource->CompletePendingTransactions();
   }
-  for (OutputMediaStream& out : mOutputStreams) {
-    if (!out.mCapturingDecoder) {
-      continue;
-    }
-    out.mNextAvailableTrackID = std::max<TrackID>(
-      mDecoder->NextAvailableTrackIDFor(out.mStream->GetInputStream()),
-      out.mNextAvailableTrackID);
+  if (!mOutputStreams.IsEmpty()) {
+    mNextAvailableMediaDecoderOutputTrackID =
+      mDecoder->GetNextOutputStreamTrackID();
   }
   mDecoder->Shutdown();
   DDUNLINKCHILD(mDecoder.get());
   mDecoder = nullptr;
   ReportAudioTrackSilenceProportionTelemetry();
 }
 
 void
@@ -3532,21 +3393,19 @@ HTMLMediaElement::CaptureStreamInternal(
 
   // We don't support routing to a different graph.
   if (!mOutputStreams.IsEmpty() &&
       aGraph != mOutputStreams[0].mStream->GetInputStream()->Graph()) {
     return nullptr;
   }
 
   OutputMediaStream* out = mOutputStreams.AppendElement();
-  out->mTrackSourceGetter =
-    new CaptureStreamTrackSourceGetter(this);
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
-  out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(
-    window, aGraph, out->mTrackSourceGetter);
+  out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph);
+  out->mStream->SetFinishedOnInactive(false);
   out->mFinishWhenEnded =
     aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED;
   out->mCapturingAudioOnly =
     aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO;
 
   if (aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO) {
     if (mSrcStream) {
       // We don't support applying volume and mute to the captured stream, when
@@ -3563,55 +3422,27 @@ HTMLMediaElement::CaptureStreamInternal(
     // mAudioCaptured tells the user that the audio played by this media element
     // is being routed to the captureStreams *instead* of being played to
     // speakers.
     mAudioCaptured = true;
   }
 
   if (mDecoder) {
     out->mCapturingDecoder = true;
-    mDecoder->AddOutputStream(
-      out->mStream->GetInputStream()->AsProcessedStream(),
-      out->mNextAvailableTrackID,
-      aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED);
+    mDecoder->AddOutputStream(out->mStream);
   } else if (mSrcStream) {
     out->mCapturingMediaStream = true;
   }
 
   if (mReadyState == HAVE_NOTHING) {
     // Do not expose the tracks until we have metadata.
     RefPtr<DOMMediaStream> result = out->mStream;
     return result.forget();
   }
 
-  if (mDecoder) {
-    if (HasAudio()) {
-      TrackID audioTrackId = out->mNextAvailableTrackID++;
-      RefPtr<MediaStreamTrackSource> trackSource =
-        out->mTrackSourceGetter->GetMediaStreamTrackSource(audioTrackId);
-      RefPtr<MediaStreamTrack> track = out->mStream->CreateDOMTrack(
-        audioTrackId, MediaSegment::AUDIO, trackSource);
-      out->mPreCreatedTracks.AppendElement(track);
-      out->mStream->AddTrackInternal(track);
-      LOG(LogLevel::Debug,
-          ("Created audio track %d for captured decoder", audioTrackId));
-    }
-    if (IsVideo() && HasVideo() && !out->mCapturingAudioOnly) {
-      TrackID videoTrackId = out->mNextAvailableTrackID++;
-      RefPtr<MediaStreamTrackSource> trackSource =
-        out->mTrackSourceGetter->GetMediaStreamTrackSource(videoTrackId);
-      RefPtr<MediaStreamTrack> track = out->mStream->CreateDOMTrack(
-        videoTrackId, MediaSegment::VIDEO, trackSource);
-      out->mPreCreatedTracks.AppendElement(track);
-      out->mStream->AddTrackInternal(track);
-      LOG(LogLevel::Debug,
-          ("Created video track %d for captured decoder", videoTrackId));
-    }
-  }
-
   if (mSrcStream) {
     MOZ_DIAGNOSTIC_ASSERT(AudioTracks(), "Element can't have been unlinked");
     for (size_t i = 0; i < AudioTracks()->Length(); ++i) {
       AudioTrack* t = (*AudioTracks())[i];
       if (t->Enabled()) {
         AddCaptureMediaTrackToOutputStream(t, *out, false);
       }
     }
@@ -5127,26 +4958,32 @@ HTMLMediaElement::FinishDecoderSetup(Med
       [](const GenericPromise::ResolveOrRejectValue& aValue) {
         MOZ_ASSERT(aValue.IsResolve() && !aValue.ResolveValue());
       });
 #else
     ;
 #endif
   }
 
+  // Set CORSMode now before any streams are added. It won't change over time.
+  mDecoder->SetOutputStreamCORSMode(mCORSMode);
+
+  if (!mOutputStreams.IsEmpty()) {
+    mDecoder->SetNextOutputStreamTrackID(
+      mNextAvailableMediaDecoderOutputTrackID);
+  }
+
   for (OutputMediaStream& ms : mOutputStreams) {
     if (ms.mCapturingMediaStream) {
       MOZ_ASSERT(!ms.mCapturingDecoder);
       continue;
     }
 
     ms.mCapturingDecoder = true;
-    aDecoder->AddOutputStream(ms.mStream->GetInputStream()->AsProcessedStream(),
-                              ms.mNextAvailableTrackID,
-                              ms.mFinishWhenEnded);
+    aDecoder->AddOutputStream(ms.mStream);
   }
 
   if (mMediaKeys) {
     if (mMediaKeys->GetCDMProxy()) {
       mDecoder->SetCDMProxy(mMediaKeys->GetCDMProxy());
     } else {
       // CDM must have crashed.
       ShutdownDecoder();
@@ -5725,20 +5562,22 @@ HTMLMediaElement::PlaybackEnded()
 
   NS_ASSERTION(!mDecoder || mDecoder->IsEnded(),
                "Decoder fired ended, but not in ended state");
 
   // Discard all output streams that have finished now.
   for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
     if (mOutputStreams[i].mFinishWhenEnded) {
       LOG(LogLevel::Debug,
-          ("Playback ended. Removing output stream %p",
+          ("Playback ended. Letting output stream %p go inactive",
            mOutputStreams[i].mStream.get()));
-      mOutputStreams[i].mTrackSourceGetter->FinishOnNextInactive(
-        mOutputStreams[i].mStream);
+      mOutputStreams[i].mStream->SetFinishedOnInactive(true);
+      if (mOutputStreams[i].mCapturingDecoder) {
+        mDecoder->RemoveOutputStream(mOutputStreams[i].mStream);
+      }
       mOutputStreams.RemoveElementAt(i);
     }
   }
 
   if (mSrcStream) {
     LOG(LogLevel::Debug,
         ("%p, got duration by reaching the end of the resource", this));
     mSrcStreamPlaybackEnded = true;
@@ -6592,35 +6431,16 @@ HTMLMediaElement::GetCurrentVideoPrincip
 }
 
 void
 HTMLMediaElement::NotifyDecoderPrincipalChanged()
 {
   RefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
 
   mDecoder->UpdateSameOriginStatus(!principal || IsCORSSameOrigin());
-
-  for (DecoderPrincipalChangeObserver* observer :
-       mDecoderPrincipalChangeObservers) {
-    observer->NotifyDecoderPrincipalChanged();
-  }
-}
-
-void
-HTMLMediaElement::AddDecoderPrincipalChangeObserver(
-  DecoderPrincipalChangeObserver* aObserver)
-{
-  mDecoderPrincipalChangeObservers.AppendElement(aObserver);
-}
-
-bool
-HTMLMediaElement::RemoveDecoderPrincipalChangeObserver(
-  DecoderPrincipalChangeObserver* aObserver)
-{
-  return mDecoderPrincipalChangeObservers.RemoveElement(aObserver);
 }
 
 void
 HTMLMediaElement::Invalidate(bool aImageSizeChanged,
                              Maybe<nsIntSize>& aNewIntrinsicSize,
                              bool aForceInvalidate)
 {
   nsIFrame* frame = GetPrimaryFrame();
@@ -7789,23 +7609,21 @@ HTMLMediaElement::AudioCaptureStreamChan
   } else if (!aCapture && mCaptureStreamPort) {
     if (mDecoder) {
       ProcessedMediaStream* ps =
         mCaptureStreamPort->GetSource()->AsProcessedStream();
       MOZ_ASSERT(ps);
 
       for (uint32_t i = 0; i < mOutputStreams.Length(); i++) {
         if (mOutputStreams[i].mStream->GetPlaybackStream() == ps) {
-          mOutputStreams[i].mTrackSourceGetter->FinishOnNextInactive(
-            mOutputStreams[i].mStream);
+          mDecoder->RemoveOutputStream(mOutputStreams[i].mStream);
           mOutputStreams.RemoveElementAt(i);
           break;
         }
       }
-      mDecoder->RemoveOutputStream(ps);
     }
     mCaptureStreamPort->Destroy();
     mCaptureStreamPort = nullptr;
   }
 }
 
 void
 HTMLMediaElement::NotifyCueDisplayStatesChanged()
@@ -8120,31 +7938,16 @@ HTMLMediaElement::RemoveMediaTracks()
     mAudioTrackList->RemoveTracks();
   }
 
   if (mVideoTrackList) {
     mVideoTrackList->RemoveTracks();
   }
 
   mMediaTracksConstructed = false;
-
-  for (OutputMediaStream& ms : mOutputStreams) {
-    if (!ms.mCapturingDecoder) {
-      continue;
-    }
-    for (RefPtr<MediaStreamTrack>& t : ms.mPreCreatedTracks) {
-      if (t->Ended()) {
-        continue;
-      }
-      mAbstractMainThread->Dispatch(NewRunnableMethod(
-        "dom::HTMLMediaElement::RemoveMediaTracks",
-        t, &MediaStreamTrack::OverrideEnded));
-    }
-    ms.mPreCreatedTracks.Clear();
-  }
 }
 
 class MediaElementGMPCrashHelper : public GMPCrashHelper
 {
 public:
   explicit MediaElementGMPCrashHelper(HTMLMediaElement* aElement)
     : mElement(aElement)
   {
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -295,42 +295,17 @@ public:
   // the image container be empty with no live video tracks, we return nullptr.
   already_AddRefed<nsIPrincipal> GetCurrentVideoPrincipal();
 
   // called to notify that the principal of the decoder's media resource has changed.
   void NotifyDecoderPrincipalChanged() final;
 
   void GetEMEInfo(nsString& aEMEInfo);
 
-  // An interface for observing principal changes on the media elements
-  // MediaDecoder. This will also be notified if the active CORSMode changes.
-  class DecoderPrincipalChangeObserver
-  {
-  public:
-    virtual void NotifyDecoderPrincipalChanged() = 0;
-  };
-
-  /**
-   * Add a DecoderPrincipalChangeObserver to this media element.
-   *
-   * Ownership of the DecoderPrincipalChangeObserver remains with the caller,
-   * and it's the caller's responsibility to remove the observer before it dies.
-   */
-  void AddDecoderPrincipalChangeObserver(DecoderPrincipalChangeObserver* aObserver);
-
-  /**
-   * Remove an added DecoderPrincipalChangeObserver from this media element.
-   *
-   * Returns true if it was successfully removed.
-   */
-  bool RemoveDecoderPrincipalChangeObserver(DecoderPrincipalChangeObserver* aObserver);
-
   class StreamCaptureTrackSource;
-  class DecoderCaptureTrackSource;
-  class CaptureStreamTrackSourceGetter;
 
   // Update the visual size of the media. Called from the decoder on the
   // main thread when/if the size changes.
   void UpdateMediaSize(const nsIntSize& aSize);
   // Like UpdateMediaSize, but only updates the size if no size has yet
   // been set.
   void UpdateInitialMediaSize(const nsIntSize& aSize);
 
@@ -863,23 +838,16 @@ protected:
 
     RefPtr<DOMMediaStream> mStream;
     TrackID mNextAvailableTrackID;
     bool mFinishWhenEnded;
     bool mCapturingAudioOnly;
     bool mCapturingDecoder;
     bool mCapturingMediaStream;
 
-    RefPtr<CaptureStreamTrackSourceGetter> mTrackSourceGetter;
-
-    // The following members are keeping state for a captured MediaDecoder.
-    // Tracks that were created on main thread before MediaDecoder fed them
-    // to the MediaStreamGraph.
-    nsTArray<RefPtr<MediaStreamTrack>> mPreCreatedTracks;
-
     // The following members are keeping state for a captured MediaStream.
     nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
   };
 
   void PlayInternal(bool aHandlingUserInput);
 
   /** Use this method to change the mReadyState member, so required
    * events can be fired.
@@ -1407,20 +1375,16 @@ protected:
 
   // The DocGroup-specific nsISerialEventTarget of this HTML element on the main
   // thread.
   nsCOMPtr<nsISerialEventTarget> mMainThreadEventTarget;
 
   // The DocGroup-specific AbstractThread::MainThread() of this HTML element.
   RefPtr<AbstractThread> mAbstractMainThread;
 
-  // Observers listening to changes to the mDecoder principal.
-  // Used by streams captured from this element.
-  nsTArray<DecoderPrincipalChangeObserver*> mDecoderPrincipalChangeObservers;
-
   // A reference to the VideoFrameContainer which contains the current frame
   // of video to display.
   RefPtr<VideoFrameContainer> mVideoFrameContainer;
 
   // Holds a reference to the DOM wrapper for the MediaStream that has been
   // set in the src attribute.
   RefPtr<DOMMediaStream> mSrcAttrStream;
 
@@ -1430,18 +1394,18 @@ protected:
   // Holds a reference to the DOM wrapper for the MediaStream that we're
   // actually playing.
   // At most one of mDecoder and mSrcStream can be non-null.
   RefPtr<DOMMediaStream> mSrcStream;
 
   // True once mSrcStream's initial set of tracks are known.
   bool mSrcStreamTracksAvailable = false;
 
-  // If different from GRAPH_TIME_MAX, the time we should return for
-  // currentTime while playing mSrcStream.
+  // While mPaused is true and mSrcStream is set, this is the value to use for
+  // CurrentTime(). Otherwise this is set to GRAPH_TIME_MAX.
   GraphTime mSrcStreamPausedGraphTime = GRAPH_TIME_MAX;
 
   // The offset in GraphTime that this media element started playing the
   // playback stream of mSrcStream.
   GraphTime mSrcStreamGraphTimeOffset = 0;
 
   // True once PlaybackEnded() is called and we're playing a MediaStream.
   // Reset to false if we start playing mSrcStream again.
@@ -1449,16 +1413,19 @@ protected:
 
   // Holds a reference to the stream connecting this stream to the capture sink.
   RefPtr<MediaInputPort> mCaptureStreamPort;
 
   // Holds references to the DOM wrappers for the MediaStreams that we're
   // writing to.
   nsTArray<OutputMediaStream> mOutputStreams;
 
+  // The next track id to use for a captured MediaDecoder.
+  TrackID mNextAvailableMediaDecoderOutputTrackID = 1;
+
   // Holds a reference to the size-getting track listener attached to
   // mSelectedVideoStreamTrack.
   RefPtr<VideoFrameListener> mVideoFrameListener;
   // The currently selected video stream track.
   RefPtr<VideoStreamTrack> mSelectedVideoStreamTrack;
 
   const RefPtr<ShutdownObserver> mShutdownObserver;
 
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -220,19 +220,17 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(Canva
 NS_IMPL_ADDREF_INHERITED(CanvasCaptureMediaStream, DOMMediaStream)
 NS_IMPL_RELEASE_INHERITED(CanvasCaptureMediaStream, DOMMediaStream)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(CanvasCaptureMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
 
 CanvasCaptureMediaStream::CanvasCaptureMediaStream(nsPIDOMWindowInner* aWindow,
                                                    HTMLCanvasElement* aCanvas)
-    : DOMMediaStream(aWindow, nullptr),
-      mCanvas(aCanvas),
-      mOutputStreamDriver(nullptr) {}
+    : DOMMediaStream(aWindow), mCanvas(aCanvas), mOutputStreamDriver(nullptr) {}
 
 CanvasCaptureMediaStream::~CanvasCaptureMediaStream() {
   if (mOutputStreamDriver) {
     mOutputStreamDriver->Forget();
   }
 }
 
 JSObject* CanvasCaptureMediaStream::WrapObject(
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -52,28 +52,16 @@ static bool ContainsLiveTracks(
     if (port->GetTrack()->ReadyState() == MediaStreamTrackState::Live) {
       return true;
     }
   }
 
   return false;
 }
 
-void MediaStreamTrackSourceGetter::FinishOnNextInactive(
-    RefPtr<DOMMediaStream>& aStream) {
-  if (mFinishedOnInactive) {
-    return;
-  }
-
-  mFinishedOnInactive = true;
-
-  // We notify now with a dummy track in case there are no live tracks.
-  aStream->NotifyTrackRemoved(nullptr);
-}
-
 DOMMediaStream::TrackPort::TrackPort(MediaInputPort* aInputPort,
                                      MediaStreamTrack* aTrack,
                                      const InputPortOwnership aOwnership)
     : mInputPort(aInputPort), mTrack(aTrack), mOwnership(aOwnership) {
   MOZ_ASSERT(mInputPort);
   MOZ_ASSERT(mTrack);
 
   MOZ_COUNT_CTOR(TrackPort);
@@ -111,23 +99,16 @@ already_AddRefed<Pledge<bool>> DOMMediaS
   rejected->Reject(NS_ERROR_FAILURE);
   return rejected.forget();
 }
 
 NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::TrackPort, mTrack)
 NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(DOMMediaStream::TrackPort, AddRef)
 NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(DOMMediaStream::TrackPort, Release)
 
-NS_IMPL_CYCLE_COLLECTING_ADDREF(MediaStreamTrackSourceGetter)
-NS_IMPL_CYCLE_COLLECTING_RELEASE(MediaStreamTrackSourceGetter)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaStreamTrackSourceGetter)
-  NS_INTERFACE_MAP_ENTRY(nsISupports)
-NS_INTERFACE_MAP_END
-NS_IMPL_CYCLE_COLLECTION_0(MediaStreamTrackSourceGetter)
-
 /**
  * Listener registered on the Owned stream to detect added and ended owned
  * tracks for keeping the list of MediaStreamTracks in sync with the tracks
  * added and ended directly at the source.
  */
 class DOMMediaStream::OwnedStreamListener : public MediaStreamListener {
  public:
   explicit OwnedStreamListener(DOMMediaStream* aStream) : mStream(aStream) {}
@@ -148,42 +129,23 @@ class DOMMediaStream::OwnedStreamListene
 
     if (track) {
       LOG(LogLevel::Debug, ("DOMMediaStream %p Track %d from owned stream %p "
                             "bound to MediaStreamTrack %p.",
                             mStream, aTrackID, aInputStream, track));
       return;
     }
 
-    // Track had not been created on main thread before, create it now.
-    NS_WARNING_ASSERTION(
-        !mStream->mTracks.IsEmpty(),
+    // Track must exist on main thread before it's added to the graph.
+    MOZ_RELEASE_ASSERT(
+        false,
         "A new track was detected on the input stream; creating a "
         "corresponding "
         "MediaStreamTrack. Initial tracks should be added manually to "
         "immediately and synchronously be available to JS.");
-    RefPtr<MediaStreamTrackSource> source;
-    if (mStream->mTrackSourceGetter) {
-      source = mStream->mTrackSourceGetter->GetMediaStreamTrackSource(aTrackID);
-    }
-    if (!source) {
-      NS_ASSERTION(false,
-                   "Dynamic track created without an explicit TrackSource");
-      nsPIDOMWindowInner* window = mStream->GetParentObject();
-      nsIDocument* doc = window ? window->GetExtantDoc() : nullptr;
-      nsIPrincipal* principal = doc ? doc->NodePrincipal() : nullptr;
-      source = new BasicTrackSource(principal);
-    }
-
-    RefPtr<MediaStreamTrack> newTrack =
-        mStream->CreateDOMTrack(aTrackID, aType, source);
-    aGraph->AbstractMainThread()->Dispatch(
-        NewRunnableMethod<RefPtr<MediaStreamTrack>>(
-            "DOMMediaStream::AddTrackInternal", mStream,
-            &DOMMediaStream::AddTrackInternal, newTrack));
   }
 
   void DoNotifyTrackEnded(MediaStreamGraph* aGraph, MediaStream* aInputStream,
                           TrackID aInputTrackID, TrackID aTrackID) {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (!mStream) {
       return;
@@ -312,29 +274,27 @@ NS_IMPL_CYCLE_COLLECTION_CLASS(DOMMediaS
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(DOMMediaStream,
                                                 DOMEventTargetHelper)
   tmp->Destroy();
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mTrackSourceGetter)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPlaybackTrackListener)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPrincipal)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoPrincipal)
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream,
                                                   DOMEventTargetHelper)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTrackSourceGetter)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPlaybackTrackListener)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPrincipal)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoPrincipal)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_ADDREF_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 
@@ -346,28 +306,27 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(DOMAu
                                    mStreamNode)
 
 NS_IMPL_ADDREF_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
 NS_IMPL_RELEASE_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMAudioNodeMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
 
-DOMMediaStream::DOMMediaStream(nsPIDOMWindowInner* aWindow,
-                               MediaStreamTrackSourceGetter* aTrackSourceGetter)
+DOMMediaStream::DOMMediaStream(nsPIDOMWindowInner* aWindow)
     : mWindow(aWindow),
       mInputStream(nullptr),
       mOwnedStream(nullptr),
       mPlaybackStream(nullptr),
       mTracksPendingRemoval(0),
-      mTrackSourceGetter(aTrackSourceGetter),
       mPlaybackTrackListener(MakeAndAddRef<PlaybackTrackListener>(this)),
       mTracksCreated(false),
       mNotifiedOfMediaStreamGraphShutdown(false),
       mActive(false),
+      mFinishedOnInactive(true),
       mCORSMode(CORS_NONE) {
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
       do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
   if (NS_SUCCEEDED(rv) && uuidgen) {
     nsID uuid;
     memset(&uuid, 0, sizeof(uuid));
@@ -470,19 +429,17 @@ JSObject* DOMMediaStream::WrapObject(JSC
     ErrorResult& aRv) {
   nsCOMPtr<nsPIDOMWindowInner> ownerWindow =
       do_QueryInterface(aGlobal.GetAsSupports());
   if (!ownerWindow) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
-  // Streams created from JS cannot have dynamically created tracks.
-  MediaStreamTrackSourceGetter* getter = nullptr;
-  RefPtr<DOMMediaStream> newStream = new DOMMediaStream(ownerWindow, getter);
+  auto newStream = MakeRefPtr<DOMMediaStream>(ownerWindow);
 
   for (MediaStreamTrack& track : aTracks) {
     if (!newStream->GetPlaybackStream()) {
       MOZ_RELEASE_ASSERT(track.Graph());
       newStream->InitPlaybackStreamCommon(track.Graph());
     }
     newStream->AddTrack(track);
   }
@@ -677,61 +634,21 @@ void DOMMediaStream::RemoveTrack(MediaSt
   if (!aTrack.Ended()) {
     BlockPlaybackTrack(toRemove);
     NotifyTrackRemoved(&aTrack);
   }
 
   LOG(LogLevel::Debug, ("DOMMediaStream %p Removed track %p", this, &aTrack));
 }
 
-class ClonedStreamSourceGetter : public MediaStreamTrackSourceGetter {
- public:
-  NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(ClonedStreamSourceGetter,
-                                           MediaStreamTrackSourceGetter)
-
-  explicit ClonedStreamSourceGetter(DOMMediaStream* aStream)
-      : mStream(aStream) {}
-
-  already_AddRefed<MediaStreamTrackSource> GetMediaStreamTrackSource(
-      TrackID aInputTrackID) override {
-    MediaStreamTrack* sourceTrack =
-        mStream->FindOwnedDOMTrack(mStream->GetOwnedStream(), aInputTrackID);
-    MOZ_RELEASE_ASSERT(sourceTrack);
-
-    return do_AddRef(&sourceTrack->GetSource());
-  }
-
- protected:
-  virtual ~ClonedStreamSourceGetter() {}
-
-  RefPtr<DOMMediaStream> mStream;
-};
-
-NS_IMPL_ADDREF_INHERITED(ClonedStreamSourceGetter, MediaStreamTrackSourceGetter)
-NS_IMPL_RELEASE_INHERITED(ClonedStreamSourceGetter,
-                          MediaStreamTrackSourceGetter)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(ClonedStreamSourceGetter)
-NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSourceGetter)
-NS_IMPL_CYCLE_COLLECTION_INHERITED(ClonedStreamSourceGetter,
-                                   MediaStreamTrackSourceGetter, mStream)
-
 already_AddRefed<DOMMediaStream> DOMMediaStream::Clone() {
-  return CloneInternal(TrackForwardingOption::CURRENT);
-}
-
-already_AddRefed<DOMMediaStream> DOMMediaStream::CloneInternal(
-    TrackForwardingOption aForwarding) {
-  RefPtr<DOMMediaStream> newStream =
-      new DOMMediaStream(GetParentObject(), new ClonedStreamSourceGetter(this));
+  auto newStream = MakeRefPtr<DOMMediaStream>(GetParentObject());
 
   LOG(LogLevel::Info,
-      ("DOMMediaStream %p created clone %p, forwarding %s tracks", this,
-       newStream.get(),
-       aForwarding == TrackForwardingOption::ALL ? "all" : "current"));
+      ("DOMMediaStream %p created clone %p", this, newStream.get()));
 
   MOZ_RELEASE_ASSERT(mPlaybackStream);
   MOZ_RELEASE_ASSERT(mPlaybackStream->Graph());
   MediaStreamGraph* graph = mPlaybackStream->Graph();
 
   // We initiate the owned and playback streams first, since we need to create
   // all existing DOM tracks before we add the generic input port from
   // mInputStream to mOwnedStream (see AllocateInputPort wrt. destination
@@ -746,37 +663,16 @@ already_AddRefed<DOMMediaStream> DOMMedi
 
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p forwarding external track %p to clone %p", this,
          &track, newStream.get()));
     RefPtr<MediaStreamTrack> trackClone =
         newStream->CloneDOMTrack(track, allocatedTrackID++);
   }
 
-  if (aForwarding == TrackForwardingOption::ALL) {
-    // Set up an input port from our input stream to the new DOM stream's owned
-    // stream, to allow for dynamically added tracks at the source to appear in
-    // the clone. The clone may treat mInputStream as its own mInputStream but
-    // ownership remains with us.
-    newStream->mInputStream = mInputStream;
-    if (mInputStream) {
-      // We have already set up track-locked input ports for all existing DOM
-      // tracks, so now we need to block those in the generic input port to
-      // avoid ending up with double instances of them.
-      nsTArray<TrackID> tracksToBlock;
-      for (const RefPtr<TrackPort>& info : mOwnedTracks) {
-        tracksToBlock.AppendElement(info->GetTrack()->mTrackID);
-      }
-
-      newStream->mInputStream->RegisterUser();
-      newStream->mOwnedPort = newStream->mOwnedStream->AllocateInputPort(
-          mInputStream, TRACK_ANY, TRACK_ANY, 0, 0, &tracksToBlock);
-    }
-  }
-
   return newStream.forget();
 }
 
 bool DOMMediaStream::Active() const { return mActive; }
 
 MediaStreamTrack* DOMMediaStream::GetTrackById(const nsAString& aId) const {
   for (const RefPtr<TrackPort>& info : mTracks) {
     nsString id;
@@ -894,40 +790,34 @@ void DOMMediaStream::InitPlaybackStreamC
   mPlaybackStream->AddListener(mPlaybackListener);
 
   LOG(LogLevel::Debug, ("DOMMediaStream %p Initiated with mInputStream=%p, "
                         "mOwnedStream=%p, mPlaybackStream=%p",
                         this, mInputStream, mOwnedStream, mPlaybackStream));
 }
 
 already_AddRefed<DOMMediaStream> DOMMediaStream::CreateSourceStreamAsInput(
-    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph,
-    MediaStreamTrackSourceGetter* aTrackSourceGetter) {
-  RefPtr<DOMMediaStream> stream =
-      new DOMMediaStream(aWindow, aTrackSourceGetter);
+    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
+  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
   stream->InitSourceStream(aGraph);
   return stream.forget();
 }
 
 already_AddRefed<DOMMediaStream> DOMMediaStream::CreateTrackUnionStreamAsInput(
-    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph,
-    MediaStreamTrackSourceGetter* aTrackSourceGetter) {
-  RefPtr<DOMMediaStream> stream =
-      new DOMMediaStream(aWindow, aTrackSourceGetter);
+    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
+  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
   stream->InitTrackUnionStream(aGraph);
   return stream.forget();
 }
 
 already_AddRefed<DOMMediaStream>
 DOMMediaStream::CreateAudioCaptureStreamAsInput(nsPIDOMWindowInner* aWindow,
                                                 nsIPrincipal* aPrincipal,
                                                 MediaStreamGraph* aGraph) {
-  // Audio capture doesn't create tracks dynamically
-  MediaStreamTrackSourceGetter* getter = nullptr;
-  RefPtr<DOMMediaStream> stream = new DOMMediaStream(aWindow, getter);
+  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
   stream->InitAudioCaptureStream(aPrincipal, aGraph);
   return stream.forget();
 }
 
 void DOMMediaStream::PrincipalChanged(MediaStreamTrack* aTrack) {
   MOZ_ASSERT(aTrack);
   NS_ASSERTION(HasTrack(*aTrack), "Principal changed for an unknown track");
   LOG(LogLevel::Info,
@@ -1218,16 +1108,30 @@ void DOMMediaStream::RegisterTrackListen
   mTrackListeners.AppendElement(aListener);
 }
 
 void DOMMediaStream::UnregisterTrackListener(TrackListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   mTrackListeners.RemoveElement(aListener);
 }
 
+void DOMMediaStream::SetFinishedOnInactive(bool aFinishedOnInactive) {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  if (mFinishedOnInactive == aFinishedOnInactive) {
+    return;
+  }
+
+  mFinishedOnInactive = aFinishedOnInactive;
+
+  if (mFinishedOnInactive && !ContainsLiveTracks(mTracks)) {
+    NotifyTrackRemoved(nullptr);
+  }
+}
+
 void DOMMediaStream::NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (mTracksPendingRemoval > 0) {
     // If there are tracks pending removal we may not degrade the current
     // principals until those tracks have been confirmed removed from the
     // playback stream. Instead combine with the new track and the (potentially)
     // degraded principal will be calculated when it's safe.
@@ -1289,19 +1193,17 @@ void DOMMediaStream::NotifyTrackRemoved(
     // BlockPlaybackTrack().
 
     if (!mActive) {
       NS_ASSERTION(false, "Shouldn't remove a live track if already inactive");
       return;
     }
   }
 
-  if (mTrackSourceGetter && !mTrackSourceGetter->FinishedOnInactive()) {
-    // For compatibility with mozCaptureStream we in some cases do not go
-    // inactive until the track source lets us.
+  if (!mFinishedOnInactive) {
     return;
   }
 
   // Check if we became inactive.
   if (!ContainsLiveTracks(mTracks)) {
     mActive = false;
     NotifyInactive();
   }
@@ -1341,17 +1243,17 @@ void DOMMediaStream::NotifyPlaybackTrack
                           "finish. Recomputing principal.",
                           this));
     RecomputePrincipal();
   }
 }
 
 DOMAudioNodeMediaStream::DOMAudioNodeMediaStream(nsPIDOMWindowInner* aWindow,
                                                  AudioNode* aNode)
-    : DOMMediaStream(aWindow, nullptr), mStreamNode(aNode) {}
+    : DOMMediaStream(aWindow), mStreamNode(aNode) {}
 
 DOMAudioNodeMediaStream::~DOMAudioNodeMediaStream() {}
 
 already_AddRefed<DOMAudioNodeMediaStream>
 DOMAudioNodeMediaStream::CreateTrackUnionStreamAsInput(
     nsPIDOMWindowInner* aWindow, AudioNode* aNode, MediaStreamGraph* aGraph) {
   RefPtr<DOMAudioNodeMediaStream> stream =
       new DOMAudioNodeMediaStream(aWindow, aNode);
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -63,54 +63,18 @@ class Pledge;
 
 class OnTracksAvailableCallback {
  public:
   virtual ~OnTracksAvailableCallback() {}
   virtual void NotifyTracksAvailable(DOMMediaStream* aStream) = 0;
 };
 
 /**
- * Interface through which a DOMMediaStream can query its producer for a
- * MediaStreamTrackSource. This will be used whenever a track occurs in the
- * DOMMediaStream's owned stream that has not yet been created on the main
- * thread (see DOMMediaStream::CreateOwnDOMTrack).
- */
-class MediaStreamTrackSourceGetter : public nsISupports {
-  NS_DECL_CYCLE_COLLECTING_ISUPPORTS
-  NS_DECL_CYCLE_COLLECTION_CLASS(MediaStreamTrackSourceGetter)
-
- public:
-  explicit MediaStreamTrackSourceGetter(bool aFinishedOnInactive = true)
-      : mFinishedOnInactive(aFinishedOnInactive) {}
-
-  virtual already_AddRefed<dom::MediaStreamTrackSource>
-  GetMediaStreamTrackSource(TrackID aInputTrackID) = 0;
-
-  bool FinishedOnInactive() { return mFinishedOnInactive; }
-
-  /**
-   * Called by the source to signal to aStream that it should go inactive
-   * the next time there are no live tracks. This could be now if there are no
-   * live tracks currently.
-   *
-   * This is a temporary measure to allow HTMLMediaElement::MozCaptureStream
-   * to not end playback prematurely after stream.finished became stream.active.
-   * This will be removed in bug 1302379.
-   */
-  void FinishOnNextInactive(RefPtr<DOMMediaStream>& aStream);
-
- protected:
-  virtual ~MediaStreamTrackSourceGetter() {}
-
- private:
-  bool mFinishedOnInactive;
-};
 
 // clang-format off
-/**
  * DOM wrapper for MediaStreams.
  *
  * To account for track operations such as clone(), addTrack() and
  * removeTrack(), a DOMMediaStream wraps three internal (and chained)
  * MediaStreams:
  *   1. mInputStream
  *      - Controlled by the owner/source of the DOMMediaStream.
  *        It's a stream of the type indicated by
@@ -213,17 +177,16 @@ class MediaStreamTrackSourceGetter : pub
  *                                                     (pointing to t2 in A')
  */
 // clang-format on
 class DOMMediaStream
     : public DOMEventTargetHelper,
       public dom::PrincipalChangeObserver<dom::MediaStreamTrack>,
       public RelativeTimeline {
   friend class dom::MediaStreamTrack;
-  friend class MediaStreamTrackSourceGetter;
   typedef dom::MediaStreamTrack MediaStreamTrack;
   typedef dom::AudioStreamTrack AudioStreamTrack;
   typedef dom::VideoStreamTrack VideoStreamTrack;
   typedef dom::MediaStreamTrackSource MediaStreamTrackSource;
   typedef dom::AudioTrack AudioTrack;
   typedef dom::VideoTrack VideoTrack;
   typedef dom::AudioTrackList AudioTrackList;
   typedef dom::VideoTrackList VideoTrackList;
@@ -327,18 +290,17 @@ class DOMMediaStream
     RefPtr<MediaInputPort> mInputPort;
     RefPtr<MediaStreamTrack> mTrack;
 
     // Defines if we've been given ownership of the input port or if it's owned
     // externally. The owner is responsible for destroying the port.
     const InputPortOwnership mOwnership;
   };
 
-  DOMMediaStream(nsPIDOMWindowInner* aWindow,
-                 MediaStreamTrackSourceGetter* aTrackSourceGetter);
+  explicit DOMMediaStream(nsPIDOMWindowInner* aWindow);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DOMMediaStream, DOMEventTargetHelper)
   NS_DECLARE_STATIC_IID_ACCESSOR(NS_DOMMEDIASTREAM_IID)
 
   nsPIDOMWindowInner* GetParentObject() const { return mWindow; }
   virtual JSObject* WrapObject(JSContext* aCx,
                                JS::Handle<JSObject*> aGivenProto) override;
@@ -365,39 +327,25 @@ class DOMMediaStream
   void GetAudioTracks(nsTArray<RefPtr<AudioStreamTrack>>& aTracks) const;
   void GetAudioTracks(nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const;
   void GetVideoTracks(nsTArray<RefPtr<VideoStreamTrack>>& aTracks) const;
   void GetVideoTracks(nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const;
   void GetTracks(nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const;
   MediaStreamTrack* GetTrackById(const nsAString& aId) const;
   void AddTrack(MediaStreamTrack& aTrack);
   void RemoveTrack(MediaStreamTrack& aTrack);
-
-  /** Identical to CloneInternal(TrackForwardingOption::EXPLICIT) */
   already_AddRefed<DOMMediaStream> Clone();
 
   bool Active() const;
 
   IMPL_EVENT_HANDLER(addtrack)
   IMPL_EVENT_HANDLER(removetrack)
 
   // NON-WebIDL
 
-  /**
-   * Option to provide to CloneInternal() of which tracks should be forwarded
-   * from the source stream (`this`) to the returned stream clone.
-   *
-   * CURRENT forwards the tracks currently in the source stream's track set.
-   * ALL     forwards like EXPLICIT plus any and all future tracks originating
-   *         from the same input stream as the source DOMMediaStream (`this`).
-   */
-  enum class TrackForwardingOption { CURRENT, ALL };
-  already_AddRefed<DOMMediaStream> CloneInternal(
-      TrackForwardingOption aForwarding);
-
   MediaStreamTrack* GetOwnedTrackById(const nsAString& aId);
 
   /**
    * Returns true if this DOMMediaStream has aTrack in its mPlaybackStream.
    */
   bool HasTrack(const MediaStreamTrack& aTrack) const;
 
   /**
@@ -493,26 +441,24 @@ class DOMMediaStream
   // need to surface this to content.
   void AssignId(const nsAString& aID) { mID = aID; }
 
   /**
    * Create a DOMMediaStream whose underlying input stream is a
    * SourceMediaStream.
    */
   static already_AddRefed<DOMMediaStream> CreateSourceStreamAsInput(
-      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph,
-      MediaStreamTrackSourceGetter* aTrackSourceGetter = nullptr);
+      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
 
   /**
    * Create a DOMMediaStream whose underlying input stream is a
    * TrackUnionStream.
    */
   static already_AddRefed<DOMMediaStream> CreateTrackUnionStreamAsInput(
-      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph,
-      MediaStreamTrackSourceGetter* aTrackSourceGetter = nullptr);
+      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
 
   /**
    * Create an DOMMediaStream whose underlying input stream is an
    * AudioCaptureStream.
    */
   static already_AddRefed<DOMMediaStream> CreateAudioCaptureStreamAsInput(
       nsPIDOMWindowInner* aWindow, nsIPrincipal* aPrincipal,
       MediaStreamGraph* aGraph);
@@ -569,16 +515,20 @@ class DOMMediaStream
   // being destroyed, so we don't hold on to a dead pointer. Main thread only.
   void RegisterTrackListener(TrackListener* aListener);
 
   // Unregisters a track listener from this MediaStream. The caller must call
   // UnregisterTrackListener before being destroyed, so we don't hold on to
   // a dead pointer. Main thread only.
   void UnregisterTrackListener(TrackListener* aListener);
 
+  // Tells this MediaStream whether it can go inactive as soon as no tracks
+  // are live anymore.
+  void SetFinishedOnInactive(bool aFinishedOnInactive);
+
  protected:
   virtual ~DOMMediaStream();
 
   void Destroy();
   void InitSourceStream(MediaStreamGraph* aGraph);
   void InitTrackUnionStream(MediaStreamGraph* aGraph);
   void InitAudioCaptureStream(nsIPrincipal* aPrincipal,
                               MediaStreamGraph* aGraph);
@@ -680,20 +630,16 @@ class DOMMediaStream
 
   // MediaStreamTracks corresponding to tracks in our mPlaybackStream.
   AutoTArray<RefPtr<TrackPort>, 2> mTracks;
 
   // Number of MediaStreamTracks that have been removed on main thread but are
   // waiting to be removed on MediaStreamGraph thread.
   size_t mTracksPendingRemoval;
 
-  // The interface through which we can query the stream producer for
-  // track sources.
-  RefPtr<MediaStreamTrackSourceGetter> mTrackSourceGetter;
-
   // Listener tracking changes to mOwnedStream. We use this to notify the
   // MediaStreamTracks we own about state changes.
   RefPtr<OwnedStreamListener> mOwnedListener;
 
   // Listener tracking changes to mPlaybackStream. This drives state changes
   // in this DOMMediaStream and notifications to mTrackListeners.
   RefPtr<PlaybackStreamListener> mPlaybackListener;
 
@@ -713,16 +659,20 @@ class DOMMediaStream
   bool mNotifiedOfMediaStreamGraphShutdown;
 
   // The track listeners subscribe to changes in this stream's track set.
   nsTArray<TrackListener*> mTrackListeners;
 
   // True if this stream has live tracks.
   bool mActive;
 
+  // For compatibility with mozCaptureStream, we in some cases do not go
+  // inactive until the MediaDecoder lets us. (Remove this in Bug 1302379)
+  bool mFinishedOnInactive;
+
  private:
   void NotifyPrincipalChanged();
   // Principal identifying who may access the collected contents of this stream.
   // If null, this stream can be used by anyone because it has no content yet.
   nsCOMPtr<nsIPrincipal> mPrincipal;
   // Video principal is used by video element as access is requested to its
   // image data.
   nsCOMPtr<nsIPrincipal> mVideoPrincipal;
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -1,16 +1,17 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaDecoder.h"
 
+#include "DOMMediaStream.h"
 #include "ImageContainer.h"
 #include "Layers.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaFormatReader.h"
 #include "MediaResource.h"
 #include "MediaShutdownManager.h"
 #include "VideoFrameContainer.h"
 #include "VideoUtils.h"
@@ -234,39 +235,50 @@ void MediaDecoder::SetVolume(double aVol
 }
 
 RefPtr<GenericPromise> MediaDecoder::SetSink(AudioDeviceInfo* aSink) {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
   return GetStateMachine()->InvokeSetSink(aSink);
 }
 
-void MediaDecoder::AddOutputStream(ProcessedMediaStream* aStream,
-                                   TrackID aNextAvailableTrackID,
-                                   bool aFinishWhenEnded) {
+void MediaDecoder::SetOutputStreamCORSMode(CORSMode aCORSMode) {
+  MOZ_ASSERT(NS_IsMainThread());
+  AbstractThread::AutoEnter context(AbstractMainThread());
+  mDecoderStateMachine->SetOutputStreamCORSMode(aCORSMode);
+}
+
+void MediaDecoder::AddOutputStream(DOMMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
-  mDecoderStateMachine->AddOutputStream(aStream, aNextAvailableTrackID,
-                                        aFinishWhenEnded);
+  mDecoderStateMachine->EnsureOutputStreamManager(
+      aStream->GetInputStream()->Graph(), ToMaybe(mInfo.get()));
+  mDecoderStateMachine->AddOutputStream(aStream);
 }
 
-void MediaDecoder::RemoveOutputStream(MediaStream* aStream) {
+void MediaDecoder::RemoveOutputStream(DOMMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
   mDecoderStateMachine->RemoveOutputStream(aStream);
 }
 
-TrackID MediaDecoder::NextAvailableTrackIDFor(
-    MediaStream* aOutputStream) const {
+void MediaDecoder::SetNextOutputStreamTrackID(TrackID aNextTrackID) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
-  return mDecoderStateMachine->NextAvailableTrackIDFor(aOutputStream);
+  mDecoderStateMachine->SetNextOutputStreamTrackID(aNextTrackID);
+}
+
+TrackID MediaDecoder::GetNextOutputStreamTrackID() {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
+  AbstractThread::AutoEnter context(AbstractMainThread());
+  return mDecoderStateMachine->GetNextOutputStreamTrackID();
 }
 
 double MediaDecoder::GetDuration() {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
   return mDuration;
 }
 
@@ -302,17 +314,16 @@ MediaDecoder::MediaDecoder(MediaDecoderI
       INIT_MIRROR(mCurrentPosition, TimeUnit::Zero()),
       INIT_MIRROR(mStateMachineDuration, NullableTimeUnit()),
       INIT_MIRROR(mIsAudioDataAudible, false),
       INIT_CANONICAL(mVolume, aInit.mVolume),
       INIT_CANONICAL(mPreservesPitch, aInit.mPreservesPitch),
       INIT_CANONICAL(mLooping, aInit.mLooping),
       INIT_CANONICAL(mPlayState, PLAY_STATE_LOADING),
       INIT_CANONICAL(mSameOriginMedia, false),
-      INIT_CANONICAL(mMediaPrincipalHandle, PRINCIPAL_HANDLE_NONE),
       mVideoDecodingOberver(
           new BackgroundVideoDecodingPermissionObserver(this)),
       mIsBackgroundVideoDecodingAllowed(false),
       mTelemetryReported(false),
       mContainerType(aInit.mContainerType) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mAbstractMainThread);
   MediaMemoryTracker::AddMediaDecoder(this);
@@ -737,16 +748,18 @@ void MediaDecoder::DecodeError(const Med
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
   GetOwner()->DecodeError(aError);
 }
 
 void MediaDecoder::UpdateSameOriginStatus(bool aSameOrigin) {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
+  nsCOMPtr<nsIPrincipal> principal = GetCurrentPrincipal();
+  mDecoderStateMachine->SetOutputStreamPrincipal(principal);
   mSameOriginMedia = aSameOrigin;
 }
 
 bool MediaDecoder::IsSeeking() const {
   MOZ_ASSERT(NS_IsMainThread());
   return mLogicallySeeking;
 }
 
@@ -784,18 +797,16 @@ void MediaDecoder::PlaybackEnded() {
   InvalidateWithFlags(VideoFrameContainer::INVALIDATE_FORCE);
   GetOwner()->PlaybackEnded();
 }
 
 void MediaDecoder::NotifyPrincipalChanged() {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
   AbstractThread::AutoEnter context(AbstractMainThread());
-  nsCOMPtr<nsIPrincipal> newPrincipal = GetCurrentPrincipal();
-  mMediaPrincipalHandle = MakePrincipalHandle(newPrincipal);
   GetOwner()->NotifyDecoderPrincipalChanged();
 }
 
 void MediaDecoder::OnSeekResolved() {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
   AbstractThread::AutoEnter context(AbstractMainThread());
   mLogicallySeeking = false;
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -14,16 +14,17 @@
 #include "MediaEventSource.h"
 #include "MediaMetadataManager.h"
 #include "MediaPromiseDefs.h"
 #include "MediaResource.h"
 #include "MediaStatistics.h"
 #include "MediaStreamGraph.h"
 #include "SeekTarget.h"
 #include "TimeUnits.h"
+#include "TrackID.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/CDMProxy.h"
 #include "mozilla/MozPromise.h"
 #include "mozilla/ReentrantMonitor.h"
 #include "mozilla/StateMirroring.h"
 #include "mozilla/StateWatching.h"
 #include "nsAutoPtr.h"
 #include "nsCOMPtr.h"
@@ -35,16 +36,17 @@ class nsIPrincipal;
 
 namespace mozilla {
 
 namespace dom {
 class MediaMemoryInfo;
 }
 
 class AbstractThread;
+class DOMMediaStream;
 class FrameStatistics;
 class VideoFrameContainer;
 class MediaFormatReader;
 class MediaDecoderStateMachine;
 struct MediaPlaybackEvent;
 
 enum class Visibility : uint8_t;
 
@@ -160,26 +162,32 @@ class MediaDecoder : public DecoderDocto
 
   // All MediaStream-related data is protected by mReentrantMonitor.
   // We have at most one DecodedStreamData per MediaDecoder. Its stream
   // is used as the input for each ProcessedMediaStream created by calls to
   // captureStream(UntilEnded). Seeking creates a new source stream, as does
   // replaying after the input as ended. In the latter case, the new source is
   // not connected to streams created by captureStreamUntilEnded.
 
+  // Sets the CORSMode for MediaStreamTracks that will be created by us.
+  void SetOutputStreamCORSMode(CORSMode aCORSMode);
+
   // Add an output stream. All decoder output will be sent to the stream.
   // The stream is initially blocked. The decoder is responsible for unblocking
   // it while it is playing back.
-  virtual void AddOutputStream(ProcessedMediaStream* aStream,
-                               TrackID aNextAvailableTrackID,
-                               bool aFinishWhenEnded);
+  void AddOutputStream(DOMMediaStream* aStream);
   // Remove an output stream added with AddOutputStream.
-  virtual void RemoveOutputStream(MediaStream* aStream);
-  // The next TrackID that can be used without risk of a collision.
-  virtual TrackID NextAvailableTrackIDFor(MediaStream* aOutputStream) const;
+  void RemoveOutputStream(DOMMediaStream* aStream);
+
+  // Set the TrackID to be used as the initial id by the next DecodedStream
+  // sink.
+  void SetNextOutputStreamTrackID(TrackID aNextTrackID);
+  // Get the next TrackID to be allocated by DecodedStream,
+  // or the last set TrackID if there is no DecodedStream sink.
+  TrackID GetNextOutputStreamTrackID();
 
   // Return the duration of the video in seconds.
   virtual double GetDuration();
 
   // Return true if the stream is infinite.
   bool IsInfinite() const;
 
   // Return true if we are currently seeking in the media resource.
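The block above is the whole main-thread capture surface on MediaDecoder after this patch: the CORS mode is set once, DOMMediaStreams are added and removed directly, and the TrackID counter can be read back or seeded. A minimal sketch of a hypothetical caller, assuming it already holds the decoder and the DOMMediaStream it wants to capture into (placeholder names, not code from this patch):

    // Hypothetical element-side capture driver (illustration only).
    void StartCapture(MediaDecoder* aDecoder, DOMMediaStream* aCaptureStream) {
      // The CORS mode must be set before the first output stream is added;
      // the state-machine side asserts that no OutputStreamManager exists yet
      // (see SetOutputStreamCORSMode in MediaDecoderStateMachine.cpp below).
      aDecoder->SetOutputStreamCORSMode(CORS_NONE);
      // From here on, decoded output is also fed to aCaptureStream's tracks.
      aDecoder->AddOutputStream(aCaptureStream);
    }

    void StopCapture(MediaDecoder* aDecoder, DOMMediaStream* aCaptureStream) {
      aDecoder->RemoveOutputStream(aCaptureStream);
    }
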
@@ -608,20 +616,16 @@ class MediaDecoder : public DecoderDocto
 
   // This can only be changed on the main thread.
   PlayState mNextState = PLAY_STATE_PAUSED;
 
   // True if the media is same-origin with the element. Data can only be
   // passed to MediaStreams when this is true.
   Canonical<bool> mSameOriginMedia;
 
-  // An identifier for the principal of the media. Used to track when
-  // main-thread induced principal changes get reflected on MSG thread.
-  Canonical<PrincipalHandle> mMediaPrincipalHandle;
-
  // We can allow video decoding in the background when we match some special
  // conditions, e.g. when the cursor is hovering over the tab. This observer
  // is used to listen for the related events.
   RefPtr<BackgroundVideoDecodingPermissionObserver> mVideoDecodingOberver;
 
  // True if we want to resume video decoding even when the media element is
  // in the background.
   bool mIsBackgroundVideoDecodingAllowed;
@@ -631,19 +635,16 @@ class MediaDecoder : public DecoderDocto
   AbstractCanonical<bool>* CanonicalPreservesPitch() {
     return &mPreservesPitch;
   }
   AbstractCanonical<bool>* CanonicalLooping() { return &mLooping; }
   AbstractCanonical<PlayState>* CanonicalPlayState() { return &mPlayState; }
   AbstractCanonical<bool>* CanonicalSameOriginMedia() {
     return &mSameOriginMedia;
   }
-  AbstractCanonical<PrincipalHandle>* CanonicalMediaPrincipalHandle() {
-    return &mMediaPrincipalHandle;
-  }
 
  private:
   // Notify owner when the audible state changed
   void NotifyAudibleStateChanged();
 
   bool mTelemetryReported;
   const MediaContainerType mContainerType;
   bool mCanPlayThrough = false;
--- a/dom/media/MediaDecoderOwner.h
+++ b/dom/media/MediaDecoderOwner.h
@@ -5,16 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 #ifndef MediaDecoderOwner_h_
 #define MediaDecoderOwner_h_
 
 #include "mozilla/UniquePtr.h"
 #include "MediaInfo.h"
 #include "MediaSegment.h"
 #include "nsSize.h"
+#include "TrackID.h"
 
 class nsIDocument;
 
 namespace mozilla {
 
 class AbstractThread;
 class GMPCrashHelper;
 class VideoFrameContainer;
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -20,16 +20,17 @@
 #include "mozilla/Sprintf.h"
 #include "mozilla/StaticPrefs.h"
 #include "mozilla/Telemetry.h"
 #include "mozilla/TaskQueue.h"
 #include "mozilla/Tuple.h"
 #include "nsIMemoryReporter.h"
 #include "nsPrintfCString.h"
 #include "nsTArray.h"
+#include "DOMMediaStream.h"
 #include "ImageContainer.h"
 #include "MediaDecoder.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaShutdownManager.h"
 #include "MediaTimer.h"
 #include "ReaderProxy.h"
 #include "TimeUnits.h"
 #include "VideoUtils.h"
@@ -2574,17 +2575,16 @@ RefPtr<ShutdownPromise> MediaDecoderStat
 
   // Disconnect canonicals and mirrors before shutting down our task queue.
   master->mBuffered.DisconnectIfConnected();
   master->mPlayState.DisconnectIfConnected();
   master->mVolume.DisconnectIfConnected();
   master->mPreservesPitch.DisconnectIfConnected();
   master->mLooping.DisconnectIfConnected();
   master->mSameOriginMedia.DisconnectIfConnected();
-  master->mMediaPrincipalHandle.DisconnectIfConnected();
 
   master->mDuration.DisconnectAll();
   master->mCurrentPosition.DisconnectAll();
   master->mIsAudioDataAudible.DisconnectAll();
 
   // Shut down the watch manager to stop further notifications.
   master->mWatchManager.Shutdown();
 
@@ -2615,27 +2615,26 @@ MediaDecoderStateMachine::MediaDecoderSt
       mReader(new ReaderProxy(mTaskQueue, aReader)),
       mPlaybackRate(1.0),
       mAmpleAudioThreshold(detail::AMPLE_AUDIO_THRESHOLD),
       mAudioCaptured(false),
       mMinimizePreroll(aDecoder->GetMinimizePreroll()),
       mSentFirstFrameLoadedEvent(false),
       mVideoDecodeSuspended(false),
       mVideoDecodeSuspendTimer(mTaskQueue),
-      mOutputStreamManager(new OutputStreamManager()),
+      mOutputStreamManager(nullptr),
       mVideoDecodeMode(VideoDecodeMode::Normal),
       mIsMSE(aDecoder->IsMSE()),
       mSeamlessLoopingAllowed(false),
       INIT_MIRROR(mBuffered, TimeIntervals()),
       INIT_MIRROR(mPlayState, MediaDecoder::PLAY_STATE_LOADING),
       INIT_MIRROR(mVolume, 1.0),
       INIT_MIRROR(mPreservesPitch, true),
       INIT_MIRROR(mLooping, false),
       INIT_MIRROR(mSameOriginMedia, false),
-      INIT_MIRROR(mMediaPrincipalHandle, PRINCIPAL_HANDLE_NONE),
       INIT_CANONICAL(mDuration, NullableTimeUnit()),
       INIT_CANONICAL(mCurrentPosition, TimeUnit::Zero()),
       INIT_CANONICAL(mIsAudioDataAudible, false),
       mSetSinkRequestsCount(0) {
   MOZ_COUNT_CTOR(MediaDecoderStateMachine);
   NS_ASSERTION(NS_IsMainThread(), "Should be on main thread.");
 
   InitVideoQueuePrefs();
@@ -2657,17 +2656,16 @@ void MediaDecoderStateMachine::Initializ
 
   // Connect mirrors.
   mBuffered.Connect(mReader->CanonicalBuffered());
   mPlayState.Connect(aDecoder->CanonicalPlayState());
   mVolume.Connect(aDecoder->CanonicalVolume());
   mPreservesPitch.Connect(aDecoder->CanonicalPreservesPitch());
   mLooping.Connect(aDecoder->CanonicalLooping());
   mSameOriginMedia.Connect(aDecoder->CanonicalSameOriginMedia());
-  mMediaPrincipalHandle.Connect(aDecoder->CanonicalMediaPrincipalHandle());
 
   // Initialize watchers.
   mWatchManager.Watch(mBuffered,
                       &MediaDecoderStateMachine::BufferedRangeUpdated);
   mWatchManager.Watch(mVolume, &MediaDecoderStateMachine::VolumeChanged);
   mWatchManager.Watch(mPreservesPitch,
                       &MediaDecoderStateMachine::PreservesPitchChanged);
   mWatchManager.Watch(mPlayState, &MediaDecoderStateMachine::PlayStateChanged);
@@ -2695,23 +2693,23 @@ media::MediaSink* MediaDecoderStateMachi
         self->mTaskQueue, self.get(),
         &MediaDecoderStateMachine::AudioAudibleChanged);
     return audioSink;
   };
   return new AudioSinkWrapper(mTaskQueue, mAudioQueue, audioSinkCreator);
 }
 
 already_AddRefed<media::MediaSink> MediaDecoderStateMachine::CreateMediaSink(
-    bool aAudioCaptured) {
+    bool aAudioCaptured, OutputStreamManager* aManager) {
+  MOZ_ASSERT_IF(aAudioCaptured, aManager);
   RefPtr<media::MediaSink> audioSink =
-      aAudioCaptured ? new DecodedStream(
-                           mTaskQueue, mAbstractMainThread, mAudioQueue,
-                           mVideoQueue, mOutputStreamManager,
-                           mSameOriginMedia.Ref(), mMediaPrincipalHandle.Ref())
-                     : CreateAudioSink();
+      aAudioCaptured
+          ? new DecodedStream(mTaskQueue, mAbstractMainThread, mAudioQueue,
+                              mVideoQueue, aManager, mSameOriginMedia.Ref())
+          : CreateAudioSink();
 
   RefPtr<media::MediaSink> mediaSink =
       new VideoSink(mTaskQueue, audioSink, mVideoQueue, mVideoFrameContainer,
                     *mFrameStats, sVideoQueueSendToCompositorSize);
   return mediaSink.forget();
 }
 
 TimeUnit MediaDecoderStateMachine::GetDecodedAudioDuration() {
@@ -2791,17 +2789,17 @@ nsresult MediaDecoderStateMachine::Init(
   mVideoQueueListener = VideoQueue().PopFrontEvent().Connect(
       mTaskQueue, this, &MediaDecoderStateMachine::OnVideoPopped);
 
   mMetadataManager.Connect(mReader->TimedMetadataEvent(), OwnerThread());
 
   mOnMediaNotSeekable = mReader->OnMediaNotSeekable().Connect(
       OwnerThread(), this, &MediaDecoderStateMachine::SetMediaNotSeekable);
 
-  mMediaSink = CreateMediaSink(mAudioCaptured);
+  mMediaSink = CreateMediaSink(mAudioCaptured, mOutputStreamManager);
 
   nsresult rv = mReader->Init();
   NS_ENSURE_SUCCESS(rv, rv);
 
   mReader->SetCanonicalDuration(&mDuration);
 
   return NS_OK;
 }
@@ -3324,17 +3322,18 @@ void MediaDecoderStateMachine::FinishDec
   mReader->ReadUpdatedMetadata(mInfo.ptr());
 
   EnqueueFirstFrameLoadedEvent();
 }
 
 RefPtr<ShutdownPromise> MediaDecoderStateMachine::BeginShutdown() {
   MOZ_ASSERT(NS_IsMainThread());
   if (mOutputStreamManager) {
-    mOutputStreamManager->Clear();
+    mNextOutputStreamTrackID = mOutputStreamManager->NextTrackID();
+    mOutputStreamManager->Disconnect();
   }
   return InvokeAsync(OwnerThread(), this, __func__,
                      &MediaDecoderStateMachine::Shutdown);
 }
 
 RefPtr<ShutdownPromise> MediaDecoderStateMachine::FinishShutdown() {
   MOZ_ASSERT(OnTaskQueue());
   LOG("Shutting down state machine task queue");
@@ -3627,17 +3626,18 @@ void MediaDecoderStateMachine::OnMediaSi
     return;
   }
 
   // Otherwise notify media decoder/element about this error for it makes
   // no sense to play an audio-only file without sound output.
   DecodeError(MediaResult(NS_ERROR_DOM_MEDIA_MEDIASINK_ERR, __func__));
 }
 
-void MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured) {
+void MediaDecoderStateMachine::SetAudioCaptured(bool aCaptured,
+                                                OutputStreamManager* aManager) {
   MOZ_ASSERT(OnTaskQueue());
 
   if (aCaptured == mAudioCaptured) {
     return;
   }
 
  // Reset these flags so they are consistent with the status of the sink.
   // TODO: Move these flags into MediaSink to improve cohesion so we don't need
@@ -3648,17 +3648,17 @@ void MediaDecoderStateMachine::SetAudioC
   // Backup current playback parameters.
   MediaSink::PlaybackParams params = mMediaSink->GetPlaybackParams();
 
   // Stop and shut down the existing sink.
   StopMediaSink();
   mMediaSink->Shutdown();
 
   // Create a new sink according to whether audio is captured.
-  mMediaSink = CreateMediaSink(aCaptured);
+  mMediaSink = CreateMediaSink(aCaptured, aManager);
 
   // Restore playback parameters.
   mMediaSink->SetPlaybackParams(params);
 
   mAudioCaptured = aCaptured;
 
   // Don't buffer as much when audio is captured because we don't need to worry
   // about high latency audio devices.
@@ -3712,48 +3712,101 @@ MediaDecoderStateMachine::RequestDebugIn
           "MediaDecoderStateMachine::RequestDebugInfo",
           [self, p]() { p->Resolve(self->GetDebugInfo(), __func__); }),
       AbstractThread::TailDispatch);
   MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
   Unused << rv;
   return p.forget();
 }
 
-void MediaDecoderStateMachine::AddOutputStream(ProcessedMediaStream* aStream,
-                                               TrackID aNextAvailableTrackID,
-                                               bool aFinishWhenEnded) {
+void MediaDecoderStateMachine::SetOutputStreamPrincipal(
+    const nsCOMPtr<nsIPrincipal>& aPrincipal) {
+  MOZ_ASSERT(NS_IsMainThread());
+  mOutputStreamPrincipal = aPrincipal;
+  if (mOutputStreamManager) {
+    mOutputStreamManager->SetPrincipal(mOutputStreamPrincipal);
+  }
+}
+
+void MediaDecoderStateMachine::SetOutputStreamCORSMode(CORSMode aCORSMode) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(mOutputStreamCORSMode == CORS_NONE);
+  MOZ_ASSERT(!mOutputStreamManager);
+  mOutputStreamCORSMode = aCORSMode;
+}
+
+void MediaDecoderStateMachine::AddOutputStream(DOMMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   LOG("AddOutputStream aStream=%p!", aStream);
-  mOutputStreamManager->Add(aStream, aNextAvailableTrackID, aFinishWhenEnded);
-  nsCOMPtr<nsIRunnable> r = NewRunnableMethod<bool>(
-      "MediaDecoderStateMachine::SetAudioCaptured", this,
-      &MediaDecoderStateMachine::SetAudioCaptured, true);
+  mOutputStreamManager->Add(aStream);
+  nsCOMPtr<nsIRunnable> r =
+      NS_NewRunnableFunction("MediaDecoderStateMachine::SetAudioCaptured",
+                             [self = RefPtr<MediaDecoderStateMachine>(this),
+                              manager = mOutputStreamManager]() {
+                               self->SetAudioCaptured(true, manager);
+                             });
   nsresult rv = OwnerThread()->Dispatch(r.forget());
   MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
   Unused << rv;
 }
 
-void MediaDecoderStateMachine::RemoveOutputStream(MediaStream* aStream) {
+void MediaDecoderStateMachine::RemoveOutputStream(DOMMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   LOG("RemoveOutputStream=%p!", aStream);
   mOutputStreamManager->Remove(aStream);
   if (mOutputStreamManager->IsEmpty()) {
-    nsCOMPtr<nsIRunnable> r = NewRunnableMethod<bool>(
-        "MediaDecoderStateMachine::SetAudioCaptured", this,
-        &MediaDecoderStateMachine::SetAudioCaptured, false);
+    mOutputStreamManager->Disconnect();
+    mOutputStreamManager = nullptr;
+    nsCOMPtr<nsIRunnable> r = NS_NewRunnableFunction(
+        "MediaDecoderStateMachine::SetAudioCaptured",
+        [self = RefPtr<MediaDecoderStateMachine>(this)]() {
+          self->SetAudioCaptured(false);
+        });
     nsresult rv = OwnerThread()->Dispatch(r.forget());
     MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
     Unused << rv;
   }
 }
 
-TrackID MediaDecoderStateMachine::NextAvailableTrackIDFor(
-    MediaStream* aOutputStream) const {
+void MediaDecoderStateMachine::EnsureOutputStreamManager(
+    MediaStreamGraph* aGraph, const Maybe<MediaInfo>& aLoadedInfo) {
   MOZ_ASSERT(NS_IsMainThread());
-  return mOutputStreamManager->NextAvailableTrackIDFor(aOutputStream);
+  if (mOutputStreamManager) {
+    return;
+  }
+  mOutputStreamManager = new OutputStreamManager(
+      aGraph->CreateSourceStream(), mNextOutputStreamTrackID,
+      mOutputStreamPrincipal, mOutputStreamCORSMode, mAbstractMainThread);
+  if (!aLoadedInfo) {
+    return;
+  }
+  TrackID mirroredTrackIDAllocation = mNextOutputStreamTrackID;
+  if (aLoadedInfo->HasAudio()) {
+    mOutputStreamManager->AddTrack(mirroredTrackIDAllocation++,
+                                   MediaSegment::AUDIO);
+  }
+  if (aLoadedInfo->HasVideo()) {
+    mOutputStreamManager->AddTrack(mirroredTrackIDAllocation++,
+                                   MediaSegment::VIDEO);
+  }
+}
+
+void MediaDecoderStateMachine::SetNextOutputStreamTrackID(
+    TrackID aNextTrackID) {
+  MOZ_ASSERT(NS_IsMainThread());
+  LOG("SetNextOutputStreamTrackID aNextTrackID=%d", aNextTrackID);
+  mNextOutputStreamTrackID = aNextTrackID;
+}
+
+TrackID MediaDecoderStateMachine::GetNextOutputStreamTrackID() {
+  MOZ_ASSERT(NS_IsMainThread());
+  if (mOutputStreamManager) {
+    return mOutputStreamManager->NextTrackID();
+  }
+  return mNextOutputStreamTrackID;
 }
 
 class VideoQueueMemoryFunctor : public nsDequeFunctor {
  public:
   VideoQueueMemoryFunctor() : mSize(0) {}
 
   MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf);
 
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -103,16 +103,17 @@ namespace mozilla {
 
 namespace media {
 class MediaSink;
 }
 
 class AbstractThread;
 class AudioSegment;
 class DecodedStream;
+class DOMMediaStream;
 class OutputStreamManager;
 class ReaderProxy;
 class TaskQueue;
 
 extern LazyLogModule gMediaDecoderLog;
 
 struct MediaPlaybackEvent {
   enum EventType {
@@ -182,21 +183,35 @@ class MediaDecoderStateMachine
     DECODER_STATE_SHUTDOWN
   };
 
   // Returns the state machine task queue.
   TaskQueue* OwnerThread() const { return mTaskQueue; }
 
   RefPtr<MediaDecoder::DebugInfoPromise> RequestDebugInfo();
 
-  void AddOutputStream(ProcessedMediaStream* aStream,
-                       TrackID aNextAvailableTrackID, bool aFinishWhenEnded);
-  // Remove an output stream added with AddOutputStream.
-  void RemoveOutputStream(MediaStream* aStream);
-  TrackID NextAvailableTrackIDFor(MediaStream* aOutputStream) const;
+  void SetOutputStreamPrincipal(const nsCOMPtr<nsIPrincipal>& aPrincipal);
+  void SetOutputStreamCORSMode(CORSMode aCORSMode);
+  // If an OutputStreamManager does not exist, one will be created, and tracks
+  // matching aLoadedInfo will be created up front, before the DecodedStream
+  // sink would otherwise create them.
+  void EnsureOutputStreamManager(MediaStreamGraph* aGraph,
+                                 const Maybe<MediaInfo>& aLoadedInfo);
+  // Add an output stream to the output stream manager. The manager must have
+  // been created through EnsureOutputStreamManager() before this.
+  void AddOutputStream(DOMMediaStream* aStream);
+  // Remove an output stream added with AddOutputStream. If the last output
+  // stream was removed, we will also tear down the OutputStreamManager.
+  void RemoveOutputStream(DOMMediaStream* aStream);
+  // Set the TrackID to be used as the initial id by the next DecodedStream
+  // sink.
+  void SetNextOutputStreamTrackID(TrackID aNextTrackID);
+  // Get the next TrackID to be allocated by DecodedStream,
+  // or the last set TrackID if there is no DecodedStream sink.
+  TrackID GetNextOutputStreamTrackID();
 
   // Seeks to the decoder to aTarget asynchronously.
   RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);
 
   void DispatchSetPlaybackRate(double aPlaybackRate) {
     OwnerThread()->DispatchStateChange(NewRunnableMethod<double>(
         "MediaDecoderStateMachine::SetPlaybackRate", this,
         &MediaDecoderStateMachine::SetPlaybackRate, aPlaybackRate));
@@ -302,17 +317,20 @@ class MediaDecoderStateMachine
   // on the appropriate threads.
   bool OnTaskQueue() const;
 
   // Initialization that needs to happen on the task queue. This is the first
   // task that gets run on the task queue, and is dispatched from the MDSM
   // constructor immediately after the task queue is created.
   void InitializationTask(MediaDecoder* aDecoder);
 
-  void SetAudioCaptured(bool aCaptured);
+  // Sets the audio-captured state and recreates the media sink if needed.
+  // A manager must be passed in if setting the audio-captured state to true.
+  void SetAudioCaptured(bool aCaptured,
+                        OutputStreamManager* aManager = nullptr);
 
   RefPtr<MediaDecoder::SeekPromise> Seek(const SeekTarget& aTarget);
 
   RefPtr<ShutdownPromise> Shutdown();
 
   RefPtr<ShutdownPromise> FinishShutdown();
 
   // Update the playback position. This can result in a timeupdate event
@@ -422,17 +440,19 @@ class MediaDecoderStateMachine
 
   // Update playback position and trigger next update by default time period.
   // Called on the state machine thread.
   void UpdatePlaybackPositionPeriodically();
 
   media::MediaSink* CreateAudioSink();
 
   // Always create mediasink which contains an AudioSink or StreamSink inside.
-  already_AddRefed<media::MediaSink> CreateMediaSink(bool aAudioCaptured);
+  // A manager must be passed in if aAudioCaptured is true.
+  already_AddRefed<media::MediaSink> CreateMediaSink(
+      bool aAudioCaptured, OutputStreamManager* aManager = nullptr);
 
   // Stops the media sink and shut it down.
   // The decoder monitor must be held with exactly one lock count.
   // Called on the state machine thread.
   void StopMediaSink();
 
   // Create and start the media sink.
   // The decoder monitor must be held with exactly one lock count.
@@ -652,17 +672,28 @@ class MediaDecoderStateMachine
 
   // True if the media is seekable only in buffered ranges.
   bool mMediaSeekableOnlyInBufferedRanges = false;
 
   // Track enabling video decode suspension via timer
   DelayedScheduler mVideoDecodeSuspendTimer;
 
   // Data about MediaStreams that are being fed by the decoder.
-  const RefPtr<OutputStreamManager> mOutputStreamManager;
+  // Main thread only.
+  RefPtr<OutputStreamManager> mOutputStreamManager;
+
+  // Principal used by output streams. Main thread only.
+  nsCOMPtr<nsIPrincipal> mOutputStreamPrincipal;
+
+  // CORSMode used by output streams. Main thread only.
+  CORSMode mOutputStreamCORSMode = CORS_NONE;
+
+  // The next TrackID to be used when a DecodedStream allocates a track.
+  // Main thread only.
+  TrackID mNextOutputStreamTrackID = 1;
 
   // Track the current video decode mode.
   VideoDecodeMode mVideoDecodeMode;
 
   // Track the complete & error for audio/video separately
   MozPromiseRequestHolder<GenericPromise> mMediaSinkAudioPromise;
   MozPromiseRequestHolder<GenericPromise> mMediaSinkVideoPromise;
 
@@ -712,20 +743,16 @@ class MediaDecoderStateMachine
   // Whether to seek back to the start of the media resource
   // upon reaching the end.
   Mirror<bool> mLooping;
 
   // True if the media is same-origin with the element. Data can only be
   // passed to MediaStreams when this is true.
   Mirror<bool> mSameOriginMedia;
 
-  // An identifier for the principal of the media. Used to track when
-  // main-thread induced principal changes get reflected on MSG thread.
-  Mirror<PrincipalHandle> mMediaPrincipalHandle;
-
   // Duration of the media. This is guaranteed to be non-null after we finish
   // decoding the first frame.
   Canonical<media::NullableTimeUnit> mDuration;
 
   // The time of the current frame, corresponding to the "current
   // playback position" in HTML5. This is referenced from 0, which is the
   // initial playback position.
   Canonical<media::TimeUnit> mCurrentPosition;
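
Because the TrackID counter now lives on the state machine rather than per output stream, callers that swap decoders while captured can keep IDs collision-free by carrying the counter across. A hedged sketch of that hand-off, with placeholder variable names:

    // Illustration only: carry the TrackID allocator across state machines.
    TrackID next = oldStateMachine->GetNextOutputStreamTrackID();
    newStateMachine->SetNextOutputStreamTrackID(next);
    // The next DecodedStream sink created by newStateMachine starts
    // allocating at `next`, so tracks already exposed on captured
    // DOMMediaStreams cannot collide with new ones.
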
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1107,54 +1107,16 @@ static bool IsOn(const OwningBooleanOrMe
 static const MediaTrackConstraints& GetInvariant(
     const OwningBooleanOrMediaTrackConstraints& aUnion) {
   static const MediaTrackConstraints empty;
   return aUnion.IsMediaTrackConstraints() ? aUnion.GetAsMediaTrackConstraints()
                                           : empty;
 }
 
 /**
- * This class is only needed since fake tracks are added dynamically.
- * Instead of refactoring to add them explicitly we let the DOMMediaStream
- * query us for the source as they become available.
- * Since they are used only for testing the API surface, we make them very
- * simple.
- */
-class FakeTrackSourceGetter : public MediaStreamTrackSourceGetter {
- public:
-  NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(FakeTrackSourceGetter,
-                                           MediaStreamTrackSourceGetter)
-
-  explicit FakeTrackSourceGetter(nsIPrincipal* aPrincipal)
-      : mPrincipal(aPrincipal) {}
-
-  already_AddRefed<dom::MediaStreamTrackSource> GetMediaStreamTrackSource(
-      TrackID aInputTrackID) override {
-    NS_ASSERTION(kAudioTrack != aInputTrackID,
-                 "Only fake tracks should appear dynamically");
-    NS_ASSERTION(kVideoTrack != aInputTrackID,
-                 "Only fake tracks should appear dynamically");
-    return do_AddRef(new BasicTrackSource(mPrincipal));
-  }
-
- protected:
-  virtual ~FakeTrackSourceGetter() {}
-
-  nsCOMPtr<nsIPrincipal> mPrincipal;
-};
-
-NS_IMPL_ADDREF_INHERITED(FakeTrackSourceGetter, MediaStreamTrackSourceGetter)
-NS_IMPL_RELEASE_INHERITED(FakeTrackSourceGetter, MediaStreamTrackSourceGetter)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(FakeTrackSourceGetter)
-NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSourceGetter)
-NS_IMPL_CYCLE_COLLECTION_INHERITED(FakeTrackSourceGetter,
-                                   MediaStreamTrackSourceGetter, mPrincipal)
-
-/**
  * Creates a MediaStream, attaches a listener and fires off a success callback
  * to the DOM with the stream. We also pass in the error callback so it can
  * be released correctly.
  *
  * All of this must be done on the main thread!
  *
  * Note that the various GetUserMedia Runnable classes currently allow for
  * two streams.  If we ever need to support getting more than two streams
@@ -1396,18 +1358,17 @@ class GetUserMediaStreamRunnable : publi
         principal = window->GetExtantDoc()->NodePrincipal();
       }
 
       // Normal case, connect the source stream to the track union stream to
       // avoid us blocking. Pass a simple TrackSourceGetter for potential
       // fake tracks. Apart from them gUM never adds tracks dynamically.
       domStream = new nsMainThreadPtrHolder<DOMMediaStream>(
           "GetUserMediaStreamRunnable::DOMMediaStreamMainThreadHolder",
-          DOMMediaStream::CreateSourceStreamAsInput(
-              window, msg, new FakeTrackSourceGetter(principal)));
+          DOMMediaStream::CreateSourceStreamAsInput(window, msg));
       stream = domStream->GetInputStream()->AsSourceStream();
 
       if (mAudioDevice) {
         nsString audioDeviceName;
         mAudioDevice->GetName(audioDeviceName);
         const MediaSourceEnum source = mAudioDevice->GetMediaSource();
         RefPtr<MediaStreamTrackSource> audioSource =
             new LocalTrackSource(principal, audioDeviceName, mSourceListener,
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -425,20 +425,19 @@ void MediaStreamTrack::RemoveConsumer(Me
   // Remove destroyed consumers for cleanliness
   while (mConsumers.RemoveElement(nullptr)) {
     MOZ_ASSERT_UNREACHABLE("A consumer was not explicitly removed");
   }
 }
 
 already_AddRefed<MediaStreamTrack> MediaStreamTrack::Clone() {
   // MediaStreamTracks are currently governed by streams, so we need a dummy
-  // DOMMediaStream to own our track clone. The dummy will never see any
-  // dynamically created tracks (no input stream) so no need for a SourceGetter.
+  // DOMMediaStream to own our track clone.
   RefPtr<DOMMediaStream> newStream =
-      new DOMMediaStream(mOwningStream->GetParentObject(), nullptr);
+      new DOMMediaStream(mOwningStream->GetParentObject());
 
   MediaStreamGraph* graph = Graph();
   newStream->InitOwnedStreamCommon(graph);
   newStream->InitPlaybackStreamCommon(graph);
 
   return newStream->CloneDOMTrack(*this, mTrackID);
 }
 
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -3,16 +3,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/AbstractThread.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/gfx/Point.h"
 #include "mozilla/SyncRunnable.h"
+#include "nsProxyRelease.h"
 
 #include "AudioSegment.h"
 #include "DecodedStream.h"
 #include "MediaData.h"
 #include "MediaQueue.h"
 #include "MediaStreamGraph.h"
 #include "MediaStreamListener.h"
 #include "OutputStreamManager.h"
@@ -26,235 +27,302 @@ using media::TimeUnit;
 
 /*
  * A container class to make it easier to pass the playback info all the
  * way to DecodedStreamGraphListener from DecodedStream.
  */
 struct PlaybackInfoInit {
   TimeUnit mStartTime;
   MediaInfo mInfo;
+  TrackID mAudioTrackID;
+  TrackID mVideoTrackID;
 };
 
-class DecodedStreamGraphListener : public MediaStreamListener {
+class DecodedStreamGraphListener;
+
+class DecodedStreamTrackListener : public MediaStreamTrackListener {
  public:
-  DecodedStreamGraphListener(MediaStream* aStream,
-                             MozPromiseHolder<GenericPromise>&& aPromise,
+  DecodedStreamTrackListener(DecodedStreamGraphListener* aGraphListener,
+                             SourceMediaStream* aStream, TrackID aTrackID);
+
+  void NotifyOutput(MediaStreamGraph* aGraph,
+                    StreamTime aCurrentTrackTime) override;
+  void NotifyEnded() override;
+
+ private:
+  const RefPtr<DecodedStreamGraphListener> mGraphListener;
+  const RefPtr<SourceMediaStream> mStream;
+  const mozilla::TrackID mTrackID;
+};
+
+class DecodedStreamGraphListener {
+  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecodedStreamGraphListener)
+ public:
+  DecodedStreamGraphListener(SourceMediaStream* aStream, TrackID aAudioTrackID,
+                             MozPromiseHolder<GenericPromise>&& aAudioEndHolder,
+                             TrackID aVideoTrackID,
+                             MozPromiseHolder<GenericPromise>&& aVideoEndHolder,
                              AbstractThread* aMainThread)
       : mMutex("DecodedStreamGraphListener::mMutex"),
-        mStream(aStream),
+        mAudioTrackListener(IsTrackIDExplicit(aAudioTrackID)
+                                ? MakeRefPtr<DecodedStreamTrackListener>(
+                                      this, aStream, aAudioTrackID)
+                                : nullptr),
+        mVideoTrackListener(IsTrackIDExplicit(aVideoTrackID)
+                                ? MakeRefPtr<DecodedStreamTrackListener>(
+                                      this, aStream, aVideoTrackID)
+                                : nullptr),
+        mAudioTrackID(aAudioTrackID),
+        mAudioEndHolder(std::move(aAudioEndHolder)),
+        mVideoTrackID(aVideoTrackID),
+        mVideoEndHolder(std::move(aVideoEndHolder)),
         mAbstractMainThread(aMainThread) {
-    mFinishPromise = std::move(aPromise);
+    if (mAudioTrackListener) {
+      aStream->AddTrackListener(mAudioTrackListener, mAudioTrackID);
+    } else {
+      mAudioEndHolder.ResolveIfExists(true, __func__);
+    }
+
+    if (mVideoTrackListener) {
+      aStream->AddTrackListener(mVideoTrackListener, mVideoTrackID);
+    } else {
+      mVideoEndHolder.ResolveIfExists(true, __func__);
+    }
   }
 
-  void NotifyOutput(MediaStreamGraph* aGraph, GraphTime aCurrentTime) override {
-    MutexAutoLock lock(mMutex);
-    if (mStream) {
-      int64_t t = mStream->StreamTimeToMicroseconds(
-          mStream->GraphTimeToStreamTime(aCurrentTime));
+  void NotifyOutput(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
+                    StreamTime aCurrentTrackTime) {
+    if (aTrackID != mAudioTrackID && mAudioTrackID != TRACK_NONE) {
+      // Only audio playout drives the clock forward, if present.
+      return;
+    }
+    if (aStream) {
+      int64_t t = aStream->StreamTimeToMicroseconds(aCurrentTrackTime);
       mOnOutput.Notify(t);
     }
   }
 
-  void NotifyEvent(MediaStreamGraph* aGraph,
-                   MediaStreamGraphEvent event) override {
-    if (event == MediaStreamGraphEvent::EVENT_FINISHED) {
-      aGraph->DispatchToMainThreadAfterStreamStateUpdate(NewRunnableMethod(
-          "DecodedStreamGraphListener::DoNotifyFinished", this,
-          &DecodedStreamGraphListener::DoNotifyFinished));
+  TrackID AudioTrackID() const { return mAudioTrackID; }
+
+  TrackID VideoTrackID() const { return mVideoTrackID; }
+
+  void DoNotifyTrackEnded(TrackID aTrackID) {
+    MOZ_ASSERT(NS_IsMainThread());
+    if (aTrackID == mAudioTrackID) {
+      mAudioEndHolder.ResolveIfExists(true, __func__);
+    } else if (aTrackID == mVideoTrackID) {
+      mVideoEndHolder.ResolveIfExists(true, __func__);
+    } else {
+      MOZ_CRASH("Unexpected track id");
     }
   }
 
-  void DoNotifyFinished() {
-    MOZ_ASSERT(NS_IsMainThread());
-    mFinishPromise.ResolveIfExists(true, __func__);
-  }
-
   void Forget() {
     RefPtr<DecodedStreamGraphListener> self = this;
     mAbstractMainThread->Dispatch(
         NS_NewRunnableFunction("DecodedStreamGraphListener::Forget", [self]() {
           MOZ_ASSERT(NS_IsMainThread());
-          self->mFinishPromise.ResolveIfExists(true, __func__);
+          self->mAudioEndHolder.ResolveIfExists(false, __func__);
+          self->mVideoEndHolder.ResolveIfExists(false, __func__);
         }));
     MutexAutoLock lock(mMutex);
-    mStream = nullptr;
+    mAudioTrackListener = nullptr;
+    mVideoTrackListener = nullptr;
   }
 
   MediaEventSource<int64_t>& OnOutput() { return mOnOutput; }
 
  private:
+  ~DecodedStreamGraphListener() {
+    MOZ_ASSERT(mAudioEndHolder.IsEmpty());
+    MOZ_ASSERT(mVideoEndHolder.IsEmpty());
+  }
+
   MediaEventProducer<int64_t> mOnOutput;
 
   Mutex mMutex;
   // Members below are protected by mMutex.
-  RefPtr<MediaStream> mStream;
+  RefPtr<DecodedStreamTrackListener> mAudioTrackListener;
+  RefPtr<DecodedStreamTrackListener> mVideoTrackListener;
   // Main thread only.
-  MozPromiseHolder<GenericPromise> mFinishPromise;
+  const TrackID mAudioTrackID;
+  MozPromiseHolder<GenericPromise> mAudioEndHolder;
+  const TrackID mVideoTrackID;
+  MozPromiseHolder<GenericPromise> mVideoEndHolder;
 
   const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
-static void UpdateStreamSuspended(AbstractThread* aMainThread,
-                                  MediaStream* aStream, bool aBlocking) {
-  if (NS_IsMainThread()) {
-    if (aBlocking) {
-      aStream->Suspend();
-    } else {
-      aStream->Resume();
-    }
-  } else {
-    nsCOMPtr<nsIRunnable> r;
-    if (aBlocking) {
-      r = NewRunnableMethod("MediaStream::Suspend", aStream,
-                            &MediaStream::Suspend);
-    } else {
-      r = NewRunnableMethod("MediaStream::Resume", aStream,
-                            &MediaStream::Resume);
-    }
-    aMainThread->Dispatch(r.forget());
-  }
+DecodedStreamTrackListener::DecodedStreamTrackListener(
+    DecodedStreamGraphListener* aGraphListener, SourceMediaStream* aStream,
+    mozilla::TrackID aTrackID)
+    : mGraphListener(aGraphListener), mStream(aStream), mTrackID(aTrackID) {}
+
+void DecodedStreamTrackListener::NotifyOutput(MediaStreamGraph* aGraph,
+                                              StreamTime aCurrentTrackTime) {
+  mGraphListener->NotifyOutput(mStream, mTrackID, aCurrentTrackTime);
+}
+
+void DecodedStreamTrackListener::NotifyEnded() {
+  mStream->Graph()->DispatchToMainThreadAfterStreamStateUpdate(
+      NewRunnableMethod<mozilla::TrackID>(
+          "DecodedStreamGraphListener::DoNotifyTrackEnded", mGraphListener,
+          &DecodedStreamGraphListener::DoNotifyTrackEnded, mTrackID));
 }
 
 /*
  * All MediaStream-related data is protected by the decoder's monitor.
  * We have at most one DecodedStreamData per MediaDecoder. Its stream
  * is used as the input for each ProcessedMediaStream created by calls to
  * captureStream(UntilEnded). Seeking creates a new source stream, as does
  * replaying after the input has ended. In the latter case, the new source is
  * not connected to streams created by captureStreamUntilEnded.
  */
 class DecodedStreamData {
  public:
   DecodedStreamData(OutputStreamManager* aOutputStreamManager,
                     PlaybackInfoInit&& aInit,
-                    MozPromiseHolder<GenericPromise>&& aPromise,
+                    MozPromiseHolder<GenericPromise>&& aAudioPromise,
+                    MozPromiseHolder<GenericPromise>&& aVideoPromise,
                     AbstractThread* aMainThread);
   ~DecodedStreamData();
-  void SetPlaying(bool aPlaying);
   MediaEventSource<int64_t>& OnOutput();
   void Forget();
   nsCString GetDebugInfo();
 
   /* The following group of fields are protected by the decoder's monitor
    * and can be read or written on any thread.
    */
   // Count of audio frames written to the stream
   int64_t mAudioFramesWritten;
+  // Count of video frames written to the stream in the stream's rate
+  StreamTime mStreamVideoWritten;
+  // Count of audio frames written to the stream in the stream's rate
+  StreamTime mStreamAudioWritten;
   // mNextVideoTime is the end timestamp for the last packet sent to the stream.
   // Therefore video packets starting at or after this time need to be copied
   // to the output stream.
   TimeUnit mNextVideoTime;
   TimeUnit mNextAudioTime;
   // The last video image sent to the stream. Useful if we need to replicate
   // the image.
   RefPtr<layers::Image> mLastVideoImage;
   gfx::IntSize mLastVideoImageDisplaySize;
-  bool mHaveSentFinish;
   bool mHaveSentFinishAudio;
   bool mHaveSentFinishVideo;
 
   // The decoder is responsible for calling Destroy() on this stream.
   const RefPtr<SourceMediaStream> mStream;
   const RefPtr<DecodedStreamGraphListener> mListener;
-  bool mPlaying;
   // True if we need to send a compensation video frame to ensure the
   // StreamTime going forward.
   bool mEOSVideoCompensation;
 
   const RefPtr<OutputStreamManager> mOutputStreamManager;
   const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 DecodedStreamData::DecodedStreamData(
     OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
-    MozPromiseHolder<GenericPromise>&& aPromise, AbstractThread* aMainThread)
+    MozPromiseHolder<GenericPromise>&& aAudioPromise,
+    MozPromiseHolder<GenericPromise>&& aVideoPromise,
+    AbstractThread* aMainThread)
     : mAudioFramesWritten(0),
+      mStreamVideoWritten(0),
+      mStreamAudioWritten(0),
       mNextVideoTime(aInit.mStartTime),
       mNextAudioTime(aInit.mStartTime),
-      mHaveSentFinish(false),
       mHaveSentFinishAudio(false),
       mHaveSentFinishVideo(false),
-      mStream(aOutputStreamManager->Graph()->CreateSourceStream())
-      // DecodedStreamGraphListener will resolve this promise.
+      mStream(aOutputStreamManager->mSourceStream)
+      // DecodedStreamGraphListener will resolve these promises.
       ,
-      mListener(new DecodedStreamGraphListener(mStream, std::move(aPromise),
-                                               aMainThread))
-      // mPlaying is initially true because MDSM won't start playback until
-      // playing becomes true. This is consistent with the settings of
-      // AudioSink.
-      ,
-      mPlaying(true),
+      mListener(MakeRefPtr<DecodedStreamGraphListener>(
+          mStream, aInit.mAudioTrackID, std::move(aAudioPromise),
+          aInit.mVideoTrackID, std::move(aVideoPromise), aMainThread)),
       mEOSVideoCompensation(false),
       mOutputStreamManager(aOutputStreamManager),
       mAbstractMainThread(aMainThread) {
-  mStream->AddListener(mListener);
-  TrackID audioTrack = TRACK_NONE;
-  TrackID videoTrack = TRACK_NONE;
-
-  // Initialize tracks.
-  if (aInit.mInfo.HasAudio()) {
-    audioTrack = aInit.mInfo.mAudio.mTrackId;
-    mStream->AddAudioTrack(audioTrack, aInit.mInfo.mAudio.mRate,
+  MOZ_ASSERT(NS_IsMainThread());
+  // Initialize tracks on the main thread and in the MediaStreamGraph.
+  // Main-thread tracks may have been created early in OutputStreamManager by
+  // the state machine, since creating them here is asynchronous from the JS
+  // call. If they were pre-created in OutputStreamManager and the MediaInfo
+  // has changed since then, we end them and create new tracks.
+  if (!mOutputStreamManager->HasTracks(aInit.mAudioTrackID,
+                                       aInit.mVideoTrackID)) {
+    // Because these tracks were pre-allocated, we also have to increment the
+    // internal track allocator by the same number of tracks, so we don't risk
+    // a TrackID collision.
+    for (size_t i = 0; i < mOutputStreamManager->NumberOfTracks(); ++i) {
+      Unused << mOutputStreamManager->AllocateNextTrackID();
+    }
+    mOutputStreamManager->RemoveTracks();
+  }
+  if (IsTrackIDExplicit(aInit.mAudioTrackID)) {
+    if (!mOutputStreamManager->HasTrack(aInit.mAudioTrackID)) {
+      mOutputStreamManager->AddTrack(aInit.mAudioTrackID, MediaSegment::AUDIO);
+    }
+    mStream->AddAudioTrack(aInit.mAudioTrackID, aInit.mInfo.mAudio.mRate,
                            new AudioSegment());
   }
-  if (aInit.mInfo.HasVideo()) {
-    videoTrack = aInit.mInfo.mVideo.mTrackId;
-    mStream->AddTrack(videoTrack, new VideoSegment());
+  if (IsTrackIDExplicit(aInit.mVideoTrackID)) {
+    if (!mOutputStreamManager->HasTrack(aInit.mVideoTrackID)) {
+      mOutputStreamManager->AddTrack(aInit.mVideoTrackID, MediaSegment::VIDEO);
+    }
+    mStream->AddTrack(aInit.mVideoTrackID, new VideoSegment());
   }
-
-  mOutputStreamManager->Connect(mStream, audioTrack, videoTrack);
 }
 
-DecodedStreamData::~DecodedStreamData() {
-  mOutputStreamManager->Disconnect();
-  mStream->Destroy();
-}
+DecodedStreamData::~DecodedStreamData() { MOZ_ASSERT(NS_IsMainThread()); }
 
 MediaEventSource<int64_t>& DecodedStreamData::OnOutput() {
   return mListener->OnOutput();
 }
 
-void DecodedStreamData::SetPlaying(bool aPlaying) {
-  if (mPlaying != aPlaying) {
-    mPlaying = aPlaying;
-    UpdateStreamSuspended(mAbstractMainThread, mStream, !mPlaying);
-  }
-}
-
 void DecodedStreamData::Forget() { mListener->Forget(); }
 
 nsCString DecodedStreamData::GetDebugInfo() {
   return nsPrintfCString(
-      "DecodedStreamData=%p mPlaying=%d mAudioFramesWritten=%" PRId64
+      "DecodedStreamData=%p mAudioFramesWritten=%" PRId64
+      " mStreamAudioWritten=%" PRId64 " mStreamVideoWritten=%" PRId64
       " mNextAudioTime=%" PRId64 " mNextVideoTime=%" PRId64
-      " mHaveSentFinish=%d "
       "mHaveSentFinishAudio=%d mHaveSentFinishVideo=%d",
-      this, mPlaying, mAudioFramesWritten, mNextAudioTime.ToMicroseconds(),
-      mNextVideoTime.ToMicroseconds(), mHaveSentFinish, mHaveSentFinishAudio,
-      mHaveSentFinishVideo);
+      this, mAudioFramesWritten, mStreamAudioWritten, mStreamVideoWritten,
+      mNextAudioTime.ToMicroseconds(), mNextVideoTime.ToMicroseconds(),
+      mHaveSentFinishAudio, mHaveSentFinishVideo);
 }
 
 DecodedStream::DecodedStream(AbstractThread* aOwnerThread,
                              AbstractThread* aMainThread,
                              MediaQueue<AudioData>& aAudioQueue,
                              MediaQueue<VideoData>& aVideoQueue,
                              OutputStreamManager* aOutputStreamManager,
-                             const bool& aSameOrigin,
-                             const PrincipalHandle& aPrincipalHandle)
+                             const bool& aSameOrigin)
     : mOwnerThread(aOwnerThread),
       mAbstractMainThread(aMainThread),
       mOutputStreamManager(aOutputStreamManager),
-      mPlaying(false),
+      mWatchManager(this, mOwnerThread),
+      mPlaying(false, "DecodedStream::mPlaying"),
       mSameOrigin(aSameOrigin),
-      mPrincipalHandle(aPrincipalHandle),
+      mPrincipalHandle(aOwnerThread, PRINCIPAL_HANDLE_NONE,
+                       "DecodedStream::mPrincipalHandle (Mirror)"),
       mAudioQueue(aAudioQueue),
-      mVideoQueue(aVideoQueue) {}
+      mVideoQueue(aVideoQueue) {
+  mPrincipalHandle.Connect(mOutputStreamManager->CanonicalPrincipalHandle());
+
+  mWatchManager.Watch(mPlaying, &DecodedStream::PlayingChanged);
+
+  PlayingChanged();  // Notify of the initial state
+}
 
 DecodedStream::~DecodedStream() {
   MOZ_ASSERT(mStartTime.isNothing(), "playback should've ended.");
+  NS_ProxyRelease("DecodedStream::mOutputStreamManager", mAbstractMainThread,
+                  do_AddRef(mOutputStreamManager));
 }
 
 const media::MediaSink::PlaybackParams& DecodedStream::GetPlaybackParams()
     const {
   AssertOwnerThread();
   return mParams;
 }
 
@@ -263,22 +331,19 @@ void DecodedStream::SetPlaybackParams(co
   mParams = aParams;
 }
 
 RefPtr<GenericPromise> DecodedStream::OnEnded(TrackType aType) {
   AssertOwnerThread();
   MOZ_ASSERT(mStartTime.isSome());
 
   if (aType == TrackInfo::kAudioTrack && mInfo.HasAudio()) {
-    // TODO: we should return a promise which is resolved when the audio track
-    // is finished. For now this promise is resolved when the whole stream is
-    // finished.
-    return mFinishPromise;
+    return mAudioEndPromise;
   } else if (aType == TrackInfo::kVideoTrack && mInfo.HasVideo()) {
-    return mFinishPromise;
+    return mVideoEndPromise;
   }
   return nullptr;
 }
 
 nsresult DecodedStream::Start(const TimeUnit& aStartTime,
                               const MediaInfo& aInfo) {
   AssertOwnerThread();
   MOZ_ASSERT(mStartTime.isNothing(), "playback already started.");
@@ -288,90 +353,111 @@ nsresult DecodedStream::Start(const Time
   mInfo = aInfo;
   mPlaying = true;
   ConnectListener();
 
   class R : public Runnable {
     typedef MozPromiseHolder<GenericPromise> Promise;
 
    public:
-    R(PlaybackInfoInit&& aInit, Promise&& aPromise,
-      OutputStreamManager* aManager, AbstractThread* aMainThread)
+    R(PlaybackInfoInit&& aInit, Promise&& aAudioPromise,
+      Promise&& aVideoPromise, OutputStreamManager* aManager,
+      AbstractThread* aMainThread)
         : Runnable("CreateDecodedStreamData"),
           mInit(std::move(aInit)),
+          mAudioPromise(std::move(aAudioPromise)),
+          mVideoPromise(std::move(aVideoPromise)),
           mOutputStreamManager(aManager),
-          mAbstractMainThread(aMainThread) {
-      mPromise = std::move(aPromise);
-    }
+          mAbstractMainThread(aMainThread) {}
     NS_IMETHOD Run() override {
       MOZ_ASSERT(NS_IsMainThread());
       // No need to create a source stream when there are no output streams.
       // This happens when RemoveOutput() is called immediately after
       // StartPlayback().
-      if (!mOutputStreamManager->Graph()) {
+      if (mOutputStreamManager->IsEmpty()) {
         // Resolve the promise to indicate the end of playback.
-        mPromise.Resolve(true, __func__);
+        mAudioPromise.Resolve(true, __func__);
+        mVideoPromise.Resolve(true, __func__);
         return NS_OK;
       }
+      mInit.mAudioTrackID = mInit.mInfo.HasAudio()
+                                ? mOutputStreamManager->AllocateNextTrackID()
+                                : TRACK_NONE;
+      mInit.mVideoTrackID = mInit.mInfo.HasVideo()
+                                ? mOutputStreamManager->AllocateNextTrackID()
+                                : TRACK_NONE;
       mData = MakeUnique<DecodedStreamData>(
-          mOutputStreamManager, std::move(mInit), std::move(mPromise),
-          mAbstractMainThread);
+          mOutputStreamManager, std::move(mInit), std::move(mAudioPromise),
+          std::move(mVideoPromise), mAbstractMainThread);
       return NS_OK;
     }
     UniquePtr<DecodedStreamData> ReleaseData() { return std::move(mData); }
 
    private:
     PlaybackInfoInit mInit;
-    Promise mPromise;
+    Promise mAudioPromise;
+    Promise mVideoPromise;
     RefPtr<OutputStreamManager> mOutputStreamManager;
     UniquePtr<DecodedStreamData> mData;
     const RefPtr<AbstractThread> mAbstractMainThread;
   };
 
-  MozPromiseHolder<GenericPromise> promise;
-  mFinishPromise = promise.Ensure(__func__);
-  PlaybackInfoInit init{aStartTime, aInfo};
-  nsCOMPtr<nsIRunnable> r = new R(std::move(init), std::move(promise),
-                                  mOutputStreamManager, mAbstractMainThread);
+  MozPromiseHolder<GenericPromise> audioHolder;
+  mAudioEndPromise = audioHolder.Ensure(__func__);
+  MozPromiseHolder<GenericPromise> videoHolder;
+  mVideoEndPromise = videoHolder.Ensure(__func__);
+  PlaybackInfoInit init{aStartTime, aInfo, TRACK_INVALID, TRACK_INVALID};
+  nsCOMPtr<nsIRunnable> r =
+      new R(std::move(init), std::move(audioHolder), std::move(videoHolder),
+            mOutputStreamManager, mAbstractMainThread);
   SyncRunnable::DispatchToThread(
       SystemGroup::EventTargetFor(mozilla::TaskCategory::Other), r);
   mData = static_cast<R*>(r.get())->ReleaseData();
 
   if (mData) {
+    mInfo.mAudio.mTrackId = mData->mListener->AudioTrackID();
+    mInfo.mVideo.mTrackId = mData->mListener->VideoTrackID();
     mOutputListener = mData->OnOutput().Connect(mOwnerThread, this,
                                                 &DecodedStream::NotifyOutput);
-    mData->SetPlaying(mPlaying);
     SendData();
   }
   return NS_OK;
 }
 
 void DecodedStream::Stop() {
   AssertOwnerThread();
   MOZ_ASSERT(mStartTime.isSome(), "playback not started.");
 
+  mStreamTimeOffset += SentDuration();
   mStartTime.reset();
   DisconnectListener();
-  mFinishPromise = nullptr;
+  mAudioEndPromise = nullptr;
+  mVideoEndPromise = nullptr;
 
   // Clear mData immediately when this playback session ends so we won't
   // send data to the wrong stream in SendData() in next playback session.
   DestroyData(std::move(mData));
 }
 
 bool DecodedStream::IsStarted() const {
   AssertOwnerThread();
   return mStartTime.isSome();
 }
 
 bool DecodedStream::IsPlaying() const {
   AssertOwnerThread();
   return IsStarted() && mPlaying;
 }
 
+void DecodedStream::Shutdown() {
+  AssertOwnerThread();
+  mPrincipalHandle.DisconnectIfConnected();
+  mWatchManager.Shutdown();
+}
+
 void DecodedStream::DestroyData(UniquePtr<DecodedStreamData> aData) {
   AssertOwnerThread();
 
   if (!aData) {
     return;
   }
 
   mOutputListener.Disconnect();
@@ -387,19 +473,16 @@ void DecodedStream::SetPlaying(bool aPla
   AssertOwnerThread();
 
   // Resume/pause matters only when playback started.
   if (mStartTime.isNothing()) {
     return;
   }
 
   mPlaying = aPlaying;
-  if (mData) {
-    mData->SetPlaying(aPlaying);
-  }
 }
 
 void DecodedStream::SetVolume(double aVolume) {
   AssertOwnerThread();
   mParams.mVolume = aVolume;
 }
 
 void DecodedStream::SetPlaybackRate(double aPlaybackRate) {
@@ -462,16 +545,20 @@ static void SendStreamAudio(DecodedStrea
 void DecodedStream::SendAudio(double aVolume, bool aIsSameOrigin,
                               const PrincipalHandle& aPrincipalHandle) {
   AssertOwnerThread();
 
   if (!mInfo.HasAudio()) {
     return;
   }
 
+  if (mData->mHaveSentFinishAudio) {
+    return;
+  }
+
   AudioSegment output;
   uint32_t rate = mInfo.mAudio.mRate;
   AutoTArray<RefPtr<AudioData>, 10> audio;
   TrackID audioTrackId = mInfo.mAudio.mTrackId;
   SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the AudioData because AudioData
   // is ref-counted.
@@ -486,17 +573,18 @@ void DecodedStream::SendAudio(double aVo
   if (!aIsSameOrigin) {
     output.ReplaceWithDisabled();
   }
 
   // |mNextAudioTime| is updated as we process each audio sample in
   // SendStreamAudio(). This is consistent with how |mNextVideoTime|
   // is updated for video samples.
   if (output.GetDuration() > 0) {
-    sourceStream->AppendToTrack(audioTrackId, &output);
+    mData->mStreamAudioWritten +=
+        sourceStream->AppendToTrack(audioTrackId, &output);
   }
 
   if (mAudioQueue.IsFinished() && !mData->mHaveSentFinishAudio) {
     sourceStream->EndTrack(audioTrackId);
     mData->mHaveSentFinishAudio = true;
   }
 }
 
@@ -528,16 +616,20 @@ static bool ZeroDurationAtLastChunk(Vide
 void DecodedStream::SendVideo(bool aIsSameOrigin,
                               const PrincipalHandle& aPrincipalHandle) {
   AssertOwnerThread();
 
   if (!mInfo.HasVideo()) {
     return;
   }
 
+  if (mData->mHaveSentFinishVideo) {
+    return;
+  }
+
   VideoSegment output;
   TrackID videoTrackId = mInfo.mVideo.mTrackId;
   AutoTArray<RefPtr<VideoData>, 10> video;
   SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the VideoData because VideoData
   // is ref-counted.
   mVideoQueue.GetElementsAfter(mData->mNextVideoTime, &video);
@@ -587,17 +679,18 @@ void DecodedStream::SendVideo(bool aIsSa
     mData->mEOSVideoCompensation = ZeroDurationAtLastChunk(output);
   }
 
   if (!aIsSameOrigin) {
     output.ReplaceWithDisabled();
   }
 
   if (output.GetDuration() > 0) {
-    sourceStream->AppendToTrack(videoTrackId, &output);
+    mData->mStreamVideoWritten +=
+        sourceStream->AppendToTrack(videoTrackId, &output);
   }
 
   if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
     if (mData->mEOSVideoCompensation) {
       VideoSegment endSegment;
       // Calculate the deviation clock time from DecodedStream.
       auto deviation =
           FromMicroseconds(sourceStream->StreamTimeToMicroseconds(1));
@@ -608,70 +701,52 @@ void DecodedStream::SendVideo(bool aIsSa
           tracksStartTimeStamp +
               (mData->mNextVideoTime + deviation).ToTimeDuration(),
           &endSegment, aPrincipalHandle);
       mData->mNextVideoTime += deviation;
       MOZ_ASSERT(endSegment.GetDuration() > 0);
       if (!aIsSameOrigin) {
         endSegment.ReplaceWithDisabled();
       }
-      sourceStream->AppendToTrack(videoTrackId, &endSegment);
+      mData->mStreamVideoWritten +=
+          sourceStream->AppendToTrack(videoTrackId, &endSegment);
     }
     sourceStream->EndTrack(videoTrackId);
     mData->mHaveSentFinishVideo = true;
   }
 }
 
+StreamTime DecodedStream::SentDuration() {
+  AssertOwnerThread();
+
+  if (!mData) {
+    return 0;
+  }
+
+  return std::max(mData->mStreamAudioWritten, mData->mStreamVideoWritten);
+}
+
 void DecodedStream::AdvanceTracks() {
   AssertOwnerThread();
 
-  StreamTime endPosition = 0;
-
-  if (mInfo.HasAudio()) {
-    StreamTime audioEnd = mData->mStream->TicksToTimeRoundDown(
-        mInfo.mAudio.mRate, mData->mAudioFramesWritten);
-    endPosition = std::max(endPosition, audioEnd);
-  }
-
-  if (mInfo.HasVideo()) {
-    StreamTime videoEnd = mData->mStream->MicrosecondsToStreamTimeRoundDown(
-        (mData->mNextVideoTime - mStartTime.ref()).ToMicroseconds());
-    endPosition = std::max(endPosition, videoEnd);
-  }
-
-  if (!mData->mHaveSentFinish) {
-    mData->mStream->AdvanceKnownTracksTime(endPosition);
-  }
+  mData->mStream->AdvanceKnownTracksTime(mStreamTimeOffset + SentDuration());
 }
 
 void DecodedStream::SendData() {
   AssertOwnerThread();
   MOZ_ASSERT(mStartTime.isSome(), "Must be called after StartPlayback()");
 
   // Not yet created on the main thread. MDSM will try again later.
   if (!mData) {
     return;
   }
 
-  // Nothing to do when the stream is finished.
-  if (mData->mHaveSentFinish) {
-    return;
-  }
-
   SendAudio(mParams.mVolume, mSameOrigin, mPrincipalHandle);
   SendVideo(mSameOrigin, mPrincipalHandle);
   AdvanceTracks();
-
-  bool finished = (!mInfo.HasAudio() || mAudioQueue.IsFinished()) &&
-                  (!mInfo.HasVideo() || mVideoQueue.IsFinished());
-
-  if (finished && !mData->mHaveSentFinish) {
-    mData->mHaveSentFinish = true;
-    mData->mStream->FinishPending();
-  }
 }
 
 TimeUnit DecodedStream::GetEndTime(TrackType aType) const {
   AssertOwnerThread();
   if (aType == TrackInfo::kAudioTrack && mInfo.HasAudio() && mData) {
     auto t = mStartTime.ref() +
              FramesToTimeUnit(mData->mAudioFramesWritten, mInfo.mAudio.mRate);
     if (t.IsValid()) {
@@ -702,16 +777,24 @@ void DecodedStream::NotifyOutput(int64_t
   // Remove audio samples that have been played by MSG from the queue.
   RefPtr<AudioData> a = mAudioQueue.PeekFront();
   for (; a && a->mTime < currentTime;) {
     RefPtr<AudioData> releaseMe = mAudioQueue.PopFront();
     a = mAudioQueue.PeekFront();
   }
 }
 
+void DecodedStream::PlayingChanged() {
+  AssertOwnerThread();
+
+  mAbstractMainThread->Dispatch(NewRunnableMethod<bool>(
+      "OutputStreamManager::SetPlaying", mOutputStreamManager,
+      &OutputStreamManager::SetPlaying, mPlaying));
+}
+
 void DecodedStream::ConnectListener() {
   AssertOwnerThread();
 
   mAudioPushListener = mAudioQueue.PushEvent().Connect(
       mOwnerThread, this, &DecodedStream::SendData);
   mAudioFinishListener = mAudioQueue.FinishEvent().Connect(
       mOwnerThread, this, &DecodedStream::SendData);
   mVideoPushListener = mVideoQueue.PushEvent().Connect(
@@ -727,19 +810,20 @@ void DecodedStream::DisconnectListener()
   mVideoPushListener.Disconnect();
   mAudioFinishListener.Disconnect();
   mVideoFinishListener.Disconnect();
 }
 
 nsCString DecodedStream::GetDebugInfo() {
   AssertOwnerThread();
   int64_t startTime = mStartTime.isSome() ? mStartTime->ToMicroseconds() : -1;
-  auto str = nsPrintfCString(
-      "DecodedStream=%p mStartTime=%" PRId64 " mLastOutputTime=%" PRId64
-      " mPlaying=%d mData=%p",
-      this, startTime, mLastOutputTime.ToMicroseconds(), mPlaying, mData.get());
+  auto str =
+      nsPrintfCString("DecodedStream=%p mStartTime=%" PRId64
+                      " mLastOutputTime=%" PRId64 " mPlaying=%d mData=%p",
+                      this, startTime, mLastOutputTime.ToMicroseconds(),
+                      mPlaying.Ref(), mData.get());
   if (mData) {
     AppendStringIfNotEmpty(str, mData->GetDebugInfo());
   }
   return std::move(str);
 }
 
 }  // namespace mozilla
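
With the single finish promise split into per-track promises, OnEnded() consumers can observe audio and video completion independently, and a resolve value of false now distinguishes teardown via Forget() from a track that actually ended in the graph. A minimal sketch of consuming one of these promises, assuming a DecodedStream held as a media::MediaSink and an owner thread (illustrative, not part of this patch):

    // Illustration only: waiting for the audio track to end in the graph.
    RefPtr<GenericPromise> ended = sink->OnEnded(TrackInfo::kAudioTrack);
    if (ended) {
      ended->Then(
          ownerThread, __func__,
          [](bool aEndedInGraph) {
            // true:  the track played out and ended in the MediaStreamGraph.
            // false: the listener was told to Forget() (e.g. on Stop())
            //        before the track ended.
          },
          [](nsresult) { /* never rejected; the holders are only resolved */ });
    }
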
--- a/dom/media/mediasink/DecodedStream.h
+++ b/dom/media/mediasink/DecodedStream.h
@@ -11,16 +11,17 @@
 #include "MediaInfo.h"
 #include "MediaSegment.h"
 #include "MediaSink.h"
 
 #include "mozilla/AbstractThread.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/MozPromise.h"
 #include "mozilla/RefPtr.h"
+#include "mozilla/StateMirroring.h"
 #include "mozilla/UniquePtr.h"
 
 namespace mozilla {
 
 class DecodedStreamData;
 class AudioData;
 class VideoData;
 class MediaStream;
@@ -35,18 +36,17 @@ class MediaQueue;
 class DecodedStream : public media::MediaSink {
   using media::MediaSink::PlaybackParams;
 
  public:
   DecodedStream(AbstractThread* aOwnerThread, AbstractThread* aMainThread,
                 MediaQueue<AudioData>& aAudioQueue,
                 MediaQueue<VideoData>& aVideoQueue,
                 OutputStreamManager* aOutputStreamManager,
-                const bool& aSameOrigin,
-                const PrincipalHandle& aPrincipalHandle);
+                const bool& aSameOrigin);
 
   // MediaSink functions.
   const PlaybackParams& GetPlaybackParams() const override;
   void SetPlaybackParams(const PlaybackParams& aParams) override;
 
   RefPtr<GenericPromise> OnEnded(TrackType aType) override;
   media::TimeUnit GetEndTime(TrackType aType) const override;
   media::TimeUnit GetPosition(TimeStamp* aTimeStamp = nullptr) const override;
@@ -60,65 +60,73 @@ class DecodedStream : public media::Medi
   void SetPreservesPitch(bool aPreservesPitch) override;
   void SetPlaying(bool aPlaying) override;
 
   nsresult Start(const media::TimeUnit& aStartTime,
                  const MediaInfo& aInfo) override;
   void Stop() override;
   bool IsStarted() const override;
   bool IsPlaying() const override;
+  void Shutdown() override;
 
   nsCString GetDebugInfo() override;
 
  protected:
   virtual ~DecodedStream();
 
  private:
   media::TimeUnit FromMicroseconds(int64_t aTime) {
     return media::TimeUnit::FromMicroseconds(aTime);
   }
   void DestroyData(UniquePtr<DecodedStreamData> aData);
-  void AdvanceTracks();
   void SendAudio(double aVolume, bool aIsSameOrigin,
                  const PrincipalHandle& aPrincipalHandle);
   void SendVideo(bool aIsSameOrigin, const PrincipalHandle& aPrincipalHandle);
+  StreamTime SentDuration();
+  void AdvanceTracks();
   void SendData();
   void NotifyOutput(int64_t aTime);
+  void NotifyTrackEnd(StreamTime aEndTime);
 
   void AssertOwnerThread() const {
     MOZ_ASSERT(mOwnerThread->IsCurrentThreadIn());
   }
 
+  void PlayingChanged();
+
   void ConnectListener();
   void DisconnectListener();
 
   const RefPtr<AbstractThread> mOwnerThread;
 
   const RefPtr<AbstractThread> mAbstractMainThread;
 
   /*
    * Main thread only members.
    */
   // Data about MediaStreams that are being fed by the decoder.
   const RefPtr<OutputStreamManager> mOutputStreamManager;
 
   /*
    * Worker thread only members.
    */
+  WatchManager<DecodedStream> mWatchManager;
   UniquePtr<DecodedStreamData> mData;
-  RefPtr<GenericPromise> mFinishPromise;
+  RefPtr<GenericPromise> mAudioEndPromise;
+  RefPtr<GenericPromise> mVideoEndPromise;
 
-  bool mPlaying;
-  const bool& mSameOrigin;                  // valid until Shutdown() is called.
-  const PrincipalHandle& mPrincipalHandle;  // valid until Shutdown() is called.
+  Watchable<bool> mPlaying;
+  const bool& mSameOrigin;  // valid until Shutdown() is called.
+  Mirror<PrincipalHandle> mPrincipalHandle;
 
   PlaybackParams mParams;
 
   media::NullableTimeUnit mStartTime;
   media::TimeUnit mLastOutputTime;
+  StreamTime mStreamTimeOffset = 0;
   MediaInfo mInfo;
 
   MediaQueue<AudioData>& mAudioQueue;
   MediaQueue<VideoData>& mVideoQueue;
 
   MediaEventListener mAudioPushListener;
   MediaEventListener mVideoPushListener;
   MediaEventListener mAudioFinishListener;
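
For context on the members introduced above (mWatchManager, mPlaying, mPrincipalHandle), below is a hedged sketch of how Gecko's state-watching and state-mirroring types are conventionally wired together. The actual DecodedStream constructor and Shutdown() bodies fall outside the hunks shown in this changeset, so treat this as illustrative only; the helper name InitStateWatching is hypothetical.

  // Illustrative sketch only, not part of this patch. Assumes it runs on the
  // owner thread, matching how mirrors are connected elsewhere in Gecko.
  void DecodedStream::InitStateWatching() {
    AssertOwnerThread();
    // Mirror the manager's canonical principal handle onto the owner thread
    // so SendAudio()/SendVideo() can read it without locking.
    mPrincipalHandle.Connect(mOutputStreamManager->CanonicalPrincipalHandle());
    // Run PlayingChanged() on the owner thread whenever mPlaying changes;
    // PlayingChanged() forwards the value to the main thread, where
    // OutputStreamManager::SetPlaying() suspends or resumes the source stream.
    mWatchManager.Watch(mPlaying, &DecodedStream::PlayingChanged);
  }
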
--- a/dom/media/mediasink/OutputStreamManager.cpp
+++ b/dom/media/mediasink/OutputStreamManager.cpp
@@ -1,151 +1,316 @@
 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaStreamGraph.h"
 #include "OutputStreamManager.h"
 
+#include "DOMMediaStream.h"
+#include "MediaStreamGraph.h"
+#include "mozilla/dom/MediaStreamTrack.h"
+#include "nsContentUtils.h"
+
 namespace mozilla {
 
+class DecodedStreamTrackSource : public dom::MediaStreamTrackSource {
+ public:
+  NS_DECL_ISUPPORTS_INHERITED
+  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecodedStreamTrackSource,
+                                           dom::MediaStreamTrackSource)
+
+  explicit DecodedStreamTrackSource(OutputStreamManager* aManager,
+                                    OutputStreamData* aData, TrackID aTrackID,
+                                    nsIPrincipal* aPrincipal,
+                                    CORSMode aCORSMode,
+                                    AbstractThread* aAbstractMainThread)
+      : dom::MediaStreamTrackSource(aPrincipal, nsString()),
+        mCORSMode(aCORSMode) {
+    MOZ_ASSERT(NS_IsMainThread());
+  }
+
+  dom::MediaSourceEnum GetMediaSource() const override {
+    return dom::MediaSourceEnum::Other;
+  }
+
+  CORSMode GetCORSMode() const override {
+    MOZ_ASSERT(NS_IsMainThread());
+    return mCORSMode;
+  }
+
+  void Stop() override {
+    MOZ_ASSERT(NS_IsMainThread());
+
+    // We don't notify the source that a track was stopped since it will keep
+    // producing tracks until the element ends. The decoder also needs the
+    // tracks it created to be live at the source since the decoder's clock is
+    // based on MediaStreams during capture.
+  }
+
+  void Disable() override {}
+
+  void Enable() override {}
+
+  void SetPrincipal(nsIPrincipal* aPrincipal) {
+    MOZ_ASSERT(NS_IsMainThread());
+    mPrincipal = aPrincipal;
+    PrincipalChanged();
+  }
+
+ protected:
+  virtual ~DecodedStreamTrackSource() { MOZ_ASSERT(NS_IsMainThread()); }
+
+  const CORSMode mCORSMode;
+};
+
+NS_IMPL_ADDREF_INHERITED(DecodedStreamTrackSource, dom::MediaStreamTrackSource)
+NS_IMPL_RELEASE_INHERITED(DecodedStreamTrackSource, dom::MediaStreamTrackSource)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DecodedStreamTrackSource)
+NS_INTERFACE_MAP_END_INHERITING(dom::MediaStreamTrackSource)
+NS_IMPL_CYCLE_COLLECTION_INHERITED(DecodedStreamTrackSource,
+                                   dom::MediaStreamTrackSource)
+
+OutputStreamData::OutputStreamData(OutputStreamManager* aManager,
+                                   AbstractThread* aAbstractMainThread,
+                                   DOMMediaStream* aDOMStream)
+    : mManager(aManager),
+      mAbstractMainThread(aAbstractMainThread),
+      mDOMStream(aDOMStream),
+      mInputStream(mDOMStream->GetInputStream()->AsProcessedStream()),
+      mPort(mInputStream->AllocateInputPort(mManager->mSourceStream)) {
+  MOZ_ASSERT(NS_IsMainThread());
+}
+
 OutputStreamData::~OutputStreamData() {
   MOZ_ASSERT(NS_IsMainThread());
-  // Break the connection to the input stream if necessary.
-  for (RefPtr<MediaInputPort>& port : mPorts) {
-    port->Destroy();
-  }
-}
 
-void OutputStreamData::Init(OutputStreamManager* aOwner,
-                            ProcessedMediaStream* aStream,
-                            TrackID aNextAvailableTrackID) {
-  mOwner = aOwner;
-  mStream = aStream;
-  mNextAvailableTrackID = aNextAvailableTrackID;
-}
-
-bool OutputStreamData::Connect(MediaStream* aStream, TrackID aInputAudioTrackID,
-                               TrackID aInputVideoTrackID) {
-  MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(mPorts.IsEmpty(), "Already connected?");
-
-  if (mStream->IsDestroyed()) {
-    return false;
-  }
-
-  for (TrackID tid : {aInputAudioTrackID, aInputVideoTrackID}) {
-    if (tid == TRACK_NONE) {
-      continue;
-    }
-    MOZ_ASSERT(IsTrackIDExplicit(tid));
-    mPorts.AppendElement(
-        mStream->AllocateInputPort(aStream, tid, mNextAvailableTrackID++));
-  }
-  return true;
-}
-
-bool OutputStreamData::Disconnect() {
-  MOZ_ASSERT(NS_IsMainThread());
-
-  // During cycle collection, DOMMediaStream can be destroyed and send
+  // During cycle collection, MediaStream can be destroyed and send
   // its Destroy message before this decoder is destroyed. So we have to
   // be careful not to send any messages after the Destroy().
-  if (mStream->IsDestroyed()) {
-    return false;
+  if (mInputStream->IsDestroyed()) {
+    return;
   }
 
   // Disconnect any existing port.
-  for (RefPtr<MediaInputPort>& port : mPorts) {
-    port->Destroy();
+  if (mPort) {
+    mPort->Destroy();
   }
-  mPorts.Clear();
-  return true;
 }
 
-bool OutputStreamData::Equals(MediaStream* aStream) const {
-  return mStream == aStream;
+void OutputStreamData::AddTrack(TrackID aTrackID, MediaSegment::Type aType,
+                                nsIPrincipal* aPrincipal, CORSMode aCORSMode,
+                                bool aAsyncAddTrack) {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  RefPtr<dom::MediaStreamTrackSource> source = new DecodedStreamTrackSource(
+      mManager, this, aTrackID, aPrincipal, aCORSMode, mAbstractMainThread);
+  RefPtr<dom::MediaStreamTrack> track =
+      mDOMStream->CreateDOMTrack(aTrackID, aType, source);
+  mTracks.AppendElement(track);
+  if (aAsyncAddTrack) {
+    GetMainThreadEventTarget()->Dispatch(
+        NewRunnableMethod<RefPtr<dom::MediaStreamTrack>>(
+            "DOMMediaStream::AddTrackInternal", mDOMStream,
+            &DOMMediaStream::AddTrackInternal, track));
+  } else {
+    mDOMStream->AddTrackInternal(track);
+  }
 }
 
-MediaStreamGraph* OutputStreamData::Graph() const { return mStream->Graph(); }
+void OutputStreamData::RemoveTrack(TrackID aTrackID) {
+  MOZ_ASSERT(NS_IsMainThread());
 
-TrackID OutputStreamData::NextAvailableTrackID() const {
-  return mNextAvailableTrackID;
+  RefPtr<dom::MediaStreamTrack> track =
+      mDOMStream->FindOwnedDOMTrack(mInputStream, aTrackID);
+  MOZ_DIAGNOSTIC_ASSERT(track);
+  mTracks.RemoveElement(track);
+  GetMainThreadEventTarget()->Dispatch(
+      NewRunnableMethod("MediaStreamTrack::OverrideEnded", track,
+                        &dom::MediaStreamTrack::OverrideEnded));
 }
 
-void OutputStreamManager::Add(ProcessedMediaStream* aStream,
-                              TrackID aNextAvailableTrackID,
-                              bool aFinishWhenEnded) {
-  MOZ_ASSERT(NS_IsMainThread());
-  // All streams must belong to the same graph.
-  MOZ_ASSERT(!Graph() || Graph() == aStream->Graph());
+void OutputStreamData::SetPrincipal(nsIPrincipal* aPrincipal) {
+  for (const RefPtr<dom::MediaStreamTrack>& track : mTracks) {
+    DecodedStreamTrackSource& source =
+        static_cast<DecodedStreamTrackSource&>(track->GetSource());
+    source.SetPrincipal(aPrincipal);
+  }
+}
 
-  // Ensure that aStream finishes the moment mDecodedStream does.
-  if (aFinishWhenEnded) {
-    aStream->QueueSetAutofinish(true);
-  }
+OutputStreamManager::OutputStreamManager(SourceMediaStream* aSourceStream,
+                                         TrackID aNextTrackID,
+                                         nsIPrincipal* aPrincipal,
+                                         CORSMode aCORSMode,
+                                         AbstractThread* aAbstractMainThread)
+    : mSourceStream(aSourceStream),
+      mAbstractMainThread(aAbstractMainThread),
+      mPrincipalHandle(
+          aAbstractMainThread,
+          aPrincipal ? MakePrincipalHandle(aPrincipal) : PRINCIPAL_HANDLE_NONE,
+          "OutputStreamManager::mPrincipalHandle (Canonical)"),
+      mPrincipal(aPrincipal),
+      mCORSMode(aCORSMode),
+      mNextTrackID(aNextTrackID),
+      mPlaying(true)  // mSourceStream always starts non-suspended
+{
+  MOZ_ASSERT(NS_IsMainThread());
+}
 
-  OutputStreamData* p = mStreams.AppendElement();
-  p->Init(this, aStream, aNextAvailableTrackID);
+void OutputStreamManager::Add(DOMMediaStream* aDOMStream) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(!mSourceStream->IsDestroyed());
+  // All streams must belong to the same graph.
+  MOZ_ASSERT(mSourceStream->Graph() == aDOMStream->GetInputStream()->Graph());
 
-  // Connect to the input stream if we have one. Otherwise the output stream
-  // will be connected in Connect().
-  if (mInputStream) {
-    p->Connect(mInputStream, mInputAudioTrackID, mInputVideoTrackID);
+  OutputStreamData* p = mStreams
+                            .AppendElement(new OutputStreamData(
+                                this, mAbstractMainThread, aDOMStream))
+                            ->get();
+  for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
+    p->AddTrack(pair.first(), pair.second(), mPrincipal, mCORSMode, false);
   }
 }
 
-void OutputStreamManager::Remove(MediaStream* aStream) {
+void OutputStreamManager::Remove(DOMMediaStream* aDOMStream) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(!mSourceStream->IsDestroyed());
+
+  mStreams.ApplyIf(
+      aDOMStream, 0, StreamComparator(),
+      [&](const UniquePtr<OutputStreamData>& aData) {
+        for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
+          aData->RemoveTrack(pair.first());
+        }
+      },
+      []() { MOZ_ASSERT_UNREACHABLE("Didn't exist"); });
+  DebugOnly<bool> rv = mStreams.RemoveElement(aDOMStream, StreamComparator());
+  MOZ_ASSERT(rv);
+}
+
+bool OutputStreamManager::HasTrack(TrackID aTrackID) {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  return mLiveTracks.Contains(aTrackID, TrackIDComparator());
+}
+
+bool OutputStreamManager::HasTracks(TrackID aAudioTrack, TrackID aVideoTrack) {
   MOZ_ASSERT(NS_IsMainThread());
-  for (int32_t i = mStreams.Length() - 1; i >= 0; --i) {
-    if (mStreams[i].Equals(aStream)) {
-      mStreams.RemoveElementAt(i);
-      break;
-    }
+
+  size_t nrExpectedTracks = 0;
+  bool asExpected = true;
+  if (IsTrackIDExplicit(aAudioTrack)) {
+    Unused << ++nrExpectedTracks;
+    asExpected = asExpected && mLiveTracks.Contains(
+                                   MakePair(aAudioTrack, MediaSegment::AUDIO),
+                                   TrackTypeComparator());
+  }
+  if (IsTrackIDExplicit(aVideoTrack)) {
+    Unused << ++nrExpectedTracks;
+    asExpected = asExpected && mLiveTracks.Contains(
+                                   MakePair(aVideoTrack, MediaSegment::VIDEO),
+                                   TrackTypeComparator());
+  }
+  asExpected = asExpected && mLiveTracks.Length() == nrExpectedTracks;
+  return asExpected;
+}
+
+size_t OutputStreamManager::NumberOfTracks() {
+  MOZ_ASSERT(NS_IsMainThread());
+  return mLiveTracks.Length();
+}
+
+void OutputStreamManager::AddTrack(TrackID aTrackID, MediaSegment::Type aType) {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(!mSourceStream->IsDestroyed());
+  MOZ_ASSERT(!HasTrack(aTrackID));
+
+  mLiveTracks.AppendElement(MakePair(aTrackID, aType));
+  for (const auto& data : mStreams) {
+    data->AddTrack(aTrackID, aType, mPrincipal, mCORSMode, true);
   }
 }
 
-void OutputStreamManager::Clear() {
+void OutputStreamManager::RemoveTrack(TrackID aTrackID) {
   MOZ_ASSERT(NS_IsMainThread());
-  mStreams.Clear();
+  MOZ_ASSERT(!mSourceStream->IsDestroyed());
+  DebugOnly<bool> rv = mLiveTracks.RemoveElement(aTrackID, TrackIDComparator());
+  MOZ_ASSERT(rv);
+  for (const auto& data : mStreams) {
+    data->RemoveTrack(aTrackID);
+  }
 }
 
-TrackID OutputStreamManager::NextAvailableTrackIDFor(
-    MediaStream* aOutputStream) const {
+void OutputStreamManager::RemoveTracks() {
   MOZ_ASSERT(NS_IsMainThread());
-  for (const OutputStreamData& out : mStreams) {
-    if (out.Equals(aOutputStream)) {
-      return out.NextAvailableTrackID();
+  MOZ_ASSERT(!mSourceStream->IsDestroyed());
+  for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
+    for (const auto& data : mStreams) {
+      data->RemoveTrack(pair.first());
     }
   }
-  return TRACK_INVALID;
-}
-
-void OutputStreamManager::Connect(MediaStream* aStream, TrackID aAudioTrackID,
-                                  TrackID aVideoTrackID) {
-  MOZ_ASSERT(NS_IsMainThread());
-  mInputStream = aStream;
-  mInputAudioTrackID = aAudioTrackID;
-  mInputVideoTrackID = aVideoTrackID;
-  for (int32_t i = mStreams.Length() - 1; i >= 0; --i) {
-    if (!mStreams[i].Connect(aStream, mInputAudioTrackID, mInputVideoTrackID)) {
-      // Probably the DOMMediaStream was GCed. Clean up.
-      mStreams.RemoveElementAt(i);
-    }
-  }
+  mLiveTracks.Clear();
 }
 
 void OutputStreamManager::Disconnect() {
   MOZ_ASSERT(NS_IsMainThread());
-  mInputStream = nullptr;
-  mInputAudioTrackID = TRACK_INVALID;
-  mInputVideoTrackID = TRACK_INVALID;
-  for (int32_t i = mStreams.Length() - 1; i >= 0; --i) {
-    if (!mStreams[i].Disconnect()) {
-      // Probably the DOMMediaStream was GCed. Clean up.
-      mStreams.RemoveElementAt(i);
+  nsTArray<Pair<TrackID, MediaSegment::Type>> liveTracks(mLiveTracks);
+  for (const auto& pair : liveTracks) {
+    RemoveTrack(pair.first());
+  }
+  MOZ_ASSERT(mLiveTracks.IsEmpty());
+  nsTArray<RefPtr<DOMMediaStream>> domStreams(mStreams.Length());
+  for (const auto& data : mStreams) {
+    domStreams.AppendElement(data->mDOMStream);
+  }
+  for (auto& domStream : domStreams) {
+    Remove(domStream);
+  }
+  MOZ_ASSERT(mStreams.IsEmpty());
+  if (!mSourceStream->IsDestroyed()) {
+    mSourceStream->Destroy();
+  }
+}
+
+AbstractCanonical<PrincipalHandle>*
+OutputStreamManager::CanonicalPrincipalHandle() {
+  return &mPrincipalHandle;
+}
+
+void OutputStreamManager::SetPrincipal(nsIPrincipal* aPrincipal) {
+  MOZ_ASSERT(NS_IsMainThread());
+  nsCOMPtr<nsIPrincipal> principal = mPrincipal;
+  if (nsContentUtils::CombineResourcePrincipals(&principal, aPrincipal)) {
+    mPrincipal = principal;
+    for (const UniquePtr<OutputStreamData>& data : mStreams) {
+      data->SetPrincipal(mPrincipal);
     }
+    mPrincipalHandle = MakePrincipalHandle(principal);
+  }
+}
+
+TrackID OutputStreamManager::NextTrackID() const {
+  MOZ_ASSERT(NS_IsMainThread());
+  return mNextTrackID;
+}
+
+TrackID OutputStreamManager::AllocateNextTrackID() {
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_RELEASE_ASSERT(IsTrackIDExplicit(mNextTrackID));
+  return mNextTrackID++;
+}
+
+void OutputStreamManager::SetPlaying(bool aPlaying) {
+  MOZ_ASSERT(NS_IsMainThread());
+  if (mPlaying == aPlaying) {
+    return;
+  }
+
+  mPlaying = aPlaying;
+  if (mPlaying) {
+    mSourceStream->Resume();
+  } else {
+    mSourceStream->Suspend();
   }
 }
 
 }  // namespace mozilla
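
To summarize the control flow implemented above, here is a hedged usage sketch that drives OutputStreamManager purely through the methods defined in this file. The real call sites live in MediaDecoder and DecodedStream and are not part of these hunks; the free-function names below are hypothetical.

  // Illustrative only, not part of this patch. All calls happen on the main
  // thread, as asserted by the implementations above.
  static void CaptureIntoDOMStream(OutputStreamManager* aManager,
                                   DOMMediaStream* aDOMStream) {
    // Register the stream; tracks already live in mLiveTracks are added to it
    // synchronously inside Add().
    aManager->Add(aDOMStream);
  }

  static void StartAudioOutputTrack(OutputStreamManager* aManager) {
    // Allocate a unique TrackID, then create a matching MediaStreamTrack in
    // every registered DOMMediaStream; streams registered later are
    // back-filled from mLiveTracks.
    TrackID id = aManager->AllocateNextTrackID();
    aManager->AddTrack(id, MediaSegment::AUDIO);
  }

  static void StopCapture(OutputStreamManager* aManager) {
    // Ends every track in every registered stream and destroys the source
    // stream.
    aManager->Disconnect();
  }
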
--- a/dom/media/mediasink/OutputStreamManager.h
+++ b/dom/media/mediasink/OutputStreamManager.h
@@ -2,92 +2,153 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef OutputStreamManager_h
 #define OutputStreamManager_h
 
+#include "mozilla/CORSMode.h"
 #include "mozilla/RefPtr.h"
+#include "mozilla/StateMirroring.h"
 #include "nsTArray.h"
 #include "TrackID.h"
 
 namespace mozilla {
 
+class DOMMediaStream;
 class MediaInputPort;
 class MediaStream;
-class MediaStreamGraph;
 class OutputStreamManager;
 class ProcessedMediaStream;
+class SourceMediaStream;
+
+namespace dom {
+class MediaStreamTrack;
+}
 
 class OutputStreamData {
  public:
+  OutputStreamData(OutputStreamManager* aManager,
+                   AbstractThread* aAbstractMainThread,
+                   DOMMediaStream* aDOMStream);
+  OutputStreamData(const OutputStreamData& aOther) = delete;
+  OutputStreamData(OutputStreamData&& aOther) = delete;
   ~OutputStreamData();
-  void Init(OutputStreamManager* aOwner, ProcessedMediaStream* aStream,
-            TrackID aNextAvailableTrackID);
 
-  // Connect the given input stream's audio and video tracks to mStream.
-  // Return false if mStream is already destroyed, otherwise true.
-  bool Connect(MediaStream* aStream, TrackID aAudioTrackID,
-               TrackID aVideoTrackID);
-  // Disconnect mStream from its input stream.
-  // Return false if mStream is already destroyed, otherwise true.
-  bool Disconnect();
-  // Return true if aStream points to the same object as mStream.
-  // Used by OutputStreamManager to remove an output stream.
-  bool Equals(MediaStream* aStream) const;
-  // Return the graph mStream belongs to.
-  MediaStreamGraph* Graph() const;
-  // The next TrackID that will not cause a collision in mStream.
-  TrackID NextAvailableTrackID() const;
+  // Creates and adds a MediaStreamTrack to mDOMStream so that we can feed
+  // data to it. If aAsyncAddTrack is true we dispatch a task to add the
+  // created track to mDOMStream, as the spec requires for firing the
+  // "addtrack" event.
+  void AddTrack(TrackID aTrackID, MediaSegment::Type aType,
+                nsIPrincipal* aPrincipal, CORSMode aCORSMode,
+                bool aAsyncAddTrack);
+  // Ends the MediaStreamTrack with aTrackID. Calling this with a TrackID that
+  // doesn't exist in mDOMStream is an error.
+  void RemoveTrack(TrackID aTrackID);
+
+  void SetPrincipal(nsIPrincipal* aPrincipal);
+
+  // The manager that owns the source stream feeding data to our tracks.
+  const RefPtr<OutputStreamManager> mManager;
+  const RefPtr<AbstractThread> mAbstractMainThread;
+  // The DOMMediaStream we add tracks to and represent.
+  const RefPtr<DOMMediaStream> mDOMStream;
+  // The input stream of mDOMStream.
+  const RefPtr<ProcessedMediaStream> mInputStream;
 
  private:
-  OutputStreamManager* mOwner;
-  RefPtr<ProcessedMediaStream> mStream;
-  // mPort connects an input stream to our mStream.
-  nsTArray<RefPtr<MediaInputPort>> mPorts;
-  // For guaranteeing TrackID uniqueness in our mStream.
-  TrackID mNextAvailableTrackID = TRACK_INVALID;
+  // mPort connects mSourceStream to mInputStream.
+  const RefPtr<MediaInputPort> mPort;
+
+  // Tracks that have been added and not yet removed.
+  nsTArray<RefPtr<dom::MediaStreamTrack>> mTracks;
 };
 
 class OutputStreamManager {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OutputStreamManager);
 
  public:
+  explicit OutputStreamManager(SourceMediaStream* aSourceStream,
+                               TrackID aNextTrackID, nsIPrincipal* aPrincipal,
+                               CORSMode aCORSMode,
+                               AbstractThread* aAbstractMainThread);
   // Add the output stream to the collection.
-  void Add(ProcessedMediaStream* aStream, TrackID aNextAvailableTrackID,
-           bool aFinishWhenEnded);
+  void Add(DOMMediaStream* aDOMStream);
   // Remove the output stream from the collection.
-  void Remove(MediaStream* aStream);
-  // Clear all output streams from the collection.
-  void Clear();
-  // The next TrackID that will not cause a collision in aOutputStream.
-  TrackID NextAvailableTrackIDFor(MediaStream* aOutputStream) const;
-  // Return true if the collection empty.
+  void Remove(DOMMediaStream* aDOMStream);
+  // Returns true if aTrackID has been added to all output streams.
+  bool HasTrack(TrackID aTrackID);
+  // Returns true if the given tracks and no others are currently live.
+  // Pass a non-explicit TrackID to ignore that track type.
+  bool HasTracks(TrackID aAudioTrack, TrackID aVideoTrack);
+  // Returns the number of live tracks.
+  size_t NumberOfTracks();
+  // Add aTrackID to all output streams.
+  void AddTrack(TrackID aTrackID, MediaSegment::Type aType);
+  // Remove aTrackID from all output streams.
+  void RemoveTrack(TrackID aTrackID);
+  // Remove all added tracks from all output streams.
+  void RemoveTracks();
+  // Disconnect mSourceStream from all output streams.
+  void Disconnect();
+  // The principal handle for the underlying decoder.
+  AbstractCanonical<PrincipalHandle>* CanonicalPrincipalHandle();
+  // Called when the underlying decoder's principal has changed.
+  void SetPrincipal(nsIPrincipal* aPrincipal);
+  // The CORSMode for the media element owning the decoder.
+  AbstractCanonical<CORSMode>* CanonicalCORSMode();
+  // Called when the CORSMode for the media element owning the decoder has
+  // changed.
+  void SetCORSMode(CORSMode aCORSMode);
+  // Returns the TrackID that would be used the next time a track is allocated.
+  TrackID NextTrackID() const;
+  // Like NextTrackID() but advances internal state, so the next call returns a
+  // new unique TrackID.
+  TrackID AllocateNextTrackID();
+  // Called by DecodedStream when its playing state changes. While not playing
+  // we suspend mSourceStream.
+  void SetPlaying(bool aPlaying);
+  // Return true if the collection of output streams is empty.
   bool IsEmpty() const {
     MOZ_ASSERT(NS_IsMainThread());
     return mStreams.IsEmpty();
   }
-  // Connect the given input stream's tracks to all output streams.
-  void Connect(MediaStream* aStream, TrackID aAudioTrackID,
-               TrackID aVideoTrackID);
-  // Disconnect the input stream to all output streams.
-  void Disconnect();
-  // Return the graph these streams belong to or null if empty.
-  MediaStreamGraph* Graph() const {
-    MOZ_ASSERT(NS_IsMainThread());
-    return !IsEmpty() ? mStreams[0].Graph() : nullptr;
-  }
+
+  // The source stream the decoder feeds data into. Kept so output streams
+  // added later can be connected to it.
+  const RefPtr<SourceMediaStream> mSourceStream;
+  const RefPtr<AbstractThread> mAbstractMainThread;
 
  private:
-  ~OutputStreamManager() {}
-  // Keep the input stream so we can connect the output streams that
-  // are added after Connect().
-  RefPtr<MediaStream> mInputStream;
-  TrackID mInputAudioTrackID = TRACK_INVALID;
-  TrackID mInputVideoTrackID = TRACK_INVALID;
-  nsTArray<OutputStreamData> mStreams;
+  ~OutputStreamManager() = default;
+  struct StreamComparator {
+    static bool Equals(const UniquePtr<OutputStreamData>& aData,
+                       DOMMediaStream* aStream) {
+      return aData->mDOMStream == aStream;
+    }
+  };
+  struct TrackIDComparator {
+    static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
+                       TrackID aTrackID) {
+      return aLiveTrack.first() == aTrackID;
+    }
+  };
+  struct TrackTypeComparator {
+    static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
+                       const Pair<TrackID, MediaSegment::Type>& aOther) {
+      return aLiveTrack.first() == aOther.first() &&
+             aLiveTrack.second() == aOther.second();
+    }
+  };
+  nsTArray<UniquePtr<OutputStreamData>> mStreams;
+  nsTArray<Pair<TrackID, MediaSegment::Type>> mLiveTracks;
+  Canonical<PrincipalHandle> mPrincipalHandle;
+  nsCOMPtr<nsIPrincipal> mPrincipal;
+  const CORSMode mCORSMode;
+  TrackID mNextTrackID;
+  bool mPlaying;
 };
 
 }  // namespace mozilla
 
 #endif  // OutputStreamManager_h
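
The comparator structs above (StreamComparator, TrackIDComparator, TrackTypeComparator) follow nsTArray's comparator protocol: a static Equals(element, key) lets Contains(), RemoveElement() and ApplyIf() search by a key type that differs from the element type. A minimal sketch of that protocol, with made-up types that are not part of this patch:

  // Sketch of the nsTArray comparator protocol used above; Entry and the
  // helper below are illustrative examples only.
  struct Entry {
    int mId;
    nsCString mLabel;
  };
  struct IdComparator {
    static bool Equals(const Entry& aEntry, int aId) {
      return aEntry.mId == aId;
    }
  };
  static bool HasEntry(const nsTArray<Entry>& aEntries, int aId) {
    // Contains() accepts any comparator that exposes Equals(element, key).
    return aEntries.Contains(aId, IdComparator());
  }
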
--- a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
+++ b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
@@ -87,17 +87,17 @@ public:
 
 };
 
 class FakeAudioStreamTrack : public mozilla::dom::AudioStreamTrack {
 
 public:
 
   FakeAudioStreamTrack()
-    : AudioStreamTrack(new DOMMediaStream(nullptr, nullptr), 0, 1,
+    : AudioStreamTrack(new DOMMediaStream(nullptr), 0, 1,
                        new FakeMediaStreamTrackSource())
     , mMutex("Fake AudioStreamTrack")
     , mStop(false)
     , mCount(0)
   {
     NS_NewTimerWithFuncCallback(getter_AddRefs(mTimer),
                                 FakeAudioStreamTrackGenerateData, this, 20,
                                 nsITimer::TYPE_REPEATING_SLACK,
@@ -114,21 +114,16 @@ public:
   }
 
   virtual void AddListener(MediaStreamTrackListener* aListener) override
   {
     mozilla::MutexAutoLock lock(mMutex);
     mListeners.push_back(aListener);
   }
 
-  virtual already_AddRefed<mozilla::dom::MediaStreamTrack> CloneInternal(DOMMediaStream* aOwningStream, TrackID aTrackID) override
-  {
-    return RefPtr<MediaStreamTrack>(new FakeAudioStreamTrack).forget();
-  }
-
   private:
     std::vector<MediaStreamTrackListener*> mListeners;
     mozilla::Mutex mMutex;
     bool mStop;
     nsCOMPtr<nsITimer> mTimer;
     int mCount;
 
     static void FakeAudioStreamTrackGenerateData(nsITimer* timer, void* closure)