Bug 1493613 - Move MediaStream control from DOMMediaStream to MediaStreamTrack. r=padenot
author Andreas Pehrson <apehrson@mozilla.com>
Wed, 31 Jul 2019 07:58:17 +0000
changeset 485576 c54cb3c109922bc15fb6d0ca67e5f2a05980c1a5
parent 485575 0b03dd9d20ace235502714101097fe34d1864563
child 485577 cab1435d2f337308dd7a021a141d8995b2a34446
push id 91325
push user pehrsons@gmail.com
push date Wed, 31 Jul 2019 10:50:47 +0000
treeherder autoland@cab1435d2f33
reviewers padenot
bugs 1493613
milestone 70.0a1
Bug 1493613 - Move MediaStream control from DOMMediaStream to MediaStreamTrack. r=padenot

This is inherently large, because modifying these bits of DOMMediaStream and MediaStreamTrack affects all consumers and producers of all DOMMediaStreams and MediaStreamTracks.

Things are generally much simpler now. Producers of tracks now create a MediaStream in the graph, add it to a MediaStreamTrackSource subclass that takes ownership of it, and add the source to a MediaStreamTrack. Should the producer need a DOMMediaStream, it is now much simpler to create, as the only thing needed is the current window. The stream is a rather simple wrapper around an array of MediaStreamTracks.

HTMLMediaElement is still not as straightforward as other consumers, since it consumes the DOMMediaStream directly rather than a set of tracks. The new MediaStreamRenderer helper class bridges the gap between this fact and the new track-based MediaStreamGraph interface, as it needs to juggle registering multiple audio tracks for audio output. It hooks into existing HTMLMediaElement logic and brings a welcome simplification to all the glue previously needed there.

Differential Revision: https://phabricator.services.mozilla.com/D37934
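The following is a minimal, illustrative C++ sketch of the producer pattern described above, assuming the Gecko-internal types touched by this patch (MediaStreamGraph, MediaStream, MediaStreamTrackSource, VideoStreamTrack, DOMMediaStream). ExampleTrackSource and ProduceVideoTrack are hypothetical names, XPCOM refcounting and cycle-collection boilerplate is omitted, and actually feeding media into the source stream is out of scope.

// Hypothetical illustration (not part of this patch) of the new producer
// pattern, written as if inside mozilla::dom.
class ExampleTrackSource : public MediaStreamTrackSource {
 public:
  ExampleTrackSource(nsIPrincipal* aPrincipal, MediaStream* aStream)
      : MediaStreamTrackSource(aPrincipal, nsString()), mStream(aStream) {}

  MediaSourceEnum GetMediaSource() const override {
    return MediaSourceEnum::Other;
  }

  // The source owns the graph stream and tears it down when no track needs
  // it anymore.
  void Destroy() override {
    if (mStream) {
      mStream->Destroy();
      mStream = nullptr;
    }
  }

 private:
  ~ExampleTrackSource() = default;

  RefPtr<MediaStream> mStream;
};

already_AddRefed<DOMMediaStream> ProduceVideoTrack(nsPIDOMWindowInner* aWindow,
                                                   MediaStreamGraph* aGraph,
                                                   nsIPrincipal* aPrincipal) {
  // 1. Create a stream in the graph to feed media into (appending actual
  //    frames to it is the producer's business and is not shown here).
  MediaStream* stream = aGraph->CreateSourceStream();

  // 2. Hand ownership of the graph stream to a MediaStreamTrackSource.
  auto source = MakeRefPtr<ExampleTrackSource>(aPrincipal, stream);

  // 3. Wrap the source in a MediaStreamTrack; the current window is all the
  //    extra context that is needed.
  const TrackID videoTrackId = 1;
  auto track =
      MakeRefPtr<VideoStreamTrack>(aWindow, stream, videoTrackId, source);

  // 4. Optionally expose the track through a DOMMediaStream, which is now a
  //    thin wrapper around an array of MediaStreamTracks.
  auto domStream = MakeRefPtr<DOMMediaStream>(aWindow);
  domStream->AddTrackInternal(track);
  return domStream.forget();
}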
dom/html/HTMLCanvasElement.cpp
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/AudioStreamTrack.cpp
dom/media/AudioStreamTrack.h
dom/media/AudioTrack.cpp
dom/media/AudioTrack.h
dom/media/CanvasCaptureMediaStream.cpp
dom/media/CanvasCaptureMediaStream.h
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/MediaRecorder.cpp
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/MediaStreamTrack.cpp
dom/media/MediaStreamTrack.h
dom/media/MediaStreamWindowCapturer.cpp
dom/media/MediaStreamWindowCapturer.h
dom/media/MediaTrackList.cpp
dom/media/MediaTrackList.h
dom/media/VideoStreamTrack.cpp
dom/media/VideoStreamTrack.h
dom/media/VideoTrack.cpp
dom/media/VideoTrack.h
dom/media/mediasink/DecodedStream.cpp
dom/media/mediasink/OutputStreamManager.cpp
dom/media/mediasink/OutputStreamManager.h
dom/media/moz.build
dom/media/tests/mochitest/test_getUserMedia_GC_MediaStream.html
dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
dom/media/webaudio/MediaStreamAudioDestinationNode.h
dom/media/webaudio/MediaStreamAudioSourceNode.cpp
dom/media/webaudio/MediaStreamAudioSourceNode.h
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
testing/web-platform/meta/mediacapture-streams/MediaStream-clone.https.html.ini
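Before the diff proper, a short sketch of the consumer-side pattern that the new MediaStreamRenderer in HTMLMediaElement.cpp is built around: per-track audio outputs keyed on the element, and the selected video track rendering into the element's VideoFrameContainer. StartRendering and its parameters are hypothetical; AddAudioOutput, SetAudioOutputVolume and AddVideoOutput are the track-level APIs this patch switches the element to.

// Hypothetical, simplified illustration of what MediaStreamRenderer::Start()
// does for the element's enabled tracks; not part of this patch.
static void StartRendering(const nsTArray<RefPtr<AudioStreamTrack>>& aAudioTracks,
                           VideoStreamTrack* aVideoTrack,
                           VideoFrameContainer* aVideoContainer,
                           void* aAudioOutputKey, float aVolume) {
  for (const auto& track : aAudioTracks) {
    // Each audio track gets its own audio output, keyed on the element,
    // rather than one output registered on the whole DOMMediaStream.
    track->AddAudioOutput(aAudioOutputKey);
    track->SetAudioOutputVolume(aAudioOutputKey, aVolume);
  }
  if (aVideoTrack && aVideoContainer) {
    // The selected video track renders straight into the element's
    // VideoFrameContainer.
    aVideoTrack->AddVideoOutput(aVideoContainer);
  }
}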
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -5,26 +5,26 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/HTMLCanvasElement.h"
 
 #include "ImageEncoder.h"
 #include "jsapi.h"
 #include "jsfriendapi.h"
 #include "Layers.h"
-#include "MediaSegment.h"
+#include "MediaStreamGraph.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Base64.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/dom/CanvasCaptureMediaStream.h"
 #include "mozilla/dom/CanvasRenderingContext2D.h"
 #include "mozilla/dom/Event.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/HTMLCanvasElementBinding.h"
-#include "mozilla/dom/MediaStreamTrack.h"
+#include "mozilla/dom/VideoStreamTrack.h"
 #include "mozilla/dom/MouseEvent.h"
 #include "mozilla/dom/OffscreenCanvas.h"
 #include "mozilla/EventDispatcher.h"
 #include "mozilla/gfx/Rect.h"
 #include "mozilla/layers/AsyncCanvasRenderer.h"
 #include "mozilla/layers/WebRenderCanvasRenderer.h"
 #include "mozilla/layers/WebRenderUserData.h"
 #include "mozilla/MouseEvents.h"
@@ -672,34 +672,29 @@ already_AddRefed<CanvasCaptureMediaStrea
     return nullptr;
   }
 
   if (!mCurrentContext) {
     aRv.Throw(NS_ERROR_NOT_INITIALIZED);
     return nullptr;
   }
 
-  RefPtr<CanvasCaptureMediaStream> stream =
-      CanvasCaptureMediaStream::CreateSourceStream(window, this);
-  if (!stream) {
-    aRv.Throw(NS_ERROR_FAILURE);
-    return nullptr;
-  }
+  auto stream = MakeRefPtr<CanvasCaptureMediaStream>(window, this);
 
-  TrackID videoTrackId = 1;
+  const TrackID videoTrackId = 1;
   nsCOMPtr<nsIPrincipal> principal = NodePrincipal();
   nsresult rv = stream->Init(aFrameRate, videoTrackId, principal);
   if (NS_FAILED(rv)) {
     aRv.Throw(rv);
     return nullptr;
   }
 
   RefPtr<MediaStreamTrack> track =
-      stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO,
-                             new CanvasCaptureTrackSource(principal, stream));
+      new VideoStreamTrack(window, stream->GetSourceStream(), videoTrackId,
+                           new CanvasCaptureTrackSource(principal, stream));
   stream->AddTrackInternal(track);
 
   // Check site-specific permission and display prompt if appropriate.
   // If no permission, arrange for the frame capture listener to return
   // all-white, opaque image data.
   bool usePlaceholder = !CanvasUtils::IsImageExtractionAllowed(
       OwnerDoc(), nsContentUtils::GetCurrentJSContext(), aSubjectPrincipal);
 
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -33,18 +33,19 @@
 #include "MediaContainerType.h"
 #include "MediaError.h"
 #include "MediaManager.h"
 #include "MediaMetadataManager.h"
 #include "MediaResource.h"
 #include "MediaShutdownManager.h"
 #include "MediaSourceDecoder.h"
 #include "MediaStreamError.h"
-#include "MediaStreamGraph.h"
+#include "MediaStreamGraphImpl.h"
 #include "MediaStreamListener.h"
+#include "MediaStreamWindowCapturer.h"
 #include "MediaTrackList.h"
 #include "SVGObserverUtils.h"
 #include "TimeRanges.h"
 #include "VideoFrameContainer.h"
 #include "VideoOutput.h"
 #include "VideoStreamTrack.h"
 #include "base/basictypes.h"
 #include "jsapi.h"
@@ -414,64 +415,291 @@ class HTMLMediaElement::FirstFrameListen
 
  private:
   // Whether a frame with a concrete size has been received. May only be
   // accessed on the MSG's appending thread. (this is a direct listener so we
   // get called by whoever is producing this track's data)
   bool mInitialSizeFound = false;
 };
 
+/**
+ * Helper class that manages audio and video outputs for all enabled tracks in a
+ * media element. It also manages calculating the current time when playing a
+ * MediaStream.
+ */
+class HTMLMediaElement::MediaStreamRenderer
+    : public DOMMediaStream::TrackListener {
+ public:
+  NS_INLINE_DECL_REFCOUNTING(MediaStreamRenderer)
+
+  MediaStreamRenderer(AbstractThread* aMainThread,
+                      VideoFrameContainer* aVideoContainer,
+                      void* aAudioOutputKey)
+      : mVideoContainer(aVideoContainer),
+        mAudioOutputKey(aAudioOutputKey),
+        mWatchManager(this, aMainThread) {}
+
+  void UpdateGraphTime() {
+    mGraphTime = mGraph->CurrentTime() - *mGraphTimeOffset;
+  }
+
+  void Start() {
+    if (mRendering) {
+      return;
+    }
+
+    mRendering = true;
+
+    if (!mGraph) {
+      return;
+    }
+
+    *mGraphTimeOffset = mGraph->CurrentTime() - mGraphTime;
+
+    mWatchManager.Watch(mGraph->CurrentTime(),
+                        &MediaStreamRenderer::UpdateGraphTime);
+
+    for (const auto& t : mAudioTracks) {
+      if (t) {
+        t->AsAudioStreamTrack()->AddAudioOutput(mAudioOutputKey);
+        t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
+                                                      mAudioOutputVolume);
+      }
+    }
+
+    if (mVideoTrack) {
+      mVideoTrack->AsVideoStreamTrack()->AddVideoOutput(mVideoContainer);
+    }
+  }
+
+  void Stop() {
+    if (!mRendering) {
+      return;
+    }
+
+    mRendering = false;
+
+    if (!mGraph) {
+      return;
+    }
+
+    mWatchManager.Unwatch(mGraph->CurrentTime(),
+                          &MediaStreamRenderer::UpdateGraphTime);
+
+    for (const auto& t : mAudioTracks) {
+      if (t) {
+        t->AsAudioStreamTrack()->RemoveAudioOutput(mAudioOutputKey);
+      }
+    }
+
+    if (mVideoTrack) {
+      mVideoTrack->AsVideoStreamTrack()->RemoveVideoOutput(mVideoContainer);
+    }
+  }
+
+  void SetAudioOutputVolume(float aVolume) {
+    if (mAudioOutputVolume == aVolume) {
+      return;
+    }
+    mAudioOutputVolume = aVolume;
+    if (!mRendering) {
+      return;
+    }
+    for (const auto& t : mAudioTracks) {
+      if (t) {
+        t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
+                                                      mAudioOutputVolume);
+      }
+    }
+  }
+
+  void AddTrack(AudioStreamTrack* aTrack) {
+    MOZ_DIAGNOSTIC_ASSERT(!mAudioTracks.Contains(aTrack));
+    mAudioTracks.AppendElement(aTrack);
+    EnsureGraph();
+    if (mRendering) {
+      aTrack->AddAudioOutput(mAudioOutputKey);
+      aTrack->SetAudioOutputVolume(mAudioOutputKey, mAudioOutputVolume);
+    }
+  }
+  void AddTrack(VideoStreamTrack* aTrack) {
+    MOZ_DIAGNOSTIC_ASSERT(!mVideoTrack);
+    if (!mVideoContainer) {
+      return;
+    }
+    mVideoTrack = aTrack;
+    EnsureGraph();
+    if (mRendering) {
+      aTrack->AddVideoOutput(mVideoContainer);
+    }
+  }
+
+  void RemoveTrack(AudioStreamTrack* aTrack) {
+    MOZ_DIAGNOSTIC_ASSERT(mAudioTracks.Contains(aTrack));
+    if (mRendering) {
+      aTrack->RemoveAudioOutput(mAudioOutputKey);
+    }
+    mAudioTracks.RemoveElement(aTrack);
+  }
+  void RemoveTrack(VideoStreamTrack* aTrack) {
+    MOZ_DIAGNOSTIC_ASSERT(mVideoTrack == aTrack);
+    if (!mVideoContainer) {
+      return;
+    }
+    if (mRendering) {
+      aTrack->RemoveVideoOutput(mVideoContainer);
+    }
+    mVideoTrack = nullptr;
+  }
+
+  double CurrentTime() const {
+    if (!mGraph) {
+      return 0.0;
+    }
+
+    return mGraph->MediaTimeToSeconds(mGraphTime);
+  }
+
+  Watchable<GraphTime>& CurrentGraphTime() { return mGraphTime; }
+
+  // Set if we're rendering video.
+  const RefPtr<VideoFrameContainer> mVideoContainer;
+
+  // Set if we're rendering audio, nullptr otherwise.
+  void* const mAudioOutputKey;
+
+ private:
+  ~MediaStreamRenderer() {
+    for (const auto& t : nsTArray<WeakPtr<MediaStreamTrack>>(mAudioTracks)) {
+      if (t) {
+        RemoveTrack(t->AsAudioStreamTrack());
+      }
+    }
+    if (mVideoTrack) {
+      RemoveTrack(mVideoTrack->AsVideoStreamTrack());
+    }
+
+    MOZ_DIAGNOSTIC_ASSERT(mAudioTracks.IsEmpty());
+    MOZ_DIAGNOSTIC_ASSERT(!mVideoTrack);
+  };
+
+  void EnsureGraph() {
+    if (mGraph) {
+      return;
+    }
+
+    MediaStreamGraph* graph = nullptr;
+    for (const auto& t : mAudioTracks) {
+      if (t && !t->Ended()) {
+        graph = t->Graph();
+        break;
+      }
+    }
+
+    if (!graph && mVideoTrack && !mVideoTrack->Ended()) {
+      graph = mVideoTrack->Graph();
+    }
+
+    if (!graph) {
+      return;
+    }
+
+    mGraph = static_cast<MediaStreamGraphImpl*>(graph);
+
+    // The current graph time will represent 0 for this session.
+    mGraphTimeOffset = Some(mGraph->CurrentTime().Ref());
+    mGraphTime = 0;
+
+    if (mRendering) {
+      mWatchManager.Watch(mGraph->CurrentTime(),
+                          &MediaStreamRenderer::UpdateGraphTime);
+    }
+  }
+
+  // True when all tracks are being rendered, i.e., when the media element is
+  // playing.
+  bool mRendering = false;
+
+  // The audio output volume for all audio tracks.
+  float mAudioOutputVolume = 1.0f;
+
+  // WatchManager for mGraphTime.
+  WatchManager<MediaStreamRenderer> mWatchManager;
+
+  // The MediaStreamGraph used to track current time. Set once the first track
+  // is added.
+  RefPtr<MediaStreamGraphImpl> mGraph;
+
+  // Watchable that relays the graph's currentTime updates to the media element
+  // only while we're rendering. This is the current time of the rendering in
+  // GraphTime units.
+  Watchable<GraphTime> mGraphTime = {0, "MediaStreamRenderer::mGraphTime"};
+
+  // Nothing until we start playing a track from mSrcStream. Then the offset in
+  // mGraph's GraphTime at which started rendering the first track.
+  Maybe<GraphTime> mGraphTimeOffset;
+
+  // Currently enabled (and rendered) audio tracks.
+  nsTArray<WeakPtr<MediaStreamTrack>> mAudioTracks;
+
+  // Currently selected (and rendered) video track.
+  WeakPtr<MediaStreamTrack> mVideoTrack;
+};
+
 class HTMLMediaElement::StreamCaptureTrackSource
     : public MediaStreamTrackSource,
       public MediaStreamTrackSource::Sink {
  public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource,
                                            MediaStreamTrackSource)
 
-  StreamCaptureTrackSource(HTMLMediaElement* aElement,
-                           MediaStreamTrackSource* aCapturedTrackSource,
-                           DOMMediaStream* aOwningStream,
-                           TrackID aDestinationTrackID)
+  StreamCaptureTrackSource(MediaStreamTrackSource* aCapturedTrackSource,
+                           ProcessedMediaStream* aStream, MediaInputPort* aPort)
       : MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
                                nsString()),
-        mElement(aElement),
         mCapturedTrackSource(aCapturedTrackSource),
-        mOwningStream(aOwningStream),
-        mDestinationTrackID(aDestinationTrackID) {
-    MOZ_ASSERT(mElement);
+        mStream(aStream),
+        mPort(aPort) {
     MOZ_ASSERT(mCapturedTrackSource);
-    MOZ_ASSERT(mOwningStream);
-    MOZ_ASSERT(IsTrackIDExplicit(mDestinationTrackID));
+    MOZ_ASSERT(mStream);
+    MOZ_ASSERT(mPort);
 
     mCapturedTrackSource->RegisterSink(this);
   }
 
+  void SetEnabled(bool aEnabled) {
+    if (!mStream) {
+      return;
+    }
+    mStream->SetTrackEnabled(mPort->GetDestinationTrackId(),
+                             aEnabled ? DisabledTrackMode::ENABLED
+                                      : DisabledTrackMode::SILENCE_FREEZE);
+  }
+
   void Destroy() override {
     if (mCapturedTrackSource) {
       mCapturedTrackSource->UnregisterSink(this);
       mCapturedTrackSource = nullptr;
     }
+    if (mStream) {
+      mStream->Destroy();
+      mStream = nullptr;
+    }
+    if (mPort) {
+      mPort->Destroy();
+      mPort = nullptr;
+    }
   }
 
   MediaSourceEnum GetMediaSource() const override {
     return MediaSourceEnum::Other;
   }
 
-  void Stop() override {
-    if (mElement && mElement->mSrcStream) {
-      // Only notify if we're still playing the source stream. GC might have
-      // cleared it before the track sources.
-      mElement->NotifyOutputTrackStopped(mOwningStream, mDestinationTrackID);
-    }
-    mElement = nullptr;
-    mOwningStream = nullptr;
-
-    Destroy();
-  }
+  void Stop() override { Destroy(); }
 
   /**
    * Do not keep the track source alive. The source lifetime is controlled by
    * its associated tracks.
    */
   bool KeepsSourceAlive() const override { return false; }
 
   /**
@@ -497,35 +725,47 @@ class HTMLMediaElement::StreamCaptureTra
     if (!mCapturedTrackSource) {
       // This could happen during shutdown.
       return;
     }
 
     MediaStreamTrackSource::MutedChanged(aNewState);
   }
 
+  void OverrideEnded() override {
+    if (!mCapturedTrackSource) {
+      // This could happen during shutdown.
+      return;
+    }
+
+    Destroy();
+    MediaStreamTrackSource::OverrideEnded();
+  }
+
  private:
-  virtual ~StreamCaptureTrackSource() = default;
-
-  RefPtr<HTMLMediaElement> mElement;
+  virtual ~StreamCaptureTrackSource() {
+    MOZ_ASSERT(!mCapturedTrackSource);
+    MOZ_ASSERT(!mStream);
+    MOZ_ASSERT(!mPort);
+  };
+
   RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
-  RefPtr<DOMMediaStream> mOwningStream;
-  TrackID mDestinationTrackID;
+  RefPtr<ProcessedMediaStream> mStream;
+  RefPtr<MediaInputPort> mPort;
 };
 
 NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
                          MediaStreamTrackSource)
 NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
                           MediaStreamTrackSource)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
     HTMLMediaElement::StreamCaptureTrackSource)
 NS_INTERFACE_MAP_END_INHERITING(MediaStreamTrackSource)
 NS_IMPL_CYCLE_COLLECTION_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
-                                   MediaStreamTrackSource, mElement,
-                                   mCapturedTrackSource, mOwningStream)
+                                   MediaStreamTrackSource, mCapturedTrackSource)
 
 /**
  * There is a reference cycle involving this class: MediaLoadListener
  * holds a reference to the HTMLMediaElement, which holds a reference
  * to an nsIChannel, which holds a reference to this listener.
  * We break the reference cycle in OnStartRequest by clearing mElement.
  */
 class HTMLMediaElement::MediaLoadListener final
@@ -1589,20 +1829,17 @@ already_AddRefed<layers::Image> HTMLMedi
 }
 
 bool HTMLMediaElement::HasSuspendTaint() const {
   MOZ_ASSERT(!mDecoder || (mDecoder->HasSuspendTaint() == mHasSuspendTaint));
   return mHasSuspendTaint;
 }
 
 already_AddRefed<DOMMediaStream> HTMLMediaElement::GetSrcObject() const {
-  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(),
-               "MediaStream should have been set up properly");
-  RefPtr<DOMMediaStream> stream = mSrcAttrStream;
-  return stream.forget();
+  return do_AddRef(mSrcAttrStream);
 }
 
 void HTMLMediaElement::SetSrcObject(DOMMediaStream& aValue) {
   SetSrcObject(&aValue);
 }
 
 void HTMLMediaElement::SetSrcObject(DOMMediaStream* aValue) {
   mSrcAttrStream = aValue;
@@ -1631,20 +1868,16 @@ nsresult HTMLMediaElement::OnChannelRedi
 void HTMLMediaElement::ShutdownDecoder() {
   RemoveMediaElementFromURITable();
   NS_ASSERTION(mDecoder, "Must have decoder to shut down");
 
   mWaitingForKeyListener.DisconnectIfExists();
   if (mMediaSource) {
     mMediaSource->CompletePendingTransactions();
   }
-  if (!mOutputStreams.IsEmpty()) {
-    mNextAvailableMediaDecoderOutputTrackID =
-        mDecoder->GetNextOutputStreamTrackID();
-  }
   DiscardFinishWhenEndedOutputStreams();
   mDecoder->Shutdown();
   DDUNLINKCHILD(mDecoder.get());
   mDecoder = nullptr;
   ReportAudioTrackSilenceProportionTelemetry();
 }
 
 void HTMLMediaElement::ReportPlayedTimeAfterBlockedTelemetry() {
@@ -1982,16 +2215,18 @@ void HTMLMediaElement::ResetState() {
   // There might be a pending MediaDecoder::PlaybackPositionChanged() which
   // will overwrite |mMediaInfo.mVideo.mDisplay| in UpdateMediaSize() to give
   // staled videoWidth and videoHeight. We have to call ForgetElement() here
   // such that the staled callbacks won't reach us.
   if (mVideoFrameContainer) {
     mVideoFrameContainer->ForgetElement();
     mVideoFrameContainer = nullptr;
   }
+  // mMediaStreamRenderer has a strong reference to mVideoFrameContainer.
+  mMediaStreamRenderer = nullptr;
 }
 
 void HTMLMediaElement::SelectResourceWrapper() {
   SelectResource();
   MaybeBeginCloningVisually();
   mIsRunningSelectResource = false;
   mHaveQueuedSelectResource = false;
   mIsDoingExplicitLoad = false;
@@ -2106,25 +2341,30 @@ void HTMLMediaElement::NotifyMediaTrackE
       return;
     }
     mDisableVideo = false;
   } else {
     MOZ_ASSERT(false, "Unknown track type");
   }
 
   if (mSrcStream) {
-    if (aTrack->AsVideoTrack()) {
+    MOZ_ASSERT(mMediaStreamRenderer);
+    if (AudioTrack* t = aTrack->AsAudioTrack()) {
+      mMediaStreamRenderer->AddTrack(t->GetAudioStreamTrack());
+    } else if (VideoTrack* t = aTrack->AsVideoTrack()) {
       MOZ_ASSERT(!mSelectedVideoStreamTrack);
 
-      mSelectedVideoStreamTrack = aTrack->AsVideoTrack()->GetVideoStreamTrack();
-      VideoFrameContainer* container = GetVideoFrameContainer();
-      if (container) {
+      mSelectedVideoStreamTrack = t->GetVideoStreamTrack();
+      mSelectedVideoStreamTrack->AddPrincipalChangeObserver(this);
+      mMediaStreamRenderer->AddTrack(mSelectedVideoStreamTrack);
+      nsContentUtils::CombineResourcePrincipals(
+          &mSrcStreamVideoPrincipal, mSelectedVideoStreamTrack->GetPrincipal());
+      if (VideoFrameContainer* container = GetVideoFrameContainer()) {
         HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
         if (mSrcStreamIsPlaying) {
-          mSelectedVideoStreamTrack->AddVideoOutput(container);
           MaybeBeginCloningVisually();
         } else if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
           // MediaInfo uses dummy values of 1 for width and height to
           // mark video as valid. We need a new first-frame listener
           // if size is 0x0 or 1x1.
           if (!mFirstFrameListener) {
             mFirstFrameListener =
                 new FirstFrameListener(container, mAbstractMainThread);
@@ -2160,43 +2400,45 @@ void HTMLMediaElement::NotifyMediaTrackD
   LOG(LogLevel::Debug, ("MediaElement %p %sTrack with id %s disabled", this,
                         aTrack->AsAudioTrack() ? "Audio" : "Video",
                         NS_ConvertUTF16toUTF8(id).get()));
 #endif
 
   MOZ_ASSERT((!aTrack->AsAudioTrack() || !aTrack->AsAudioTrack()->Enabled()) &&
              (!aTrack->AsVideoTrack() || !aTrack->AsVideoTrack()->Selected()));
 
-  if (aTrack->AsAudioTrack()) {
-    // If we don't have any alive track , we don't need to mute MediaElement.
+  if (AudioTrack* t = aTrack->AsAudioTrack()) {
+    if (mMediaStreamRenderer) {
+      mMediaStreamRenderer->RemoveTrack(t->GetAudioStreamTrack());
+    }
+    // If we don't have any live tracks, we don't need to mute MediaElement.
     MOZ_DIAGNOSTIC_ASSERT(AudioTracks(), "Element can't have been unlinked");
     if (AudioTracks()->Length() > 0) {
       bool shouldMute = true;
       for (uint32_t i = 0; i < AudioTracks()->Length(); ++i) {
         if ((*AudioTracks())[i]->Enabled()) {
           shouldMute = false;
           break;
         }
       }
 
       if (shouldMute) {
         SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
       }
     }
-  } else if (aTrack->AsVideoTrack()) {
-    if (mSrcStream) {
+  } else if (VideoTrack* t = aTrack->AsVideoTrack()) {
+    if (mMediaStreamRenderer) {
+      MOZ_DIAGNOSTIC_ASSERT(mSelectedVideoStreamTrack ==
+                            t->GetVideoStreamTrack());
       if (mFirstFrameListener) {
         mSelectedVideoStreamTrack->RemoveVideoOutput(mFirstFrameListener);
         mFirstFrameListener = nullptr;
       }
-
-      VideoFrameContainer* container = GetVideoFrameContainer();
-      if (mSrcStreamIsPlaying && container) {
-        mSelectedVideoStreamTrack->RemoveVideoOutput(container);
-      }
+      mMediaStreamRenderer->RemoveTrack(mSelectedVideoStreamTrack);
+      mSelectedVideoStreamTrack->RemovePrincipalChangeObserver(this);
       mSelectedVideoStreamTrack = nullptr;
     }
   }
 
   if (mReadyState == HAVE_NOTHING) {
     // No MediaStreamTracks are captured until we have metadata, and code
     // below doesn't do anything for captured decoders.
     return;
@@ -2206,39 +2448,34 @@ void HTMLMediaElement::NotifyMediaTrackD
     if (ms.mCapturingDecoder) {
       MOZ_ASSERT(!ms.mCapturingMediaStream);
       continue;
     }
     if (ms.mCapturingAudioOnly && aTrack->AsVideoTrack()) {
       continue;
     }
     MOZ_ASSERT(ms.mCapturingMediaStream);
-    for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) {
-      if (ms.mTrackPorts[i].first() == aTrack->GetId()) {
-        // The source of this track just ended. Force-notify that it ended.
-        // If we bounce it to the MediaStreamGraph it might not be picked up,
-        // for instance if the MediaInputPort was destroyed in the same
-        // iteration as it was added.
-        MediaStreamTrack* outputTrack = ms.mStream->FindOwnedDOMTrack(
-            ms.mTrackPorts[i].second()->GetDestination(),
-            ms.mTrackPorts[i].second()->GetDestinationTrackId());
-        MOZ_ASSERT(outputTrack);
-        if (outputTrack) {
-          mMainThreadEventTarget->Dispatch(
-              NewRunnableMethod("MediaStreamTrack::OverrideEnded", outputTrack,
-                                &MediaStreamTrack::OverrideEnded));
-        }
-
-        ms.mTrackPorts[i].second()->Destroy();
-        ms.mTrackPorts.RemoveElementAt(i);
-        break;
+    for (int32_t i = ms.mTracks.Length() - 1; i >= 0; --i) {
+      if (ms.mTracks[i].first() != aTrack->GetId()) {
+        continue;
       }
+      // The source of this track just ended. Force-notify that it ended.
+      // If we bounce it to the MediaStreamGraph it might not be picked up,
+      // for instance if the MediaInputPort was destroyed in the same
+      // iteration as it was added.
+      mMainThreadEventTarget->Dispatch(NewRunnableMethod(
+          "StreamCaptureTrackSource::OverrideEnded",
+          static_cast<StreamCaptureTrackSource*>(ms.mTracks[i].second().get()),
+          &StreamCaptureTrackSource::OverrideEnded));
+
+      ms.mTracks.RemoveElementAt(i);
+      break;
     }
 #ifdef DEBUG
-    for (auto pair : ms.mTrackPorts) {
+    for (auto pair : ms.mTracks) {
       MOZ_ASSERT(pair.first() != aTrack->GetId(),
                  "The same MediaTrack was forwarded to the output stream more "
                  "than once. This shouldn't happen.");
     }
 #endif
   }
 }
 
@@ -2248,43 +2485,16 @@ void HTMLMediaElement::DealWithFailedEle
   }
 
   DispatchAsyncSourceError(aSourceElement);
   mMainThreadEventTarget->Dispatch(
       NewRunnableMethod("HTMLMediaElement::QueueLoadFromSourceTask", this,
                         &HTMLMediaElement::QueueLoadFromSourceTask));
 }
 
-void HTMLMediaElement::NotifyOutputTrackStopped(DOMMediaStream* aOwningStream,
-                                                TrackID aDestinationTrackID) {
-  for (OutputMediaStream& ms : mOutputStreams) {
-    if (!ms.mCapturingMediaStream) {
-      continue;
-    }
-
-    if (ms.mStream != aOwningStream) {
-      continue;
-    }
-
-    for (int32_t i = ms.mTrackPorts.Length() - 1; i >= 0; --i) {
-      MediaInputPort* port = ms.mTrackPorts[i].second();
-      if (port->GetDestinationTrackId() != aDestinationTrackID) {
-        continue;
-      }
-
-      port->Destroy();
-      ms.mTrackPorts.RemoveElementAt(i);
-      return;
-    }
-  }
-
-  // An output track ended but its port is already gone.
-  // It was probably cleared by the removal of the source MediaTrack.
-}
-
 void HTMLMediaElement::LoadFromSourceChildren() {
   NS_ASSERTION(mDelayingLoadEvent,
                "Should delay load event (if in document) during load");
   NS_ASSERTION(mIsLoadingFromSourceChildren,
                "Must remember we're loading from source children");
 
   AddMutationObserverUnlessExists(this);
 
@@ -2577,22 +2787,18 @@ nsresult HTMLMediaElement::LoadWithChann
   return NS_OK;
 }
 
 bool HTMLMediaElement::Seeking() const {
   return mDecoder && mDecoder->IsSeeking();
 }
 
 double HTMLMediaElement::CurrentTime() const {
-  if (MediaStream* stream = GetSrcMediaStream()) {
-    MediaStreamGraph* graph = stream->Graph();
-    GraphTime currentGraphTime =
-        mSrcStreamPausedGraphTime.valueOr(graph->CurrentTime());
-    StreamTime currentStreamTime = currentGraphTime - mSrcStreamGraphTimeOffset;
-    return stream->StreamTimeToSeconds(currentStreamTime);
+  if (mMediaStreamRenderer) {
+    return mMediaStreamRenderer->CurrentTime();
   }
 
   if (mDefaultPlaybackStartPosition == 0.0 && mDecoder) {
     return mDecoder->GetCurrentTime();
   }
 
   return mDefaultPlaybackStartPosition;
 }
@@ -2940,20 +3146,18 @@ void HTMLMediaElement::PauseIfShouldNotB
   }
 }
 
 void HTMLMediaElement::SetVolumeInternal() {
   float effectiveVolume = ComputedVolume();
 
   if (mDecoder) {
     mDecoder->SetVolume(effectiveVolume);
-  } else if (MediaStream* stream = GetSrcMediaStream()) {
-    if (mSrcStreamIsPlaying) {
-      stream->SetAudioOutputVolume(this, effectiveVolume);
-    }
+  } else if (mMediaStreamRenderer) {
+    mMediaStreamRenderer->SetAudioOutputVolume(effectiveVolume);
   }
 
   NotifyAudioPlaybackChanged(
       AudioChannelService::AudibleChangedReasons::eVolumeChanged);
 }
 
 void HTMLMediaElement::SetMuted(bool aMuted) {
   LOG(LogLevel::Debug, ("%p SetMuted(%d) called by JS", this, aMuted));
@@ -2975,31 +3179,23 @@ void HTMLMediaElement::SetMuted(bool aMu
 }
 
 void HTMLMediaElement::SetCapturedOutputStreamsEnabled(bool aEnabled) {
   for (OutputMediaStream& ms : mOutputStreams) {
     if (ms.mCapturingDecoder) {
       MOZ_ASSERT(!ms.mCapturingMediaStream);
       continue;
     }
-    for (auto pair : ms.mTrackPorts) {
-      MediaStream* outputSource = ms.mStream->GetInputStream();
-      if (!outputSource) {
-        NS_ERROR("No output source stream");
-        return;
-      }
-
-      TrackID id = pair.second()->GetDestinationTrackId();
-      outputSource->SetTrackEnabled(
-          id, aEnabled ? DisabledTrackMode::ENABLED
-                       : DisabledTrackMode::SILENCE_FREEZE);
-
-      LOG(LogLevel::Debug,
-          ("%s track %d for captured MediaStream %p",
-           aEnabled ? "Enabled" : "Disabled", id, ms.mStream.get()));
+    for (auto pair : ms.mTracks) {
+      static_cast<StreamCaptureTrackSource*>(pair.second().get())
+          ->SetEnabled(aEnabled);
+
+      LOG(LogLevel::Debug, ("%s track %p for captured MediaStream %p",
+                            aEnabled ? "Enabled" : "Disabled",
+                            pair.second().get(), ms.mStream.get()));
     }
   }
 }
 
 void HTMLMediaElement::AddCaptureMediaTrackToOutputStream(
     MediaTrack* aTrack, OutputMediaStream& aOutputStream, bool aAsyncAddtrack) {
   if (aOutputStream.mCapturingDecoder) {
     MOZ_ASSERT(!aOutputStream.mCapturingMediaStream);
@@ -3008,88 +3204,69 @@ void HTMLMediaElement::AddCaptureMediaTr
   aOutputStream.mCapturingMediaStream = true;
 
   if (aOutputStream.mStream == mSrcStream) {
     // Cycle detected. This can happen since tracks are added async.
     // We avoid forwarding it to the output here or we'd get into an infloop.
     return;
   }
 
-  MediaStream* outputSource = aOutputStream.mStream->GetInputStream();
-  if (!outputSource) {
-    NS_ERROR("No output source stream");
-    return;
-  }
-
-  ProcessedMediaStream* processedOutputSource =
-      outputSource->AsProcessedStream();
-  if (!processedOutputSource) {
-    NS_ERROR("Input stream not a ProcessedMediaStream");
-    return;
-  }
-
   if (!aTrack) {
     MOZ_ASSERT(false, "Bad MediaTrack");
     return;
   }
 
   MediaStreamTrack* inputTrack = mSrcStream->GetTrackById(aTrack->GetId());
   MOZ_ASSERT(inputTrack);
   if (!inputTrack) {
     NS_ERROR("Input track not found in source stream");
     return;
   }
-
-#if DEBUG
-  for (auto pair : aOutputStream.mTrackPorts) {
-    MOZ_ASSERT(pair.first() != aTrack->GetId(),
-               "Captured track already captured to output stream");
-  }
-#endif
-
-  TrackID destinationTrackID = aOutputStream.mNextAvailableTrackID++;
-  RefPtr<MediaStreamTrackSource> source =
-      new StreamCaptureTrackSource(this, &inputTrack->GetSource(),
-                                   aOutputStream.mStream, destinationTrackID);
-
-  MediaSegment::Type type = inputTrack->AsAudioStreamTrack()
-                                ? MediaSegment::AUDIO
-                                : MediaSegment::VIDEO;
-
-  RefPtr<MediaStreamTrack> track =
-      aOutputStream.mStream->CreateDOMTrack(destinationTrackID, type, source);
+  MOZ_DIAGNOSTIC_ASSERT(!inputTrack->Ended());
+
+  nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
+  if (!window) {
+    return;
+  }
+
+  ProcessedMediaStream* stream = inputTrack->Graph()->CreateTrackUnionStream();
+  RefPtr<MediaInputPort> port = inputTrack->ForwardTrackContentsTo(stream);
+  auto source = MakeRefPtr<StreamCaptureTrackSource>(&inputTrack->GetSource(),
+                                                     stream, port);
+
+  // Track is muted initially, so we don't leak data if it's added while paused
+  // and an MSG iteration passes before the mute comes into effect.
+  source->SetEnabled(mSrcStreamIsPlaying);
+
+  RefPtr<MediaStreamTrack> track;
+  if (inputTrack->AsAudioStreamTrack()) {
+    track =
+        new AudioStreamTrack(window, stream, inputTrack->GetTrackID(), source);
+  } else {
+    track =
+        new VideoStreamTrack(window, stream, inputTrack->GetTrackID(), source);
+  }
+
+  aOutputStream.mTracks.AppendElement(
+      Pair<nsString, RefPtr<MediaStreamTrackSource>>(aTrack->GetId(),
+                                                     source.get()));
 
   if (aAsyncAddtrack) {
     mMainThreadEventTarget->Dispatch(
         NewRunnableMethod<StoreRefPtrPassByPtr<MediaStreamTrack>>(
             "DOMMediaStream::AddTrackInternal", aOutputStream.mStream,
             &DOMMediaStream::AddTrackInternal, track));
   } else {
     aOutputStream.mStream->AddTrackInternal(track);
   }
 
-  // Track is muted initially, so we don't leak data if it's added while paused
-  // and an MSG iteration passes before the mute comes into effect.
-  processedOutputSource->SetTrackEnabled(destinationTrackID,
-                                         DisabledTrackMode::SILENCE_FREEZE);
-  RefPtr<MediaInputPort> port = inputTrack->ForwardTrackContentsTo(
-      processedOutputSource, destinationTrackID);
-
-  Pair<nsString, RefPtr<MediaInputPort>> p(aTrack->GetId(), port);
-  aOutputStream.mTrackPorts.AppendElement(std::move(p));
-
-  if (mSrcStreamIsPlaying) {
-    processedOutputSource->SetTrackEnabled(destinationTrackID,
-                                           DisabledTrackMode::ENABLED);
-  }
-
   LOG(LogLevel::Debug,
-      ("Created %s track %p with id %d from track %p through MediaInputPort %p",
+      ("Created %s track %p from track %p through MediaInputPort %p",
        inputTrack->AsAudioStreamTrack() ? "audio" : "video", track.get(),
-       destinationTrackID, inputTrack, port.get()));
+       inputTrack, port.get()));
 }
 
 void HTMLMediaElement::DiscardFinishWhenEndedOutputStreams() {
   // Discard all output streams that have finished now.
   for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
     if (!mOutputStreams[i].mFinishWhenEnded) {
       continue;
     }
@@ -3124,48 +3301,51 @@ already_AddRefed<DOMMediaStream> HTMLMed
     MediaStreamGraph* aGraph) {
   MOZ_RELEASE_ASSERT(aGraph);
   MOZ_ASSERT(CanBeCaptured(aStreamCaptureType));
 
   MarkAsContentSource(CallerAPI::CAPTURE_STREAM);
   MarkAsTainted();
 
   // We don't support routing to a different graph.
-  if (!mOutputStreams.IsEmpty() &&
-      aGraph != mOutputStreams[0].mStream->GetInputStream()->Graph()) {
+  if (!mOutputStreams.IsEmpty() && aGraph != mOutputStreams[0].mGraph) {
     return nullptr;
   }
 
   OutputMediaStream* out = mOutputStreams.AppendElement();
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
-  out->mStream = DOMMediaStream::CreateTrackUnionStreamAsInput(window, aGraph);
+  out->mGraph = static_cast<MediaStreamGraphImpl*>(aGraph);
+  out->mGraphKeepAliveDummyStream =
+      mOutputStreams.Length() == 1
+          ? MakeRefPtr<SharedDummyStream>(aGraph->CreateSourceStream())
+          : mOutputStreams[0].mGraphKeepAliveDummyStream;
+  out->mStream = MakeAndAddRef<DOMMediaStream>(window);
   out->mStream->SetFinishedOnInactive(false);
   out->mFinishWhenEnded =
       aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED;
   out->mCapturingAudioOnly =
       aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO;
 
   if (aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO) {
     if (mSrcStream) {
       // We don't support applying volume and mute to the captured stream, when
       // capturing a MediaStream.
       ReportToConsole(nsIScriptError::errorFlag,
                       "MediaElementAudioCaptureOfMediaStreamError");
-      return nullptr;
     }
 
     // mAudioCaptured tells the user that the audio played by this media element
     // is being routed to the captureStreams *instead* of being played to
     // speakers.
     mAudioCaptured = true;
   }
 
   if (mDecoder) {
     out->mCapturingDecoder = true;
-    mDecoder->AddOutputStream(out->mStream);
+    mDecoder->AddOutputStream(out->mStream, out->mGraph);
   } else if (mSrcStream) {
     out->mCapturingMediaStream = true;
   }
 
   if (mReadyState == HAVE_NOTHING) {
     // Do not expose the tracks until we have metadata.
     RefPtr<DOMMediaStream> result = out->mStream;
     return result.forget();
@@ -3550,21 +3730,16 @@ HTMLMediaElement::~HTMLMediaElement() {
   if (mVideoDecodeSuspendTimer) {
     mVideoDecodeSuspendTimer->Cancel();
     mVideoDecodeSuspendTimer = nullptr;
   }
   if (mSrcStream) {
     EndSrcMediaStreamPlayback();
   }
 
-  if (mCaptureStreamPort) {
-    mCaptureStreamPort->Destroy();
-    mCaptureStreamPort = nullptr;
-  }
-
   NS_ASSERTION(MediaElementTableCount(this, mLoadingSrc) == 0,
                "Destroyed media element should no longer be in element table");
 
   if (mChannelLoader) {
     mChannelLoader->Cancel();
   }
 
   if (mAudioChannelWrapper) {
@@ -3867,28 +4042,31 @@ void HTMLMediaElement::ReleaseAudioWakeL
     mWakeLock->Unlock(rv);
     rv.SuppressException();
     mWakeLock = nullptr;
   }
 }
 
 void HTMLMediaElement::WakeLockRelease() { ReleaseAudioWakeLockIfExists(); }
 
+HTMLMediaElement::SharedDummyStream::SharedDummyStream(MediaStream* aStream)
+    : mStream(aStream) {
+  mStream->Suspend();
+}
+HTMLMediaElement::SharedDummyStream::~SharedDummyStream() {
+  mStream->Destroy();
+}
+
 HTMLMediaElement::OutputMediaStream::OutputMediaStream()
-    : mNextAvailableTrackID(1),
-      mFinishWhenEnded(false),
+    : mFinishWhenEnded(false),
       mCapturingAudioOnly(false),
       mCapturingDecoder(false),
       mCapturingMediaStream(false) {}
 
-HTMLMediaElement::OutputMediaStream::~OutputMediaStream() {
-  for (auto pair : mTrackPorts) {
-    pair.second()->Destroy();
-  }
-}
+HTMLMediaElement::OutputMediaStream::~OutputMediaStream() = default;
 
 void HTMLMediaElement::GetEventTargetParent(EventChainPreVisitor& aVisitor) {
   if (!this->Controls() || !aVisitor.mEvent->mFlags.mIsTrusted) {
     nsGenericHTMLElement::GetEventTargetParent(aVisitor);
     return;
   }
 
   HTMLInputElement* el = nullptr;
@@ -4513,29 +4691,24 @@ nsresult HTMLMediaElement::FinishDecoder
                [](const GenericPromise::ResolveOrRejectValue& aValue) {
                  MOZ_ASSERT(aValue.IsResolve() && !aValue.ResolveValue());
                });
 #else
         ;
 #endif
   }
 
-  if (!mOutputStreams.IsEmpty()) {
-    mDecoder->SetNextOutputStreamTrackID(
-        mNextAvailableMediaDecoderOutputTrackID);
-  }
-
   for (OutputMediaStream& ms : mOutputStreams) {
     if (ms.mCapturingMediaStream) {
       MOZ_ASSERT(!ms.mCapturingDecoder);
       continue;
     }
 
     ms.mCapturingDecoder = true;
-    aDecoder->AddOutputStream(ms.mStream);
+    aDecoder->AddOutputStream(ms.mStream, ms.mGraph);
   }
 
   if (mMediaKeys) {
     if (mMediaKeys->GetCDMProxy()) {
       mDecoder->SetCDMProxy(mMediaKeys->GetCDMProxy());
     } else {
       // CDM must have crashed.
       ShutdownDecoder();
@@ -4641,89 +4814,64 @@ class HTMLMediaElement::MediaStreamTrack
  protected:
   const WeakPtr<HTMLMediaElement> mElement;
 };
 
 void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags) {
   if (!mSrcStream) {
     return;
   }
-  // We might be in cycle collection with mSrcStream->GetPlaybackStream()
-  // already returning null due to unlinking.
-
-  MediaStream* stream = GetSrcMediaStream();
-  MediaStreamGraph* graph = stream ? stream->Graph() : nullptr;
+
   bool shouldPlay = !(aFlags & REMOVING_SRC_STREAM) && !mPaused &&
-                    !mPausedForInactiveDocumentOrChannel && stream;
+                    !mPausedForInactiveDocumentOrChannel;
   if (shouldPlay == mSrcStreamIsPlaying) {
     return;
   }
   mSrcStreamIsPlaying = shouldPlay;
 
   LOG(LogLevel::Debug,
       ("MediaElement %p %s playback of DOMMediaStream %p", this,
        shouldPlay ? "Setting up" : "Removing", mSrcStream.get()));
 
   if (shouldPlay) {
     mSrcStreamPlaybackEnded = false;
-    mSrcStreamGraphTimeOffset +=
-        graph->CurrentTime() - mSrcStreamPausedGraphTime.ref();
-    mSrcStreamPausedGraphTime = Nothing();
-
-    mWatchManager.Watch(graph->CurrentTime(),
-                        &HTMLMediaElement::UpdateSrcStreamTime);
-
-    stream->AddAudioOutput(this);
-    SetVolumeInternal();
+
+    mMediaStreamRenderer->Start();
     if (mSink.second()) {
       NS_WARNING(
           "setSinkId() when playing a MediaStream is not supported yet and "
           "will be ignored");
     }
 
-    VideoFrameContainer* container = GetVideoFrameContainer();
-    if (mSelectedVideoStreamTrack && container) {
-      mSelectedVideoStreamTrack->AddVideoOutput(container);
+    if (mSelectedVideoStreamTrack && GetVideoFrameContainer()) {
       MaybeBeginCloningVisually();
     }
 
     SetCapturedOutputStreamsEnabled(true);  // Unmute
     // If the input is a media stream, we don't check its data and always regard
     // it as audible when it's playing.
     SetAudibleState(true);
   } else {
-    if (stream) {
-      MOZ_DIAGNOSTIC_ASSERT(mSrcStreamPausedGraphTime.isNothing());
-      mSrcStreamPausedGraphTime = Some(graph->CurrentTime().Ref());
-
-      mWatchManager.Unwatch(graph->CurrentTime(),
-                            &HTMLMediaElement::UpdateSrcStreamTime);
-
-      stream->RemoveAudioOutput(this);
-      VideoFrameContainer* container = GetVideoFrameContainer();
-      if (mSelectedVideoStreamTrack && container) {
-        mSelectedVideoStreamTrack->RemoveVideoOutput(container);
-
-        HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
-        if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
-          // MediaInfo uses dummy values of 1 for width and height to
-          // mark video as valid. We need a new first-frame listener
-          // if size is 0x0 or 1x1.
-          if (!mFirstFrameListener) {
-            mFirstFrameListener =
-                new FirstFrameListener(container, mAbstractMainThread);
-          }
-          mSelectedVideoStreamTrack->AddVideoOutput(mFirstFrameListener);
+    mMediaStreamRenderer->Stop();
+    VideoFrameContainer* container = GetVideoFrameContainer();
+    if (mSelectedVideoStreamTrack && container) {
+      HTMLVideoElement* self = static_cast<HTMLVideoElement*>(this);
+      if (self->VideoWidth() <= 1 && self->VideoHeight() <= 1) {
+        // MediaInfo uses dummy values of 1 for width and height to
+        // mark video as valid. We need a new first-frame listener
+        // if size is 0x0 or 1x1.
+        if (!mFirstFrameListener) {
+          mFirstFrameListener =
+              new FirstFrameListener(container, mAbstractMainThread);
         }
+        mSelectedVideoStreamTrack->AddVideoOutput(mFirstFrameListener);
       }
-
-      SetCapturedOutputStreamsEnabled(false);  // Mute
-    }
-    // If stream is null, then DOMMediaStream::Destroy must have been
-    // called and that will remove all listeners/outputs.
+    }
+
+    SetCapturedOutputStreamsEnabled(false);  // Mute
   }
 }
 
 void HTMLMediaElement::UpdateSrcStreamTime() {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (mSrcStreamPlaybackEnded) {
     // We do a separate FireTimeUpdate() when this is set.
@@ -4739,86 +4887,89 @@ void HTMLMediaElement::SetupSrcMediaStre
 
   mSrcStream = aStream;
 
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     return;
   }
 
-  mSrcStreamPausedGraphTime = Some(0);
-  if (MediaStream* stream = GetSrcMediaStream()) {
-    if (MediaStreamGraph* graph = stream->Graph()) {
-      // The current graph time will represent 0 for this media element.
-      mSrcStreamGraphTimeOffset = graph->CurrentTime();
-      mSrcStreamPausedGraphTime = Some(mSrcStreamGraphTimeOffset);
-    }
-  }
+  mMediaStreamRenderer = MakeAndAddRef<MediaStreamRenderer>(
+      mAbstractMainThread, GetVideoFrameContainer(), this);
+  mWatchManager.Watch(mMediaStreamRenderer->CurrentGraphTime(),
+                      &HTMLMediaElement::UpdateSrcStreamTime);
+  SetVolumeInternal();
 
   UpdateSrcMediaStreamPlaying();
+  mSrcStreamVideoPrincipal = NodePrincipal();
 
   // If we pause this media element, track changes in the underlying stream
   // will continue to fire events at this element and alter its track list.
   // That's simpler than delaying the events, but probably confusing...
   nsTArray<RefPtr<MediaStreamTrack>> tracks;
   mSrcStream->GetTracks(tracks);
   for (const RefPtr<MediaStreamTrack>& track : tracks) {
     NotifyMediaStreamTrackAdded(track);
   }
 
   mMediaStreamTrackListener = MakeUnique<MediaStreamTrackListener>(this);
   mSrcStream->RegisterTrackListener(mMediaStreamTrackListener.get());
 
-  mSrcStream->AddPrincipalChangeObserver(this);
-  mSrcStreamVideoPrincipal = mSrcStream->GetVideoPrincipal();
-
   ChangeNetworkState(NETWORK_IDLE);
   ChangeDelayLoadStatus(false);
 
   // FirstFrameLoaded() will be called when the stream has tracks.
 }
 
 void HTMLMediaElement::EndSrcMediaStreamPlayback() {
   MOZ_ASSERT(mSrcStream);
 
   UpdateSrcMediaStreamPlaying(REMOVING_SRC_STREAM);
 
+  if (mSelectedVideoStreamTrack) {
+    mSelectedVideoStreamTrack->RemovePrincipalChangeObserver(this);
+  }
   if (mFirstFrameListener) {
     mSelectedVideoStreamTrack->RemoveVideoOutput(mFirstFrameListener);
   }
   mSelectedVideoStreamTrack = nullptr;
   mFirstFrameListener = nullptr;
 
+  if (mMediaStreamRenderer) {
+    mWatchManager.Unwatch(mMediaStreamRenderer->CurrentGraphTime(),
+                          &HTMLMediaElement::UpdateSrcStreamTime);
+    mMediaStreamRenderer = nullptr;
+  }
+
   mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener.get());
   mMediaStreamTrackListener = nullptr;
   mSrcStreamTracksAvailable = false;
   mSrcStreamPlaybackEnded = false;
-
-  mSrcStream->RemovePrincipalChangeObserver(this);
   mSrcStreamVideoPrincipal = nullptr;
 
+#ifdef DEBUG
   for (OutputMediaStream& ms : mOutputStreams) {
-    for (auto pair : ms.mTrackPorts) {
-      pair.second()->Destroy();
-    }
-    ms.mTrackPorts.Clear();
-  }
+    // These tracks were removed by clearing AudioTracks() and VideoTracks().
+    MOZ_ASSERT(ms.mTracks.IsEmpty());
+  }
+#endif
 
   mSrcStream = nullptr;
 }
 
 static already_AddRefed<AudioTrack> CreateAudioTrack(
     AudioStreamTrack* aStreamTrack, nsIGlobalObject* aOwnerGlobal) {
   nsAutoString id;
   nsAutoString label;
   aStreamTrack->GetId(id);
   aStreamTrack->GetLabel(label, CallerType::System);
 
-  return MediaTrackList::CreateAudioTrack(
-      aOwnerGlobal, id, NS_LITERAL_STRING("main"), label, EmptyString(), true);
+  return MediaTrackList::CreateAudioTrack(aOwnerGlobal, id,
+                                          NS_LITERAL_STRING("main"), label,
+                                          EmptyString(), true, aStreamTrack);
 }
 
 static already_AddRefed<VideoTrack> CreateVideoTrack(
     VideoStreamTrack* aStreamTrack, nsIGlobalObject* aOwnerGlobal) {
   nsAutoString id;
   nsAutoString label;
   aStreamTrack->GetId(id);
   aStreamTrack->GetLabel(label, CallerType::System);
@@ -5708,57 +5859,55 @@ VideoFrameContainer* HTMLMediaElement::G
   }
 
   mVideoFrameContainer = new VideoFrameContainer(
       this, LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS));
 
   return mVideoFrameContainer;
 }
 
-void HTMLMediaElement::PrincipalChanged(DOMMediaStream* aStream) {
-  LOG(LogLevel::Info, ("HTMLMediaElement %p Stream principal changed.", this));
+void HTMLMediaElement::PrincipalChanged(MediaStreamTrack* aTrack) {
+  if (aTrack != mSelectedVideoStreamTrack) {
+    return;
+  }
+
   nsContentUtils::CombineResourcePrincipals(&mSrcStreamVideoPrincipal,
-                                            aStream->GetVideoPrincipal());
+                                            aTrack->GetPrincipal());
 
   LOG(LogLevel::Debug,
-      ("HTMLMediaElement %p Stream video principal changed to "
-       "%p. Waiting for it to reach VideoFrameContainer before "
-       "setting.",
-       this, aStream->GetVideoPrincipal()));
+      ("HTMLMediaElement %p video track principal changed to %p (combined "
+       "into %p). Waiting for it to reach VideoFrameContainer before setting.",
+       this, aTrack->GetPrincipal(), mSrcStreamVideoPrincipal.get()));
+
   if (mVideoFrameContainer) {
     UpdateSrcStreamVideoPrincipal(
         mVideoFrameContainer->GetLastPrincipalHandle());
   }
 }
 
 void HTMLMediaElement::UpdateSrcStreamVideoPrincipal(
     const PrincipalHandle& aPrincipalHandle) {
   nsTArray<RefPtr<VideoStreamTrack>> videoTracks;
   mSrcStream->GetVideoTracks(videoTracks);
 
   PrincipalHandle handle(aPrincipalHandle);
-  bool matchesTrackPrincipal = false;
   for (const RefPtr<VideoStreamTrack>& track : videoTracks) {
     if (PrincipalHandleMatches(handle, track->GetPrincipal()) &&
         !track->Ended()) {
       // When the PrincipalHandle for the VideoFrameContainer changes to that of
-      // a track in mSrcStream we know that a removed track was displayed but
-      // is no longer so.
-      matchesTrackPrincipal = true;
+      // a live track in mSrcStream we know that a removed track was displayed
+      // but is no longer so.
       LOG(LogLevel::Debug, ("HTMLMediaElement %p VideoFrameContainer's "
                             "PrincipalHandle matches track %p. That's all we "
                             "need.",
                             this, track.get()));
+      mSrcStreamVideoPrincipal = track->GetPrincipal();
       break;
     }
   }
-
-  if (matchesTrackPrincipal) {
-    mSrcStreamVideoPrincipal = mSrcStream->GetVideoPrincipal();
-  }
 }
 
 void HTMLMediaElement::PrincipalHandleChangedForVideoFrameContainer(
     VideoFrameContainer* aContainer,
     const PrincipalHandle& aNewPrincipalHandle) {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (!mSrcStream) {
@@ -5869,16 +6018,18 @@ bool HTMLMediaElement::IsPlaybackEnded()
   }
 }
 
 already_AddRefed<nsIPrincipal> HTMLMediaElement::GetCurrentPrincipal() {
   if (mDecoder) {
     return mDecoder->GetCurrentPrincipal();
   }
   if (mSrcStream) {
+    nsTArray<RefPtr<MediaStreamTrack>> tracks;
+    mSrcStream->GetTracks(tracks);
     nsCOMPtr<nsIPrincipal> principal = mSrcStream->GetPrincipal();
     return principal.forget();
   }
   return nullptr;
 }
 
 bool HTMLMediaElement::HadCrossOriginRedirects() {
   if (mDecoder) {
@@ -6242,23 +6393,16 @@ void HTMLMediaElement::FireTimeUpdate(bo
   // Here mTextTrackManager can be null if the cycle collector has unlinked
   // us before our parent. In that case UnbindFromTree will call us
   // when our parent is unlinked.
   if (mTextTrackManager) {
     mTextTrackManager->TimeMarchesOn();
   }
 }
 
-MediaStream* HTMLMediaElement::GetSrcMediaStream() const {
-  if (!mSrcStream) {
-    return nullptr;
-  }
-  return mSrcStream->GetPlaybackStream();
-}
-
 MediaError* HTMLMediaElement::GetError() const { return mErrorSink->mError; }
 
 void HTMLMediaElement::GetCurrentSpec(nsCString& aString) {
   if (mLoadingSrc) {
     mLoadingSrc->GetSpec(aString);
   } else {
     aString.Truncate();
   }
@@ -6896,57 +7040,46 @@ void HTMLMediaElement::SetMediaInfo(cons
   }
   if (mAudioChannelWrapper) {
     mAudioChannelWrapper->AudioCaptureStreamChangeIfNeeded();
   }
   UpdateWakeLock();
 }
 
 void HTMLMediaElement::AudioCaptureStreamChange(bool aCapture) {
-  // No need to capture a silence media element.
+  // No need to capture a silent media element.
   if (!HasAudio()) {
     return;
   }
 
-  if (aCapture && !mCaptureStreamPort) {
-    nsCOMPtr<nsPIDOMWindowInner> window = OwnerDoc()->GetInnerWindow();
-    if (!OwnerDoc()->GetInnerWindow()) {
+  if (aCapture && !mStreamWindowCapturer) {
+    nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
+    if (!window) {
       return;
     }
 
-    uint64_t id = window->WindowID();
     MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
         MediaStreamGraph::AUDIO_THREAD_DRIVER, window,
         MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
-
-    if (GetSrcMediaStream()) {
-      mCaptureStreamPort = msg->ConnectToCaptureStream(id, GetSrcMediaStream());
-    } else {
-      RefPtr<DOMMediaStream> stream =
-          CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED,
-                                StreamCaptureType::CAPTURE_AUDIO, msg);
-      mCaptureStreamPort =
-          msg->ConnectToCaptureStream(id, stream->GetPlaybackStream());
-    }
-  } else if (!aCapture && mCaptureStreamPort) {
-    if (mDecoder) {
-      ProcessedMediaStream* ps =
-          mCaptureStreamPort->GetSource()->AsProcessedStream();
-      MOZ_ASSERT(ps);
-
-      for (uint32_t i = 0; i < mOutputStreams.Length(); i++) {
-        if (mOutputStreams[i].mStream->GetPlaybackStream() == ps) {
+    RefPtr<DOMMediaStream> stream =
+        CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED,
+                              StreamCaptureType::CAPTURE_AUDIO, msg);
+    mStreamWindowCapturer =
+        MakeUnique<MediaStreamWindowCapturer>(stream, window->WindowID());
+  } else if (!aCapture && mStreamWindowCapturer) {
+    for (size_t i = 0; i < mOutputStreams.Length(); i++) {
+      if (mOutputStreams[i].mStream == mStreamWindowCapturer->mStream) {
+        if (mOutputStreams[i].mCapturingDecoder && mDecoder) {
           mDecoder->RemoveOutputStream(mOutputStreams[i].mStream);
-          mOutputStreams.RemoveElementAt(i);
-          break;
         }
+        mOutputStreams.RemoveElementAt(i);
+        break;
       }
     }
-    mCaptureStreamPort->Destroy();
-    mCaptureStreamPort = nullptr;
+    mStreamWindowCapturer = nullptr;
   }
 }
 
 void HTMLMediaElement::NotifyCueDisplayStatesChanged() {
   if (!mTextTrackManager) {
     return;
   }
 
@@ -7344,17 +7477,17 @@ already_AddRefed<Promise> HTMLMediaEleme
                     if (aValue.IsResolve()) {
                       return SinkInfoPromise::CreateAndResolve(aInfo, __func__);
                     }
                     return SinkInfoPromise::CreateAndReject(
                         aValue.RejectValue(), __func__);
                   });
               return p;
             }
-            if (self->GetSrcMediaStream()) {
+            if (self->mSrcAttrStream) {
               // Set Sink Id through MSG is not supported yet.
               return SinkInfoPromise::CreateAndReject(NS_ERROR_ABORT, __func__);
             }
             // No media attached to the element save it for later.
             return SinkInfoPromise::CreateAndResolve(aInfo, __func__);
           },
           [](nsresult res) {
             // Promise is rejected, sink not found.
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -49,24 +49,27 @@ class ChannelMediaDecoder;
 class DecoderDoctorDiagnostics;
 class DOMMediaStream;
 class ErrorResult;
 class MediaResource;
 class MediaDecoder;
 class MediaInputPort;
 class MediaStream;
 class MediaStreamGraph;
+class MediaStreamGraphImpl;
+class MediaStreamWindowCapturer;
 class VideoFrameContainer;
 namespace dom {
 class MediaKeys;
 class TextTrack;
 class TimeRanges;
 class WakeLock;
+class MediaStreamTrack;
+class MediaStreamTrackSource;
 class MediaTrack;
-class MediaStreamTrack;
 class VideoStreamTrack;
 }  // namespace dom
 }  // namespace mozilla
 
 class AudioDeviceInfo;
 class nsIChannel;
 class nsIHttpChannel;
 class nsILoadGroup;
@@ -93,17 +96,17 @@ enum class StreamCaptureType : uint8_t {
 
 enum class StreamCaptureBehavior : uint8_t {
   CONTINUE_WHEN_ENDED,
   FINISH_WHEN_ENDED
 };
 
 class HTMLMediaElement : public nsGenericHTMLElement,
                          public MediaDecoderOwner,
-                         public PrincipalChangeObserver<DOMMediaStream>,
+                         public PrincipalChangeObserver<MediaStreamTrack>,
                          public SupportsWeakPtr<HTMLMediaElement>,
                          public nsStubMutationObserver {
  public:
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::layers::ImageContainer ImageContainer;
   typedef mozilla::VideoFrameContainer VideoFrameContainer;
   typedef mozilla::MediaStream MediaStream;
   typedef mozilla::MediaResource MediaResource;
@@ -224,18 +227,18 @@ class HTMLMediaElement : public nsGeneri
   layers::ImageContainer* GetImageContainer();
 
   /**
    * Call this to reevaluate whether we should start/stop due to our owner
    * document being active, inactive, visible or hidden.
    */
   void NotifyOwnerDocumentActivityChanged();
 
-  // From PrincipalChangeObserver<DOMMediaStream>.
-  void PrincipalChanged(DOMMediaStream* aStream) override;
+  // From PrincipalChangeObserver<MediaStreamTrack>.
+  void PrincipalChanged(MediaStreamTrack* aTrack) override;
 
   void UpdateSrcStreamVideoPrincipal(const PrincipalHandle& aPrincipalHandle);
 
   // Called after the MediaStream we're playing rendered a frame to aContainer
   // with a different principalHandle than the previous frame.
   void PrincipalHandleChangedForVideoFrameContainer(
       VideoFrameContainer* aContainer,
       const PrincipalHandle& aNewPrincipalHandle) override;
@@ -291,18 +294,16 @@ class HTMLMediaElement : public nsGeneri
   already_AddRefed<nsIPrincipal> GetCurrentVideoPrincipal();
 
   // called to notify that the principal of the decoder's media resource has
   // changed.
   void NotifyDecoderPrincipalChanged() final;
 
   void GetEMEInfo(dom::EMEDebugInfo& aInfo);
 
-  class StreamCaptureTrackSource;
-
   // Update the visual size of the media. Called from the decoder on the
   // main thread when/if the size changes.
   virtual void UpdateMediaSize(const nsIntSize& aSize);
   // Like UpdateMediaSize, but only updates the size if no size has yet
   // been set.
   void UpdateInitialMediaSize(const nsIntSize& aSize);
 
   void Invalidate(bool aImageSizeChanged, Maybe<nsIntSize>& aNewIntrinsicSize,
@@ -333,23 +334,16 @@ class HTMLMediaElement : public nsGeneri
 
   /**
    * Called by one of our associated MediaTrackLists (audio/video) when an
    * AudioTrack is disabled or a VideoTrack is unselected.
    */
   void NotifyMediaTrackDisabled(MediaTrack* aTrack);
 
   /**
-   * Called when a captured MediaStreamTrack is stopped so we can clean up its
-   * MediaInputPort.
-   */
-  void NotifyOutputTrackStopped(DOMMediaStream* aOwningStream,
-                                TrackID aDestinationTrackID);
-
-  /**
    * Returns the current load ID. Asynchronous events store the ID that was
    * current when they were enqueued, and if it has changed when they come to
    * fire, they consider themselves cancelled, and don't fire.
    */
   uint32_t GetCurrentLoadID() { return mCurrentLoadID; }
 
   /**
    * Returns the load group for this media element's owner document.
@@ -387,23 +381,16 @@ class HTMLMediaElement : public nsGeneri
   /**
    * Fires a timeupdate event. If aPeriodic is true, the event will only
    * be fired if we've not fired a timeupdate event (for any reason) in the
    * last 250ms, as required by the spec when the current time is periodically
    * increasing during playback.
    */
   void FireTimeUpdate(bool aPeriodic) final;
 
-  /**
-   * This will return null if mSrcStream is null, or if mSrcStream is not
-   * null but its GetPlaybackStream() returns null --- which can happen during
-   * cycle collection unlinking!
-   */
-  MediaStream* GetSrcMediaStream() const;
-
   // WebIDL
 
   MediaError* GetError() const;
 
   void GetSrc(nsAString& aSrc) { GetURIAttr(nsGkAtoms::src, nullptr, aSrc); }
   void SetSrc(const nsAString& aSrc, ErrorResult& aError) {
     SetHTMLAttr(nsGkAtoms::src, aSrc, aError);
   }
@@ -747,39 +734,54 @@ class HTMLMediaElement : public nsGeneri
 
  protected:
   virtual ~HTMLMediaElement();
 
   class AudioChannelAgentCallback;
   class ChannelLoader;
   class ErrorSink;
   class MediaLoadListener;
+  class MediaStreamRenderer;
   class MediaStreamTrackListener;
   class FirstFrameListener;
   class ShutdownObserver;
+  class StreamCaptureTrackSource;
 
   MediaDecoderOwner::NextFrameStatus NextFrameStatus();
 
   void SetDecoder(MediaDecoder* aDecoder);
 
+  struct SharedDummyStream {
+    NS_INLINE_DECL_REFCOUNTING(SharedDummyStream)
+    explicit SharedDummyStream(MediaStream* aStream);
+    const RefPtr<MediaStream> mStream;
+
+   private:
+    ~SharedDummyStream();
+  };
+
   // Holds references to the DOM wrappers for the MediaStreams that we're
   // writing to.
   struct OutputMediaStream {
     OutputMediaStream();
     ~OutputMediaStream();
 
     RefPtr<DOMMediaStream> mStream;
-    TrackID mNextAvailableTrackID;
+    RefPtr<MediaStreamGraphImpl> mGraph;
+    // Dummy stream that keeps mGraph from shutting down when MediaDecoder
+    // shuts down. Shared across all OutputMediaStreams, since one stream is
+    // enough to keep the graph alive.
+    RefPtr<SharedDummyStream> mGraphKeepAliveDummyStream;
     bool mFinishWhenEnded;
     bool mCapturingAudioOnly;
     bool mCapturingDecoder;
     bool mCapturingMediaStream;
 
     // The following members are keeping state for a captured MediaStream.
-    nsTArray<Pair<nsString, RefPtr<MediaInputPort>>> mTrackPorts;
+    nsTArray<Pair<nsString, RefPtr<MediaStreamTrackSource>>> mTracks;
   };
 
   void PlayInternal(bool aHandlingUserInput);
 
   /** Use this method to change the mReadyState member, so required
    * events can be fired.
    */
   void ChangeReadyState(nsMediaReadyState aState);
@@ -1326,41 +1328,35 @@ class HTMLMediaElement : public nsGeneri
   // Holds the triggering principal for the src attribute.
   nsCOMPtr<nsIPrincipal> mSrcAttrTriggeringPrincipal;
 
   // Holds a reference to the DOM wrapper for the MediaStream that we're
   // actually playing.
   // At most one of mDecoder and mSrcStream can be non-null.
   RefPtr<DOMMediaStream> mSrcStream;
 
+  // The MediaStreamRenderer handles rendering of our selected video track
+  // and enabled audio tracks while mSrcStream is set.
+  RefPtr<MediaStreamRenderer> mMediaStreamRenderer;
+
   // True once mSrcStream's initial set of tracks are known.
   bool mSrcStreamTracksAvailable = false;
 
-  // While mPaused is true and mSrcStream is set, this is the value to use for
-  // CurrentTime(). Otherwise this is Nothing.
-  Maybe<GraphTime> mSrcStreamPausedGraphTime;
-
-  // The offset in GraphTime at which this media element started playing the
-  // playback stream of mSrcStream.
-  GraphTime mSrcStreamGraphTimeOffset = 0;
-
   // True once PlaybackEnded() is called and we're playing a MediaStream.
   // Reset to false if we start playing mSrcStream again.
   bool mSrcStreamPlaybackEnded = false;
 
-  // Holds a reference to the stream connecting this stream to the capture sink.
-  RefPtr<MediaInputPort> mCaptureStreamPort;
+  // The capturer that connects this element's captured audio stream to the
+  // window capture sink.
+  UniquePtr<MediaStreamWindowCapturer> mStreamWindowCapturer;
 
   // Holds references to the DOM wrappers for the MediaStreams that we're
   // writing to.
   nsTArray<OutputMediaStream> mOutputStreams;
 
-  // The next track id to use for a captured MediaDecoder.
-  TrackID mNextAvailableMediaDecoderOutputTrackID = 1;
-
   // Holds a reference to the first-frame-getting track listener attached to
   // mSelectedVideoStreamTrack.
   RefPtr<FirstFrameListener> mFirstFrameListener;
   // The currently selected video stream track.
   RefPtr<VideoStreamTrack> mSelectedVideoStreamTrack;
 
   const RefPtr<ShutdownObserver> mShutdownObserver;
 
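// SharedDummyStream above exists only so the graph outlives MediaDecoder
// shutdown for as long as any OutputMediaStream still needs it. A standalone
// analogy of that shared keep-alive (plain C++ with std::shared_ptr standing
// in for Gecko refcounting; Graph/KeepAlive/Output are illustrative names):
#include <cstdio>
#include <memory>
#include <vector>

struct Graph {
  ~Graph() { std::puts("graph shut down"); }
};

// One keep-alive object is shared by every output, mirroring
// mGraphKeepAliveDummyStream: the graph survives until the last holder drops
// its reference.
struct KeepAlive {
  std::shared_ptr<Graph> graph = std::make_shared<Graph>();
};

struct Output {
  std::shared_ptr<KeepAlive> keepAlive;
};

int main() {
  auto keepAlive = std::make_shared<KeepAlive>();
  std::vector<Output> outputs{{keepAlive}, {keepAlive}};
  keepAlive.reset();   // the creator lets go first; outputs still hold it
  outputs.pop_back();  // one output left, graph still alive
  outputs.pop_back();  // last reference gone; "graph shut down" is printed
}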
--- a/dom/media/AudioStreamTrack.cpp
+++ b/dom/media/AudioStreamTrack.cpp
@@ -1,20 +1,42 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioStreamTrack.h"
 
+#include "MediaStreamGraph.h"
 #include "nsContentUtils.h"
 
 namespace mozilla {
 namespace dom {
 
+void AudioStreamTrack::AddAudioOutput(void* aKey) {
+  if (Ended()) {
+    return;
+  }
+  mStream->AddAudioOutput(aKey);
+}
+
+void AudioStreamTrack::RemoveAudioOutput(void* aKey) {
+  if (Ended()) {
+    return;
+  }
+  mStream->RemoveAudioOutput(aKey);
+}
+
+void AudioStreamTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
+  if (Ended()) {
+    return;
+  }
+  mStream->SetAudioOutputVolume(aKey, aVolume);
+}
+
 void AudioStreamTrack::GetLabel(nsAString& aLabel, CallerType aCallerType) {
   if (nsContentUtils::ResistFingerprinting(aCallerType)) {
     aLabel.AssignLiteral("Internal Microphone");
     return;
   }
   MediaStreamTrack::GetLabel(aLabel, aCallerType);
 }
 
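// The methods above add a per-key audio output on the underlying stream, and
// become no-ops once the track has ended. A standalone sketch of the keyed
// bookkeeping such an API implies (plain C++; this is not the MediaStream
// implementation, just the add/set-volume/remove contract):
#include <cstdio>
#include <unordered_map>

class AudioOutputs {
 public:
  // aKey is an opaque identifier for the consumer, typically `this`.
  void AddAudioOutput(const void* aKey) { mVolumes.emplace(aKey, 1.0f); }
  void RemoveAudioOutput(const void* aKey) { mVolumes.erase(aKey); }
  void SetAudioOutputVolume(const void* aKey, float aVolume) {
    auto it = mVolumes.find(aKey);
    if (it != mVolumes.end()) {
      it->second = aVolume;
    }
  }
  float VolumeFor(const void* aKey) const {
    auto it = mVolumes.find(aKey);
    return it != mVolumes.end() ? it->second : 0.0f;
  }

 private:
  std::unordered_map<const void*, float> mVolumes;
};

int main() {
  AudioOutputs outputs;
  int consumer = 0;  // stand-in for an HTMLMediaElement registering output
  outputs.AddAudioOutput(&consumer);
  outputs.SetAudioOutputVolume(&consumer, 0.5f);
  std::printf("volume: %.1f\n", outputs.VolumeFor(&consumer));
  outputs.RemoveAudioOutput(&consumer);
}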
--- a/dom/media/AudioStreamTrack.h
+++ b/dom/media/AudioStreamTrack.h
@@ -10,34 +10,38 @@
 #include "DOMMediaStream.h"
 
 namespace mozilla {
 namespace dom {
 
 class AudioStreamTrack : public MediaStreamTrack {
  public:
   AudioStreamTrack(
-      DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
+      nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
       MediaStreamTrackSource* aSource,
       const MediaTrackConstraints& aConstraints = MediaTrackConstraints())
-      : MediaStreamTrack(aStream, aTrackID, aInputTrackID, aSource,
+      : MediaStreamTrack(aWindow, aInputStream, aTrackID, aSource,
                          aConstraints) {}
 
   AudioStreamTrack* AsAudioStreamTrack() override { return this; }
   const AudioStreamTrack* AsAudioStreamTrack() const override { return this; }
 
+  void AddAudioOutput(void* aKey);
+  void RemoveAudioOutput(void* aKey);
+  void SetAudioOutputVolume(void* aKey, float aVolume);
+
   // WebIDL
   void GetKind(nsAString& aKind) override { aKind.AssignLiteral("audio"); }
 
   void GetLabel(nsAString& aLabel, CallerType aCallerType) override;
 
  protected:
-  already_AddRefed<MediaStreamTrack> CloneInternal(
-      DOMMediaStream* aOwningStream, TrackID aTrackID) override {
-    return do_AddRef(new AudioStreamTrack(
-        aOwningStream, aTrackID, mInputTrackID, mSource, mConstraints));
+  already_AddRefed<MediaStreamTrack> CloneInternal() override {
+    return do_AddRef(
+        new AudioStreamTrack(mWindow, Ended() ? nullptr : mInputStream.get(),
+                             mTrackID, mSource, mConstraints));
   }
 };
 
 }  // namespace dom
 }  // namespace mozilla
 
 #endif /* AUDIOSTREAMTRACK_H_ */
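// CloneInternal above deliberately passes a null input stream when the source
// track has ended, so the clone is constructed without a backing graph stream.
// A minimal standalone sketch of that rule (plain C++; Stream/Track are
// illustrative stand-ins, not the Gecko classes):
#include <memory>

struct Stream {};

struct Track {
  std::shared_ptr<Stream> input;  // dropped once the track ends
  bool Ended() const { return !input; }
  Track Clone() const {
    // An ended original yields a clone with no backing stream.
    return Track{Ended() ? nullptr : input};
  }
};

int main() {
  Track live{std::make_shared<Stream>()};
  Track ended{nullptr};
  return (!live.Clone().Ended() && ended.Clone().Ended()) ? 0 : 1;
}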
--- a/dom/media/AudioTrack.cpp
+++ b/dom/media/AudioTrack.cpp
@@ -9,19 +9,30 @@
 #include "mozilla/dom/AudioTrackList.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 
 namespace mozilla {
 namespace dom {
 
 AudioTrack::AudioTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
                        const nsAString& aKind, const nsAString& aLabel,
-                       const nsAString& aLanguage, bool aEnabled)
+                       const nsAString& aLanguage, bool aEnabled,
+                       AudioStreamTrack* aStreamTrack)
     : MediaTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage),
-      mEnabled(aEnabled) {}
+      mEnabled(aEnabled),
+      mAudioStreamTrack(aStreamTrack) {}
+
+AudioTrack::~AudioTrack() = default;
+
+NS_IMPL_CYCLE_COLLECTION_INHERITED(AudioTrack, MediaTrack, mAudioStreamTrack)
+
+NS_IMPL_ADDREF_INHERITED(AudioTrack, MediaTrack)
+NS_IMPL_RELEASE_INHERITED(AudioTrack, MediaTrack)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(AudioTrack)
+NS_INTERFACE_MAP_END_INHERITING(MediaTrack)
 
 JSObject* AudioTrack::WrapObject(JSContext* aCx,
                                  JS::Handle<JSObject*> aGivenProto) {
   return AudioTrack_Binding::Wrap(aCx, this, aGivenProto);
 }
 
 void AudioTrack::SetEnabled(bool aEnabled) {
   SetEnabledInternal(aEnabled, MediaTrack::DEFAULT);
--- a/dom/media/AudioTrack.h
+++ b/dom/media/AudioTrack.h
@@ -7,34 +7,48 @@
 #ifndef mozilla_dom_AudioTrack_h
 #define mozilla_dom_AudioTrack_h
 
 #include "MediaTrack.h"
 
 namespace mozilla {
 namespace dom {
 
+class AudioStreamTrack;
+
 class AudioTrack : public MediaTrack {
  public:
   AudioTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
              const nsAString& aKind, const nsAString& aLabel,
-             const nsAString& aLanguage, bool aEnabled);
+             const nsAString& aLanguage, bool aEnabled,
+             AudioStreamTrack* aStreamTrack = nullptr);
+
+  NS_DECL_ISUPPORTS_INHERITED
+  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioTrack, MediaTrack)
 
   JSObject* WrapObject(JSContext* aCx,
                        JS::Handle<JSObject*> aGivenProto) override;
 
   AudioTrack* AsAudioTrack() override { return this; }
 
   void SetEnabledInternal(bool aEnabled, int aFlags) override;
 
+  // Get the associated AudioStreamTrack when this audio track comes from a
+  // MediaStream. May be nullptr when the src of the owning HTMLMediaElement
+  // is not a MediaStream.
+  AudioStreamTrack* GetAudioStreamTrack() { return mAudioStreamTrack; }
+
   // WebIDL
   bool Enabled() const { return mEnabled; }
 
   void SetEnabled(bool aEnabled);
 
  private:
+  virtual ~AudioTrack();
+
   bool mEnabled;
+  RefPtr<AudioStreamTrack> mAudioStreamTrack;
 };
 
 }  // namespace dom
 }  // namespace mozilla
 
 #endif  // mozilla_dom_AudioTrack_h
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -39,17 +39,19 @@ OutputStreamDriver::OutputStreamDriver(S
 
 OutputStreamDriver::~OutputStreamDriver() {
   MOZ_ASSERT(NS_IsMainThread());
   EndTrack();
 }
 
 void OutputStreamDriver::EndTrack() {
   MOZ_ASSERT(NS_IsMainThread());
-  mSourceStream->EndTrack(mTrackId);
+  if (!mSourceStream->IsDestroyed()) {
+    mSourceStream->Destroy();
+  }
 }
 
 void OutputStreamDriver::SetImage(const RefPtr<layers::Image>& aImage,
                                   const TimeStamp& aTime) {
   MOZ_ASSERT(NS_IsMainThread());
 
   TRACE_COMMENT("SourceMediaStream %p track %i", mSourceStream.get(), mTrackId);
 
@@ -143,17 +145,17 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(Canva
 NS_IMPL_ADDREF_INHERITED(CanvasCaptureMediaStream, DOMMediaStream)
 NS_IMPL_RELEASE_INHERITED(CanvasCaptureMediaStream, DOMMediaStream)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(CanvasCaptureMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
 
 CanvasCaptureMediaStream::CanvasCaptureMediaStream(nsPIDOMWindowInner* aWindow,
                                                    HTMLCanvasElement* aCanvas)
-    : DOMMediaStream(aWindow), mCanvas(aCanvas), mOutputStreamDriver(nullptr) {}
+    : DOMMediaStream(aWindow), mCanvas(aCanvas) {}
 
 CanvasCaptureMediaStream::~CanvasCaptureMediaStream() {
   if (mOutputStreamDriver) {
     mOutputStreamDriver->Forget();
   }
 }
 
 JSObject* CanvasCaptureMediaStream::WrapObject(
@@ -163,54 +165,51 @@ JSObject* CanvasCaptureMediaStream::Wrap
 
 void CanvasCaptureMediaStream::RequestFrame() {
   if (mOutputStreamDriver) {
     mOutputStreamDriver->RequestFrameCapture();
   }
 }
 
 nsresult CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
-                                        const TrackID& aTrackId,
+                                        const TrackID aTrackId,
                                         nsIPrincipal* aPrincipal) {
+  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
+      MediaStreamGraph::SYSTEM_THREAD_DRIVER, mWindow,
+      MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+  SourceMediaStream* source = graph->CreateSourceStream();
   PrincipalHandle principalHandle = MakePrincipalHandle(aPrincipal);
-
   if (!aFPS.WasPassed()) {
-    mOutputStreamDriver = new AutoDriver(GetInputStream()->AsSourceStream(),
-                                         aTrackId, principalHandle);
+    mOutputStreamDriver = new AutoDriver(source, aTrackId, principalHandle);
   } else if (aFPS.Value() < 0) {
     return NS_ERROR_ILLEGAL_VALUE;
   } else {
     // Cap frame rate to 60 FPS for sanity
     double fps = std::min(60.0, aFPS.Value());
-    mOutputStreamDriver = new TimerDriver(GetInputStream()->AsSourceStream(),
-                                          fps, aTrackId, principalHandle);
+    mOutputStreamDriver =
+        new TimerDriver(source, fps, aTrackId, principalHandle);
   }
   return NS_OK;
 }
 
-already_AddRefed<CanvasCaptureMediaStream>
-CanvasCaptureMediaStream::CreateSourceStream(nsPIDOMWindowInner* aWindow,
-                                             HTMLCanvasElement* aCanvas) {
-  RefPtr<CanvasCaptureMediaStream> stream =
-      new CanvasCaptureMediaStream(aWindow, aCanvas);
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-      MediaStreamGraph::SYSTEM_THREAD_DRIVER, aWindow,
-      MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
-  stream->InitSourceStream(graph);
-  return stream.forget();
-}
-
 FrameCaptureListener* CanvasCaptureMediaStream::FrameCaptureListener() {
   return mOutputStreamDriver;
 }
 
 void CanvasCaptureMediaStream::StopCapture() {
   if (!mOutputStreamDriver) {
     return;
   }
 
   mOutputStreamDriver->EndTrack();
   mOutputStreamDriver->Forget();
   mOutputStreamDriver = nullptr;
 }
 
+SourceMediaStream* CanvasCaptureMediaStream::GetSourceStream() const {
+  if (!mOutputStreamDriver) {
+    return nullptr;
+  }
+  return mOutputStreamDriver->mSourceStream;
+}
+
 }  // namespace dom
 }  // namespace mozilla
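// Init() above now creates its own SourceMediaStream and chooses a capture
// driver from the optional frame rate: auto-capture when no rate is passed, an
// error for negative rates, and a timer capped at 60 fps otherwise. A
// standalone sketch of that selection (plain C++ with std::optional; the
// Driver types are illustrative, not the Gecko AutoDriver/TimerDriver):
#include <algorithm>
#include <memory>
#include <optional>
#include <stdexcept>

struct Driver {
  virtual ~Driver() = default;
};
struct AutoDriver : Driver {};  // captures whenever the canvas is painted
struct TimerDriver : Driver {   // captures at a fixed rate
  explicit TimerDriver(double aFps) : fps(aFps) {}
  double fps;
};

std::unique_ptr<Driver> MakeDriver(const std::optional<double>& aFps) {
  if (!aFps) {
    return std::make_unique<AutoDriver>();
  }
  if (*aFps < 0) {
    throw std::invalid_argument("negative frame rate");
  }
  // Cap the requested rate at 60 fps, as Init() does.
  return std::make_unique<TimerDriver>(std::min(60.0, *aFps));
}

int main() {
  auto timer = MakeDriver(120.0);  // yields a TimerDriver clamped to 60 fps
  auto automatic = MakeDriver(std::nullopt);
  return timer && automatic ? 0 : 1;
}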
--- a/dom/media/CanvasCaptureMediaStream.h
+++ b/dom/media/CanvasCaptureMediaStream.h
@@ -79,57 +79,51 @@ class OutputStreamDriver : public FrameC
   void EndTrack();
 
   /*
    * Makes sure any internal resources this driver is holding that may create
    * reference cycles are released.
    */
   virtual void Forget() {}
 
- protected:
-  virtual ~OutputStreamDriver();
-
- private:
   const TrackID mTrackId;
   const RefPtr<SourceMediaStream> mSourceStream;
   const PrincipalHandle mPrincipalHandle;
+
+ protected:
+  virtual ~OutputStreamDriver();
 };
 
 class CanvasCaptureMediaStream : public DOMMediaStream {
  public:
   CanvasCaptureMediaStream(nsPIDOMWindowInner* aWindow,
                            HTMLCanvasElement* aCanvas);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(CanvasCaptureMediaStream,
                                            DOMMediaStream)
 
-  nsresult Init(const dom::Optional<double>& aFPS, const TrackID& aTrackId,
+  nsresult Init(const dom::Optional<double>& aFPS, const TrackID aTrackId,
                 nsIPrincipal* aPrincipal);
 
   JSObject* WrapObject(JSContext* aCx,
                        JS::Handle<JSObject*> aGivenProto) override;
 
   // WebIDL
   HTMLCanvasElement* Canvas() const { return mCanvas; }
   void RequestFrame();
 
   dom::FrameCaptureListener* FrameCaptureListener();
 
   /**
    * Stops capturing for this stream at mCanvas.
    */
   void StopCapture();
 
-  /**
-   * Create a CanvasCaptureMediaStream whose underlying stream is a
-   * SourceMediaStream.
-   */
-  static already_AddRefed<CanvasCaptureMediaStream> CreateSourceStream(
-      nsPIDOMWindowInner* aWindow, HTMLCanvasElement* aCanvas);
+  SourceMediaStream* GetSourceStream() const;
 
  protected:
   ~CanvasCaptureMediaStream();
 
  private:
   RefPtr<HTMLCanvasElement> mCanvas;
   RefPtr<OutputStreamDriver> mOutputStreamDriver;
 };
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -8,29 +8,28 @@
 #include "AudioCaptureStream.h"
 #include "AudioChannelAgent.h"
 #include "AudioStreamTrack.h"
 #include "Layers.h"
 #include "MediaStreamGraph.h"
 #include "MediaStreamGraphImpl.h"
 #include "MediaStreamListener.h"
 #include "VideoStreamTrack.h"
-#include "mozilla/BasePrincipal.h"
-#include "mozilla/dom/AudioNode.h"
 #include "mozilla/dom/AudioTrack.h"
 #include "mozilla/dom/AudioTrackList.h"
 #include "mozilla/dom/DocGroup.h"
 #include "mozilla/dom/HTMLCanvasElement.h"
 #include "mozilla/dom/MediaStreamBinding.h"
 #include "mozilla/dom/MediaStreamTrackEvent.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/VideoTrack.h"
 #include "mozilla/dom/VideoTrackList.h"
 #include "mozilla/media/MediaUtils.h"
 #include "nsContentUtils.h"
+#include "nsGlobalWindowInner.h"
 #include "nsIScriptError.h"
 #include "nsIUUIDGenerator.h"
 #include "nsPIDOMWindow.h"
 #include "nsProxyRelease.h"
 #include "nsRFPService.h"
 #include "nsServiceManagerUtils.h"
 
 #ifdef LOG
@@ -43,71 +42,26 @@ using namespace mozilla::layers;
 using namespace mozilla::media;
 
 static LazyLogModule gMediaStreamLog("MediaStream");
 #define LOG(type, msg) MOZ_LOG(gMediaStreamLog, type, msg)
 
 const TrackID TRACK_VIDEO_PRIMARY = 1;
 
 static bool ContainsLiveTracks(
-    nsTArray<RefPtr<DOMMediaStream::TrackPort>>& aTracks) {
-  for (auto& port : aTracks) {
-    if (port->GetTrack()->ReadyState() == MediaStreamTrackState::Live) {
+    const nsTArray<RefPtr<MediaStreamTrack>>& aTracks) {
+  for (const auto& track : aTracks) {
+    if (track->ReadyState() == MediaStreamTrackState::Live) {
       return true;
     }
   }
 
   return false;
 }
 
-DOMMediaStream::TrackPort::TrackPort(MediaInputPort* aInputPort,
-                                     MediaStreamTrack* aTrack,
-                                     const InputPortOwnership aOwnership)
-    : mInputPort(aInputPort), mTrack(aTrack), mOwnership(aOwnership) {
-  MOZ_ASSERT(mInputPort);
-  MOZ_ASSERT(mTrack);
-
-  MOZ_COUNT_CTOR(TrackPort);
-}
-
-DOMMediaStream::TrackPort::~TrackPort() {
-  MOZ_COUNT_DTOR(TrackPort);
-
-  if (mOwnership == InputPortOwnership::OWNED) {
-    DestroyInputPort();
-  }
-}
-
-void DOMMediaStream::TrackPort::DestroyInputPort() {
-  if (mInputPort) {
-    mInputPort->Destroy();
-    mInputPort = nullptr;
-  }
-}
-
-MediaStream* DOMMediaStream::TrackPort::GetSource() const {
-  return mInputPort ? mInputPort->GetSource() : nullptr;
-}
-
-TrackID DOMMediaStream::TrackPort::GetSourceTrackId() const {
-  return mInputPort ? mInputPort->GetSourceTrackId() : TRACK_INVALID;
-}
-
-RefPtr<GenericPromise> DOMMediaStream::TrackPort::BlockSourceTrackId(
-    TrackID aTrackId, BlockingMode aBlockingMode) {
-  if (!mInputPort) {
-    return GenericPromise::CreateAndReject(NS_ERROR_FAILURE, __func__);
-  }
-  return mInputPort->BlockSourceTrackId(aTrackId, aBlockingMode);
-}
-
-NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::TrackPort, mTrack)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(DOMMediaStream::TrackPort, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(DOMMediaStream::TrackPort, Release)
-
 class DOMMediaStream::PlaybackTrackListener : public MediaStreamTrackConsumer {
  public:
   explicit PlaybackTrackListener(DOMMediaStream* aStream) : mStream(aStream) {}
 
   NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(PlaybackTrackListener)
   NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(PlaybackTrackListener)
 
   void NotifyEnded(MediaStreamTrack* aTrack) override {
@@ -138,60 +92,39 @@ NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(D
 NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::PlaybackTrackListener, mStream)
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(DOMMediaStream)
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(DOMMediaStream,
                                                 DOMEventTargetHelper)
   tmp->Destroy();
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPlaybackTrackListener)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mPrincipal)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoPrincipal)
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream,
                                                   DOMEventTargetHelper)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPlaybackTrackListener)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPrincipal)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoPrincipal)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_ADDREF_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMMediaStream)
   NS_INTERFACE_MAP_ENTRY(DOMMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream,
-                                   mStreamNode)
-
-NS_IMPL_ADDREF_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
-NS_IMPL_RELEASE_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
-
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DOMAudioNodeMediaStream)
-NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
-
 DOMMediaStream::DOMMediaStream(nsPIDOMWindowInner* aWindow)
     : mWindow(aWindow),
-      mInputStream(nullptr),
-      mOwnedStream(nullptr),
-      mPlaybackStream(nullptr),
-      mTracksPendingRemoval(0),
       mPlaybackTrackListener(MakeAndAddRef<PlaybackTrackListener>(this)),
-      mTracksCreated(false),
-      mNotifiedOfMediaStreamGraphShutdown(false),
       mActive(false),
       mFinishedOnInactive(true) {
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
       do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
   if (NS_SUCCEEDED(rv) && uuidgen) {
     nsID uuid;
@@ -204,47 +137,23 @@ DOMMediaStream::DOMMediaStream(nsPIDOMWi
     }
   }
 }
 
 DOMMediaStream::~DOMMediaStream() { Destroy(); }
 
 void DOMMediaStream::Destroy() {
   LOG(LogLevel::Debug, ("DOMMediaStream %p Being destroyed.", this));
-  for (const RefPtr<TrackPort>& info : mTracks) {
+  for (const auto& track : mTracks) {
     // We must remove ourselves from each track's principal change observer list
-    // before we die. CC may have cleared info->mTrack so guard against it.
-    MediaStreamTrack* track = info->GetTrack();
-    if (track) {
-      track->RemovePrincipalChangeObserver(this);
-      if (!track->Ended()) {
-        track->RemoveConsumer(mPlaybackTrackListener);
-      }
+    // before we die.
+    if (!track->Ended()) {
+      track->RemoveConsumer(mPlaybackTrackListener);
     }
   }
-  if (mPlaybackPort) {
-    mPlaybackPort->Destroy();
-    mPlaybackPort = nullptr;
-  }
-  if (mOwnedPort) {
-    mOwnedPort->Destroy();
-    mOwnedPort = nullptr;
-  }
-  if (mPlaybackStream) {
-    mPlaybackStream->UnregisterUser();
-    mPlaybackStream = nullptr;
-  }
-  if (mOwnedStream) {
-    mOwnedStream->UnregisterUser();
-    mOwnedStream = nullptr;
-  }
-  if (mInputStream) {
-    mInputStream->UnregisterUser();
-    mInputStream = nullptr;
-  }
   mTrackListeners.Clear();
 }
 
 JSObject* DOMMediaStream::WrapObject(JSContext* aCx,
                                      JS::Handle<JSObject*> aGivenProto) {
   return dom::MediaStream_Binding::Wrap(aCx, this, aGivenProto);
 }
 
@@ -284,33 +193,19 @@ already_AddRefed<DOMMediaStream> DOMMedi
   nsCOMPtr<nsPIDOMWindowInner> ownerWindow =
       do_QueryInterface(aGlobal.GetAsSupports());
   if (!ownerWindow) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   auto newStream = MakeRefPtr<DOMMediaStream>(ownerWindow);
-
   for (MediaStreamTrack& track : aTracks) {
-    if (!newStream->GetPlaybackStream()) {
-      MOZ_RELEASE_ASSERT(track.Graph());
-      newStream->InitPlaybackStreamCommon(track.Graph());
-    }
     newStream->AddTrack(track);
   }
-
-  if (!newStream->GetPlaybackStream()) {
-    MOZ_ASSERT(aTracks.IsEmpty());
-    MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-        MediaStreamGraph::SYSTEM_THREAD_DRIVER, ownerWindow,
-        MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
-    newStream->InitPlaybackStreamCommon(graph);
-  }
-
   return newStream.forget();
 }
 
 already_AddRefed<Promise> DOMMediaStream::CountUnderlyingStreams(
     const GlobalObject& aGlobal, ErrorResult& aRv) {
   nsCOMPtr<nsPIDOMWindowInner> window =
       do_QueryInterface(aGlobal.GetAsSupports());
   if (!window) {
@@ -377,538 +272,139 @@ already_AddRefed<Promise> DOMMediaStream
 
   return p.forget();
 }
 
 void DOMMediaStream::GetId(nsAString& aID) const { aID = mID; }
 
 void DOMMediaStream::GetAudioTracks(
     nsTArray<RefPtr<AudioStreamTrack>>& aTracks) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack()) {
+  for (const auto& track : mTracks) {
+    if (AudioStreamTrack* t = track->AsAudioStreamTrack()) {
       aTracks.AppendElement(t);
     }
   }
 }
 
 void DOMMediaStream::GetAudioTracks(
     nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (info->GetTrack()->AsAudioStreamTrack()) {
-      aTracks.AppendElement(info->GetTrack());
+  for (const auto& track : mTracks) {
+    if (track->AsAudioStreamTrack()) {
+      aTracks.AppendElement(track);
     }
   }
 }
 
 void DOMMediaStream::GetVideoTracks(
     nsTArray<RefPtr<VideoStreamTrack>>& aTracks) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack()) {
+  for (const auto& track : mTracks) {
+    if (VideoStreamTrack* t = track->AsVideoStreamTrack()) {
       aTracks.AppendElement(t);
     }
   }
 }
 
 void DOMMediaStream::GetVideoTracks(
     nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (info->GetTrack()->AsVideoStreamTrack()) {
-      aTracks.AppendElement(info->GetTrack());
+  for (const auto& track : mTracks) {
+    if (track->AsVideoStreamTrack()) {
+      aTracks.AppendElement(track);
     }
   }
 }
 
 void DOMMediaStream::GetTracks(
     nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    aTracks.AppendElement(info->GetTrack());
+  for (const auto& track : mTracks) {
+    aTracks.AppendElement(track);
   }
 }
 
 void DOMMediaStream::AddTrack(MediaStreamTrack& aTrack) {
-  MOZ_RELEASE_ASSERT(mPlaybackStream);
-
-  RefPtr<ProcessedMediaStream> dest = mPlaybackStream->AsProcessedStream();
-  MOZ_ASSERT(dest);
-  if (!dest) {
-    return;
-  }
-
   LOG(LogLevel::Info,
       ("DOMMediaStream %p Adding track %p (from stream %p with ID %d)", this,
-       &aTrack, aTrack.mOwningStream.get(), aTrack.mTrackID));
-
-  if (mPlaybackStream->Graph() != aTrack.Graph()) {
-    NS_ASSERTION(false,
-                 "Cannot combine tracks from different MediaStreamGraphs");
-    LOG(LogLevel::Error, ("DOMMediaStream %p Own MSG %p != aTrack's MSG %p",
-                          this, mPlaybackStream->Graph(), aTrack.Graph()));
-
-    AutoTArray<nsString, 1> params;
-    aTrack.GetId(*params.AppendElement());
-    nsCOMPtr<nsPIDOMWindowInner> pWindow = GetParentObject();
-    Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
-    nsContentUtils::ReportToConsole(
-        nsIScriptError::errorFlag, NS_LITERAL_CSTRING("Media"), document,
-        nsContentUtils::eDOM_PROPERTIES,
-        "MediaStreamAddTrackDifferentAudioChannel", params);
-    return;
-  }
+       &aTrack, aTrack.GetStream(), aTrack.GetTrackID()));
 
   if (HasTrack(aTrack)) {
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p already contains track %p", this, &aTrack));
     return;
   }
 
-  // Hook up the underlying track with our underlying playback stream.
-  RefPtr<MediaInputPort> inputPort = GetPlaybackStream()->AllocateInputPort(
-      aTrack.GetOwnedStream(), aTrack.mTrackID);
-  RefPtr<TrackPort> trackPort =
-      new TrackPort(inputPort, &aTrack, TrackPort::InputPortOwnership::OWNED);
-  mTracks.AppendElement(trackPort.forget());
+  mTracks.AppendElement(&aTrack);
   NotifyTrackAdded(&aTrack);
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Added track %p", this, &aTrack));
 }
 
 void DOMMediaStream::RemoveTrack(MediaStreamTrack& aTrack) {
   LOG(LogLevel::Info,
       ("DOMMediaStream %p Removing track %p (from stream %p with ID %d)", this,
-       &aTrack, aTrack.mOwningStream.get(), aTrack.mTrackID));
+       &aTrack, aTrack.GetStream(), aTrack.GetTrackID()));
 
-  RefPtr<TrackPort> toRemove = FindPlaybackTrackPort(aTrack);
-  if (!toRemove) {
+  if (!mTracks.RemoveElement(&aTrack)) {
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p does not contain track %p", this, &aTrack));
     return;
   }
 
-  DebugOnly<bool> removed = mTracks.RemoveElement(toRemove);
-  NS_ASSERTION(removed,
-               "If there's a track port we should be able to remove it");
-
-  // If the track comes from a TRACK_ANY input port (i.e., mOwnedPort), we need
-  // to block it in the port. Doing this for a locked track is still OK as it
-  // will first block the track, then destroy the port. Both cause the track to
-  // end.
-  // If the track has already ended, it's input port might be gone, so in those
-  // cases blocking the underlying track should be avoided.
   if (!aTrack.Ended()) {
-    BlockPlaybackTrack(toRemove);
     NotifyTrackRemoved(&aTrack);
   }
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Removed track %p", this, &aTrack));
 }
 
 already_AddRefed<DOMMediaStream> DOMMediaStream::Clone() {
   auto newStream = MakeRefPtr<DOMMediaStream>(GetParentObject());
 
   LOG(LogLevel::Info,
       ("DOMMediaStream %p created clone %p", this, newStream.get()));
 
-  MOZ_RELEASE_ASSERT(mPlaybackStream);
-  MOZ_RELEASE_ASSERT(mPlaybackStream->Graph());
-  MediaStreamGraph* graph = mPlaybackStream->Graph();
-
-  // We initiate the owned and playback streams first, since we need to create
-  // all existing DOM tracks before we add the generic input port from
-  // mInputStream to mOwnedStream (see AllocateInputPort wrt. destination
-  // TrackID as to why).
-  newStream->InitOwnedStreamCommon(graph);
-  newStream->InitPlaybackStreamCommon(graph);
-
-  // Set up existing DOM tracks.
-  TrackID allocatedTrackID = 1;
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    MediaStreamTrack& track = *info->GetTrack();
-
+  for (const auto& track : mTracks) {
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p forwarding external track %p to clone %p", this,
-         &track, newStream.get()));
-    RefPtr<MediaStreamTrack> trackClone =
-        newStream->CloneDOMTrack(track, allocatedTrackID++);
+         track.get(), newStream.get()));
+    RefPtr<MediaStreamTrack> clone = track->Clone();
+    newStream->AddTrack(*clone);
   }
 
   return newStream.forget();
 }
 
 bool DOMMediaStream::Active() const { return mActive; }
 
 MediaStreamTrack* DOMMediaStream::GetTrackById(const nsAString& aId) const {
-  for (const RefPtr<TrackPort>& info : mTracks) {
+  for (const auto& track : mTracks) {
     nsString id;
-    info->GetTrack()->GetId(id);
+    track->GetId(id);
     if (id == aId) {
-      return info->GetTrack();
-    }
-  }
-  return nullptr;
-}
-
-MediaStreamTrack* DOMMediaStream::GetOwnedTrackById(const nsAString& aId) {
-  for (const RefPtr<TrackPort>& info : mOwnedTracks) {
-    nsString id;
-    info->GetTrack()->GetId(id);
-    if (id == aId) {
-      return info->GetTrack();
+      return track;
     }
   }
   return nullptr;
 }
 
 bool DOMMediaStream::HasTrack(const MediaStreamTrack& aTrack) const {
-  return !!FindPlaybackTrackPort(aTrack);
-}
-
-bool DOMMediaStream::OwnsTrack(const MediaStreamTrack& aTrack) const {
-  return !!FindOwnedTrackPort(aTrack);
-}
-
-bool DOMMediaStream::IsFinished() const {
-  return !mPlaybackStream || mPlaybackStream->IsFinished();
-}
-
-TrackRate DOMMediaStream::GraphRate() {
-  if (mPlaybackStream) {
-    return mPlaybackStream->GraphRate();
-  }
-  if (mOwnedStream) {
-    return mOwnedStream->GraphRate();
-  }
-  if (mInputStream) {
-    return mInputStream->GraphRate();
-  }
-
-  MOZ_ASSERT(false, "Not hooked up to a graph");
-  return 0;
-}
-
-void DOMMediaStream::InitSourceStream(MediaStreamGraph* aGraph) {
-  InitInputStreamCommon(aGraph->CreateSourceStream(), aGraph);
-  InitOwnedStreamCommon(aGraph);
-  InitPlaybackStreamCommon(aGraph);
-}
-
-void DOMMediaStream::InitTrackUnionStream(MediaStreamGraph* aGraph) {
-  InitInputStreamCommon(aGraph->CreateTrackUnionStream(), aGraph);
-  InitOwnedStreamCommon(aGraph);
-  InitPlaybackStreamCommon(aGraph);
-}
-
-void DOMMediaStream::InitAudioCaptureStream(nsIPrincipal* aPrincipal,
-                                            MediaStreamGraph* aGraph) {
-  const TrackID AUDIO_TRACK = 1;
-
-  RefPtr<BasicTrackSource> audioCaptureSource =
-      new BasicTrackSource(aPrincipal, MediaSourceEnum::AudioCapture);
-
-  AudioCaptureStream* audioCaptureStream = static_cast<AudioCaptureStream*>(
-      aGraph->CreateAudioCaptureStream(AUDIO_TRACK));
-  InitInputStreamCommon(audioCaptureStream, aGraph);
-  InitOwnedStreamCommon(aGraph);
-  InitPlaybackStreamCommon(aGraph);
-  RefPtr<MediaStreamTrack> track =
-      CreateDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO, audioCaptureSource);
-  AddTrackInternal(track);
-
-  audioCaptureStream->Start();
-}
-
-void DOMMediaStream::InitInputStreamCommon(MediaStream* aStream,
-                                           MediaStreamGraph* aGraph) {
-  MOZ_ASSERT(!mOwnedStream,
-             "Input stream must be initialized before owned stream");
-
-  mInputStream = aStream;
-  mInputStream->RegisterUser();
-}
-
-void DOMMediaStream::InitOwnedStreamCommon(MediaStreamGraph* aGraph) {
-  MOZ_ASSERT(!mPlaybackStream,
-             "Owned stream must be initialized before playback stream");
-
-  mOwnedStream = aGraph->CreateTrackUnionStream();
-  mOwnedStream->QueueSetAutofinish(true);
-  mOwnedStream->RegisterUser();
-  if (mInputStream) {
-    mOwnedPort = mOwnedStream->AllocateInputPort(mInputStream);
-  }
-}
-
-void DOMMediaStream::InitPlaybackStreamCommon(MediaStreamGraph* aGraph) {
-  mPlaybackStream = aGraph->CreateTrackUnionStream();
-  mPlaybackStream->QueueSetAutofinish(true);
-  mPlaybackStream->RegisterUser();
-  if (mOwnedStream) {
-    mPlaybackPort = mPlaybackStream->AllocateInputPort(mOwnedStream);
-  }
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Initiated with mInputStream=%p, "
-                        "mOwnedStream=%p, mPlaybackStream=%p",
-                        this, mInputStream, mOwnedStream, mPlaybackStream));
-}
-
-already_AddRefed<DOMMediaStream> DOMMediaStream::CreateSourceStreamAsInput(
-    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
-  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
-  stream->InitSourceStream(aGraph);
-  return stream.forget();
-}
-
-already_AddRefed<DOMMediaStream> DOMMediaStream::CreateTrackUnionStreamAsInput(
-    nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph) {
-  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
-  stream->InitTrackUnionStream(aGraph);
-  return stream.forget();
-}
-
-already_AddRefed<DOMMediaStream>
-DOMMediaStream::CreateAudioCaptureStreamAsInput(nsPIDOMWindowInner* aWindow,
-                                                nsIPrincipal* aPrincipal,
-                                                MediaStreamGraph* aGraph) {
-  auto stream = MakeRefPtr<DOMMediaStream>(aWindow);
-  stream->InitAudioCaptureStream(aPrincipal, aGraph);
-  return stream.forget();
-}
-
-void DOMMediaStream::PrincipalChanged(MediaStreamTrack* aTrack) {
-  MOZ_ASSERT(aTrack);
-  NS_ASSERTION(HasTrack(*aTrack), "Principal changed for an unknown track");
-  LOG(LogLevel::Info,
-      ("DOMMediaStream %p Principal changed for track %p", this, aTrack));
-  RecomputePrincipal();
-}
-
-void DOMMediaStream::RecomputePrincipal() {
-  nsCOMPtr<nsIPrincipal> previousPrincipal = mPrincipal.forget();
-  nsCOMPtr<nsIPrincipal> previousVideoPrincipal = mVideoPrincipal.forget();
-
-  if (mTracksPendingRemoval > 0) {
-    LOG(LogLevel::Info, ("DOMMediaStream %p RecomputePrincipal() Cannot "
-                         "recompute stream principal with tracks pending "
-                         "removal.",
-                         this));
-    return;
-  }
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Recomputing principal. "
-                        "Old principal was %p.",
-                        this, previousPrincipal.get()));
-
-  // mPrincipal is recomputed based on all current tracks, and tracks that have
-  // not ended in our playback stream.
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (info->GetTrack()->Ended()) {
-      continue;
-    }
-    LOG(LogLevel::Debug,
-        ("DOMMediaStream %p Taking live track %p with "
-         "principal %p into account.",
-         this, info->GetTrack(), info->GetTrack()->GetPrincipal()));
-    nsContentUtils::CombineResourcePrincipals(&mPrincipal,
-                                              info->GetTrack()->GetPrincipal());
-    if (info->GetTrack()->AsVideoStreamTrack()) {
-      nsContentUtils::CombineResourcePrincipals(
-          &mVideoPrincipal, info->GetTrack()->GetPrincipal());
-    }
-  }
-
-  LOG(LogLevel::Debug,
-      ("DOMMediaStream %p new principal is %p.", this, mPrincipal.get()));
-
-  if (previousPrincipal != mPrincipal ||
-      previousVideoPrincipal != mVideoPrincipal) {
-    NotifyPrincipalChanged();
-  }
-}
-
-void DOMMediaStream::NotifyPrincipalChanged() {
-  if (!mPrincipal) {
-    // When all tracks are removed, mPrincipal will change to nullptr.
-    LOG(LogLevel::Info,
-        ("DOMMediaStream %p Principal changed to nothing.", this));
-  } else {
-    LOG(LogLevel::Info, ("DOMMediaStream %p Principal changed. Now: "
-                         "null=%d, codebase=%d, expanded=%d, system=%d",
-                         this, mPrincipal->GetIsNullPrincipal(),
-                         mPrincipal->GetIsContentPrincipal(),
-                         mPrincipal->GetIsExpandedPrincipal(),
-                         mPrincipal->IsSystemPrincipal()));
-  }
-
-  for (uint32_t i = 0; i < mPrincipalChangeObservers.Length(); ++i) {
-    mPrincipalChangeObservers[i]->PrincipalChanged(this);
-  }
-}
-
-bool DOMMediaStream::AddPrincipalChangeObserver(
-    PrincipalChangeObserver<DOMMediaStream>* aObserver) {
-  return mPrincipalChangeObservers.AppendElement(aObserver) != nullptr;
-}
-
-bool DOMMediaStream::RemovePrincipalChangeObserver(
-    PrincipalChangeObserver<DOMMediaStream>* aObserver) {
-  return mPrincipalChangeObservers.RemoveElement(aObserver);
+  return mTracks.Contains(&aTrack);
 }
 
 void DOMMediaStream::AddTrackInternal(MediaStreamTrack* aTrack) {
-  MOZ_ASSERT(aTrack->mOwningStream == this);
-  MOZ_ASSERT(FindOwnedDOMTrack(aTrack->GetInputStream(), aTrack->mInputTrackID,
-                               aTrack->mTrackID));
-  MOZ_ASSERT(!FindPlaybackDOMTrack(aTrack->GetOwnedStream(), aTrack->mTrackID));
-
   LOG(LogLevel::Debug,
       ("DOMMediaStream %p Adding owned track %p", this, aTrack));
-
-  mTracks.AppendElement(new TrackPort(mPlaybackPort, aTrack,
-                                      TrackPort::InputPortOwnership::EXTERNAL));
-
-  NotifyTrackAdded(aTrack);
-
+  AddTrack(*aTrack);
   DispatchTrackEvent(NS_LITERAL_STRING("addtrack"), aTrack);
 }
 
-already_AddRefed<MediaStreamTrack> DOMMediaStream::CreateDOMTrack(
-    TrackID aTrackID, MediaSegment::Type aType, MediaStreamTrackSource* aSource,
-    const MediaTrackConstraints& aConstraints) {
-  MOZ_RELEASE_ASSERT(mInputStream);
-  MOZ_RELEASE_ASSERT(mOwnedStream);
-
-  MOZ_ASSERT(FindOwnedDOMTrack(GetInputStream(), aTrackID) == nullptr);
-
-  RefPtr<MediaStreamTrack> track;
-  switch (aType) {
-    case MediaSegment::AUDIO:
-      track =
-          new AudioStreamTrack(this, aTrackID, aTrackID, aSource, aConstraints);
-      break;
-    case MediaSegment::VIDEO:
-      track =
-          new VideoStreamTrack(this, aTrackID, aTrackID, aSource, aConstraints);
-      break;
-    default:
-      MOZ_CRASH("Unhandled track type");
+already_AddRefed<nsIPrincipal> DOMMediaStream::GetPrincipal() {
+  nsCOMPtr<nsIPrincipal> principal =
+      nsGlobalWindowInner::Cast(mWindow)->GetPrincipal();
+  for (const auto& t : mTracks) {
+    if (t->Ended()) {
+      continue;
+    }
+    nsContentUtils::CombineResourcePrincipals(&principal, t->GetPrincipal());
   }
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u",
-                        this, track.get(), aTrackID));
-
-  mOwnedTracks.AppendElement(new TrackPort(
-      mOwnedPort, track, TrackPort::InputPortOwnership::EXTERNAL));
-
-  return track.forget();
-}
-
-already_AddRefed<MediaStreamTrack> DOMMediaStream::CloneDOMTrack(
-    MediaStreamTrack& aTrack, TrackID aCloneTrackID) {
-  MOZ_RELEASE_ASSERT(mOwnedStream);
-  MOZ_RELEASE_ASSERT(mPlaybackStream);
-  MOZ_RELEASE_ASSERT(IsTrackIDExplicit(aCloneTrackID));
-
-  TrackID inputTrackID = aTrack.mInputTrackID;
-  MediaStream* inputStream = aTrack.GetInputStream();
-
-  RefPtr<MediaStreamTrack> newTrack = aTrack.CloneInternal(this, aCloneTrackID);
-
-  newTrack->mOriginalTrack =
-      aTrack.mOriginalTrack ? aTrack.mOriginalTrack.get() : &aTrack;
-
-  LOG(LogLevel::Debug,
-      ("DOMMediaStream %p Created new track %p cloned from stream %p track %d",
-       this, newTrack.get(), inputStream, inputTrackID));
-
-  RefPtr<MediaInputPort> inputPort =
-      mOwnedStream->AllocateInputPort(inputStream, inputTrackID, aCloneTrackID);
-
-  mOwnedTracks.AppendElement(
-      new TrackPort(inputPort, newTrack, TrackPort::InputPortOwnership::OWNED));
-
-  mTracks.AppendElement(new TrackPort(mPlaybackPort, newTrack,
-                                      TrackPort::InputPortOwnership::EXTERNAL));
-
-  NotifyTrackAdded(newTrack);
-
-  newTrack->SetEnabled(aTrack.Enabled());
-  newTrack->SetMuted(aTrack.Muted());
-  newTrack->SetReadyState(aTrack.ReadyState());
-
-  if (aTrack.Ended()) {
-    // For extra suspenders, make sure that we don't forward data by mistake
-    // to the clone when the original has already ended.
-    // We only block END_EXISTING to allow any pending clones to end.
-    Unused << inputPort->BlockSourceTrackId(inputTrackID,
-                                            BlockingMode::END_EXISTING);
-  }
-
-  return newTrack.forget();
-}
-
-static DOMMediaStream::TrackPort* FindTrackPortAmongTracks(
-    const MediaStreamTrack& aTrack,
-    const nsTArray<RefPtr<DOMMediaStream::TrackPort>>& aTracks) {
-  for (const RefPtr<DOMMediaStream::TrackPort>& info : aTracks) {
-    if (info->GetTrack() == &aTrack) {
-      return info;
-    }
-  }
-  return nullptr;
-}
-
-MediaStreamTrack* DOMMediaStream::FindOwnedDOMTrack(MediaStream* aInputStream,
-                                                    TrackID aInputTrackID,
-                                                    TrackID aTrackID) const {
-  MOZ_RELEASE_ASSERT(mOwnedStream);
-
-  for (const RefPtr<TrackPort>& info : mOwnedTracks) {
-    if (info->GetInputPort() &&
-        info->GetInputPort()->GetSource() == aInputStream &&
-        info->GetTrack()->mInputTrackID == aInputTrackID &&
-        (aTrackID == TRACK_ANY || info->GetTrack()->mTrackID == aTrackID)) {
-      // This track is owned externally but in our playback stream.
-      return info->GetTrack();
-    }
-  }
-  return nullptr;
-}
-
-DOMMediaStream::TrackPort* DOMMediaStream::FindOwnedTrackPort(
-    const MediaStreamTrack& aTrack) const {
-  return FindTrackPortAmongTracks(aTrack, mOwnedTracks);
-}
-
-MediaStreamTrack* DOMMediaStream::FindPlaybackDOMTrack(
-    MediaStream* aInputStream, TrackID aInputTrackID) const {
-  if (!mPlaybackStream) {
-    // One would think we can assert mPlaybackStream here, but track clones have
-    // a dummy DOMMediaStream that doesn't have a playback stream, so we can't.
-    return nullptr;
-  }
-
-  for (const RefPtr<TrackPort>& info : mTracks) {
-    if (info->GetInputPort() == mPlaybackPort && aInputStream == mOwnedStream &&
-        info->GetTrack()->mInputTrackID == aInputTrackID) {
-      // This track is in our owned and playback streams.
-      return info->GetTrack();
-    }
-    if (info->GetInputPort() &&
-        info->GetInputPort()->GetSource() == aInputStream &&
-        info->GetSourceTrackId() == aInputTrackID) {
-      // This track is owned externally but in our playback stream.
-      MOZ_ASSERT(IsTrackIDExplicit(aInputTrackID));
-      return info->GetTrack();
-    }
-  }
-  return nullptr;
-}
-
-DOMMediaStream::TrackPort* DOMMediaStream::FindPlaybackTrackPort(
-    const MediaStreamTrack& aTrack) const {
-  return FindTrackPortAmongTracks(aTrack, mTracks);
+  return principal.forget();
 }
 
 void DOMMediaStream::NotifyActive() {
   LOG(LogLevel::Info, ("DOMMediaStream %p NotifyActive(). ", this));
 
   MOZ_ASSERT(mActive);
   for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
     mTrackListeners[i]->NotifyActive();
@@ -922,20 +418,16 @@ void DOMMediaStream::NotifyInactive() {
   for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
     mTrackListeners[i]->NotifyInactive();
   }
 }
 
 void DOMMediaStream::RegisterTrackListener(TrackListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
 
-  if (mNotifiedOfMediaStreamGraphShutdown) {
-    // No more tracks will ever be added, so just do nothing.
-    return;
-  }
   mTrackListeners.AppendElement(aListener);
 }
 
 void DOMMediaStream::UnregisterTrackListener(TrackListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   mTrackListeners.RemoveElement(aListener);
 }
 
@@ -951,39 +443,16 @@ void DOMMediaStream::SetFinishedOnInacti
   if (mFinishedOnInactive && !ContainsLiveTracks(mTracks)) {
     NotifyTrackRemoved(nullptr);
   }
 }
 
 void DOMMediaStream::NotifyTrackAdded(const RefPtr<MediaStreamTrack>& aTrack) {
   MOZ_ASSERT(NS_IsMainThread());
 
-  if (mTracksPendingRemoval > 0) {
-    // If there are tracks pending removal we may not degrade the current
-    // principals until those tracks have been confirmed removed from the
-    // playback stream. Instead combine with the new track and the (potentially)
-    // degraded principal will be calculated when it's safe.
-    nsContentUtils::CombineResourcePrincipals(&mPrincipal,
-                                              aTrack->GetPrincipal());
-    LOG(LogLevel::Debug, ("DOMMediaStream %p saw a track get added. Combining "
-                          "its principal %p into our while waiting for pending "
-                          "tracks to be removed. New principal is %p.",
-                          this, aTrack->GetPrincipal(), mPrincipal.get()));
-    if (aTrack->AsVideoStreamTrack()) {
-      nsContentUtils::CombineResourcePrincipals(&mVideoPrincipal,
-                                                aTrack->GetPrincipal());
-    }
-  } else {
-    LOG(LogLevel::Debug, ("DOMMediaStream %p saw a track get added. "
-                          "Recomputing principal.",
-                          this));
-    RecomputePrincipal();
-  }
-
-  aTrack->AddPrincipalChangeObserver(this);
   aTrack->AddConsumer(mPlaybackTrackListener);
 
   for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
     mTrackListeners[i]->NotifyTrackAdded(aTrack);
   }
 
   if (mActive) {
     return;
@@ -1002,27 +471,21 @@ void DOMMediaStream::NotifyTrackRemoved(
 
   if (aTrack) {
     // aTrack may be null to allow HTMLMediaElement::MozCaptureStream streams
     // to be played until the source media element has ended. The source media
     // element will then call NotifyTrackRemoved(nullptr) to signal that we can
     // go inactive, regardless of the timing of the last track ending.
 
     aTrack->RemoveConsumer(mPlaybackTrackListener);
-    aTrack->RemovePrincipalChangeObserver(this);
 
     for (int32_t i = mTrackListeners.Length() - 1; i >= 0; --i) {
       mTrackListeners[i]->NotifyTrackRemoved(aTrack);
     }
 
-    // Don't call RecomputePrincipal here as the track may still exist in the
-    // playback stream in the MediaStreamGraph. It will instead be called when
-    // the track has been confirmed removed by the graph. See
-    // BlockPlaybackTrack().
-
     if (!mActive) {
       NS_ASSERTION(false, "Shouldn't remove a live track if already inactive");
       return;
     }
   }
 
   if (!mFinishedOnInactive) {
     return;
@@ -1040,51 +503,8 @@ nsresult DOMMediaStream::DispatchTrackEv
   MediaStreamTrackEventInit init;
   init.mTrack = aTrack;
 
   RefPtr<MediaStreamTrackEvent> event =
       MediaStreamTrackEvent::Constructor(this, aName, init);
 
   return DispatchTrustedEvent(event);
 }
-
-void DOMMediaStream::BlockPlaybackTrack(TrackPort* aTrack) {
-  MOZ_ASSERT(aTrack);
-  ++mTracksPendingRemoval;
-  RefPtr<DOMMediaStream> that = this;
-  aTrack
-      ->BlockSourceTrackId(aTrack->GetTrack()->mTrackID, BlockingMode::CREATION)
-      ->Then(
-          GetCurrentThreadSerialEventTarget(), __func__,
-          [this, that](bool aIgnore) { NotifyPlaybackTrackBlocked(); },
-          [](const nsresult& aIgnore) {
-            NS_ERROR("Could not remove track from MSG");
-          });
-}
-
-void DOMMediaStream::NotifyPlaybackTrackBlocked() {
-  MOZ_ASSERT(mTracksPendingRemoval > 0,
-             "A track reported finished blocking more times than we asked for");
-  if (--mTracksPendingRemoval == 0) {
-    // The MediaStreamGraph has reported a track was blocked and we are not
-    // waiting for any further tracks to get blocked. It is now safe to
-    // recompute the principal based on our main thread track set state.
-    LOG(LogLevel::Debug, ("DOMMediaStream %p saw all tracks pending removal "
-                          "finish. Recomputing principal.",
-                          this));
-    RecomputePrincipal();
-  }
-}
-
-DOMAudioNodeMediaStream::DOMAudioNodeMediaStream(nsPIDOMWindowInner* aWindow,
-                                                 AudioNode* aNode)
-    : DOMMediaStream(aWindow), mStreamNode(aNode) {}
-
-DOMAudioNodeMediaStream::~DOMAudioNodeMediaStream() {}
-
-already_AddRefed<DOMAudioNodeMediaStream>
-DOMAudioNodeMediaStream::CreateTrackUnionStreamAsInput(
-    nsPIDOMWindowInner* aWindow, AudioNode* aNode, MediaStreamGraph* aGraph) {
-  RefPtr<DOMAudioNodeMediaStream> stream =
-      new DOMAudioNodeMediaStream(aWindow, aNode);
-  stream->InitTrackUnionStream(aGraph);
-  return stream.forget();
-}
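// GetPrincipal() above now folds the principals of all live tracks into the
// window's principal on demand, instead of maintaining cached mPrincipal and
// mVideoPrincipal members. A standalone sketch of that fold (plain C++; the
// combine rule shown -- equal principals are kept, differing ones collapse to
// an opaque value -- is only an assumption about what
// nsContentUtils::CombineResourcePrincipals does, not its implementation):
#include <string>
#include <vector>

struct Track {
  bool ended = false;
  std::string principal;
};

std::string Combine(const std::string& aCurrent, const std::string& aNext) {
  return aCurrent == aNext ? aCurrent : std::string("<opaque>");
}

std::string StreamPrincipal(const std::string& aWindowPrincipal,
                            const std::vector<Track>& aTracks) {
  std::string principal = aWindowPrincipal;
  for (const Track& t : aTracks) {
    if (t.ended) {
      continue;  // ended tracks no longer contribute, as in GetPrincipal()
    }
    principal = Combine(principal, t.principal);
  }
  return principal;
}

int main() {
  std::vector<Track> tracks{{false, "https://a.example"},
                            {true, "https://b.example"}};
  return StreamPrincipal("https://a.example", tracks) == "https://a.example"
             ? 0
             : 1;
}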
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -12,181 +12,59 @@
 #include "nsCycleCollectionParticipant.h"
 #include "nsWrapperCache.h"
 #include "StreamTracks.h"
 #include "nsIDOMWindow.h"
 #include "nsIPrincipal.h"
 #include "MediaTrackConstraints.h"
 #include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/RelativeTimeline.h"
-#include "PrincipalChangeObserver.h"
 
 namespace mozilla {
 
 class AbstractThread;
 class DOMMediaStream;
 class MediaStream;
 class MediaInputPort;
-class MediaStreamGraph;
 class ProcessedMediaStream;
 
 enum class BlockingMode;
 
 namespace dom {
-class AudioNode;
 class HTMLCanvasElement;
 class MediaStreamTrack;
 class MediaStreamTrackSource;
 class AudioStreamTrack;
 class VideoStreamTrack;
-class AudioTrack;
-class VideoTrack;
-class AudioTrackList;
-class VideoTrackList;
-class MediaTrackListListener;
 }  // namespace dom
 
 namespace layers {
 class ImageContainer;
 class OverlayImage;
 }  // namespace layers
 
 #define NS_DOMMEDIASTREAM_IID                        \
   {                                                  \
     0x8cb65468, 0x66c0, 0x444e, {                    \
       0x89, 0x9f, 0x89, 0x1d, 0x9e, 0xd2, 0xbe, 0x7c \
     }                                                \
   }
 
 /**
-
-// clang-format off
- * DOM wrapper for MediaStreams.
- *
- * To account for track operations such as clone(), addTrack() and
- * removeTrack(), a DOMMediaStream wraps three internal (and chained)
- * MediaStreams:
- *   1. mInputStream
- *      - Controlled by the owner/source of the DOMMediaStream.
- *        It's a stream of the type indicated by
- *      - DOMMediaStream::CreateSourceStream/CreateTrackUnionStream. A source
- *        typically creates its DOMMediaStream, creates the MediaStreamTracks
- *        owned by said stream, then gets the internal input stream to which it
- *        feeds data for the previously created tracks.
- *      - When necessary it can create tracks on the internal stream only and
- *        their corresponding MediaStreamTracks will be asynchronously created.
- *   2. mOwnedStream
- *      - A TrackUnionStream containing tracks owned by this stream.
- *      - The internal model of a MediaStreamTrack consists of its owning
- *        DOMMediaStream and the TrackID of the corresponding internal track in
- *        the owning DOMMediaStream's mOwnedStream.
- *      - The owned stream is different from the input stream since a cloned
- *        DOMMediaStream is also the owner of its (cloned) MediaStreamTracks.
- *      - Stopping an original track shall not stop its clone. This is
- *        solved by stopping it at the owned stream, while the clone's owned
- *        stream gets data directly from the original input stream.
- *      - A DOMMediaStream (original or clone) gets all tracks dynamically
- *        added by the source automatically forwarded by having a TRACK_ANY
- *        MediaInputPort set up from the owning DOMMediaStream's input stream
- *        to this DOMMediaStream's owned stream.
- *   3. mPlaybackStream
- *      - A TrackUnionStream containing the tracks corresponding to the
- *        MediaStreamTracks currently in this DOMMediaStream (per getTracks()).
- *      - Similarly as for mOwnedStream, there's a TRACK_ANY MediaInputPort set
- *        up from the owned stream to the playback stream to allow tracks
- *        dynamically added by the source to be automatically forwarded to any
- *        audio or video sinks.
- *      - MediaStreamTracks added by addTrack() are set up with a MediaInputPort
- *        locked to their internal TrackID, from their owning DOMMediaStream's
- *        owned stream to this playback stream.
- *
- *
- * A graphical representation of how tracks are connected in various cases as
- * follows:
- *
- *                     addTrack()ed case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack X
- *                                                     (pointing to t1 in A)
- *                                 --------> t2     <- MediaStreamTrack Y
- *                                /                    (pointing to t1 in B)
- * DOMStream B                   /
- *           Input        Owned /        Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack Y
- *                                                     (pointing to t1 in B)
+ * DOMMediaStream is the implementation of the js-exposed MediaStream interface.
  *
- *                     removeTrack()ed case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1                      <- No tracks
- *
- *
- *                     clone()d case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack X
- *               \                                     (pointing to t1 in A)
- *                -----
- * DOMStream B         \
- *           Input      \ Owned          Playback
- *                       -> t1 ------------> t1     <- MediaStreamTrack Y
- *                                                     (pointing to t1 in B)
- *
- *
- *            addTrack()ed, removeTrack()ed and clone()d case:
- *
- *  Here we have done the following:
- *    var A = someStreamWithTwoTracks;
- *    var B = someStreamWithOneTrack;
- *    var X = A.getTracks()[0];
- *    var Y = A.getTracks()[1];
- *    var Z = B.getTracks()[0];
- *    A.addTrack(Z);
- *    A.removeTrack(X);
- *    B.removeTrack(Z);
- *    var A' = A.clone();
- *
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1                      <- MediaStreamTrack X
- *                                                     (removed)
- *                                                     (pointing to t1 in A)
- *            t2 ---------> t2 ------------> t2     <- MediaStreamTrack Y
- *             \                                       (pointing to t2 in A)
- *              \                    ------> t3     <- MediaStreamTrack Z
- *               \                  /                  (pointing to t1 in B)
- * DOMStream B    \                /
- *           Input \      Owned   /      Playback
- *            t1 ---^-----> t1 ---                  <- MediaStreamTrack Z
- *                                                     (removed)
- *              \    \                                 (pointing to t1 in B)
- *               \    \
- * DOMStream A'   \    \
- *           Input \    \ Owned          Playback
- *                  \    -> t1 ------------> t1     <- MediaStreamTrack Y'
- *                   \                                 (pointing to t1 in A')
- *                    ----> t2 ------------> t2     <- MediaStreamTrack Z'
- *                                                     (pointing to t2 in A')
+ * This is a thin main-thread class grouping MediaStreamTracks together.
  */
-// clang-format on
-class DOMMediaStream
-    : public DOMEventTargetHelper,
-      public dom::PrincipalChangeObserver<dom::MediaStreamTrack>,
-      public RelativeTimeline,
-      public SupportsWeakPtr<DOMMediaStream> {
-  friend class dom::MediaStreamTrack;
+class DOMMediaStream : public DOMEventTargetHelper,
+                       public RelativeTimeline,
+                       public SupportsWeakPtr<DOMMediaStream> {
   typedef dom::MediaStreamTrack MediaStreamTrack;
   typedef dom::AudioStreamTrack AudioStreamTrack;
   typedef dom::VideoStreamTrack VideoStreamTrack;
   typedef dom::MediaStreamTrackSource MediaStreamTrackSource;
-  typedef dom::AudioTrack AudioTrack;
-  typedef dom::VideoTrack VideoTrack;
-  typedef dom::AudioTrackList AudioTrackList;
-  typedef dom::VideoTrackList VideoTrackList;
 
  public:
   typedef dom::MediaTrackConstraints MediaTrackConstraints;
 
   MOZ_DECLARE_WEAKREFERENCE_TYPENAME(DOMMediaStream)
 
   class TrackListener {
    public:
@@ -210,90 +88,16 @@ class DOMMediaStream
     virtual void NotifyActive(){};
 
     /**
      * Called when the DOMMediaStream has become inactive.
      */
     virtual void NotifyInactive(){};
   };
 
-  /**
-   * TrackPort is a representation of a MediaStreamTrack-MediaInputPort pair
-   * that make up a link between the Owned stream and the Playback stream.
-   *
-   * Semantically, the track is the identifier/key and the port the value of
-   * this connection.
-   *
-   * The input port can be shared between several TrackPorts. This is the case
-   * for DOMMediaStream's mPlaybackPort which forwards all tracks in its
-   * mOwnedStream automatically.
-   *
-   * If the MediaStreamTrack is owned by another DOMMediaStream (called A) than
-   * the one owning the TrackPort (called B), the input port (locked to the
-   * MediaStreamTrack's TrackID) connects A's mOwnedStream to B's
-   * mPlaybackStream.
-   *
-   * A TrackPort may never leave the DOMMediaStream it was created in. Internal
-   * use only.
-   */
-  class TrackPort {
-   public:
-    NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(TrackPort)
-    NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(TrackPort)
-
-    /**
-     * Indicates MediaInputPort ownership to the TrackPort.
-     *
-     * OWNED    - Owned by the TrackPort itself. TrackPort must destroy the
-     *            input port when it's destructed.
-     * EXTERNAL - Owned by another entity. It's the caller's responsibility to
-     *            ensure the the MediaInputPort outlives the TrackPort.
-     */
-    enum class InputPortOwnership { OWNED = 1, EXTERNAL };
-
-    TrackPort(MediaInputPort* aInputPort, MediaStreamTrack* aTrack,
-              const InputPortOwnership aOwnership);
-
-   protected:
-    virtual ~TrackPort();
-
-   public:
-    void DestroyInputPort();
-
-    /**
-     * Returns the source stream of the input port.
-     */
-    MediaStream* GetSource() const;
-
-    /**
-     * Returns the track ID this track is locked to in the source stream of the
-     * input port.
-     */
-    TrackID GetSourceTrackId() const;
-
-    MediaInputPort* GetInputPort() const { return mInputPort; }
-    MediaStreamTrack* GetTrack() const { return mTrack; }
-
-    /**
-     * Blocks aTrackId from going into mInputPort unless the port has been
-     * destroyed. Returns a promise that gets resolved when the MediaStreamGraph
-     * has applied the block in the playback stream.
-     */
-    RefPtr<GenericPromise> BlockSourceTrackId(TrackID aTrackId,
-                                              BlockingMode aBlockingMode);
-
-   private:
-    RefPtr<MediaInputPort> mInputPort;
-    RefPtr<MediaStreamTrack> mTrack;
-
-    // Defines if we've been given ownership of the input port or if it's owned
-    // externally. The owner is responsible for destroying the port.
-    const InputPortOwnership mOwnership;
-  };
-
   explicit DOMMediaStream(nsPIDOMWindowInner* aWindow);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DOMMediaStream, DOMEventTargetHelper)
   NS_DECLARE_STATIC_IID_ACCESSOR(NS_DOMMEDIASTREAM_IID)
 
   nsPIDOMWindowInner* GetParentObject() const { return mWindow; }
   virtual JSObject* WrapObject(JSContext* aCx,
@@ -330,168 +134,41 @@ class DOMMediaStream
 
   bool Active() const;
 
   IMPL_EVENT_HANDLER(addtrack)
   IMPL_EVENT_HANDLER(removetrack)
 
   // NON-WebIDL
 
-  MediaStreamTrack* GetOwnedTrackById(const nsAString& aId);
-
   /**
-   * Returns true if this DOMMediaStream has aTrack in its mPlaybackStream.
+   * Returns true if this DOMMediaStream has aTrack in mTracks.
    */
   bool HasTrack(const MediaStreamTrack& aTrack) const;
 
   /**
-   * Returns true if this DOMMediaStream owns aTrack.
-   */
-  bool OwnsTrack(const MediaStreamTrack& aTrack) const;
-
-  /**
-   * Returns the corresponding MediaStreamTrack if it's in our mOwnedStream.
-   * aInputTrackID should match the track's TrackID in its input stream,
-   * and aTrackID the TrackID in mOwnedStream.
-   *
-   * When aTrackID is not supplied or set to TRACK_ANY, we return the first
-   * MediaStreamTrack that matches the given input track. Note that there may
-   * be multiple MediaStreamTracks matching the same input track, but that they
-   * in that case all share the same MediaStreamTrackSource.
-   */
-  MediaStreamTrack* FindOwnedDOMTrack(MediaStream* aInputStream,
-                                      TrackID aInputTrackID,
-                                      TrackID aTrackID = TRACK_ANY) const;
-
-  /**
-   * Returns the TrackPort connecting aTrack's input stream to mOwnedStream,
-   * or nullptr if aTrack is not owned by this DOMMediaStream.
-   */
-  TrackPort* FindOwnedTrackPort(const MediaStreamTrack& aTrack) const;
-
-  /**
-   * Returns the corresponding MediaStreamTrack if it's in our mPlaybackStream.
-   * aInputTrackID should match the track's TrackID in its owned stream.
-   */
-  MediaStreamTrack* FindPlaybackDOMTrack(MediaStream* aInputStream,
-                                         TrackID aInputTrackID) const;
-
-  /**
-   * Returns the TrackPort connecting mOwnedStream to mPlaybackStream for
-   * aTrack.
-   */
-  TrackPort* FindPlaybackTrackPort(const MediaStreamTrack& aTrack) const;
-
-  MediaStream* GetInputStream() const { return mInputStream; }
-  ProcessedMediaStream* GetOwnedStream() const { return mOwnedStream; }
-  ProcessedMediaStream* GetPlaybackStream() const { return mPlaybackStream; }
-
-  /**
-   * Allows a video element to identify this stream as a camera stream, which
-   * needs special treatment.
-   */
-  virtual MediaStream* GetCameraStream() const { return nullptr; }
-
-  /**
-   * Legacy method that returns true when the playback stream has finished.
-   */
-  bool IsFinished() const;
-
-  TrackRate GraphRate();
-
-  /**
    * Returns a principal indicating who may access this stream. The stream
    * contents can only be accessed by principals subsuming this principal.
    */
-  nsIPrincipal* GetPrincipal() { return mPrincipal; }
-
-  /**
-   * Returns a principal indicating who may access video data of this stream.
-   * The video principal will be a combination of all live video tracks.
-   */
-  nsIPrincipal* GetVideoPrincipal() { return mVideoPrincipal; }
-
-  // From PrincipalChangeObserver<MediaStreamTrack>.
-  void PrincipalChanged(MediaStreamTrack* aTrack) override;
-
-  /**
-   * Add a PrincipalChangeObserver to this stream.
-   *
-   * Returns true if it was successfully added.
-   *
-   * Ownership of the PrincipalChangeObserver remains with the caller, and it's
-   * the caller's responsibility to remove the observer before it dies.
-   */
-  bool AddPrincipalChangeObserver(
-      dom::PrincipalChangeObserver<DOMMediaStream>* aObserver);
-
-  /**
-   * Remove an added PrincipalChangeObserver from this stream.
-   *
-   * Returns true if it was successfully removed.
-   */
-  bool RemovePrincipalChangeObserver(
-      dom::PrincipalChangeObserver<DOMMediaStream>* aObserver);
+  already_AddRefed<nsIPrincipal> GetPrincipal();
 
   // Webrtc allows the remote side to name a stream whatever it wants, and we
   // need to surface this to content.
   void AssignId(const nsAString& aID) { mID = aID; }
 
   /**
-   * Create a DOMMediaStream whose underlying input stream is a
-   * SourceMediaStream.
-   */
-  static already_AddRefed<DOMMediaStream> CreateSourceStreamAsInput(
-      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
-
-  /**
-   * Create a DOMMediaStream whose underlying input stream is a
-   * TrackUnionStream.
-   */
-  static already_AddRefed<DOMMediaStream> CreateTrackUnionStreamAsInput(
-      nsPIDOMWindowInner* aWindow, MediaStreamGraph* aGraph);
-
-  /**
-   * Create an DOMMediaStream whose underlying input stream is an
-   * AudioCaptureStream.
-   */
-  static already_AddRefed<DOMMediaStream> CreateAudioCaptureStreamAsInput(
-      nsPIDOMWindowInner* aWindow, nsIPrincipal* aPrincipal,
-      MediaStreamGraph* aGraph);
-
-  /**
    * Adds a MediaStreamTrack to mTracks and raises "addtrack".
    *
    * Note that "addtrack" is raised synchronously and only has an effect if
    * this MediaStream is already exposed to script. For spec compliance this is
    * to be called from an async task.
    */
   void AddTrackInternal(MediaStreamTrack* aTrack);
 
   /**
-   * Called for each track in our owned stream to indicate to JS that we
-   * are carrying that track.
-   *
-   * Pre-creates a MediaStreamTrack and returns it.
-   * It is up to the caller to make sure it is added through AddTrackInternal.
-   */
-  already_AddRefed<MediaStreamTrack> CreateDOMTrack(
-      TrackID aTrackID, MediaSegment::Type aType,
-      MediaStreamTrackSource* aSource,
-      const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
-
-  /**
-   * Creates a MediaStreamTrack cloned from aTrack, adds it to mTracks and
-   * returns it.
-   * aCloneTrackID is the TrackID the new track will get in mOwnedStream.
-   */
-  already_AddRefed<MediaStreamTrack> CloneDOMTrack(MediaStreamTrack& aTrack,
-                                                   TrackID aCloneTrackID);
-
-  /**
    * Add an nsISupports object that this stream will keep alive as long as
    * the stream itself is alive.
    */
   void AddConsumerToKeepAlive(nsISupports* aConsumer) {
     mConsumersToKeepAlive.AppendElement(aConsumer);
   }
 
   // Registers a track listener to this MediaStream, for listening to changes
@@ -507,37 +184,16 @@ class DOMMediaStream
   // Tells this MediaStream whether it can go inactive as soon as no tracks
   // are live anymore.
   void SetFinishedOnInactive(bool aFinishedOnInactive);
 
  protected:
   virtual ~DOMMediaStream();
 
   void Destroy();
-  void InitSourceStream(MediaStreamGraph* aGraph);
-  void InitTrackUnionStream(MediaStreamGraph* aGraph);
-  void InitAudioCaptureStream(nsIPrincipal* aPrincipal,
-                              MediaStreamGraph* aGraph);
-
-  // Sets up aStream as mInputStream. A producer may append data to a
-  // SourceMediaStream input stream, or connect another stream to a
-  // TrackUnionStream input stream.
-  void InitInputStreamCommon(MediaStream* aStream, MediaStreamGraph* aGraph);
-
-  // Sets up a new TrackUnionStream as mOwnedStream and connects it to
-  // mInputStream with a TRACK_ANY MediaInputPort if available.
-  // If this DOMMediaStream should have an input stream (producing data),
-  // it has to be initiated before the owned stream.
-  void InitOwnedStreamCommon(MediaStreamGraph* aGraph);
-
-  // Sets up a new TrackUnionStream as mPlaybackStream and connects it to
-  // mOwnedStream with a TRACK_ANY MediaInputPort if available.
-  // If this DOMMediaStream should have an owned stream (producer or clone),
-  // it has to be initiated before the playback stream.
-  void InitPlaybackStreamCommon(MediaStreamGraph* aGraph);
 
   // Dispatches NotifyActive() to all registered track listeners.
   void NotifyActive();
 
   // Dispatches NotifyInactive() to all registered track listeners.
   void NotifyInactive();
 
   // Dispatches NotifyTrackAdded() to all registered track listeners.
@@ -545,132 +201,39 @@ class DOMMediaStream
 
   // Dispatches NotifyTrackRemoved() to all registered track listeners.
   void NotifyTrackRemoved(const RefPtr<MediaStreamTrack>& aTrack);
 
   // Dispatches "addtrack" or "removetrack".
   nsresult DispatchTrackEvent(const nsAString& aName,
                               const RefPtr<MediaStreamTrack>& aTrack);
 
-  class PlaybackTrackListener;
-  friend class PlaybackTrackListener;
-
-  /**
-   * Block a track in our playback stream. Calls NotifyPlaybackTrackBlocked()
-   * after the MediaStreamGraph has applied the block and the track is no longer
-   * live.
-   */
-  void BlockPlaybackTrack(TrackPort* aTrack);
-
-  /**
-   * Called on main thread after MediaStreamGraph has applied a track block in
-   * our playback stream.
-   */
-  void NotifyPlaybackTrackBlocked();
-
-  // Recomputes the current principal of this stream based on the set of tracks
-  // it currently contains. PrincipalChangeObservers will be notified only if
-  // the principal changes.
-  void RecomputePrincipal();
-
   // We need this to track our parent object.
   nsCOMPtr<nsPIDOMWindowInner> mWindow;
 
-  // MediaStreams are owned by the graph, but we tell them when to die,
-  // and they won't die until we let them.
-
-  // This stream contains tracks used as input by us. Cloning happens from this
-  // stream. Tracks may exist in these stream but not in |mOwnedStream| if they
-  // have been stopped.
-  MediaStream* mInputStream;
-
-  // This stream contains tracks owned by us (if we were created directly from
-  // source, or cloned from some other stream). Tracks map to |mOwnedTracks|.
-  ProcessedMediaStream* mOwnedStream;
-
-  // This stream contains tracks currently played by us, despite of owner.
-  // Tracks map to |mTracks|.
-  ProcessedMediaStream* mPlaybackStream;
-
-  // This port connects mInputStream to mOwnedStream. All tracks forwarded.
-  RefPtr<MediaInputPort> mOwnedPort;
-
-  // This port connects mOwnedStream to mPlaybackStream. All tracks not
-  // explicitly blocked due to removal are forwarded.
-  RefPtr<MediaInputPort> mPlaybackPort;
-
-  // MediaStreamTracks corresponding to tracks in our mOwnedStream.
-  AutoTArray<RefPtr<TrackPort>, 2> mOwnedTracks;
-
-  // MediaStreamTracks corresponding to tracks in our mPlaybackStream.
-  AutoTArray<RefPtr<TrackPort>, 2> mTracks;
-
-  // Number of MediaStreamTracks that have been removed on main thread but are
-  // waiting to be removed on MediaStreamGraph thread.
-  size_t mTracksPendingRemoval;
+  // MediaStreamTracks contained by this DOMMediaStream.
+  nsTArray<RefPtr<MediaStreamTrack>> mTracks;
 
   // Listener tracking when live MediaStreamTracks in mTracks end.
+  class PlaybackTrackListener;
   RefPtr<PlaybackTrackListener> mPlaybackTrackListener;
 
-  // Set to true after MediaStreamGraph has created tracks for mPlaybackStream.
-  bool mTracksCreated;
-
   nsString mID;
 
   // Keep these alive while the stream is alive.
   nsTArray<nsCOMPtr<nsISupports>> mConsumersToKeepAlive;
 
-  bool mNotifiedOfMediaStreamGraphShutdown;
-
   // The track listeners subscribe to changes in this stream's track set.
   nsTArray<TrackListener*> mTrackListeners;
 
   // True if this stream has live tracks.
   bool mActive;
 
   // For compatibility with mozCaptureStream, we in some cases do not go
   // inactive until the MediaDecoder lets us. (Remove this in Bug 1302379)
   bool mFinishedOnInactive;
-
- private:
-  void NotifyPrincipalChanged();
-  // Principal identifying who may access the collected contents of this stream.
-  // If null, this stream can be used by anyone because it has no content yet.
-  nsCOMPtr<nsIPrincipal> mPrincipal;
-  // Video principal is used by video element as access is requested to its
-  // image data.
-  nsCOMPtr<nsIPrincipal> mVideoPrincipal;
-  nsTArray<dom::PrincipalChangeObserver<DOMMediaStream>*>
-      mPrincipalChangeObservers;
 };
 
 NS_DEFINE_STATIC_IID_ACCESSOR(DOMMediaStream, NS_DOMMEDIASTREAM_IID)
 
-class DOMAudioNodeMediaStream : public DOMMediaStream {
-  typedef dom::AudioNode AudioNode;
-
- public:
-  DOMAudioNodeMediaStream(nsPIDOMWindowInner* aWindow, AudioNode* aNode);
-
-  NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DOMAudioNodeMediaStream,
-                                           DOMMediaStream)
-
-  /**
-   * Create a DOMAudioNodeMediaStream whose underlying stream is a
-   * TrackUnionStream.
-   */
-  static already_AddRefed<DOMAudioNodeMediaStream>
-  CreateTrackUnionStreamAsInput(nsPIDOMWindowInner* aWindow, AudioNode* aNode,
-                                MediaStreamGraph* aGraph);
-
- protected:
-  ~DOMAudioNodeMediaStream();
-
- private:
-  // If this object wraps a stream owned by an AudioNode, we need to ensure that
-  // the node isn't cycle-collected too early.
-  RefPtr<AudioNode> mStreamNode;
-};
-
 }  // namespace mozilla
 
 #endif /* NSDOMMEDIASTREAM_H_ */
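With DOMMediaStream reduced to a track container, producer-side code assembles a stream as in the following minimal sketch (not part of the patch). It only relies on constructors and methods exercised in the MediaManager.cpp hunks further down; aWindow, aPrincipal, aLabel and the SomeTrackSource subclass are assumptions, and kAudioTrack is the TrackID constant MediaManager.cpp uses.

// Sketch only. Assumes DOMMediaStream.h, AudioStreamTrack.h and
// MediaStreamGraph.h are included, and that SomeTrackSource is a
// MediaStreamTrackSource subclass that takes ownership of the graph stream.
static already_AddRefed<DOMMediaStream> CreateStreamForProducer(
    nsPIDOMWindowInner* aWindow, nsIPrincipal* aPrincipal,
    const nsString& aLabel) {
  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
      MediaStreamGraph::SYSTEM_THREAD_DRIVER, aWindow,
      MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
  // The producer owns a stream in the graph and feeds data to it.
  RefPtr<MediaStream> stream = graph->CreateSourceStream();
  // The source wraps the stream and reports the principal for the track.
  auto source = MakeRefPtr<SomeTrackSource>(aPrincipal, aLabel, stream);
  RefPtr<dom::MediaStreamTrack> track =
      new dom::AudioStreamTrack(aWindow, stream, kAudioTrack, source);
  // The DOMMediaStream is now just a grouping of tracks.
  auto domStream = MakeRefPtr<DOMMediaStream>(aWindow);
  domStream->AddTrackInternal(track);
  return domStream.forget();
}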
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -232,49 +232,35 @@ void MediaDecoder::SetVolume(double aVol
 }
 
 RefPtr<GenericPromise> MediaDecoder::SetSink(AudioDeviceInfo* aSink) {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
   return GetStateMachine()->InvokeSetSink(aSink);
 }
 
-void MediaDecoder::AddOutputStream(DOMMediaStream* aStream) {
+void MediaDecoder::AddOutputStream(DOMMediaStream* aStream,
+                                   MediaStreamGraphImpl* aGraph) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
-  mDecoderStateMachine->EnsureOutputStreamManager(
-      aStream->GetInputStream()->Graph());
+  mDecoderStateMachine->EnsureOutputStreamManager(aGraph);
   if (mInfo) {
     mDecoderStateMachine->EnsureOutputStreamManagerHasTracks(*mInfo);
   }
   mDecoderStateMachine->AddOutputStream(aStream);
 }
 
 void MediaDecoder::RemoveOutputStream(DOMMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
   mDecoderStateMachine->RemoveOutputStream(aStream);
 }
 
-void MediaDecoder::SetNextOutputStreamTrackID(TrackID aNextTrackID) {
-  MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
-  AbstractThread::AutoEnter context(AbstractMainThread());
-  mDecoderStateMachine->SetNextOutputStreamTrackID(aNextTrackID);
-}
-
-TrackID MediaDecoder::GetNextOutputStreamTrackID() {
-  MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
-  AbstractThread::AutoEnter context(AbstractMainThread());
-  return mDecoderStateMachine->GetNextOutputStreamTrackID();
-}
-
 void MediaDecoder::SetOutputStreamPrincipal(nsIPrincipal* aPrincipal) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
   mDecoderStateMachine->SetOutputStreamPrincipal(aPrincipal);
 }
 
 double MediaDecoder::GetDuration() {
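MediaDecoder::AddOutputStream now takes the graph explicitly, because a DOMMediaStream no longer wraps an input stream from which a graph could be derived. A hypothetical caller sketch; the decoder, stream and graph are assumed to be supplied by whatever code implements capture:

// Hypothetical helper; assumes the decoder has been loaded and that the
// caller has already resolved a MediaStreamGraphImpl for the window.
static void CaptureDecoderInto(MediaDecoder* aDecoder, DOMMediaStream* aStream,
                               MediaStreamGraphImpl* aGraph) {
  // EnsureOutputStreamManager() and track pre-creation happen inside.
  aDecoder->AddOutputStream(aStream, aGraph);
}
// On teardown the stream is detached with:
//   aDecoder->RemoveOutputStream(aStream);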
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -11,48 +11,48 @@
 #  include "DecoderDoctorDiagnostics.h"
 #  include "MediaContainerType.h"
 #  include "MediaDecoderOwner.h"
 #  include "MediaEventSource.h"
 #  include "MediaMetadataManager.h"
 #  include "MediaPromiseDefs.h"
 #  include "MediaResource.h"
 #  include "MediaStatistics.h"
-#  include "MediaStreamGraph.h"
 #  include "SeekTarget.h"
 #  include "TimeUnits.h"
-#  include "TrackID.h"
 #  include "mozilla/Atomics.h"
 #  include "mozilla/CDMProxy.h"
 #  include "mozilla/MozPromise.h"
 #  include "mozilla/ReentrantMonitor.h"
 #  include "mozilla/StateMirroring.h"
 #  include "mozilla/StateWatching.h"
 #  include "mozilla/dom/MediaDebugInfoBinding.h"
 #  include "nsAutoPtr.h"
 #  include "nsCOMPtr.h"
 #  include "nsIObserver.h"
 #  include "nsISupports.h"
 #  include "nsITimer.h"
 
+class AudioDeviceInfo;
 class nsIPrincipal;
 
 namespace mozilla {
 
 namespace dom {
 class MediaMemoryInfo;
 }
 
 class AbstractThread;
 class DOMMediaStream;
 class FrameStatistics;
 class VideoFrameContainer;
 class MediaFormatReader;
 class MediaDecoderStateMachine;
 struct MediaPlaybackEvent;
+class MediaStreamGraphImpl;
 
 enum class Visibility : uint8_t;
 
 // GetCurrentTime is defined in winbase.h as zero argument macro forwarding to
 // GetTickCount() and conflicts with MediaDecoder::GetCurrentTime
 // implementation.
 #  ifdef GetCurrentTime
 #    undef GetCurrentTime
@@ -170,27 +170,20 @@ class MediaDecoder : public DecoderDocto
   // is used as the input for each ProcessedMediaStream created by calls to
   // captureStream(UntilEnded). Seeking creates a new source stream, as does
  // replaying after the input has ended. In the latter case, the new source is
   // not connected to streams created by captureStreamUntilEnded.
 
   // Add an output stream. All decoder output will be sent to the stream.
   // The stream is initially blocked. The decoder is responsible for unblocking
   // it while it is playing back.
-  void AddOutputStream(DOMMediaStream* aStream);
+  void AddOutputStream(DOMMediaStream* aStream, MediaStreamGraphImpl* aGraph);
   // Remove an output stream added with AddOutputStream.
   void RemoveOutputStream(DOMMediaStream* aStream);
 
-  // Set the TrackID to be used as the initial id by the next DecodedStream
-  // sink.
-  void SetNextOutputStreamTrackID(TrackID aNextTrackID);
-  // Get the next TrackID to be allocated by DecodedStream,
-  // or the last set TrackID if there is no DecodedStream sink.
-  TrackID GetNextOutputStreamTrackID();
-
   // Update the principal for any output streams and their tracks.
   void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
 
   // Return the duration of the video in seconds.
   virtual double GetDuration();
 
   // Return true if the stream is infinite.
   bool IsInfinite() const;
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -3334,17 +3334,16 @@ void MediaDecoderStateMachine::FinishDec
 
   EnqueueFirstFrameLoadedEvent();
 }
 
 RefPtr<ShutdownPromise> MediaDecoderStateMachine::BeginShutdown() {
   MOZ_ASSERT(NS_IsMainThread());
   if (mOutputStreamManager) {
     mOutputStreamManager->Disconnect();
-    mNextOutputStreamTrackID = mOutputStreamManager->NextTrackID();
   }
   return InvokeAsync(OwnerThread(), this, __func__,
                      &MediaDecoderStateMachine::Shutdown);
 }
 
 RefPtr<ShutdownPromise> MediaDecoderStateMachine::FinishShutdown() {
   MOZ_ASSERT(OnTaskQueue());
   LOG("Shutting down state machine task queue");
@@ -3772,67 +3771,53 @@ void MediaDecoderStateMachine::RemoveOut
         });
     nsresult rv = OwnerThread()->Dispatch(r.forget());
     MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
     Unused << rv;
   }
 }
 
 void MediaDecoderStateMachine::EnsureOutputStreamManager(
-    MediaStreamGraph* aGraph) {
+    MediaStreamGraphImpl* aGraph) {
   MOZ_ASSERT(NS_IsMainThread());
   if (mOutputStreamManager) {
     return;
   }
-  mOutputStreamManager = new OutputStreamManager(
-      aGraph->CreateSourceStream(), mNextOutputStreamTrackID,
-      mOutputStreamPrincipal, mAbstractMainThread);
+  mOutputStreamManager = new OutputStreamManager(aGraph, mOutputStreamPrincipal,
+                                                 mAbstractMainThread);
 }
 
 void MediaDecoderStateMachine::EnsureOutputStreamManagerHasTracks(
     const MediaInfo& aLoadedInfo) {
   MOZ_ASSERT(NS_IsMainThread());
   if (!mOutputStreamManager) {
     return;
   }
   if ((!aLoadedInfo.HasAudio() ||
        mOutputStreamManager->HasTrackType(MediaSegment::AUDIO)) &&
       (!aLoadedInfo.HasVideo() ||
        mOutputStreamManager->HasTrackType(MediaSegment::VIDEO))) {
     return;
   }
   if (aLoadedInfo.HasAudio()) {
     MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::AUDIO));
-    mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
-    LOG("Pre-created audio track with id %d",
-        mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::AUDIO));
+    RefPtr<SourceMediaStream> dummy =
+        mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
+    LOG("Pre-created audio track with underlying stream %p", dummy.get());
+    Unused << dummy;
   }
   if (aLoadedInfo.HasVideo()) {
     MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::VIDEO));
-    mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
-    LOG("Pre-created video track with id %d",
-        mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::VIDEO));
+    RefPtr<SourceMediaStream> dummy =
+        mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
+    LOG("Pre-created video track with underlying stream %p", dummy.get());
+    Unused << dummy;
   }
 }
 
-void MediaDecoderStateMachine::SetNextOutputStreamTrackID(
-    TrackID aNextTrackID) {
-  MOZ_ASSERT(NS_IsMainThread());
-  LOG("SetNextOutputStreamTrackID aNextTrackID=%d", aNextTrackID);
-  mNextOutputStreamTrackID = aNextTrackID;
-}
-
-TrackID MediaDecoderStateMachine::GetNextOutputStreamTrackID() {
-  MOZ_ASSERT(NS_IsMainThread());
-  if (mOutputStreamManager) {
-    return mOutputStreamManager->NextTrackID();
-  }
-  return mNextOutputStreamTrackID;
-}
-
 class VideoQueueMemoryFunctor : public nsDequeFunctor {
  public:
   VideoQueueMemoryFunctor() : mSize(0) {}
 
   MOZ_DEFINE_MALLOC_SIZE_OF(MallocSizeOf);
 
   virtual void operator()(void* aObject) override {
     const VideoData* v = static_cast<const VideoData*>(aObject);
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -183,32 +183,26 @@ class MediaDecoderStateMachine
   // Returns the state machine task queue.
   TaskQueue* OwnerThread() const { return mTaskQueue; }
 
   RefPtr<GenericPromise> RequestDebugInfo(
       dom::MediaDecoderStateMachineDebugInfo& aInfo);
 
   void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
   // If an OutputStreamManager does not exist, one will be created.
-  void EnsureOutputStreamManager(MediaStreamGraph* aGraph);
+  void EnsureOutputStreamManager(MediaStreamGraphImpl* aGraph);
   // If an OutputStreamManager exists, tracks matching aLoadedInfo will be
   // created unless they already exist in the manager.
   void EnsureOutputStreamManagerHasTracks(const MediaInfo& aLoadedInfo);
   // Add an output stream to the output stream manager. The manager must have
   // been created through EnsureOutputStreamManager() before this.
   void AddOutputStream(DOMMediaStream* aStream);
   // Remove an output stream added with AddOutputStream. If the last output
   // stream was removed, we will also tear down the OutputStreamManager.
   void RemoveOutputStream(DOMMediaStream* aStream);
-  // Set the TrackID to be used as the initial id by the next DecodedStream
-  // sink.
-  void SetNextOutputStreamTrackID(TrackID aNextTrackID);
-  // Get the next TrackID to be allocated by DecodedStream,
-  // or the last set TrackID if there is no DecodedStream sink.
-  TrackID GetNextOutputStreamTrackID();
 
   // Seeks to the decoder to aTarget asynchronously.
   RefPtr<MediaDecoder::SeekPromise> InvokeSeek(const SeekTarget& aTarget);
 
   void DispatchSetPlaybackRate(double aPlaybackRate) {
     OwnerThread()->DispatchStateChange(NewRunnableMethod<double>(
         "MediaDecoderStateMachine::SetPlaybackRate", this,
         &MediaDecoderStateMachine::SetPlaybackRate, aPlaybackRate));
@@ -680,20 +674,16 @@ class MediaDecoderStateMachine
 
   // Data about MediaStreams that are being fed by the decoder.
   // Main thread only.
   RefPtr<OutputStreamManager> mOutputStreamManager;
 
   // Principal used by output streams. Main thread only.
   nsCOMPtr<nsIPrincipal> mOutputStreamPrincipal;
 
-  // The next TrackID to be used when a DecodedStream allocates a track.
-  // Main thread only.
-  TrackID mNextOutputStreamTrackID = 1;
-
   // Track the current video decode mode.
   VideoDecodeMode mVideoDecodeMode;
 
   // Track the complete & error for audio/video separately
   MozPromiseRequestHolder<MediaSink::EndedPromise> mMediaSinkAudioEndedPromise;
   MozPromiseRequestHolder<MediaSink::EndedPromise> mMediaSinkVideoEndedPromise;
 
   MediaEventListener mAudioQueueListener;
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1,20 +1,21 @@
 /* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
 /* vim: set ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaManager.h"
 
+#include "AudioCaptureStream.h"
 #include "AudioDeviceInfo.h"
+#include "AudioStreamTrack.h"
 #include "MediaStreamGraphImpl.h"
 #include "MediaTimer.h"
-#include "mozilla/dom/MediaStreamTrack.h"
 #include "mozilla/dom/MediaDeviceInfo.h"
 #include "MediaStreamListener.h"
 #include "nsArray.h"
 #include "nsContentUtils.h"
 #include "nsGlobalWindow.h"
 #include "nsHashPropertyBag.h"
 #include "nsIEventTarget.h"
 #include "nsIUUIDGenerator.h"
@@ -50,16 +51,17 @@
 #include "mozilla/dom/GetUserMediaRequestBinding.h"
 #include "mozilla/dom/Promise.h"
 #include "mozilla/dom/MediaDevices.h"
 #include "mozilla/Base64.h"
 #include "mozilla/ipc/BackgroundChild.h"
 #include "mozilla/media/MediaChild.h"
 #include "mozilla/media/MediaTaskUtils.h"
 #include "MediaTrackConstraints.h"
+#include "VideoStreamTrack.h"
 #include "VideoUtils.h"
 #include "ThreadSafeRefcountingWithMainThreadDestruction.h"
 #include "nsProxyRelease.h"
 #include "nsVariant.h"
 
 // For snprintf
 #include "mozilla/Sprintf.h"
 
@@ -172,17 +174,18 @@ namespace mozilla {
 
 #ifdef LOG
 #  undef LOG
 #endif
 
 LazyLogModule gMediaManagerLog("MediaManager");
 #define LOG(...) MOZ_LOG(gMediaManagerLog, LogLevel::Debug, (__VA_ARGS__))
 
-using dom::BasicTrackSource;
+class LocalTrackSource;
+
 using dom::CallerType;
 using dom::ConstrainDOMStringParameters;
 using dom::ConstrainDoubleRange;
 using dom::ConstrainLongRange;
 using dom::DisplayMediaStreamConstraints;
 using dom::Document;
 using dom::FeaturePolicyUtils;
 using dom::File;
@@ -205,19 +208,23 @@ using dom::Promise;
 using dom::Sequence;
 using media::NewRunnableFrom;
 using media::NewTaskFrom;
 using media::Refcountable;
 
 static Atomic<bool> sHasShutdown;
 
 struct DeviceState {
-  DeviceState(RefPtr<MediaDevice> aDevice, bool aOffWhileDisabled)
-      : mOffWhileDisabled(aOffWhileDisabled), mDevice(std::move(aDevice)) {
+  DeviceState(RefPtr<MediaDevice> aDevice,
+              RefPtr<LocalTrackSource> aTrackSource, bool aOffWhileDisabled)
+      : mOffWhileDisabled(aOffWhileDisabled),
+        mDevice(std::move(aDevice)),
+        mTrackSource(std::move(aTrackSource)) {
     MOZ_ASSERT(mDevice);
+    MOZ_ASSERT(mTrackSource);
   }
 
   // true if we have stopped mDevice, this is a terminal state.
   // MainThread only.
   bool mStopped = false;
 
   // true if mDevice is currently enabled, i.e., turned on and capturing.
   // MainThread only.
@@ -245,16 +252,21 @@ struct DeviceState {
   // disabled. When the timer fires we initiate Stop()ing mDevice.
   // If set we allow dynamically stopping and starting mDevice.
   // Any thread.
   const RefPtr<MediaTimer> mDisableTimer = new MediaTimer();
 
   // The underlying device we keep state for. Always non-null.
   // Threadsafe access, but see method declarations for individual constraints.
   const RefPtr<MediaDevice> mDevice;
+
+  // The MediaStreamTrackSource for any tracks (original and clones) originating
+  // from this device. Always non-null. Threadsafe access, but see method
+  // declarations for individual constraints.
+  const RefPtr<LocalTrackSource> mTrackSource;
 };
 
 /**
  * This mimics the capture state from nsIMediaManagerService.
  */
 enum class CaptureState : uint16_t {
   Off = nsIMediaManagerService::STATE_NOCAPTURE,
   Enabled = nsIMediaManagerService::STATE_CAPTURE_ENABLED,
@@ -310,19 +322,20 @@ class SourceListener : public SupportsWe
   /**
    * Registers this source listener as belonging to the given window listener.
    */
   void Register(GetUserMediaWindowListener* aListener);
 
   /**
    * Marks this listener as active and adds itself as a listener to aStream.
    */
-  void Activate(RefPtr<SourceMediaStream> aStream,
-                RefPtr<MediaDevice> aAudioDevice,
-                RefPtr<MediaDevice> aVideoDevice);
+  void Activate(RefPtr<MediaDevice> aAudioDevice,
+                RefPtr<LocalTrackSource> aAudioTrackSource,
+                RefPtr<MediaDevice> aVideoDevice,
+                RefPtr<LocalTrackSource> aVideoTrackSource);
 
   /**
    * Posts a task to initialize and start all associated devices.
    */
   RefPtr<SourceListenerPromise> InitializeAsync();
 
   /**
    * Stops all live tracks, finishes the associated MediaStream and cleans up
@@ -372,29 +385,25 @@ class SourceListener : public SupportsWe
   void SetEnabledFor(TrackID aTrackID, bool aEnabled);
 
   /**
    * Stops all screen/app/window/audioCapture sharing, but not camera or
    * microphone.
    */
   void StopSharing();
 
-  MediaStream* Stream() const { return mStream; }
-
-  SourceMediaStream* GetSourceStream();
-
   MediaDevice* GetAudioDevice() const {
     return mAudioDeviceState ? mAudioDeviceState->mDevice.get() : nullptr;
   }
 
   MediaDevice* GetVideoDevice() const {
     return mVideoDeviceState ? mVideoDeviceState->mDevice.get() : nullptr;
   }
 
-  bool Activated() const { return mStream; }
+  bool Activated() const { return mAudioDeviceState || mVideoDeviceState; }
 
   bool Stopped() const { return mStopped; }
 
   bool CapturingVideo() const;
 
   bool CapturingAudio() const;
 
   CaptureState CapturingSource(MediaSourceEnum aSource) const;
@@ -431,17 +440,16 @@ class SourceListener : public SupportsWe
 
   // Weak pointer to the window listener that owns us. MainThread only.
   GetUserMediaWindowListener* mWindowListener;
 
   // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're set on Activate() and never assigned to again.
   UniquePtr<DeviceState> mAudioDeviceState;
   UniquePtr<DeviceState> mVideoDeviceState;
-  RefPtr<SourceMediaStream> mStream;  // threadsafe refcnt
 };
 
 /**
  * This class represents a WindowID and handles all MediaStreamTrackListeners
  * (here subclassed as SourceListeners) used to feed GetUserMedia source
  * streams. It proxies feedback from them into messages for browser chrome.
  * The SourceListeners are used to Start() and Stop() the underlying
  * MediaEngineSource when MediaStreams are assigned and deassigned in content.
@@ -474,29 +482,30 @@ class GetUserMediaWindowListener {
     mInactiveListeners.AppendElement(std::move(aListener));
   }
 
   /**
    * Activates an already registered and inactive gUM source listener for this
    * WindowListener.
    */
   void Activate(RefPtr<SourceListener> aListener,
-                RefPtr<SourceMediaStream> aStream,
                 RefPtr<MediaDevice> aAudioDevice,
-                RefPtr<MediaDevice> aVideoDevice) {
+                RefPtr<LocalTrackSource> aAudioTrackSource,
+                RefPtr<MediaDevice> aVideoDevice,
+                RefPtr<LocalTrackSource> aVideoTrackSource) {
     MOZ_ASSERT(NS_IsMainThread());
     MOZ_ASSERT(aListener);
     MOZ_ASSERT(!aListener->Activated());
     MOZ_ASSERT(mInactiveListeners.Contains(aListener),
                "Must be registered to activate");
     MOZ_ASSERT(!mActiveListeners.Contains(aListener), "Already activated");
 
     mInactiveListeners.RemoveElement(aListener);
-    aListener->Activate(std::move(aStream), std::move(aAudioDevice),
-                        std::move(aVideoDevice));
+    aListener->Activate(std::move(aAudioDevice), std::move(aAudioTrackSource),
+                        std::move(aVideoDevice), std::move(aVideoTrackSource));
     mActiveListeners.AppendElement(std::move(aListener));
   }
 
   /**
    * Removes all SourceListeners from this window listener.
    * Removes this window listener from the list of active windows, so callers
    * need to make sure to hold a strong reference.
    */
@@ -701,16 +710,139 @@ class GetUserMediaWindowListener {
   // true if we have scheduled a task to notify chrome in the next stable state.
   // The task will reset this to false. MainThread only.
   bool mChromeNotificationTaskPosted;
 
   nsTArray<RefPtr<SourceListener>> mInactiveListeners;
   nsTArray<RefPtr<SourceListener>> mActiveListeners;
 };
 
+class LocalTrackSource : public MediaStreamTrackSource {
+ public:
+  LocalTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel,
+                   const RefPtr<SourceListener>& aListener,
+                   MediaSourceEnum aSource, MediaStream* aStream,
+                   TrackID aTrackID, RefPtr<PeerIdentity> aPeerIdentity)
+      : MediaStreamTrackSource(aPrincipal, aLabel),
+        mSource(aSource),
+        mStream(aStream),
+        mTrackID(aTrackID),
+        mPeerIdentity(std::move(aPeerIdentity)),
+        mListener(aListener.get()) {}
+
+  MediaSourceEnum GetMediaSource() const override { return mSource; }
+
+  const PeerIdentity* GetPeerIdentity() const override { return mPeerIdentity; }
+
+  RefPtr<MediaStreamTrackSource::ApplyConstraintsPromise> ApplyConstraints(
+      const MediaTrackConstraints& aConstraints,
+      CallerType aCallerType) override {
+    MOZ_ASSERT(NS_IsMainThread());
+    if (sHasShutdown || !mListener) {
+      // Track has been stopped, or we are in shutdown. In either case
+      // there's no observable outcome, so pretend we succeeded.
+      return MediaStreamTrackSource::ApplyConstraintsPromise::CreateAndResolve(
+          false, __func__);
+    }
+    return mListener->ApplyConstraintsToTrack(mTrackID, aConstraints,
+                                              aCallerType);
+  }
+
+  void GetSettings(MediaTrackSettings& aOutSettings) override {
+    if (mListener) {
+      mListener->GetSettingsFor(mTrackID, aOutSettings);
+    }
+  }
+
+  void Stop() override {
+    if (mListener) {
+      mListener->StopTrack(mTrackID);
+      mListener = nullptr;
+    }
+    if (!mStream->IsDestroyed()) {
+      mStream->Destroy();
+    }
+  }
+
+  void Disable() override {
+    if (mListener) {
+      mListener->SetEnabledFor(mTrackID, false);
+    }
+  }
+
+  void Enable() override {
+    if (mListener) {
+      mListener->SetEnabledFor(mTrackID, true);
+    }
+  }
+
+  const MediaSourceEnum mSource;
+  const RefPtr<MediaStream> mStream;
+  const TrackID mTrackID;
+  const RefPtr<const PeerIdentity> mPeerIdentity;
+
+ protected:
+  ~LocalTrackSource() {
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(mStream->IsDestroyed());
+  }
+
+  // This is a weak pointer to avoid having the SourceListener (which may
+  // have references to threads and threadpools) kept alive by DOM-objects
+  // that may have ref-cycles and thus are released very late during
+  // shutdown, even after xpcom-shutdown-threads. See bug 1351655 for what
+  // can happen.
+  WeakPtr<SourceListener> mListener;
+};
+
+class AudioCaptureTrackSource : public LocalTrackSource {
+ public:
+  AudioCaptureTrackSource(nsIPrincipal* aPrincipal, nsPIDOMWindowInner* aWindow,
+                          const nsString& aLabel,
+                          AudioCaptureStream* aAudioCaptureStream,
+                          RefPtr<PeerIdentity> aPeerIdentity)
+      : LocalTrackSource(aPrincipal, aLabel, nullptr,
+                         MediaSourceEnum::AudioCapture, aAudioCaptureStream,
+                         kAudioTrack, std::move(aPeerIdentity)),
+        mWindow(aWindow),
+        mAudioCaptureStream(aAudioCaptureStream) {
+    mAudioCaptureStream->Start();
+    mAudioCaptureStream->Graph()->RegisterCaptureStreamForWindow(
+        mWindow->WindowID(), mAudioCaptureStream);
+    mWindow->SetAudioCapture(true);
+  }
+
+  void Stop() override {
+    MOZ_ASSERT(NS_IsMainThread());
+    if (!mAudioCaptureStream->IsDestroyed()) {
+      MOZ_ASSERT(mWindow);
+      mWindow->SetAudioCapture(false);
+      mAudioCaptureStream->Graph()->UnregisterCaptureStreamForWindow(
+          mWindow->WindowID());
+      mWindow = nullptr;
+    }
+    // LocalTrackSource destroys the stream.
+    LocalTrackSource::Stop();
+    MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
+  }
+
+  ProcessedMediaStream* InputStream() const {
+    return mAudioCaptureStream.get();
+  }
+
+ protected:
+  ~AudioCaptureTrackSource() {
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
+  }
+
+  RefPtr<nsPIDOMWindowInner> mWindow;
+  const RefPtr<AudioCaptureStream> mAudioCaptureStream;
+};
+
 /**
  * nsIMediaDevice implementation.
  */
 NS_IMPL_ISUPPORTS(MediaDevice, nsIMediaDevice)
 
 MediaDevice::MediaDevice(const RefPtr<MediaEngineSource>& aSource,
                          const nsString& aName, const nsString& aID,
                          const nsString& aGroupID, const nsString& aRawID)
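LocalTrackSource owns the graph stream backing its track: Stop() both stops the device through the SourceListener and destroys the stream, so nothing else needs to tear the stream down separately. A hypothetical helper making that explicit; the assertion mirrors the one in AudioCaptureTrackSource::Stop() above:

// Hypothetical teardown; aSource is the LocalTrackSource created for a
// device in GetUserMediaStreamRunnable below.
static void StopCapture(LocalTrackSource* aSource) {
  // Stops the device via the SourceListener, if it is still alive...
  aSource->Stop();
  // ...and the source has also destroyed the graph stream it owned.
  MOZ_ASSERT(aSource->mStream->IsDestroyed());
}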
@@ -1112,181 +1244,103 @@ class GetUserMediaStreamRunnable : publi
     }
 
     MediaStreamGraph::GraphDriverType graphDriverType =
         mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
                      : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
     MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
         graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
 
-    RefPtr<DOMMediaStream> domStream;
-    RefPtr<SourceMediaStream> stream;
+    auto domStream = MakeRefPtr<DOMMediaStream>(window);
+    RefPtr<LocalTrackSource> audioTrackSource;
+    RefPtr<LocalTrackSource> videoTrackSource;
+    nsCOMPtr<nsIPrincipal> principal;
+    if (mPeerIdentity) {
+      principal = NullPrincipal::CreateWithInheritedAttributes(
+          window->GetExtantDoc()->NodePrincipal());
+    } else {
+      principal = window->GetExtantDoc()->NodePrincipal();
+    }
     RefPtr<GenericNonExclusivePromise> firstFramePromise;
-    // AudioCapture is a special case, here, in the sense that we're not really
-    // using the audio source and the SourceMediaStream, which acts as
-    // placeholders. We re-route a number of stream internaly in the MSG and mix
-    // them down instead.
     if (mAudioDevice &&
         mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
+      // AudioCapture is a special case here, in the sense that we're not
+      // really using the audio source and the SourceMediaStream, which act as
+      // placeholders. We re-route a number of streams internally in the MSG
+      // and mix them down instead.
       NS_WARNING(
           "MediaCaptureWindowState doesn't handle "
           "MediaSourceEnum::AudioCapture. This must be fixed with UX "
           "before shipping.");
-      // It should be possible to pipe the capture stream to anything. CORS is
-      // not a problem here, we got explicit user content.
-      nsCOMPtr<nsIPrincipal> principal =
-          window->GetExtantDoc()->NodePrincipal();
-      domStream = DOMMediaStream::CreateAudioCaptureStreamAsInput(
-          window, principal, msg);
-
-      stream = msg->CreateSourceStream();  // Placeholder
-      msg->RegisterCaptureStreamForWindow(
-          mWindowID, domStream->GetInputStream()->AsProcessedStream());
-      window->SetAudioCapture(true);
+      auto audioCaptureSource = MakeRefPtr<AudioCaptureTrackSource>(
+          principal, window, NS_LITERAL_STRING("Window audio capture"),
+          msg->CreateAudioCaptureStream(kAudioTrack), mPeerIdentity);
+      audioTrackSource = audioCaptureSource;
+      RefPtr<MediaStreamTrack> track =
+          new dom::AudioStreamTrack(window, audioCaptureSource->InputStream(),
+                                    kAudioTrack, audioCaptureSource);
+      domStream->AddTrackInternal(track);
     } else {
-      class LocalTrackSource : public MediaStreamTrackSource {
-       public:
-        LocalTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel,
-                         const RefPtr<SourceListener>& aListener,
-                         MediaSourceEnum aSource, TrackID aTrackID,
-                         RefPtr<PeerIdentity> aPeerIdentity)
-            : MediaStreamTrackSource(aPrincipal, aLabel),
-              mListener(aListener.get()),
-              mSource(aSource),
-              mTrackID(aTrackID),
-              mPeerIdentity(std::move(aPeerIdentity)) {}
-
-        MediaSourceEnum GetMediaSource() const override { return mSource; }
-
-        const PeerIdentity* GetPeerIdentity() const override {
-          return mPeerIdentity;
-        }
-
-        RefPtr<MediaStreamTrackSource::ApplyConstraintsPromise>
-        ApplyConstraints(const MediaTrackConstraints& aConstraints,
-                         CallerType aCallerType) override {
-          MOZ_ASSERT(NS_IsMainThread());
-          if (sHasShutdown || !mListener) {
-            // Track has been stopped, or we are in shutdown. In either case
-            // there's no observable outcome, so pretend we succeeded.
-            return MediaStreamTrackSource::ApplyConstraintsPromise::
-                CreateAndResolve(false, __func__);
-          }
-          return mListener->ApplyConstraintsToTrack(mTrackID, aConstraints,
-                                                    aCallerType);
-        }
-
-        void GetSettings(MediaTrackSettings& aOutSettings) override {
-          if (mListener) {
-            mListener->GetSettingsFor(mTrackID, aOutSettings);
-          }
-        }
-
-        void Stop() override {
-          if (mListener) {
-            mListener->StopTrack(mTrackID);
-            mListener = nullptr;
-          }
-        }
-
-        void Disable() override {
-          if (mListener) {
-            mListener->SetEnabledFor(mTrackID, false);
-          }
-        }
-
-        void Enable() override {
-          if (mListener) {
-            mListener->SetEnabledFor(mTrackID, true);
-          }
-        }
-
-       protected:
-        ~LocalTrackSource() {}
-
-        // This is a weak pointer to avoid having the SourceListener (which may
-        // have references to threads and threadpools) kept alive by DOM-objects
-        // that may have ref-cycles and thus are released very late during
-        // shutdown, even after xpcom-shutdown-threads. See bug 1351655 for what
-        // can happen.
-        WeakPtr<SourceListener> mListener;
-        const MediaSourceEnum mSource;
-        const TrackID mTrackID;
-        const RefPtr<const PeerIdentity> mPeerIdentity;
-      };
-
-      nsCOMPtr<nsIPrincipal> principal;
-      if (mPeerIdentity) {
-        principal = NullPrincipal::CreateWithInheritedAttributes(
-            window->GetExtantDoc()->NodePrincipal());
-      } else {
-        principal = window->GetExtantDoc()->NodePrincipal();
-      }
-
-      // Normal case, connect the source stream to the track union stream to
-      // avoid us blocking. Pass a simple TrackSourceGetter for potential
-      // fake tracks. Apart from them gUM never adds tracks dynamically.
-      domStream = DOMMediaStream::CreateSourceStreamAsInput(window, msg);
-      stream = domStream->GetInputStream()->AsSourceStream();
-
       if (mAudioDevice) {
         nsString audioDeviceName;
         mAudioDevice->GetName(audioDeviceName);
-        const MediaSourceEnum source = mAudioDevice->GetMediaSource();
-        RefPtr<MediaStreamTrackSource> audioSource =
-            new LocalTrackSource(principal, audioDeviceName, mSourceListener,
-                                 source, kAudioTrack, mPeerIdentity);
+        RefPtr<MediaStream> stream = msg->CreateSourceStream();
+        audioTrackSource = new LocalTrackSource(
+            principal, audioDeviceName, mSourceListener,
+            mAudioDevice->GetMediaSource(), stream, kAudioTrack, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mAudio));
-        RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
-            kAudioTrack, MediaSegment::AUDIO, audioSource,
+        RefPtr<MediaStreamTrack> track = new dom::AudioStreamTrack(
+            window, stream, kAudioTrack, audioTrackSource,
             GetInvariant(mConstraints.mAudio));
         domStream->AddTrackInternal(track);
       }
       if (mVideoDevice) {
         nsString videoDeviceName;
         mVideoDevice->GetName(videoDeviceName);
-        const MediaSourceEnum source = mVideoDevice->GetMediaSource();
-        RefPtr<MediaStreamTrackSource> videoSource =
-            new LocalTrackSource(principal, videoDeviceName, mSourceListener,
-                                 source, kVideoTrack, mPeerIdentity);
+        RefPtr<MediaStream> stream = msg->CreateSourceStream();
+        videoTrackSource = new LocalTrackSource(
+            principal, videoDeviceName, mSourceListener,
+            mVideoDevice->GetMediaSource(), stream, kVideoTrack, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mVideo));
-        RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
-            kVideoTrack, MediaSegment::VIDEO, videoSource,
+        RefPtr<MediaStreamTrack> track = new dom::VideoStreamTrack(
+            window, stream, kVideoTrack, videoTrackSource,
             GetInvariant(mConstraints.mVideo));
         domStream->AddTrackInternal(track);
-        switch (source) {
+        switch (mVideoDevice->GetMediaSource()) {
           case MediaSourceEnum::Browser:
           case MediaSourceEnum::Screen:
           case MediaSourceEnum::Window:
             // Wait for first frame for screen-sharing devices, to ensure
             // width and height settings are available immediately, to pass wpt.
             firstFramePromise = mVideoDevice->mSource->GetFirstFramePromise();
             break;
           default:
             break;
         }
       }
     }
 
-    if (!domStream || !stream || sHasShutdown) {
+    if (!domStream || (!audioTrackSource && !videoTrackSource) ||
+        sHasShutdown) {
       LOG("Returning error for getUserMedia() - no stream");
 
       mHolder.Reject(MakeRefPtr<MediaMgrError>(
                          MediaMgrError::Name::AbortError,
                          sHasShutdown ? NS_LITERAL_STRING("In shutdown")
                                       : NS_LITERAL_STRING("No stream.")),
                      __func__);
       return NS_OK;
     }
 
     // Activate our source listener. We'll call Start() on the source when we
     // get a callback that the MediaStream has started consuming. The listener
     // is freed when the page is invalidated (on navigation or close).
-    mWindowListener->Activate(mSourceListener, stream, mAudioDevice,
-                              mVideoDevice);
+    mWindowListener->Activate(mSourceListener, mAudioDevice,
+                              std::move(audioTrackSource), mVideoDevice,
+                              std::move(videoTrackSource));
 
     nsTArray<RefPtr<MediaStreamTrack>> tracks(2);
     domStream->GetTracks(tracks);
     RefPtr<MediaStreamTrack> track = tracks[0];
     auto tracksCreatedListener = MakeRefPtr<TracksCreatedListener>(
         mManager, std::move(mHolder), mWindowListener, mWindowID, domStream,
         track, std::move(firstFramePromise));
 
@@ -4079,73 +4133,79 @@ void SourceListener::Register(GetUserMed
   MOZ_ASSERT(aListener, "No listener");
   MOZ_ASSERT(!mWindowListener, "Already registered");
   MOZ_ASSERT(!Activated(), "Already activated");
 
   mPrincipalHandle = aListener->GetPrincipalHandle();
   mWindowListener = aListener;
 }
 
-void SourceListener::Activate(RefPtr<SourceMediaStream> aStream,
-                              RefPtr<MediaDevice> aAudioDevice,
-                              RefPtr<MediaDevice> aVideoDevice) {
+void SourceListener::Activate(RefPtr<MediaDevice> aAudioDevice,
+                              RefPtr<LocalTrackSource> aAudioTrackSource,
+                              RefPtr<MediaDevice> aVideoDevice,
+                              RefPtr<LocalTrackSource> aVideoTrackSource) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
 
   LOG("SourceListener %p activating audio=%p video=%p", this,
       aAudioDevice.get(), aVideoDevice.get());
 
   MOZ_ASSERT(!mStopped, "Cannot activate stopped source listener");
   MOZ_ASSERT(!Activated(), "Already activated");
 
   mMainThreadCheck = GetCurrentVirtualThread();
-  mStream = std::move(aStream);
   if (aAudioDevice) {
     bool offWhileDisabled =
         aAudioDevice->GetMediaSource() == MediaSourceEnum::Microphone &&
         Preferences::GetBool(
             "media.getusermedia.microphone.off_while_disabled.enabled", true);
     mAudioDeviceState =
-        MakeUnique<DeviceState>(std::move(aAudioDevice), offWhileDisabled);
+        MakeUnique<DeviceState>(std::move(aAudioDevice),
+                                std::move(aAudioTrackSource), offWhileDisabled);
   }
 
   if (aVideoDevice) {
     bool offWhileDisabled =
         aVideoDevice->GetMediaSource() == MediaSourceEnum::Camera &&
         Preferences::GetBool(
             "media.getusermedia.camera.off_while_disabled.enabled", true);
     mVideoDeviceState =
-        MakeUnique<DeviceState>(std::move(aVideoDevice), offWhileDisabled);
+        MakeUnique<DeviceState>(std::move(aVideoDevice),
+                                std::move(aVideoTrackSource), offWhileDisabled);
   }
 }
 
 RefPtr<SourceListener::SourceListenerPromise>
 SourceListener::InitializeAsync() {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_DIAGNOSTIC_ASSERT(!mStopped);
 
   return MediaManager::PostTask<SourceListenerPromise>(
              __func__,
-             [stream = mStream, principal = GetPrincipalHandle(),
+             [principal = GetPrincipalHandle(),
               audioDevice =
                   mAudioDeviceState ? mAudioDeviceState->mDevice : nullptr,
+              audioStream = mAudioDeviceState
+                                ? mAudioDeviceState->mTrackSource->mStream
+                                : nullptr,
               videoDevice =
-                  mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr](
+                  mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr,
+              videoStream = mVideoDeviceState
+                                ? mVideoDeviceState->mTrackSource->mStream
+                                : nullptr](
                  MozPromiseHolder<SourceListenerPromise>& aHolder) {
                if (audioDevice) {
-                 audioDevice->SetTrack(stream, kAudioTrack, principal);
+                 audioDevice->SetTrack(audioStream->AsSourceStream(),
+                                       kAudioTrack, principal);
                }
 
                if (videoDevice) {
-                 videoDevice->SetTrack(stream, kVideoTrack, principal);
+                 videoDevice->SetTrack(videoStream->AsSourceStream(),
+                                       kVideoTrack, principal);
                }
 
-               // SetTrack() queued the tracks. We add them synchronously here
-               // to avoid races.
-               stream->FinishAddTracks();
-
                if (audioDevice) {
                  nsresult rv = audioDevice->Start();
                  if (rv == NS_ERROR_NOT_AVAILABLE) {
                    PR_Sleep(200);
                    rv = audioDevice->Start();
                  }
                  if (NS_FAILED(rv)) {
                    nsString log;
@@ -4488,31 +4548,21 @@ void SourceListener::StopSharing() {
                                 MediaSourceEnum::Window)) {
     // We want to stop the whole stream if there's no audio;
     // just the video track if we have both.
     // StopTrack figures this out for us.
     StopTrack(kVideoTrack);
   }
   if (mAudioDeviceState && mAudioDeviceState->mDevice->GetMediaSource() ==
                                MediaSourceEnum::AudioCapture) {
-    uint64_t windowID = mWindowListener->WindowID();
-    auto* window = nsGlobalWindowInner::GetInnerWindowWithId(windowID);
-    MOZ_RELEASE_ASSERT(window);
-    window->SetAudioCapture(false);
-    MediaStreamGraph* graph = mStream->Graph();
-    graph->UnregisterCaptureStreamForWindow(windowID);
-    mStream->Destroy();
+    static_cast<AudioCaptureTrackSource*>(mAudioDeviceState->mTrackSource.get())
+        ->Stop();
   }
 }
 
-SourceMediaStream* SourceListener::GetSourceStream() {
-  NS_ASSERTION(mStream, "Getting stream from never-activated SourceListener");
-  return mStream;
-}
-
 bool SourceListener::CapturingVideo() const {
   MOZ_ASSERT(NS_IsMainThread());
   return Activated() && mVideoDeviceState && !mVideoDeviceState->mStopped &&
          (!mVideoDeviceState->mDevice->mSource->IsFake() ||
           Preferences::GetBool("media.navigator.permission.fake"));
 }
 
 bool SourceListener::CapturingAudio() const {
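
For context, the producer pattern the code above now follows can be summarized
in a short sketch; `graph`, `window`, `principal`, `deviceName`,
`sourceListener` and `domStream` are placeholders for values MediaManager
already holds, and the constraints argument is left at its default:

    // Sketch only: create a graph-owned source stream, hand ownership of it to
    // a track source, and wrap that in a track.
    RefPtr<SourceMediaStream> source = graph->CreateSourceStream();
    RefPtr<LocalTrackSource> trackSource = new LocalTrackSource(
        principal, deviceName, sourceListener, MediaSourceEnum::Camera,
        source, kVideoTrack, /* aPeerIdentity = */ nullptr);
    RefPtr<dom::MediaStreamTrack> track = new dom::VideoStreamTrack(
        window, source, kVideoTrack, trackSource);
    domStream->AddTrackInternal(track);
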
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -90,20 +90,16 @@ class MediaDevice : public nsIMediaDevic
   nsresult Start();
   nsresult Reconfigure(const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const char** aOutBadConstraint);
   nsresult FocusOnSelectedSource();
   nsresult Stop();
   nsresult Deallocate();
 
-  void Pull(const RefPtr<SourceMediaStream>& aStream, TrackID aTrackID,
-            StreamTime aEndOfAppendedData, StreamTime aDesiredTime,
-            const PrincipalHandle& aPrincipal);
-
   void GetSettings(dom::MediaTrackSettings& aOutSettings) const;
 
   dom::MediaSourceEnum GetMediaSource() const;
 
  protected:
   virtual ~MediaDevice() = default;
 
   static uint32_t FitnessDistance(
--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -794,17 +794,17 @@ class MediaRecorder::Session : public Pr
       LOG(LogLevel::Warning, ("Session.MediaTracksReady MediaStreamTracks "
                               "principal check failed"));
       DoSessionEndTask(NS_ERROR_DOM_SECURITY_ERR);
       return;
     }
 
     LOG(LogLevel::Debug,
         ("Session.MediaTracksReady track type = (%d)", trackTypes));
-    InitEncoder(trackTypes, mMediaStream->GraphRate());
+    InitEncoder(trackTypes, tracks[0]->Graph()->GraphRate());
   }
 
   void ConnectMediaStreamTrack(MediaStreamTrack& aTrack) {
     for (auto& track : mMediaStreamTracks) {
       if (track->AsAudioStreamTrack() && aTrack.AsAudioStreamTrack()) {
         // We only allow one audio track. See bug 1276928.
         return;
       }
@@ -1304,22 +1304,22 @@ void MediaRecorder::Start(const Optional
 
   nsTArray<RefPtr<MediaStreamTrack>> tracks;
   if (mDOMStream) {
     mDOMStream->GetTracks(tracks);
   }
   if (!tracks.IsEmpty()) {
     // If there are tracks already available that we're not allowed
     // to record, we should throw a security error.
+    RefPtr<nsIPrincipal> streamPrincipal = mDOMStream->GetPrincipal();
     bool subsumes = false;
     nsPIDOMWindowInner* window;
     Document* doc;
     if (!(window = GetOwner()) || !(doc = window->GetExtantDoc()) ||
-        NS_FAILED(doc->NodePrincipal()->Subsumes(mDOMStream->GetPrincipal(),
-                                                 &subsumes)) ||
+        NS_FAILED(doc->NodePrincipal()->Subsumes(streamPrincipal, &subsumes)) ||
         !subsumes) {
       aResult.Throw(NS_ERROR_DOM_SECURITY_ERR);
       return;
     }
   }
 
   uint32_t timeSlice = aTimeSlice.WasPassed() ? aTimeSlice.Value() : 0;
   MediaRecorderReporter::AddMediaRecorder(this);
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -1858,17 +1858,16 @@ MediaStream::MediaStream()
       mStartBlocking(GRAPH_TIME_MAX),
       mSuspendedCount(0),
       mFinished(false),
       mNotifiedFinished(false),
       mMainThreadCurrentTime(0),
       mMainThreadFinished(false),
       mFinishedNotificationSent(false),
       mMainThreadDestroyed(false),
-      mNrOfMainThreadUsers(0),
       mGraph(nullptr) {
   MOZ_COUNT_CTOR(MediaStream);
 }
 
 MediaStream::~MediaStream() {
   MOZ_COUNT_DTOR(MediaStream);
   NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
   NS_ASSERTION(mMainThreadListeners.IsEmpty(),
@@ -2010,18 +2009,16 @@ void MediaStream::DestroyImpl() {
   for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
     mConsumers[i]->Disconnect();
   }
   mTracks.Clear();
   mGraph = nullptr;
 }
 
 void MediaStream::Destroy() {
-  NS_ASSERTION(mNrOfMainThreadUsers == 0,
-               "Do not mix Destroy() and RegisterUser()/UnregisterUser()");
   // Keep this stream alive until we leave this method
   RefPtr<MediaStream> kungFuDeathGrip = this;
 
   class Message : public ControlMessage {
    public:
     explicit Message(MediaStream* aStream) : ControlMessage(aStream) {}
     void Run() override {
       mStream->RemoveAllListenersImpl();
@@ -2033,33 +2030,16 @@ void MediaStream::Destroy() {
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this));
   // Message::RunDuringShutdown may have removed this stream from the graph,
   // but our kungFuDeathGrip above will have kept this stream alive if
   // necessary.
   mMainThreadDestroyed = true;
 }
 
-void MediaStream::RegisterUser() {
-  MOZ_ASSERT(NS_IsMainThread());
-  ++mNrOfMainThreadUsers;
-}
-
-void MediaStream::UnregisterUser() {
-  MOZ_ASSERT(NS_IsMainThread());
-
-  --mNrOfMainThreadUsers;
-  NS_ASSERTION(mNrOfMainThreadUsers >= 0, "Double-removal of main thread user");
-  NS_ASSERTION(!IsDestroyed(),
-               "Do not mix Destroy() and RegisterUser()/UnregisterUser()");
-  if (mNrOfMainThreadUsers == 0) {
-    Destroy();
-  }
-}
-
 void MediaStream::AddAudioOutput(void* aKey) {
   class Message : public ControlMessage {
    public:
     Message(MediaStream* aStream, void* aKey)
         : ControlMessage(aStream), mKey(aKey) {}
     void Run() override { mStream->AddAudioOutputImpl(mKey); }
     void* mKey;
   };
@@ -3519,17 +3499,17 @@ SourceMediaStream* MediaStreamGraph::Cre
 }
 
 ProcessedMediaStream* MediaStreamGraph::CreateTrackUnionStream() {
   TrackUnionStream* stream = new TrackUnionStream();
   AddStream(stream);
   return stream;
 }
 
-ProcessedMediaStream* MediaStreamGraph::CreateAudioCaptureStream(
+AudioCaptureStream* MediaStreamGraph::CreateAudioCaptureStream(
     TrackID aTrackId) {
   AudioCaptureStream* stream = new AudioCaptureStream(aTrackId);
   AddStream(stream);
   return stream;
 }
 
 void MediaStreamGraph::AddStream(MediaStream* aStream) {
   NS_ADDREF(aStream);
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -23,17 +23,18 @@
 #include <speex/speex_resampler.h>
 
 class nsIRunnable;
 class nsIGlobalObject;
 class nsPIDOMWindowInner;
 
 namespace mozilla {
 class AsyncLogger;
-};
+class AudioCaptureStream;
+};  // namespace mozilla
 
 extern mozilla::AsyncLogger gMSGTraceLogger;
 
 template <>
 class nsAutoRefTraits<SpeexResamplerState>
     : public nsPointerRefTraits<SpeexResamplerState> {
  public:
   static void Release(SpeexResamplerState* aState) {
@@ -360,27 +361,18 @@ class MediaStream : public mozilla::Link
    * dispatched to the event queue immediately.  (There are no pending updates
    * in this situation.)
    *
    * Main thread only.
    */
   void RunAfterPendingUpdates(already_AddRefed<nsIRunnable> aRunnable);
 
   // Signal that the client is done with this MediaStream. It will be deleted
-  // later. Do not mix usage of Destroy() with RegisterUser()/UnregisterUser().
-  // That will cause the MediaStream to be destroyed twice, which will cause
-  // some assertions to fail.
+  // later.
   virtual void Destroy();
-  // Signal that a client is using this MediaStream. Useful to not have to
-  // explicitly manage ownership (responsibility to Destroy()) when there are
-  // multiple clients using a MediaStream.
-  void RegisterUser();
-  // Signal that a client no longer needs this MediaStream. When the number of
-  // clients using this MediaStream reaches 0, it will be destroyed.
-  void UnregisterUser();
 
   // Returns the main-thread's view of how much data has been processed by
   // this stream.
   StreamTime GetCurrentTime() const {
     NS_ASSERTION(NS_IsMainThread(), "Call only on main thread");
     return mMainThreadCurrentTime;
   }
   // Return the main thread's view of whether this stream has finished.
@@ -613,17 +605,16 @@ class MediaStream : public mozilla::Link
    */
   bool mNotifiedFinished;
 
   // Main-thread views of state
   StreamTime mMainThreadCurrentTime;
   bool mMainThreadFinished;
   bool mFinishedNotificationSent;
   bool mMainThreadDestroyed;
-  int mNrOfMainThreadUsers;
 
   // Our media stream graph.  null if destroyed on the graph thread.
   MediaStreamGraphImpl* mGraph;
 };
 
 /**
  * This is a stream into which a decoder can write audio and video.
  *
@@ -1221,17 +1212,17 @@ class MediaStreamGraph {
    * previously added tracks, whichever is greater.
    * TODO at some point we will probably need to add API to select
    * particular tracks of each input stream.
    */
   ProcessedMediaStream* CreateTrackUnionStream();
   /**
    * Create a stream that will mix all its audio input.
    */
-  ProcessedMediaStream* CreateAudioCaptureStream(TrackID aTrackId);
+  AudioCaptureStream* CreateAudioCaptureStream(TrackID aTrackId);
 
   /**
    * Add a new stream to the graph.  Main thread.
    */
   void AddStream(MediaStream* aStream);
 
   /* From the main thread, ask the MSG to send back an event when the graph
    * thread is running, and audio is being processed. */
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -134,17 +134,17 @@ class MediaStreamTrack::MSGListener : pu
 
  protected:
   const RefPtr<MediaStreamGraphImpl> mGraph;
 
   // Main thread only.
   WeakPtr<MediaStreamTrack> mTrack;
 };
 
-class TrackSink : public MediaStreamTrackSource::Sink {
+class MediaStreamTrack::TrackSink : public MediaStreamTrackSource::Sink {
  public:
   explicit TrackSink(MediaStreamTrack* aTrack) : mTrack(aTrack) {}
 
   /**
    * Keep the track source alive. This track and any clones are controlling the
    * lifetime of the source by being registered as its sinks.
    */
   bool KeepsSourceAlive() const override { return true; }
@@ -163,37 +163,47 @@ class TrackSink : public MediaStreamTrac
   }
 
   void MutedChanged(bool aNewState) override {
     if (mTrack) {
       mTrack->MutedChanged(aNewState);
     }
   }
 
+  void OverrideEnded() override {
+    if (mTrack) {
+      mTrack->OverrideEnded();
+    }
+  }
+
  private:
   WeakPtr<MediaStreamTrack> mTrack;
 };
 
-MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
-                                   TrackID aInputTrackID,
+MediaStreamTrack::MediaStreamTrack(nsPIDOMWindowInner* aWindow,
+                                   MediaStream* aInputStream, TrackID aTrackID,
                                    MediaStreamTrackSource* aSource,
                                    const MediaTrackConstraints& aConstraints)
-    : mOwningStream(aStream),
+    : mWindow(aWindow),
+      mInputStream(aInputStream),
+      mStream(mInputStream ? mInputStream->Graph()->CreateTrackUnionStream()
+                           : nullptr),
+      mPort(mStream ? mStream->AllocateInputPort(mInputStream) : nullptr),
       mTrackID(aTrackID),
-      mInputTrackID(aInputTrackID),
       mSource(aSource),
       mSink(MakeUnique<TrackSink>(this)),
       mPrincipal(aSource->GetPrincipal()),
-      mReadyState(MediaStreamTrackState::Live),
+      mReadyState(mStream ? MediaStreamTrackState::Live
+                          : MediaStreamTrackState::Ended),
       mEnabled(true),
       mMuted(false),
       mConstraints(aConstraints) {
-  GetSource().RegisterSink(mSink.get());
+  if (!Ended()) {
+    GetSource().RegisterSink(mSink.get());
 
-  if (GetOwnedStream()) {
     mMSGListener = new MSGListener(this);
     AddListener(mMSGListener);
   }
 
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
       do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
@@ -206,26 +216,17 @@ MediaStreamTrack::MediaStreamTrack(DOMMe
   char chars[NSID_LENGTH];
   uuid.ToProvidedString(chars);
   mID = NS_ConvertASCIItoUTF16(chars);
 }
 
 MediaStreamTrack::~MediaStreamTrack() { Destroy(); }
 
 void MediaStreamTrack::Destroy() {
-  mReadyState = MediaStreamTrackState::Ended;
-  if (mSource) {
-    mSource->UnregisterSink(mSink.get());
-  }
-  if (mMSGListener) {
-    if (GetOwnedStream()) {
-      RemoveListener(mMSGListener);
-    }
-    mMSGListener = nullptr;
-  }
+  SetReadyState(MediaStreamTrackState::Ended);
   // Remove all listeners -- avoid iterating over the list we're removing from
   const nsTArray<RefPtr<MediaStreamTrackListener>> trackListeners(
       mTrackListeners);
   for (auto listener : trackListeners) {
     RemoveListener(listener);
   }
   // Do the same as above for direct listeners
   const nsTArray<RefPtr<DirectMediaStreamTrackListener>> directTrackListeners(
@@ -235,87 +236,71 @@ void MediaStreamTrack::Destroy() {
   }
 }
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(MediaStreamTrack)
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(MediaStreamTrack,
                                                 DOMEventTargetHelper)
   tmp->Destroy();
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwningStream)
+  NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mSource)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mOriginalTrack)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPrincipal)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mPendingPrincipal)
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(MediaStreamTrack,
                                                   DOMEventTargetHelper)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwningStream)
+  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mSource)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOriginalTrack)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPrincipal)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mPendingPrincipal)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_ADDREF_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaStreamTrack)
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
-nsPIDOMWindowInner* MediaStreamTrack::GetParentObject() const {
-  MOZ_RELEASE_ASSERT(mOwningStream);
-  return mOwningStream->GetParentObject();
-}
-
 JSObject* MediaStreamTrack::WrapObject(JSContext* aCx,
                                        JS::Handle<JSObject*> aGivenProto) {
   return MediaStreamTrack_Binding::Wrap(aCx, this, aGivenProto);
 }
 
 void MediaStreamTrack::GetId(nsAString& aID) const { aID = mID; }
 
 void MediaStreamTrack::SetEnabled(bool aEnabled) {
   LOG(LogLevel::Info,
       ("MediaStreamTrack %p %s", this, aEnabled ? "Enabled" : "Disabled"));
 
   if (mEnabled == aEnabled) {
     return;
   }
 
   mEnabled = aEnabled;
-  GetOwnedStream()->SetTrackEnabled(
-      mTrackID,
-      mEnabled ? DisabledTrackMode::ENABLED : DisabledTrackMode::SILENCE_BLACK);
+
+  if (Ended()) {
+    return;
+  }
+
+  mStream->SetTrackEnabled(mTrackID, mEnabled
+                                         ? DisabledTrackMode::ENABLED
+                                         : DisabledTrackMode::SILENCE_BLACK);
   GetSource().SinkEnabledStateChanged();
 }
 
 void MediaStreamTrack::Stop() {
   LOG(LogLevel::Info, ("MediaStreamTrack %p Stop()", this));
 
   if (Ended()) {
     LOG(LogLevel::Warning, ("MediaStreamTrack %p Already ended", this));
     return;
   }
 
-  if (!mSource) {
-    MOZ_ASSERT(false);
-    return;
-  }
-
-  mSource->UnregisterSink(mSink.get());
-
-  MOZ_ASSERT(mOwningStream,
-             "Every MediaStreamTrack needs an owning DOMMediaStream");
-  DOMMediaStream::TrackPort* port = mOwningStream->FindOwnedTrackPort(*this);
-  MOZ_ASSERT(port,
-             "A MediaStreamTrack must exist in its owning DOMMediaStream");
-  Unused << port->BlockSourceTrackId(mInputTrackID, BlockingMode::CREATION);
-
-  mReadyState = MediaStreamTrackState::Ended;
+  SetReadyState(MediaStreamTrackState::Ended);
 
   NotifyEnded();
 }
 
 void MediaStreamTrack::GetConstraints(dom::MediaTrackConstraints& aResult) {
   aResult = mConstraints;
 }
 
@@ -341,18 +326,17 @@ already_AddRefed<Promise> MediaStreamTra
     nsString str;
     aConstraints.ToJSON(str);
 
     LOG(LogLevel::Info, ("MediaStreamTrack %p ApplyConstraints() with "
                          "constraints %s",
                          this, NS_ConvertUTF16toUTF8(str).get()));
   }
 
-  nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
-  nsIGlobalObject* go = window ? window->AsGlobal() : nullptr;
+  nsIGlobalObject* go = mWindow ? mWindow->AsGlobal() : nullptr;
 
   RefPtr<Promise> promise = Promise::Create(go, aRv);
   if (aRv.Failed()) {
     return nullptr;
   }
 
   // Forward constraints to the source.
   //
@@ -362,39 +346,38 @@ already_AddRefed<Promise> MediaStreamTra
 
   // Keep a reference to this, to make sure it's still here when we get back.
   RefPtr<MediaStreamTrack> self(this);
   GetSource()
       .ApplyConstraints(aConstraints, aCallerType)
       ->Then(
           GetCurrentThreadSerialEventTarget(), __func__,
           [this, self, promise, aConstraints](bool aDummy) {
-            nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
-            if (!window || !window->IsCurrentInnerWindow()) {
+            if (!mWindow || !mWindow->IsCurrentInnerWindow()) {
               return;  // Leave Promise pending after navigation by design.
             }
             mConstraints = aConstraints;
             promise->MaybeResolve(false);
           },
           [this, self, promise](const RefPtr<MediaMgrError>& aError) {
-            nsPIDOMWindowInner* window = mOwningStream->GetParentObject();
-            if (!window || !window->IsCurrentInnerWindow()) {
+            if (!mWindow || !mWindow->IsCurrentInnerWindow()) {
               return;  // Leave Promise pending after navigation by design.
             }
-            promise->MaybeReject(MakeRefPtr<MediaStreamError>(window, *aError));
+            promise->MaybeReject(
+                MakeRefPtr<MediaStreamError>(mWindow, *aError));
           });
   return promise.forget();
 }
 
-MediaStreamGraph* MediaStreamTrack::Graph() {
-  return GetOwnedStream()->Graph();
-}
+ProcessedMediaStream* MediaStreamTrack::GetStream() const { return mStream; }
 
-MediaStreamGraphImpl* MediaStreamTrack::GraphImpl() {
-  return GetOwnedStream()->GraphImpl();
+MediaStreamGraph* MediaStreamTrack::Graph() const { return mStream->Graph(); }
+
+MediaStreamGraphImpl* MediaStreamTrack::GraphImpl() const {
+  return mStream->GraphImpl();
 }
 
 void MediaStreamTrack::SetPrincipal(nsIPrincipal* aPrincipal) {
   if (aPrincipal == mPrincipal) {
     return;
   }
   mPrincipal = aPrincipal;
 
@@ -501,146 +484,122 @@ void MediaStreamTrack::RemoveConsumer(Me
 
   // Remove destroyed consumers for cleanliness
   while (mConsumers.RemoveElement(nullptr)) {
     MOZ_ASSERT_UNREACHABLE("A consumer was not explicitly removed");
   }
 }
 
 already_AddRefed<MediaStreamTrack> MediaStreamTrack::Clone() {
-  // MediaStreamTracks are currently governed by streams, so we need a dummy
-  // DOMMediaStream to own our track clone.
-  RefPtr<DOMMediaStream> newStream =
-      new DOMMediaStream(mOwningStream->GetParentObject());
-
-  MediaStreamGraph* graph = Graph();
-  newStream->InitOwnedStreamCommon(graph);
-  newStream->InitPlaybackStreamCommon(graph);
-
-  return newStream->CloneDOMTrack(*this, mTrackID);
+  RefPtr<MediaStreamTrack> newTrack = CloneInternal();
+  newTrack->SetEnabled(Enabled());
+  newTrack->SetMuted(Muted());
+  MOZ_DIAGNOSTIC_ASSERT(newTrack->ReadyState() == ReadyState());
+  return newTrack.forget();
 }
 
 void MediaStreamTrack::SetReadyState(MediaStreamTrackState aState) {
   MOZ_ASSERT(!(mReadyState == MediaStreamTrackState::Ended &&
                aState == MediaStreamTrackState::Live),
              "We don't support overriding the ready state from ended to live");
 
+  if (Ended()) {
+    return;
+  }
+
   if (mReadyState == MediaStreamTrackState::Live &&
-      aState == MediaStreamTrackState::Ended && mSource) {
-    mSource->UnregisterSink(mSink.get());
+      aState == MediaStreamTrackState::Ended) {
+    if (mSource) {
+      mSource->UnregisterSink(mSink.get());
+    }
+    if (mMSGListener) {
+      RemoveListener(mMSGListener);
+      mMSGListener = nullptr;
+    }
+    if (mPort) {
+      mPort->Destroy();
+      mPort = nullptr;
+    }
+    if (mStream) {
+      mStream->Destroy();
+      mStream = nullptr;
+    }
   }
 
   mReadyState = aState;
 }
 
 void MediaStreamTrack::OverrideEnded() {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (Ended()) {
     return;
   }
 
   LOG(LogLevel::Info, ("MediaStreamTrack %p ended", this));
 
-  if (!mSource) {
-    MOZ_ASSERT(false);
-    return;
-  }
-
-  mSource->UnregisterSink(mSink.get());
-
-  if (mMSGListener) {
-    RemoveListener(mMSGListener);
-  }
-  mMSGListener = nullptr;
-
-  mReadyState = MediaStreamTrackState::Ended;
+  SetReadyState(MediaStreamTrackState::Ended);
 
   NotifyEnded();
 
   DispatchTrustedEvent(NS_LITERAL_STRING("ended"));
 }
 
-DOMMediaStream* MediaStreamTrack::GetInputDOMStream() {
-  MediaStreamTrack* originalTrack =
-      mOriginalTrack ? mOriginalTrack.get() : this;
-  MOZ_RELEASE_ASSERT(originalTrack->mOwningStream);
-  return originalTrack->mOwningStream;
-}
-
-MediaStream* MediaStreamTrack::GetInputStream() {
-  DOMMediaStream* inputDOMStream = GetInputDOMStream();
-  MOZ_RELEASE_ASSERT(inputDOMStream->GetInputStream());
-  return inputDOMStream->GetInputStream();
-}
-
-ProcessedMediaStream* MediaStreamTrack::GetOwnedStream() {
-  if (!mOwningStream) {
-    return nullptr;
-  }
-
-  return mOwningStream->GetOwnedStream();
-}
-
 void MediaStreamTrack::AddListener(MediaStreamTrackListener* aListener) {
   LOG(LogLevel::Debug,
       ("MediaStreamTrack %p adding listener %p", this, aListener));
-  MOZ_ASSERT(GetOwnedStream());
+  mTrackListeners.AppendElement(aListener);
 
-  GetOwnedStream()->AddTrackListener(aListener, mTrackID);
-  mTrackListeners.AppendElement(aListener);
+  if (Ended()) {
+    return;
+  }
+  mStream->AddTrackListener(aListener, mTrackID);
 }
 
 void MediaStreamTrack::RemoveListener(MediaStreamTrackListener* aListener) {
   LOG(LogLevel::Debug,
       ("MediaStreamTrack %p removing listener %p", this, aListener));
+  mTrackListeners.RemoveElement(aListener);
 
-  if (GetOwnedStream()) {
-    GetOwnedStream()->RemoveTrackListener(aListener, mTrackID);
-    mTrackListeners.RemoveElement(aListener);
+  if (Ended()) {
+    return;
   }
+  mStream->RemoveTrackListener(aListener, mTrackID);
 }
 
 void MediaStreamTrack::AddDirectListener(
     DirectMediaStreamTrackListener* aListener) {
   LOG(LogLevel::Debug, ("MediaStreamTrack %p (%s) adding direct listener %p to "
                         "stream %p, track %d",
                         this, AsAudioStreamTrack() ? "audio" : "video",
-                        aListener, GetOwnedStream(), mTrackID));
-  MOZ_ASSERT(GetOwnedStream());
+                        aListener, mStream.get(), mTrackID));
+  mDirectTrackListeners.AppendElement(aListener);
 
-  GetOwnedStream()->AddDirectTrackListener(aListener, mTrackID);
-  mDirectTrackListeners.AppendElement(aListener);
+  if (Ended()) {
+    return;
+  }
+  mStream->AddDirectTrackListener(aListener, mTrackID);
 }
 
 void MediaStreamTrack::RemoveDirectListener(
     DirectMediaStreamTrackListener* aListener) {
   LOG(LogLevel::Debug,
       ("MediaStreamTrack %p removing direct listener %p from stream %p", this,
-       aListener, GetOwnedStream()));
+       aListener, mStream.get()));
+  mDirectTrackListeners.RemoveElement(aListener);
 
-  if (GetOwnedStream()) {
-    GetOwnedStream()->RemoveDirectTrackListener(aListener, mTrackID);
-    mDirectTrackListeners.RemoveElement(aListener);
+  if (Ended()) {
+    return;
   }
+  mStream->RemoveDirectTrackListener(aListener, mTrackID);
 }
 
 already_AddRefed<MediaInputPort> MediaStreamTrack::ForwardTrackContentsTo(
-    ProcessedMediaStream* aStream, TrackID aDestinationTrackID) {
+    ProcessedMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_RELEASE_ASSERT(aStream);
-  RefPtr<MediaInputPort> port = aStream->AllocateInputPort(
-      GetOwnedStream(), mTrackID, aDestinationTrackID);
+  RefPtr<MediaInputPort> port =
+      aStream->AllocateInputPort(mStream, mTrackID, mTrackID);
   return port.forget();
 }
 
-bool MediaStreamTrack::IsForwardedThrough(MediaInputPort* aPort) {
-  MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(aPort);
-  if (!aPort) {
-    return false;
-  }
-  return aPort->GetSource() == GetOwnedStream() &&
-         aPort->PassTrackThrough(mTrackID);
-}
-
 }  // namespace dom
 }  // namespace mozilla
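
The simplified ForwardTrackContentsTo() above always forwards under the track's
own TrackID. A usage sketch for a consumer that owns the receiving stream,
where `track` stands for an existing live MediaStreamTrack:

    // Sketch only: tap a track's media into a consumer-owned stream.
    RefPtr<ProcessedMediaStream> sink = track->Graph()->CreateTrackUnionStream();
    RefPtr<MediaInputPort> port = track->ForwardTrackContentsTo(sink);
    // ... consume the data arriving in `sink` ...
    port->Destroy();  // disconnect when done; the track itself is unaffected
    sink->Destroy();
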
--- a/dom/media/MediaStreamTrack.h
+++ b/dom/media/MediaStreamTrack.h
@@ -35,17 +35,16 @@ class ProcessedMediaStream;
 class RemoteSourceStreamInfo;
 class SourceStreamInfo;
 class MediaMgrError;
 
 namespace dom {
 
 class AudioStreamTrack;
 class VideoStreamTrack;
-class TrackSink;
 enum class CallerType : uint32_t;
 
 /**
  * Common interface through which a MediaStreamTrack can communicate with its
  * producer on the main thread.
  *
  * Kept alive by a strong ref in all MediaStreamTracks (original and clones)
  * sharing this source.
@@ -93,16 +92,22 @@ class MediaStreamTrackSource : public ns
     virtual void PrincipalChanged() = 0;
 
     /**
      * Called when the muted state of the MediaStreamTrackSource where this sink
      * is registered has changed.
      */
     virtual void MutedChanged(bool aNewState) = 0;
 
+    /**
+     * Called when the MediaStreamTrackSource where this sink is registered has
+     * stopped producing data for good, i.e., it has ended.
+     */
+    virtual void OverrideEnded() = 0;
+
    protected:
     virtual ~Sink() = default;
   };
 
   MediaStreamTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
       : mPrincipal(aPrincipal), mLabel(aLabel), mStopped(false) {}
 
   /**
@@ -280,16 +285,33 @@ class MediaStreamTrackSource : public ns
         MOZ_ASSERT_UNREACHABLE("Sink was not explicitly removed");
         mSinks.RemoveElement(sink);
         continue;
       }
       sink->MutedChanged(aNewState);
     }
   }
 
+  /**
+   * Called by a subclass when the source has stopped producing data for good,
+   * i.e., it has ended. Notifies all sinks.
+   */
+  void OverrideEnded() {
+    MOZ_ASSERT(NS_IsMainThread());
+    nsTArray<WeakPtr<Sink>> sinks(mSinks);
+    for (auto& sink : sinks) {
+      if (!sink) {
+        MOZ_ASSERT_UNREACHABLE("Sink was not explicitly removed");
+        mSinks.RemoveElement(sink);
+        continue;
+      }
+      sink->OverrideEnded();
+    }
+  }
+
   // Principal identifying who may access the contents of this source.
   nsCOMPtr<nsIPrincipal> mPrincipal;
 
   // Currently registered sinks.
   nsTArray<WeakPtr<Sink>> mSinks;
 
   // The label of the track we are the source of per the MediaStreamTrack spec.
   const nsString mLabel;
@@ -334,50 +356,80 @@ class MediaStreamTrackConsumer
   /**
    * Called when the track's readyState transitions to "ended".
    * Unlike the "ended" event exposed to script this is called for any reason,
    * including MediaStreamTrack::Stop().
    */
   virtual void NotifyEnded(MediaStreamTrack* aTrack){};
 };
 
+// clang-format off
 /**
- * Class representing a track in a DOMMediaStream.
+ * DOM wrapper for MediaStreamGraph-MediaStreams.
+ *
+ * To account for cloning, a MediaStreamTrack wraps two internal (and chained)
+ * MediaStreams:
+ *   1. mInputStream
+ *      - Controlled by the producer of the data in the track. The producer
+ *        decides on lifetime of the MediaStream and the track inside it.
+ *      - It can be any type of MediaStream.
+ *      - Contains one track only.
+ *   2. mStream
+ *      - A TrackUnionStream representing this MediaStreamTrack.
+ *      - Its data is piped from mInputStream through mPort.
+ *      - Contains one track only.
+ *      - When this MediaStreamTrack is enabled/disabled this is reflected in
+ *        the chunks in the track in mStream.
+ *      - When this MediaStreamTrack has ended, mStream gets destroyed.
+ *        Note that mInputStream is unaffected, such that any clones of mStream
+ *        can live on. When all clones are ended, this is signaled to the
+ *        producer via our MediaStreamTrackSource. It is then likely to destroy
+ *        mInputStream.
+ *
+ * A graphical representation of how tracks are connected when cloned follows:
+ *
+ * MediaStreamTrack A
+ *       mInputStream     mStream
+ *            t1 ---------> t1
+ *               \
+ *                -----
+ * MediaStreamTrack B  \  (clone of A)
+ *       mInputStream   \ mStream
+ *            *          -> t1
+ *
+ *   (*) is a copy of A's mInputStream
  */
+// clang-format on
 class MediaStreamTrack : public DOMEventTargetHelper,
                          public SupportsWeakPtr<MediaStreamTrack> {
-  // DOMMediaStream owns MediaStreamTrack instances, and requires access to
-  // some internal state, e.g., GetInputStream(), GetOwnedStream().
-  friend class mozilla::DOMMediaStream;
-
   // PeerConnection and friends need to know our owning DOMStream and track id.
   friend class mozilla::PeerConnectionImpl;
   friend class mozilla::PeerConnectionMedia;
   friend class mozilla::SourceStreamInfo;
   friend class mozilla::RemoteSourceStreamInfo;
 
   class MSGListener;
+  class TrackSink;
 
  public:
   /**
-   * aTrackID is the MediaStreamGraph track ID for the track in the
-   * MediaStream owned by aStream.
+   * aTrackID is the MediaStreamGraph track ID for the track in aInputStream.
    */
   MediaStreamTrack(
-      DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
+      nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
       MediaStreamTrackSource* aSource,
       const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamTrack,
                                            DOMEventTargetHelper)
 
   MOZ_DECLARE_WEAKREFERENCE_TYPENAME(MediaStreamTrack)
 
-  nsPIDOMWindowInner* GetParentObject() const;
+  nsPIDOMWindowInner* GetParentObject() const { return mWindow; }
   JSObject* WrapObject(JSContext* aCx,
                        JS::Handle<JSObject*> aGivenProto) override;
 
   virtual AudioStreamTrack* AsAudioStreamTrack() { return nullptr; }
   virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; }
 
   virtual const AudioStreamTrack* AsAudioStreamTrack() const { return nullptr; }
   virtual const VideoStreamTrack* AsVideoStreamTrack() const { return nullptr; }
@@ -406,79 +458,42 @@ class MediaStreamTrack : public DOMEvent
   IMPL_EVENT_HANDLER(ended)
 
   /**
    * Convenience (and legacy) method for when ready state is "ended".
    */
   bool Ended() const { return mReadyState == MediaStreamTrackState::Ended; }
 
   /**
-   * Forces the ready state to a particular value, for instance when we're
-   * cloning an already ended track.
-   */
-  void SetReadyState(MediaStreamTrackState aState);
-
-  /**
-   * Notified by the MediaStreamGraph, through our owning MediaStream on the
-   * main thread.
-   *
-   * Note that this sets the track to ended and raises the "ended" event
-   * synchronously.
-   */
-  void OverrideEnded();
-
-  /**
    * Get this track's principal.
    */
   nsIPrincipal* GetPrincipal() const { return mPrincipal; }
 
   /**
-   * Called by the MSGListener when this track's PrincipalHandle changes on
-   * the MediaStreamGraph thread. When the PrincipalHandle matches the pending
-   * principal we know that the principal change has propagated to consumers.
-   */
-  void NotifyPrincipalHandleChanged(const PrincipalHandle& aPrincipalHandle);
-
-  /**
-   * Called when this track's readyState transitions to "ended".
-   * Notifies all MediaStreamTrackConsumers that this track ended.
-   */
-  void NotifyEnded();
-
-  /**
    * Get this track's PeerIdentity.
    */
   const PeerIdentity* GetPeerIdentity() const {
     return GetSource().GetPeerIdentity();
   }
 
-  MediaStreamGraph* Graph();
-  MediaStreamGraphImpl* GraphImpl();
+  ProcessedMediaStream* GetStream() const;
+  MediaStreamGraph* Graph() const;
+  MediaStreamGraphImpl* GraphImpl() const;
 
   MediaStreamTrackSource& GetSource() const {
     MOZ_RELEASE_ASSERT(mSource,
                        "The track source is only removed on destruction");
     return *mSource;
   }
 
   // Webrtc allows the remote side to name tracks whatever it wants, and we
   // need to surface this to content.
   void AssignId(const nsAString& aID) { mID = aID; }
 
   /**
-   * Called when mSource's principal has changed.
-   */
-  void PrincipalChanged();
-
-  /**
-   * Called when mSource's muted state has changed.
-   */
-  void MutedChanged(bool aNewState);
-
-  /**
    * Add a PrincipalChangeObserver to this track.
    *
    * Returns true if it was successfully added.
    *
    * Ownership of the PrincipalChangeObserver remains with the caller, and it's
    * the caller's responsibility to remove the observer before it dies.
    */
   bool AddPrincipalChangeObserver(
@@ -525,72 +540,100 @@ class MediaStreamTrack : public DOMEvent
   virtual void AddDirectListener(DirectMediaStreamTrackListener* aListener);
   void RemoveDirectListener(DirectMediaStreamTrackListener* aListener);
 
   /**
    * Sets up a MediaInputPort from the underlying track that this
    * MediaStreamTrack represents, to aStream, and returns it.
    */
   already_AddRefed<MediaInputPort> ForwardTrackContentsTo(
-      ProcessedMediaStream* aStream, TrackID aDestinationTrackID = TRACK_ANY);
+      ProcessedMediaStream* aStream);
 
-  /**
-   * Returns true if this track is connected to aPort and forwarded to aPort's
-   * output stream.
-   */
-  bool IsForwardedThrough(MediaInputPort* aPort);
-
-  void SetMediaStreamSizeListener(DirectMediaStreamTrackListener* aListener);
-
-  // Returns the original DOMMediaStream's underlying input stream.
-  MediaStream* GetInputStream();
-
-  TrackID GetInputTrackId() const { return mInputTrackID; }
+  TrackID GetTrackID() const { return mTrackID; }
 
  protected:
   virtual ~MediaStreamTrack();
 
   /**
+   * Forces the ready state to a particular value, for instance when we're
+   * cloning an already ended track.
+   */
+  void SetReadyState(MediaStreamTrackState aState);
+
+  /**
+   * Notified by the MediaStreamGraph, through our owning MediaStream on the
+   * main thread.
+   *
+   * Note that this sets the track to ended and raises the "ended" event
+   * synchronously.
+   */
+  void OverrideEnded();
+
+  /**
+   * Called by the MSGListener when this track's PrincipalHandle changes on
+   * the MediaStreamGraph thread. When the PrincipalHandle matches the pending
+   * principal we know that the principal change has propagated to consumers.
+   */
+  void NotifyPrincipalHandleChanged(const PrincipalHandle& aNewPrincipalHandle);
+
+  /**
+   * Called when this track's readyState transitions to "ended".
+   * Notifies all MediaStreamTrackConsumers that this track ended.
+   */
+  void NotifyEnded();
+
+  /**
+   * Called when mSource's principal has changed.
+   */
+  void PrincipalChanged();
+
+  /**
+   * Called when mSource's muted state has changed.
+   */
+  void MutedChanged(bool aNewState);
+
+  /**
    * Sets this track's muted state without raising any events.
    */
   void SetMuted(bool aMuted) { mMuted = aMuted; }
 
   virtual void Destroy();
 
-  // Returns the owning DOMMediaStream's underlying owned stream.
-  ProcessedMediaStream* GetOwnedStream();
-
-  // Returns the original DOMMediaStream. If this track is a clone,
-  // the original track's owning DOMMediaStream is returned.
-  DOMMediaStream* GetInputDOMStream();
-
   /**
    * Sets the principal and notifies PrincipalChangeObservers if it changes.
    */
   void SetPrincipal(nsIPrincipal* aPrincipal);
 
   /**
-   * Creates a new MediaStreamTrack with the same type, input track ID and
-   * source as this MediaStreamTrack.
-   * aTrackID is the TrackID the new track will have in its owned stream.
+   * Creates a new MediaStreamTrack with the same kind, input stream, input
+   * track ID and source as this MediaStreamTrack.
    */
-  virtual already_AddRefed<MediaStreamTrack> CloneInternal(
-      DOMMediaStream* aOwningStream, TrackID aTrackID) = 0;
+  virtual already_AddRefed<MediaStreamTrack> CloneInternal() = 0;
 
   nsTArray<PrincipalChangeObserver<MediaStreamTrack>*>
       mPrincipalChangeObservers;
 
   nsTArray<WeakPtr<MediaStreamTrackConsumer>> mConsumers;
 
-  RefPtr<DOMMediaStream> mOwningStream;
-  TrackID mTrackID;
-  TrackID mInputTrackID;
+  // The window this track was created in. Exposed as our parent object.
+  nsCOMPtr<nsPIDOMWindowInner> mWindow;
+
+  // The input MediaStream assigned us by the data producer. Valid until we end.
+  // Owned by the producer.
+  RefPtr<MediaStream> mInputStream;
+  // The MediaStream representing this MediaStreamTrack in the MediaStreamGraph.
+  // Valid until we end. Owned by us.
+  RefPtr<ProcessedMediaStream> mStream;
+  // The MediaInputPort connecting mInputStream to mStream. Valid until we end.
+  // Owned by us.
+  RefPtr<MediaInputPort> mPort;
+  // The TrackID of this track in mInputStream and mStream.
+  const TrackID mTrackID;
   RefPtr<MediaStreamTrackSource> mSource;
   const UniquePtr<TrackSink> mSink;
-  RefPtr<MediaStreamTrack> mOriginalTrack;
   nsCOMPtr<nsIPrincipal> mPrincipal;
   nsCOMPtr<nsIPrincipal> mPendingPrincipal;
   RefPtr<MSGListener> mMSGListener;
   // Keep tracking MediaStreamTrackListener and DirectMediaStreamTrackListener,
  // so we can remove them in |Destroy|.
   nsTArray<RefPtr<MediaStreamTrackListener>> mTrackListeners;
   nsTArray<RefPtr<DirectMediaStreamTrackListener>> mDirectTrackListeners;
   nsString mID;
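
A short sketch of the per-track plumbing described in the class comment above,
mirroring what the constructor sets up; `inputStream` stands for the
producer-owned MediaStream:

    // Sketch only: each track owns a TrackUnionStream fed from the
    // producer-owned input stream through a MediaInputPort.
    RefPtr<ProcessedMediaStream> stream =
        inputStream->Graph()->CreateTrackUnionStream();
    RefPtr<MediaInputPort> port = stream->AllocateInputPort(inputStream);
    // Ending the track destroys `port` and `stream` only; `inputStream` (shared
    // with any clones) stays alive until the producer destroys it.
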
new file mode 100644
--- /dev/null
+++ b/dom/media/MediaStreamWindowCapturer.cpp
@@ -0,0 +1,73 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "MediaStreamWindowCapturer.h"
+
+#include "AudioStreamTrack.h"
+#include "DOMMediaStream.h"
+#include "MediaStreamGraph.h"
+
+namespace mozilla {
+using dom::AudioStreamTrack;
+using dom::MediaStreamTrack;
+
+MediaStreamWindowCapturer::CapturedTrack::CapturedTrack(
+    MediaStreamTrack* aTrack, uint64_t aWindowID)
+    : mTrack(aTrack),
+      mPort(aTrack->Graph()->ConnectToCaptureStream(aWindowID,
+                                                    aTrack->GetStream())) {}
+
+MediaStreamWindowCapturer::CapturedTrack::~CapturedTrack() { mPort->Destroy(); }
+
+MediaStreamWindowCapturer::MediaStreamWindowCapturer(DOMMediaStream* aStream,
+                                                     uint64_t aWindowId)
+    : mStream(aStream), mWindowId(aWindowId) {
+  mStream->RegisterTrackListener(this);
+  nsTArray<RefPtr<AudioStreamTrack>> tracks;
+  mStream->GetAudioTracks(tracks);
+  for (const auto& t : tracks) {
+    if (t->Ended()) {
+      continue;
+    }
+    AddTrack(t);
+  }
+}
+
+MediaStreamWindowCapturer::~MediaStreamWindowCapturer() {
+  if (mStream) {
+    mStream->UnregisterTrackListener(this);
+  }
+}
+
+void MediaStreamWindowCapturer::NotifyTrackAdded(
+    const RefPtr<MediaStreamTrack>& aTrack) {
+  if (AudioStreamTrack* at = aTrack->AsAudioStreamTrack()) {
+    AddTrack(at);
+  }
+}
+
+void MediaStreamWindowCapturer::NotifyTrackRemoved(
+    const RefPtr<MediaStreamTrack>& aTrack) {
+  if (AudioStreamTrack* at = aTrack->AsAudioStreamTrack()) {
+    RemoveTrack(at);
+  }
+}
+
+void MediaStreamWindowCapturer::AddTrack(AudioStreamTrack* aTrack) {
+  if (aTrack->Ended()) {
+    return;
+  }
+  mTracks.AppendElement(MakeUnique<CapturedTrack>(aTrack, mWindowId));
+}
+
+void MediaStreamWindowCapturer::RemoveTrack(AudioStreamTrack* aTrack) {
+  for (size_t i = mTracks.Length(); i > 0; --i) {
+    if (mTracks[i - 1]->mTrack == aTrack) {
+      mTracks.RemoveElementAt(i - 1);
+      break;
+    }
+  }
+}
+}  // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/dom/media/MediaStreamWindowCapturer.h
@@ -0,0 +1,49 @@
+/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef MediaStreamWindowCapturer_h
+#define MediaStreamWindowCapturer_h
+
+#include "DOMMediaStream.h"
+
+namespace mozilla {
+namespace dom {
+class AudioStreamTrack;
+class MediaStreamTrack;
+}  // namespace dom
+
+/**
+ * Given a DOMMediaStream and a window id, this class will pipe the audio from
+ * all live audio tracks in the stream to the MediaStreamGraph's window capture
+ * mechanism.
+ */
+class MediaStreamWindowCapturer : public DOMMediaStream::TrackListener {
+ public:
+  MediaStreamWindowCapturer(DOMMediaStream* aStream, uint64_t aWindowId);
+  ~MediaStreamWindowCapturer();
+
+  void NotifyTrackAdded(const RefPtr<dom::MediaStreamTrack>& aTrack) override;
+  void NotifyTrackRemoved(const RefPtr<dom::MediaStreamTrack>& aTrack) override;
+
+  struct CapturedTrack {
+    CapturedTrack(dom::MediaStreamTrack* aTrack, uint64_t aWindowID);
+    ~CapturedTrack();
+
+    const WeakPtr<dom::MediaStreamTrack> mTrack;
+    const RefPtr<MediaInputPort> mPort;
+  };
+
+  const WeakPtr<DOMMediaStream> mStream;
+  const uint64_t mWindowId;
+
+ protected:
+  void AddTrack(dom::AudioStreamTrack* aTrack);
+  void RemoveTrack(dom::AudioStreamTrack* aTrack);
+
+  nsTArray<UniquePtr<CapturedTrack>> mTracks;
+};
+}  // namespace mozilla
+
+#endif /* MediaStreamWindowCapturer_h */
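
A usage sketch for the class declared above; `domStream` and `windowId` stand
for the caller's DOMMediaStream and inner window id:

    // Sketch only: capture all live audio tracks of `domStream` for `windowId`.
    auto capturer = MakeUnique<MediaStreamWindowCapturer>(domStream, windowId);
    // Tracks added to or removed from the stream later are picked up through
    // the DOMMediaStream::TrackListener callbacks; deleting `capturer` tears
    // down the per-track MediaInputPorts.
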
--- a/dom/media/MediaTrackList.cpp
+++ b/dom/media/MediaTrackList.cpp
@@ -4,17 +4,16 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaTrack.h"
 #include "MediaTrackList.h"
 #include "mozilla/AsyncEventDispatcher.h"
 #include "mozilla/dom/HTMLMediaElement.h"
 #include "mozilla/dom/AudioTrack.h"
-#include "mozilla/dom/VideoStreamTrack.h"
 #include "mozilla/dom/VideoTrack.h"
 #include "mozilla/dom/TrackEvent.h"
 #include "nsThreadUtils.h"
 
 namespace mozilla {
 namespace dom {
 
 MediaTrackList::MediaTrackList(nsIGlobalObject* aOwnerObject,
@@ -81,19 +80,20 @@ void MediaTrackList::RemoveTracks() {
   while (!mTracks.IsEmpty()) {
     RefPtr<MediaTrack> track = mTracks.LastElement();
     RemoveTrack(track);
   }
 }
 
 already_AddRefed<AudioTrack> MediaTrackList::CreateAudioTrack(
     nsIGlobalObject* aOwnerGlobal, const nsAString& aId, const nsAString& aKind,
-    const nsAString& aLabel, const nsAString& aLanguage, bool aEnabled) {
-  RefPtr<AudioTrack> track =
-      new AudioTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage, aEnabled);
+    const nsAString& aLabel, const nsAString& aLanguage, bool aEnabled,
+    AudioStreamTrack* aAudioTrack) {
+  RefPtr<AudioTrack> track = new AudioTrack(aOwnerGlobal, aId, aKind, aLabel,
+                                            aLanguage, aEnabled, aAudioTrack);
   return track.forget();
 }
 
 already_AddRefed<VideoTrack> MediaTrackList::CreateVideoTrack(
     nsIGlobalObject* aOwnerGlobal, const nsAString& aId, const nsAString& aKind,
     const nsAString& aLabel, const nsAString& aLanguage,
     VideoStreamTrack* aVideoTrack) {
   RefPtr<VideoTrack> track =
--- a/dom/media/MediaTrackList.h
+++ b/dom/media/MediaTrackList.h
@@ -9,23 +9,24 @@
 
 #include "mozilla/DOMEventTargetHelper.h"
 
 namespace mozilla {
 class DOMMediaStream;
 
 namespace dom {
 
+class AudioStreamTrack;
+class AudioTrack;
+class AudioTrackList;
 class HTMLMediaElement;
 class MediaTrack;
-class AudioTrackList;
+class VideoStreamTrack;
+class VideoTrack;
 class VideoTrackList;
-class AudioTrack;
-class VideoTrack;
-class VideoStreamTrack;
 
 /**
  * Base class of AudioTrackList and VideoTrackList. The AudioTrackList and
  * VideoTrackList objects represent a dynamic list of zero or more audio and
  * video tracks respectively.
  *
  * When a media element is to forget its media-resource-specific tracks, its
  * audio track list and video track list will be emptied.
@@ -48,20 +49,23 @@ class MediaTrackList : public DOMEventTa
   void AddTrack(MediaTrack* aTrack);
 
  // In the remove-track case, VideoTrackList::mSelectedIndex should be updated
  // because mTracks changed. There is no need to handle this in the add-track case.
   virtual void RemoveTrack(const RefPtr<MediaTrack>& aTrack);
 
   void RemoveTracks();
 
+  // For the case where the src of HTMLMediaElement is not a MediaStream,
+  // leave aAudioTrack as its default (nullptr).
   static already_AddRefed<AudioTrack> CreateAudioTrack(
       nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
       const nsAString& aKind, const nsAString& aLabel,
-      const nsAString& aLanguage, bool aEnabled);
+      const nsAString& aLanguage, bool aEnabled,
+      AudioStreamTrack* aAudioTrack = nullptr);
 
  // For the case where the src of HTMLMediaElement is not a MediaStream,
  // leave aVideoTrack as its default (nullptr).
   static already_AddRefed<VideoTrack> CreateVideoTrack(
       nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
       const nsAString& aKind, const nsAString& aLabel,
       const nsAString& aLanguage, VideoStreamTrack* aVideoTrack = nullptr);
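
A usage sketch for the extended CreateAudioTrack() above; `global`, `id` and
`audioStreamTrack` stand for the caller's values, and the kind and label
strings are arbitrary examples:

    // Sketch only: build an AudioTrack backed by an AudioStreamTrack.
    RefPtr<AudioTrack> track = MediaTrackList::CreateAudioTrack(
        global, id, NS_LITERAL_STRING("main"), NS_LITERAL_STRING("Main audio"),
        NS_LITERAL_STRING("en"), /* aEnabled = */ true, audioStreamTrack);
    // For a non-MediaStream src the last argument is omitted and defaults to
    // nullptr, preserving the previous behaviour.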
 
--- a/dom/media/VideoStreamTrack.cpp
+++ b/dom/media/VideoStreamTrack.cpp
@@ -9,36 +9,42 @@
 #include "MediaStreamListener.h"
 #include "nsContentUtils.h"
 #include "nsGlobalWindowInner.h"
 #include "VideoOutput.h"
 
 namespace mozilla {
 namespace dom {
 
-VideoStreamTrack::VideoStreamTrack(DOMMediaStream* aStream, TrackID aTrackID,
-                                   TrackID aInputTrackID,
+VideoStreamTrack::VideoStreamTrack(nsPIDOMWindowInner* aWindow,
+                                   MediaStream* aInputStream, TrackID aTrackID,
                                    MediaStreamTrackSource* aSource,
                                    const MediaTrackConstraints& aConstraints)
-    : MediaStreamTrack(aStream, aTrackID, aInputTrackID, aSource,
-                       aConstraints) {}
+    : MediaStreamTrack(aWindow, aInputStream, aTrackID, aSource, aConstraints) {
+}
 
 void VideoStreamTrack::Destroy() {
   mVideoOutputs.Clear();
   MediaStreamTrack::Destroy();
 }
 
 void VideoStreamTrack::AddVideoOutput(VideoFrameContainer* aSink) {
+  if (Ended()) {
+    return;
+  }
   auto output = MakeRefPtr<VideoOutput>(
       aSink, nsGlobalWindowInner::Cast(GetParentObject())
                  ->AbstractMainThreadFor(TaskCategory::Other));
   AddVideoOutput(output);
 }
 
 void VideoStreamTrack::AddVideoOutput(VideoOutput* aOutput) {
+  if (Ended()) {
+    return;
+  }
   for (const auto& output : mVideoOutputs) {
     if (output == aOutput) {
       MOZ_ASSERT_UNREACHABLE("A VideoOutput was already added");
       return;
     }
   }
   mVideoOutputs.AppendElement(aOutput);
   AddDirectListener(aOutput);
--- a/dom/media/VideoStreamTrack.h
+++ b/dom/media/VideoStreamTrack.h
@@ -14,17 +14,17 @@ namespace mozilla {
 class VideoFrameContainer;
 class VideoOutput;
 
 namespace dom {
 
 class VideoStreamTrack : public MediaStreamTrack {
  public:
   VideoStreamTrack(
-      DOMMediaStream* aStream, TrackID aTrackID, TrackID aInputTrackID,
+      nsPIDOMWindowInner* aWindow, MediaStream* aInputStream, TrackID aTrackID,
       MediaStreamTrackSource* aSource,
       const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
 
   void Destroy() override;
 
   VideoStreamTrack* AsVideoStreamTrack() override { return this; }
   const VideoStreamTrack* AsVideoStreamTrack() const override { return this; }
 
@@ -34,20 +34,20 @@ class VideoStreamTrack : public MediaStr
   void RemoveVideoOutput(VideoOutput* aOutput);
 
   // WebIDL
   void GetKind(nsAString& aKind) override { aKind.AssignLiteral("video"); }
 
   void GetLabel(nsAString& aLabel, CallerType aCallerType) override;
 
  protected:
-  already_AddRefed<MediaStreamTrack> CloneInternal(
-      DOMMediaStream* aOwningStream, TrackID aTrackID) override {
-    return do_AddRef(new VideoStreamTrack(
-        aOwningStream, aTrackID, mInputTrackID, mSource, mConstraints));
+  already_AddRefed<MediaStreamTrack> CloneInternal() override {
+    return do_AddRef(
+        new VideoStreamTrack(mWindow, Ended() ? nullptr : mInputStream.get(),
+                             mTrackID, mSource, mConstraints));
   }
 
  private:
   nsTArray<RefPtr<VideoOutput>> mVideoOutputs;
 };
 
 }  // namespace dom
 }  // namespace mozilla
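
A short sketch of the resulting clone semantics; `track` stands for an existing
VideoStreamTrack:

    // Sketch only: a live clone shares the producer's input stream; cloning an
    // ended track passes a null input stream, so the clone is constructed
    // directly in the Ended state.
    RefPtr<MediaStreamTrack> clone = track->Clone();
    MOZ_ASSERT(clone->ReadyState() == track->ReadyState());
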
--- a/dom/media/VideoTrack.cpp
+++ b/dom/media/VideoTrack.cpp
@@ -11,22 +11,22 @@
 #include "mozilla/dom/VideoTrackList.h"
 
 namespace mozilla {
 namespace dom {
 
 VideoTrack::VideoTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
                        const nsAString& aKind, const nsAString& aLabel,
                        const nsAString& aLanguage,
-                       VideoStreamTrack* aStreamTarck)
+                       VideoStreamTrack* aStreamTrack)
     : MediaTrack(aOwnerGlobal, aId, aKind, aLabel, aLanguage),
       mSelected(false),
-      mVideoStreamTrack(aStreamTarck) {}
+      mVideoStreamTrack(aStreamTrack) {}
 
-VideoTrack::~VideoTrack() {}
+VideoTrack::~VideoTrack() = default;
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(VideoTrack, MediaTrack, mVideoStreamTrack)
 
 NS_IMPL_ADDREF_INHERITED(VideoTrack, MediaTrack)
 NS_IMPL_RELEASE_INHERITED(VideoTrack, MediaTrack)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(VideoTrack)
 NS_INTERFACE_MAP_END_INHERITING(MediaTrack)
 
--- a/dom/media/VideoTrack.h
+++ b/dom/media/VideoTrack.h
@@ -15,17 +15,17 @@ namespace dom {
 class VideoTrackList;
 class VideoStreamTrack;
 
 class VideoTrack : public MediaTrack {
  public:
   VideoTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
              const nsAString& aKind, const nsAString& aLabel,
              const nsAString& aLanguage,
-             VideoStreamTrack* aStreamTarck = nullptr);
+             VideoStreamTrack* aStreamTrack = nullptr);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(VideoTrack, MediaTrack)
 
   JSObject* WrapObject(JSContext* aCx,
                        JS::Handle<JSObject*> aGivenProto) override;
 
   VideoTrack* AsVideoTrack() override { return this; }
--- a/dom/media/mediasink/DecodedStream.cpp
+++ b/dom/media/mediasink/DecodedStream.cpp
@@ -26,120 +26,117 @@ using media::NullableTimeUnit;
 using media::TimeUnit;
 
 /*
  * A container class to make it easier to pass the playback info all the
  * way to DecodedStreamGraphListener from DecodedStream.
  */
 struct PlaybackInfoInit {
   TimeUnit mStartTime;
+  StreamTime mOffset;
   MediaInfo mInfo;
-  TrackID mAudioTrackID;
-  TrackID mVideoTrackID;
 };
 
 class DecodedStreamGraphListener;
 
 class DecodedStreamTrackListener : public MediaStreamTrackListener {
  public:
   DecodedStreamTrackListener(DecodedStreamGraphListener* aGraphListener,
-                             SourceMediaStream* aStream, TrackID aTrackID);
+                             SourceMediaStream* aStream);
 
   void NotifyOutput(MediaStreamGraph* aGraph,
                     StreamTime aCurrentTrackTime) override;
   void NotifyEnded() override;
 
  private:
   const RefPtr<DecodedStreamGraphListener> mGraphListener;
   const RefPtr<SourceMediaStream> mStream;
-  const TrackID mTrackID;
 };
 
 class DecodedStreamGraphListener {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(DecodedStreamGraphListener)
  public:
   DecodedStreamGraphListener(
-      SourceMediaStream* aStream, TrackID aAudioTrackID,
+      SourceMediaStream* aAudioStream,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedHolder,
-      TrackID aVideoTrackID,
+      SourceMediaStream* aVideoStream,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedHolder,
-      AbstractThread* aMainThread)
-      : mAudioTrackListener(IsTrackIDExplicit(aAudioTrackID)
-                                ? MakeRefPtr<DecodedStreamTrackListener>(
-                                      this, aStream, aAudioTrackID)
-                                : nullptr),
+      StreamTime aOffset, AbstractThread* aMainThread)
+      : mAudioTrackListener(
+            aAudioStream
+                ? MakeRefPtr<DecodedStreamTrackListener>(this, aAudioStream)
+                : nullptr),
         mAudioEndedHolder(std::move(aAudioEndedHolder)),
-        mVideoTrackListener(IsTrackIDExplicit(aVideoTrackID)
-                                ? MakeRefPtr<DecodedStreamTrackListener>(
-                                      this, aStream, aVideoTrackID)
-                                : nullptr),
+        mVideoTrackListener(
+            aVideoStream
+                ? MakeRefPtr<DecodedStreamTrackListener>(this, aVideoStream)
+                : nullptr),
         mVideoEndedHolder(std::move(aVideoEndedHolder)),
-        mStream(aStream),
-        mAudioTrackID(aAudioTrackID),
-        mVideoTrackID(aVideoTrackID),
+        mAudioStream(aAudioStream),
+        mVideoStream(aVideoStream),
+        mOffset(aOffset),
         mAbstractMainThread(aMainThread) {
     MOZ_ASSERT(NS_IsMainThread());
     if (mAudioTrackListener) {
-      mStream->AddTrackListener(mAudioTrackListener, mAudioTrackID);
+      mAudioStream->AddTrackListener(mAudioTrackListener,
+                                     OutputStreamManager::sTrackID);
     } else {
       mAudioEnded = true;
       mAudioEndedHolder.ResolveIfExists(true, __func__);
     }
 
     if (mVideoTrackListener) {
-      mStream->AddTrackListener(mVideoTrackListener, mVideoTrackID);
+      mVideoStream->AddTrackListener(mVideoTrackListener,
+                                     OutputStreamManager::sTrackID);
     } else {
       mVideoEnded = true;
       mVideoEndedHolder.ResolveIfExists(true, __func__);
     }
   }
 
-  void NotifyOutput(TrackID aTrackID, StreamTime aCurrentTrackTime) {
-    if (aTrackID == mAudioTrackID) {
-      if (aCurrentTrackTime >= mAudioEnd) {
-        mStream->EndTrack(mAudioTrackID);
+  void NotifyOutput(SourceMediaStream* aStream, StreamTime aCurrentTrackTime) {
+    StreamTime currentTime = aCurrentTrackTime + mOffset;
+    if (aStream == mAudioStream) {
+      if (currentTime >= mAudioEnd) {
+        mAudioStream->EndTrack(OutputStreamManager::sTrackID);
       }
-    } else if (aTrackID == mVideoTrackID) {
-      if (aCurrentTrackTime >= mVideoEnd) {
-        mStream->EndTrack(mVideoTrackID);
+    } else if (aStream == mVideoStream) {
+      if (currentTime >= mVideoEnd) {
+        mVideoStream->EndTrack(OutputStreamManager::sTrackID);
       }
     } else {
-      MOZ_CRASH("Unexpected TrackID");
+      MOZ_CRASH("Unexpected source stream");
     }
-    if (aTrackID != mAudioTrackID && mAudioTrackID != TRACK_NONE &&
-        !mAudioEnded) {
+    if (aStream != mAudioStream && mAudioStream && !mAudioEnded) {
       // Only audio playout drives the clock forward, if present and live.
       return;
     }
-    MOZ_ASSERT_IF(aTrackID == mAudioTrackID, !mAudioEnded);
-    MOZ_ASSERT_IF(aTrackID == mVideoTrackID, !mVideoEnded);
-    mOnOutput.Notify(mStream->StreamTimeToMicroseconds(aCurrentTrackTime));
+    MOZ_ASSERT_IF(aStream == mAudioStream, !mAudioEnded);
+    MOZ_ASSERT_IF(aStream == mVideoStream, !mVideoEnded);
+    mOnOutput.Notify(aStream->StreamTimeToMicroseconds(currentTime));
   }
 
-  void NotifyEnded(TrackID aTrackID) {
-    if (aTrackID == mAudioTrackID) {
+  void NotifyEnded(SourceMediaStream* aStream) {
+    if (aStream == mAudioStream) {
       mAudioEnded = true;
-    } else if (aTrackID == mVideoTrackID) {
+    } else if (aStream == mVideoStream) {
       mVideoEnded = true;
     } else {
-      MOZ_CRASH("Unexpected TrackID");
+      MOZ_CRASH("Unexpected source stream");
     }
-    mStream->Graph()->DispatchToMainThreadStableState(
-        NewRunnableMethod<TrackID>(
+    aStream->Graph()->DispatchToMainThreadStableState(
+        NewRunnableMethod<RefPtr<SourceMediaStream>>(
             "DecodedStreamGraphListener::DoNotifyTrackEnded", this,
-            &DecodedStreamGraphListener::DoNotifyTrackEnded, aTrackID));
+            &DecodedStreamGraphListener::DoNotifyTrackEnded, aStream));
   }
 
-  TrackID AudioTrackID() const { return mAudioTrackID; }
-
-  TrackID VideoTrackID() const { return mVideoTrackID; }
-
   /**
-   * Tell the graph listener to end the given track after it has seen at least
-   * aEnd worth of output reported as processed by the graph.
+   * Tell the graph listener to end the track sourced by the given stream after
+   * it has seen at least aEnd worth of output reported as processed by the
+   * graph.
    *
    * A StreamTime of STREAM_TIME_MAX indicates that the track has no end and is
    * the default.
    *
    * This method of ending tracks is needed because the MediaStreamGraph
    * processes ended tracks (through SourceMediaStream::EndTrack) at the
    * beginning of an iteration, but waits until the end of the iteration to
    * process any ControlMessages. When such a ControlMessage is a listener that
@@ -147,50 +144,52 @@ class DecodedStreamGraphListener {
    * track ends before the listener tracking this ending is added. This can lead
    * to a MediaStreamTrack ending on main thread (it uses another listener)
    * before the listeners to render the track get added, potentially meaning a
    * media element doesn't progress before reaching the end although data was
    * available.
    *
    * Callable from any thread.
    */
-  void EndTrackAt(TrackID aTrackID, StreamTime aEnd) {
-    if (aTrackID == mAudioTrackID) {
+  void EndTrackAt(SourceMediaStream* aStream, StreamTime aEnd) {
+    if (aStream == mAudioStream) {
       mAudioEnd = aEnd;
-    } else if (aTrackID == mVideoTrackID) {
+    } else if (aStream == mVideoStream) {
       mVideoEnd = aEnd;
     } else {
-      MOZ_CRASH("Unexpected TrackID");
+      MOZ_CRASH("Unexpected source stream");
     }
   }
 
-  void DoNotifyTrackEnded(TrackID aTrackID) {
+  void DoNotifyTrackEnded(SourceMediaStream* aStream) {
     MOZ_ASSERT(NS_IsMainThread());
-    if (aTrackID == mAudioTrackID) {
+    if (aStream == mAudioStream) {
       mAudioEndedHolder.ResolveIfExists(true, __func__);
-    } else if (aTrackID == mVideoTrackID) {
+    } else if (aStream == mVideoStream) {
       mVideoEndedHolder.ResolveIfExists(true, __func__);
     } else {
-      MOZ_CRASH("Unexpected track id");
+      MOZ_CRASH("Unexpected source stream");
     }
   }
 
   void Forget() {
     MOZ_ASSERT(NS_IsMainThread());
 
-    if (mAudioTrackListener && !mStream->IsDestroyed()) {
-      mStream->EndTrack(mAudioTrackID);
-      mStream->RemoveTrackListener(mAudioTrackListener, mAudioTrackID);
+    if (mAudioTrackListener && !mAudioStream->IsDestroyed()) {
+      mAudioStream->EndTrack(OutputStreamManager::sTrackID);
+      mAudioStream->RemoveTrackListener(mAudioTrackListener,
+                                        OutputStreamManager::sTrackID);
     }
     mAudioTrackListener = nullptr;
     mAudioEndedHolder.ResolveIfExists(false, __func__);
 
-    if (mVideoTrackListener && !mStream->IsDestroyed()) {
-      mStream->EndTrack(mVideoTrackID);
-      mStream->RemoveTrackListener(mVideoTrackListener, mVideoTrackID);
+    if (mVideoTrackListener && !mVideoStream->IsDestroyed()) {
+      mVideoStream->EndTrack(OutputStreamManager::sTrackID);
+      mVideoStream->RemoveTrackListener(mVideoTrackListener,
+                                        OutputStreamManager::sTrackID);
     }
     mVideoTrackListener = nullptr;
     mVideoEndedHolder.ResolveIfExists(false, __func__);
   }
 
   MediaEventSource<int64_t>& OnOutput() { return mOnOutput; }
 
  private:
@@ -207,50 +206,51 @@ class DecodedStreamGraphListener {
   RefPtr<DecodedStreamTrackListener> mVideoTrackListener;
   MozPromiseHolder<DecodedStream::EndedPromise> mVideoEndedHolder;
 
   // Graph thread only.
   bool mAudioEnded = false;
   bool mVideoEnded = false;
 
   // Any thread.
-  const RefPtr<SourceMediaStream> mStream;
-  const TrackID mAudioTrackID;
+  const RefPtr<SourceMediaStream> mAudioStream;
+  const RefPtr<SourceMediaStream> mVideoStream;
   Atomic<StreamTime> mAudioEnd{STREAM_TIME_MAX};
-  const TrackID mVideoTrackID;
   Atomic<StreamTime> mVideoEnd{STREAM_TIME_MAX};
+  const StreamTime mOffset;
   const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 DecodedStreamTrackListener::DecodedStreamTrackListener(
-    DecodedStreamGraphListener* aGraphListener, SourceMediaStream* aStream,
-    TrackID aTrackID)
-    : mGraphListener(aGraphListener), mStream(aStream), mTrackID(aTrackID) {}
+    DecodedStreamGraphListener* aGraphListener, SourceMediaStream* aStream)
+    : mGraphListener(aGraphListener), mStream(aStream) {}
 
 void DecodedStreamTrackListener::NotifyOutput(MediaStreamGraph* aGraph,
                                               StreamTime aCurrentTrackTime) {
-  mGraphListener->NotifyOutput(mTrackID, aCurrentTrackTime);
+  mGraphListener->NotifyOutput(mStream, aCurrentTrackTime);
 }
 
 void DecodedStreamTrackListener::NotifyEnded() {
-  mGraphListener->NotifyEnded(mTrackID);
+  mGraphListener->NotifyEnded(mStream);
 }
 
-/*
- * All MediaStream-related data is protected by the decoder's monitor.
- * We have at most one DecodedStreamDaata per MediaDecoder. Its stream
- * is used as the input for each ProcessedMediaStream created by calls to
- * captureStream(UntilEnded). Seeking creates a new source stream, as does
- * replaying after the input as ended. In the latter case, the new source is
- * not connected to streams created by captureStreamUntilEnded.
+/**
+ * All MediaStream-related data is protected by the decoder's monitor. We have
+ * at most one DecodedStreamData per MediaDecoder. Its streams are used as
+ * inputs for all output tracks created by OutputStreamManager after calls to
+ * captureStream/UntilEnded. Seeking creates new source streams, as does
+ * replaying after the input has ended. In the latter case, the new sources are
+ * not connected to tracks created by captureStreamUntilEnded.
  */
 class DecodedStreamData final {
  public:
   DecodedStreamData(
       OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
+      RefPtr<SourceMediaStream> aAudioStream,
+      RefPtr<SourceMediaStream> aVideoStream,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedPromise,
       MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedPromise,
       AbstractThread* aMainThread);
   ~DecodedStreamData();
   MediaEventSource<int64_t>& OnOutput();
   void Forget();
   void GetDebugInfo(dom::DecodedStreamDataDebugInfo& aInfo);
 
@@ -261,19 +261,19 @@ class DecodedStreamData final {
                            const PrincipalHandle& aPrincipalHandle);
 
   /* The following group of fields are protected by the decoder's monitor
    * and can be read or written on any thread.
    */
   // Count of audio frames written to the stream
   int64_t mAudioFramesWritten;
   // Count of video frames written to the stream in the stream's rate
-  StreamTime mStreamVideoWritten;
+  StreamTime mVideoStreamWritten;
   // Count of audio frames written to the stream in the stream's rate
-  StreamTime mStreamAudioWritten;
+  StreamTime mAudioStreamWritten;
   // mNextAudioTime is the end timestamp for the last packet sent to the stream.
   // Therefore audio packets starting at or after this time need to be copied
   // to the output stream.
   TimeUnit mNextAudioTime;
   // mLastVideoStartTime is the start timestamp for the last packet sent to the
   // stream. Therefore video packets starting after this time need to be copied
   // to the output stream.
   NullableTimeUnit mLastVideoStartTime;
@@ -286,67 +286,70 @@ class DecodedStreamData final {
   TimeStamp mLastVideoTimeStamp;
   // The last video image sent to the stream. Useful if we need to replicate
   // the image.
   RefPtr<layers::Image> mLastVideoImage;
   gfx::IntSize mLastVideoImageDisplaySize;
   bool mHaveSentFinishAudio;
   bool mHaveSentFinishVideo;
 
-  // The decoder is responsible for calling Destroy() on this stream.
-  const RefPtr<SourceMediaStream> mStream;
+  const RefPtr<SourceMediaStream> mAudioStream;
+  const RefPtr<SourceMediaStream> mVideoStream;
   const RefPtr<DecodedStreamGraphListener> mListener;
 
   const RefPtr<OutputStreamManager> mOutputStreamManager;
   const RefPtr<AbstractThread> mAbstractMainThread;
 };
 
 DecodedStreamData::DecodedStreamData(
     OutputStreamManager* aOutputStreamManager, PlaybackInfoInit&& aInit,
+    RefPtr<SourceMediaStream> aAudioStream,
+    RefPtr<SourceMediaStream> aVideoStream,
     MozPromiseHolder<DecodedStream::EndedPromise>&& aAudioEndedPromise,
     MozPromiseHolder<DecodedStream::EndedPromise>&& aVideoEndedPromise,
     AbstractThread* aMainThread)
     : mAudioFramesWritten(0),
-      mStreamVideoWritten(0),
-      mStreamAudioWritten(0),
+      mVideoStreamWritten(0),
+      mAudioStreamWritten(0),
       mNextAudioTime(aInit.mStartTime),
       mHaveSentFinishAudio(false),
       mHaveSentFinishVideo(false),
-      mStream(aOutputStreamManager->mSourceStream),
+      mAudioStream(std::move(aAudioStream)),
+      mVideoStream(std::move(aVideoStream)),
       // DecodedStreamGraphListener will resolve these promises.
       mListener(MakeRefPtr<DecodedStreamGraphListener>(
-          mStream, aInit.mAudioTrackID, std::move(aAudioEndedPromise),
-          aInit.mVideoTrackID, std::move(aVideoEndedPromise), aMainThread)),
+          mAudioStream, std::move(aAudioEndedPromise), mVideoStream,
+          std::move(aVideoEndedPromise), aInit.mOffset, aMainThread)),
       mOutputStreamManager(aOutputStreamManager),
       mAbstractMainThread(aMainThread) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(
-      mOutputStreamManager->HasTracks(aInit.mAudioTrackID, aInit.mVideoTrackID),
+      mOutputStreamManager->HasTracks(mAudioStream, mVideoStream),
       "Tracks must be pre-created on main thread");
-  if (IsTrackIDExplicit(aInit.mAudioTrackID)) {
-    mStream->AddAudioTrack(aInit.mAudioTrackID, aInit.mInfo.mAudio.mRate,
-                           new AudioSegment());
+  if (mAudioStream) {
+    mAudioStream->AddAudioTrack(OutputStreamManager::sTrackID,
+                                aInit.mInfo.mAudio.mRate, new AudioSegment());
   }
-  if (IsTrackIDExplicit(aInit.mVideoTrackID)) {
-    mStream->AddTrack(aInit.mVideoTrackID, new VideoSegment());
+  if (mVideoStream) {
+    mVideoStream->AddTrack(OutputStreamManager::sTrackID, new VideoSegment());
   }
 }
 
 DecodedStreamData::~DecodedStreamData() { MOZ_ASSERT(NS_IsMainThread()); }
 
 MediaEventSource<int64_t>& DecodedStreamData::OnOutput() {
   return mListener->OnOutput();
 }
 
 void DecodedStreamData::Forget() { mListener->Forget(); }
 
 void DecodedStreamData::GetDebugInfo(dom::DecodedStreamDataDebugInfo& aInfo) {
   aInfo.mInstance = NS_ConvertUTF8toUTF16(nsPrintfCString("%p", this));
   aInfo.mAudioFramesWritten = mAudioFramesWritten;
-  aInfo.mStreamAudioWritten = mStreamAudioWritten;
+  aInfo.mStreamAudioWritten = mAudioStreamWritten;
   aInfo.mNextAudioTime = mNextAudioTime.ToMicroseconds();
   aInfo.mLastVideoStartTime =
       mLastVideoStartTime.valueOr(TimeUnit::FromMicroseconds(-1))
           .ToMicroseconds();
   aInfo.mLastVideoEndTime =
       mLastVideoEndTime.valueOr(TimeUnit::FromMicroseconds(-1))
           .ToMicroseconds();
   aInfo.mHaveSentFinishAudio = mHaveSentFinishAudio;
@@ -431,30 +434,33 @@ nsresult DecodedStream::Start(const Time
       // This happens when RemoveOutput() is called immediately after
       // StartPlayback().
       if (mOutputStreamManager->IsEmpty()) {
         // Resolve the promise to indicate the end of playback.
         mAudioEndedPromise.Resolve(true, __func__);
         mVideoEndedPromise.Resolve(true, __func__);
         return NS_OK;
       }
-      if (mInit.mInfo.HasAudio() &&
-          !mOutputStreamManager->HasTrackType(MediaSegment::AUDIO)) {
-        mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
+      RefPtr<SourceMediaStream> audioStream =
+          mOutputStreamManager->GetPrecreatedTrackOfType(MediaSegment::AUDIO);
+      if (mInit.mInfo.HasAudio() && !audioStream) {
+        MOZ_DIAGNOSTIC_ASSERT(
+            !mOutputStreamManager->HasTrackType(MediaSegment::AUDIO));
+        audioStream = mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
       }
-      if (mInit.mInfo.HasVideo() &&
-          !mOutputStreamManager->HasTrackType(MediaSegment::VIDEO)) {
-        mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
+      RefPtr<SourceMediaStream> videoStream =
+          mOutputStreamManager->GetPrecreatedTrackOfType(MediaSegment::VIDEO);
+      if (mInit.mInfo.HasVideo() && !videoStream) {
+        MOZ_DIAGNOSTIC_ASSERT(
+            !mOutputStreamManager->HasTrackType(MediaSegment::VIDEO));
+        videoStream = mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
       }
-      mInit.mAudioTrackID =
-          mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::AUDIO);
-      mInit.mVideoTrackID =
-          mOutputStreamManager->GetLiveTrackIDFor(MediaSegment::VIDEO);
       mData = MakeUnique<DecodedStreamData>(
-          mOutputStreamManager, std::move(mInit), std::move(mAudioEndedPromise),
+          mOutputStreamManager, std::move(mInit), std::move(audioStream),
+          std::move(videoStream), std::move(mAudioEndedPromise),
           std::move(mVideoEndedPromise), mAbstractMainThread);
       return NS_OK;
     }
     UniquePtr<DecodedStreamData> ReleaseData() { return std::move(mData); }
 
    private:
     PlaybackInfoInit mInit;
     Promise mAudioEndedPromise;
@@ -463,27 +469,25 @@ nsresult DecodedStream::Start(const Time
     UniquePtr<DecodedStreamData> mData;
     const RefPtr<AbstractThread> mAbstractMainThread;
   };
 
   MozPromiseHolder<DecodedStream::EndedPromise> audioEndedHolder;
   mAudioEndedPromise = audioEndedHolder.Ensure(__func__);
   MozPromiseHolder<DecodedStream::EndedPromise> videoEndedHolder;
   mVideoEndedPromise = videoEndedHolder.Ensure(__func__);
-  PlaybackInfoInit init{aStartTime, aInfo, TRACK_INVALID, TRACK_INVALID};
+  PlaybackInfoInit init{aStartTime, mStreamTimeOffset, aInfo};
   nsCOMPtr<nsIRunnable> r = new R(std::move(init), std::move(audioEndedHolder),
                                   std::move(videoEndedHolder),
                                   mOutputStreamManager, mAbstractMainThread);
   SyncRunnable::DispatchToThread(
       SystemGroup::EventTargetFor(TaskCategory::Other), r);
   mData = static_cast<R*>(r.get())->ReleaseData();
 
   if (mData) {
-    mInfo.mAudio.mTrackId = mData->mListener->AudioTrackID();
-    mInfo.mVideo.mTrackId = mData->mListener->VideoTrackID();
     mOutputListener = mData->OnOutput().Connect(mOwnerThread, this,
                                                 &DecodedStream::NotifyOutput);
     SendData();
   }
   return NS_OK;
 }
 
 void DecodedStream::Stop() {
@@ -618,50 +622,50 @@ void DecodedStream::SendAudio(double aVo
 
   if (mData->mHaveSentFinishAudio) {
     return;
   }
 
   AudioSegment output;
   uint32_t rate = mInfo.mAudio.mRate;
   AutoTArray<RefPtr<AudioData>, 10> audio;
-  TrackID audioTrackId = mInfo.mAudio.mTrackId;
-  SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the AudioData because AudioData
   // is ref-counted.
   mAudioQueue.GetElementsAfter(mData->mNextAudioTime, &audio);
   for (uint32_t i = 0; i < audio.Length(); ++i) {
     SendStreamAudio(mData.get(), mStartTime.ref(), audio[i], &output, rate,
                     aPrincipalHandle);
   }
 
   output.ApplyVolume(aVolume);
 
   // |mNextAudioTime| is updated as we process each audio sample in
   // SendStreamAudio().
   if (output.GetDuration() > 0) {
-    mData->mStreamAudioWritten +=
-        sourceStream->AppendToTrack(audioTrackId, &output);
+    mData->mAudioStreamWritten += mData->mAudioStream->AppendToTrack(
+        OutputStreamManager::sTrackID, &output);
   }
 
   if (mAudioQueue.IsFinished() && !mData->mHaveSentFinishAudio) {
-    mData->mListener->EndTrackAt(audioTrackId, mData->mStreamAudioWritten);
+    mData->mListener->EndTrackAt(mData->mAudioStream,
+                                 mData->mAudioStreamWritten);
     mData->mHaveSentFinishAudio = true;
   }
 }
 
 void DecodedStreamData::WriteVideoToSegment(
     layers::Image* aImage, const TimeUnit& aStart, const TimeUnit& aEnd,
     const gfx::IntSize& aIntrinsicSize, const TimeStamp& aTimeStamp,
     VideoSegment* aOutput, const PrincipalHandle& aPrincipalHandle) {
   RefPtr<layers::Image> image = aImage;
-  auto end = mStream->MicrosecondsToStreamTimeRoundDown(aEnd.ToMicroseconds());
+  auto end =
+      mVideoStream->MicrosecondsToStreamTimeRoundDown(aEnd.ToMicroseconds());
   auto start =
-      mStream->MicrosecondsToStreamTimeRoundDown(aStart.ToMicroseconds());
+      mVideoStream->MicrosecondsToStreamTimeRoundDown(aStart.ToMicroseconds());
   aOutput->AppendFrame(image.forget(), aIntrinsicSize, aPrincipalHandle, false,
                        aTimeStamp);
   // Extend this so we get accurate durations for all frames.
   // Because this track is pushed, we need durations so the graph can track
   // when playout of the track has finished.
   aOutput->ExtendLastFrameBy(end - start);
 
   mLastVideoStartTime = Some(aStart);
@@ -697,17 +701,17 @@ void DecodedStream::ResetVideo(const Pri
   // nullptr) at an earlier time than the previous, will signal to that consumer
   // to discard any frames ahead in time of the new frame. To be honest, this is
   // an ugly hack because the direct listeners of the MediaStreamGraph do not
   // have an API that supports clearing the future frames. ImageContainer and
   // VideoFrameContainer do though, and we will need to move to a similar API
   // for video tracks as part of bug 1493618.
   resetter.AppendFrame(nullptr, mData->mLastVideoImageDisplaySize,
                        aPrincipalHandle, false, currentTime);
-  mData->mStream->AppendToTrack(mInfo.mVideo.mTrackId, &resetter);
+  mData->mVideoStream->AppendToTrack(OutputStreamManager::sTrackID, &resetter);
 
   // Consumer buffers have been reset. We now set the next time to the start
   // time of the current frame, so that it can be displayed again on resuming.
   if (RefPtr<VideoData> v = mVideoQueue.PeekFront()) {
     mData->mLastVideoStartTime = Some(v->mTime - TimeUnit::FromMicroseconds(1));
     mData->mLastVideoEndTime = Some(v->mTime);
   } else {
     // There was no current frame in the queue. We set the next time to the
@@ -727,19 +731,17 @@ void DecodedStream::SendVideo(const Prin
     return;
   }
 
   if (mData->mHaveSentFinishVideo) {
     return;
   }
 
   VideoSegment output;
-  TrackID videoTrackId = mInfo.mVideo.mTrackId;
   AutoTArray<RefPtr<VideoData>, 10> video;
-  SourceMediaStream* sourceStream = mData->mStream;
 
   // It's OK to hold references to the VideoData because VideoData
   // is ref-counted.
   mVideoQueue.GetElementsAfter(
       mData->mLastVideoStartTime.valueOr(mStartTime.ref()), &video);
 
   TimeStamp currentTime;
   TimeUnit currentPosition = GetPosition(&currentTime);
@@ -779,34 +781,34 @@ void DecodedStream::SendVideo(const Prin
       // the track's lifetime in the MSG, as rendering is based on timestamps,
       // aka frame start times.
       TimeStamp t =
           std::max(mData->mLastVideoTimeStamp,
                    currentTime + (lastEnd - currentPosition).ToTimeDuration());
       TimeUnit end = std::max(
           v->GetEndTime(),
           lastEnd + TimeUnit::FromMicroseconds(
-                        sourceStream->StreamTimeToMicroseconds(1) + 1));
+                        mData->mVideoStream->StreamTimeToMicroseconds(1) + 1));
       mData->mLastVideoImage = v->mImage;
       mData->mLastVideoImageDisplaySize = v->mDisplay;
       mData->WriteVideoToSegment(v->mImage, lastEnd, end, v->mDisplay, t,
                                  &output, aPrincipalHandle);
     }
   }
 
   // Check the output is not empty.
   bool compensateEOS = false;
   bool forceBlack = false;
   if (output.GetLastFrame()) {
     compensateEOS = ZeroDurationAtLastChunk(output);
   }
 
   if (output.GetDuration() > 0) {
-    mData->mStreamVideoWritten +=
-        sourceStream->AppendToTrack(videoTrackId, &output);
+    mData->mVideoStreamWritten += mData->mVideoStream->AppendToTrack(
+        OutputStreamManager::sTrackID, &output);
   }
 
   if (mVideoQueue.IsFinished() && !mData->mHaveSentFinishVideo) {
     if (!mData->mLastVideoImage) {
       // We have video, but the video queue finished before we received any
       // frame. We insert a black frame to progress any consuming
       // HTMLMediaElement. This mirrors the behavior of VideoSink.
 
@@ -818,43 +820,44 @@ void DecodedStream::SendVideo(const Prin
       mData->mLastVideoImageDisplaySize = mInfo.mVideo.mDisplay;
     }
     if (compensateEOS) {
       VideoSegment endSegment;
       // Calculate the deviation clock time from DecodedStream.
       // We round the nr of microseconds up, because WriteVideoToSegment
       // will round the conversion from microseconds to StreamTime down.
       auto deviation = TimeUnit::FromMicroseconds(
-          sourceStream->StreamTimeToMicroseconds(1) + 1);
+          mData->mVideoStream->StreamTimeToMicroseconds(1) + 1);
       auto start = mData->mLastVideoEndTime.valueOr(mStartTime.ref());
       mData->WriteVideoToSegment(
           mData->mLastVideoImage, start, start + deviation,
           mData->mLastVideoImageDisplaySize,
           currentTime + (start + deviation - currentPosition).ToTimeDuration(),
           &endSegment, aPrincipalHandle);
       MOZ_ASSERT(endSegment.GetDuration() > 0);
       if (forceBlack) {
         endSegment.ReplaceWithDisabled();
       }
-      mData->mStreamVideoWritten +=
-          sourceStream->AppendToTrack(videoTrackId, &endSegment);
+      mData->mVideoStreamWritten += mData->mVideoStream->AppendToTrack(
+          OutputStreamManager::sTrackID, &endSegment);
     }
-    mData->mListener->EndTrackAt(videoTrackId, mData->mStreamVideoWritten);
+    mData->mListener->EndTrackAt(mData->mVideoStream,
+                                 mData->mVideoStreamWritten);
     mData->mHaveSentFinishVideo = true;
   }
 }
 
 StreamTime DecodedStream::SentDuration() {
   AssertOwnerThread();
 
   if (!mData) {
     return 0;
   }
 
-  return std::max(mData->mStreamAudioWritten, mData->mStreamVideoWritten);
+  return std::max(mData->mAudioStreamWritten, mData->mVideoStreamWritten);
 }
 
 void DecodedStream::SendData() {
   AssertOwnerThread();
 
   // Not yet created on the main thread. MDSM will try again later.
   if (!mData) {
     return;
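
An illustrative sketch of the append-then-end pattern that
DecodedStreamGraphListener::EndTrackAt supports, as SendAudio above applies it.
The helper name and its parameters are assumptions made for the sketch only; as
written it would live inside DecodedStream.cpp next to the code it mirrors.

// Sketch only: append decoded audio to the track's source stream and, once the
// queue has drained, arm the end point rather than ending the track directly.
static void AppendAndMaybeEnd(SourceMediaStream* aAudioStream,
                              DecodedStreamGraphListener* aListener,
                              AudioSegment* aOutput, StreamTime& aWritten,
                              bool aQueueFinished) {
  if (aOutput->GetDuration() > 0) {
    // AppendToTrack() returns how much of the segment was accepted; accumulate
    // it so we know how much output the graph still has to play out.
    aWritten +=
        aAudioStream->AppendToTrack(OutputStreamManager::sTrackID, aOutput);
  }
  if (aQueueFinished) {
    // The graph listener ends the track from NotifyOutput() only after it has
    // seen aWritten worth of output reported as processed by the graph.
    aListener->EndTrackAt(aAudioStream, aWritten);
  }
}
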
--- a/dom/media/mediasink/OutputStreamManager.cpp
+++ b/dom/media/mediasink/OutputStreamManager.cpp
@@ -2,295 +2,320 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "OutputStreamManager.h"
 
 #include "DOMMediaStream.h"
-#include "MediaStreamGraph.h"
+#include "../MediaStreamGraphImpl.h"
 #include "mozilla/dom/MediaStreamTrack.h"
+#include "mozilla/dom/AudioStreamTrack.h"
+#include "mozilla/dom/VideoStreamTrack.h"
 #include "nsContentUtils.h"
 
 namespace mozilla {
 
 #define LOG(level, msg, ...) \
   MOZ_LOG(gMediaDecoderLog, level, (msg, ##__VA_ARGS__))
 
 class DecodedStreamTrackSource : public dom::MediaStreamTrackSource {
  public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(DecodedStreamTrackSource,
                                            dom::MediaStreamTrackSource)
 
-  explicit DecodedStreamTrackSource(OutputStreamManager* aManager,
-                                    OutputStreamData* aData, TrackID aTrackID,
-                                    nsIPrincipal* aPrincipal,
-                                    AbstractThread* aAbstractMainThread)
-      : dom::MediaStreamTrackSource(aPrincipal, nsString()) {
+  explicit DecodedStreamTrackSource(SourceMediaStream* aSourceStream,
+                                    nsIPrincipal* aPrincipal)
+      : dom::MediaStreamTrackSource(aPrincipal, nsString()),
+        mStream(aSourceStream->Graph()->CreateTrackUnionStream()),
+        mPort(mStream->AllocateInputPort(aSourceStream)) {
     MOZ_ASSERT(NS_IsMainThread());
   }
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::Other;
   }
 
   void Stop() override {
     MOZ_ASSERT(NS_IsMainThread());
 
     // We don't notify the source that a track was stopped since it will keep
     // producing tracks until the element ends. The decoder also needs the
     // tracks it created to be live at the source since the decoder's clock is
-    // based on MediaStreams during capture.
+    // based on MediaStreams during capture. We do, however, disconnect this
+    // track's underlying stream.
+    if (!mStream->IsDestroyed()) {
+      mStream->Destroy();
+      mPort->Destroy();
+    }
   }
 
   void Disable() override {}
 
   void Enable() override {}
 
   void SetPrincipal(nsIPrincipal* aPrincipal) {
     MOZ_ASSERT(NS_IsMainThread());
     mPrincipal = aPrincipal;
     PrincipalChanged();
   }
 
+  void ForceEnded() { OverrideEnded(); }
+
+  const RefPtr<ProcessedMediaStream> mStream;
+  const RefPtr<MediaInputPort> mPort;
+
  protected:
-  virtual ~DecodedStreamTrackSource() { MOZ_ASSERT(NS_IsMainThread()); }
+  virtual ~DecodedStreamTrackSource() {
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(mStream->IsDestroyed());
+  }
 };
 
 NS_IMPL_ADDREF_INHERITED(DecodedStreamTrackSource, dom::MediaStreamTrackSource)
 NS_IMPL_RELEASE_INHERITED(DecodedStreamTrackSource, dom::MediaStreamTrackSource)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(DecodedStreamTrackSource)
 NS_INTERFACE_MAP_END_INHERITING(dom::MediaStreamTrackSource)
 NS_IMPL_CYCLE_COLLECTION_INHERITED(DecodedStreamTrackSource,
                                    dom::MediaStreamTrackSource)
 
 OutputStreamData::OutputStreamData(OutputStreamManager* aManager,
                                    AbstractThread* aAbstractMainThread,
                                    DOMMediaStream* aDOMStream)
     : mManager(aManager),
       mAbstractMainThread(aAbstractMainThread),
-      mDOMStream(aDOMStream),
-      mInputStream(mDOMStream->GetInputStream()->AsProcessedStream()),
-      mPort(mInputStream->AllocateInputPort(mManager->mSourceStream)) {
+      mDOMStream(aDOMStream) {
   MOZ_ASSERT(NS_IsMainThread());
 }
 
-OutputStreamData::~OutputStreamData() {
-  MOZ_ASSERT(NS_IsMainThread());
+OutputStreamData::~OutputStreamData() = default;
 
-  // Disconnect any existing port.
-  if (mPort) {
-    mPort->Destroy();
-  }
-}
-
-void OutputStreamData::AddTrack(TrackID aTrackID, MediaSegment::Type aType,
+void OutputStreamData::AddTrack(SourceMediaStream* aStream,
+                                MediaSegment::Type aType,
                                 nsIPrincipal* aPrincipal, bool aAsyncAddTrack) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
 
-  LOG(LogLevel::Debug, "Adding output %s track with id %d to MediaStream %p%s",
-      aType == MediaSegment::AUDIO ? "audio" : "video", aTrackID,
+  LOG(LogLevel::Debug,
+      "Adding output %s track sourced from stream %p to MediaStream %p%s",
+      aType == MediaSegment::AUDIO ? "audio" : "video", aStream,
       mDOMStream.get(), aAsyncAddTrack ? " (async)" : "");
 
-  RefPtr<dom::MediaStreamTrackSource> source = new DecodedStreamTrackSource(
-      mManager, this, aTrackID, aPrincipal, mAbstractMainThread);
-  RefPtr<dom::MediaStreamTrack> track =
-      mDOMStream->CreateDOMTrack(aTrackID, aType, source);
+  auto source = MakeRefPtr<DecodedStreamTrackSource>(aStream, aPrincipal);
+  RefPtr<dom::MediaStreamTrack> track;
+  if (aType == MediaSegment::AUDIO) {
+    track = new dom::AudioStreamTrack(mDOMStream->GetParentObject(),
+                                      source->mStream,
+                                      OutputStreamManager::sTrackID, source);
+  } else {
+    MOZ_ASSERT(aType == MediaSegment::VIDEO);
+    track = new dom::VideoStreamTrack(mDOMStream->GetParentObject(),
+                                      source->mStream,
+                                      OutputStreamManager::sTrackID, source);
+  }
   mTracks.AppendElement(track.get());
   if (aAsyncAddTrack) {
     GetMainThreadEventTarget()->Dispatch(
         NewRunnableMethod<RefPtr<dom::MediaStreamTrack>>(
             "DOMMediaStream::AddTrackInternal", mDOMStream.get(),
             &DOMMediaStream::AddTrackInternal, track));
   } else {
     mDOMStream->AddTrackInternal(track);
   }
 }
 
-void OutputStreamData::RemoveTrack(TrackID aTrackID) {
+void OutputStreamData::RemoveTrack(SourceMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
 
-  LOG(LogLevel::Debug, "Removing output track with id %d from MediaStream %p",
-      aTrackID, mDOMStream.get());
+  LOG(LogLevel::Debug,
+      "Removing output track sourced by stream %p from MediaStream %p", aStream,
+      mDOMStream.get());
 
-  RefPtr<dom::MediaStreamTrack> track =
-      mDOMStream->FindOwnedDOMTrack(mInputStream, aTrackID);
-  MOZ_DIAGNOSTIC_ASSERT(track);
-  mTracks.RemoveElement(track);
-  GetMainThreadEventTarget()->Dispatch(
-      NewRunnableMethod("MediaStreamTrack::OverrideEnded", track,
-                        &dom::MediaStreamTrack::OverrideEnded));
+  for (const auto& t : nsTArray<WeakPtr<dom::MediaStreamTrack>>(mTracks)) {
+    mTracks.RemoveElement(t);
+    if (!t || t->Ended()) {
+      continue;
+    }
+    DecodedStreamTrackSource& source =
+        static_cast<DecodedStreamTrackSource&>(t->GetSource());
+    GetMainThreadEventTarget()->Dispatch(
+        NewRunnableMethod("DecodedStreamTrackSource::ForceEnded", &source,
+                          &DecodedStreamTrackSource::ForceEnded));
+  }
 }
 
 void OutputStreamData::SetPrincipal(nsIPrincipal* aPrincipal) {
   MOZ_DIAGNOSTIC_ASSERT(mDOMStream);
   for (const WeakPtr<dom::MediaStreamTrack>& track : mTracks) {
-    MOZ_DIAGNOSTIC_ASSERT(track);
+    if (!track || track->Ended()) {
+      continue;
+    }
     DecodedStreamTrackSource& source =
         static_cast<DecodedStreamTrackSource&>(track->GetSource());
     source.SetPrincipal(aPrincipal);
   }
 }
 
-OutputStreamManager::OutputStreamManager(SourceMediaStream* aSourceStream,
-                                         TrackID aNextTrackID,
+OutputStreamManager::OutputStreamManager(MediaStreamGraphImpl* aGraph,
                                          nsIPrincipal* aPrincipal,
                                          AbstractThread* aAbstractMainThread)
-    : mSourceStream(aSourceStream),
-      mAbstractMainThread(aAbstractMainThread),
+    : mAbstractMainThread(aAbstractMainThread),
+      mGraph(aGraph),
       mPrincipalHandle(
           aAbstractMainThread,
           aPrincipal ? MakePrincipalHandle(aPrincipal) : PRINCIPAL_HANDLE_NONE,
           "OutputStreamManager::mPrincipalHandle (Canonical)"),
-      mPrincipal(aPrincipal),
-      mNextTrackID(aNextTrackID),
-      mPlaying(true)  // mSourceStream always starts non-suspended
-{
+      mPrincipal(aPrincipal) {
   MOZ_ASSERT(NS_IsMainThread());
 }
 
 void OutputStreamManager::Add(DOMMediaStream* aDOMStream) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(!mSourceStream->IsDestroyed());
-  // All streams must belong to the same graph.
-  MOZ_ASSERT(mSourceStream->Graph() == aDOMStream->GetInputStream()->Graph());
 
   LOG(LogLevel::Info, "Adding MediaStream %p", aDOMStream);
 
   OutputStreamData* p = mStreams
                             .AppendElement(new OutputStreamData(
                                 this, mAbstractMainThread, aDOMStream))
                             ->get();
-  for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
-    p->AddTrack(pair.first(), pair.second(), mPrincipal, false);
+  for (const auto& lt : mLiveTracks) {
+    p->AddTrack(lt->mSourceStream, lt->mType, mPrincipal, false);
   }
 }
 
 void OutputStreamManager::Remove(DOMMediaStream* aDOMStream) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(!mSourceStream->IsDestroyed());
 
   LOG(LogLevel::Info, "Removing MediaStream %p", aDOMStream);
 
   AutoRemoveDestroyedStreams();
   mStreams.ApplyIf(
       aDOMStream, 0, StreamComparator(),
       [&](const UniquePtr<OutputStreamData>& aData) {
-        for (const Pair<TrackID, MediaSegment::Type>& pair : mLiveTracks) {
-          aData->RemoveTrack(pair.first());
+        for (const auto& lt : mLiveTracks) {
+          aData->RemoveTrack(lt->mSourceStream);
         }
       },
       []() { MOZ_ASSERT_UNREACHABLE("Didn't exist"); });
   DebugOnly<bool> rv = mStreams.RemoveElement(aDOMStream, StreamComparator());
   MOZ_ASSERT(rv);
 }
 
 bool OutputStreamManager::HasTrackType(MediaSegment::Type aType) {
   MOZ_ASSERT(NS_IsMainThread());
 
   return mLiveTracks.Contains(aType, TrackTypeComparator());
 }
 
-bool OutputStreamManager::HasTracks(TrackID aAudioTrack, TrackID aVideoTrack) {
+bool OutputStreamManager::HasTracks(SourceMediaStream* aAudioStream,
+                                    SourceMediaStream* aVideoStream) {
   MOZ_ASSERT(NS_IsMainThread());
 
   size_t nrExpectedTracks = 0;
   bool asExpected = true;
-  if (IsTrackIDExplicit(aAudioTrack)) {
+  if (aAudioStream) {
     Unused << ++nrExpectedTracks;
     asExpected = asExpected && mLiveTracks.Contains(
-                                   MakePair(aAudioTrack, MediaSegment::AUDIO),
+                                   MakePair(aAudioStream, MediaSegment::AUDIO),
                                    TrackComparator());
   }
-  if (IsTrackIDExplicit(aVideoTrack)) {
+  if (aVideoStream) {
     Unused << ++nrExpectedTracks;
     asExpected = asExpected && mLiveTracks.Contains(
-                                   MakePair(aVideoTrack, MediaSegment::VIDEO),
+                                   MakePair(aVideoStream, MediaSegment::VIDEO),
                                    TrackComparator());
   }
   asExpected = asExpected && mLiveTracks.Length() == nrExpectedTracks;
   return asExpected;
 }
 
+SourceMediaStream* OutputStreamManager::GetPrecreatedTrackOfType(
+    MediaSegment::Type aType) const {
+  auto i = mLiveTracks.IndexOf(aType, 0, PrecreatedTrackTypeComparator());
+  return i == nsTArray<UniquePtr<LiveTrack>>::NoIndex
+             ? nullptr
+             : mLiveTracks[i]->mSourceStream.get();
+}
+
 size_t OutputStreamManager::NumberOfTracks() {
   MOZ_ASSERT(NS_IsMainThread());
   return mLiveTracks.Length();
 }
 
-void OutputStreamManager::AddTrack(MediaSegment::Type aType) {
+already_AddRefed<SourceMediaStream> OutputStreamManager::AddTrack(
+    MediaSegment::Type aType) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(!mSourceStream->IsDestroyed());
   MOZ_ASSERT(!HasTrackType(aType),
              "Cannot have two tracks of the same type at the same time");
 
-  TrackID id = mNextTrackID++;
+  RefPtr<SourceMediaStream> stream = mGraph->CreateSourceStream();
+  if (!mPlaying) {
+    stream->Suspend();
+  }
 
-  LOG(LogLevel::Info, "Adding %s track with id %d",
-      aType == MediaSegment::AUDIO ? "audio" : "video", id);
+  LOG(LogLevel::Info, "Adding %s track sourced by stream %p",
+      aType == MediaSegment::AUDIO ? "audio" : "video", stream.get());
 
-  mLiveTracks.AppendElement(MakePair(id, aType));
+  mLiveTracks.AppendElement(MakeUnique<LiveTrack>(stream, aType));
   AutoRemoveDestroyedStreams();
   for (const auto& data : mStreams) {
-    data->AddTrack(id, aType, mPrincipal, true);
+    data->AddTrack(stream, aType, mPrincipal, true);
   }
+
+  return stream.forget();
 }
 
+OutputStreamManager::LiveTrack::~LiveTrack() { mSourceStream->Destroy(); }
+
 void OutputStreamManager::AutoRemoveDestroyedStreams() {
   MOZ_ASSERT(NS_IsMainThread());
   for (size_t i = mStreams.Length(); i > 0; --i) {
     const auto& data = mStreams[i - 1];
-    if (!data->mDOMStream || !data->mDOMStream->GetInputStream()) {
+    if (!data->mDOMStream) {
       // If the mDOMStream WeakPtr is now null, mDOMStream has been destructed.
-      // If mDOMStream's input stream is now null, it has been unlinked but not
-      // yet destructed. In both cases we can consider it dead.
       mStreams.RemoveElementAt(i - 1);
     }
   }
 }
 
-void OutputStreamManager::RemoveTrack(TrackID aTrackID) {
+void OutputStreamManager::RemoveTrack(SourceMediaStream* aStream) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_ASSERT(!mSourceStream->IsDestroyed());
-  LOG(LogLevel::Info, "Removing track with id %d", aTrackID);
-  DebugOnly<bool> rv = mLiveTracks.RemoveElement(aTrackID, TrackIDComparator());
+  LOG(LogLevel::Info, "Removing track with source stream %p", aStream);
+  DebugOnly<bool> rv =
+      mLiveTracks.RemoveElement(aStream, TrackStreamComparator());
   MOZ_ASSERT(rv);
   AutoRemoveDestroyedStreams();
   for (const auto& data : mStreams) {
-    data->RemoveTrack(aTrackID);
+    data->RemoveTrack(aStream);
   }
 }
 
 void OutputStreamManager::RemoveTracks() {
   MOZ_ASSERT(NS_IsMainThread());
-  nsTArray<Pair<TrackID, MediaSegment::Type>> liveTracks(mLiveTracks);
-  for (const auto& pair : liveTracks) {
-    RemoveTrack(pair.first());
+  for (size_t i = mLiveTracks.Length(); i > 0; --i) {
+    RemoveTrack(mLiveTracks[i - 1]->mSourceStream);
   }
 }
 
 void OutputStreamManager::Disconnect() {
   MOZ_ASSERT(NS_IsMainThread());
   RemoveTracks();
   MOZ_ASSERT(mLiveTracks.IsEmpty());
   AutoRemoveDestroyedStreams();
   nsTArray<RefPtr<DOMMediaStream>> domStreams(mStreams.Length());
   for (const auto& data : mStreams) {
     domStreams.AppendElement(data->mDOMStream);
   }
   for (auto& domStream : domStreams) {
     Remove(domStream);
   }
   MOZ_ASSERT(mStreams.IsEmpty());
-  if (!mSourceStream->IsDestroyed()) {
-    mSourceStream->Destroy();
-  }
 }
 
 AbstractCanonical<PrincipalHandle>*
 OutputStreamManager::CanonicalPrincipalHandle() {
   return &mPrincipalHandle;
 }
 
 void OutputStreamManager::SetPrincipal(nsIPrincipal* aPrincipal) {
@@ -301,40 +326,30 @@ void OutputStreamManager::SetPrincipal(n
     AutoRemoveDestroyedStreams();
     for (const UniquePtr<OutputStreamData>& data : mStreams) {
       data->SetPrincipal(mPrincipal);
     }
     mPrincipalHandle = MakePrincipalHandle(principal);
   }
 }
 
-TrackID OutputStreamManager::NextTrackID() const {
-  MOZ_ASSERT(NS_IsMainThread());
-  return mNextTrackID;
-}
-
-TrackID OutputStreamManager::GetLiveTrackIDFor(MediaSegment::Type aType) const {
-  MOZ_ASSERT(NS_IsMainThread());
-  for (const auto& pair : mLiveTracks) {
-    if (pair.second() == aType) {
-      return pair.first();
-    }
-  }
-  return TRACK_NONE;
-}
-
 void OutputStreamManager::SetPlaying(bool aPlaying) {
   MOZ_ASSERT(NS_IsMainThread());
   if (mPlaying == aPlaying) {
     return;
   }
 
   mPlaying = aPlaying;
-  if (mPlaying) {
-    mSourceStream->Resume();
-  } else {
-    mSourceStream->Suspend();
+  for (auto& lt : mLiveTracks) {
+    if (mPlaying) {
+      lt->mSourceStream->Resume();
+      lt->mEverPlayed = true;
+    } else {
+      lt->mSourceStream->Suspend();
+    }
   }
 }
 
+OutputStreamManager::~OutputStreamManager() = default;
+
 #undef LOG
 
 }  // namespace mozilla
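
A condensed sketch of the wiring that DecodedStreamTrackSource and
OutputStreamData::AddTrack establish above. MakeDecodedTrack is a hypothetical
helper introduced only for illustration; every call it makes appears in the
diff above.

// Sketch only: each output track gets its own TrackUnionStream, fed from the
// decoder's SourceMediaStream through an input port that the track source owns
// and tears down in Stop().
static already_AddRefed<dom::MediaStreamTrack> MakeDecodedTrack(
    SourceMediaStream* aSource, MediaSegment::Type aType,
    nsPIDOMWindowInner* aWindow, nsIPrincipal* aPrincipal) {
  auto source = MakeRefPtr<DecodedStreamTrackSource>(aSource, aPrincipal);
  RefPtr<dom::MediaStreamTrack> track;
  if (aType == MediaSegment::AUDIO) {
    track = new dom::AudioStreamTrack(aWindow, source->mStream,
                                      OutputStreamManager::sTrackID, source);
  } else {
    MOZ_ASSERT(aType == MediaSegment::VIDEO);
    track = new dom::VideoStreamTrack(aWindow, source->mStream,
                                      OutputStreamManager::sTrackID, source);
  }
  return track.forget();
}
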
--- a/dom/media/mediasink/OutputStreamManager.h
+++ b/dom/media/mediasink/OutputStreamManager.h
@@ -33,124 +33,134 @@ class OutputStreamData {
   OutputStreamData(const OutputStreamData& aOther) = delete;
   OutputStreamData(OutputStreamData&& aOther) = delete;
   ~OutputStreamData();
 
   // Creates and adds a MediaStreamTrack to mDOMStream so that we can feed data
   // to it. For a true aAsyncAddTrack we will dispatch a task to add the
   // created track to mDOMStream, as is required by spec for the "addtrack"
   // event.
-  void AddTrack(TrackID aTrackID, MediaSegment::Type aType,
+  void AddTrack(SourceMediaStream* aStream, MediaSegment::Type aType,
                 nsIPrincipal* aPrincipal, bool aAsyncAddTrack);
-  // Ends the MediaStreamTrack with aTrackID. Calling this with a TrackID that
-  // doesn't exist in mDOMStream is an error.
-  void RemoveTrack(TrackID aTrackID);
+  // Ends any MediaStreamTracks sourced from aStream.
+  void RemoveTrack(SourceMediaStream* aStream);
 
   void SetPrincipal(nsIPrincipal* aPrincipal);
 
-  // The source stream DecodedStream is feeding tracks to.
   const RefPtr<OutputStreamManager> mManager;
   const RefPtr<AbstractThread> mAbstractMainThread;
   // The DOMMediaStream we add tracks to and represent.
   const WeakPtr<DOMMediaStream> mDOMStream;
-  // The input stream of mDOMStream.
-  const RefPtr<ProcessedMediaStream> mInputStream;
 
  private:
-  // mPort connects mSourceStream to mInputStream.
-  const RefPtr<MediaInputPort> mPort;
-
   // Tracks that have been added and not yet removed.
   nsTArray<WeakPtr<dom::MediaStreamTrack>> mTracks;
 };
 
 class OutputStreamManager {
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OutputStreamManager);
 
  public:
-  explicit OutputStreamManager(SourceMediaStream* aSourceStream,
-                               TrackID aNextTrackID, nsIPrincipal* aPrincipal,
-                               AbstractThread* aAbstractMainThread);
+  OutputStreamManager(MediaStreamGraphImpl* aGraph, nsIPrincipal* aPrincipal,
+                      AbstractThread* aAbstractMainThread);
   // Add the output stream to the collection.
   void Add(DOMMediaStream* aDOMStream);
   // Remove the output stream from the collection.
   void Remove(DOMMediaStream* aDOMStream);
   // Returns true if there's a live track of the given type.
   bool HasTrackType(MediaSegment::Type aType);
-  // Returns true if the given tracks and no others are currently live.
-  // Use a non-explicit TrackID to make it ignored for that type.
-  bool HasTracks(TrackID aAudioTrack, TrackID aVideoTrack);
+  // Returns true if the given streams are sourcing all currently live tracks.
+  // Pass nullptr to ignore that type.
+  bool HasTracks(SourceMediaStream* aAudioStream,
+                 SourceMediaStream* aVideoStream);
+  // Returns the source stream for a live track of the given type that has
+  // never been played, or nullptr if there is none.
+  SourceMediaStream* GetPrecreatedTrackOfType(MediaSegment::Type aType) const;
   // Returns the number of live tracks.
   size_t NumberOfTracks();
-  // Add a track to all output streams.
-  void AddTrack(MediaSegment::Type aType);
-  // Remove all currently live tracks from all output streams.
+  // Add a track of the given type to all output streams and return the
+  // SourceMediaStream that sources it.
+  already_AddRefed<SourceMediaStream> AddTrack(MediaSegment::Type aType);
+  // Remove all currently live tracks.
   void RemoveTracks();
-  // Disconnect mSourceStream from all output streams.
+  // Remove all currently live tracks and all output streams.
   void Disconnect();
   // The principal handle for the underlying decoder.
   AbstractCanonical<PrincipalHandle>* CanonicalPrincipalHandle();
   // Called when the underlying decoder's principal has changed.
   void SetPrincipal(nsIPrincipal* aPrincipal);
-  // Returns the track id that would be used the next time a track is added.
-  TrackID NextTrackID() const;
-  // Returns the TrackID for the currently live track of the given type, or
-  // TRACK_NONE otherwise.
-  TrackID GetLiveTrackIDFor(MediaSegment::Type aType) const;
   // Called by DecodedStream when its playing state changes. While not playing
-  // we suspend mSourceStream.
+  // we suspend the source streams of all live tracks.
   void SetPlaying(bool aPlaying);
   // Return true if the collection of output streams is empty.
   bool IsEmpty() const {
     MOZ_ASSERT(NS_IsMainThread());
     return mStreams.IsEmpty();
   }
 
-  // Keep the source stream so we can connect the output streams that
-  // are added after Connect().
-  const RefPtr<SourceMediaStream> mSourceStream;
+  static const TrackID sTrackID = 1;
+
   const RefPtr<AbstractThread> mAbstractMainThread;
 
  private:
-  ~OutputStreamManager() = default;
+  ~OutputStreamManager();
+
+  class LiveTrack {
+   public:
+    LiveTrack(SourceMediaStream* aSourceStream, MediaSegment::Type aType)
+        : mSourceStream(aSourceStream), mType(aType) {}
+    ~LiveTrack();
+
+    const RefPtr<SourceMediaStream> mSourceStream;
+    const MediaSegment::Type mType;
+    bool mEverPlayed = false;
+  };
+
   struct StreamComparator {
     static bool Equals(const UniquePtr<OutputStreamData>& aData,
                        DOMMediaStream* aStream) {
       return aData->mDOMStream == aStream;
     }
   };
-  struct TrackIDComparator {
-    static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
-                       TrackID aTrackID) {
-      return aLiveTrack.first() == aTrackID;
+  struct TrackStreamComparator {
+    static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
+                       SourceMediaStream* aStream) {
+      return aLiveTrack->mSourceStream == aStream;
     }
   };
   struct TrackTypeComparator {
-    static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
+    static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
                        MediaSegment::Type aType) {
-      return aLiveTrack.second() == aType;
+      return aLiveTrack->mType == aType;
+    }
+  };
+  struct PrecreatedTrackTypeComparator {
+    static bool Equals(const UniquePtr<LiveTrack>& aLiveTrack,
+                       MediaSegment::Type aType) {
+      return !aLiveTrack->mEverPlayed && aLiveTrack->mType == aType;
     }
   };
   struct TrackComparator {
-    static bool Equals(const Pair<TrackID, MediaSegment::Type>& aLiveTrack,
-                       const Pair<TrackID, MediaSegment::Type>& aOther) {
-      return aLiveTrack.first() == aOther.first() &&
-             aLiveTrack.second() == aOther.second();
+    static bool Equals(
+        const UniquePtr<LiveTrack>& aLiveTrack,
+        const Pair<SourceMediaStream*, MediaSegment::Type>& aOther) {
+      return aLiveTrack->mSourceStream == aOther.first() &&
+             aLiveTrack->mType == aOther.second();
     }
   };
 
   // Goes through mStreams and removes any entries that have been destroyed.
   void AutoRemoveDestroyedStreams();
 
-  // Remove aTrackID from all output streams.
-  void RemoveTrack(TrackID aTrackID);
+  // Remove tracks sourced from aStream from all output streams.
+  void RemoveTrack(SourceMediaStream* aStream);
 
+  const RefPtr<MediaStreamGraphImpl> mGraph;
   nsTArray<UniquePtr<OutputStreamData>> mStreams;
-  nsTArray<Pair<TrackID, MediaSegment::Type>> mLiveTracks;
+  nsTArray<UniquePtr<LiveTrack>> mLiveTracks;
   Canonical<PrincipalHandle> mPrincipalHandle;
   nsCOMPtr<nsIPrincipal> mPrincipal;
-  TrackID mNextTrackID;
-  bool mPlaying;
+  bool mPlaying = false;
 };
 
 }  // namespace mozilla
 
 #endif  // OutputStreamManager_h
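
A small sketch of how the pre-created-track lookup above pairs with AddTrack,
mirroring DecodedStream::Start in DecodedStream.cpp. EnsureStreamOfType is a
hypothetical helper used only to illustrate the flow.

// Sketch only: reuse a track created before playback ever started
// (LiveTrack::mEverPlayed is still false), otherwise create a fresh one.
static RefPtr<SourceMediaStream> EnsureStreamOfType(
    OutputStreamManager* aManager, MediaSegment::Type aType) {
  RefPtr<SourceMediaStream> stream = aManager->GetPrecreatedTrackOfType(aType);
  if (!stream) {
    MOZ_DIAGNOSTIC_ASSERT(!aManager->HasTrackType(aType));
    stream = aManager->AddTrack(aType);
  }
  return stream;
}
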
--- a/dom/media/moz.build
+++ b/dom/media/moz.build
@@ -149,16 +149,17 @@ EXPORTS += [
     'MediaResult.h',
     'MediaSegment.h',
     'MediaShutdownManager.h',
     'MediaSpan.h',
     'MediaStatistics.h',
     'MediaStreamGraph.h',
     'MediaStreamListener.h',
     'MediaStreamTypes.h',
+    'MediaStreamWindowCapturer.h',
     'MediaTimer.h',
     'MediaTrack.h',
     'MediaTrackList.h',
     'MemoryBlockCache.h',
     'nsIDocumentActivity.h',
     'PrincipalChangeObserver.h',
     'QueueObject.h',
     'SeekJob.h',
@@ -266,16 +267,17 @@ UNIFIED_SOURCES += [
     'MediaMIMETypes.cpp',
     'MediaRecorder.cpp',
     'MediaResource.cpp',
     'MediaShutdownManager.cpp',
     'MediaStreamError.cpp',
     'MediaStreamGraph.cpp',
     'MediaStreamListener.cpp',
     'MediaStreamTrack.cpp',
+    'MediaStreamWindowCapturer.cpp',
     'MediaTimer.cpp',
     'MediaTrack.cpp',
     'MediaTrackList.cpp',
     'MemoryBlockCache.cpp',
     'QueueObject.cpp',
     'ReaderProxy.cpp',
     'SeekJob.cpp',
     'StreamTracks.cpp',
--- a/dom/media/tests/mochitest/test_getUserMedia_GC_MediaStream.html
+++ b/dom/media/tests/mochitest/test_getUserMedia_GC_MediaStream.html
@@ -15,43 +15,30 @@
 
   let SpecialStream = SpecialPowers.wrap(MediaStream);
 
   async function testGC(stream, numCopies, copy) {
     let startStreams = await SpecialStream.countUnderlyingStreams();
 
     let copies = new Array(numCopies).fill(0).map(() => copy(stream));
     ok(await SpecialStream.countUnderlyingStreams() > startStreams,
-        "MediaStream constructor creates more underlying streams");
+        "MediaStreamTrack constructor creates more underlying streams");
 
     copies = [];
     await new Promise(r => SpecialPowers.exactGC(r));
     is(await SpecialStream.countUnderlyingStreams(), startStreams,
-       "MediaStreams should have been collected");
+       "MediaStreamTracks should have been collected");
   }
 
   runTest(async () => {
     // We do not need LoopbackTone because it is not used
     // and creates extra streams that affect the result
     DISABLE_LOOPBACK_TONE = true;
 
     let gUMStream = await getUserMedia({video: true});
-    info("Testing GC of copy constructor");
-    await testGC(gUMStream, 10, s => new MediaStream(s));
-
-    info("Testing GC of track-array constructor");
-    await testGC(gUMStream, 10, s => new MediaStream(s.getTracks()));
-
-    info("Testing GC of empty constructor plus addTrack");
-    await testGC(gUMStream, 10, s => {
-      let s2 = new MediaStream();
-      s.getTracks().forEach(t => s2.addTrack(t));
-      return s2;
-    });
-
     info("Testing GC of track-array constructor with cloned tracks");
     await testGC(gUMStream, 10, s => new MediaStream(s.getTracks().map(t => t.clone())));
 
     info("Testing GC of empty constructor plus addTrack with cloned tracks");
     await testGC(gUMStream, 10, s => {
       let s2 = new MediaStream();
       s.getTracks().forEach(t => s2.addTrack(t.clone()));
       return s2;
--- a/dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
@@ -4,50 +4,62 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaStreamAudioDestinationNode.h"
 #include "mozilla/dom/Document.h"
 #include "mozilla/dom/MediaStreamAudioDestinationNodeBinding.h"
 #include "AudioNodeEngine.h"
 #include "AudioNodeStream.h"
+#include "AudioStreamTrack.h"
 #include "DOMMediaStream.h"
-#include "MediaStreamTrack.h"
 #include "TrackUnionStream.h"
 
 namespace mozilla {
 namespace dom {
 
 class AudioDestinationTrackSource final : public MediaStreamTrackSource {
  public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(AudioDestinationTrackSource,
                                            MediaStreamTrackSource)
 
   AudioDestinationTrackSource(MediaStreamAudioDestinationNode* aNode,
+                              MediaStream* aInputStream,
+                              ProcessedMediaStream* aStream,
                               nsIPrincipal* aPrincipal)
-      : MediaStreamTrackSource(aPrincipal, nsString()), mNode(aNode) {}
+      : MediaStreamTrackSource(aPrincipal, nsString()),
+        mStream(aStream),
+        mPort(mStream->AllocateInputPort(aInputStream)),
+        mNode(aNode) {}
 
   void Destroy() override {
+    if (!mStream->IsDestroyed()) {
+      mStream->Destroy();
+      mPort->Destroy();
+    }
     if (mNode) {
       mNode->DestroyMediaStream();
       mNode = nullptr;
     }
   }
 
   MediaSourceEnum GetMediaSource() const override {
     return MediaSourceEnum::AudioCapture;
   }
 
   void Stop() override { Destroy(); }
 
   void Disable() override {}
 
   void Enable() override {}
 
+  const RefPtr<ProcessedMediaStream> mStream;
+  const RefPtr<MediaInputPort> mPort;
+
  private:
   ~AudioDestinationTrackSource() = default;
 
   RefPtr<MediaStreamAudioDestinationNode> mNode;
 };
 
 NS_IMPL_ADDREF_INHERITED(AudioDestinationTrackSource, MediaStreamTrackSource)
 NS_IMPL_RELEASE_INHERITED(AudioDestinationTrackSource, MediaStreamTrackSource)
@@ -64,43 +76,35 @@ NS_INTERFACE_MAP_END_INHERITING(AudioNod
 
 NS_IMPL_ADDREF_INHERITED(MediaStreamAudioDestinationNode, AudioNode)
 NS_IMPL_RELEASE_INHERITED(MediaStreamAudioDestinationNode, AudioNode)
 
 MediaStreamAudioDestinationNode::MediaStreamAudioDestinationNode(
     AudioContext* aContext)
     : AudioNode(aContext, 2, ChannelCountMode::Explicit,
                 ChannelInterpretation::Speakers),
-      mDOMStream(DOMAudioNodeMediaStream::CreateTrackUnionStreamAsInput(
-          GetOwner(), this, aContext->Graph())) {
+      mDOMStream(MakeAndAddRef<DOMMediaStream>(GetOwner())) {
   // Ensure an audio track with the correct ID is exposed to JS. If we can't get
   // a principal here because the document is not available, pass in a null
   // principal. This happens in edge cases when the document is being unloaded
   // and it does not matter too much to have something working as long as it's
   // not dangerous.
   nsCOMPtr<nsIPrincipal> principal = nullptr;
   if (aContext->GetParentObject()) {
     Document* doc = aContext->GetParentObject()->GetExtantDoc();
     principal = doc->NodePrincipal();
   }
-  RefPtr<MediaStreamTrackSource> source =
-      new AudioDestinationTrackSource(this, principal);
-  RefPtr<MediaStreamTrack> track = mDOMStream->CreateDOMTrack(
-      AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO, source,
-      MediaTrackConstraints());
+  mStream = AudioNodeStream::Create(aContext, new AudioNodeEngine(this),
+                                    AudioNodeStream::EXTERNAL_OUTPUT,
+                                    aContext->Graph());
+  auto source = MakeRefPtr<AudioDestinationTrackSource>(
+      this, mStream, aContext->Graph()->CreateTrackUnionStream(), principal);
+  auto track = MakeRefPtr<AudioStreamTrack>(
+      GetOwner(), source->mStream, AudioNodeStream::AUDIO_TRACK, source);
   mDOMStream->AddTrackInternal(track);
-
-  ProcessedMediaStream* outputStream =
-      mDOMStream->GetInputStream()->AsProcessedStream();
-  MOZ_ASSERT(!!outputStream);
-  AudioNodeEngine* engine = new AudioNodeEngine(this);
-  mStream = AudioNodeStream::Create(
-      aContext, engine, AudioNodeStream::EXTERNAL_OUTPUT, aContext->Graph());
-  mPort =
-      outputStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK);
 }
 
 /* static */
 already_AddRefed<MediaStreamAudioDestinationNode>
 MediaStreamAudioDestinationNode::Create(AudioContext& aAudioContext,
                                         const AudioNodeOptions& aOptions,
                                         ErrorResult& aRv) {
   if (aAudioContext.IsOffline()) {
@@ -119,31 +123,26 @@ MediaStreamAudioDestinationNode::Create(
   return audioNode.forget();
 }
 
 size_t MediaStreamAudioDestinationNode::SizeOfExcludingThis(
     MallocSizeOf aMallocSizeOf) const {
   // Future:
   // - mDOMStream
   size_t amount = AudioNode::SizeOfExcludingThis(aMallocSizeOf);
-  amount += mPort->SizeOfIncludingThis(aMallocSizeOf);
   return amount;
 }
 
 size_t MediaStreamAudioDestinationNode::SizeOfIncludingThis(
     MallocSizeOf aMallocSizeOf) const {
   return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
 }
 
 void MediaStreamAudioDestinationNode::DestroyMediaStream() {
   AudioNode::DestroyMediaStream();
-  if (mPort) {
-    mPort->Destroy();
-    mPort = nullptr;
-  }
 }
 
 JSObject* MediaStreamAudioDestinationNode::WrapObject(
     JSContext* aCx, JS::Handle<JSObject*> aGivenProto) {
   return MediaStreamAudioDestinationNode_Binding::Wrap(aCx, this, aGivenProto);
 }
 
 }  // namespace dom
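
A condensed sketch of what the constructor above now does, using only calls that appear in the hunks in this file (principal lookup and error handling omitted; not a drop-in replacement):

// Sketch only: condensed from MediaStreamAudioDestinationNode's constructor above.
mDOMStream = MakeAndAddRef<DOMMediaStream>(GetOwner());
mStream = AudioNodeStream::Create(aContext, new AudioNodeEngine(this),
                                  AudioNodeStream::EXTERNAL_OUTPUT,
                                  aContext->Graph());
// The track source owns the TrackUnionStream and the input port feeding it
// from mStream (see AudioDestinationTrackSource::Destroy above).
auto source = MakeRefPtr<AudioDestinationTrackSource>(
    this, mStream, aContext->Graph()->CreateTrackUnionStream(), principal);
// The DOM-visible track wraps the union stream and is handed to the stream,
// which now only aggregates tracks.
auto track = MakeRefPtr<AudioStreamTrack>(
    GetOwner(), source->mStream, AudioNodeStream::AUDIO_TRACK, source);
mDOMStream->AddTrackInternal(track);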
--- a/dom/media/webaudio/MediaStreamAudioDestinationNode.h
+++ b/dom/media/webaudio/MediaStreamAudioDestinationNode.h
@@ -47,15 +47,14 @@ class MediaStreamAudioDestinationNode fi
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override;
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override;
 
  private:
   explicit MediaStreamAudioDestinationNode(AudioContext* aContext);
   ~MediaStreamAudioDestinationNode() = default;
 
   RefPtr<DOMMediaStream> mDOMStream;
-  RefPtr<MediaInputPort> mPort;
 };
 
 }  // namespace dom
 }  // namespace mozilla
 
 #endif
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
@@ -47,28 +47,16 @@ MediaStreamAudioSourceNode::MediaStreamA
 already_AddRefed<MediaStreamAudioSourceNode> MediaStreamAudioSourceNode::Create(
     AudioContext& aAudioContext, const MediaStreamAudioSourceOptions& aOptions,
     ErrorResult& aRv) {
   if (aAudioContext.IsOffline()) {
     aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
     return nullptr;
   }
 
-  if (aAudioContext.Graph() !=
-      aOptions.mMediaStream->GetPlaybackStream()->Graph()) {
-    nsCOMPtr<nsPIDOMWindowInner> pWindow = aAudioContext.GetParentObject();
-    Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
-    nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
-                                    NS_LITERAL_CSTRING("Web Audio"), document,
-                                    nsContentUtils::eDOM_PROPERTIES,
-                                    "MediaStreamAudioSourceNodeDifferentRate");
-    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
-    return nullptr;
-  }
-
   RefPtr<MediaStreamAudioSourceNode> node =
       new MediaStreamAudioSourceNode(&aAudioContext, LockOnTrackPicked);
 
   node->Init(aOptions.mMediaStream, aRv);
   if (aRv.Failed()) {
     return nullptr;
   }
 
@@ -77,26 +65,19 @@ already_AddRefed<MediaStreamAudioSourceN
 
 void MediaStreamAudioSourceNode::Init(DOMMediaStream* aMediaStream,
                                       ErrorResult& aRv) {
   if (!aMediaStream) {
     aRv.Throw(NS_ERROR_FAILURE);
     return;
   }
 
-  MediaStream* inputStream = aMediaStream->GetPlaybackStream();
-  MediaStreamGraph* graph = Context()->Graph();
-  if (NS_WARN_IF(graph != inputStream->Graph())) {
-    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
-    return;
-  }
-
   mInputStream = aMediaStream;
   AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
-  mStream = AudioNodeExternalInputStream::Create(graph, engine);
+  mStream = AudioNodeExternalInputStream::Create(Context()->Graph(), engine);
   mInputStream->AddConsumerToKeepAlive(ToSupports(this));
 
   mInputStream->RegisterTrackListener(this);
   if (mInputStream->Active()) {
     NotifyActive();
   }
   AttachToRightTrack(mInputStream, aRv);
 }
@@ -107,24 +88,35 @@ void MediaStreamAudioSourceNode::Destroy
     mInputStream = nullptr;
   }
   DetachFromTrack();
 }
 
 MediaStreamAudioSourceNode::~MediaStreamAudioSourceNode() { Destroy(); }
 
 void MediaStreamAudioSourceNode::AttachToTrack(
-    const RefPtr<MediaStreamTrack>& aTrack) {
+    const RefPtr<MediaStreamTrack>& aTrack, ErrorResult& aRv) {
   MOZ_ASSERT(!mInputTrack);
   MOZ_ASSERT(aTrack->AsAudioStreamTrack());
 
   if (!mStream) {
     return;
   }
 
+  if (NS_WARN_IF(Context()->Graph() != aTrack->Graph())) {
+    nsCOMPtr<nsPIDOMWindowInner> pWindow = Context()->GetParentObject();
+    Document* document = pWindow ? pWindow->GetExtantDoc() : nullptr;
+    nsContentUtils::ReportToConsole(nsIScriptError::warningFlag,
+                                    NS_LITERAL_CSTRING("Web Audio"), document,
+                                    nsContentUtils::eDOM_PROPERTIES,
+                                    "MediaStreamAudioSourceNodeDifferentRate");
+    aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
+    return;
+  }
+
   mInputTrack = aTrack;
   ProcessedMediaStream* outputStream =
       static_cast<ProcessedMediaStream*>(mStream.get());
   mInputPort = mInputTrack->ForwardTrackContentsTo(outputStream);
   PrincipalChanged(mInputTrack);  // trigger enabling/disabling of the connector
   mInputTrack->AddPrincipalChangeObserver(this);
 }
 
@@ -165,17 +157,17 @@ void MediaStreamAudioSourceNode::AttachT
 
   for (const RefPtr<AudioStreamTrack>& track : tracks) {
     if (mBehavior == FollowChanges) {
       if (track->Ended()) {
         continue;
       }
     }
 
-    AttachToTrack(track);
+    AttachToTrack(track, aRv);
     MarkActive();
     return;
   }
 
   // There was no track available. We'll allow the node to be garbage collected.
   MarkInactive();
 }
 
@@ -187,17 +179,17 @@ void MediaStreamAudioSourceNode::NotifyT
   if (mInputTrack) {
     return;
   }
 
   if (!aTrack->AsAudioStreamTrack()) {
     return;
   }
 
-  AttachToTrack(aTrack);
+  AttachToTrack(aTrack, IgnoreErrors());
 }
 
 void MediaStreamAudioSourceNode::NotifyTrackRemoved(
     const RefPtr<MediaStreamTrack>& aTrack) {
   if (mBehavior == FollowChanges) {
     if (aTrack != mInputTrack) {
       return;
     }
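
The graph-mismatch check that previously rejected the whole DOMMediaStream at Create()/Init() time now lives in AttachToTrack() and compares against the individual track's graph. A minimal condensed form of the added check, same calls as in the hunks above:

// Condensed from AttachToTrack() above: reject a track whose graph differs
// from the AudioContext's graph and surface the error through the new
// ErrorResult parameter. One caller above passes IgnoreErrors() where the
// error cannot be propagated.
if (NS_WARN_IF(Context()->Graph() != aTrack->Graph())) {
  aRv.Throw(NS_ERROR_DOM_NOT_SUPPORTED_ERR);
  return;
}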
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.h
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.h
@@ -3,18 +3,19 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MediaStreamAudioSourceNode_h_
 #define MediaStreamAudioSourceNode_h_
 
 #include "AudioNode.h"
+#include "AudioNodeEngine.h"
 #include "DOMMediaStream.h"
-#include "AudioNodeEngine.h"
+#include "PrincipalChangeObserver.h"
 
 namespace mozilla {
 
 namespace dom {
 
 class AudioContext;
 struct MediaStreamAudioSourceOptions;
 
@@ -72,17 +73,17 @@ class MediaStreamAudioSourceNode
   virtual const char* CrossOriginErrorString() const {
     return "MediaStreamAudioSourceNodeCrossOrigin";
   }
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override;
   size_t SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const override;
 
   // Attaches to aTrack so that its audio content will be used as input.
-  void AttachToTrack(const RefPtr<MediaStreamTrack>& aTrack);
+  void AttachToTrack(const RefPtr<MediaStreamTrack>& aTrack, ErrorResult& aRv);
 
   // Detaches from the currently attached track if there is one.
   void DetachFromTrack();
 
   // Attaches to the first audio track in the MediaStream, when the tracks are
   // ordered by id.
   void AttachToRightTrack(const RefPtr<DOMMediaStream>& aMediaStream,
                           ErrorResult& aRv);
--- a/dom/media/webrtc/MediaEngineDefault.cpp
+++ b/dom/media/webrtc/MediaEngineDefault.cpp
@@ -246,18 +246,17 @@ void MediaEngineDefaultVideoSource::SetT
 
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
 
   mStream = aStream;
   mTrackID = aTrackID;
   mPrincipalHandle = aPrincipal;
-  aStream->AddTrack(aTrackID, new VideoSegment(),
-                    SourceMediaStream::ADDTRACK_QUEUED);
+  aStream->AddTrack(aTrackID, new VideoSegment());
 }
 
 nsresult MediaEngineDefaultVideoSource::Start() {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mStream, "SetTrack() must happen before Start()");
   MOZ_ASSERT(IsTrackIDExplicit(mTrackID),
@@ -492,18 +491,17 @@ void MediaEngineDefaultAudioSource::SetT
   MOZ_ASSERT(mState == kAllocated);
   MOZ_ASSERT(!mStream);
   MOZ_ASSERT(mTrackID == TRACK_NONE);
 
   // AddAudioTrack will take ownership of segment
   mStream = aStream;
   mTrackID = aTrackID;
   mPrincipalHandle = aPrincipal;
-  aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment(),
-                         SourceMediaStream::ADDTRACK_QUEUED);
+  aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment());
 }
 
 nsresult MediaEngineDefaultAudioSource::Start() {
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mStream, "SetTrack() must happen before Start()");
   MOZ_ASSERT(IsTrackIDExplicit(mTrackID),
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -284,18 +284,17 @@ void MediaEngineRemoteVideoSource::SetTr
   }
 
   {
     MutexAutoLock lock(mMutex);
     mStream = aStream;
     mTrackID = aTrackID;
     mPrincipal = aPrincipal;
   }
-  aStream->AddTrack(aTrackID, new VideoSegment(),
-                    SourceMediaStream::ADDTRACK_QUEUED);
+  aStream->AddTrack(aTrackID, new VideoSegment());
 }
 
 nsresult MediaEngineRemoteVideoSource::Start() {
   LOG(__PRETTY_FUNCTION__);
   AssertIsOnOwningThread();
 
   MOZ_ASSERT(mState == kAllocated || mState == kStopped);
   MOZ_ASSERT(mInitDone);
--- a/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/dom/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -490,18 +490,17 @@ void MediaEngineWebRTCMicrophoneSource::
   MOZ_ASSERT(mTrackID == TRACK_NONE);
   MOZ_ASSERT(mPrincipal == PRINCIPAL_HANDLE_NONE);
   mStream = aStream;
   mTrackID = aTrackID;
   mPrincipal = aPrincipal;
 
   AudioSegment* segment = new AudioSegment();
 
-  mStream->AddAudioTrack(mTrackID, mStream->GraphRate(), segment,
-                         SourceMediaStream::ADDTRACK_QUEUED);
+  mStream->AddAudioTrack(mTrackID, mStream->GraphRate(), segment);
 
   mInputProcessing = new AudioInputProcessing(mDeviceMaxChannelCount, mStream,
                                               mTrackID, mPrincipal);
 
   // We only add the listener once -- AudioInputProcessing wants pull
   // notifications also when stopped for appending silence.
   mPullListener = new AudioInputProcessingPullListener(mInputProcessing);
   NS_DispatchToMainThread(NS_NewRunnableFunction(
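
All three capture engines now share the same SetTrack() shape: record the stream, track ID and principal, then add a live track directly, since SourceMediaStream::ADDTRACK_QUEUED is no longer passed at these call sites. Condensed from the hunks above (the principal member is named mPrincipalHandle or mPrincipal depending on the engine):

// Condensed SetTrack() pattern from the engines above.
mStream = aStream;
mTrackID = aTrackID;
// Video engines:
aStream->AddTrack(aTrackID, new VideoSegment());
// Audio engines; AddAudioTrack takes ownership of the segment:
aStream->AddAudioTrack(aTrackID, aStream->GraphRate(), new AudioSegment());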
--- a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
+++ b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
@@ -6,17 +6,16 @@
 
 #include <iostream>
 
 #include "logging.h"
 #include "nss.h"
 
 #include "AudioSegment.h"
 #include "AudioStreamTrack.h"
-#include "DOMMediaStream.h"
 #include "mozilla/Mutex.h"
 #include "mozilla/RefPtr.h"
 #include "MediaPipeline.h"
 #include "MediaPipelineFilter.h"
 #include "MediaStreamGraph.h"
 #include "MediaStreamListener.h"
 #include "MediaStreamTrack.h"
 #include "transportflow.h"
@@ -33,29 +32,16 @@
 
 using namespace mozilla;
 MOZ_MTLOG_MODULE("mediapipeline")
 
 static MtransportTestUtils* test_utils;
 
 namespace {
 
-class FakeSourceMediaStream : public mozilla::SourceMediaStream {
- public:
-  FakeSourceMediaStream() : SourceMediaStream() {}
-
-  virtual ~FakeSourceMediaStream() override { mMainThreadDestroyed = true; }
-
-  virtual StreamTime AppendToTrack(
-      TrackID aID, MediaSegment* aSegment,
-      MediaSegment* aRawSegment = nullptr) override {
-    return aSegment->GetDuration();
-  }
-};
-
 class FakeMediaStreamTrackSource : public mozilla::dom::MediaStreamTrackSource {
  public:
   FakeMediaStreamTrackSource() : MediaStreamTrackSource(nullptr, nsString()) {}
 
   virtual mozilla::dom::MediaSourceEnum GetMediaSource() const override {
     return mozilla::dom::MediaSourceEnum::Microphone;
   }
 
@@ -64,18 +50,17 @@ class FakeMediaStreamTrackSource : publi
   virtual void Enable() override {}
 
   virtual void Stop() override {}
 };
 
 class FakeAudioStreamTrack : public mozilla::dom::AudioStreamTrack {
  public:
   FakeAudioStreamTrack()
-      : AudioStreamTrack(new DOMMediaStream(nullptr), 0, 1,
-                         new FakeMediaStreamTrackSource()),
+      : AudioStreamTrack(nullptr, nullptr, 0, new FakeMediaStreamTrackSource()),
         mMutex("Fake AudioStreamTrack"),
         mStop(false),
         mCount(0) {
     NS_NewTimerWithFuncCallback(
         getter_AddRefs(mTimer), FakeAudioStreamTrackGenerateData, this, 20,
         nsITimer::TYPE_REPEATING_SLACK,
         "FakeAudioStreamTrack::FakeAudioStreamTrackGenerateData",
         test_utils->sts_target());
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1160,88 +1160,96 @@ void MediaPipelineTransmit::PipelineList
       mConverter->QueueVideoChunk(*iter, !mEnabled);
     }
   }
 }
 
 class GenericReceiveListener : public MediaStreamTrackListener {
  public:
   explicit GenericReceiveListener(dom::MediaStreamTrack* aTrack)
-      : mTrack(new nsMainThreadPtrHolder<dom::MediaStreamTrack>(
-            "GenericReceiveListener::mTrack", aTrack)),
-        mTrackId(aTrack->GetInputTrackId()),
-        mSource(mTrack->GetInputStream()->AsSourceStream()),
+      : mTrackSource(new nsMainThreadPtrHolder<RemoteTrackSource>(
+            "GenericReceiveListener::mTrackSource",
+            &static_cast<RemoteTrackSource&>(aTrack->GetSource()))),
+        mTrackId(aTrack->GetTrackID()),
+        mSource(mTrackSource->mStream),
+        mIsAudio(aTrack->AsAudioStreamTrack()),
         mPrincipalHandle(PRINCIPAL_HANDLE_NONE),
         mListening(false),
         mMaybeTrackNeedsUnmute(true) {
-    MOZ_RELEASE_ASSERT(mSource, "Must be used with a SourceMediaStream");
+    MOZ_DIAGNOSTIC_ASSERT(NS_IsMainThread());
+    MOZ_DIAGNOSTIC_ASSERT(mSource, "Must be used with a SourceMediaStream");
   }
 
   virtual ~GenericReceiveListener() = default;
 
   void AddTrackToSource(uint32_t aRate = 0) {
-    MOZ_ASSERT((aRate != 0 && mTrack->AsAudioStreamTrack()) ||
-               mTrack->AsVideoStreamTrack());
+    MOZ_ASSERT_IF(mIsAudio, aRate != 0);
 
-    if (mTrack->AsAudioStreamTrack()) {
+    if (mIsAudio) {
       mSource->AddAudioTrack(mTrackId, aRate, new AudioSegment());
-    } else if (mTrack->AsVideoStreamTrack()) {
+    } else {
       mSource->AddTrack(mTrackId, new VideoSegment());
     }
     MOZ_LOG(gMediaPipelineLog, LogLevel::Debug,
-            ("GenericReceiveListener added %s track %d (%p) to stream %p",
-             mTrack->AsAudioStreamTrack() ? "audio" : "video", mTrackId,
-             mTrack.get(), mSource.get()));
+            ("GenericReceiveListener added %s track %d to stream %p",
+             mIsAudio ? "audio" : "video", mTrackId, mSource.get()));
 
     mSource->AddTrackListener(this, mTrackId);
   }
 
   void AddSelf() {
     if (mListening) {
       return;
     }
     mListening = true;
     mMaybeTrackNeedsUnmute = true;
-    if (mTrack->AsAudioStreamTrack() && !mSource->IsDestroyed()) {
+    if (mIsAudio && !mSource->IsDestroyed()) {
       mSource->SetPullingEnabled(mTrackId, true);
     }
   }
 
   void RemoveSelf() {
     if (!mListening) {
       return;
     }
     mListening = false;
-    if (mTrack->AsAudioStreamTrack() && !mSource->IsDestroyed()) {
+    if (mIsAudio && !mSource->IsDestroyed()) {
       mSource->SetPullingEnabled(mTrackId, false);
     }
   }
 
   void OnRtpReceived() {
     if (mMaybeTrackNeedsUnmute) {
       mMaybeTrackNeedsUnmute = false;
       NS_DispatchToMainThread(
           NewRunnableMethod("GenericReceiveListener::OnRtpReceived_m", this,
                             &GenericReceiveListener::OnRtpReceived_m));
     }
   }
 
   void OnRtpReceived_m() {
     if (mListening) {
-      static_cast<RemoteTrackSource&>(mTrack->GetSource()).SetMuted(false);
+      mTrackSource->SetMuted(false);
     }
   }
 
   void EndTrack() {
     MOZ_LOG(gMediaPipelineLog, LogLevel::Debug,
             ("GenericReceiveListener ending track"));
 
-    // This breaks the cycle with the SourceMediaStream
-    mSource->RemoveTrackListener(this, mTrackId);
-    mSource->EndTrack(mTrackId);
+    if (!mSource->IsDestroyed()) {
+      // This breaks the cycle with the SourceMediaStream
+      mSource->RemoveTrackListener(this, mTrackId);
+      mSource->EndTrack(mTrackId);
+      mSource->Destroy();
+    }
+
+    NS_DispatchToMainThread(NewRunnableMethod("RemoteTrackSource::ForceEnded",
+                                              mTrackSource.get(),
+                                              &RemoteTrackSource::ForceEnded));
   }
 
   // Must be called on the main thread
   void SetPrincipalHandle_m(const PrincipalHandle& aPrincipalHandle) {
     class Message : public ControlMessage {
      public:
       Message(GenericReceiveListener* aListener,
               const PrincipalHandle& aPrincipalHandle)
@@ -1252,29 +1260,30 @@ class GenericReceiveListener : public Me
       void Run() override {
         mListener->SetPrincipalHandle_msg(mPrincipalHandle);
       }
 
       const RefPtr<GenericReceiveListener> mListener;
       PrincipalHandle mPrincipalHandle;
     };
 
-    mTrack->GraphImpl()->AppendMessage(
+    mSource->GraphImpl()->AppendMessage(
         MakeUnique<Message>(this, aPrincipalHandle));
   }
 
   // Must be called on the MediaStreamGraph thread
   void SetPrincipalHandle_msg(const PrincipalHandle& aPrincipalHandle) {
     mPrincipalHandle = aPrincipalHandle;
   }
 
  protected:
-  const nsMainThreadPtrHandle<dom::MediaStreamTrack> mTrack;
+  const nsMainThreadPtrHandle<RemoteTrackSource> mTrackSource;
   const TrackID mTrackId;
   const RefPtr<SourceMediaStream> mSource;
+  const bool mIsAudio;
   PrincipalHandle mPrincipalHandle;
   bool mListening;
   Atomic<bool> mMaybeTrackNeedsUnmute;
 };
 
 MediaPipelineReceive::MediaPipelineReceive(
     const std::string& aPc, MediaTransportHandler* aTransportHandler,
     nsCOMPtr<nsIEventTarget> aMainThread, nsCOMPtr<nsIEventTarget> aStsThread,
@@ -1285,22 +1294,21 @@ MediaPipelineReceive::MediaPipelineRecei
 MediaPipelineReceive::~MediaPipelineReceive() {}
 
 class MediaPipelineReceiveAudio::PipelineListener
     : public GenericReceiveListener {
  public:
   PipelineListener(dom::MediaStreamTrack* aTrack,
                    const RefPtr<MediaSessionConduit>& aConduit)
       : GenericReceiveListener(aTrack),
-        mConduit(aConduit)
+        mConduit(aConduit),
         // AudioSession conduit only supports 16, 32, 44.1 and 48kHz
         // This is an artificial limitation; supporting arbitrary rates would
         // require more changes. If the sampling rate is not supported, we
         // will use 48kHz instead.
-        ,
         mRate(static_cast<AudioSessionConduit*>(mConduit.get())
                       ->IsSamplingFreqSupported(mSource->GraphRate())
                   ? mSource->GraphRate()
                   : WEBRTC_MAX_SAMPLE_RATE),
         mTaskQueue(
             new TaskQueue(GetMediaThreadPool(MediaThreadType::WEBRTC_DECODER),
                           "AudioPipelineListener")),
         mPlayedTicks(0) {
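
GenericReceiveListener now holds the RemoteTrackSource and its SourceMediaStream directly instead of the DOM track, so teardown has to end and destroy the graph-side source before notifying the DOM side. A condensed sketch of the order, taken from EndTrack() above:

// Condensed from GenericReceiveListener::EndTrack() above: first break the
// cycle with the SourceMediaStream and destroy it (if not already destroyed),
// then mark the DOM-side source as ended on the main thread.
if (!mSource->IsDestroyed()) {
  mSource->RemoveTrackListener(this, mTrackId);
  mSource->EndTrack(mTrackId);
  mSource->Destroy();
}
NS_DispatchToMainThread(NewRunnableMethod("RemoteTrackSource::ForceEnded",
                                          mTrackSource.get(),
                                          &RemoteTrackSource::ForceEnded));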
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -30,16 +30,17 @@
 #include "nsThreadUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsProxyRelease.h"
 #include "prtime.h"
 
 #include "AudioConduit.h"
 #include "VideoConduit.h"
+#include "MediaStreamGraph.h"
 #include "runnable_utils.h"
 #include "PeerConnectionCtx.h"
 #include "PeerConnectionImpl.h"
 #include "PeerConnectionMedia.h"
 #include "RemoteTrackSource.h"
 #include "nsDOMDataChannelDeclarations.h"
 #include "dtlsidentity.h"
 #include "signaling/src/sdp/SdpAttribute.h"
@@ -1792,59 +1793,58 @@ static int GetDTMFToneCode(uint16_t c) {
   MOZ_ASSERT(i);
   return i - DTMF_TONECODES;
 }
 
 OwningNonNull<dom::MediaStreamTrack> PeerConnectionImpl::CreateReceiveTrack(
     SdpMediaSection::MediaType type) {
   bool audio = (type == SdpMediaSection::MediaType::kAudio);
 
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-      audio ? MediaStreamGraph::AUDIO_THREAD_DRIVER
-            : MediaStreamGraph::SYSTEM_THREAD_DRIVER,
-      GetWindow(), MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
-
-  RefPtr<DOMMediaStream> stream =
-      DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
-
-  CSFLogDebug(LOGTAG, "Created media stream %p, inner: %p", stream.get(),
-              stream->GetInputStream());
-
-  // Set the principal used for creating the tracks. This makes the stream
+  // Set the principal used for creating the tracks. This makes the track
   // data (audio/video samples) accessible to the receiving page. We're
   // only certain that privacy hasn't been requested if we're connected.
   nsCOMPtr<nsIPrincipal> principal;
   Document* doc = GetWindow()->GetExtantDoc();
   MOZ_ASSERT(doc);
   if (mPrivacyRequested.isSome() && !*mPrivacyRequested) {
     principal = doc->NodePrincipal();
   } else {
-    // we're either certain that we need isolation for the streams, OR
-    // we're not sure and we can fix the stream in SetDtlsConnected
+    // we're either certain that we need isolation for the tracks, OR
+    // we're not sure and we can fix the track in SetDtlsConnected
     principal =
         NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
   }
 
+  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
+      audio ? MediaStreamGraph::AUDIO_THREAD_DRIVER
+            : MediaStreamGraph::SYSTEM_THREAD_DRIVER,
+      GetWindow(), MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+
   RefPtr<MediaStreamTrack> track;
   RefPtr<RemoteTrackSource> trackSource;
+  RefPtr<SourceMediaStream> source = graph->CreateSourceStream();
   if (audio) {
-    trackSource = new RemoteTrackSource(principal,
+    trackSource = new RemoteTrackSource(source, principal,
                                         NS_ConvertASCIItoUTF16("remote audio"));
-    track = stream->CreateDOMTrack(333,  // Use a constant TrackID. Dependents
-                                         // read this from the DOM track.
-                                   MediaSegment::AUDIO, trackSource);
+    track = new AudioStreamTrack(GetWindow(), source,
+                                 333,  // Use a constant TrackID. Dependents
+                                       // read this from the DOM track.
+                                 trackSource);
   } else {
-    trackSource = new RemoteTrackSource(principal,
+    trackSource = new RemoteTrackSource(source, principal,
                                         NS_ConvertASCIItoUTF16("remote video"));
-    track = stream->CreateDOMTrack(666,  // Use a constant TrackID. Dependents
-                                         // read this from the DOM track.
-                                   MediaSegment::VIDEO, trackSource);
+    track = new VideoStreamTrack(GetWindow(), source,
+                                 666,  // Use a constant TrackID. Dependents
+                                       // read this from the DOM track.
+                                 trackSource);
   }
 
-  stream->AddTrackInternal(track);
+  CSFLogDebug(LOGTAG, "Created %s track %p, inner: %p",
+              audio ? "audio" : "video", track.get(), track->GetStream());
+
   // Spec says remote tracks start out muted.
   trackSource->SetMuted(true);
 
   return OwningNonNull<dom::MediaStreamTrack>(*track);
 }
 
 NS_IMETHODIMP
 PeerConnectionImpl::InsertDTMF(TransceiverImpl& transceiver,
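
CreateReceiveTrack() above now builds the receive track without an intermediate DOMMediaStream: a SourceMediaStream is created in the graph, a RemoteTrackSource takes ownership of it, and the DOM track is constructed directly from the window, stream, constant TrackID and source. A condensed sketch of the audio case (the video case only swaps the track class, label and TrackID), using only calls from the hunk above:

// Condensed from CreateReceiveTrack() above (audio case).
RefPtr<SourceMediaStream> source = graph->CreateSourceStream();
RefPtr<RemoteTrackSource> trackSource = new RemoteTrackSource(
    source, principal, NS_ConvertASCIItoUTF16("remote audio"));
RefPtr<MediaStreamTrack> track =
    new AudioStreamTrack(GetWindow(), source,
                         333,  // constant TrackID, read by dependents
                         trackSource);
trackSource->SetMuted(true);  // spec: remote tracks start out muted
return OwningNonNull<dom::MediaStreamTrack>(*track);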
--- a/media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
+++ b/media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
@@ -7,18 +7,19 @@
 
 #include "MediaStreamTrack.h"
 #include "MediaStreamError.h"
 
 namespace mozilla {
 
 class RemoteTrackSource : public dom::MediaStreamTrackSource {
  public:
-  explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
-      : dom::MediaStreamTrackSource(aPrincipal, aLabel) {}
+  explicit RemoteTrackSource(SourceMediaStream* aStream,
+                             nsIPrincipal* aPrincipal, const nsString& aLabel)
+      : dom::MediaStreamTrackSource(aPrincipal, aLabel), mStream(aStream) {}
 
   dom::MediaSourceEnum GetMediaSource() const override {
     return dom::MediaSourceEnum::Other;
   }
 
   RefPtr<ApplyConstraintsPromise> ApplyConstraints(
       const dom::MediaTrackConstraints& aConstraints,
       dom::CallerType aCallerType) override {
@@ -38,16 +39,22 @@ class RemoteTrackSource : public dom::Me
 
   void Enable() override {}
 
   void SetPrincipal(nsIPrincipal* aPrincipal) {
     mPrincipal = aPrincipal;
     PrincipalChanged();
   }
   void SetMuted(bool aMuted) { MutedChanged(aMuted); }
+  void ForceEnded() { OverrideEnded(); }
+
+  const RefPtr<SourceMediaStream> mStream;
 
  protected:
-  virtual ~RemoteTrackSource() {}
+  virtual ~RemoteTrackSource() {
+    MOZ_ASSERT(NS_IsMainThread());
+    MOZ_ASSERT(mStream->IsDestroyed());
+  }
 };
 
 }  // namespace mozilla
 
 #endif  // _REMOTE_TRACK_SOURCE_H_
deleted file mode 100644
--- a/testing/web-platform/meta/mediacapture-streams/MediaStream-clone.https.html.ini
+++ /dev/null
@@ -1,4 +0,0 @@
-[MediaStream-clone.https.html]
-  [Tests that cloning MediaStream objects works as expected]
-    expected: FAIL
-