Bug 1454998 - Rename streams to tracks. r=padenot,karlt,smaug
☠☠ backed out by 5f5e153eb14b ☠☠
author: Andreas Pehrson <apehrson@mozilla.com>
Wed, 02 Oct 2019 08:18:16 +0000
changeset 495965 80417bdfa72112c6f9472c29ce49e8ff81e8c688
parent 495964 8ff03f2f4ca2da0761fb285f8c403b51765a19cf
child 495966 5f5e153eb14b8ecd44b5453e7a4a9e53e95193dd
push id: 114140
push user: dvarga@mozilla.com
push date: Wed, 02 Oct 2019 18:04:51 +0000
treeherder: mozilla-inbound@32eb0ea893f3 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: padenot, karlt, smaug
bugs: 1454998
milestone: 71.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1454998 - Rename streams to tracks. r=padenot,karlt,smaug

This renames the following (in alphabetical order, non-exhaustive):
  AudioCaptureStream -> AudioCaptureTrack
  AudioNodeStream -> AudioNodeTrack
  AudioNodeExternalInputStream -> AudioNodeExternalInputTrack
  DirectMediaStreamTrackListener -> DirectMediaTrackListener
  MediaStream -> MediaTrack
    - Note that there's also dom::MediaTrack. Namespaces differentiate them.
  MediaStreamGraph -> MediaTrackGraph
  MediaStreamTrackListener -> MediaTrackListener
  MSG -> MTG (in comments)
  ProcessedMediaStream -> ProcessedMediaTrack
  SharedDummyStream -> SharedDummyTrack
  SourceMediaStream -> SourceMediaTrack
  StreamTime -> TrackTime
  TrackUnionStream -> ForwardedInputTrack
    - Because this no longer takes a union of anything, but only a single
      track as input.

Other minor classes, members and comments have been updated to reflect these
name changes.

Differential Revision: https://phabricator.services.mozilla.com/D46146
dom/html/HTMLCanvasElement.cpp
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/AudioBufferUtils.h
dom/media/AudioCaptureStream.cpp
dom/media/AudioCaptureStream.h
dom/media/AudioCaptureTrack.cpp
dom/media/AudioCaptureTrack.h
dom/media/AudioSampleFormat.h
dom/media/AudioSegment.h
dom/media/AudioStreamTrack.cpp
dom/media/AudioStreamTrack.h
dom/media/CanvasCaptureMediaStream.cpp
dom/media/CanvasCaptureMediaStream.h
dom/media/CubebUtils.cpp
dom/media/CubebUtils.h
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/DriftCompensation.h
dom/media/ForwardedInputTrack.cpp
dom/media/ForwardedInputTrack.h
dom/media/GraphDriver.cpp
dom/media/GraphDriver.h
dom/media/GraphRunner.cpp
dom/media/GraphRunner.h
dom/media/ImageToI420.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/MediaDecoderStateMachine.cpp
dom/media/MediaDecoderStateMachine.h
dom/media/MediaManager.cpp
dom/media/MediaManager.h
dom/media/MediaPlaybackDelayPolicy.cpp
dom/media/MediaRecorder.cpp
dom/media/MediaRecorder.h
dom/media/MediaSegment.h
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/MediaStreamGraphImpl.h
dom/media/MediaStreamListener.cpp
dom/media/MediaStreamListener.h
dom/media/MediaStreamTrack.cpp
dom/media/MediaStreamTrack.h
dom/media/MediaStreamWindowCapturer.cpp
dom/media/MediaStreamWindowCapturer.h
dom/media/MediaTrack.cpp
dom/media/MediaTrack.h
dom/media/MediaTrackGraph.cpp
dom/media/MediaTrackGraph.h
dom/media/MediaTrackGraphImpl.h
dom/media/MediaTrackListener.cpp
dom/media/MediaTrackListener.h
dom/media/PrincipalHandle.h
dom/media/Tracing.h
dom/media/TrackUnionStream.cpp
dom/media/TrackUnionStream.h
dom/media/VideoFrameConverter.h
dom/media/VideoOutput.h
dom/media/VideoSegment.h
dom/media/VideoStreamTrack.cpp
dom/media/VideoStreamTrack.h
dom/media/VideoUtils.cpp
dom/media/VideoUtils.h
dom/media/encoder/MediaEncoder.cpp
dom/media/encoder/MediaEncoder.h
dom/media/encoder/OpusTrackEncoder.cpp
dom/media/encoder/TrackEncoder.cpp
dom/media/encoder/TrackEncoder.h
dom/media/encoder/VP8TrackEncoder.cpp
dom/media/encoder/VP8TrackEncoder.h
dom/media/gtest/TestAudioCallbackDriver.cpp
dom/media/gtest/TestGroupId.cpp
dom/media/gtest/TestVideoTrackEncoder.cpp
dom/media/imagecapture/CaptureTask.cpp
dom/media/imagecapture/CaptureTask.h
dom/media/imagecapture/ImageCapture.cpp
dom/media/imagecapture/ImageCapture.h
dom/media/mediasink/DecodedStream.cpp
dom/media/mediasink/DecodedStream.h
dom/media/mediasink/OutputStreamManager.cpp
dom/media/mediasink/OutputStreamManager.h
dom/media/moz.build
dom/media/test/mochitest.ini
dom/media/test/test_autoplay_policy_web_audio_AudioParamStream.html
dom/media/test/test_imagecapture.html
dom/media/tests/mochitest/identity/mochitest.ini
dom/media/tests/mochitest/mochitest.ini
dom/media/tests/mochitest/peerconnection_audio_forced_sample_rate.js
dom/media/tests/mochitest/test_peerConnection_basicAudio_forced_higher_rate.html
dom/media/tests/mochitest/test_peerConnection_basicAudio_forced_lower_rate.html
dom/media/webaudio/AnalyserNode.cpp
dom/media/webaudio/AudioBlock.h
dom/media/webaudio/AudioBufferSourceNode.cpp
dom/media/webaudio/AudioBufferSourceNode.h
dom/media/webaudio/AudioContext.cpp
dom/media/webaudio/AudioContext.h
dom/media/webaudio/AudioDestinationNode.cpp
dom/media/webaudio/AudioDestinationNode.h
dom/media/webaudio/AudioEventTimeline.cpp
dom/media/webaudio/AudioEventTimeline.h
dom/media/webaudio/AudioListener.cpp
dom/media/webaudio/AudioNode.cpp
dom/media/webaudio/AudioNode.h
dom/media/webaudio/AudioNodeEngine.cpp
dom/media/webaudio/AudioNodeEngine.h
dom/media/webaudio/AudioNodeExternalInputStream.cpp
dom/media/webaudio/AudioNodeExternalInputStream.h
dom/media/webaudio/AudioNodeExternalInputTrack.cpp
dom/media/webaudio/AudioNodeExternalInputTrack.h
dom/media/webaudio/AudioNodeStream.cpp
dom/media/webaudio/AudioNodeStream.h
dom/media/webaudio/AudioNodeTrack.cpp
dom/media/webaudio/AudioNodeTrack.h
dom/media/webaudio/AudioParam.cpp
dom/media/webaudio/AudioParam.h
dom/media/webaudio/AudioParamTimeline.h
dom/media/webaudio/AudioWorkletGlobalScope.cpp
dom/media/webaudio/AudioWorkletImpl.cpp
dom/media/webaudio/AudioWorkletImpl.h
dom/media/webaudio/AudioWorkletNode.cpp
dom/media/webaudio/BiquadFilterNode.cpp
dom/media/webaudio/ChannelMergerNode.cpp
dom/media/webaudio/ChannelSplitterNode.cpp
dom/media/webaudio/ConstantSourceNode.cpp
dom/media/webaudio/ConstantSourceNode.h
dom/media/webaudio/ConvolverNode.cpp
dom/media/webaudio/DelayNode.cpp
dom/media/webaudio/DynamicsCompressorNode.cpp
dom/media/webaudio/GainNode.cpp
dom/media/webaudio/IIRFilterNode.cpp
dom/media/webaudio/MediaElementAudioSourceNode.cpp
dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
dom/media/webaudio/MediaStreamAudioDestinationNode.h
dom/media/webaudio/MediaStreamAudioSourceNode.cpp
dom/media/webaudio/MediaStreamAudioSourceNode.h
dom/media/webaudio/MediaStreamTrackAudioSourceNode.cpp
dom/media/webaudio/MediaStreamTrackAudioSourceNode.h
dom/media/webaudio/OscillatorNode.cpp
dom/media/webaudio/OscillatorNode.h
dom/media/webaudio/PannerNode.cpp
dom/media/webaudio/PannerNode.h
dom/media/webaudio/PlayingRefChangeHandler.h
dom/media/webaudio/ScriptProcessorNode.cpp
dom/media/webaudio/StereoPannerNode.cpp
dom/media/webaudio/WaveShaperNode.cpp
dom/media/webaudio/WaveShaperNode.h
dom/media/webaudio/WebAudioUtils.cpp
dom/media/webaudio/WebAudioUtils.h
dom/media/webaudio/moz.build
dom/media/webaudio/test/blink/mochitest.ini
dom/media/webaudio/test/mochitest.ini
dom/media/webaudio/test/test_WebAudioMemoryReporting.html
dom/media/webaudio/test/test_audioContextParams_sampleRate.html
dom/media/webrtc/MediaEngine.h
dom/media/webrtc/MediaEngineDefault.cpp
dom/media/webrtc/MediaEngineDefault.h
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineSource.h
dom/media/webrtc/MediaEngineTabVideoSource.cpp
dom/media/webrtc/MediaEngineTabVideoSource.h
dom/media/webrtc/MediaEngineWebRTC.h
dom/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/webrtc/MediaEngineWebRTCAudio.h
dom/media/webrtc/SineWaveGenerator.h
dom/media/webspeech/recognition/SpeechRecognition.cpp
dom/media/webspeech/recognition/SpeechRecognition.h
dom/media/webspeech/recognition/SpeechTrackListener.cpp
dom/media/webspeech/recognition/SpeechTrackListener.h
dom/media/webspeech/recognition/test/mochitest.ini
dom/media/webspeech/synth/test/mochitest.ini
dom/media/webspeech/synth/test/startup/mochitest.ini
dom/webidl/AudioParam.webidl
media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
modules/libpref/init/all.js
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -5,17 +5,17 @@
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "mozilla/dom/HTMLCanvasElement.h"
 
 #include "ImageEncoder.h"
 #include "jsapi.h"
 #include "jsfriendapi.h"
 #include "Layers.h"
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 #include "mozilla/Assertions.h"
 #include "mozilla/Base64.h"
 #include "mozilla/CheckedInt.h"
 #include "mozilla/dom/CanvasCaptureMediaStream.h"
 #include "mozilla/dom/CanvasRenderingContext2D.h"
 #include "mozilla/dom/Event.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/HTMLCanvasElementBinding.h"
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -29,19 +29,20 @@
 #include "MediaContainerType.h"
 #include "MediaError.h"
 #include "MediaManager.h"
 #include "MediaMetadataManager.h"
 #include "MediaResource.h"
 #include "MediaShutdownManager.h"
 #include "MediaSourceDecoder.h"
 #include "MediaStreamError.h"
-#include "MediaStreamGraphImpl.h"
-#include "MediaStreamListener.h"
+#include "MediaTrackGraphImpl.h"
+#include "MediaTrackListener.h"
 #include "MediaStreamWindowCapturer.h"
+#include "MediaTrack.h"
 #include "MediaTrackList.h"
 #include "SVGObserverUtils.h"
 #include "TimeRanges.h"
 #include "VideoFrameContainer.h"
 #include "VideoOutput.h"
 #include "VideoStreamTrack.h"
 #include "base/basictypes.h"
 #include "jsapi.h"
@@ -384,18 +385,17 @@ class HTMLMediaElement::FirstFrameListen
                      AbstractThread* aMainThread)
       : VideoOutput(aContainer, aMainThread) {
     MOZ_ASSERT(NS_IsMainThread());
   }
 
   // NB that this overrides VideoOutput::NotifyRealtimeTrackData, so we can
   // filter out all frames but the first one with a real size. This allows us to
   // later re-use the logic in VideoOutput for rendering that frame.
-  void NotifyRealtimeTrackData(MediaStreamGraph* aGraph,
-                               StreamTime aTrackOffset,
+  void NotifyRealtimeTrackData(MediaTrackGraph* aGraph, TrackTime aTrackOffset,
                                const MediaSegment& aMedia) override {
     MOZ_ASSERT(aMedia.GetType() == MediaSegment::VIDEO);
 
     if (mInitialSizeFound) {
       return;
     }
 
     const VideoSegment& video = static_cast<const VideoSegment&>(aMedia);
@@ -412,17 +412,17 @@ class HTMLMediaElement::FirstFrameListen
         VideoOutput::NotifyRealtimeTrackData(aGraph, aTrackOffset, segment);
         return;
       }
     }
   }
 
  private:
   // Whether a frame with a concrete size has been received. May only be
-  // accessed on the MSG's appending thread. (this is a direct listener so we
+  // accessed on the MTG's appending thread. (this is a direct listener so we
   // get called by whoever is producing this track's data)
   bool mInitialSizeFound = false;
 };
 
 /**
  * Helper class that manages audio and video outputs for all enabled tracks in a
  * media element. It also manages calculating the current time when playing a
  * MediaStream.
@@ -436,31 +436,31 @@ class HTMLMediaElement::MediaStreamRende
                       VideoFrameContainer* aVideoContainer,
                       void* aAudioOutputKey)
       : mVideoContainer(aVideoContainer),
         mAudioOutputKey(aAudioOutputKey),
         mWatchManager(this, aMainThread) {}
 
   void UpdateGraphTime() {
     mGraphTime =
-        mGraphTimeDummy->mStream->Graph()->CurrentTime() - *mGraphTimeOffset;
+        mGraphTimeDummy->mTrack->Graph()->CurrentTime() - *mGraphTimeOffset;
   }
 
   void Start() {
     if (mRendering) {
       return;
     }
 
     mRendering = true;
 
     if (!mGraphTimeDummy) {
       return;
     }
 
-    MediaStreamGraph* graph = mGraphTimeDummy->mStream->Graph();
+    MediaTrackGraph* graph = mGraphTimeDummy->mTrack->Graph();
     mGraphTimeOffset = Some(graph->CurrentTime().Ref() - mGraphTime);
     mWatchManager.Watch(graph->CurrentTime(),
                         &MediaStreamRenderer::UpdateGraphTime);
 
     for (const auto& t : mAudioTracks) {
       if (t) {
         t->AsAudioStreamTrack()->AddAudioOutput(mAudioOutputKey);
         t->AsAudioStreamTrack()->SetAudioOutputVolume(mAudioOutputKey,
@@ -479,17 +479,17 @@ class HTMLMediaElement::MediaStreamRende
     }
 
     mRendering = false;
 
     if (!mGraphTimeDummy) {
       return;
     }
 
-    mWatchManager.Unwatch(mGraphTimeDummy->mStream->Graph()->CurrentTime(),
+    mWatchManager.Unwatch(mGraphTimeDummy->mTrack->Graph()->CurrentTime(),
                           &MediaStreamRenderer::UpdateGraphTime);
 
     for (const auto& t : mAudioTracks) {
       if (t) {
         t->AsAudioStreamTrack()->RemoveAudioOutput(mAudioOutputKey);
       }
     }
 
@@ -553,18 +553,17 @@ class HTMLMediaElement::MediaStreamRende
     mVideoTrack = nullptr;
   }
 
   double CurrentTime() const {
     if (!mGraphTimeDummy) {
       return 0.0;
     }
 
-    return mGraphTimeDummy->mStream->GraphImpl()->MediaTimeToSeconds(
-        mGraphTime);
+    return mGraphTimeDummy->mTrack->GraphImpl()->MediaTimeToSeconds(mGraphTime);
   }
 
   Watchable<GraphTime>& CurrentGraphTime() { return mGraphTime; }
 
   // Set if we're rendering video.
   const RefPtr<VideoFrameContainer> mVideoContainer;
 
   // Set if we're rendering audio, nullptr otherwise.
@@ -585,35 +584,35 @@ class HTMLMediaElement::MediaStreamRende
     MOZ_DIAGNOSTIC_ASSERT(!mVideoTrack);
   }
 
   void EnsureGraphTimeDummy() {
     if (mGraphTimeDummy) {
       return;
     }
 
-    MediaStreamGraph* graph = nullptr;
+    MediaTrackGraph* graph = nullptr;
     for (const auto& t : mAudioTracks) {
       if (t && !t->Ended()) {
         graph = t->Graph();
         break;
       }
     }
 
     if (!graph && mVideoTrack && !mVideoTrack->Ended()) {
       graph = mVideoTrack->Graph();
     }
 
     if (!graph) {
       return;
     }
 
     // This dummy keeps `graph` alive and ensures access to it.
-    mGraphTimeDummy = MakeRefPtr<SharedDummyStream>(
-        graph->CreateSourceStream(MediaSegment::AUDIO));
+    mGraphTimeDummy = MakeRefPtr<SharedDummyTrack>(
+        graph->CreateSourceTrack(MediaSegment::AUDIO));
 
     if (mRendering) {
       mGraphTimeOffset = Some(graph->CurrentTime() - mGraphTime);
       mWatchManager.Watch(graph->CurrentTime(),
                           &MediaStreamRenderer::UpdateGraphTime);
     }
   }
 
@@ -622,20 +621,20 @@ class HTMLMediaElement::MediaStreamRende
   bool mRendering = false;
 
   // The audio output volume for all audio tracks.
   float mAudioOutputVolume = 1.0f;
 
   // WatchManager for mGraphTime.
   WatchManager<MediaStreamRenderer> mWatchManager;
 
-  // A dummy MediaStream to guarantee a MediaStreamGraph is kept alive while
+  // A dummy MediaTrack to guarantee a MediaTrackGraph is kept alive while
   // we're actively rendering, so we can track the graph's current time. Set
   // when the first track is added, never unset.
-  RefPtr<SharedDummyStream> mGraphTimeDummy;
+  RefPtr<SharedDummyTrack> mGraphTimeDummy;
 
   // Watchable that relays the graph's currentTime updates to the media element
   // only while we're rendering. This is the current time of the rendering in
   // GraphTime units.
   Watchable<GraphTime> mGraphTime = {0, "MediaStreamRenderer::mGraphTime"};
 
   // Nothing until a track has been added. Then, the current GraphTime at the
   // time when we were last Start()ed.
@@ -652,45 +651,45 @@ class HTMLMediaElement::StreamCaptureTra
     : public MediaStreamTrackSource,
       public MediaStreamTrackSource::Sink {
  public:
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(StreamCaptureTrackSource,
                                            MediaStreamTrackSource)
 
   StreamCaptureTrackSource(MediaStreamTrackSource* aCapturedTrackSource,
-                           ProcessedMediaStream* aStream, MediaInputPort* aPort)
+                           ProcessedMediaTrack* aStream, MediaInputPort* aPort)
       : MediaStreamTrackSource(aCapturedTrackSource->GetPrincipal(),
                                nsString()),
         mCapturedTrackSource(aCapturedTrackSource),
-        mStream(aStream),
+        mTrack(aStream),
         mPort(aPort) {
     MOZ_ASSERT(mCapturedTrackSource);
-    MOZ_ASSERT(mStream);
+    MOZ_ASSERT(mTrack);
     MOZ_ASSERT(mPort);
 
     mCapturedTrackSource->RegisterSink(this);
   }
 
   void SetEnabled(bool aEnabled) {
-    if (!mStream) {
+    if (!mTrack) {
       return;
     }
-    mStream->SetEnabled(aEnabled ? DisabledTrackMode::ENABLED
-                                 : DisabledTrackMode::SILENCE_FREEZE);
+    mTrack->SetEnabled(aEnabled ? DisabledTrackMode::ENABLED
+                                : DisabledTrackMode::SILENCE_FREEZE);
   }
 
   void Destroy() override {
     if (mCapturedTrackSource) {
       mCapturedTrackSource->UnregisterSink(this);
       mCapturedTrackSource = nullptr;
     }
-    if (mStream) {
-      mStream->Destroy();
-      mStream = nullptr;
+    if (mTrack) {
+      mTrack->Destroy();
+      mTrack = nullptr;
     }
     if (mPort) {
       mPort->Destroy();
       mPort = nullptr;
     }
   }
 
   MediaSourceEnum GetMediaSource() const override {
@@ -741,22 +740,22 @@ class HTMLMediaElement::StreamCaptureTra
 
     Destroy();
     MediaStreamTrackSource::OverrideEnded();
   }
 
  private:
   virtual ~StreamCaptureTrackSource() {
     MOZ_ASSERT(!mCapturedTrackSource);
-    MOZ_ASSERT(!mStream);
+    MOZ_ASSERT(!mTrack);
     MOZ_ASSERT(!mPort);
   };
 
   RefPtr<MediaStreamTrackSource> mCapturedTrackSource;
-  RefPtr<ProcessedMediaStream> mStream;
+  RefPtr<ProcessedMediaTrack> mTrack;
   RefPtr<MediaInputPort> mPort;
 };
 
 NS_IMPL_ADDREF_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
                          MediaStreamTrackSource)
 NS_IMPL_RELEASE_INHERITED(HTMLMediaElement::StreamCaptureTrackSource,
                           MediaStreamTrackSource)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(
@@ -1076,29 +1075,29 @@ class HTMLMediaElement::AudioChannelAgen
                  "this = %p, Error : unknown suspended type!\n",
                  this));
     }
     return NS_OK;
   }
 
   NS_IMETHODIMP WindowAudioCaptureChanged(bool aCapture) override {
     MOZ_ASSERT(mAudioChannelAgent);
-    AudioCaptureStreamChangeIfNeeded();
+    AudioCaptureTrackChangeIfNeeded();
     return NS_OK;
   }
 
-  void AudioCaptureStreamChangeIfNeeded() {
+  void AudioCaptureTrackChangeIfNeeded() {
     MOZ_ASSERT(!mIsShutDown);
     if (!IsPlayingStarted()) {
       return;
     }
 
     MOZ_ASSERT(mAudioChannelAgent);
     bool isCapturing = mAudioChannelAgent->IsWindowAudioCapturingEnabled();
-    mOwner->AudioCaptureStreamChange(isCapturing);
+    mOwner->AudioCaptureTrackChange(isCapturing);
   }
 
   void NotifyAudioPlaybackChanged(AudibleChangedReasons aReason) {
     MOZ_ASSERT(!mIsShutDown);
     if (!IsPlayingStarted()) {
       return;
     }
 
@@ -1170,18 +1169,18 @@ class HTMLMediaElement::AudioChannelAgen
   }
 
   void StopAudioChanelAgent() {
     MOZ_ASSERT(mAudioChannelAgent);
     MOZ_ASSERT(mAudioChannelAgent->IsPlayingStarted());
     mAudioChannelAgent->NotifyStoppedPlaying();
     NotifyMediaStopped(mAudioChannelAgent->WindowID());
     // If we have started audio capturing before, we have to tell media element
-    // to clear the output capturing stream.
-    mOwner->AudioCaptureStreamChange(false);
+    // to clear the output capturing track.
+    mOwner->AudioCaptureTrackChange(false);
   }
 
   void SetSuspended(SuspendTypes aSuspend) {
     if (mSuspended == aSuspend) {
       return;
     }
 
     MaybeNotifyMediaResumed(aSuspend);
@@ -1899,17 +1898,17 @@ void HTMLMediaElement::AbortExistingLoad
   // We need to remove FirstFrameListener before VideoTracks get emptied.
   if (mFirstFrameListener) {
     mSelectedVideoStreamTrack->RemoveVideoOutput(mFirstFrameListener);
     mFirstFrameListener = nullptr;
   }
 
   // When aborting the existing loads, empty the objects in audio track list and
   // video track list, no events (in particular, no removetrack events) are
-  // fired as part of this. Ending MediaStream sends track ended notifications,
+  // fired as part of this. Ending MediaTrack sends track ended notifications,
   // so we empty the track lists prior.
   if (AudioTracks()) {
     AudioTracks()->EmptyTracks();
   }
   if (VideoTracks()) {
     VideoTracks()->EmptyTracks();
   }
 
@@ -2260,17 +2259,17 @@ void HTMLMediaElement::NotifyLoadError(c
   } else if (mSourceLoadCandidate) {
     DispatchAsyncSourceError(mSourceLoadCandidate);
     QueueLoadFromSourceTask();
   } else {
     NS_WARNING("Should know the source we were loading from!");
   }
 }
 
-void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack) {
+void HTMLMediaElement::NotifyMediaTrackEnabled(dom::MediaTrack* aTrack) {
   MOZ_ASSERT(aTrack);
   if (!aTrack) {
     return;
   }
 #ifdef DEBUG
   nsString id;
   aTrack->GetId(id);
 
@@ -2332,17 +2331,17 @@ void HTMLMediaElement::NotifyMediaTrackE
         // If the output stream is for audio only we ignore video tracks.
         continue;
       }
       AddCaptureMediaTrackToOutputStream(aTrack, ms);
     }
   }
 }
 
-void HTMLMediaElement::NotifyMediaTrackDisabled(MediaTrack* aTrack) {
+void HTMLMediaElement::NotifyMediaTrackDisabled(dom::MediaTrack* aTrack) {
   MOZ_ASSERT(aTrack);
   if (!aTrack) {
     return;
   }
 #ifdef DEBUG
   nsString id;
   aTrack->GetId(id);
 
@@ -2402,17 +2401,17 @@ void HTMLMediaElement::NotifyMediaTrackD
       continue;
     }
     MOZ_ASSERT(ms.mCapturingMediaStream);
     for (int32_t i = ms.mTracks.Length() - 1; i >= 0; --i) {
       if (ms.mTracks[i].first() != aTrack->GetId()) {
         continue;
       }
       // The source of this track just ended. Force-notify that it ended.
-      // If we bounce it to the MediaStreamGraph it might not be picked up,
+      // If we bounce it to the MediaTrackGraph it might not be picked up,
       // for instance if the MediaInputPort was destroyed in the same
       // iteration as it was added.
       mMainThreadEventTarget->Dispatch(NewRunnableMethod(
           "StreamCaptureTrackSource::OverrideEnded",
           static_cast<StreamCaptureTrackSource*>(ms.mTracks[i].second().get()),
           &StreamCaptureTrackSource::OverrideEnded));
 
       ms.mTracks.RemoveElementAt(i);
@@ -3156,17 +3155,18 @@ void HTMLMediaElement::SetCapturedOutput
       LOG(LogLevel::Debug, ("%s track %p for captured MediaStream %p",
                             aEnabled ? "Enabled" : "Disabled",
                             pair.second().get(), ms.mStream.get()));
     }
   }
 }
 
 void HTMLMediaElement::AddCaptureMediaTrackToOutputStream(
-    MediaTrack* aTrack, OutputMediaStream& aOutputStream, bool aAsyncAddtrack) {
+    dom::MediaTrack* aTrack, OutputMediaStream& aOutputStream,
+    bool aAsyncAddtrack) {
   if (aOutputStream.mCapturingDecoder) {
     MOZ_ASSERT(!aOutputStream.mCapturingMediaStream);
     return;
   }
   aOutputStream.mCapturingMediaStream = true;
 
   if (aOutputStream.mStream == mSrcStream) {
     // Cycle detected. This can happen since tracks are added async.
@@ -3190,49 +3190,49 @@ void HTMLMediaElement::AddCaptureMediaTr
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     return;
   }
 
   MediaSegment::Type type = inputTrack->AsAudioStreamTrack()
                                 ? MediaSegment::AUDIO
                                 : MediaSegment::VIDEO;
-  ProcessedMediaStream* stream =
-      inputTrack->Graph()->CreateTrackUnionStream(type);
-  RefPtr<MediaInputPort> port = inputTrack->ForwardTrackContentsTo(stream);
+  ProcessedMediaTrack* track =
+      inputTrack->Graph()->CreateForwardedInputTrack(type);
+  RefPtr<MediaInputPort> port = inputTrack->ForwardTrackContentsTo(track);
   auto source = MakeRefPtr<StreamCaptureTrackSource>(&inputTrack->GetSource(),
-                                                     stream, port);
+                                                     track, port);
 
   // Track is muted initially, so we don't leak data if it's added while paused
-  // and an MSG iteration passes before the mute comes into effect.
+  // and an MTG iteration passes before the mute comes into effect.
   source->SetEnabled(mSrcStreamIsPlaying);
 
-  RefPtr<MediaStreamTrack> track;
+  RefPtr<MediaStreamTrack> domTrack;
   if (inputTrack->AsAudioStreamTrack()) {
-    track = new AudioStreamTrack(window, stream, source);
+    domTrack = new AudioStreamTrack(window, track, source);
   } else {
-    track = new VideoStreamTrack(window, stream, source);
+    domTrack = new VideoStreamTrack(window, track, source);
   }
 
   aOutputStream.mTracks.AppendElement(
       Pair<nsString, RefPtr<MediaStreamTrackSource>>(aTrack->GetId(),
                                                      source.get()));
 
   if (aAsyncAddtrack) {
     mMainThreadEventTarget->Dispatch(
         NewRunnableMethod<StoreRefPtrPassByPtr<MediaStreamTrack>>(
             "DOMMediaStream::AddTrackInternal", aOutputStream.mStream,
-            &DOMMediaStream::AddTrackInternal, track));
+            &DOMMediaStream::AddTrackInternal, domTrack));
   } else {
-    aOutputStream.mStream->AddTrackInternal(track);
+    aOutputStream.mStream->AddTrackInternal(domTrack);
   }
 
   LOG(LogLevel::Debug,
       ("Created %s track %p from track %p through MediaInputPort %p",
-       inputTrack->AsAudioStreamTrack() ? "audio" : "video", track.get(),
+       inputTrack->AsAudioStreamTrack() ? "audio" : "video", domTrack.get(),
        inputTrack, port.get()));
 }
 
 void HTMLMediaElement::DiscardFinishWhenEndedOutputStreams() {
   // Discard all output streams that have finished now.
   for (int32_t i = mOutputStreams.Length() - 1; i >= 0; --i) {
     if (!mOutputStreams[i].mFinishWhenEnded) {
       continue;
@@ -3260,36 +3260,35 @@ bool HTMLMediaElement::CanBeCaptured(Str
       ContainsRestrictedContent()) {
     return false;
   }
   return true;
 }
 
 already_AddRefed<DOMMediaStream> HTMLMediaElement::CaptureStreamInternal(
     StreamCaptureBehavior aFinishBehavior, StreamCaptureType aStreamCaptureType,
-    MediaStreamGraph* aGraph) {
+    MediaTrackGraph* aGraph) {
   MOZ_RELEASE_ASSERT(aGraph);
   MOZ_ASSERT(CanBeCaptured(aStreamCaptureType));
 
   MarkAsContentSource(CallerAPI::CAPTURE_STREAM);
   MarkAsTainted();
 
   // We don't support routing to a different graph.
   if (!mOutputStreams.IsEmpty() &&
-      aGraph !=
-          mOutputStreams[0].mGraphKeepAliveDummyStream->mStream->Graph()) {
+      aGraph != mOutputStreams[0].mGraphKeepAliveDummyStream->mTrack->Graph()) {
     return nullptr;
   }
 
   OutputMediaStream* out = mOutputStreams.AppendElement();
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   out->mGraphKeepAliveDummyStream =
       mOutputStreams.Length() == 1
-          ? MakeRefPtr<SharedDummyStream>(
-                aGraph->CreateSourceStream(MediaSegment::AUDIO))
+          ? MakeRefPtr<SharedDummyTrack>(
+                aGraph->CreateSourceTrack(MediaSegment::AUDIO))
           : mOutputStreams[0].mGraphKeepAliveDummyStream;
   out->mStream = MakeAndAddRef<DOMMediaStream>(window);
   out->mStream->SetFinishedOnInactive(false);
   out->mFinishWhenEnded =
       aFinishBehavior == StreamCaptureBehavior::FINISH_WHEN_ENDED;
   out->mCapturingAudioOnly =
       aStreamCaptureType == StreamCaptureType::CAPTURE_AUDIO;
 
@@ -3340,17 +3339,17 @@ already_AddRefed<DOMMediaStream> HTMLMed
       }
     }
   }
   RefPtr<DOMMediaStream> result = out->mStream;
   return result.forget();
 }
 
 already_AddRefed<DOMMediaStream> HTMLMediaElement::CaptureAudio(
-    ErrorResult& aRv, MediaStreamGraph* aGraph) {
+    ErrorResult& aRv, MediaTrackGraph* aGraph) {
   MOZ_RELEASE_ASSERT(aGraph);
 
   if (!CanBeCaptured(StreamCaptureType::CAPTURE_AUDIO)) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   RefPtr<DOMMediaStream> stream =
@@ -3373,64 +3372,64 @@ RefPtr<GenericNonExclusivePromise> HTMLM
     return GenericNonExclusivePromise::CreateAndResolve(true, __func__);
   }
   AUTOPLAY_LOG("create allow-to-play promise for MediaElement %p", this);
   return mAllowedToPlayPromise.Ensure(__func__);
 }
 
 already_AddRefed<DOMMediaStream> HTMLMediaElement::MozCaptureStream(
     ErrorResult& aRv) {
-  MediaStreamGraph::GraphDriverType graphDriverType =
-      HasAudio() ? MediaStreamGraph::AUDIO_THREAD_DRIVER
-                 : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
+  MediaTrackGraph::GraphDriverType graphDriverType =
+      HasAudio() ? MediaTrackGraph::AUDIO_THREAD_DRIVER
+                 : MediaTrackGraph::SYSTEM_THREAD_DRIVER;
 
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   if (!CanBeCaptured(StreamCaptureType::CAPTURE_ALL_TRACKS)) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-      graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+  MediaTrackGraph* graph = MediaTrackGraph::GetInstance(
+      graphDriverType, window, MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
 
   RefPtr<DOMMediaStream> stream =
       CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED,
                             StreamCaptureType::CAPTURE_ALL_TRACKS, graph);
   if (!stream) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   return stream.forget();
 }
 
 already_AddRefed<DOMMediaStream> HTMLMediaElement::MozCaptureStreamUntilEnded(
     ErrorResult& aRv) {
-  MediaStreamGraph::GraphDriverType graphDriverType =
-      HasAudio() ? MediaStreamGraph::AUDIO_THREAD_DRIVER
-                 : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
+  MediaTrackGraph::GraphDriverType graphDriverType =
+      HasAudio() ? MediaTrackGraph::AUDIO_THREAD_DRIVER
+                 : MediaTrackGraph::SYSTEM_THREAD_DRIVER;
 
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
   if (!CanBeCaptured(StreamCaptureType::CAPTURE_ALL_TRACKS)) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
 
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-      graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+  MediaTrackGraph* graph = MediaTrackGraph::GetInstance(
+      graphDriverType, window, MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
 
   RefPtr<DOMMediaStream> stream =
       CaptureStreamInternal(StreamCaptureBehavior::FINISH_WHEN_ENDED,
                             StreamCaptureType::CAPTURE_ALL_TRACKS, graph);
   if (!stream) {
     aRv.Throw(NS_ERROR_FAILURE);
     return nullptr;
   }
@@ -4847,17 +4846,17 @@ void HTMLMediaElement::UpdateSrcMediaStr
         mSelectedVideoStreamTrack->AddVideoOutput(mFirstFrameListener);
       }
     }
 
     SetCapturedOutputStreamsEnabled(false);  // Mute
   }
 }
 
-void HTMLMediaElement::UpdateSrcStreamTime() {
+void HTMLMediaElement::UpdateSrcTrackTime() {
   MOZ_ASSERT(NS_IsMainThread());
 
   if (mSrcStreamPlaybackEnded) {
     // We do a separate FireTimeUpdate() when this is set.
     return;
   }
 
   FireTimeUpdate(true);
@@ -4872,17 +4871,17 @@ void HTMLMediaElement::SetupSrcMediaStre
   nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     return;
   }
 
   mMediaStreamRenderer = MakeAndAddRef<MediaStreamRenderer>(
       mAbstractMainThread, GetVideoFrameContainer(), this);
   mWatchManager.Watch(mMediaStreamRenderer->CurrentGraphTime(),
-                      &HTMLMediaElement::UpdateSrcStreamTime);
+                      &HTMLMediaElement::UpdateSrcTrackTime);
   SetVolumeInternal();
 
   UpdateSrcMediaStreamPlaying();
   mSrcStreamVideoPrincipal = NodePrincipal();
 
   // If we pause this media element, track changes in the underlying stream
   // will continue to fire events at this element and alter its track list.
   // That's simpler than delaying the events, but probably confusing...
@@ -4912,17 +4911,17 @@ void HTMLMediaElement::EndSrcMediaStream
   if (mFirstFrameListener) {
     mSelectedVideoStreamTrack->RemoveVideoOutput(mFirstFrameListener);
   }
   mSelectedVideoStreamTrack = nullptr;
   mFirstFrameListener = nullptr;
 
   if (mMediaStreamRenderer) {
     mWatchManager.Unwatch(mMediaStreamRenderer->CurrentGraphTime(),
-                          &HTMLMediaElement::UpdateSrcStreamTime);
+                          &HTMLMediaElement::UpdateSrcTrackTime);
     mMediaStreamRenderer = nullptr;
   }
 
   mSrcStream->UnregisterTrackListener(mMediaStreamTrackListener.get());
   mMediaStreamTrackListener = nullptr;
   mSrcStreamTracksAvailable = false;
   mSrcStreamPlaybackEnded = false;
   mSrcStreamVideoPrincipal = nullptr;
@@ -4991,17 +4990,17 @@ void HTMLMediaElement::NotifyMediaStream
     MOZ_DIAGNOSTIC_ASSERT(VideoTracks(), "Element can't have been unlinked");
     RefPtr<VideoTrack> videoTrack =
         CreateVideoTrack(t, VideoTracks()->GetOwnerGlobal());
     VideoTracks()->AddTrack(videoTrack);
     // New MediaStreamTrack added, set the new added video track as selected
     // video track when there is no selected track.
     if (VideoTracks()->SelectedIndex() == -1) {
       MOZ_ASSERT(!mSelectedVideoStreamTrack);
-      videoTrack->SetEnabledInternal(true, MediaTrack::FIRE_NO_EVENTS);
+      videoTrack->SetEnabledInternal(true, dom::MediaTrack::FIRE_NO_EVENTS);
     }
   }
 
   UpdateReadyStateInternal();
 
   if (!mSrcStreamTracksAvailable) {
     mAbstractMainThread->Dispatch(NS_NewRunnableFunction(
         "HTMLMediaElement::NotifyMediaStreamTrackAdded->FirstFrameLoaded",
@@ -5029,19 +5028,19 @@ void HTMLMediaElement::NotifyMediaStream
   aTrack->GetId(id);
 
   LOG(LogLevel::Debug, ("%p, Removing %sTrack with id %s", this,
                         aTrack->AsAudioStreamTrack() ? "Audio" : "Video",
                         NS_ConvertUTF16toUTF8(id).get()));
 
   MOZ_DIAGNOSTIC_ASSERT(AudioTracks() && VideoTracks(),
                         "Element can't have been unlinked");
-  if (MediaTrack* t = AudioTracks()->GetTrackById(id)) {
+  if (dom::MediaTrack* t = AudioTracks()->GetTrackById(id)) {
     AudioTracks()->RemoveTrack(t);
-  } else if (MediaTrack* t = VideoTracks()->GetTrackById(id)) {
+  } else if (dom::MediaTrack* t = VideoTracks()->GetTrackById(id)) {
     VideoTracks()->RemoveTrack(t);
   } else {
     NS_ASSERTION(aTrack->AsVideoStreamTrack() && !IsVideo(),
                  "MediaStreamTrack ended but did not exist in track lists. "
                  "This is only allowed if a video element ends and we are an "
                  "audio element.");
     return;
   }
@@ -7023,39 +7022,39 @@ bool HTMLMediaElement::ShouldElementBePa
 
 void HTMLMediaElement::SetMediaInfo(const MediaInfo& aInfo) {
   const bool oldHasAudio = mMediaInfo.HasAudio();
   mMediaInfo = aInfo;
   if ((aInfo.HasAudio() != oldHasAudio) && mResumeDelayedPlaybackAgent) {
     mResumeDelayedPlaybackAgent->UpdateAudibleState(this, IsAudible());
   }
   if (mAudioChannelWrapper) {
-    mAudioChannelWrapper->AudioCaptureStreamChangeIfNeeded();
+    mAudioChannelWrapper->AudioCaptureTrackChangeIfNeeded();
   }
   UpdateWakeLock();
 }
 
-void HTMLMediaElement::AudioCaptureStreamChange(bool aCapture) {
+void HTMLMediaElement::AudioCaptureTrackChange(bool aCapture) {
   // No need to capture a silent media element.
   if (!HasAudio()) {
     return;
   }
 
   if (aCapture && !mStreamWindowCapturer) {
     nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow();
     if (!window) {
       return;
     }
 
-    MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
-        MediaStreamGraph::AUDIO_THREAD_DRIVER, window,
-        MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+    MediaTrackGraph* mtg = MediaTrackGraph::GetInstance(
+        MediaTrackGraph::AUDIO_THREAD_DRIVER, window,
+        MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
     RefPtr<DOMMediaStream> stream =
         CaptureStreamInternal(StreamCaptureBehavior::CONTINUE_WHEN_ENDED,
-                              StreamCaptureType::CAPTURE_AUDIO, msg);
+                              StreamCaptureType::CAPTURE_AUDIO, mtg);
     mStreamWindowCapturer =
         MakeUnique<MediaStreamWindowCapturer>(stream, window->WindowID());
   } else if (!aCapture && mStreamWindowCapturer) {
     for (size_t i = 0; i < mOutputStreams.Length(); i++) {
       if (mOutputStreams[i].mStream == mStreamWindowCapturer->mStream) {
         if (mOutputStreams[i].mCapturingDecoder && mDecoder) {
           mDecoder->RemoveOutputStream(mOutputStreams[i].mStream);
         }
@@ -7350,17 +7349,17 @@ already_AddRefed<Promise> HTMLMediaEleme
                       return SinkInfoPromise::CreateAndResolve(aInfo, __func__);
                     }
                     return SinkInfoPromise::CreateAndReject(
                         aValue.RejectValue(), __func__);
                   });
               return p;
             }
             if (self->mSrcAttrStream) {
-              // Set Sink Id through MSG is not supported yet.
+              // Setting Sink Id through MTG is not supported yet.
               return SinkInfoPromise::CreateAndReject(NS_ERROR_ABORT, __func__);
             }
             // No media attached to the element save it for later.
             return SinkInfoPromise::CreateAndResolve(aInfo, __func__);
           },
           [](nsresult res) {
             // Promise is rejected, sink not found.
             return SinkInfoPromise::CreateAndReject(res, __func__);
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -48,20 +48,20 @@ namespace mozilla {
 class AbstractThread;
 class ChannelMediaDecoder;
 class DecoderDoctorDiagnostics;
 class DOMMediaStream;
 class ErrorResult;
 class MediaResource;
 class MediaDecoder;
 class MediaInputPort;
-class MediaStream;
-class MediaStreamGraph;
+class MediaTrack;
+class MediaTrackGraph;
 class MediaStreamWindowCapturer;
-struct SharedDummyStream;
+struct SharedDummyTrack;
 class VideoFrameContainer;
 namespace dom {
 class MediaKeys;
 class TextTrack;
 class TimeRanges;
 class WakeLock;
 class MediaStreamTrack;
 class MediaStreamTrackSource;
@@ -104,17 +104,16 @@ class HTMLMediaElement : public nsGeneri
                          public MediaDecoderOwner,
                          public PrincipalChangeObserver<MediaStreamTrack>,
                          public SupportsWeakPtr<HTMLMediaElement>,
                          public nsStubMutationObserver {
  public:
   typedef mozilla::TimeStamp TimeStamp;
   typedef mozilla::layers::ImageContainer ImageContainer;
   typedef mozilla::VideoFrameContainer VideoFrameContainer;
-  typedef mozilla::MediaStream MediaStream;
   typedef mozilla::MediaResource MediaResource;
   typedef mozilla::MediaDecoderOwner MediaDecoderOwner;
   typedef mozilla::MetadataTags MetadataTags;
 
   MOZ_DECLARE_WEAKREFERENCE_TYPENAME(HTMLMediaElement)
   NS_DECL_NSIMUTATIONOBSERVER_CONTENTREMOVED
 
   CORSMode GetCORSMode() { return mCORSMode; }
@@ -330,23 +329,23 @@ class HTMLMediaElement : public nsGeneri
    * whether it's appropriate to fire an error event.
    */
   void NotifyLoadError(const nsACString& aErrorDetails = nsCString());
 
   /**
    * Called by one of our associated MediaTrackLists (audio/video) when an
    * AudioTrack is enabled or a VideoTrack is selected.
    */
-  void NotifyMediaTrackEnabled(MediaTrack* aTrack);
+  void NotifyMediaTrackEnabled(dom::MediaTrack* aTrack);
 
   /**
    * Called by one of our associated MediaTrackLists (audio/video) when an
    * AudioTrack is disabled or a VideoTrack is unselected.
    */
-  void NotifyMediaTrackDisabled(MediaTrack* aTrack);
+  void NotifyMediaTrackDisabled(dom::MediaTrack* aTrack);
 
   /**
    * Returns the current load ID. Asynchronous events store the ID that was
    * current when they were enqueued, and if it has changed when they come to
    * fire, they consider themselves cancelled, and don't fire.
    */
   uint32_t GetCurrentLoadID() { return mCurrentLoadID; }
 
@@ -608,17 +607,17 @@ class HTMLMediaElement : public nsGeneri
   // in the URL bar of the browser window.
   already_AddRefed<nsIPrincipal> GetTopLevelPrincipal();
 
   bool ContainsRestrictedContent();
 
   void NotifyWaitingForKey() override;
 
   already_AddRefed<DOMMediaStream> CaptureAudio(ErrorResult& aRv,
-                                                MediaStreamGraph* aGraph);
+                                                MediaTrackGraph* aGraph);
 
   already_AddRefed<DOMMediaStream> MozCaptureStream(ErrorResult& aRv);
 
   already_AddRefed<DOMMediaStream> MozCaptureStreamUntilEnded(ErrorResult& aRv);
 
   bool MozAudioCaptured() const { return mAudioCaptured; }
 
   void MozGetMetadata(JSContext* aCx, JS::MutableHandle<JSObject*> aResult,
@@ -750,17 +749,17 @@ class HTMLMediaElement : public nsGeneri
   struct OutputMediaStream {
     OutputMediaStream();
     ~OutputMediaStream();
 
     RefPtr<DOMMediaStream> mStream;
     // Dummy stream to keep mGraph from shutting down when MediaDecoder shuts
     // down. Shared across all OutputMediaStreams as one stream is enough to
     // keep the graph alive.
-    RefPtr<SharedDummyStream> mGraphKeepAliveDummyStream;
+    RefPtr<SharedDummyTrack> mGraphKeepAliveDummyStream;
     bool mFinishWhenEnded;
     bool mCapturingAudioOnly;
     bool mCapturingDecoder;
     bool mCapturingMediaStream;
 
     // The following members are keeping state for a captured MediaStream.
     nsTArray<Pair<nsString, RefPtr<MediaStreamTrackSource>>> mTracks;
   };
@@ -825,17 +824,17 @@ class HTMLMediaElement : public nsGeneri
    */
   enum { REMOVING_SRC_STREAM = 0x1 };
   void UpdateSrcMediaStreamPlaying(uint32_t aFlags = 0);
 
   /**
    * mSrcStream's graph's CurrentTime() has been updated. It might be time to
    * fire "timeupdate".
    */
-  void UpdateSrcStreamTime();
+  void UpdateSrcTrackTime();
 
   /**
    * Called by our DOMMediaStream::TrackListener when a new MediaStreamTrack has
    * been added to the playback stream of |mSrcStream|.
    */
   void NotifyMediaStreamTrackAdded(const RefPtr<MediaStreamTrack>& aTrack);
 
   /**
@@ -853,17 +852,17 @@ class HTMLMediaElement : public nsGeneri
    */
   void SetCapturedOutputStreamsEnabled(bool aEnabled);
 
   /**
    * Create a new MediaStreamTrack for aTrack and add it to the DOMMediaStream
    * in aOutputStream. This automatically sets the output track to enabled or
    * disabled depending on our current playing state.
    */
-  void AddCaptureMediaTrackToOutputStream(MediaTrack* aTrack,
+  void AddCaptureMediaTrackToOutputStream(dom::MediaTrack* aTrack,
                                           OutputMediaStream& aOutputStream,
                                           bool aAsyncAddtrack = true);
 
   /**
    * Discard all output streams that are flagged to finish when playback ends.
    */
   void DiscardFinishWhenEndedOutputStreams();
 
@@ -875,17 +874,17 @@ class HTMLMediaElement : public nsGeneri
    * The stream will never finish.
    *
    * When aType is CAPTURE_AUDIO, we stop playout of audio and instead route it
    * to the DOMMediaStream. Volume and mute state will be applied to the audio
    * reaching the stream. No video tracks will be captured in this case.
    */
   already_AddRefed<DOMMediaStream> CaptureStreamInternal(
       StreamCaptureBehavior aBehavior, StreamCaptureType aType,
-      MediaStreamGraph* aGraph);
+      MediaTrackGraph* aGraph);
 
   /**
    * Initialize a decoder as a clone of an existing decoder in another
    * element.
    * mLoadingSrc must already be set.
    */
   nsresult InitializeDecoderAsClone(ChannelMediaDecoder* aOriginal);
 
@@ -1188,17 +1187,17 @@ class HTMLMediaElement : public nsGeneri
   // Recomputes ready state and fires events as necessary based on current
   // state.
   void UpdateReadyStateInternal();
 
   // Determine if the element should be paused because of suspend conditions.
   bool ShouldElementBePaused();
 
   // Create or destroy the captured stream.
-  void AudioCaptureStreamChange(bool aCapture);
+  void AudioCaptureTrackChange(bool aCapture);
 
   // A method to check whether the media element is allowed to start playback.
   bool AudioChannelAgentBlockedPlay();
 
   // If the network state is empty and then we would trigger DoLoad().
   void MaybeDoLoad();
 
   // Anything we need to check after played success and not related with spec.
@@ -1300,25 +1299,24 @@ class HTMLMediaElement : public nsGeneri
 
   // The DocGroup-specific AbstractThread::MainThread() of this HTML element.
   RefPtr<AbstractThread> mAbstractMainThread;
 
   // A reference to the VideoFrameContainer which contains the current frame
   // of video to display.
   RefPtr<VideoFrameContainer> mVideoFrameContainer;
 
-  // Holds a reference to the DOM wrapper for the MediaStream that has been
-  // set in the src attribute.
+  // Holds a reference to the MediaStream that has been set in the src
+  // attribute.
   RefPtr<DOMMediaStream> mSrcAttrStream;
 
   // Holds the triggering principal for the src attribute.
   nsCOMPtr<nsIPrincipal> mSrcAttrTriggeringPrincipal;
 
-  // Holds a reference to the DOM wrapper for the MediaStream that we're
-  // actually playing.
+  // Holds a reference to the MediaStream that we're actually playing.
   // At most one of mDecoder and mSrcStream can be non-null.
   RefPtr<DOMMediaStream> mSrcStream;
 
   // The MediaStreamRenderer handles rendering of our selected video track, and
   // enabled audio tracks, while mSrcStream is set.
   RefPtr<MediaStreamRenderer> mMediaStreamRenderer;
 
   // True once mSrcStream's initial set of tracks are known.
--- a/dom/media/AudioBufferUtils.h
+++ b/dom/media/AudioBufferUtils.h
@@ -95,20 +95,20 @@ class AudioCallbackBufferWrapper {
   }
 
   /**
    * Check that the buffer is completly filled, and reset internal state so this
    * instance can be reused.
    */
   void BufferFilled() {
     // It's okay to have exactly zero samples here, it can happen we have an
-    // audio callback driver because of a hint on MSG creation, but the
-    // AudioOutputStream has not been created yet, or if all the streams have
+    // audio callback driver because of a hint on MTG creation, but the
+    // AudioOutputStream has not been created yet, or if all the tracks have
     // finished but we're still running. Note: it's also ok if we had data in
-    // the scratch buffer - and we usually do - and all the streams were ended
+    // the scratch buffer - and we usually do - and all the tracks were ended
     // (no mixer callback occured).
     // XXX Remove this warning, or find a way to avoid it if the mixer callback
     // isn't called.
     NS_WARNING_ASSERTION(
         Available() == 0 || mSampleWriteOffset == 0,
         "Audio Buffer is not full by the end of the callback.");
     // Make sure the data returned is always set and not random!
     if (Available()) {
@@ -130,17 +130,17 @@ class AudioCallbackBufferWrapper {
   /* The position at which new samples should be written. We want to return to
    * the audio callback iff this is equal to mSamples. */
   uint32_t mSampleWriteOffset;
   uint32_t mChannels;
 };
 
 /**
  * This is a class that interfaces with the AudioCallbackBufferWrapper, and is
- * responsible for storing the excess of data produced by the MediaStreamGraph
+ * responsible for storing the excess of data produced by the MediaTrackGraph
  * because of different rounding constraints, to be used the next time the audio
  * backend calls back.
  */
 template <typename T, uint32_t BLOCK_SIZE>
 class SpillBuffer {
  public:
   SpillBuffer() : mBuffer(nullptr), mPosition(0), mChannels(0) {}
 
rename from dom/media/AudioCaptureStream.cpp
rename to dom/media/AudioCaptureTrack.cpp
--- a/dom/media/AudioCaptureStream.cpp
+++ b/dom/media/AudioCaptureTrack.cpp
@@ -1,115 +1,115 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaStreamGraphImpl.h"
-#include "MediaStreamListener.h"
+#include "MediaTrackGraphImpl.h"
+#include "MediaTrackListener.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/Unused.h"
 
 #include "AudioSegment.h"
 #include "mozilla/Logging.h"
 #include "mozilla/Attributes.h"
-#include "AudioCaptureStream.h"
+#include "AudioCaptureTrack.h"
 #include "ImageContainer.h"
 #include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
-#include "AudioNodeExternalInputStream.h"
+#include "AudioNodeTrack.h"
+#include "AudioNodeExternalInputTrack.h"
 #include "webaudio/MediaStreamAudioDestinationNode.h"
 #include <algorithm>
 #include "DOMMediaStream.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 
 namespace mozilla {
 
 // We are mixing to mono until PeerConnection can accept stereo
 static const uint32_t MONO = 1;
 
-AudioCaptureStream::AudioCaptureStream(TrackRate aRate)
-    : ProcessedMediaStream(aRate, MediaSegment::AUDIO, new AudioSegment()),
+AudioCaptureTrack::AudioCaptureTrack(TrackRate aRate)
+    : ProcessedMediaTrack(aRate, MediaSegment::AUDIO, new AudioSegment()),
       mStarted(false) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_COUNT_CTOR(AudioCaptureStream);
+  MOZ_COUNT_CTOR(AudioCaptureTrack);
   mMixer.AddCallback(this);
 }
 
-AudioCaptureStream::~AudioCaptureStream() {
-  MOZ_COUNT_DTOR(AudioCaptureStream);
+AudioCaptureTrack::~AudioCaptureTrack() {
+  MOZ_COUNT_DTOR(AudioCaptureTrack);
   mMixer.RemoveCallback(this);
 }
 
-void AudioCaptureStream::Start() {
+void AudioCaptureTrack::Start() {
   class Message : public ControlMessage {
    public:
-    explicit Message(AudioCaptureStream* aStream)
-        : ControlMessage(aStream), mStream(aStream) {}
+    explicit Message(AudioCaptureTrack* aTrack)
+        : ControlMessage(aTrack), mTrack(aTrack) {}
 
-    virtual void Run() { mStream->mStarted = true; }
+    virtual void Run() { mTrack->mStarted = true; }
 
    protected:
-    AudioCaptureStream* mStream;
+    AudioCaptureTrack* mTrack;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this));
 }
 
-void AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
-                                      uint32_t aFlags) {
+void AudioCaptureTrack::ProcessInput(GraphTime aFrom, GraphTime aTo,
+                                     uint32_t aFlags) {
   if (!mStarted) {
     return;
   }
 
   uint32_t inputCount = mInputs.Length();
 
   if (mEnded) {
     return;
   }
 
-  // If the captured stream is connected back to a object on the page (be it an
-  // HTMLMediaElement with a stream as source, or an AudioContext), a cycle
+  // If the captured track is connected back to an object on the page (be it an
+  // HTMLMediaElement with a track as source, or an AudioContext), a cycle
   // situation occur. This can work if it's an AudioContext with at least one
-  // DelayNode, but the MSG will mute the whole cycle otherwise.
+  // DelayNode, but the MTG will mute the whole cycle otherwise.
   if (InMutedCycle() || inputCount == 0) {
     GetData<AudioSegment>()->AppendNullData(aTo - aFrom);
   } else {
     // We mix down all the tracks of all inputs, to a stereo track. Everything
     // is {up,down}-mixed to stereo.
     mMixer.StartMixing();
     AudioSegment output;
     for (uint32_t i = 0; i < inputCount; i++) {
-      MediaStream* s = mInputs[i]->GetSource();
+      MediaTrack* s = mInputs[i]->GetSource();
       AudioSegment* inputSegment = s->GetData<AudioSegment>();
-      StreamTime inputStart = s->GraphTimeToStreamTimeWithBlocking(aFrom);
-      StreamTime inputEnd = s->GraphTimeToStreamTimeWithBlocking(aTo);
+      TrackTime inputStart = s->GraphTimeToTrackTimeWithBlocking(aFrom);
+      TrackTime inputEnd = s->GraphTimeToTrackTimeWithBlocking(aTo);
       AudioSegment toMix;
       if (s->Ended() && inputSegment->GetDuration() <= inputStart) {
         toMix.AppendNullData(aTo - aFrom);
       } else {
         toMix.AppendSlice(*inputSegment, inputStart, inputEnd);
-        // Care for streams blocked in the [aTo, aFrom] range.
+        // Care for tracks blocked in the [aFrom, aTo] range.
         if (inputEnd - inputStart < aTo - aFrom) {
           toMix.AppendNullData((aTo - aFrom) - (inputEnd - inputStart));
         }
       }
       toMix.Mix(mMixer, MONO, Graph()->GraphRate());
     }
     // This calls MixerCallback below
     mMixer.FinishMixing();
   }
 }
 
-void AudioCaptureStream::MixerCallback(AudioDataValue* aMixedBuffer,
-                                       AudioSampleFormat aFormat,
-                                       uint32_t aChannels, uint32_t aFrames,
-                                       uint32_t aSampleRate) {
+void AudioCaptureTrack::MixerCallback(AudioDataValue* aMixedBuffer,
+                                      AudioSampleFormat aFormat,
+                                      uint32_t aChannels, uint32_t aFrames,
+                                      uint32_t aSampleRate) {
   AutoTArray<nsTArray<AudioDataValue>, MONO> output;
   AutoTArray<const AudioDataValue*, MONO> bufferPtrs;
   output.SetLength(MONO);
   bufferPtrs.SetLength(MONO);
 
   uint32_t written = 0;
   // We need to copy here, because the mixer will reuse the storage, we should
   // not hold onto it. Buffers are in planar format.
rename from dom/media/AudioCaptureStream.h
rename to dom/media/AudioCaptureTrack.h
--- a/dom/media/AudioCaptureStream.h
+++ b/dom/media/AudioCaptureTrack.h
@@ -1,41 +1,41 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifndef MOZILLA_AUDIOCAPTURESTREAM_H_
-#define MOZILLA_AUDIOCAPTURESTREAM_H_
+#ifndef MOZILLA_AUDIOCAPTURETRACK_H_
+#define MOZILLA_AUDIOCAPTURETRACK_H_
 
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 #include "AudioMixer.h"
 #include <algorithm>
 
 namespace mozilla {
 
 class AbstractThread;
 class DOMMediaStream;
 
 /**
- * See MediaStreamGraph::CreateAudioCaptureStream.
+ * See MediaTrackGraph::CreateAudioCaptureTrack.
  */
-class AudioCaptureStream : public ProcessedMediaStream,
-                           public MixerCallbackReceiver {
+class AudioCaptureTrack : public ProcessedMediaTrack,
+                          public MixerCallbackReceiver {
  public:
-  explicit AudioCaptureStream(TrackRate aRate);
-  virtual ~AudioCaptureStream();
+  explicit AudioCaptureTrack(TrackRate aRate);
+  virtual ~AudioCaptureTrack();
 
   void Start();
 
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
  protected:
   void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
                      uint32_t aChannels, uint32_t aFrames,
                      uint32_t aSampleRate) override;
   AudioMixer mMixer;
   bool mStarted;
   bool mTrackCreated;
 };
 }  // namespace mozilla
 
-#endif /* MOZILLA_AUDIOCAPTURESTREAM_H_ */
+#endif /* MOZILLA_AUDIOCAPTURETRACK_H_ */
--- a/dom/media/AudioSampleFormat.h
+++ b/dom/media/AudioSampleFormat.h
@@ -7,17 +7,17 @@
 #define MOZILLA_AUDIOSAMPLEFORMAT_H_
 
 #include "mozilla/Assertions.h"
 #include <algorithm>
 
 namespace mozilla {
 
 /**
- * Audio formats supported in MediaStreams and media elements.
+ * Audio formats supported in MediaTracks and media elements.
  *
  * Only one of these is supported by AudioStream, and that is determined
  * at compile time (roughly, FLOAT32 on desktops, S16 on mobile). Media decoders
  * produce that format only; queued AudioData always uses that format.
  */
 enum AudioSampleFormat {
   // Silence: format will be chosen later
   AUDIO_FORMAT_SILENCE,
--- a/dom/media/AudioSegment.h
+++ b/dom/media/AudioSegment.h
@@ -144,30 +144,30 @@ void DownmixAndInterleave(const nsTArray
  * pointers so it can represent a subinterval of a buffer without copying.
  * An AudioChunk can store its individual channels anywhere; it maintains
  * separate pointers to each channel's buffer.
  */
 struct AudioChunk {
   typedef mozilla::AudioSampleFormat SampleFormat;
 
   // Generic methods
-  void SliceTo(StreamTime aStart, StreamTime aEnd) {
+  void SliceTo(TrackTime aStart, TrackTime aEnd) {
     MOZ_ASSERT(aStart >= 0 && aStart < aEnd && aEnd <= mDuration,
                "Slice out of bounds");
     if (mBuffer) {
       MOZ_ASSERT(aStart < INT32_MAX,
                  "Can't slice beyond 32-bit sample lengths");
       for (uint32_t channel = 0; channel < mChannelData.Length(); ++channel) {
         mChannelData[channel] = AddAudioSampleOffset(
             mChannelData[channel], mBufferFormat, int32_t(aStart));
       }
     }
     mDuration = aEnd - aStart;
   }
-  StreamTime GetDuration() const { return mDuration; }
+  TrackTime GetDuration() const { return mDuration; }
   bool CanCombineWithFollowing(const AudioChunk& aOther) const {
     if (aOther.mBuffer != mBuffer) {
       return false;
     }
     if (!mBuffer) {
       return true;
     }
     if (aOther.mVolume != mVolume) {
@@ -188,17 +188,17 @@ struct AudioChunk {
           AddAudioSampleOffset(mChannelData[channel], mBufferFormat,
                                int32_t(mDuration))) {
         return false;
       }
     }
     return true;
   }
   bool IsNull() const { return mBuffer == nullptr; }
-  void SetNull(StreamTime aDuration) {
+  void SetNull(TrackTime aDuration) {
     mBuffer = nullptr;
     mChannelData.Clear();
     mDuration = aDuration;
     mVolume = 1.0f;
     mBufferFormat = AUDIO_FORMAT_SILENCE;
     mPrincipalHandle = PRINCIPAL_HANDLE_NONE;
   }
 
@@ -257,17 +257,17 @@ struct AudioChunk {
   T* ChannelDataForWrite(size_t aChannel) {
     MOZ_ASSERT(AudioSampleTypeToFormat<T>::Format == mBufferFormat);
     MOZ_ASSERT(!mBuffer->IsShared());
     return static_cast<T*>(const_cast<void*>(mChannelData[aChannel]));
   }
 
   const PrincipalHandle& GetPrincipalHandle() const { return mPrincipalHandle; }
 
-  StreamTime mDuration = 0;            // in frames within the buffer
+  TrackTime mDuration = 0;             // in frames within the buffer
   RefPtr<ThreadSharedObject> mBuffer;  // the buffer object whose lifetime is
                                        // managed; null means data is all zeroes
   // one pointer per channel; empty if and only if mBuffer is null
   AutoTArray<const void*, GUESS_AUDIO_CHANNELS> mChannelData;
   float mVolume = 1.0f;  // volume multiplier to apply
   // format of frames in mBuffer (or silence if mBuffer is null)
   SampleFormat mBufferFormat = AUDIO_FORMAT_SILENCE;
   // principalHandle for the data in this chunk.
--- a/dom/media/AudioStreamTrack.cpp
+++ b/dom/media/AudioStreamTrack.cpp
@@ -1,49 +1,49 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioStreamTrack.h"
 
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 #include "nsContentUtils.h"
 
 namespace mozilla {
 namespace dom {
 
 void AudioStreamTrack::AddAudioOutput(void* aKey) {
   if (Ended()) {
     return;
   }
-  mStream->AddAudioOutput(aKey);
+  mTrack->AddAudioOutput(aKey);
 }
 
 void AudioStreamTrack::RemoveAudioOutput(void* aKey) {
   if (Ended()) {
     return;
   }
-  mStream->RemoveAudioOutput(aKey);
+  mTrack->RemoveAudioOutput(aKey);
 }
 
 void AudioStreamTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
   if (Ended()) {
     return;
   }
-  mStream->SetAudioOutputVolume(aKey, aVolume);
+  mTrack->SetAudioOutputVolume(aKey, aVolume);
 }
 
 void AudioStreamTrack::GetLabel(nsAString& aLabel, CallerType aCallerType) {
   if (nsContentUtils::ResistFingerprinting(aCallerType)) {
     aLabel.AssignLiteral("Internal Microphone");
     return;
   }
   MediaStreamTrack::GetLabel(aLabel, aCallerType);
 }
 
 already_AddRefed<MediaStreamTrack> AudioStreamTrack::CloneInternal() {
-  return do_AddRef(new AudioStreamTrack(mWindow, mInputStream, mSource,
+  return do_AddRef(new AudioStreamTrack(mWindow, mInputTrack, mSource,
                                         ReadyState(), mConstraints));
 }
 
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/media/AudioStreamTrack.h
+++ b/dom/media/AudioStreamTrack.h
@@ -10,21 +10,21 @@
 #include "DOMMediaStream.h"
 
 namespace mozilla {
 namespace dom {
 
 class AudioStreamTrack : public MediaStreamTrack {
  public:
   AudioStreamTrack(
-      nsPIDOMWindowInner* aWindow, MediaStream* aInputStream,
+      nsPIDOMWindowInner* aWindow, mozilla::MediaTrack* aInputTrack,
       MediaStreamTrackSource* aSource,
       MediaStreamTrackState aReadyState = MediaStreamTrackState::Live,
       const MediaTrackConstraints& aConstraints = MediaTrackConstraints())
-      : MediaStreamTrack(aWindow, aInputStream, aSource, aReadyState,
+      : MediaStreamTrack(aWindow, aInputTrack, aSource, aReadyState,
                          aConstraints) {}
 
   AudioStreamTrack* AsAudioStreamTrack() override { return this; }
   const AudioStreamTrack* AsAudioStreamTrack() const override { return this; }
 
   void AddAudioOutput(void* aKey);
   void RemoveAudioOutput(void* aKey);
   void SetAudioOutputVolume(void* aKey, float aVolume);
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -2,32 +2,32 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "CanvasCaptureMediaStream.h"
 
 #include "DOMMediaStream.h"
 #include "ImageContainer.h"
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 #include "Tracing.h"
 #include "VideoSegment.h"
 #include "gfxPlatform.h"
 #include "mozilla/Atomics.h"
 #include "mozilla/dom/CanvasCaptureMediaStreamBinding.h"
 #include "mozilla/gfx/2D.h"
 #include "nsContentUtils.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::gfx;
 
 namespace mozilla {
 namespace dom {
 
-OutputStreamDriver::OutputStreamDriver(SourceMediaStream* aSourceStream,
+OutputStreamDriver::OutputStreamDriver(SourceMediaTrack* aSourceStream,
                                        const PrincipalHandle& aPrincipalHandle)
     : FrameCaptureListener(),
       mSourceStream(aSourceStream),
       mPrincipalHandle(aPrincipalHandle) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mSourceStream);
 
   // All CanvasCaptureMediaStreams shall at least get one frame.
@@ -45,29 +45,29 @@ void OutputStreamDriver::EndTrack() {
     mSourceStream->Destroy();
   }
 }
 
 void OutputStreamDriver::SetImage(const RefPtr<layers::Image>& aImage,
                                   const TimeStamp& aTime) {
   MOZ_ASSERT(NS_IsMainThread());
 
-  TRACE_COMMENT("SourceMediaStream %p", mSourceStream.get());
+  TRACE_COMMENT("SourceMediaTrack %p", mSourceStream.get());
 
   VideoSegment segment;
   segment.AppendFrame(do_AddRef(aImage), aImage->GetSize(), mPrincipalHandle,
                       false, aTime);
   mSourceStream->AppendData(&segment);
 }
 
 // ----------------------------------------------------------------------
 
 class TimerDriver : public OutputStreamDriver {
  public:
-  explicit TimerDriver(SourceMediaStream* aSourceStream, const double& aFPS,
+  explicit TimerDriver(SourceMediaTrack* aSourceStream, const double& aFPS,
                        const PrincipalHandle& aPrincipalHandle)
       : OutputStreamDriver(aSourceStream, aPrincipalHandle),
         mFPS(aFPS),
         mTimer(nullptr) {
     if (mFPS == 0.0) {
       return;
     }
 
@@ -109,17 +109,17 @@ class TimerDriver : public OutputStreamD
   const double mFPS;
   nsCOMPtr<nsITimer> mTimer;
 };
 
 // ----------------------------------------------------------------------
 
 class AutoDriver : public OutputStreamDriver {
  public:
-  explicit AutoDriver(SourceMediaStream* aSourceStream,
+  explicit AutoDriver(SourceMediaTrack* aSourceStream,
                       const PrincipalHandle& aPrincipalHandle)
       : OutputStreamDriver(aSourceStream, aPrincipalHandle) {}
 
   void NewFrame(already_AddRefed<Image> aImage,
                 const TimeStamp& aTime) override {
     // Don't reset `mFrameCaptureRequested` since AutoDriver shall always have
     // `mFrameCaptureRequested` set to true.
     // This also means we should accept every frame as NewFrame is called only
@@ -162,20 +162,20 @@ JSObject* CanvasCaptureMediaStream::Wrap
 void CanvasCaptureMediaStream::RequestFrame() {
   if (mOutputStreamDriver) {
     mOutputStreamDriver->RequestFrameCapture();
   }
 }
 
 nsresult CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
                                         nsIPrincipal* aPrincipal) {
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstance(
-      MediaStreamGraph::SYSTEM_THREAD_DRIVER, mWindow,
-      MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
-  SourceMediaStream* source = graph->CreateSourceStream(MediaSegment::VIDEO);
+  MediaTrackGraph* graph = MediaTrackGraph::GetInstance(
+      MediaTrackGraph::SYSTEM_THREAD_DRIVER, mWindow,
+      MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+  SourceMediaTrack* source = graph->CreateSourceTrack(MediaSegment::VIDEO);
   PrincipalHandle principalHandle = MakePrincipalHandle(aPrincipal);
   if (!aFPS.WasPassed()) {
     mOutputStreamDriver = new AutoDriver(source, principalHandle);
   } else if (aFPS.Value() < 0) {
     return NS_ERROR_ILLEGAL_VALUE;
   } else {
     // Cap frame rate to 60 FPS for sanity
     double fps = std::min(60.0, aFPS.Value());
@@ -193,17 +193,17 @@ void CanvasCaptureMediaStream::StopCaptu
     return;
   }
 
   mOutputStreamDriver->EndTrack();
   mOutputStreamDriver->Forget();
   mOutputStreamDriver = nullptr;
 }
 
-SourceMediaStream* CanvasCaptureMediaStream::GetSourceStream() const {
+SourceMediaTrack* CanvasCaptureMediaStream::GetSourceStream() const {
   if (!mOutputStreamDriver) {
     return nullptr;
   }
   return mOutputStreamDriver->mSourceStream;
 }
 
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/media/CanvasCaptureMediaStream.h
+++ b/dom/media/CanvasCaptureMediaStream.h
@@ -9,17 +9,17 @@
 #include "DOMMediaStream.h"
 #include "mozilla/dom/HTMLCanvasElement.h"
 #include "PrincipalHandle.h"
 
 class nsIPrincipal;
 
 namespace mozilla {
 class DOMMediaStream;
-class SourceMediaStream;
+class SourceMediaTrack;
 
 namespace layers {
 class Image;
 }  // namespace layers
 
 namespace dom {
 class CanvasCaptureMediaStream;
 class HTMLCanvasElement;
@@ -42,54 +42,54 @@ class OutputStreamFrameListener;
  * |        | ------------------------> |   OutputStreamDriver   |
  * | Canvas |  SetFrameCapture()        | (FrameCaptureListener) |
  * |________| ------------------------> |________________________|
  *                                                  |
  *                                                  | SetImage() -
  *                                                  | AppendToTrack()
  *                                                  |
  *                                                  v
- *                                      ___________________________
- *                                     |                           |
- *                                     |  MSG / SourceMediaStream  |
- *                                     |___________________________|
+ *                                      __________________________
+ *                                     |                          |
+ *                                     |  MTG / SourceMediaTrack  |
+ *                                     |__________________________|
  * ----------------------------------------------------------------------------
  */
 
 /*
  * Base class for drivers of the output stream.
  * It is up to each sub class to implement the NewFrame() callback of
  * FrameCaptureListener.
  */
 class OutputStreamDriver : public FrameCaptureListener {
  public:
-  OutputStreamDriver(SourceMediaStream* aSourceStream,
+  OutputStreamDriver(SourceMediaTrack* aSourceStream,
                      const PrincipalHandle& aPrincipalHandle);
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(OutputStreamDriver);
 
   /*
    * Sub classes can SetImage() to update the image being appended to the
-   * output stream. It will be appended on the next NotifyPull from MSG.
+   * output stream. It will be appended on the next NotifyPull from MTG.
    */
   void SetImage(const RefPtr<layers::Image>& aImage, const TimeStamp& aTime);
 
   /*
    * Ends the track in mSourceStream when we know there won't be any more images
    * requested for it.
    */
   void EndTrack();
 
   /*
    * Makes sure any internal resources this driver is holding that may create
    * reference cycles are released.
    */
   virtual void Forget() {}
 
-  const RefPtr<SourceMediaStream> mSourceStream;
+  const RefPtr<SourceMediaTrack> mSourceStream;
   const PrincipalHandle mPrincipalHandle;
 
  protected:
   virtual ~OutputStreamDriver();
 };
 
 class CanvasCaptureMediaStream : public DOMMediaStream {
  public:
@@ -111,17 +111,17 @@ class CanvasCaptureMediaStream : public 
 
   dom::FrameCaptureListener* FrameCaptureListener();
 
   /**
    * Stops capturing for this stream at mCanvas.
    */
   void StopCapture();
 
-  SourceMediaStream* GetSourceStream() const;
+  SourceMediaTrack* GetSourceStream() const;
 
  protected:
   ~CanvasCaptureMediaStream();
 
  private:
   RefPtr<HTMLCanvasElement> mCanvas;
   RefPtr<OutputStreamDriver> mOutputStreamDriver;
 };
--- a/dom/media/CubebUtils.cpp
+++ b/dom/media/CubebUtils.cpp
@@ -38,17 +38,17 @@
 
 #define AUDIOIPC_POOL_SIZE_DEFAULT 2
 #define AUDIOIPC_STACK_SIZE_DEFAULT (64 * 4096)
 
 #define PREF_VOLUME_SCALE "media.volume_scale"
 #define PREF_CUBEB_BACKEND "media.cubeb.backend"
 #define PREF_CUBEB_OUTPUT_DEVICE "media.cubeb.output_device"
 #define PREF_CUBEB_LATENCY_PLAYBACK "media.cubeb_latency_playback_ms"
-#define PREF_CUBEB_LATENCY_MSG "media.cubeb_latency_msg_frames"
+#define PREF_CUBEB_LATENCY_MTG "media.cubeb_latency_mtg_frames"
 // Allows to get something non-default for the preferred sample-rate, to allow
 // troubleshooting in the field and testing.
 #define PREF_CUBEB_FORCE_SAMPLE_RATE "media.cubeb.force_sample_rate"
 #define PREF_CUBEB_LOGGING_LEVEL "media.cubeb.logging_level"
 // Hidden pref used by tests to force failure to obtain cubeb context
 #define PREF_CUBEB_FORCE_NULL_CONTEXT "media.cubeb.force_null_context"
 // Hidden pref to disable BMO 1427011 experiment; can be removed once proven.
 #define PREF_CUBEB_DISABLE_DEVICE_SWITCHING \
@@ -107,23 +107,23 @@ StaticMutex sMutex;
 enum class CubebState {
   Uninitialized = 0,
   Initialized,
   Shutdown
 } sCubebState = CubebState::Uninitialized;
 cubeb* sCubebContext;
 double sVolumeScale = 1.0;
 uint32_t sCubebPlaybackLatencyInMilliseconds = 100;
-uint32_t sCubebMSGLatencyInFrames = 512;
+uint32_t sCubebMTGLatencyInFrames = 512;
 // If sCubebForcedSampleRate is zero, PreferredSampleRate will return the
 // preferred sample-rate for the audio backend in use. Otherwise, it will be
 // used as the preferred sample-rate.
 uint32_t sCubebForcedSampleRate = 0;
 bool sCubebPlaybackLatencyPrefSet = false;
-bool sCubebMSGLatencyPrefSet = false;
+bool sCubebMTGLatencyPrefSet = false;
 bool sAudioStreamInitEverSucceeded = false;
 bool sCubebForceNullContext = false;
 bool sCubebDisableDeviceSwitching = true;
 #ifdef MOZ_CUBEB_REMOTING
 bool sCubebSandbox = false;
 size_t sAudioIPCPoolSize;
 size_t sAudioIPCStackSize;
 #endif
@@ -221,25 +221,25 @@ void PrefChanged(const char* aPref, void
     StaticMutexAutoLock lock(sMutex);
     // Arbitrary default stream latency of 100ms.  The higher this
     // value, the longer stream volume changes will take to become
     // audible.
     sCubebPlaybackLatencyPrefSet = Preferences::HasUserValue(aPref);
     uint32_t value = Preferences::GetUint(aPref, CUBEB_NORMAL_LATENCY_MS);
     sCubebPlaybackLatencyInMilliseconds =
         std::min<uint32_t>(std::max<uint32_t>(value, 1), 1000);
-  } else if (strcmp(aPref, PREF_CUBEB_LATENCY_MSG) == 0) {
+  } else if (strcmp(aPref, PREF_CUBEB_LATENCY_MTG) == 0) {
     StaticMutexAutoLock lock(sMutex);
-    sCubebMSGLatencyPrefSet = Preferences::HasUserValue(aPref);
+    sCubebMTGLatencyPrefSet = Preferences::HasUserValue(aPref);
     uint32_t value = Preferences::GetUint(aPref, CUBEB_NORMAL_LATENCY_FRAMES);
     // 128 is the block size for the Web Audio API, which limits how low the
     // latency can be here.
     // We don't want to limit the upper limit too much, so that people can
     // experiment.
-    sCubebMSGLatencyInFrames =
+    sCubebMTGLatencyInFrames =
         std::min<uint32_t>(std::max<uint32_t>(value, 128), 1e6);
   } else if (strcmp(aPref, PREF_CUBEB_FORCE_SAMPLE_RATE) == 0) {
     StaticMutexAutoLock lock(sMutex);
     sCubebForcedSampleRate = Preferences::GetUint(aPref);
   } else if (strcmp(aPref, PREF_CUBEB_LOGGING_LEVEL) == 0) {
     nsAutoCString value;
     Preferences::GetCString(aPref, value);
     LogModule* cubebLog = LogModule::Get("cubeb");
@@ -575,47 +575,47 @@ uint32_t GetCubebPlaybackLatencyInMillis
   return sCubebPlaybackLatencyInMilliseconds;
 }
 
 bool CubebPlaybackLatencyPrefSet() {
   StaticMutexAutoLock lock(sMutex);
   return sCubebPlaybackLatencyPrefSet;
 }
 
-bool CubebMSGLatencyPrefSet() {
+bool CubebMTGLatencyPrefSet() {
   StaticMutexAutoLock lock(sMutex);
-  return sCubebMSGLatencyPrefSet;
+  return sCubebMTGLatencyPrefSet;
 }
 
-uint32_t GetCubebMSGLatencyInFrames(cubeb_stream_params* params) {
+uint32_t GetCubebMTGLatencyInFrames(cubeb_stream_params* params) {
   StaticMutexAutoLock lock(sMutex);
-  if (sCubebMSGLatencyPrefSet) {
-    MOZ_ASSERT(sCubebMSGLatencyInFrames > 0);
-    return sCubebMSGLatencyInFrames;
+  if (sCubebMTGLatencyPrefSet) {
+    MOZ_ASSERT(sCubebMTGLatencyInFrames > 0);
+    return sCubebMTGLatencyInFrames;
   }
 
 #ifdef MOZ_WIDGET_ANDROID
   return AndroidGetAudioOutputFramesPerBuffer();
 #else
   cubeb* context = GetCubebContextUnlocked();
   if (!context) {
-    return sCubebMSGLatencyInFrames;  // default 512
+    return sCubebMTGLatencyInFrames;  // default 512
   }
   uint32_t latency_frames = 0;
   if (cubeb_get_min_latency(context, params, &latency_frames) != CUBEB_OK) {
     NS_WARNING("Could not get minimal latency from cubeb.");
-    return sCubebMSGLatencyInFrames;  // default 512
+    return sCubebMTGLatencyInFrames;  // default 512
   }
   return latency_frames;
 #endif
 }
 
 static const char* gInitCallbackPrefs[] = {
     PREF_VOLUME_SCALE,           PREF_CUBEB_OUTPUT_DEVICE,
-    PREF_CUBEB_LATENCY_PLAYBACK, PREF_CUBEB_LATENCY_MSG,
+    PREF_CUBEB_LATENCY_PLAYBACK, PREF_CUBEB_LATENCY_MTG,
     PREF_CUBEB_BACKEND,          PREF_CUBEB_FORCE_NULL_CONTEXT,
     PREF_CUBEB_SANDBOX,          PREF_AUDIOIPC_POOL_SIZE,
     PREF_AUDIOIPC_STACK_SIZE,    nullptr,
 };
 static const char* gCallbackPrefs[] = {
     PREF_CUBEB_FORCE_SAMPLE_RATE,
     // We don't want to call the callback on startup, because the pref is the
     // empty string by default ("", which means "logging disabled"). Because the
--- a/dom/media/CubebUtils.h
+++ b/dom/media/CubebUtils.h
@@ -40,17 +40,17 @@ uint32_t PreferredSampleRate();
 enum Side { Input, Output };
 
 double GetVolumeScale();
 bool GetFirstStream();
 cubeb* GetCubebContext();
 void ReportCubebStreamInitFailure(bool aIsFirstStream);
 void ReportCubebBackendUsed();
 uint32_t GetCubebPlaybackLatencyInMilliseconds();
-uint32_t GetCubebMSGLatencyInFrames(cubeb_stream_params* params);
+uint32_t GetCubebMTGLatencyInFrames(cubeb_stream_params* params);
 bool CubebLatencyPrefSet();
 void GetCurrentBackend(nsAString& aBackend);
 cubeb_stream_prefs GetDefaultStreamPrefs();
 char* GetForcedOutputDevice();
 // No-op on all platforms but Android, where it tells the device's AudioManager
 // to switch to "communication mode", which might change audio routing,
 // bluetooth communication type, etc.
 void SetInCommunication(bool aInCommunication);
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -1,22 +1,22 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "DOMMediaStream.h"
 
-#include "AudioCaptureStream.h"
+#include "AudioCaptureTrack.h"
 #include "AudioChannelAgent.h"
 #include "AudioStreamTrack.h"
 #include "Layers.h"
-#include "MediaStreamGraph.h"
-#include "MediaStreamGraphImpl.h"
-#include "MediaStreamListener.h"
+#include "MediaTrackGraph.h"
+#include "MediaTrackGraphImpl.h"
+#include "MediaTrackListener.h"
 #include "VideoStreamTrack.h"
 #include "mozilla/dom/AudioTrack.h"
 #include "mozilla/dom/AudioTrackList.h"
 #include "mozilla/dom/DocGroup.h"
 #include "mozilla/dom/HTMLCanvasElement.h"
 #include "mozilla/dom/MediaStreamBinding.h"
 #include "mozilla/dom/MediaStreamTrackEvent.h"
 #include "mozilla/dom/Promise.h"
@@ -227,35 +227,35 @@ already_AddRefed<Promise> DOMMediaStream
     return nullptr;
   }
 
   RefPtr<Promise> p = Promise::Create(go, aRv);
   if (aRv.Failed()) {
     return nullptr;
   }
 
-  MediaStreamGraph* graph = MediaStreamGraph::GetInstanceIfExists(
-      window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+  MediaTrackGraph* graph = MediaTrackGraph::GetInstanceIfExists(
+      window, MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
   if (!graph) {
     p->MaybeResolve(0);
     return p.forget();
   }
 
-  auto* graphImpl = static_cast<MediaStreamGraphImpl*>(graph);
+  auto* graphImpl = static_cast<MediaTrackGraphImpl*>(graph);
 
   class Counter : public ControlMessage {
    public:
-    Counter(MediaStreamGraphImpl* aGraph, const RefPtr<Promise>& aPromise)
+    Counter(MediaTrackGraphImpl* aGraph, const RefPtr<Promise>& aPromise)
         : ControlMessage(nullptr), mGraph(aGraph), mPromise(aPromise) {
       MOZ_ASSERT(NS_IsMainThread());
     }
 
     void Run() override {
       uint32_t streams =
-          mGraph->mStreams.Length() + mGraph->mSuspendedStreams.Length();
+          mGraph->mTracks.Length() + mGraph->mSuspendedTracks.Length();
       mGraph->DispatchToMainThreadStableState(NS_NewRunnableFunction(
           "DOMMediaStream::CountUnderlyingStreams (stable state)",
           [promise = std::move(mPromise), streams]() mutable {
             NS_DispatchToMainThread(NS_NewRunnableFunction(
                 "DOMMediaStream::CountUnderlyingStreams",
                 [promise = std::move(promise), streams]() {
                   promise->MaybeResolve(streams);
                 }));
@@ -268,17 +268,17 @@ already_AddRefed<Promise> DOMMediaStream
     void RunDuringShutdown() override {
       NS_ReleaseOnMainThreadSystemGroup(
           "DOMMediaStream::CountUnderlyingStreams::Counter::RunDuringShutdown",
           mPromise.forget());
     }
 
    private:
     // mGraph owns this Counter instance and decides its lifetime.
-    MediaStreamGraphImpl* mGraph;
+    MediaTrackGraphImpl* mGraph;
     RefPtr<Promise> mPromise;
   };
   graphImpl->AppendMessage(MakeUnique<Counter>(graphImpl, p));
 
   return p.forget();
 }
 
 void DOMMediaStream::GetId(nsAString& aID) const { aID = mID; }
@@ -322,32 +322,32 @@ void DOMMediaStream::GetVideoTracks(
 void DOMMediaStream::GetTracks(
     nsTArray<RefPtr<MediaStreamTrack>>& aTracks) const {
   for (const auto& track : mTracks) {
     aTracks.AppendElement(track);
   }
 }
 
 void DOMMediaStream::AddTrack(MediaStreamTrack& aTrack) {
-  LOG(LogLevel::Info, ("DOMMediaStream %p Adding track %p (from stream %p)",
-                       this, &aTrack, aTrack.GetStream()));
+  LOG(LogLevel::Info, ("DOMMediaStream %p Adding track %p (from track %p)",
+                       this, &aTrack, aTrack.GetTrack()));
 
   if (HasTrack(aTrack)) {
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p already contains track %p", this, &aTrack));
     return;
   }
 
   mTracks.AppendElement(&aTrack);
   NotifyTrackAdded(&aTrack);
 }
 
 void DOMMediaStream::RemoveTrack(MediaStreamTrack& aTrack) {
-  LOG(LogLevel::Info, ("DOMMediaStream %p Removing track %p (from stream %p)",
-                       this, &aTrack, aTrack.GetStream()));
+  LOG(LogLevel::Info, ("DOMMediaStream %p Removing track %p (from track %p)",
+                       this, &aTrack, aTrack.GetTrack()));
 
   if (!mTracks.RemoveElement(&aTrack)) {
     LOG(LogLevel::Debug,
         ("DOMMediaStream %p does not contain track %p", this, &aTrack));
     return;
   }
 
   if (!aTrack.Ended()) {
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -15,19 +15,16 @@
 #include "MediaTrackConstraints.h"
 #include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/RelativeTimeline.h"
 
 namespace mozilla {
 
 class AbstractThread;
 class DOMMediaStream;
-class MediaStream;
-class MediaInputPort;
-class ProcessedMediaStream;
 
 enum class BlockingMode;
 
 namespace dom {
 class HTMLCanvasElement;
 class MediaStreamTrack;
 class MediaStreamTrackSource;
 class AudioStreamTrack;
--- a/dom/media/DriftCompensation.h
+++ b/dom/media/DriftCompensation.h
@@ -14,19 +14,19 @@
 
 namespace mozilla {
 
 static LazyLogModule gDriftCompensatorLog("DriftCompensator");
 #define LOG(type, ...) MOZ_LOG(gDriftCompensatorLog, type, (__VA_ARGS__))
 
 /**
  * DriftCompensator can be used to handle drift between audio and video tracks
- * from the MediaStreamGraph.
+ * from the MediaTrackGraph.
  *
- * Drift can occur because audio is driven by a MediaStreamGraph running off an
+ * Drift can occur because audio is driven by a MediaTrackGraph running off an
  * audio callback, thus it's progressed by the clock of one the audio output
  * devices on the user's machine. Video on the other hand is always expressed in
  * wall-clock TimeStamps, i.e., it's progressed by the system clock. These
  * clocks will, over time, drift apart.
  *
  * Do not use the DriftCompensator across multiple audio tracks, as it will
  * automatically record the start time of the first audio samples, and all
  * samples for the same audio track on the same audio clock will have to be
@@ -37,17 +37,17 @@ static LazyLogModule gDriftCompensatorLo
  * - The video thread for compensating drift of video frames to match the audio
  *   clock.
  */
 class DriftCompensator {
   const RefPtr<nsIEventTarget> mVideoThread;
   const TrackRate mAudioRate;
 
   // Number of audio samples produced. Any thread.
-  Atomic<StreamTime> mAudioSamples{0};
+  Atomic<TrackTime> mAudioSamples{0};
 
   // Time the first audio samples were added. mVideoThread only.
   TimeStamp mAudioStartTime;
 
   void SetAudioStartTime(TimeStamp aTime) {
     MOZ_ASSERT(mVideoThread->IsOnCurrentThread());
     MOZ_ASSERT(mAudioStartTime.IsNull());
     mAudioStartTime = aTime;
@@ -73,32 +73,32 @@ class DriftCompensator {
         &DriftCompensator::SetAudioStartTime, aStart));
     MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
     Unused << rv;
   }
 
   /**
    * aSamples is the number of samples fed by an AudioStream.
    */
-  void NotifyAudio(StreamTime aSamples) {
+  void NotifyAudio(TrackTime aSamples) {
     MOZ_ASSERT(aSamples > 0);
     mAudioSamples += aSamples;
 
     LOG(LogLevel::Verbose,
         "DriftCompensator %p Processed another %" PRId64
         " samples; now %.3fs audio",
         this, aSamples, static_cast<double>(mAudioSamples) / mAudioRate);
   }
 
   /**
    * Drift compensates a video TimeStamp based on historical audio data.
    */
   virtual TimeStamp GetVideoTime(TimeStamp aNow, TimeStamp aTime) {
     MOZ_ASSERT(mVideoThread->IsOnCurrentThread());
-    StreamTime samples = mAudioSamples;
+    TrackTime samples = mAudioSamples;
 
     if (samples / mAudioRate < 10) {
       // We don't apply compensation for the first 10 seconds because of the
       // higher inaccuracy during this time.
       LOG(LogLevel::Debug, "DriftCompensator %p %" PRId64 "ms so far; ignoring",
           this, samples * 1000 / mAudioRate);
       return aTime;
     }
rename from dom/media/TrackUnionStream.cpp
rename to dom/media/ForwardedInputTrack.cpp
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/ForwardedInputTrack.cpp
@@ -1,239 +1,240 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaStreamGraphImpl.h"
-#include "MediaStreamListener.h"
+#include "ForwardedInputTrack.h"
+
+#include <algorithm>
+#include "AudioChannelService.h"
+#include "AudioNodeEngine.h"
+#include "AudioNodeExternalInputTrack.h"
+#include "AudioNodeTrack.h"
+#include "AudioSegment.h"
+#include "DOMMediaStream.h"
+#include "GeckoProfiler.h"
+#include "ImageContainer.h"
+#include "MediaTrackGraphImpl.h"
+#include "MediaTrackListener.h"
+#include "mozilla/Attributes.h"
+#include "mozilla/Logging.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/Unused.h"
-
-#include "AudioSegment.h"
-#include "VideoSegment.h"
 #include "nsContentUtils.h"
 #include "nsIAppShell.h"
 #include "nsIObserver.h"
 #include "nsPrintfCString.h"
 #include "nsServiceManagerUtils.h"
 #include "nsWidgetsCID.h"
 #include "prerror.h"
-#include "mozilla/Logging.h"
-#include "mozilla/Attributes.h"
-#include "TrackUnionStream.h"
-#include "ImageContainer.h"
-#include "AudioChannelService.h"
-#include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
-#include "AudioNodeExternalInputStream.h"
+#include "Tracing.h"
+#include "VideoSegment.h"
 #include "webaudio/MediaStreamAudioDestinationNode.h"
-#include <algorithm>
-#include "DOMMediaStream.h"
-#include "GeckoProfiler.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 
 namespace mozilla {
 
-#ifdef STREAM_LOG
-#  undef STREAM_LOG
+#ifdef TRACK_LOG
+#  undef TRACK_LOG
 #endif
 
-LazyLogModule gTrackUnionStreamLog("TrackUnionStream");
-#define STREAM_LOG(type, msg) MOZ_LOG(gTrackUnionStreamLog, type, msg)
+LazyLogModule gForwardedInputTrackLog("ForwardedInputTrack");
+#define TRACK_LOG(type, msg) MOZ_LOG(gForwardedInputTrackLog, type, msg)
 
-TrackUnionStream::TrackUnionStream(TrackRate aSampleRate,
-                                   MediaSegment::Type aType)
-    : ProcessedMediaStream(
+ForwardedInputTrack::ForwardedInputTrack(TrackRate aSampleRate,
+                                         MediaSegment::Type aType)
+    : ProcessedMediaTrack(
           aSampleRate, aType,
           aType == MediaSegment::AUDIO
               ? static_cast<MediaSegment*>(new AudioSegment())
               : static_cast<MediaSegment*>(new VideoSegment())) {}
 
-void TrackUnionStream::AddInput(MediaInputPort* aPort) {
+void ForwardedInputTrack::AddInput(MediaInputPort* aPort) {
   SetInput(aPort);
-  ProcessedMediaStream::AddInput(aPort);
+  ProcessedMediaTrack::AddInput(aPort);
 }
 
-void TrackUnionStream::RemoveInput(MediaInputPort* aPort) {
-  STREAM_LOG(LogLevel::Debug,
-             ("TrackUnionStream %p removing input %p", this, aPort));
+void ForwardedInputTrack::RemoveInput(MediaInputPort* aPort) {
+  TRACK_LOG(LogLevel::Debug,
+            ("ForwardedInputTrack %p removing input %p", this, aPort));
   MOZ_ASSERT(aPort == mInputPort);
-  nsTArray<RefPtr<DirectMediaStreamTrackListener>> listeners(
-      mOwnedDirectListeners);
+  nsTArray<RefPtr<DirectMediaTrackListener>> listeners(mOwnedDirectListeners);
   for (const auto& listener : listeners) {
     // Remove listeners while the entry still exists.
     RemoveDirectListenerImpl(listener);
   }
   mInputPort = nullptr;
-  ProcessedMediaStream::RemoveInput(aPort);
+  ProcessedMediaTrack::RemoveInput(aPort);
 }
 
-void TrackUnionStream::SetInput(MediaInputPort* aPort) {
+void ForwardedInputTrack::SetInput(MediaInputPort* aPort) {
   MOZ_ASSERT(aPort);
   MOZ_ASSERT(aPort->GetSource());
   MOZ_ASSERT(aPort->GetSource()->GetData<MediaSegment>());
   MOZ_ASSERT(!mInputPort);
   mInputPort = aPort;
 
   for (const auto& listener : mOwnedDirectListeners) {
-    MediaStream* source = mInputPort->GetSource();
-    STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p adding direct listener "
-                                 "%p. Forwarding to input stream %p.",
-                                 this, listener.get(), aPort->GetSource()));
+    MediaTrack* source = mInputPort->GetSource();
+    TRACK_LOG(LogLevel::Debug, ("ForwardedInputTrack %p adding direct listener "
+                                "%p. Forwarding to input track %p.",
+                                this, listener.get(), aPort->GetSource()));
     source->AddDirectListenerImpl(do_AddRef(listener));
   }
 }
 
-void TrackUnionStream::ProcessInputImpl(MediaStream* aSource,
-                                        MediaSegment* aSegment, GraphTime aFrom,
-                                        GraphTime aTo, uint32_t aFlags) {
+void ForwardedInputTrack::ProcessInputImpl(MediaTrack* aSource,
+                                           MediaSegment* aSegment, GraphTime aFrom,
+                                           GraphTime aTo, uint32_t aFlags) {
   GraphTime next;
   for (GraphTime t = aFrom; t < aTo; t = next) {
     MediaInputPort::InputInterval interval =
         MediaInputPort::GetNextInputInterval(mInputPort, t);
     interval.mEnd = std::min(interval.mEnd, aTo);
 
     const bool inputEnded =
         !aSource ||
         (aSource->Ended() &&
          aSource->GetEnd() <=
-             aSource->GraphTimeToStreamTimeWithBlocking(interval.mEnd));
+             aSource->GraphTimeToTrackTimeWithBlocking(interval.mEnd));
 
-    StreamTime ticks = interval.mEnd - interval.mStart;
+    TrackTime ticks = interval.mEnd - interval.mStart;
     next = interval.mEnd;
 
     if (interval.mStart >= interval.mEnd) {
       break;
     }
 
     if (inputEnded) {
       if (mAutoend && (aFlags & ALLOW_END)) {
         mEnded = true;
         break;
       }
       aSegment->AppendNullData(ticks);
-      STREAM_LOG(LogLevel::Verbose, ("TrackUnionStream %p appending %lld ticks "
-                                     "of null data (ended input)",
-                                     this, (long long)ticks));
+      TRACK_LOG(LogLevel::Verbose,
+                ("ForwardedInputTrack %p appending %lld ticks "
+                 "of null data (ended input)",
+                 this, (long long)ticks));
     } else if (interval.mInputIsBlocked) {
       aSegment->AppendNullData(ticks);
-      STREAM_LOG(LogLevel::Verbose, ("TrackUnionStream %p appending %lld ticks "
-                                     "of null data (blocked input)",
-                                     this, (long long)ticks));
+      TRACK_LOG(LogLevel::Verbose,
+                ("ForwardedInputTrack %p appending %lld ticks "
+                 "of null data (blocked input)",
+                 this, (long long)ticks));
     } else if (InMutedCycle()) {
       aSegment->AppendNullData(ticks);
     } else if (aSource->IsSuspended()) {
       aSegment->AppendNullData(aTo - aFrom);
     } else {
-      MOZ_ASSERT(GetEnd() == GraphTimeToStreamTimeWithBlocking(interval.mStart),
+      MOZ_ASSERT(GetEnd() == GraphTimeToTrackTimeWithBlocking(interval.mStart),
                  "Samples missing");
-      StreamTime inputStart =
-          aSource->GraphTimeToStreamTimeWithBlocking(interval.mStart);
-      StreamTime inputEnd =
-          aSource->GraphTimeToStreamTimeWithBlocking(interval.mEnd);
+      TrackTime inputStart =
+          aSource->GraphTimeToTrackTimeWithBlocking(interval.mStart);
+      TrackTime inputEnd =
+          aSource->GraphTimeToTrackTimeWithBlocking(interval.mEnd);
       aSegment->AppendSlice(*aSource->GetData<MediaSegment>(), inputStart,
                             inputEnd);
     }
     ApplyTrackDisabling(aSegment);
     for (const auto& listener : mTrackListeners) {
       listener->NotifyQueuedChanges(Graph(), GetEnd(), *aSegment);
     }
     mSegment->AppendFrom(aSegment);
   }
 }
 
-void TrackUnionStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
-                                    uint32_t aFlags) {
-  TRACE_AUDIO_CALLBACK_COMMENT("TrackUnionStream %p", this);
+void ForwardedInputTrack::ProcessInput(GraphTime aFrom, GraphTime aTo,
+                                       uint32_t aFlags) {
+  TRACE_AUDIO_CALLBACK_COMMENT("ForwardedInputTrack %p", this);
   if (mEnded) {
     return;
   }
 
   MediaInputPort* input = mInputPort;
-  MediaStream* source = input ? input->GetSource() : nullptr;
+  MediaTrack* source = input ? input->GetSource() : nullptr;
   if (mType == MediaSegment::AUDIO) {
     AudioSegment audio;
     ProcessInputImpl(source, &audio, aFrom, aTo, aFlags);
   } else if (mType == MediaSegment::VIDEO) {
     VideoSegment video;
     ProcessInputImpl(source, &video, aFrom, aTo, aFlags);
   } else {
     MOZ_CRASH("Unknown segment type");
   }
 }
 
-void TrackUnionStream::SetEnabledImpl(DisabledTrackMode aMode) {
+void ForwardedInputTrack::SetEnabledImpl(DisabledTrackMode aMode) {
   bool enabled = aMode == DisabledTrackMode::ENABLED;
-  STREAM_LOG(LogLevel::Info, ("TrackUnionStream %p was explicitly %s", this,
-                              enabled ? "enabled" : "disabled"));
-  for (DirectMediaStreamTrackListener* listener : mOwnedDirectListeners) {
+  TRACK_LOG(LogLevel::Info, ("ForwardedInputTrack %p was explicitly %s", this,
+                             enabled ? "enabled" : "disabled"));
+  for (DirectMediaTrackListener* listener : mOwnedDirectListeners) {
     DisabledTrackMode oldMode = mDisabledMode;
     bool oldEnabled = oldMode == DisabledTrackMode::ENABLED;
     if (!oldEnabled && enabled) {
-      STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p setting "
-                                   "direct listener enabled",
-                                   this));
+      TRACK_LOG(LogLevel::Debug, ("ForwardedInputTrack %p setting "
+                                  "direct listener enabled",
+                                  this));
       listener->DecreaseDisabled(oldMode);
     } else if (oldEnabled && !enabled) {
-      STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p setting "
-                                   "direct listener disabled",
-                                   this));
+      TRACK_LOG(LogLevel::Debug, ("ForwardedInputTrack %p setting "
+                                  "direct listener disabled",
+                                  this));
       listener->IncreaseDisabled(aMode);
     }
   }
-  MediaStream::SetEnabledImpl(aMode);
+  MediaTrack::SetEnabledImpl(aMode);
 }
 
-void TrackUnionStream::AddDirectListenerImpl(
-    already_AddRefed<DirectMediaStreamTrackListener> aListener) {
-  RefPtr<DirectMediaStreamTrackListener> listener = aListener;
+void ForwardedInputTrack::AddDirectListenerImpl(
+    already_AddRefed<DirectMediaTrackListener> aListener) {
+  RefPtr<DirectMediaTrackListener> listener = aListener;
   mOwnedDirectListeners.AppendElement(listener);
 
   DisabledTrackMode currentMode = mDisabledMode;
   if (currentMode != DisabledTrackMode::ENABLED) {
     listener->IncreaseDisabled(currentMode);
   }
 
   if (mInputPort) {
-    MediaStream* source = mInputPort->GetSource();
-    STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p adding direct listener "
-                                 "%p. Forwarding to input stream %p.",
-                                 this, listener.get(), source));
+    MediaTrack* source = mInputPort->GetSource();
+    TRACK_LOG(LogLevel::Debug, ("ForwardedInputTrack %p adding direct listener "
+                                "%p. Forwarding to input track %p.",
+                                this, listener.get(), source));
     source->AddDirectListenerImpl(listener.forget());
   }
 }
 
-void TrackUnionStream::RemoveDirectListenerImpl(
-    DirectMediaStreamTrackListener* aListener) {
+void ForwardedInputTrack::RemoveDirectListenerImpl(
+    DirectMediaTrackListener* aListener) {
   for (size_t i = 0; i < mOwnedDirectListeners.Length(); ++i) {
     if (mOwnedDirectListeners[i] == aListener) {
-      STREAM_LOG(
-          LogLevel::Debug,
-          ("TrackUnionStream %p removing direct listener %p", this, aListener));
+      TRACK_LOG(LogLevel::Debug,
+                ("ForwardedInputTrack %p removing direct listener %p", this,
+                 aListener));
       DisabledTrackMode currentMode = mDisabledMode;
       if (currentMode != DisabledTrackMode::ENABLED) {
         // Reset the listener's state.
         aListener->DecreaseDisabled(currentMode);
       }
       mOwnedDirectListeners.RemoveElementAt(i);
       break;
     }
   }
   if (mInputPort) {
     // Forward to the input
-    MediaStream* source = mInputPort->GetSource();
+    MediaTrack* source = mInputPort->GetSource();
     source->RemoveDirectListenerImpl(aListener);
   }
 }
 
-void TrackUnionStream::RemoveAllDirectListenersImpl() {
-  nsTArray<RefPtr<DirectMediaStreamTrackListener>> listeners(
-      mOwnedDirectListeners);
+void ForwardedInputTrack::RemoveAllDirectListenersImpl() {
+  nsTArray<RefPtr<DirectMediaTrackListener>> listeners(mOwnedDirectListeners);
   for (const auto& listener : listeners) {
     RemoveDirectListenerImpl(listener);
   }
   MOZ_DIAGNOSTIC_ASSERT(mOwnedDirectListeners.IsEmpty());
 }
 
 }  // namespace mozilla
rename from dom/media/TrackUnionStream.h
rename to dom/media/ForwardedInputTrack.h
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/ForwardedInputTrack.h
@@ -1,60 +1,59 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#ifndef MOZILLA_TRACKUNIONSTREAM_H_
-#define MOZILLA_TRACKUNIONSTREAM_H_
+#ifndef MOZILLA_FORWARDEDINPUTTRACK_H_
+#define MOZILLA_FORWARDEDINPUTTRACK_H_
 
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 #include "nsAutoPtr.h"
 #include <algorithm>
 
 namespace mozilla {
 
 /**
- * See MediaStreamGraph::CreateTrackUnionStream.
+ * See MediaTrackGraph::CreateForwardedInputTrack.
  */
-class TrackUnionStream : public ProcessedMediaStream {
+class ForwardedInputTrack : public ProcessedMediaTrack {
  public:
-  TrackUnionStream(TrackRate aSampleRate, MediaSegment::Type aType);
+  ForwardedInputTrack(TrackRate aSampleRate, MediaSegment::Type aType);
 
-  virtual TrackUnionStream* AsTrackUnionStream() override { return this; }
+  virtual ForwardedInputTrack* AsForwardedInputTrack() override { return this; }
   friend class DOMMediaStream;
 
   void AddInput(MediaInputPort* aPort) override;
   void RemoveInput(MediaInputPort* aPort) override;
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
   void SetEnabledImpl(DisabledTrackMode aMode) override;
 
-  friend class MediaStreamGraphImpl;
+  friend class MediaTrackGraphImpl;
 
  protected:
-  // Set up this stream from a specific input.
+  // Set up this track from a specific input.
   void SetInput(MediaInputPort* aPort);
 
   // MediaSegment-agnostic ProcessInput.
-  void ProcessInputImpl(MediaStream* aSource, MediaSegment* aSegment,
+  void ProcessInputImpl(MediaTrack* aSource, MediaSegment* aSegment,
                         GraphTime aFrom, GraphTime aTo, uint32_t aFlags);
 
   void AddDirectListenerImpl(
-      already_AddRefed<DirectMediaStreamTrackListener> aListener) override;
-  void RemoveDirectListenerImpl(
-      DirectMediaStreamTrackListener* aListener) override;
+      already_AddRefed<DirectMediaTrackListener> aListener) override;
+  void RemoveDirectListenerImpl(DirectMediaTrackListener* aListener) override;
   void RemoveAllDirectListenersImpl() override;
 
   // These are direct track listeners that have been added to this
-  // TrackUnionStream-track. While an input is set, these are forwarded to the
-  // input stream. We will update these when this track's disabled status
+  // ForwardedInputTrack-track. While an input is set, these are forwarded to
+  // the input track. We will update these when this track's disabled status
   // changes.
-  nsTArray<RefPtr<DirectMediaStreamTrackListener>> mOwnedDirectListeners;
+  nsTArray<RefPtr<DirectMediaTrackListener>> mOwnedDirectListeners;
 
   // Set if an input has been added, nullptr otherwise. Adding more than one
   // input is an error.
   MediaInputPort* mInputPort = nullptr;
 };
 
 }  // namespace mozilla
 
-#endif /* MOZILLA_MEDIASTREAMGRAPH_H_ */
+#endif /* MOZILLA_FORWARDEDINPUTTRACK_H_ */
--- a/dom/media/GraphDriver.cpp
+++ b/dom/media/GraphDriver.cpp
@@ -1,15 +1,15 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include <MediaStreamGraphImpl.h>
+#include <MediaTrackGraphImpl.h>
 #include "mozilla/dom/AudioContext.h"
 #include "mozilla/dom/AudioDeviceInfo.h"
 #include "mozilla/dom/WorkletThread.h"
 #include "mozilla/SharedThreadPool.h"
 #include "mozilla/ClearOnShutdown.h"
 #include "mozilla/Unused.h"
 #include "mozilla/MathAlgorithms.h"
 #include "CubebDeviceEnumerator.h"
@@ -18,25 +18,25 @@
 #ifdef MOZ_WEBRTC
 #  include "webrtc/MediaEngineWebRTC.h"
 #endif
 
 #ifdef XP_MACOSX
 #  include <sys/sysctl.h>
 #endif
 
-extern mozilla::LazyLogModule gMediaStreamGraphLog;
+extern mozilla::LazyLogModule gMediaTrackGraphLog;
 #ifdef LOG
 #  undef LOG
 #endif  // LOG
-#define LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
+#define LOG(type, msg) MOZ_LOG(gMediaTrackGraphLog, type, msg)
 
 namespace mozilla {
 
-GraphDriver::GraphDriver(MediaStreamGraphImpl* aGraphImpl)
+GraphDriver::GraphDriver(MediaTrackGraphImpl* aGraphImpl)
     : mIterationStart(0),
       mIterationEnd(0),
       mGraphImpl(aGraphImpl),
       mPreviousDriver(nullptr),
       mNextDriver(nullptr) {}
 
 void GraphDriver::SetGraphTime(GraphDriver* aPreviousDriver,
                                GraphTime aLastSwitchNextIterationStart,
@@ -139,49 +139,49 @@ void GraphDriver::SetNextDriver(GraphDri
 }
 
 void GraphDriver::SetPreviousDriver(GraphDriver* aPreviousDriver) {
   MOZ_ASSERT(OnGraphThread() || !ThreadRunning());
   GraphImpl()->GetMonitor().AssertCurrentThreadOwns();
   mPreviousDriver = aPreviousDriver;
 }
 
-ThreadedDriver::ThreadedDriver(MediaStreamGraphImpl* aGraphImpl)
+ThreadedDriver::ThreadedDriver(MediaTrackGraphImpl* aGraphImpl)
     : GraphDriver(aGraphImpl), mThreadRunning(false) {}
 
-class MediaStreamGraphShutdownThreadRunnable : public Runnable {
+class MediaTrackGraphShutdownThreadRunnable : public Runnable {
  public:
-  explicit MediaStreamGraphShutdownThreadRunnable(
+  explicit MediaTrackGraphShutdownThreadRunnable(
       already_AddRefed<nsIThread> aThread)
-      : Runnable("MediaStreamGraphShutdownThreadRunnable"), mThread(aThread) {}
+      : Runnable("MediaTrackGraphShutdownThreadRunnable"), mThread(aThread) {}
   NS_IMETHOD Run() override {
     MOZ_ASSERT(NS_IsMainThread());
     MOZ_ASSERT(mThread);
 
     mThread->Shutdown();
     mThread = nullptr;
     return NS_OK;
   }
 
  private:
   nsCOMPtr<nsIThread> mThread;
 };
 
 ThreadedDriver::~ThreadedDriver() {
   if (mThread) {
     nsCOMPtr<nsIRunnable> event =
-        new MediaStreamGraphShutdownThreadRunnable(mThread.forget());
+        new MediaTrackGraphShutdownThreadRunnable(mThread.forget());
     SystemGroup::Dispatch(TaskCategory::Other, event.forget());
   }
 }
 
-class MediaStreamGraphInitThreadRunnable : public Runnable {
+class MediaTrackGraphInitThreadRunnable : public Runnable {
  public:
-  explicit MediaStreamGraphInitThreadRunnable(ThreadedDriver* aDriver)
-      : Runnable("MediaStreamGraphInitThreadRunnable"), mDriver(aDriver) {}
+  explicit MediaTrackGraphInitThreadRunnable(ThreadedDriver* aDriver)
+      : Runnable("MediaTrackGraphInitThreadRunnable"), mDriver(aDriver) {}
   NS_IMETHOD Run() override {
     MOZ_ASSERT(!mDriver->ThreadRunning());
     LOG(LogLevel::Debug, ("Starting a new system driver for graph %p",
                           mDriver->mGraphImpl.get()));
 
     RefPtr<GraphDriver> previousDriver;
     {
       MonitorAutoLock mon(mDriver->mGraphImpl->GetMonitor());
@@ -216,39 +216,39 @@ class MediaStreamGraphInitThreadRunnable
 
 void ThreadedDriver::Start() {
   MOZ_ASSERT(!ThreadRunning());
   LOG(LogLevel::Debug,
       ("Starting thread for a SystemClockDriver  %p", mGraphImpl.get()));
   Unused << NS_WARN_IF(mThread);
   MOZ_ASSERT(!mThread);  // Ensure we haven't already started it
 
-  nsCOMPtr<nsIRunnable> event = new MediaStreamGraphInitThreadRunnable(this);
+  nsCOMPtr<nsIRunnable> event = new MediaTrackGraphInitThreadRunnable(this);
   // Note: mThread may be null during event->Run() if we pass to NewNamedThread!
   // See AudioInitTask
-  nsresult rv = NS_NewNamedThread("MediaStreamGrph", getter_AddRefs(mThread));
+  nsresult rv = NS_NewNamedThread("MediaTrackGrph", getter_AddRefs(mThread));
   if (NS_SUCCEEDED(rv)) {
     mThread->EventTarget()->Dispatch(event.forget(), NS_DISPATCH_NORMAL);
   }
 }
 
 void ThreadedDriver::Shutdown() {
   NS_ASSERTION(NS_IsMainThread(), "Must be called on main thread");
   // mGraph's thread is not running so it's OK to do whatever here
-  LOG(LogLevel::Debug, ("Stopping threads for MediaStreamGraph %p", this));
+  LOG(LogLevel::Debug, ("Stopping threads for MediaTrackGraph %p", this));
 
   if (mThread) {
     LOG(LogLevel::Debug,
         ("%p: Stopping ThreadedDriver's %p thread", GraphImpl(), this));
     mThread->Shutdown();
     mThread = nullptr;
   }
 }
 
-SystemClockDriver::SystemClockDriver(MediaStreamGraphImpl* aGraphImpl)
+SystemClockDriver::SystemClockDriver(MediaTrackGraphImpl* aGraphImpl)
     : ThreadedDriver(aGraphImpl),
       mInitialTimeStamp(TimeStamp::Now()),
       mCurrentTimeStamp(TimeStamp::Now()),
       mLastTimeStamp(TimeStamp::Now()),
       mIsFallback(false) {}
 
 SystemClockDriver::~SystemClockDriver() {}
 
@@ -315,17 +315,17 @@ void ThreadedDriver::RunThread() {
 
 MediaTime SystemClockDriver::GetIntervalForIteration() {
   TimeStamp now = TimeStamp::Now();
   MediaTime interval =
       GraphImpl()->SecondsToMediaTime((now - mCurrentTimeStamp).ToSeconds());
   mCurrentTimeStamp = now;
 
   MOZ_LOG(
-      gMediaStreamGraphLog, LogLevel::Verbose,
+      gMediaTrackGraphLog, LogLevel::Verbose,
       ("%p: Updating current time to %f (real %f, StateComputedTime() %f)",
        GraphImpl(), GraphImpl()->MediaTimeToSeconds(IterationEnd() + interval),
        (now - mInitialTimeStamp).ToSeconds(),
        GraphImpl()->MediaTimeToSeconds(StateComputedTime())));
 
   return interval;
 }
 
@@ -378,17 +378,17 @@ TimeDuration SystemClockDriver::WaitInte
   timeoutMS = std::max<int64_t>(0, std::min<int64_t>(timeoutMS, 60 * 1000));
   LOG(LogLevel::Verbose,
       ("%p: Waiting for next iteration; at %f, timeout=%f", GraphImpl(),
        (now - mInitialTimeStamp).ToSeconds(), timeoutMS / 1000.0));
 
   return TimeDuration::FromMilliseconds(timeoutMS);
 }
 
-OfflineClockDriver::OfflineClockDriver(MediaStreamGraphImpl* aGraphImpl,
+OfflineClockDriver::OfflineClockDriver(MediaTrackGraphImpl* aGraphImpl,
                                        GraphTime aSlice)
     : ThreadedDriver(aGraphImpl), mSlice(aSlice) {}
 
 OfflineClockDriver::~OfflineClockDriver() {}
 
 MediaTime OfflineClockDriver::GetIntervalForIteration() {
   return GraphImpl()->MillisecondsToMediaTime(mSlice);
 }
@@ -441,25 +441,25 @@ AsyncCubebTask::Run() {
     default:
       MOZ_CRASH("Operation not implemented.");
   }
 
   // The thread will kill itself after a bit
   return NS_OK;
 }
 
-StreamAndPromiseForOperation::StreamAndPromiseForOperation(
-    MediaStream* aStream, void* aPromise, dom::AudioContextOperation aOperation,
+TrackAndPromiseForOperation::TrackAndPromiseForOperation(
+    MediaTrack* aTrack, void* aPromise, dom::AudioContextOperation aOperation,
     dom::AudioContextOperationFlags aFlags)
-    : mStream(aStream),
+    : mTrack(aTrack),
       mPromise(aPromise),
       mOperation(aOperation),
       mFlags(aFlags) {}
 
-AudioCallbackDriver::AudioCallbackDriver(MediaStreamGraphImpl* aGraphImpl,
+AudioCallbackDriver::AudioCallbackDriver(MediaTrackGraphImpl* aGraphImpl,
                                          uint32_t aInputChannelCount,
                                          AudioInputType aAudioInputType)
     : GraphDriver(aGraphImpl),
       mOutputChannels(0),
       mSampleRate(0),
       mInputChannelCount(aInputChannelCount),
       mIterationDurationMS(MEDIA_GRAPH_TARGET_PERIOD_MS),
       mStarted(false),
@@ -584,20 +584,20 @@ bool AudioCallbackDriver::Init() {
   output.layout = CUBEB_LAYOUT_UNDEFINED;
   output.prefs = CubebUtils::GetDefaultStreamPrefs();
 #if !defined(XP_WIN)
   if (mInputDevicePreference == CUBEB_DEVICE_PREF_VOICE) {
     output.prefs |= static_cast<cubeb_stream_prefs>(CUBEB_STREAM_PREF_VOICE);
   }
 #endif
 
-  uint32_t latencyFrames = CubebUtils::GetCubebMSGLatencyInFrames(&output);
+  uint32_t latencyFrames = CubebUtils::GetCubebMTGLatencyInFrames(&output);
 
   // Macbook and MacBook air don't have enough CPU to run very low latency
-  // MediaStreamGraphs, cap the minimal latency to 512 frames int this case.
+  // MediaTrackGraphs, cap the minimal latency to 512 frames in this case.
   if (IsMacbookOrMacbookAir()) {
     latencyFrames = std::max((uint32_t)512, latencyFrames);
   }
 
   // On OSX, having a latency that is lower than 10ms is very common. It's
   // not very useful when doing voice, because all the WebRTC code deal in 10ms
   // chunks of audio.  Take the first power of two above 10ms at the current
   // rate in this case. It's probably 512, for common rates.
@@ -630,17 +630,17 @@ bool AudioCallbackDriver::Init() {
     DebugOnly<int> rv =
         cubeb_stream_set_volume(mAudioStream, CubebUtils::GetVolumeScale());
     NS_WARNING_ASSERTION(
         rv == CUBEB_OK,
         "Could not set the audio stream volume in GraphDriver.cpp");
     CubebUtils::ReportCubebBackendUsed();
   } else {
     NS_WARNING(
-        "Could not create a cubeb stream for MediaStreamGraph, falling "
+        "Could not create a cubeb stream for MediaTrackGraph, falling "
         "back to a SystemClockDriver");
     // Only report failures when we're not coming from a driver that was
     // created itself as a fallback driver because of a previous audio driver
     // failure.
     if (!mFromFallback) {
       CubebUtils::ReportCubebStreamInitFailure(firstStream);
     }
     MonitorAutoLock lock(GraphImpl()->GetMonitor());
@@ -693,28 +693,28 @@ void AudioCallbackDriver::Start() {
       new AsyncCubebTask(AsAudioCallbackDriver(), AsyncCubebOperation::INIT);
   initEvent->Dispatch();
 }
 
 bool AudioCallbackDriver::StartStream() {
   MOZ_ASSERT(!IsStarted() && OnCubebOperationThread());
   mShouldFallbackIfError = true;
   if (cubeb_stream_start(mAudioStream) != CUBEB_OK) {
-    NS_WARNING("Could not start cubeb stream for MSG.");
+    NS_WARNING("Could not start cubeb stream for MTG.");
     return false;
   }
 
   mStarted = true;
   return true;
 }
 
 void AudioCallbackDriver::Stop() {
   MOZ_ASSERT(OnCubebOperationThread());
   if (cubeb_stream_stop(mAudioStream) != CUBEB_OK) {
-    NS_WARNING("Could not stop cubeb stream for MSG.");
+    NS_WARNING("Could not stop cubeb stream for MTG.");
   }
   mStarted = false;
 }
 
 void AudioCallbackDriver::RemoveMixerCallback() {
   MOZ_ASSERT(OnGraphThread() || !ThreadRunning());
 
   if (mAddedMixer) {
@@ -1048,49 +1048,49 @@ uint32_t AudioCallbackDriver::IterationD
   MOZ_ASSERT(OnGraphThread());
   // The real fix would be to have an API in cubeb to give us the number. Short
   // of that, we approximate it here. bug 1019507
   return mIterationDurationMS;
 }
 
 bool AudioCallbackDriver::IsStarted() { return mStarted; }
 
-void AudioCallbackDriver::EnqueueStreamAndPromiseForOperation(
-    MediaStream* aStream, void* aPromise, dom::AudioContextOperation aOperation,
+void AudioCallbackDriver::EnqueueTrackAndPromiseForOperation(
+    MediaTrack* aTrack, void* aPromise, dom::AudioContextOperation aOperation,
     dom::AudioContextOperationFlags aFlags) {
   MOZ_ASSERT(OnGraphThread() || !ThreadRunning());
   MonitorAutoLock mon(mGraphImpl->GetMonitor());
   MOZ_ASSERT((aFlags | dom::AudioContextOperationFlags::SendStateChange) ||
              !aPromise);
   if (aFlags == dom::AudioContextOperationFlags::SendStateChange) {
     mPromisesForOperation.AppendElement(
-        StreamAndPromiseForOperation(aStream, aPromise, aOperation, aFlags));
+        TrackAndPromiseForOperation(aTrack, aPromise, aOperation, aFlags));
   }
 }
 
 void AudioCallbackDriver::CompleteAudioContextOperations(
     AsyncCubebOperation aOperation) {
   MOZ_ASSERT(OnCubebOperationThread());
-  AutoTArray<StreamAndPromiseForOperation, 1> array;
+  AutoTArray<TrackAndPromiseForOperation, 1> array;
 
   // We can't lock for the whole function because AudioContextOperationCompleted
   // will grab the monitor
   {
     MonitorAutoLock mon(GraphImpl()->GetMonitor());
     array.SwapElements(mPromisesForOperation);
   }
 
   for (uint32_t i = 0; i < array.Length(); i++) {
-    StreamAndPromiseForOperation& s = array[i];
+    TrackAndPromiseForOperation& s = array[i];
     if ((aOperation == AsyncCubebOperation::INIT &&
          s.mOperation == dom::AudioContextOperation::Resume) ||
         (aOperation == AsyncCubebOperation::SHUTDOWN &&
          s.mOperation != dom::AudioContextOperation::Resume)) {
       MOZ_ASSERT(s.mFlags == dom::AudioContextOperationFlags::SendStateChange);
-      GraphImpl()->AudioContextOperationCompleted(s.mStream, s.mPromise,
+      GraphImpl()->AudioContextOperationCompleted(s.mTrack, s.mPromise,
                                                   s.mOperation, s.mFlags);
       array.RemoveElementAt(i);
       i--;
     }
   }
 
   if (!array.IsEmpty()) {
     MonitorAutoLock mon(GraphImpl()->GetMonitor());
--- a/dom/media/GraphDriver.h
+++ b/dom/media/GraphDriver.h
@@ -28,101 +28,101 @@ template <>
 class nsAutoRefTraits<cubeb_stream> : public nsPointerRefTraits<cubeb_stream> {
  public:
   static void Release(cubeb_stream* aStream) { cubeb_stream_destroy(aStream); }
 };
 
 namespace mozilla {
 
 /**
- * Assume we can run an iteration of the MediaStreamGraph loop in this much time
+ * Assume we can run an iteration of the MediaTrackGraph loop in this much time
  * or less.
  * We try to run the control loop at this rate.
  */
 static const int MEDIA_GRAPH_TARGET_PERIOD_MS = 10;
 
 /**
- * Assume that we might miss our scheduled wakeup of the MediaStreamGraph by
+ * Assume that we might miss our scheduled wakeup of the MediaTrackGraph by
  * this much.
  */
 static const int SCHEDULE_SAFETY_MARGIN_MS = 10;
 
 /**
  * Try have this much audio buffered in streams and queued to the hardware.
  * The maximum delay to the end of the next control loop
  * is 2*MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS.
  * There is no point in buffering more audio than this in a stream at any
  * given time (until we add processing).
  * This is not optimal yet.
  */
 static const int AUDIO_TARGET_MS =
     2 * MEDIA_GRAPH_TARGET_PERIOD_MS + SCHEDULE_SAFETY_MARGIN_MS;
 
-class MediaStream;
-class MediaStreamGraphImpl;
+class MediaTrack;
+class MediaTrackGraphImpl;
 
 class AudioCallbackDriver;
 class OfflineClockDriver;
 class SystemClockDriver;
 
 namespace dom {
 enum class AudioContextOperation;
 }
 
 /**
  * A driver is responsible for the scheduling of the processing, the thread
- * management, and give the different clocks to a MediaStreamGraph. This is an
- * abstract base class. A MediaStreamGraph can be driven by an
+ * management, and give the different clocks to a MediaTrackGraph. This is an
+ * abstract base class. A MediaTrackGraph can be driven by an
  * OfflineClockDriver, if the graph is offline, or a SystemClockDriver, if the
  * graph is real time.
- * A MediaStreamGraph holds an owning reference to its driver.
+ * A MediaTrackGraph holds an owning reference to its driver.
  *
  * The lifetime of drivers is a complicated affair. Here are the different
  * scenarii that can happen:
  *
- * Starting a MediaStreamGraph with an AudioCallbackDriver
+ * Starting a MediaTrackGraph with an AudioCallbackDriver
  * - A new thread T is created, from the main thread.
  * - On this thread T, cubeb is initialized if needed, and a cubeb_stream is
  *   created and started
  * - The thread T posts a message to the main thread to terminate itself.
  * - The graph runs off the audio thread
  *
- * Starting a MediaStreamGraph with a SystemClockDriver:
+ * Starting a MediaTrackGraph with a SystemClockDriver:
  * - A new thread T is created from the main thread.
  * - The graph runs off this thread.
  *
  * Switching from a SystemClockDriver to an AudioCallbackDriver:
  * - A new AudioCallabackDriver is created and initialized on the graph thread
- * - At the end of the MSG iteration, the SystemClockDriver transfers its timing
+ * - At the end of the MTG iteration, the SystemClockDriver transfers its timing
  *   info and a reference to itself to the AudioCallbackDriver. It then starts
  *   the AudioCallbackDriver.
  * - When the AudioCallbackDriver starts, it checks if it has been switched from
  *   a SystemClockDriver, and if that is the case, sends a message to the main
  *   thread to shut the SystemClockDriver thread down.
  * - The graph now runs off an audio callback
  *
  * Switching from an AudioCallbackDriver to a SystemClockDriver:
  * - A new SystemClockDriver is created, and set as mNextDriver.
- * - At the end of the MSG iteration, the AudioCallbackDriver transfers its
+ * - At the end of the MTG iteration, the AudioCallbackDriver transfers its
  *   timing info and a reference to itself to the SystemClockDriver. A new
  *   SystemClockDriver is started from the current audio thread.
  * - When starting, the SystemClockDriver checks if it has been switched from an
  *   AudioCallbackDriver. If yes, it creates a new temporary thread to release
  *   the cubeb_streams. This temporary thread closes the cubeb_stream, and
  *   then dispatches a message to the main thread to be terminated.
  * - The graph now runs off a normal thread.
  *
  * Two drivers cannot run at the same time for the same graph. The thread safety
  * of the different attributes of drivers, and they access pattern is documented
  * next to the members themselves.
  *
  */
 class GraphDriver {
  public:
-  explicit GraphDriver(MediaStreamGraphImpl* aGraphImpl);
+  explicit GraphDriver(MediaTrackGraphImpl* aGraphImpl);
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(GraphDriver);
   /* For {System,Offline}ClockDriver, this waits until it's time to process
    * more data.  For AudioCallbackDriver, this is a no-op. */
   virtual void WaitForNextIteration() = 0;
   /* Wakes up the graph if it is waiting. */
   virtual void WakeUp() = 0;
   /* Start the graph, init the driver, start the thread.
@@ -173,20 +173,20 @@ class GraphDriver {
                     GraphTime aLastSwitchNextIterationEnd);
   /**
    * Call this to indicate that another iteration of the control loop is
    * required on its regular schedule. The monitor must not be held.
    * This function has to be idempotent.
    */
   void EnsureNextIteration();
 
-  MediaStreamGraphImpl* GraphImpl() const { return mGraphImpl; }
+  MediaTrackGraphImpl* GraphImpl() const { return mGraphImpl; }
 
 #ifdef DEBUG
-  // True if the current thread is driving the MSG.
+  // True if the current thread is driving the MTG.
   bool OnGraphThread();
 #endif
   // True if the current thread is the GraphDriver's thread.
   virtual bool OnThread() = 0;
   // GraphDriver's thread has started and the thread is running.
   virtual bool ThreadRunning() = 0;
 
  protected:
@@ -195,18 +195,18 @@ class GraphDriver {
   void SetNextDriver(GraphDriver* aNextDriver);
 
   // Time of the start of this graph iteration. This must be accessed while
   // having the monitor.
   GraphTime mIterationStart;
   // Time of the end of this graph iteration. This must be accessed while having
   // the monitor.
   GraphTime mIterationEnd;
-  // The MediaStreamGraphImpl associated with this driver.
-  const RefPtr<MediaStreamGraphImpl> mGraphImpl;
+  // The MediaTrackGraphImpl associated with this driver.
+  const RefPtr<MediaTrackGraphImpl> mGraphImpl;
 
   // This is non-null only when this driver has recently switched from an other
   // driver, and has not cleaned it up yet (for example because the audio stream
   // is currently calling the callback during initialization).
   //
   // This is written to when changing driver, from the previous driver's thread,
   // or a thread created for the occasion. This is read each time we need to
   // check whether we're changing driver (in Switching()), from the graph
@@ -215,35 +215,35 @@ class GraphDriver {
   RefPtr<GraphDriver> mPreviousDriver;
   // This is non-null only when this driver is going to switch to an other
   // driver at the end of this iteration.
   // This must be accessed using the {Set,Get}NextDriver methods.
   RefPtr<GraphDriver> mNextDriver;
   virtual ~GraphDriver() {}
 };
 
-class MediaStreamGraphInitThreadRunnable;
+class MediaTrackGraphInitThreadRunnable;
 
 /**
  * This class is a driver that manages its own thread.
  */
 class ThreadedDriver : public GraphDriver {
  public:
-  explicit ThreadedDriver(MediaStreamGraphImpl* aGraphImpl);
+  explicit ThreadedDriver(MediaTrackGraphImpl* aGraphImpl);
   virtual ~ThreadedDriver();
   void WaitForNextIteration() override;
   void WakeUp() override;
   void Start() override;
   void Shutdown() override;
   /**
    * Runs main control loop on the graph thread. Normally a single invocation
    * of this runs for the entire lifetime of the graph thread.
    */
   void RunThread();
-  friend class MediaStreamGraphInitThreadRunnable;
+  friend class MediaTrackGraphInitThreadRunnable;
   uint32_t IterationDuration() override { return MEDIA_GRAPH_TARGET_PERIOD_MS; }
 
   nsIThread* Thread() { return mThread; }
 
   bool OnThread() override {
     return !mThread || mThread->EventTarget()->IsOnCurrentThread();
   }
 
@@ -263,22 +263,22 @@ class ThreadedDriver : public GraphDrive
 
  private:
   // This is true if the thread is running. It is false
   // before starting the thread and after stopping it.
   Atomic<bool> mThreadRunning;
 };
 
 /**
- * A SystemClockDriver drives a MediaStreamGraph using a system clock, and waits
+ * A SystemClockDriver drives a MediaTrackGraph using a system clock, and waits
  * using a monitor, between each iteration.
  */
 class SystemClockDriver : public ThreadedDriver {
  public:
-  explicit SystemClockDriver(MediaStreamGraphImpl* aGraphImpl);
+  explicit SystemClockDriver(MediaTrackGraphImpl* aGraphImpl);
   virtual ~SystemClockDriver();
   TimeDuration WaitInterval() override;
   MediaTime GetIntervalForIteration() override;
   void MarkAsFallback();
   bool IsFallback();
   SystemClockDriver* AsSystemClockDriver() override { return this; }
 
  private:
@@ -294,32 +294,32 @@ class SystemClockDriver : public Threade
 };
 
 /**
  * An OfflineClockDriver runs the graph as fast as possible, without waiting
  * between iteration.
  */
 class OfflineClockDriver : public ThreadedDriver {
  public:
-  OfflineClockDriver(MediaStreamGraphImpl* aGraphImpl, GraphTime aSlice);
+  OfflineClockDriver(MediaTrackGraphImpl* aGraphImpl, GraphTime aSlice);
   virtual ~OfflineClockDriver();
   TimeDuration WaitInterval() override;
   MediaTime GetIntervalForIteration() override;
   OfflineClockDriver* AsOfflineClockDriver() override { return this; }
 
  private:
   // Time, in GraphTime, for each iteration
   GraphTime mSlice;
 };
 
-struct StreamAndPromiseForOperation {
-  StreamAndPromiseForOperation(MediaStream* aStream, void* aPromise,
-                               dom::AudioContextOperation aOperation,
-                               dom::AudioContextOperationFlags aFlags);
-  RefPtr<MediaStream> mStream;
+struct TrackAndPromiseForOperation {
+  TrackAndPromiseForOperation(MediaTrack* aTrack, void* aPromise,
+                              dom::AudioContextOperation aOperation,
+                              dom::AudioContextOperationFlags aFlags);
+  RefPtr<MediaTrack> mTrack;
   void* mPromise;
   dom::AudioContextOperation mOperation;
   dom::AudioContextOperationFlags mFlags;
 };
 
 enum class AsyncCubebOperation { INIT, SHUTDOWN };
 enum class AudioInputType { Unknown, Voice };
 
@@ -347,17 +347,17 @@ class AudioCallbackDriver : public Graph
                             public MixerCallbackReceiver
 #if defined(XP_WIN)
     ,
                             public audio::DeviceChangeListener
 #endif
 {
  public:
   /** If aInputChannelCount is zero, then this driver is output-only. */
-  AudioCallbackDriver(MediaStreamGraphImpl* aGraphImpl,
+  AudioCallbackDriver(MediaTrackGraphImpl* aGraphImpl,
                       uint32_t aInputChannelCount,
                       AudioInputType aAudioInputType);
   virtual ~AudioCallbackDriver();
 
   void Start() override;
   void WaitForNextIteration() override;
   void WakeUp() override;
   void Shutdown() override;
@@ -405,19 +405,18 @@ class AudioCallbackDriver : public Graph
     if (mInputDevicePreference == CUBEB_DEVICE_PREF_VOICE) {
       return AudioInputType::Voice;
     }
     return AudioInputType::Unknown;
   }
 
   /* Enqueue a promise that is going to be resolved when a specific operation
    * occurs on the cubeb stream. */
-  void EnqueueStreamAndPromiseForOperation(
-      MediaStream* aStream, void* aPromise,
-      dom::AudioContextOperation aOperation,
+  void EnqueueTrackAndPromiseForOperation(
+      MediaTrack* aTrack, void* aPromise, dom::AudioContextOperation aOperation,
       dom::AudioContextOperationFlags aFlags);
 
   std::thread::id ThreadId() { return mAudioThreadId.load(); }
 
   bool OnThread() override {
     return mAudioThreadId.load() == std::this_thread::get_id();
   }
 
@@ -455,17 +454,17 @@ class AudioCallbackDriver : public Graph
    *  the graph will try to re-open an audio stream later. */
   void FallbackToSystemClockDriver();
 
   /* This is true when the method is executed on CubebOperation thread pool. */
   bool OnCubebOperationThread() {
     return mInitShutdownThread->IsOnCurrentThreadInfallible();
   }
 
-  /* MediaStreamGraphs are always down/up mixed to output channels. */
+  /* MediaTrackGraphs are always down/up mixed to output channels. */
   uint32_t mOutputChannels;
   /* The size of this buffer comes from the fact that some audio backends can
    * call back with a number of frames lower than one block (128 frames), so we
    * need to keep at most two block in the SpillBuffer, because we always round
    * up to block boundaries during an iteration.
    * This is only ever accessed on the audio callback thread. */
   SpillBuffer<AudioDataValue, WEBAUDIO_BLOCK_SIZE * 2> mScratchBuffer;
   /* Wrapper to ensure we write exactly the number of frames we need in the
@@ -506,17 +505,17 @@ class AudioCallbackDriver : public Graph
     ~AutoInCallback();
     AudioCallbackDriver* mDriver;
   };
 
   /* Shared thread pool with up to one thread for off-main-thread
    * initialization and shutdown of the audio stream via AsyncCubebTask. */
   const RefPtr<SharedThreadPool> mInitShutdownThread;
   /* This must be accessed with the graph monitor held. */
-  AutoTArray<StreamAndPromiseForOperation, 1> mPromisesForOperation;
+  AutoTArray<TrackAndPromiseForOperation, 1> mPromisesForOperation;
   cubeb_device_pref mInputDevicePreference;
   /* This is used to signal adding the mixer callback on first run
    * of audio callback. This is atomic because it is touched from different
    * threads, the audio callback thread and the state change thread. However,
    * the order of the threads does not allow concurent access. */
   Atomic<bool> mAddedMixer;
   /* Contains the id of the audio thread for as long as the callback
    * is taking place, after that it is reseted to an invalid value. */
@@ -548,14 +547,14 @@ class AsyncCubebTask : public Runnable {
  protected:
   virtual ~AsyncCubebTask();
 
  private:
   NS_IMETHOD Run() final;
 
   RefPtr<AudioCallbackDriver> mDriver;
   AsyncCubebOperation mOperation;
-  RefPtr<MediaStreamGraphImpl> mShutdownGrip;
+  RefPtr<MediaTrackGraphImpl> mShutdownGrip;
 };
 
 }  // namespace mozilla
 
 #endif  // GRAPHDRIVER_H_
--- a/dom/media/GraphRunner.cpp
+++ b/dom/media/GraphRunner.cpp
@@ -2,33 +2,33 @@
 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at https://mozilla.org/MPL/2.0/. */
 
 #include "GraphRunner.h"
 
 #include "GraphDriver.h"
-#include "MediaStreamGraph.h"
-#include "MediaStreamGraphImpl.h"
+#include "MediaTrackGraph.h"
+#include "MediaTrackGraphImpl.h"
 #include "mozilla/dom/WorkletThread.h"
 #include "nsISupportsImpl.h"
 #include "prthread.h"
 #include "Tracing.h"
 #include "audio_thread_priority.h"
 
 namespace mozilla {
 
 static void Start(void* aArg) {
   NS_SetCurrentThreadName("GraphRunner");
   GraphRunner* th = static_cast<GraphRunner*>(aArg);
   th->Run();
 }
 
-GraphRunner::GraphRunner(MediaStreamGraphImpl* aGraph)
+GraphRunner::GraphRunner(MediaTrackGraphImpl* aGraph)
     : mMonitor("GraphRunner::mMonitor"),
       mGraph(aGraph),
       mStateEnd(0),
       mStillProcessing(true),
       mThreadState(ThreadState::Wait),
       // Note that mThread needs to be initialized last, as it may pre-empt the
       // thread running this ctor and enter Run() with uninitialized members.
       mThread(PR_CreateThread(PR_SYSTEM_THREAD, &Start, this,
--- a/dom/media/GraphRunner.h
+++ b/dom/media/GraphRunner.h
@@ -12,21 +12,21 @@
 
 #include <thread>
 
 struct PRThread;
 
 namespace mozilla {
 
 class GraphDriver;
-class MediaStreamGraphImpl;
+class MediaTrackGraphImpl;
 
 class GraphRunner {
  public:
-  explicit GraphRunner(MediaStreamGraphImpl* aGraph);
+  explicit GraphRunner(MediaTrackGraphImpl* aGraph);
   ~GraphRunner();
 
   /**
    * Marks us as shut down and signals mThread, so that it runs until the end.
    */
   void Shutdown();
 
   /**
@@ -52,19 +52,19 @@ class GraphRunner {
    */
   bool RunByGraphDriver(GraphDriver* aDriver);
 #endif
 
  private:
   // Monitor used for yielding mThread through Wait(), and scheduling mThread
   // through Signal() from a GraphDriver.
   Monitor mMonitor;
-  // The MediaStreamGraph we're running. Weakptr beecause this graph owns us and
+  // The MediaTrackGraph we're running. Weakptr beecause this graph owns us and
   // guarantees that our lifetime will not go beyond that of itself.
-  MediaStreamGraphImpl* const mGraph;
+  MediaTrackGraphImpl* const mGraph;
   // GraphTime being handed over to the graph through OneIteration. Protected by
   // mMonitor.
   GraphTime mStateEnd;
   // Reply from mGraph's OneIteration. Protected by mMonitor.
   bool mStillProcessing;
 
   enum class ThreadState {
     Wait,      // Waiting for a message.  This is the initial state.
--- a/dom/media/ImageToI420.h
+++ b/dom/media/ImageToI420.h
@@ -1,16 +1,18 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef ImageToI420Converter_h
 #define ImageToI420Converter_h
 
+#include "nsError.h"
+
 namespace mozilla {
 
 namespace layers {
 class Image;
 }  // namespace layers
 
 /**
  * Converts aImage to an I420 image and writes it to the given buffers.
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -227,17 +227,17 @@ void MediaDecoder::SetVolume(double aVol
 
 RefPtr<GenericPromise> MediaDecoder::SetSink(AudioDeviceInfo* aSink) {
   MOZ_ASSERT(NS_IsMainThread());
   AbstractThread::AutoEnter context(AbstractMainThread());
   return GetStateMachine()->InvokeSetSink(aSink);
 }
 
 void MediaDecoder::AddOutputStream(DOMMediaStream* aStream,
-                                   SharedDummyStream* aDummyStream) {
+                                   SharedDummyTrack* aDummyStream) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(mDecoderStateMachine, "Must be called after Load().");
   AbstractThread::AutoEnter context(AbstractMainThread());
   mDecoderStateMachine->EnsureOutputStreamManager(aDummyStream);
   if (mInfo) {
     mDecoderStateMachine->EnsureOutputStreamManagerHasTracks(*mInfo);
   }
   mDecoderStateMachine->AddOutputStream(aStream);
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -43,17 +43,17 @@ class MediaMemoryInfo;
 class AbstractThread;
 class DOMMediaStream;
 class DecoderBenchmark;
 class FrameStatistics;
 class VideoFrameContainer;
 class MediaFormatReader;
 class MediaDecoderStateMachine;
 struct MediaPlaybackEvent;
-struct SharedDummyStream;
+struct SharedDummyTrack;
 
 enum class Visibility : uint8_t;
 
 struct MOZ_STACK_CLASS MediaDecoderInit {
   MediaDecoderOwner* const mOwner;
   const double mVolume;
   const bool mPreservesPitch;
   const double mPlaybackRate;
@@ -156,26 +156,25 @@ class MediaDecoder : public DecoderDocto
 
   // Set the given device as the output device.
   RefPtr<GenericPromise> SetSink(AudioDeviceInfo* aSink);
 
   bool GetMinimizePreroll() const { return mMinimizePreroll; }
 
   // All MediaStream-related data is protected by mReentrantMonitor.
   // We have at most one DecodedStreamData per MediaDecoder. Its stream
-  // is used as the input for each ProcessedMediaStream created by calls to
+  // is used as the input for each ProcessedMediaTrack created by calls to
   // captureStream(UntilEnded). Seeking creates a new source stream, as does
   // replaying after the input as ended. In the latter case, the new source is
   // not connected to streams created by captureStreamUntilEnded.
 
   // Add an output stream. All decoder output will be sent to the stream.
   // The stream is initially blocked. The decoder is responsible for unblocking
   // it while it is playing back.
-  void AddOutputStream(DOMMediaStream* aStream,
-                       SharedDummyStream* aDummyStream);
+  void AddOutputStream(DOMMediaStream* aStream, SharedDummyTrack* aDummyStream);
   // Remove an output stream added with AddOutputStream.
   void RemoveOutputStream(DOMMediaStream* aStream);
 
   // Update the principal for any output streams and their tracks.
   void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
 
   // Return the duration of the video in seconds.
   virtual double GetDuration();
--- a/dom/media/MediaDecoderStateMachine.cpp
+++ b/dom/media/MediaDecoderStateMachine.cpp
@@ -26,16 +26,17 @@
 #include "nsPrintfCString.h"
 #include "nsTArray.h"
 #include "AudioSegment.h"
 #include "DOMMediaStream.h"
 #include "ImageContainer.h"
 #include "MediaDecoder.h"
 #include "MediaDecoderStateMachine.h"
 #include "MediaShutdownManager.h"
+#include "MediaTrackGraph.h"
 #include "MediaTimer.h"
 #include "ReaderProxy.h"
 #include "TimeUnits.h"
 #include "VideoSegment.h"
 #include "VideoUtils.h"
 
 namespace mozilla {
 
@@ -3773,17 +3774,17 @@ void MediaDecoderStateMachine::RemoveOut
         });
     nsresult rv = OwnerThread()->Dispatch(r.forget());
     MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
     Unused << rv;
   }
 }
 
 void MediaDecoderStateMachine::EnsureOutputStreamManager(
-    SharedDummyStream* aDummyStream) {
+    SharedDummyTrack* aDummyStream) {
   MOZ_ASSERT(NS_IsMainThread());
   if (mOutputStreamManager) {
     return;
   }
   mOutputStreamManager = new OutputStreamManager(
       aDummyStream, mOutputStreamPrincipal, mAbstractMainThread);
 }
 
@@ -3796,26 +3797,26 @@ void MediaDecoderStateMachine::EnsureOut
   if ((!aLoadedInfo.HasAudio() ||
        mOutputStreamManager->HasTrackType(MediaSegment::AUDIO)) &&
       (!aLoadedInfo.HasVideo() ||
        mOutputStreamManager->HasTrackType(MediaSegment::VIDEO))) {
     return;
   }
   if (aLoadedInfo.HasAudio()) {
     MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::AUDIO));
-    RefPtr<SourceMediaStream> dummy =
+    RefPtr<SourceMediaTrack> dummy =
         mOutputStreamManager->AddTrack(MediaSegment::AUDIO);
-    LOG("Pre-created audio track with underlying stream %p", dummy.get());
+    LOG("Pre-created audio track with underlying track %p", dummy.get());
     Unused << dummy;
   }
   if (aLoadedInfo.HasVideo()) {
     MOZ_ASSERT(!mOutputStreamManager->HasTrackType(MediaSegment::VIDEO));
-    RefPtr<SourceMediaStream> dummy =
+    RefPtr<SourceMediaTrack> dummy =
         mOutputStreamManager->AddTrack(MediaSegment::VIDEO);
-    LOG("Pre-created video track with underlying stream %p", dummy.get());
+    LOG("Pre-created video track with underlying track %p", dummy.get());
     Unused << dummy;
   }
 }
 
 class VideoQueueMemoryFunctor : public nsDequeFunctor {
  public:
   VideoQueueMemoryFunctor() : mSize(0) {}
 
--- a/dom/media/MediaDecoderStateMachine.h
+++ b/dom/media/MediaDecoderStateMachine.h
@@ -183,17 +183,17 @@ class MediaDecoderStateMachine
   // Returns the state machine task queue.
   TaskQueue* OwnerThread() const { return mTaskQueue; }
 
   RefPtr<GenericPromise> RequestDebugInfo(
       dom::MediaDecoderStateMachineDebugInfo& aInfo);
 
   void SetOutputStreamPrincipal(nsIPrincipal* aPrincipal);
   // If an OutputStreamManager does not exist, one will be created.
-  void EnsureOutputStreamManager(SharedDummyStream* aDummyStream);
+  void EnsureOutputStreamManager(SharedDummyTrack* aDummyStream);
   // If an OutputStreamManager exists, tracks matching aLoadedInfo will be
   // created unless they already exist in the manager.
   void EnsureOutputStreamManagerHasTracks(const MediaInfo& aLoadedInfo);
   // Add an output stream to the output stream manager. The manager must have
   // been created through EnsureOutputStreamManager() before this.
   void AddOutputStream(DOMMediaStream* aStream);
   // Remove an output stream added with AddOutputStream. If the last output
   // stream was removed, we will also tear down the OutputStreamManager.
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1,23 +1,23 @@
 /* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */
 /* vim: set ts=2 et sw=2 tw=80: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaManager.h"
 
-#include "AudioCaptureStream.h"
+#include "AudioCaptureTrack.h"
 #include "AudioDeviceInfo.h"
 #include "AudioStreamTrack.h"
-#include "MediaStreamGraphImpl.h"
+#include "MediaTrackGraphImpl.h"
 #include "MediaTimer.h"
 #include "mozilla/dom/MediaDeviceInfo.h"
-#include "MediaStreamListener.h"
+#include "MediaTrackListener.h"
 #include "nsArray.h"
 #include "nsContentUtils.h"
 #include "nsGlobalWindow.h"
 #include "nsHashPropertyBag.h"
 #include "nsIEventTarget.h"
 #include "nsIUUIDGenerator.h"
 #include "nsIScriptGlobalObject.h"
 #include "nsIPermissionManager.h"
@@ -290,17 +290,17 @@ void MediaManager::CallOnError(GetUserMe
 
 void MediaManager::CallOnSuccess(GetUserMediaSuccessCallback& aCallback,
                                  DOMMediaStream& aStream) {
   aCallback.Call(aStream);
 }
 
 /**
  * SourceListener has threadsafe refcounting for use across the main, media and
- * MSG threads. But it has a non-threadsafe SupportsWeakPtr for WeakPtr usage
+ * MTG threads. But it has a non-threadsafe SupportsWeakPtr for WeakPtr usage
  * only from main thread, to ensure that garbage- and cycle-collected objects
  * don't hold a reference to it during late shutdown.
  */
 class SourceListener : public SupportsWeakPtr<SourceListener> {
  public:
   typedef MozPromise<bool /* aIgnored */, RefPtr<MediaMgrError>, true>
       SourceListenerPromise;
 
@@ -311,65 +311,65 @@ class SourceListener : public SupportsWe
   SourceListener();
 
   /**
    * Registers this source listener as belonging to the given window listener.
    */
   void Register(GetUserMediaWindowListener* aListener);
 
   /**
-   * Marks this listener as active and adds itself as a listener to aStream.
+   * Marks this listener as active and creates internal device states.
    */
   void Activate(RefPtr<MediaDevice> aAudioDevice,
                 RefPtr<LocalTrackSource> aAudioTrackSource,
                 RefPtr<MediaDevice> aVideoDevice,
                 RefPtr<LocalTrackSource> aVideoTrackSource);
 
   /**
    * Posts a task to initialize and start all associated devices.
    */
   RefPtr<SourceListenerPromise> InitializeAsync();
 
   /**
-   * Stops all live tracks, finishes the associated MediaStream and cleans up
-   * the weak reference to the associated window listener.
+   * Stops all live tracks, ends the associated MediaTrack and cleans up the
+   * weak reference to the associated window listener.
    * This will also tell the window listener to remove its hard reference to
    * this SourceListener, so any caller will need to keep its own hard ref.
    */
   void Stop();
 
   /**
-   * Posts a task to stop the device associated with aStream and notifies the
+   * Posts a task to stop the device associated with aTrack and notifies the
    * associated window listener that a track was stopped.
    * Should this track be the last live one to be stopped, we'll also call Stop.
    * This might tell the window listener to remove its hard reference to this
    * SourceListener, so any caller will need to keep its own hard ref.
    */
-  void StopTrack(MediaStream* aStream);
+  void StopTrack(MediaTrack* aTrack);
 
   /**
-   * Like StopTrack with the audio device's stream.
+   * Like StopTrack with the audio device's track.
    */
   void StopAudioTrack();
 
   /**
-   * Like StopTrack with the video device's stream.
+   * Like StopTrack with the video device's track.
    */
   void StopVideoTrack();
 
   /**
    * Gets the main thread MediaTrackSettings from the MediaEngineSource
-   * associated with aStream.
+   * associated with aTrack.
    */
-  void GetSettingsFor(MediaStream* aStream,
+  void GetSettingsFor(MediaTrack* aTrack,
                       MediaTrackSettings& aOutSettings) const;
 
   /**
    * Posts a task to set the enabled state of the device associated with
-   * aStream to aEnabled and notifies the associated window listener that a
+   * aTrack to aEnabled and notifies the associated window listener that a
    * track's state has changed.
    *
    * Turning the hardware off while the device is disabled is supported for:
    * - Camera (enabled by default, controlled by pref
    *   "media.getusermedia.camera.off_while_disabled.enabled")
    * - Microphone (disabled by default, controlled by pref
    *   "media.getusermedia.microphone.off_while_disabled.enabled")
    * Screen-, app-, or windowsharing is not supported at this time.
@@ -379,17 +379,17 @@ class SourceListener : public SupportsWe
    * This is now defaulting to 3 seconds but can be overriden by prefs:
    * - "media.getusermedia.camera.off_while_disabled.delay_ms" and
    * - "media.getusermedia.microphone.off_while_disabled.delay_ms".
    *
    * The delay is in place to prevent misuse by malicious sites. If a track is
    * re-enabled before the delay has passed, the device will not be touched
    * until another disable followed by the full delay happens.
    */
-  void SetEnabledFor(MediaStream* aStream, bool aEnabled);
+  void SetEnabledFor(MediaTrack* aTrack, bool aEnabled);
 
   /**
    * Stops all screen/app/window/audioCapture sharing, but not camera or
    * microphone.
    */
   void StopSharing();
 
   MediaDevice* GetAudioDevice() const {
@@ -406,56 +406,56 @@ class SourceListener : public SupportsWe
 
   bool CapturingVideo() const;
 
   bool CapturingAudio() const;
 
   CaptureState CapturingSource(MediaSourceEnum aSource) const;
 
   RefPtr<SourceListenerPromise> ApplyConstraintsToTrack(
-      MediaStream* aStream, const MediaTrackConstraints& aConstraints,
+      MediaTrack* aTrack, const MediaTrackConstraints& aConstraints,
       CallerType aCallerType);
 
   PrincipalHandle GetPrincipalHandle() const;
 
  private:
   virtual ~SourceListener() = default;
 
   /**
-   * Returns a pointer to the device state for aStream.
+   * Returns a pointer to the device state for aTrack.
    *
    * This is intended for internal use where we need to figure out which state
-   * corresponds to aStream, not for availability checks. As such, we assert
+   * corresponds to aTrack, not for availability checks. As such, we assert
    * that the device does indeed exist.
    *
    * Since this is a raw pointer and the state lifetime depends on the
    * SourceListener's lifetime, it's internal use only.
    */
-  DeviceState& GetDeviceStateFor(MediaStream* aStream) const;
+  DeviceState& GetDeviceStateFor(MediaTrack* aTrack) const;
 
   // true after this listener has had all devices stopped. MainThread only.
   bool mStopped;
 
   // never ever indirect off this; just for assertions
   PRThread* mMainThreadCheck;
 
   // Set in Register() on main thread, then read from any thread.
   PrincipalHandle mPrincipalHandle;
 
   // Weak pointer to the window listener that owns us. MainThread only.
   GetUserMediaWindowListener* mWindowListener;
 
-  // Accessed from MediaStreamGraph thread, MediaManager thread, and MainThread
+  // Accessed from MediaTrackGraph thread, MediaManager thread, and MainThread
   // No locking needed as they're set on Activate() and never assigned to again.
   UniquePtr<DeviceState> mAudioDeviceState;
   UniquePtr<DeviceState> mVideoDeviceState;
 };
 
 /**
- * This class represents a WindowID and handles all MediaStreamTrackListeners
+ * This class represents a WindowID and handles all MediaTrackListeners
  * (here subclassed as SourceListeners) used to feed GetUserMedia source
  * streams. It proxies feedback from them into messages for browser chrome.
  * The SourceListeners are used to Start() and Stop() the underlying
  * MediaEngineSource when MediaStreams are assigned and deassigned in content.
  */
 class GetUserMediaWindowListener {
   friend MediaManager;
 
@@ -694,18 +694,16 @@ class GetUserMediaWindowListener {
 
  private:
   ~GetUserMediaWindowListener() {
     MOZ_ASSERT(mInactiveListeners.Length() == 0,
                "Inactive listeners should already be removed");
     MOZ_ASSERT(mActiveListeners.Length() == 0,
                "Active listeners should already be removed");
     Unused << mMediaThread;
-    // It's OK to release mStream on any thread; they have thread-safe
-    // refcounts.
   }
 
   // Set at construction
   base::Thread* mMediaThread;
 
   uint64_t mWindowID;
   const PrincipalHandle mPrincipalHandle;
 
@@ -716,21 +714,21 @@ class GetUserMediaWindowListener {
   nsTArray<RefPtr<SourceListener>> mInactiveListeners;
   nsTArray<RefPtr<SourceListener>> mActiveListeners;
 };
 
 class LocalTrackSource : public MediaStreamTrackSource {
  public:
   LocalTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel,
                    const RefPtr<SourceListener>& aListener,
-                   MediaSourceEnum aSource, MediaStream* aStream,
+                   MediaSourceEnum aSource, MediaTrack* aTrack,
                    RefPtr<PeerIdentity> aPeerIdentity)
       : MediaStreamTrackSource(aPrincipal, aLabel),
         mSource(aSource),
-        mStream(aStream),
+        mTrack(aTrack),
         mPeerIdentity(std::move(aPeerIdentity)),
         mListener(aListener.get()) {}
 
   MediaSourceEnum GetMediaSource() const override { return mSource; }
 
   const PeerIdentity* GetPeerIdentity() const override { return mPeerIdentity; }
 
   RefPtr<MediaStreamTrackSource::ApplyConstraintsPromise> ApplyConstraints(
@@ -738,109 +736,107 @@ class LocalTrackSource : public MediaStr
       CallerType aCallerType) override {
     MOZ_ASSERT(NS_IsMainThread());
     if (sHasShutdown || !mListener) {
       // Track has been stopped, or we are in shutdown. In either case
       // there's no observable outcome, so pretend we succeeded.
       return MediaStreamTrackSource::ApplyConstraintsPromise::CreateAndResolve(
           false, __func__);
     }
-    return mListener->ApplyConstraintsToTrack(mStream, aConstraints,
+    return mListener->ApplyConstraintsToTrack(mTrack, aConstraints,
                                               aCallerType);
   }
 
   void GetSettings(MediaTrackSettings& aOutSettings) override {
     if (mListener) {
-      mListener->GetSettingsFor(mStream, aOutSettings);
+      mListener->GetSettingsFor(mTrack, aOutSettings);
     }
   }
 
   void Stop() override {
     if (mListener) {
-      mListener->StopTrack(mStream);
+      mListener->StopTrack(mTrack);
       mListener = nullptr;
     }
-    if (!mStream->IsDestroyed()) {
-      mStream->Destroy();
+    if (!mTrack->IsDestroyed()) {
+      mTrack->Destroy();
     }
   }
 
   void Disable() override {
     if (mListener) {
-      mListener->SetEnabledFor(mStream, false);
+      mListener->SetEnabledFor(mTrack, false);
     }
   }
 
   void Enable() override {
     if (mListener) {
-      mListener->SetEnabledFor(mStream, true);
+      mListener->SetEnabledFor(mTrack, true);
     }
   }
 
   const MediaSourceEnum mSource;
-  const RefPtr<MediaStream> mStream;
+  const RefPtr<MediaTrack> mTrack;
   const RefPtr<const PeerIdentity> mPeerIdentity;
 
  protected:
   ~LocalTrackSource() {
     MOZ_ASSERT(NS_IsMainThread());
-    MOZ_ASSERT(mStream->IsDestroyed());
+    MOZ_ASSERT(mTrack->IsDestroyed());
   }
 
   // This is a weak pointer to avoid having the SourceListener (which may
   // have references to threads and threadpools) kept alive by DOM-objects
   // that may have ref-cycles and thus are released very late during
   // shutdown, even after xpcom-shutdown-threads. See bug 1351655 for what
   // can happen.
   WeakPtr<SourceListener> mListener;
 };
 
 class AudioCaptureTrackSource : public LocalTrackSource {
  public:
   AudioCaptureTrackSource(nsIPrincipal* aPrincipal, nsPIDOMWindowInner* aWindow,
                           const nsString& aLabel,
-                          AudioCaptureStream* aAudioCaptureStream,
+                          AudioCaptureTrack* aAudioCaptureTrack,
                           RefPtr<PeerIdentity> aPeerIdentity)
       : LocalTrackSource(aPrincipal, aLabel, nullptr,
-                         MediaSourceEnum::AudioCapture, aAudioCaptureStream,
+                         MediaSourceEnum::AudioCapture, aAudioCaptureTrack,
                          std::move(aPeerIdentity)),
         mWindow(aWindow),
-        mAudioCaptureStream(aAudioCaptureStream) {
-    mAudioCaptureStream->Start();
-    mAudioCaptureStream->Graph()->RegisterCaptureStreamForWindow(
-        mWindow->WindowID(), mAudioCaptureStream);
+        mAudioCaptureTrack(aAudioCaptureTrack) {
+    mAudioCaptureTrack->Start();
+    mAudioCaptureTrack->Graph()->RegisterCaptureTrackForWindow(
+        mWindow->WindowID(), mAudioCaptureTrack);
     mWindow->SetAudioCapture(true);
   }
 
   void Stop() override {
     MOZ_ASSERT(NS_IsMainThread());
-    if (!mAudioCaptureStream->IsDestroyed()) {
+    if (!mAudioCaptureTrack->IsDestroyed()) {
       MOZ_ASSERT(mWindow);
       mWindow->SetAudioCapture(false);
-      mAudioCaptureStream->Graph()->UnregisterCaptureStreamForWindow(
+      mAudioCaptureTrack->Graph()->UnregisterCaptureTrackForWindow(
           mWindow->WindowID());
       mWindow = nullptr;
     }
-    // LocalTrackSource destroys the stream.
+    // LocalTrackSource destroys the track.
     LocalTrackSource::Stop();
-    MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
+    MOZ_ASSERT(mAudioCaptureTrack->IsDestroyed());
   }
 
-  ProcessedMediaStream* InputStream() const {
-    return mAudioCaptureStream.get();
-  }
+  ProcessedMediaTrack* InputTrack() const { return mAudioCaptureTrack.get(); }
 
  protected:
   ~AudioCaptureTrackSource() {
     MOZ_ASSERT(NS_IsMainThread());
-    MOZ_ASSERT(mAudioCaptureStream->IsDestroyed());
+    MOZ_ASSERT(mAudioCaptureTrack->IsDestroyed());
   }
 
   RefPtr<nsPIDOMWindowInner> mWindow;
-  const RefPtr<AudioCaptureStream> mAudioCaptureStream;
+  const RefPtr<AudioCaptureTrack> mAudioCaptureTrack;
 };
 
 /**
  * nsIMediaDevice implementation.
  */
 NS_IMPL_ISUPPORTS(MediaDevice, nsIMediaDevice)
 
 MediaDevice::MediaDevice(const RefPtr<MediaEngineSource>& aSource,
@@ -1045,21 +1041,21 @@ nsresult MediaDevice::Allocate(const Med
       aConstraints.mDeviceId.Value().GetAsString().EqualsASCII("bad device")) {
     return NS_ERROR_FAILURE;
   }
 
   return mSource->Allocate(aConstraints, aPrefs, aPrincipalInfo,
                            aOutBadConstraint);
 }
 
-void MediaDevice::SetTrack(const RefPtr<SourceMediaStream>& aStream,
+void MediaDevice::SetTrack(const RefPtr<SourceMediaTrack>& aTrack,
                            const PrincipalHandle& aPrincipalHandle) {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
-  mSource->SetTrack(aStream, aPrincipalHandle);
+  mSource->SetTrack(aTrack, aPrincipalHandle);
 }
 
 nsresult MediaDevice::Start() {
   MOZ_ASSERT(MediaManager::IsInMediaThread());
   MOZ_ASSERT(mSource);
   return mSource->Start();
 }
 
@@ -1117,27 +1113,27 @@ static bool IsOn(const OwningBooleanOrMe
 static const MediaTrackConstraints& GetInvariant(
     const OwningBooleanOrMediaTrackConstraints& aUnion) {
   static const MediaTrackConstraints empty;
   return aUnion.IsMediaTrackConstraints() ? aUnion.GetAsMediaTrackConstraints()
                                           : empty;
 }
 
 /**
- * Creates a MediaStream, attaches a listener and fires off a success callback
+ * Creates a MediaTrack, attaches a listener and fires off a success callback
  * to the DOM with the stream. We also pass in the error callback so it can
  * be released correctly.
  *
  * All of this must be done on the main thread!
  *
  * Note that the various GetUserMedia Runnable classes currently allow for
- * two streams.  If we ever need to support getting more than two streams
+ * two tracks.  If we ever need to support getting more than two tracks
  * at once, we could convert everything to nsTArray<RefPtr<blah> >'s,
  * though that would complicate the constructors some.  Currently the
- * GetUserMedia spec does not allow for more than 2 streams to be obtained in
+ * GetUserMedia spec does not allow for more than 2 tracks to be obtained in
  * one call, to simplify handling of constraints.
  */
 class GetUserMediaStreamRunnable : public Runnable {
  public:
   GetUserMediaStreamRunnable(
       MozPromiseHolder<MediaManager::StreamPromise>&& aHolder,
       uint64_t aWindowID, RefPtr<GetUserMediaWindowListener> aWindowListener,
       RefPtr<SourceListener> aSourceListener,
@@ -1171,77 +1167,76 @@ class GetUserMediaStreamRunnable : publi
 
     // We're on main-thread, and the windowlist can only
     // be invalidated from the main-thread (see OnNavigation)
     if (!mManager->IsWindowListenerStillActive(mWindowListener)) {
       // This window is no longer live. mListener has already been removed.
       return NS_OK;
     }
 
-    MediaStreamGraph::GraphDriverType graphDriverType =
-        mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
-                     : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
-    MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
-        graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
+    MediaTrackGraph::GraphDriverType graphDriverType =
+        mAudioDevice ? MediaTrackGraph::AUDIO_THREAD_DRIVER
+                     : MediaTrackGraph::SYSTEM_THREAD_DRIVER;
+    MediaTrackGraph* mtg = MediaTrackGraph::GetInstance(
+        graphDriverType, window, MediaTrackGraph::REQUEST_DEFAULT_SAMPLE_RATE);
 
     auto domStream = MakeRefPtr<DOMMediaStream>(window);
     RefPtr<LocalTrackSource> audioTrackSource;
     RefPtr<LocalTrackSource> videoTrackSource;
     nsCOMPtr<nsIPrincipal> principal;
     if (mPeerIdentity) {
       principal = NullPrincipal::CreateWithInheritedAttributes(
           window->GetExtantDoc()->NodePrincipal());
     } else {
       principal = window->GetExtantDoc()->NodePrincipal();
     }
     RefPtr<GenericNonExclusivePromise> firstFramePromise;
     if (mAudioDevice) {
       if (mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
         // AudioCapture is a special case, here, in the sense that we're not
-        // really using the audio source and the SourceMediaStream, which acts
-        // as placeholders. We re-route a number of streams internally in the
-        // MSG and mix them down instead.
+        // really using the audio source and the SourceMediaTrack, which acts
+        // as placeholders. We re-route a number of tracks internally in the
+        // MTG and mix them down instead.
         NS_WARNING(
             "MediaCaptureWindowState doesn't handle "
             "MediaSourceEnum::AudioCapture. This must be fixed with UX "
             "before shipping.");
         auto audioCaptureSource = MakeRefPtr<AudioCaptureTrackSource>(
             principal, window, NS_LITERAL_STRING("Window audio capture"),
-            msg->CreateAudioCaptureStream(), mPeerIdentity);
+            mtg->CreateAudioCaptureTrack(), mPeerIdentity);
         audioTrackSource = audioCaptureSource;
         RefPtr<MediaStreamTrack> track = new dom::AudioStreamTrack(
-            window, audioCaptureSource->InputStream(), audioCaptureSource);
+            window, audioCaptureSource->InputTrack(), audioCaptureSource);
         domStream->AddTrackInternal(track);
       } else {
         nsString audioDeviceName;
         mAudioDevice->GetName(audioDeviceName);
-        RefPtr<MediaStream> stream =
-            msg->CreateSourceStream(MediaSegment::AUDIO);
+        RefPtr<MediaTrack> track = mtg->CreateSourceTrack(MediaSegment::AUDIO);
         audioTrackSource = new LocalTrackSource(
             principal, audioDeviceName, mSourceListener,
-            mAudioDevice->GetMediaSource(), stream, mPeerIdentity);
+            mAudioDevice->GetMediaSource(), track, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mAudio));
-        RefPtr<MediaStreamTrack> track = new dom::AudioStreamTrack(
-            window, stream, audioTrackSource, dom::MediaStreamTrackState::Live,
+        RefPtr<MediaStreamTrack> domTrack = new dom::AudioStreamTrack(
+            window, track, audioTrackSource, dom::MediaStreamTrackState::Live,
             GetInvariant(mConstraints.mAudio));
-        domStream->AddTrackInternal(track);
+        domStream->AddTrackInternal(domTrack);
       }
     }
     if (mVideoDevice) {
       nsString videoDeviceName;
       mVideoDevice->GetName(videoDeviceName);
-      RefPtr<MediaStream> stream = msg->CreateSourceStream(MediaSegment::VIDEO);
+      RefPtr<MediaTrack> track = mtg->CreateSourceTrack(MediaSegment::VIDEO);
       videoTrackSource = new LocalTrackSource(
           principal, videoDeviceName, mSourceListener,
-          mVideoDevice->GetMediaSource(), stream, mPeerIdentity);
+          mVideoDevice->GetMediaSource(), track, mPeerIdentity);
       MOZ_ASSERT(IsOn(mConstraints.mVideo));
-      RefPtr<MediaStreamTrack> track = new dom::VideoStreamTrack(
-          window, stream, videoTrackSource, dom::MediaStreamTrackState::Live,
+      RefPtr<MediaStreamTrack> domTrack = new dom::VideoStreamTrack(
+          window, track, videoTrackSource, dom::MediaStreamTrackState::Live,
           GetInvariant(mConstraints.mVideo));
-      domStream->AddTrackInternal(track);
+      domStream->AddTrackInternal(domTrack);
       switch (mVideoDevice->GetMediaSource()) {
         case MediaSourceEnum::Browser:
         case MediaSourceEnum::Screen:
         case MediaSourceEnum::Window:
           // Wait for first frame for screen-sharing devices, to ensure
           // with and height settings are available immediately, to pass wpt.
           firstFramePromise = mVideoDevice->mSource->GetFirstFramePromise();
           break;
@@ -4107,32 +4102,30 @@ SourceListener::InitializeAsync() {
   MOZ_DIAGNOSTIC_ASSERT(!mStopped);
 
   return MediaManager::PostTask<SourceListenerPromise>(
              __func__,
              [principal = GetPrincipalHandle(),
               audioDevice =
                   mAudioDeviceState ? mAudioDeviceState->mDevice : nullptr,
               audioStream = mAudioDeviceState
-                                ? mAudioDeviceState->mTrackSource->mStream
+                                ? mAudioDeviceState->mTrackSource->mTrack
                                 : nullptr,
               videoDevice =
                   mVideoDeviceState ? mVideoDeviceState->mDevice : nullptr,
               videoStream = mVideoDeviceState
-                                ? mVideoDeviceState->mTrackSource->mStream
+                                ? mVideoDeviceState->mTrackSource->mTrack
                                 : nullptr](
                  MozPromiseHolder<SourceListenerPromise>& aHolder) {
                if (audioDevice) {
-                 audioDevice->SetTrack(audioStream->AsSourceStream(),
-                                       principal);
+                 audioDevice->SetTrack(audioStream->AsSourceTrack(), principal);
                }
 
                if (videoDevice) {
-                 videoDevice->SetTrack(videoStream->AsSourceStream(),
-                                       principal);
+                 videoDevice->SetTrack(videoStream->AsSourceTrack(), principal);
                }
 
                if (audioDevice) {
                  nsresult rv = audioDevice->Start();
                  if (rv == NS_ERROR_NOT_AVAILABLE) {
                    PR_Sleep(200);
                    rv = audioDevice->Start();
                  }
@@ -4246,23 +4239,23 @@ void SourceListener::Stop() {
       StopVideoTrack();
     }
   }
 
   mWindowListener->Remove(this);
   mWindowListener = nullptr;
 }
 
-void SourceListener::StopTrack(MediaStream* aStream) {
+void SourceListener::StopTrack(MediaTrack* aTrack) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_ASSERT(Activated(), "No device to stop");
-  DeviceState& state = GetDeviceStateFor(aStream);
-
-  LOG("SourceListener %p stopping %s track for stream %p", this,
-      &state == mAudioDeviceState.get() ? "audio" : "video", aStream);
+  DeviceState& state = GetDeviceStateFor(aTrack);
+
+  LOG("SourceListener %p stopping %s track for track %p", this,
+      &state == mAudioDeviceState.get() ? "audio" : "video", aTrack);
 
   if (state.mStopped) {
     // device already stopped.
     return;
   }
   state.mStopped = true;
 
   state.mDisableTimer->Cancel();
@@ -4278,46 +4271,46 @@ void SourceListener::StopTrack(MediaStre
   if ((!mAudioDeviceState || mAudioDeviceState->mStopped) &&
       (!mVideoDeviceState || mVideoDeviceState->mStopped)) {
     LOG("SourceListener %p this was the last track stopped", this);
     Stop();
   }
 }
 
 void SourceListener::StopAudioTrack() {
-  StopTrack(mAudioDeviceState->mTrackSource->mStream);
+  StopTrack(mAudioDeviceState->mTrackSource->mTrack);
 }
 
 void SourceListener::StopVideoTrack() {
-  StopTrack(mVideoDeviceState->mTrackSource->mStream);
+  StopTrack(mVideoDeviceState->mTrackSource->mTrack);
 }
 
-void SourceListener::GetSettingsFor(MediaStream* aStream,
+void SourceListener::GetSettingsFor(MediaTrack* aTrack,
                                     MediaTrackSettings& aOutSettings) const {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
-  DeviceState& state = GetDeviceStateFor(aStream);
+  DeviceState& state = GetDeviceStateFor(aTrack);
   state.mDevice->GetSettings(aOutSettings);
 
   MediaSourceEnum mediaSource = state.mDevice->GetMediaSource();
   if (mediaSource == MediaSourceEnum::Camera ||
       mediaSource == MediaSourceEnum::Microphone) {
     aOutSettings.mDeviceId.Construct(state.mDevice->mID);
     aOutSettings.mGroupId.Construct(state.mDevice->mGroupID);
   }
 }
 
-void SourceListener::SetEnabledFor(MediaStream* aStream, bool aEnable) {
+void SourceListener::SetEnabledFor(MediaTrack* aTrack, bool aEnable) {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
   MOZ_ASSERT(Activated(), "No device to set enabled state for");
 
-  DeviceState& state = GetDeviceStateFor(aStream);
-
-  LOG("SourceListener %p %s %s track for stream %p", this,
+  DeviceState& state = GetDeviceStateFor(aTrack);
+
+  LOG("SourceListener %p %s %s track for track %p", this,
       aEnable ? "enabling" : "disabling",
-      &state == mAudioDeviceState.get() ? "audio" : "video", aStream);
+      &state == mAudioDeviceState.get() ? "audio" : "video", aTrack);
 
   state.mTrackEnabled = aEnable;
 
   if (state.mStopped) {
     // Device terminally stopped. Updating device state is pointless.
     return;
   }
 
@@ -4357,28 +4350,28 @@ void SourceListener::SetEnabledFor(Media
   }
 
   typedef MozPromise<nsresult, bool, /* IsExclusive = */ true>
       DeviceOperationPromise;
   RefPtr<SourceListener> self = this;
   timerPromise
       ->Then(
           GetMainThreadSerialEventTarget(), __func__,
-          [self, this, &state, stream = RefPtr<MediaStream>(aStream),
+          [self, this, &state, track = RefPtr<MediaTrack>(aTrack),
            aEnable]() mutable {
             MOZ_ASSERT(state.mDeviceEnabled != aEnable,
                        "Device operation hasn't started");
             MOZ_ASSERT(state.mOperationInProgress,
                        "It's our responsibility to reset the inProgress state");
 
-            LOG("SourceListener %p %s %s track for stream %p - starting device "
+            LOG("SourceListener %p %s %s track for track %p - starting device "
                 "operation",
                 this, aEnable ? "enabling" : "disabling",
                 &state == mAudioDeviceState.get() ? "audio" : "video",
-                stream.get());
+                track.get());
 
             if (state.mStopped) {
               // Source was stopped between timer resolving and this runnable.
               return DeviceOperationPromise::CreateAndResolve(NS_ERROR_ABORT,
                                                               __func__);
             }
 
             state.mDeviceEnabled = aEnable;
@@ -4403,43 +4396,43 @@ void SourceListener::SetEnabledFor(Media
           },
           []() {
             // Timer was canceled by us. We signal this with NS_ERROR_ABORT.
             return DeviceOperationPromise::CreateAndResolve(NS_ERROR_ABORT,
                                                             __func__);
           })
       ->Then(
           GetMainThreadSerialEventTarget(), __func__,
-          [self, this, &state, stream = RefPtr<MediaStream>(aStream),
+          [self, this, &state, track = RefPtr<MediaTrack>(aTrack),
            aEnable](nsresult aResult) mutable {
             MOZ_ASSERT_IF(aResult != NS_ERROR_ABORT,
                           state.mDeviceEnabled == aEnable);
             MOZ_ASSERT(state.mOperationInProgress);
             state.mOperationInProgress = false;
 
             if (state.mStopped) {
               // Device was stopped on main thread during the operation. Nothing
               // to do.
               return;
             }
 
-            LOG("SourceListener %p %s %s track for stream %p %s", this,
+            LOG("SourceListener %p %s %s track for track %p %s", this,
                 aEnable ? "enabling" : "disabling",
                 &state == mAudioDeviceState.get() ? "audio" : "video",
-                stream.get(), NS_SUCCEEDED(aResult) ? "succeeded" : "failed");
+                track.get(), NS_SUCCEEDED(aResult) ? "succeeded" : "failed");
 
             if (NS_FAILED(aResult) && aResult != NS_ERROR_ABORT) {
               // This path handles errors from starting or stopping the device.
               // NS_ERROR_ABORT are for cases where *we* aborted. They need
               // graceful handling.
               if (aEnable) {
                 // Starting the device failed. Stopping the track here will make
                 // the MediaStreamTrack end after a pass through the
-                // MediaStreamGraph.
-                StopTrack(stream);
+                // MediaTrackGraph.
+                StopTrack(track);
               } else {
                 // Stopping the device failed. This is odd, but not fatal.
                 MOZ_ASSERT_UNREACHABLE("The device should be stoppable");
 
                 // To keep our internal state sane in this case, we disallow
                 // future stops due to disable.
                 state.mOffWhileDisabled = false;
               }
@@ -4455,19 +4448,19 @@ void SourceListener::SetEnabledFor(Media
             if (state.mTrackEnabled == state.mDeviceEnabled) {
               // Intended state is same as device's current state.
               // Nothing more to do.
               return;
             }
 
             // Track state changed during this operation. We'll start over.
             if (state.mTrackEnabled) {
-              SetEnabledFor(stream, true);
+              SetEnabledFor(track, true);
             } else {
-              SetEnabledFor(stream, false);
+              SetEnabledFor(track, false);
             }
           },
           []() { MOZ_ASSERT_UNREACHABLE("Unexpected and unhandled reject"); });
 }
 
 void SourceListener::StopSharing() {
   MOZ_ASSERT(NS_IsMainThread());
 
@@ -4478,20 +4471,20 @@ void SourceListener::StopSharing() {
   MOZ_RELEASE_ASSERT(mWindowListener);
   LOG("SourceListener %p StopSharing", this);
 
   RefPtr<SourceListener> self(this);
   if (mVideoDeviceState && (mVideoDeviceState->mDevice->GetMediaSource() ==
                                 MediaSourceEnum::Screen ||
                             mVideoDeviceState->mDevice->GetMediaSource() ==
                                 MediaSourceEnum::Window)) {
-    // We want to stop the whole stream if there's no audio;
+    // We want to stop all tracks if there's no audio;
     // just the video track if we have both.
     // StopTrack figures this out for us.
-    StopTrack(mVideoDeviceState->mTrackSource->mStream);
+    StopTrack(mVideoDeviceState->mTrackSource->mTrack);
   }
   if (mAudioDeviceState && mAudioDeviceState->mDevice->GetMediaSource() ==
                                MediaSourceEnum::AudioCapture) {
     static_cast<AudioCaptureTrackSource*>(mAudioDeviceState->mTrackSource.get())
         ->Stop();
   }
 }
 
@@ -4542,24 +4535,24 @@ CaptureState SourceListener::CapturingSo
     return CaptureState::Enabled;
   }
 
   return CaptureState::Disabled;
 }
 
 RefPtr<SourceListener::SourceListenerPromise>
 SourceListener::ApplyConstraintsToTrack(
-    MediaStream* aStream, const MediaTrackConstraints& aConstraints,
+    MediaTrack* aTrack, const MediaTrackConstraints& aConstraints,
     CallerType aCallerType) {
   MOZ_ASSERT(NS_IsMainThread());
-  DeviceState& state = GetDeviceStateFor(aStream);
+  DeviceState& state = GetDeviceStateFor(aTrack);
 
   if (mStopped || state.mStopped) {
-    LOG("gUM %s track for stream %p applyConstraints, but source is stopped",
-        &state == mAudioDeviceState.get() ? "audio" : "video", aStream);
+    LOG("gUM %s track for track %p applyConstraints, but source is stopped",
+        &state == mAudioDeviceState.get() ? "audio" : "video", aTrack);
     return SourceListenerPromise::CreateAndResolve(false, __func__);
   }
 
   MediaManager* mgr = MediaManager::GetIfExists();
   if (!mgr) {
     return SourceListenerPromise::CreateAndResolve(false, __func__);
   }
 
@@ -4600,26 +4593,24 @@ SourceListener::ApplyConstraintsToTrack(
         aHolder.Resolve(false, __func__);
       });
 }
 
 PrincipalHandle SourceListener::GetPrincipalHandle() const {
   return mPrincipalHandle;
 }
 
-DeviceState& SourceListener::GetDeviceStateFor(MediaStream* aStream) const {
-  if (mAudioDeviceState &&
-      mAudioDeviceState->mTrackSource->mStream == aStream) {
+DeviceState& SourceListener::GetDeviceStateFor(MediaTrack* aTrack) const {
+  if (mAudioDeviceState && mAudioDeviceState->mTrackSource->mTrack == aTrack) {
     return *mAudioDeviceState;
   }
-  if (mVideoDeviceState &&
-      mVideoDeviceState->mTrackSource->mStream == aStream) {
+  if (mVideoDeviceState && mVideoDeviceState->mTrackSource->mTrack == aTrack) {
     return *mVideoDeviceState;
   }
-  MOZ_CRASH("Unknown stream");
+  MOZ_CRASH("Unknown track");
 }
 
 // Doesn't kill audio
 void GetUserMediaWindowListener::StopSharing() {
   MOZ_ASSERT(NS_IsMainThread(), "Only call on main thread");
 
   for (auto& l : nsTArray<RefPtr<SourceListener>>(mActiveListeners)) {
     l->StopSharing();
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -80,17 +80,17 @@ class MediaDevice : public nsIMediaDevic
   uint32_t GetBestFitnessDistance(
       const nsTArray<const NormalizedConstraintSet*>& aConstraintSets,
       bool aIsChrome);
 
   nsresult Allocate(const dom::MediaTrackConstraints& aConstraints,
                     const MediaEnginePrefs& aPrefs,
                     const mozilla::ipc::PrincipalInfo& aPrincipalInfo,
                     const char** aOutBadConstraint);
-  void SetTrack(const RefPtr<SourceMediaStream>& aStream,
+  void SetTrack(const RefPtr<SourceMediaTrack>& aTrack,
                 const PrincipalHandle& aPrincipal);
   nsresult Start();
   nsresult Reconfigure(const dom::MediaTrackConstraints& aConstraints,
                        const MediaEnginePrefs& aPrefs,
                        const char** aOutBadConstraint);
   nsresult FocusOnSelectedSource();
   nsresult Stop();
   nsresult Deallocate();
@@ -194,17 +194,17 @@ class MediaManager final : public nsIMed
   typedef dom::NavigatorUserMediaSuccessCallback GetUserMediaSuccessCallback;
   typedef dom::NavigatorUserMediaErrorCallback GetUserMediaErrorCallback;
 
   MOZ_CAN_RUN_SCRIPT
   static void CallOnError(GetUserMediaErrorCallback& aCallback,
                           dom::MediaStreamError& aError);
   MOZ_CAN_RUN_SCRIPT
   static void CallOnSuccess(GetUserMediaSuccessCallback& aCallback,
-                            DOMMediaStream& aStream);
+                            DOMMediaStream& aTrack);
 
   typedef nsTArray<RefPtr<MediaDevice>> MediaDeviceSet;
   typedef media::Refcountable<MediaDeviceSet> MediaDeviceSetRefCnt;
 
   typedef MozPromise<RefPtr<DOMMediaStream>, RefPtr<MediaMgrError>, true>
       StreamPromise;
   typedef MozPromise<RefPtr<MediaDeviceSetRefCnt>, RefPtr<MediaMgrError>, true>
       DevicesPromise;
--- a/dom/media/MediaPlaybackDelayPolicy.cpp
+++ b/dom/media/MediaPlaybackDelayPolicy.cpp
@@ -2,16 +2,17 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaPlaybackDelayPolicy.h"
 
 #include "nsPIDOMWindow.h"
 #include "mozilla/dom/HTMLMediaElement.h"
+#include "mozilla/StaticPrefs_media.h"
 
 namespace mozilla {
 namespace dom {
 
 using AudibleState = AudioChannelService::AudibleState;
 
 static AudibleState DetermineMediaAudibleState(const HTMLMediaElement* aElement,
                                                bool aIsAudible) {
--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -2,22 +2,22 @@
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaRecorder.h"
 
 #include "AudioNodeEngine.h"
-#include "AudioNodeStream.h"
+#include "AudioNodeTrack.h"
 #include "DOMMediaStream.h"
 #include "GeckoProfiler.h"
 #include "MediaDecoder.h"
 #include "MediaEncoder.h"
-#include "MediaStreamGraphImpl.h"
+#include "MediaTrackGraphImpl.h"
 #include "VideoUtils.h"
 #include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/dom/AudioStreamTrack.h"
 #include "mozilla/dom/BlobEvent.h"
 #include "mozilla/dom/File.h"
 #include "mozilla/dom/MediaRecorderErrorEvent.h"
 #include "mozilla/dom/MutableBlobStorage.h"
 #include "mozilla/dom/VideoStreamTrack.h"
@@ -175,25 +175,25 @@ NS_IMPL_RELEASE_INHERITED(MediaRecorder,
  * MediaRecorder::Start before Read Thread shutdown, the same recording context
  * in MediaRecorder might be access by two Reading Threads, which cause a
  * problem. In the new design, we put recording context into Session object,
  * including Read Thread.  Each Session has its own recording context and Read
  * Thread, problem is been resolved.
  *
  * Life cycle of a Session object.
  * 1) Initialization Stage (in main thread)
- *    Setup media streams in MSG, and bind MediaEncoder with Source Stream when
+ *    Setup media tracks in MTG, and bind MediaEncoder with SourceMediaTrack when
  * mStream is available. Resource allocation, such as encoded data cache buffer
  * and MediaEncoder. Create read thread. Automatically switch to Extract stage
  * in the end of this stage. 2) Extract Stage (in Read Thread) Pull encoded A/V
  * frames from MediaEncoder, dispatch to OnDataAvailable handler. Unless a
  * client calls Session::Stop, Session object keeps stay in this stage. 3)
  * Destroy Stage (in main thread) Switch from Extract stage to Destroy stage by
- * calling Session::Stop. Release session resource and remove associated streams
- * from MSG.
+ * calling Session::Stop. Release session resource and remove associated tracks
+ * from MTG.
  *
  * Lifetime of MediaRecorder and Session objects.
  * 1) MediaRecorder creates a Session in MediaRecorder::Start function and holds
  *    a reference to Session. Then the Session registers itself to a
  *    ShutdownBlocker and also holds a reference to MediaRecorder.
  *    Therefore, the reference dependency in gecko is:
  *    ShutdownBlocker -> Session <-> MediaRecorder, note that there is a cycle
  *    reference between Session and MediaRecorder.
@@ -1183,26 +1183,26 @@ class MediaRecorder::Session : public Pr
   Result<RunningState, nsresult> mRunningState;
 };
 
 MediaRecorder::~MediaRecorder() {
   LOG(LogLevel::Debug, ("~MediaRecorder (%p)", this));
   UnRegisterActivityObserver();
 }
 
-MediaRecorder::MediaRecorder(DOMMediaStream& aSourceMediaStream,
+MediaRecorder::MediaRecorder(DOMMediaStream& aSourceMediaTrack,
                              nsPIDOMWindowInner* aOwnerWindow)
     : DOMEventTargetHelper(aOwnerWindow),
       mAudioNodeOutput(0),
       mState(RecordingState::Inactive),
       mAudioBitsPerSecond(0),
       mVideoBitsPerSecond(0),
       mBitsPerSecond(0) {
   MOZ_ASSERT(aOwnerWindow);
-  mDOMStream = &aSourceMediaStream;
+  mDOMStream = &aSourceMediaTrack;
 
   RegisterActivityObserver();
 }
 
 MediaRecorder::MediaRecorder(AudioNode& aSrcAudioNode, uint32_t aSrcOutput,
                              nsPIDOMWindowInner* aOwnerWindow)
     : DOMEventTargetHelper(aOwnerWindow),
       mAudioNodeOutput(aSrcOutput),
--- a/dom/media/MediaRecorder.h
+++ b/dom/media/MediaRecorder.h
@@ -11,21 +11,20 @@
 #include "mozilla/DOMEventTargetHelper.h"
 #include "mozilla/MozPromise.h"
 #include "nsIDocumentActivity.h"
 
 // Max size for allowing queue encoded data in memory
 #define MAX_ALLOW_MEMORY_BUFFER 1024000
 namespace mozilla {
 
-class AudioNodeStream;
+class AudioNodeTrack;
 class DOMMediaStream;
 class ErrorResult;
 struct MediaRecorderOptions;
-class MediaStream;
 class GlobalObject;
 
 namespace dom {
 
 class AudioNode;
 class Blob;
 class Document;
 class DOMException;
@@ -45,17 +44,17 @@ class DOMException;
  * or RequestData function called by UA.
  */
 
 class MediaRecorder final : public DOMEventTargetHelper,
                             public nsIDocumentActivity {
  public:
   class Session;
 
-  MediaRecorder(DOMMediaStream& aSourceMediaStream,
+  MediaRecorder(DOMMediaStream& aSourceMediaTrack,
                 nsPIDOMWindowInner* aOwnerWindow);
   MediaRecorder(AudioNode& aSrcAudioNode, uint32_t aSrcOutput,
                 nsPIDOMWindowInner* aOwnerWindow);
 
   static nsTArray<RefPtr<Session>> GetSessions();
 
   // nsWrapperCache
   JSObject* WrapObject(JSContext* aCx,
--- a/dom/media/MediaSegment.h
+++ b/dom/media/MediaSegment.h
@@ -20,60 +20,60 @@ namespace mozilla {
  * maximum avoids overflow in conversions between track rates and conversions
  * from seconds.
  */
 typedef int32_t TrackRate;
 const int64_t TRACK_RATE_MAX_BITS = 20;
 const TrackRate TRACK_RATE_MAX = 1 << TRACK_RATE_MAX_BITS;
 
 /**
- * A number of ticks at a rate determined by some underlying track (e.g.
- * audio sample rate). We want to make sure that multiplying TrackTicks by
- * a TrackRate doesn't overflow, so we set its max accordingly.
- * StreamTime should be used instead when we're working with MediaStreamGraph's
- * rate, but TrackTicks can be used outside MediaStreams when we have data
- * at a different rate.
+ * A number of ticks at a rate determined by some underlying track (e.g., audio
+ * sample rate). We want to make sure that multiplying TrackTicks by a TrackRate
+ * doesn't overflow, so we set its max accordingly.
+ * TrackTime should be used instead when we're working with MediaTrackGraph's
+ * rate, but TrackTicks can be used outside MediaTracks when we have data at a
+ * different rate.
  */
 typedef int64_t TrackTicks;
 const int64_t TRACK_TICKS_MAX = INT64_MAX >> TRACK_RATE_MAX_BITS;
 
 /**
  * We represent media times in 64-bit audio frame counts or ticks.
- * All tracks in a MediaStreamGraph have the same rate.
+ * All tracks in a MediaTrackGraph have the same rate.
  */
 typedef int64_t MediaTime;
 const int64_t MEDIA_TIME_MAX = TRACK_TICKS_MAX;
 
 /**
- * Media time relative to the start of a MediaStream.
+ * Media time relative to the start of a MediaTrack.
  */
-typedef MediaTime StreamTime;
-const StreamTime STREAM_TIME_MAX = MEDIA_TIME_MAX;
+typedef MediaTime TrackTime;
+const TrackTime TRACK_TIME_MAX = MEDIA_TIME_MAX;
 
 /**
  * Media time relative to the start of the graph timeline.
  */
 typedef MediaTime GraphTime;
 const GraphTime GRAPH_TIME_MAX = MEDIA_TIME_MAX;
 
 /**
  * The number of chunks allocated by default for a MediaSegment.
  * Appending more chunks than this will cause further allocations.
  *
  * 16 is an arbitrary number intended to cover the most common cases in the
- * MediaStreamGraph (1 with silence and 1-2 with data for a realtime track)
+ * MediaTrackGraph (1 with silence and 1-2 with data for a realtime track)
  * with some margin.
  */
 const size_t DEFAULT_SEGMENT_CAPACITY = 16;
 
 /**
  * A MediaSegment is a chunk of media data sequential in time. Different
  * types of data have different subclasses of MediaSegment, all inheriting
  * from MediaSegmentBase.
- * All MediaSegment data is timed using StreamTime. The actual tick rate
+ * All MediaSegment data is timed using TrackTime. The actual tick rate
  * is defined on a per-track basis. For some track types, this can be
  * a fixed constant for all tracks of that type (e.g. 1MHz for video).
  *
  * Each media segment defines a concept of "null media data" (e.g. silence
  * for audio or "no video frame" for video), which can be efficiently
  * represented. This is used for padding.
  */
 class MediaSegment {
@@ -83,27 +83,27 @@ class MediaSegment {
 
   virtual ~MediaSegment() { MOZ_COUNT_DTOR(MediaSegment); }
 
   enum Type { AUDIO, VIDEO, TYPE_COUNT };
 
   /**
    * Gets the total duration of the segment.
    */
-  StreamTime GetDuration() const { return mDuration; }
+  TrackTime GetDuration() const { return mDuration; }
   Type GetType() const { return mType; }
 
   /**
    * Gets the last principal id that was appended to this segment.
    */
   const PrincipalHandle& GetLastPrincipalHandle() const {
     return mLastPrincipalHandle;
   }
   /**
-   * Called by the MediaStreamGraph as it appends a chunk with a different
+   * Called by the MediaTrackGraph as it appends a chunk with a different
    * principal id than the current one.
    */
   void SetLastPrincipalHandle(PrincipalHandle aLastPrincipalHandle) {
     mLastPrincipalHandle = std::forward<PrincipalHandle>(aLastPrincipalHandle);
   }
 
   /**
    * Returns true if all chunks in this segment are null.
@@ -121,34 +121,34 @@ class MediaSegment {
   virtual MediaSegment* CreateEmptyClone() const = 0;
   /**
    * Moves contents of aSource to the end of this segment.
    */
   virtual void AppendFrom(MediaSegment* aSource) = 0;
   /**
    * Append a slice of aSource to this segment.
    */
-  virtual void AppendSlice(const MediaSegment& aSource, StreamTime aStart,
-                           StreamTime aEnd) = 0;
+  virtual void AppendSlice(const MediaSegment& aSource, TrackTime aStart,
+                           TrackTime aEnd) = 0;
   /**
    * Replace all contents up to aDuration with null data.
    */
-  virtual void ForgetUpTo(StreamTime aDuration) = 0;
+  virtual void ForgetUpTo(TrackTime aDuration) = 0;
   /**
    * Forget all data buffered after a given point
    */
-  virtual void FlushAfter(StreamTime aNewEnd) = 0;
+  virtual void FlushAfter(TrackTime aNewEnd) = 0;
   /**
    * Insert aDuration of null data at the start of the segment.
    */
-  virtual void InsertNullDataAtStart(StreamTime aDuration) = 0;
+  virtual void InsertNullDataAtStart(TrackTime aDuration) = 0;
   /**
    * Insert aDuration of null data at the end of the segment.
    */
-  virtual void AppendNullData(StreamTime aDuration) = 0;
+  virtual void AppendNullData(TrackTime aDuration) = 0;
   /**
    * Replace contents with disabled (silence/black) data of the same duration
    */
   virtual void ReplaceWithDisabled() = 0;
   /**
    * Replace contents with null data of the same duration
    */
   virtual void ReplaceWithNull() = 0;
@@ -175,20 +175,20 @@ class MediaSegment {
 
   MediaSegment(MediaSegment&& aSegment)
       : mDuration(std::move(aSegment.mDuration)),
         mType(std::move(aSegment.mType)),
         mLastPrincipalHandle(std::move(aSegment.mLastPrincipalHandle)) {
     MOZ_COUNT_CTOR(MediaSegment);
   }
 
-  StreamTime mDuration;  // total of mDurations of all chunks
+  TrackTime mDuration;  // total of mDurations of all chunks
   Type mType;
 
-  // The latest principal handle that the MediaStreamGraph has processed for
+  // The latest principal handle that the MediaTrackGraph has processed for
   // this segment.
   PrincipalHandle mLastPrincipalHandle;
 };
 
 /**
  * C is the implementation class subclassed from MediaSegmentBase.
  * C must contain a Chunk class.
  */
@@ -206,80 +206,80 @@ class MediaSegmentBase : public MediaSeg
   }
   bool IsEmpty() const override { return mChunks.IsEmpty(); }
   MediaSegment* CreateEmptyClone() const override { return new C(); }
   void AppendFrom(MediaSegment* aSource) override {
     NS_ASSERTION(aSource->GetType() == C::StaticType(), "Wrong type");
     AppendFromInternal(static_cast<C*>(aSource));
   }
   void AppendFrom(C* aSource) { AppendFromInternal(aSource); }
-  void AppendSlice(const MediaSegment& aSource, StreamTime aStart,
-                   StreamTime aEnd) override {
+  void AppendSlice(const MediaSegment& aSource, TrackTime aStart,
+                   TrackTime aEnd) override {
     NS_ASSERTION(aSource.GetType() == C::StaticType(), "Wrong type");
     AppendSliceInternal(static_cast<const C&>(aSource), aStart, aEnd);
   }
-  void AppendSlice(const C& aOther, StreamTime aStart, StreamTime aEnd) {
+  void AppendSlice(const C& aOther, TrackTime aStart, TrackTime aEnd) {
     AppendSliceInternal(aOther, aStart, aEnd);
   }
   /**
    * Replace the first aDuration ticks with null media data, because the data
    * will not be required again.
    */
-  void ForgetUpTo(StreamTime aDuration) override {
+  void ForgetUpTo(TrackTime aDuration) override {
     if (mChunks.IsEmpty() || aDuration <= 0) {
       return;
     }
     if (mChunks[0].IsNull()) {
-      StreamTime extraToForget =
+      TrackTime extraToForget =
           std::min(aDuration, mDuration) - mChunks[0].GetDuration();
       if (extraToForget > 0) {
         RemoveLeading(extraToForget, 1);
         mChunks[0].mDuration += extraToForget;
         mDuration += extraToForget;
       }
       return;
     }
     RemoveLeading(aDuration, 0);
     mChunks.InsertElementAt(0)->SetNull(aDuration);
     mDuration += aDuration;
   }
-  void FlushAfter(StreamTime aNewEnd) override {
+  void FlushAfter(TrackTime aNewEnd) override {
     if (mChunks.IsEmpty()) {
       return;
     }
 
     if (mChunks[0].IsNull()) {
-      StreamTime extraToKeep = aNewEnd - mChunks[0].GetDuration();
+      TrackTime extraToKeep = aNewEnd - mChunks[0].GetDuration();
       if (extraToKeep < 0) {
         // reduce the size of the Null, get rid of everthing else
         mChunks[0].SetNull(aNewEnd);
         extraToKeep = 0;
       }
       RemoveTrailing(extraToKeep, 1);
     } else {
       if (aNewEnd > mDuration) {
         NS_ASSERTION(aNewEnd <= mDuration, "can't add data in FlushAfter");
         return;
       }
       RemoveTrailing(aNewEnd, 0);
     }
     mDuration = aNewEnd;
   }
-  void InsertNullDataAtStart(StreamTime aDuration) override {
+  void InsertNullDataAtStart(TrackTime aDuration) override {
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[0].IsNull()) {
       mChunks[0].mDuration += aDuration;
     } else {
       mChunks.InsertElementAt(0)->SetNull(aDuration);
     }
     mDuration += aDuration;
   }
-  void AppendNullData(StreamTime aDuration) override {
+  void AppendNullData(TrackTime aDuration) override {
     if (aDuration <= 0) {
       return;
     }
     if (!mChunks.IsEmpty() && mChunks[mChunks.Length() - 1].IsNull()) {
       mChunks[mChunks.Length() - 1].mDuration += aDuration;
     } else {
       mChunks.AppendElement()->SetNull(aDuration);
     }
@@ -287,17 +287,17 @@ class MediaSegmentBase : public MediaSeg
   }
   void ReplaceWithDisabled() override {
     if (GetType() != AUDIO) {
       MOZ_CRASH("Disabling unknown segment type");
     }
     ReplaceWithNull();
   }
   void ReplaceWithNull() override {
-    StreamTime duration = GetDuration();
+    TrackTime duration = GetDuration();
     Clear();
     AppendNullData(duration);
   }
   void Clear() override {
     mDuration = 0;
     mChunks.ClearAndRetainStorage();
     mChunks.SetCapacity(DEFAULT_SEGMENT_CAPACITY);
   }
@@ -324,17 +324,17 @@ class MediaSegmentBase : public MediaSeg
     const Chunk& operator*() { return mSegment.mChunks[mIndex]; }
     const Chunk* operator->() { return &mSegment.mChunks[mIndex]; }
 
    private:
     const MediaSegmentBase<C, Chunk>& mSegment;
     uint32_t mIndex;
   };
 
-  void RemoveLeading(StreamTime aDuration) { RemoveLeading(aDuration, 0); }
+  void RemoveLeading(TrackTime aDuration) { RemoveLeading(aDuration, 0); }
 
   size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override {
     size_t amount = mChunks.ShallowSizeOfExcludingThis(aMallocSizeOf);
     for (size_t i = 0; i < mChunks.Length(); i++) {
       amount += mChunks[i].SizeOfExcludingThisIfUnshared(aMallocSizeOf);
     }
     return amount;
   }
@@ -382,52 +382,52 @@ class MediaSegmentBase : public MediaSeg
     }
 
     aSource->mChunks.ClearAndRetainStorage();
     MOZ_ASSERT(aSource->mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained after appending from aSource");
   }
 
   void AppendSliceInternal(const MediaSegmentBase<C, Chunk>& aSource,
-                           StreamTime aStart, StreamTime aEnd) {
+                           TrackTime aStart, TrackTime aEnd) {
     MOZ_ASSERT(aStart <= aEnd, "Endpoints inverted");
     NS_ASSERTION(aStart >= 0 && aEnd <= aSource.mDuration,
                  "Slice out of range");
     mDuration += aEnd - aStart;
-    StreamTime offset = 0;
+    TrackTime offset = 0;
     for (uint32_t i = 0; i < aSource.mChunks.Length() && offset < aEnd; ++i) {
       const Chunk& c = aSource.mChunks[i];
-      StreamTime start = std::max(aStart, offset);
-      StreamTime nextOffset = offset + c.GetDuration();
-      StreamTime end = std::min(aEnd, nextOffset);
+      TrackTime start = std::max(aStart, offset);
+      TrackTime nextOffset = offset + c.GetDuration();
+      TrackTime end = std::min(aEnd, nextOffset);
       if (start < end) {
         if (!mChunks.IsEmpty() &&
             mChunks[mChunks.Length() - 1].CanCombineWithFollowing(c)) {
           MOZ_ASSERT(start - offset >= 0 && end - offset <= aSource.mDuration,
                      "Slice out of bounds");
           mChunks[mChunks.Length() - 1].mDuration += end - start;
         } else {
           mChunks.AppendElement(c)->SliceTo(start - offset, end - offset);
         }
       }
       offset = nextOffset;
     }
   }
 
-  Chunk* AppendChunk(StreamTime aDuration) {
+  Chunk* AppendChunk(TrackTime aDuration) {
     MOZ_ASSERT(aDuration >= 0);
     Chunk* c = mChunks.AppendElement();
     c->mDuration = aDuration;
     mDuration += aDuration;
     return c;
   }
 
-  void RemoveLeading(StreamTime aDuration, uint32_t aStartIndex) {
+  void RemoveLeading(TrackTime aDuration, uint32_t aStartIndex) {
     NS_ASSERTION(aDuration >= 0, "Can't remove negative duration");
-    StreamTime t = aDuration;
+    TrackTime t = aDuration;
     uint32_t chunksToRemove = 0;
     for (uint32_t i = aStartIndex; i < mChunks.Length() && t > 0; ++i) {
       Chunk* c = &mChunks[i];
       if (c->GetDuration() > t) {
         c->SliceTo(t, c->GetDuration());
         t = 0;
         break;
       }
@@ -440,19 +440,19 @@ class MediaSegmentBase : public MediaSeg
       mChunks.RemoveElementsAt(aStartIndex, chunksToRemove);
     }
     mDuration -= aDuration - t;
 
     MOZ_ASSERT(mChunks.Capacity() >= DEFAULT_SEGMENT_CAPACITY,
                "Capacity must be retained after removing chunks");
   }
 
-  void RemoveTrailing(StreamTime aKeep, uint32_t aStartIndex) {
+  void RemoveTrailing(TrackTime aKeep, uint32_t aStartIndex) {
     NS_ASSERTION(aKeep >= 0, "Can't keep negative duration");
-    StreamTime t = aKeep;
+    TrackTime t = aKeep;
     uint32_t i;
     for (i = aStartIndex; i < mChunks.Length(); ++i) {
       Chunk* c = &mChunks[i];
       if (c->GetDuration() > t) {
         c->SliceTo(0, t);
         break;
       }
       t -= c->GetDuration();
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -3,18 +3,18 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaStreamTrack.h"
 
 #include "DOMMediaStream.h"
 #include "MediaSegment.h"
 #include "MediaStreamError.h"
-#include "MediaStreamGraphImpl.h"
-#include "MediaStreamListener.h"
+#include "MediaTrackGraphImpl.h"
+#include "MediaTrackListener.h"
 #include "mozilla/BasePrincipal.h"
 #include "mozilla/dom/Promise.h"
 #include "nsContentUtils.h"
 #include "nsGlobalWindowInner.h"
 #include "nsIUUIDGenerator.h"
 #include "nsServiceManagerUtils.h"
 #include "systemservices/MediaUtils.h"
 
@@ -52,70 +52,70 @@ auto MediaStreamTrackSource::ApplyConstr
     -> RefPtr<ApplyConstraintsPromise> {
   return ApplyConstraintsPromise::CreateAndReject(
       MakeRefPtr<MediaMgrError>(MediaMgrError::Name::OverconstrainedError,
                                 NS_LITERAL_STRING("")),
       __func__);
 }
 
 /**
- * MSGListener monitors state changes of the media flowing through the
- * MediaStreamGraph.
+ * MTGListener monitors state changes of the media flowing through the
+ * MediaTrackGraph.
  *
  *
  * For changes to PrincipalHandle the following applies:
  *
  * When the main thread principal for a MediaStreamTrack changes, its principal
  * will be set to the combination of the previous principal and the new one.
  *
- * As a PrincipalHandle change later happens on the MediaStreamGraph thread, we
+ * As a PrincipalHandle change later happens on the MediaTrackGraph thread, we
  * will be notified. If the latest principal on main thread matches the
- * PrincipalHandle we just saw on MSG thread, we will set the track's principal
+ * PrincipalHandle we just saw on MTG thread, we will set the track's principal
  * to the new one.
  *
  * We know at this point that the old principal has been flushed out and data
  * under it cannot leak to consumers.
  *
  * In case of multiple changes to the main thread state, the track's principal
  * will be a combination of its old principal and all the new ones until the
- * latest main thread principal matches the PrincipalHandle on the MSG thread.
+ * latest main thread principal matches the PrincipalHandle on the MTG thread.
  */
-class MediaStreamTrack::MSGListener : public MediaStreamTrackListener {
+class MediaStreamTrack::MTGListener : public MediaTrackListener {
  public:
-  explicit MSGListener(MediaStreamTrack* aTrack) : mTrack(aTrack) {}
+  explicit MTGListener(MediaStreamTrack* aTrack) : mTrack(aTrack) {}
 
   void DoNotifyPrincipalHandleChanged(
       const PrincipalHandle& aNewPrincipalHandle) {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (!mTrack) {
       return;
     }
 
     mTrack->NotifyPrincipalHandleChanged(aNewPrincipalHandle);
   }
 
   void NotifyPrincipalHandleChanged(
-      MediaStreamGraph* aGraph,
+      MediaTrackGraph* aGraph,
       const PrincipalHandle& aNewPrincipalHandle) override {
     aGraph->DispatchToMainThreadStableState(
         NewRunnableMethod<StoreCopyPassByConstLRef<PrincipalHandle>>(
-            "dom::MediaStreamTrack::MSGListener::"
+            "dom::MediaStreamTrack::MTGListener::"
             "DoNotifyPrincipalHandleChanged",
-            this, &MSGListener::DoNotifyPrincipalHandleChanged,
+            this, &MTGListener::DoNotifyPrincipalHandleChanged,
             aNewPrincipalHandle));
   }
 
-  void NotifyRemoved(MediaStreamGraph* aGraph) override {
+  void NotifyRemoved(MediaTrackGraph* aGraph) override {
     // `mTrack` is a WeakPtr and must be destroyed on main thread.
-    // We dispatch ourselves to main thread here in case the MediaStreamGraph
+    // We dispatch ourselves to main thread here in case the MediaTrackGraph
     // is holding the last reference to us.
     aGraph->DispatchToMainThreadStableState(
-        NS_NewRunnableFunction("MediaStreamTrack::MSGListener::mTrackReleaser",
-                               [self = RefPtr<MSGListener>(this)]() {}));
+        NS_NewRunnableFunction("MediaStreamTrack::MTGListener::mTrackReleaser",
+                               [self = RefPtr<MTGListener>(this)]() {}));
   }
 
   void DoNotifyEnded() {
     MOZ_ASSERT(NS_IsMainThread());
 
     if (!mTrack) {
       return;
     }
@@ -127,20 +127,20 @@ class MediaStreamTrack::MSGListener : pu
     AbstractThread* mainThread =
         nsGlobalWindowInner::Cast(mTrack->GetParentObject())
             ->AbstractMainThreadFor(TaskCategory::Other);
     mainThread->Dispatch(NewRunnableMethod("MediaStreamTrack::OverrideEnded",
                                            mTrack.get(),
                                            &MediaStreamTrack::OverrideEnded));
   }
 
-  void NotifyEnded(MediaStreamGraph* aGraph) override {
+  void NotifyEnded(MediaTrackGraph* aGraph) override {
     aGraph->DispatchToMainThreadStableState(
-        NewRunnableMethod("MediaStreamTrack::MSGListener::DoNotifyEnded", this,
-                          &MSGListener::DoNotifyEnded));
+        NewRunnableMethod("MediaStreamTrack::MTGListener::DoNotifyEnded", this,
+                          &MTGListener::DoNotifyEnded));
   }
 
  protected:
   // Main thread only.
   WeakPtr<MediaStreamTrack> mTrack;
 };
 
 class MediaStreamTrack::TrackSink : public MediaStreamTrackSource::Sink {
@@ -178,49 +178,49 @@ class MediaStreamTrack::TrackSink : publ
     }
   }
 
  private:
   WeakPtr<MediaStreamTrack> mTrack;
 };
 
 MediaStreamTrack::MediaStreamTrack(nsPIDOMWindowInner* aWindow,
-                                   MediaStream* aInputStream,
+                                   mozilla::MediaTrack* aInputTrack,
                                    MediaStreamTrackSource* aSource,
                                    MediaStreamTrackState aReadyState,
                                    const MediaTrackConstraints& aConstraints)
     : mWindow(aWindow),
-      mInputStream(aInputStream),
+      mInputTrack(aInputTrack),
       mSource(aSource),
       mSink(MakeUnique<TrackSink>(this)),
       mPrincipal(aSource->GetPrincipal()),
       mReadyState(aReadyState),
       mEnabled(true),
       mMuted(false),
       mConstraints(aConstraints) {
   if (!Ended()) {
     GetSource().RegisterSink(mSink.get());
 
-    // Even if the input stream is destroyed we need mStream so that methods
+    // Even if the input track is destroyed we need mTrack so that methods
     // like AddListener still work. Keeping the number of paths to a minimum
     // also helps prevent bugs elsewhere. We'll be ended through the
     // MediaStreamTrackSource soon enough.
-    auto graph = mInputStream->IsDestroyed()
-                     ? MediaStreamGraph::GetInstanceIfExists(
-                           mWindow, mInputStream->mSampleRate)
-                     : mInputStream->Graph();
+    auto graph = mInputTrack->IsDestroyed()
+                     ? MediaTrackGraph::GetInstanceIfExists(
+                           mWindow, mInputTrack->mSampleRate)
+                     : mInputTrack->Graph();
     MOZ_DIAGNOSTIC_ASSERT(graph,
-                          "A destroyed input stream is only expected when "
+                          "A destroyed input track is only expected when "
                           "cloning, but since we're live there must be another "
                           "live track that is keeping the graph alive");
 
-    mStream = graph->CreateTrackUnionStream(mInputStream->mType);
-    mPort = mStream->AllocateInputPort(mInputStream);
-    mMSGListener = new MSGListener(this);
-    AddListener(mMSGListener);
+    mTrack = graph->CreateForwardedInputTrack(mInputTrack->mType);
+    mPort = mTrack->AllocateInputPort(mInputTrack);
+    mMTGListener = new MTGListener(this);
+    AddListener(mMTGListener);
   }
 
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
       do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
   nsID uuid;
   memset(&uuid, 0, sizeof(uuid));
@@ -233,23 +233,22 @@ MediaStreamTrack::MediaStreamTrack(nsPID
   mID = NS_ConvertASCIItoUTF16(chars);
 }
 
 MediaStreamTrack::~MediaStreamTrack() { Destroy(); }
 
 void MediaStreamTrack::Destroy() {
   SetReadyState(MediaStreamTrackState::Ended);
   // Remove all listeners -- avoid iterating over the list we're removing from
-  const nsTArray<RefPtr<MediaStreamTrackListener>> trackListeners(
-      mTrackListeners);
+  const nsTArray<RefPtr<MediaTrackListener>> trackListeners(mTrackListeners);
   for (auto listener : trackListeners) {
     RemoveListener(listener);
   }
   // Do the same as above for direct listeners
-  const nsTArray<RefPtr<DirectMediaStreamTrackListener>> directTrackListeners(
+  const nsTArray<RefPtr<DirectMediaTrackListener>> directTrackListeners(
       mDirectTrackListeners);
   for (auto listener : directTrackListeners) {
     RemoveDirectListener(listener);
   }
 }
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(MediaStreamTrack)
 
@@ -291,18 +290,18 @@ void MediaStreamTrack::SetEnabled(bool a
   }
 
   mEnabled = aEnabled;
 
   if (Ended()) {
     return;
   }
 
-  mStream->SetEnabled(mEnabled ? DisabledTrackMode::ENABLED
-                               : DisabledTrackMode::SILENCE_BLACK);
+  mTrack->SetEnabled(mEnabled ? DisabledTrackMode::ENABLED
+                              : DisabledTrackMode::SILENCE_BLACK);
   GetSource().SinkEnabledStateChanged();
 }
 
 void MediaStreamTrack::Stop() {
   LOG(LogLevel::Info, ("MediaStreamTrack %p Stop()", this));
 
   if (Ended()) {
     LOG(LogLevel::Warning, ("MediaStreamTrack %p Already ended", this));
@@ -376,29 +375,29 @@ already_AddRefed<Promise> MediaStreamTra
               return;  // Leave Promise pending after navigation by design.
             }
             promise->MaybeReject(
                 MakeRefPtr<MediaStreamError>(mWindow, *aError));
           });
   return promise.forget();
 }
 
-ProcessedMediaStream* MediaStreamTrack::GetStream() const {
+ProcessedMediaTrack* MediaStreamTrack::GetTrack() const {
   MOZ_DIAGNOSTIC_ASSERT(!Ended());
-  return mStream;
+  return mTrack;
 }
 
-MediaStreamGraph* MediaStreamTrack::Graph() const {
+MediaTrackGraph* MediaStreamTrack::Graph() const {
   MOZ_DIAGNOSTIC_ASSERT(!Ended());
-  return mStream->Graph();
+  return mTrack->Graph();
 }
 
-MediaStreamGraphImpl* MediaStreamTrack::GraphImpl() const {
+MediaTrackGraphImpl* MediaStreamTrack::GraphImpl() const {
   MOZ_DIAGNOSTIC_ASSERT(!Ended());
-  return mStream->GraphImpl();
+  return mTrack->GraphImpl();
 }
 
 void MediaStreamTrack::SetPrincipal(nsIPrincipal* aPrincipal) {
   if (aPrincipal == mPrincipal) {
     return;
   }
   mPrincipal = aPrincipal;
 
@@ -425,17 +424,17 @@ void MediaStreamTrack::PrincipalChanged(
     SetPrincipal(newPrincipal);
   }
 }
 
 void MediaStreamTrack::NotifyPrincipalHandleChanged(
     const PrincipalHandle& aNewPrincipalHandle) {
   PrincipalHandle handle(aNewPrincipalHandle);
   LOG(LogLevel::Info, ("MediaStreamTrack %p principalHandle changed on "
-                       "MediaStreamGraph thread to %p. Current principal: %p, "
+                       "MediaTrackGraph thread to %p. Current principal: %p, "
                        "pending: %p",
                        this, GetPrincipalFromHandle(handle), mPrincipal.get(),
                        mPendingPrincipal.get()));
   if (PrincipalHandleMatches(handle, mPendingPrincipal)) {
     SetPrincipal(mPendingPrincipal);
     mPendingPrincipal = nullptr;
   }
 }
@@ -525,28 +524,28 @@ void MediaStreamTrack::SetReadyState(Med
     return;
   }
 
   if (mReadyState == MediaStreamTrackState::Live &&
       aState == MediaStreamTrackState::Ended) {
     if (mSource) {
       mSource->UnregisterSink(mSink.get());
     }
-    if (mMSGListener) {
-      RemoveListener(mMSGListener);
+    if (mMTGListener) {
+      RemoveListener(mMTGListener);
     }
     if (mPort) {
       mPort->Destroy();
     }
-    if (mStream) {
-      mStream->Destroy();
+    if (mTrack) {
+      mTrack->Destroy();
     }
     mPort = nullptr;
-    mStream = nullptr;
-    mMSGListener = nullptr;
+    mTrack = nullptr;
+    mMTGListener = nullptr;
   }
 
   mReadyState = aState;
 }
 
 void MediaStreamTrack::OverrideEnded() {
   MOZ_ASSERT(NS_IsMainThread());
 
@@ -558,66 +557,65 @@ void MediaStreamTrack::OverrideEnded() {
 
   SetReadyState(MediaStreamTrackState::Ended);
 
   NotifyEnded();
 
   DispatchTrustedEvent(NS_LITERAL_STRING("ended"));
 }
 
-void MediaStreamTrack::AddListener(MediaStreamTrackListener* aListener) {
+void MediaStreamTrack::AddListener(MediaTrackListener* aListener) {
   LOG(LogLevel::Debug,
       ("MediaStreamTrack %p adding listener %p", this, aListener));
   mTrackListeners.AppendElement(aListener);
 
   if (Ended()) {
     return;
   }
-  mStream->AddListener(aListener);
+  mTrack->AddListener(aListener);
 }
 
-void MediaStreamTrack::RemoveListener(MediaStreamTrackListener* aListener) {
+void MediaStreamTrack::RemoveListener(MediaTrackListener* aListener) {
   LOG(LogLevel::Debug,
       ("MediaStreamTrack %p removing listener %p", this, aListener));
   mTrackListeners.RemoveElement(aListener);
 
   if (Ended()) {
     return;
   }
-  mStream->RemoveListener(aListener);
+  mTrack->RemoveListener(aListener);
 }
 
-void MediaStreamTrack::AddDirectListener(
-    DirectMediaStreamTrackListener* aListener) {
+void MediaStreamTrack::AddDirectListener(DirectMediaTrackListener* aListener) {
   LOG(LogLevel::Debug, ("MediaStreamTrack %p (%s) adding direct listener %p to "
-                        "stream %p",
+                        "track %p",
                         this, AsAudioStreamTrack() ? "audio" : "video",
-                        aListener, mStream.get()));
+                        aListener, mTrack.get()));
   mDirectTrackListeners.AppendElement(aListener);
 
   if (Ended()) {
     return;
   }
-  mStream->AddDirectListener(aListener);
+  mTrack->AddDirectListener(aListener);
 }
 
 void MediaStreamTrack::RemoveDirectListener(
-    DirectMediaStreamTrackListener* aListener) {
+    DirectMediaTrackListener* aListener) {
   LOG(LogLevel::Debug,
-      ("MediaStreamTrack %p removing direct listener %p from stream %p", this,
-       aListener, mStream.get()));
+      ("MediaStreamTrack %p removing direct listener %p from track %p", this,
+       aListener, mTrack.get()));
   mDirectTrackListeners.RemoveElement(aListener);
 
   if (Ended()) {
     return;
   }
-  mStream->RemoveDirectListener(aListener);
+  mTrack->RemoveDirectListener(aListener);
 }
 
 already_AddRefed<MediaInputPort> MediaStreamTrack::ForwardTrackContentsTo(
-    ProcessedMediaStream* aStream) {
+    ProcessedMediaTrack* aTrack) {
   MOZ_ASSERT(NS_IsMainThread());
-  MOZ_RELEASE_ASSERT(aStream);
-  return aStream->AllocateInputPort(mStream);
+  MOZ_RELEASE_ASSERT(aTrack);
+  return aTrack->AllocateInputPort(mTrack);
 }
 
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/media/MediaStreamTrack.h
+++ b/dom/media/MediaStreamTrack.h
@@ -18,25 +18,25 @@
 #include "nsID.h"
 #include "nsIPrincipal.h"
 
 namespace mozilla {
 
 class DOMMediaStream;
 class MediaEnginePhotoCallback;
 class MediaInputPort;
-class MediaStream;
-class MediaStreamGraph;
-class MediaStreamGraphImpl;
-class MediaStreamTrackListener;
-class DirectMediaStreamTrackListener;
+class MediaTrack;
+class MediaTrackGraph;
+class MediaTrackGraphImpl;
+class MediaTrackListener;
+class DirectMediaTrackListener;
 class PeerConnectionImpl;
 class PeerConnectionMedia;
 class PeerIdentity;
-class ProcessedMediaStream;
+class ProcessedMediaTrack;
 class RemoteSourceStreamInfo;
 class SourceStreamInfo;
 class MediaMgrError;
 
 namespace dom {
 
 class AudioStreamTrack;
 class VideoStreamTrack;
@@ -140,17 +140,17 @@ class MediaStreamTrackSource : public ns
 
   /**
    * MediaStreamTrack::GetLabel (see spec) calls through to here.
    */
   void GetLabel(nsAString& aLabel) { aLabel.Assign(mLabel); }
 
   /**
    * Forwards a photo request to backends that support it. Other backends return
-   * NS_ERROR_NOT_IMPLEMENTED to indicate that a MediaStreamGraph-based fallback
+   * NS_ERROR_NOT_IMPLEMENTED to indicate that a MediaTrackGraph-based fallback
    * should be used.
    */
   virtual nsresult TakePhoto(MediaEnginePhotoCallback*) const {
     return NS_ERROR_NOT_IMPLEMENTED;
   }
 
   typedef MozPromise<bool /* aIgnored */, RefPtr<MediaMgrError>, true>
       ApplyConstraintsPromise;
@@ -358,65 +358,65 @@ class MediaStreamTrackConsumer
    * Unlike the "ended" event exposed to script this is called for any reason,
    * including MediaStreamTrack::Stop().
    */
   virtual void NotifyEnded(MediaStreamTrack* aTrack){};
 };
 
 // clang-format off
 /**
- * DOM wrapper for MediaStreamGraph-MediaStreams.
+ * DOM wrapper for MediaTrackGraph-MediaTracks.
  *
  * To account for cloning, a MediaStreamTrack wraps two internal (and chained)
- * MediaStreams:
- *   1. mInputStream
+ * MediaTracks:
+ *   1. mInputTrack
  *      - Controlled by the producer of the data in the track. The producer
- *        decides on lifetime of the MediaStream and the track inside it.
- *      - It can be any type of MediaStream.
+ *        decides on lifetime of the MediaTrack and the track inside it.
+ *      - It can be any type of MediaTrack.
  *      - Contains one track only.
- *   2. mStream
- *      - A TrackUnionStream representing this MediaStreamTrack.
- *      - Its data is piped from mInputStream through mPort.
+ *   2. mTrack
+ *      - A ForwardedInputTrack representing this MediaStreamTrack.
+ *      - Its data is piped from mInputTrack through mPort.
  *      - Contains one track only.
  *      - When this MediaStreamTrack is enabled/disabled this is reflected in
- *        the chunks in the track in mStream.
- *      - When this MediaStreamTrack has ended, mStream gets destroyed.
- *        Note that mInputStream is unaffected, such that any clones of mStream
+ *        the chunks in the track in mTrack.
+ *      - When this MediaStreamTrack has ended, mTrack gets destroyed.
+ *        Note that mInputTrack is unaffected, such that any clones of mTrack
  *        can live on. When all clones are ended, this is signaled to the
  *        producer via our MediaStreamTrackSource. It is then likely to destroy
- *        mInputStream.
+ *        mInputTrack.
  *
  * A graphical representation of how tracks are connected when cloned follows:
  *
  * MediaStreamTrack A
- *       mInputStream     mStream
+ *       mInputTrack     mTrack
  *            t1 ---------> t1
  *               \
  *                -----
  * MediaStreamTrack B  \  (clone of A)
- *       mInputStream   \ mStream
+ *       mInputTrack   \ mTrack
  *            *          -> t1
  *
- *   (*) is a copy of A's mInputStream
+ *   (*) is a copy of A's mInputTrack
  */
 // clang-format on
 class MediaStreamTrack : public DOMEventTargetHelper,
                          public SupportsWeakPtr<MediaStreamTrack> {
   // PeerConnection and friends need to know our owning DOMStream and track id.
   friend class mozilla::PeerConnectionImpl;
   friend class mozilla::PeerConnectionMedia;
   friend class mozilla::SourceStreamInfo;
   friend class mozilla::RemoteSourceStreamInfo;
 
-  class MSGListener;
+  class MTGListener;
   class TrackSink;
 
  public:
   MediaStreamTrack(
-      nsPIDOMWindowInner* aWindow, MediaStream* aInputStream,
+      nsPIDOMWindowInner* aWindow, mozilla::MediaTrack* aInputTrack,
       MediaStreamTrackSource* aSource,
       MediaStreamTrackState aReadyState = MediaStreamTrackState::Live,
       const MediaTrackConstraints& aConstraints = MediaTrackConstraints());
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamTrack,
                                            DOMEventTargetHelper)
 
@@ -467,19 +467,19 @@ class MediaStreamTrack : public DOMEvent
 
   /**
    * Get this track's PeerIdentity.
    */
   const PeerIdentity* GetPeerIdentity() const {
     return GetSource().GetPeerIdentity();
   }
 
-  ProcessedMediaStream* GetStream() const;
-  MediaStreamGraph* Graph() const;
-  MediaStreamGraphImpl* GraphImpl() const;
+  ProcessedMediaTrack* GetTrack() const;
+  MediaTrackGraph* Graph() const;
+  MediaTrackGraphImpl* GraphImpl() const;
 
   MediaStreamTrackSource& GetSource() const {
     MOZ_RELEASE_ASSERT(mSource,
                        "The track source is only removed on destruction");
     return *mSource;
   }
 
   // Webrtc allows the remote side to name tracks whatever it wants, and we
@@ -513,64 +513,64 @@ class MediaStreamTrack : public DOMEvent
   void AddConsumer(MediaStreamTrackConsumer* aConsumer);
 
   /**
    * Remove an added MediaStreamTrackConsumer from this track.
    */
   void RemoveConsumer(MediaStreamTrackConsumer* aConsumer);
 
   /**
-   * Adds a MediaStreamTrackListener to the MediaStreamGraph representation of
+   * Adds a MediaTrackListener to the MediaTrackGraph representation of
    * this track.
    */
-  virtual void AddListener(MediaStreamTrackListener* aListener);
+  virtual void AddListener(MediaTrackListener* aListener);
 
   /**
-   * Removes a MediaStreamTrackListener from the MediaStreamGraph representation
+   * Removes a MediaTrackListener from the MediaTrackGraph representation
    * of this track.
    */
-  void RemoveListener(MediaStreamTrackListener* aListener);
+  void RemoveListener(MediaTrackListener* aListener);
 
   /**
    * Attempts to add a direct track listener to this track.
    * Callers must listen to the NotifyInstalled event to know if installing
-   * the listener succeeded (tracks originating from SourceMediaStreams) or
+   * the listener succeeded (tracks originating from SourceMediaTracks) or
    * failed (e.g., WebAudio originated tracks).
    */
-  virtual void AddDirectListener(DirectMediaStreamTrackListener* aListener);
-  void RemoveDirectListener(DirectMediaStreamTrackListener* aListener);
+  virtual void AddDirectListener(DirectMediaTrackListener* aListener);
+  void RemoveDirectListener(DirectMediaTrackListener* aListener);
 
   /**
    * Sets up a MediaInputPort from the underlying track that this
-   * MediaStreamTrack represents, to aStream, and returns it.
+   * MediaStreamTrack represents, to aTrack, and returns it.
    */
   already_AddRefed<MediaInputPort> ForwardTrackContentsTo(
-      ProcessedMediaStream* aStream);
+      ProcessedMediaTrack* aTrack);
 
  protected:
   virtual ~MediaStreamTrack();
 
   /**
    * Forces the ready state to a particular value, for instance when we're
    * cloning an already ended track.
    */
   void SetReadyState(MediaStreamTrackState aState);
 
   /**
-   * Notified by the MediaStreamGraph, through our owning MediaStream on the
+   * Notified by the MediaTrackGraph, through our owning MediaStream on the
    * main thread.
    *
    * Note that this sets the track to ended and raises the "ended" event
    * synchronously.
    */
   void OverrideEnded();
 
   /**
-   * Called by the MSGListener when this track's PrincipalHandle changes on
-   * the MediaStreamGraph thread. When the PrincipalHandle matches the pending
+   * Called by the MTGListener when this track's PrincipalHandle changes on
+   * the MediaTrackGraph thread. When the PrincipalHandle matches the pending
    * principal we know that the principal change has propagated to consumers.
    */
   void NotifyPrincipalHandleChanged(const PrincipalHandle& aNewPrincipalHandle);
 
   /**
    * Called when this track's readyState transitions to "ended".
    * Notifies all MediaStreamTrackConsumers that this track ended.
    */
@@ -594,48 +594,48 @@ class MediaStreamTrack : public DOMEvent
   virtual void Destroy();
 
   /**
    * Sets the principal and notifies PrincipalChangeObservers if it changes.
    */
   void SetPrincipal(nsIPrincipal* aPrincipal);
 
   /**
-   * Creates a new MediaStreamTrack with the same kind, input stream, input
+   * Creates a new MediaStreamTrack with the same kind, input track, input
    * track ID and source as this MediaStreamTrack.
    */
   virtual already_AddRefed<MediaStreamTrack> CloneInternal() = 0;
 
   nsTArray<PrincipalChangeObserver<MediaStreamTrack>*>
       mPrincipalChangeObservers;
 
   nsTArray<WeakPtr<MediaStreamTrackConsumer>> mConsumers;
 
   // We need this to track our parent object.
   nsCOMPtr<nsPIDOMWindowInner> mWindow;
 
-  // The input MediaStream assigned us by the data producer.
+  // The input MediaTrack assigned us by the data producer.
   // Owned by the producer.
-  const RefPtr<MediaStream> mInputStream;
-  // The MediaStream representing this MediaStreamTrack in the MediaStreamGraph.
+  const RefPtr<mozilla::MediaTrack> mInputTrack;
+  // The MediaTrack representing this MediaStreamTrack in the MediaTrackGraph.
   // Set on construction if we're live. Valid until we end. Owned by us.
-  RefPtr<ProcessedMediaStream> mStream;
-  // The MediaInputPort connecting mInputStream to mStream. Set on construction
-  // if mInputStream is non-destroyed and we're live. Valid until we end. Owned
+  RefPtr<ProcessedMediaTrack> mTrack;
+  // The MediaInputPort connecting mInputTrack to mTrack. Set on construction
+  // if mInputTrack is non-destroyed and we're live. Valid until we end. Owned
   // by us.
   RefPtr<MediaInputPort> mPort;
   RefPtr<MediaStreamTrackSource> mSource;
   const UniquePtr<TrackSink> mSink;
   nsCOMPtr<nsIPrincipal> mPrincipal;
   nsCOMPtr<nsIPrincipal> mPendingPrincipal;
-  RefPtr<MSGListener> mMSGListener;
-  // Keep tracking MediaStreamTrackListener and DirectMediaStreamTrackListener,
+  RefPtr<MTGListener> mMTGListener;
+  // Keep tracking MediaTrackListener and DirectMediaTrackListener,
   // so we can remove them in |Destory|.
-  nsTArray<RefPtr<MediaStreamTrackListener>> mTrackListeners;
-  nsTArray<RefPtr<DirectMediaStreamTrackListener>> mDirectTrackListeners;
+  nsTArray<RefPtr<MediaTrackListener>> mTrackListeners;
+  nsTArray<RefPtr<DirectMediaTrackListener>> mDirectTrackListeners;
   nsString mID;
   MediaStreamTrackState mReadyState;
   bool mEnabled;
   bool mMuted;
   dom::MediaTrackConstraints mConstraints;
 };
 
 }  // namespace dom
--- a/dom/media/MediaStreamWindowCapturer.cpp
+++ b/dom/media/MediaStreamWindowCapturer.cpp
@@ -2,27 +2,27 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaStreamWindowCapturer.h"
 
 #include "AudioStreamTrack.h"
 #include "DOMMediaStream.h"
-#include "MediaStreamGraph.h"
+#include "MediaTrackGraph.h"
 
 namespace mozilla {
 using dom::AudioStreamTrack;
 using dom::MediaStreamTrack;
 
 MediaStreamWindowCapturer::CapturedTrack::CapturedTrack(
     MediaStreamTrack* aTrack, uint64_t aWindowID)
     : mTrack(aTrack),
-      mPort(aTrack->Graph()->ConnectToCaptureStream(aWindowID,
-                                                    aTrack->GetStream())) {}
+      mPort(aTrack->Graph()->ConnectToCaptureTrack(aWindowID,
+                                                   aTrack->GetTrack())) {}
 
 MediaStreamWindowCapturer::CapturedTrack::~CapturedTrack() { mPort->Destroy(); }
 
 MediaStreamWindowCapturer::MediaStreamWindowCapturer(DOMMediaStream* aStream,
                                                      uint64_t aWindowId)
     : mStream(aStream), mWindowId(aWindowId) {
   mStream->RegisterTrackListener(this);
   nsTArray<RefPtr<AudioStreamTrack>> tracks;
--- a/dom/media/MediaStreamWindowCapturer.h
+++ b/dom/media/MediaStreamWindowCapturer.h
@@ -9,19 +9,21 @@
 #include "DOMMediaStream.h"
 
 namespace mozilla {
 namespace dom {
 class AudioStreamTrack;
 class MediaStreamTrack;
 }  // namespace dom
 
+class MediaInputPort;
+
 /**
  * Given a DOMMediaStream and a window id, this class will pipe the audio from
- * all live audio tracks in the stream to the MediaStreamGraph's window capture
+ * all live audio tracks in the stream to the MediaTrackGraph's window capture
  * mechanism.
  */
 class MediaStreamWindowCapturer : public DOMMediaStream::TrackListener {
  public:
   MediaStreamWindowCapturer(DOMMediaStream* aStream, uint64_t aWindowId);
   ~MediaStreamWindowCapturer();
 
   void NotifyTrackAdded(const RefPtr<dom::MediaStreamTrack>& aTrack) override;
--- a/dom/media/MediaTrack.cpp
+++ b/dom/media/MediaTrack.cpp
@@ -18,19 +18,19 @@ MediaTrack::MediaTrack(nsIGlobalObject* 
     : DOMEventTargetHelper(aOwnerGlobal),
       mId(aId),
       mKind(aKind),
       mLabel(aLabel),
       mLanguage(aLanguage) {}
 
 MediaTrack::~MediaTrack() {}
 
-NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaTrack, DOMEventTargetHelper, mList)
+NS_IMPL_CYCLE_COLLECTION_INHERITED(dom::MediaTrack, DOMEventTargetHelper, mList)
 
-NS_IMPL_ADDREF_INHERITED(MediaTrack, DOMEventTargetHelper)
-NS_IMPL_RELEASE_INHERITED(MediaTrack, DOMEventTargetHelper)
-NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(MediaTrack)
+NS_IMPL_ADDREF_INHERITED(dom::MediaTrack, DOMEventTargetHelper)
+NS_IMPL_RELEASE_INHERITED(dom::MediaTrack, DOMEventTargetHelper)
+NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(dom::MediaTrack)
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
 void MediaTrack::SetTrackList(MediaTrackList* aList) { mList = aList; }
 
 }  // namespace dom
 }  // namespace mozilla
--- a/dom/media/MediaTrack.h
+++ b/dom/media/MediaTrack.h
@@ -32,17 +32,18 @@ class AudioTrack;
  */
 class MediaTrack : public DOMEventTargetHelper {
  public:
   MediaTrack(nsIGlobalObject* aOwnerGlobal, const nsAString& aId,
              const nsAString& aKind, const nsAString& aLabel,
              const nsAString& aLanguage);
 
   NS_DECL_ISUPPORTS_INHERITED
-  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaTrack, DOMEventTargetHelper)
+  NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(dom::MediaTrack,
+                                           DOMEventTargetHelper)
 
   enum {
     DEFAULT = 0,
     FIRE_NO_EVENTS = 1 << 0,
   };
   // The default behavior of enabling an audio track or selecting a video track
   // fires a change event and notifies its media resource about the changes.
   // It should not fire any events when fetching media resource.
rename from dom/media/MediaStreamGraph.cpp
rename to dom/media/MediaTrackGraph.cpp
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaTrackGraph.cpp
@@ -1,32 +1,32 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-*/
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
-#include "MediaStreamGraphImpl.h"
+#include "MediaTrackGraphImpl.h"
 #include "mozilla/MathAlgorithms.h"
 #include "mozilla/Unused.h"
 
 #include "AudioSegment.h"
 #include "VideoSegment.h"
 #include "nsContentUtils.h"
 #include "nsIObserver.h"
 #include "nsPrintfCString.h"
 #include "nsServiceManagerUtils.h"
 #include "prerror.h"
 #include "mozilla/Logging.h"
 #include "mozilla/Attributes.h"
-#include "TrackUnionStream.h"
+#include "ForwardedInputTrack.h"
 #include "ImageContainer.h"
-#include "AudioCaptureStream.h"
-#include "AudioNodeStream.h"
-#include "AudioNodeExternalInputStream.h"
-#include "MediaStreamListener.h"
+#include "AudioCaptureTrack.h"
+#include "AudioNodeTrack.h"
+#include "AudioNodeExternalInputTrack.h"
+#include "MediaTrackListener.h"
 #include "mozilla/dom/BaseAudioContextBinding.h"
 #include "mozilla/media/MediaUtils.h"
 #include <algorithm>
 #include "GeckoProfiler.h"
 #include "VideoFrameContainer.h"
 #include "mozilla/AbstractThread.h"
 #include "mozilla/StaticPrefs_dom.h"
 #include "mozilla/Unused.h"
@@ -38,267 +38,265 @@
 #include "webaudio/blink/DenormalDisabler.h"
 #include "webaudio/blink/HRTFDatabaseLoader.h"
 
 using namespace mozilla::layers;
 using namespace mozilla::dom;
 using namespace mozilla::gfx;
 using namespace mozilla::media;
 
-mozilla::AsyncLogger gMSGTraceLogger("MSGTracing");
+mozilla::AsyncLogger gMTGTraceLogger("MTGTracing");
 
 namespace mozilla {
 
-LazyLogModule gMediaStreamGraphLog("MediaStreamGraph");
+LazyLogModule gMediaTrackGraphLog("MediaTrackGraph");
 #ifdef LOG
 #  undef LOG
 #endif  // LOG
-#define LOG(type, msg) MOZ_LOG(gMediaStreamGraphLog, type, msg)
+#define LOG(type, msg) MOZ_LOG(gMediaTrackGraphLog, type, msg)
 
 /**
  * A hash table containing the graph instances, one per document.
  *
  * The key is a hash of nsPIDOMWindowInner, see `WindowToHash`.
  */
-static nsDataHashtable<nsUint32HashKey, MediaStreamGraphImpl*> gGraphs;
-
-MediaStreamGraphImpl::~MediaStreamGraphImpl() {
-  MOZ_ASSERT(mStreams.IsEmpty() && mSuspendedStreams.IsEmpty(),
-             "All streams should have been destroyed by messages from the main "
+static nsDataHashtable<nsUint32HashKey, MediaTrackGraphImpl*> gGraphs;
+
+MediaTrackGraphImpl::~MediaTrackGraphImpl() {
+  MOZ_ASSERT(mTracks.IsEmpty() && mSuspendedTracks.IsEmpty(),
+             "All tracks should have been destroyed by messages from the main "
              "thread");
-  LOG(LogLevel::Debug, ("MediaStreamGraph %p destroyed", this));
-  LOG(LogLevel::Debug, ("MediaStreamGraphImpl::~MediaStreamGraphImpl"));
+  LOG(LogLevel::Debug, ("MediaTrackGraph %p destroyed", this));
+  LOG(LogLevel::Debug, ("MediaTrackGraphImpl::~MediaTrackGraphImpl"));
 
 #ifdef TRACING
-  gMSGTraceLogger.Stop();
+  gMTGTraceLogger.Stop();
 #endif
 }
 
-void MediaStreamGraphImpl::AddStreamGraphThread(MediaStream* aStream) {
+void MediaTrackGraphImpl::AddTrackGraphThread(MediaTrack* aTrack) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
-  aStream->mStartTime = mProcessedTime;
-
-  if (aStream->IsSuspended()) {
-    mSuspendedStreams.AppendElement(aStream);
+  aTrack->mStartTime = mProcessedTime;
+
+  if (aTrack->IsSuspended()) {
+    mSuspendedTracks.AppendElement(aTrack);
     LOG(LogLevel::Debug,
-        ("%p: Adding media stream %p, in the suspended stream array", this,
-         aStream));
+        ("%p: Adding media track %p, in the suspended track array", this,
+         aTrack));
   } else {
-    mStreams.AppendElement(aStream);
-    LOG(LogLevel::Debug, ("%p:  Adding media stream %p, count %zu", this,
-                          aStream, mStreams.Length()));
+    mTracks.AppendElement(aTrack);
+    LOG(LogLevel::Debug, ("%p:  Adding media track %p, count %zu", this, aTrack,
+                          mTracks.Length()));
   }
 
-  SetStreamOrderDirty();
+  SetTrackOrderDirty();
 }
 
-void MediaStreamGraphImpl::RemoveStreamGraphThread(MediaStream* aStream) {
+void MediaTrackGraphImpl::RemoveTrackGraphThread(MediaTrack* aTrack) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
-  // Remove references in mStreamUpdates before we allow aStream to die.
+  // Remove references in mTrackUpdates before we allow aTrack to die.
   // Pending updates are not needed (since the main thread has already given
-  // up the stream) so we will just drop them.
+  // up the track) so we will just drop them.
   {
     MonitorAutoLock lock(mMonitor);
-    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
-      if (mStreamUpdates[i].mStream == aStream) {
-        mStreamUpdates[i].mStream = nullptr;
+    for (uint32_t i = 0; i < mTrackUpdates.Length(); ++i) {
+      if (mTrackUpdates[i].mTrack == aTrack) {
+        mTrackUpdates[i].mTrack = nullptr;
       }
     }
   }
 
   // Ensure that mFirstCycleBreaker and mMixer are updated when necessary.
-  SetStreamOrderDirty();
-
-  if (aStream->IsSuspended()) {
-    mSuspendedStreams.RemoveElement(aStream);
+  SetTrackOrderDirty();
+
+  if (aTrack->IsSuspended()) {
+    mSuspendedTracks.RemoveElement(aTrack);
   } else {
-    mStreams.RemoveElement(aStream);
+    mTracks.RemoveElement(aTrack);
   }
 
-  LOG(LogLevel::Debug, ("%p: Removed media stream %p, count %zu", this, aStream,
-                        mStreams.Length()));
-
-  NS_RELEASE(aStream);  // probably destroying it
+  LOG(LogLevel::Debug, ("%p: Removed media track %p, count %zu", this, aTrack,
+                        mTracks.Length()));
+
+  NS_RELEASE(aTrack);  // probably destroying it
 }
 
-StreamTime MediaStreamGraphImpl::GraphTimeToStreamTimeWithBlocking(
-    const MediaStream* aStream, GraphTime aTime) const {
+TrackTime MediaTrackGraphImpl::GraphTimeToTrackTimeWithBlocking(
+    const MediaTrack* aTrack, GraphTime aTime) const {
   MOZ_ASSERT(
       aTime <= mStateComputedTime,
       "Don't ask about times where we haven't made blocking decisions yet");
-  return std::max<StreamTime>(
-      0, std::min(aTime, aStream->mStartBlocking) - aStream->mStartTime);
+  return std::max<TrackTime>(
+      0, std::min(aTime, aTrack->mStartBlocking) - aTrack->mStartTime);
 }
 
-GraphTime MediaStreamGraphImpl::IterationEnd() const {
+GraphTime MediaTrackGraphImpl::IterationEnd() const {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
   return CurrentDriver()->IterationEnd();
 }
 
-void MediaStreamGraphImpl::UpdateCurrentTimeForStreams(
+void MediaTrackGraphImpl::UpdateCurrentTimeForTracks(
     GraphTime aPrevCurrentTime) {
   MOZ_ASSERT(OnGraphThread());
-  for (MediaStream* stream : AllStreams()) {
+  for (MediaTrack* track : AllTracks()) {
     // Shouldn't have already notified of ended *and* have output!
-    MOZ_ASSERT_IF(stream->mStartBlocking > aPrevCurrentTime,
-                  !stream->mNotifiedEnded);
+    MOZ_ASSERT_IF(track->mStartBlocking > aPrevCurrentTime,
+                  !track->mNotifiedEnded);
 
     // Calculate blocked time and fire Blocked/Unblocked events
-    GraphTime blockedTime = mStateComputedTime - stream->mStartBlocking;
+    GraphTime blockedTime = mStateComputedTime - track->mStartBlocking;
     NS_ASSERTION(blockedTime >= 0, "Error in blocking time");
-    stream->AdvanceTimeVaryingValuesToCurrentTime(mStateComputedTime,
-                                                  blockedTime);
+    track->AdvanceTimeVaryingValuesToCurrentTime(mStateComputedTime,
+                                                 blockedTime);
     LOG(LogLevel::Verbose,
-        ("%p: MediaStream %p bufferStartTime=%f blockedTime=%f", this, stream,
-         MediaTimeToSeconds(stream->mStartTime),
+        ("%p: MediaTrack %p bufferStartTime=%f blockedTime=%f", this, track,
+         MediaTimeToSeconds(track->mStartTime),
          MediaTimeToSeconds(blockedTime)));
-    stream->mStartBlocking = mStateComputedTime;
-
-    StreamTime streamCurrentTime =
-        stream->GraphTimeToStreamTime(mStateComputedTime);
-    if (stream->mEnded) {
-      MOZ_ASSERT(stream->GetEnd() <= streamCurrentTime);
-      if (!stream->mNotifiedEnded) {
+    track->mStartBlocking = mStateComputedTime;
+
+    TrackTime trackCurrentTime =
+        track->GraphTimeToTrackTime(mStateComputedTime);
+    if (track->mEnded) {
+      MOZ_ASSERT(track->GetEnd() <= trackCurrentTime);
+      if (!track->mNotifiedEnded) {
         // Playout of this track ended and listeners have not been notified.
-        stream->mNotifiedEnded = true;
-        SetStreamOrderDirty();
-        for (const auto& listener : stream->mTrackListeners) {
-          listener->NotifyOutput(this, stream->GetEnd());
+        track->mNotifiedEnded = true;
+        SetTrackOrderDirty();
+        for (const auto& listener : track->mTrackListeners) {
+          listener->NotifyOutput(this, track->GetEnd());
           listener->NotifyEnded(this);
         }
       }
     } else {
-      for (const auto& listener : stream->mTrackListeners) {
-        listener->NotifyOutput(this, streamCurrentTime);
+      for (const auto& listener : track->mTrackListeners) {
+        listener->NotifyOutput(this, trackCurrentTime);
       }
     }
   }
 }
 
 template <typename C, typename Chunk>
-void MediaStreamGraphImpl::ProcessChunkMetadataForInterval(MediaStream* aStream,
-                                                           C& aSegment,
-                                                           StreamTime aStart,
-                                                           StreamTime aEnd) {
+void MediaTrackGraphImpl::ProcessChunkMetadataForInterval(MediaTrack* aTrack,
+                                                          C& aSegment,
+                                                          TrackTime aStart,
+                                                          TrackTime aEnd) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
-  MOZ_ASSERT(aStream);
-
-  StreamTime offset = 0;
+  MOZ_ASSERT(aTrack);
+
+  TrackTime offset = 0;
   for (typename C::ConstChunkIterator chunk(aSegment); !chunk.IsEnded();
        chunk.Next()) {
     if (offset >= aEnd) {
       break;
     }
     offset += chunk->GetDuration();
     if (chunk->IsNull() || offset < aStart) {
       continue;
     }
     const PrincipalHandle& principalHandle = chunk->GetPrincipalHandle();
     if (principalHandle != aSegment.GetLastPrincipalHandle()) {
       aSegment.SetLastPrincipalHandle(principalHandle);
       LOG(LogLevel::Debug,
-          ("%p: MediaStream %p, principalHandle "
+          ("%p: MediaTrack %p, principalHandle "
            "changed in %sChunk with duration %lld",
-           this, aStream,
+           this, aTrack,
            aSegment.GetType() == MediaSegment::AUDIO ? "Audio" : "Video",
            (long long)chunk->GetDuration()));
-      for (const auto& listener : aStream->mTrackListeners) {
+      for (const auto& listener : aTrack->mTrackListeners) {
         listener->NotifyPrincipalHandleChanged(this, principalHandle);
       }
     }
   }
 }
 
-void MediaStreamGraphImpl::ProcessChunkMetadata(GraphTime aPrevCurrentTime) {
+void MediaTrackGraphImpl::ProcessChunkMetadata(GraphTime aPrevCurrentTime) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
-  for (MediaStream* stream : AllStreams()) {
-    StreamTime iterationStart = stream->GraphTimeToStreamTime(aPrevCurrentTime);
-    StreamTime iterationEnd = stream->GraphTimeToStreamTime(mProcessedTime);
-    if (!stream->mSegment) {
+  for (MediaTrack* track : AllTracks()) {
+    TrackTime iterationStart = track->GraphTimeToTrackTime(aPrevCurrentTime);
+    TrackTime iterationEnd = track->GraphTimeToTrackTime(mProcessedTime);
+    if (!track->mSegment) {
       continue;
     }
-    if (stream->mType == MediaSegment::AUDIO) {
+    if (track->mType == MediaSegment::AUDIO) {
       ProcessChunkMetadataForInterval<AudioSegment, AudioChunk>(
-          stream, *stream->GetData<AudioSegment>(), iterationStart,
-          iterationEnd);
-    } else if (stream->mType == MediaSegment::VIDEO) {
+          track, *track->GetData<AudioSegment>(), iterationStart, iterationEnd);
+    } else if (track->mType == MediaSegment::VIDEO) {
       ProcessChunkMetadataForInterval<VideoSegment, VideoChunk>(
-          stream, *stream->GetData<VideoSegment>(), iterationStart,
-          iterationEnd);
+          track, *track->GetData<VideoSegment>(), iterationStart, iterationEnd);
     } else {
       MOZ_CRASH("Unknown track type");
     }
   }
 }
 
-GraphTime MediaStreamGraphImpl::WillUnderrun(MediaStream* aStream,
-                                             GraphTime aEndBlockingDecisions) {
-  // Ended tracks can't underrun. ProcessedMediaStreams also can't cause
+GraphTime MediaTrackGraphImpl::WillUnderrun(MediaTrack* aTrack,
+                                            GraphTime aEndBlockingDecisions) {
+  // Ended tracks can't underrun. ProcessedMediaTracks also can't cause
   // underrun currently, since we'll always be able to produce data for them
-  // unless they block on some other stream.
-  if (aStream->mEnded || aStream->AsProcessedStream()) {
+  // unless they block on some other track.
+  if (aTrack->mEnded || aTrack->AsProcessedTrack()) {
     return aEndBlockingDecisions;
   }
-  // This stream isn't ended or suspended. We don't need to call
-  // StreamTimeToGraphTime since an underrun is the only thing that can block
+  // This track isn't ended or suspended. We don't need to call
+  // TrackTimeToGraphTime since an underrun is the only thing that can block
   // it.
-  GraphTime bufferEnd = aStream->GetEnd() + aStream->mStartTime;
+  GraphTime bufferEnd = aTrack->GetEnd() + aTrack->mStartTime;
 #ifdef DEBUG
   if (bufferEnd < mProcessedTime) {
-    LOG(LogLevel::Error, ("%p: MediaStream %p underrun, "
+    LOG(LogLevel::Error, ("%p: MediaTrack %p underrun, "
                           "bufferEnd %f < mProcessedTime %f (%" PRId64
-                          " < %" PRId64 "), Streamtime %" PRId64,
-                          this, aStream, MediaTimeToSeconds(bufferEnd),
+                          " < %" PRId64 "), TrackTime %" PRId64,
+                          this, aTrack, MediaTimeToSeconds(bufferEnd),
                           MediaTimeToSeconds(mProcessedTime), bufferEnd,
-                          mProcessedTime, aStream->GetEnd()));
+                          mProcessedTime, aTrack->GetEnd()));
     NS_ASSERTION(bufferEnd >= mProcessedTime, "Buffer underran");
   }
 #endif
   return std::min(bufferEnd, aEndBlockingDecisions);
 }
 
 namespace {
-// Value of mCycleMarker for unvisited streams in cycle detection.
+// Value of mCycleMarker for unvisited tracks in cycle detection.
 const uint32_t NOT_VISITED = UINT32_MAX;
-// Value of mCycleMarker for ordered streams in muted cycles.
+// Value of mCycleMarker for ordered tracks in muted cycles.
 const uint32_t IN_MUTED_CYCLE = 1;
 }  // namespace
 
-bool MediaStreamGraphImpl::AudioTrackPresent() {
+bool MediaTrackGraphImpl::AudioTrackPresent() {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
 
   bool audioTrackPresent = false;
-  for (MediaStream* stream : mStreams) {
-    if (stream->AsAudioNodeStream()) {
+  for (MediaTrack* track : mTracks) {
+    if (track->AsAudioNodeTrack()) {
       audioTrackPresent = true;
       break;
     }
 
-    if (stream->mType == MediaSegment::AUDIO && !stream->mNotifiedEnded) {
+    if (track->mType == MediaSegment::AUDIO && !track->mNotifiedEnded) {
       audioTrackPresent = true;
       break;
     }
   }
 
   // XXX For some reason, there are race conditions when starting an audio input
   // where we find no active audio tracks.  In any case, if we have an active
   // audio input we should not allow a switch back to a SystemClockDriver
   if (!audioTrackPresent && mInputDeviceUsers.Count() != 0) {
     NS_WARNING("No audio tracks, but full-duplex audio is enabled!!!!!");
     audioTrackPresent = true;
   }
 
   return audioTrackPresent;
 }
 
-void MediaStreamGraphImpl::UpdateStreamOrder() {
+void MediaTrackGraphImpl::UpdateTrackOrder() {
   MOZ_ASSERT(OnGraphThread());
   bool audioTrackPresent = AudioTrackPresent();
 
-  // Note that this looks for any audio streams, input or output, and switches
+  // Note that this looks for any audio tracks, input or output, and switches
   // to a SystemClockDriver if there are none.  However, if another is already
   // pending, let that switch happen.
 
   if (!audioTrackPresent && mRealtime &&
       CurrentDriver()->AsAudioCallbackDriver()) {
     MonitorAutoLock mon(mMonitor);
     if (CurrentDriver()->AsAudioCallbackDriver()->IsStarted() &&
         !(CurrentDriver()->Switching())) {
@@ -320,223 +318,223 @@ void MediaStreamGraphImpl::UpdateStreamO
     MonitorAutoLock mon(mMonitor);
     if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
       AudioCallbackDriver* driver = new AudioCallbackDriver(
           this, AudioInputChannelCount(), AudioInputDevicePreference());
       CurrentDriver()->SwitchAtNextIteration(driver);
     }
   }
 
-  if (!mStreamOrderDirty) {
+  if (!mTrackOrderDirty) {
     return;
   }
 
-  mStreamOrderDirty = false;
+  mTrackOrderDirty = false;
 
   // The algorithm for finding cycles is based on Tim Leslie's iterative
   // implementation [1][2] of Pearce's variant [3] of Tarjan's strongly
   // connected components (SCC) algorithm.  There are variations (a) to
-  // distinguish whether streams in SCCs of size 1 are in a cycle and (b) to
+  // distinguish whether tracks in SCCs of size 1 are in a cycle and (b) to
   // re-run the algorithm over SCCs with breaks at DelayNodes.
   //
   // [1] http://www.timl.id.au/?p=327
   // [2]
   // https://github.com/scipy/scipy/blob/e2c502fca/scipy/sparse/csgraph/_traversal.pyx#L582
   // [3] http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.102.1707
   //
   // There are two stacks.  One for the depth-first search (DFS),
-  mozilla::LinkedList<MediaStream> dfsStack;
-  // and another for streams popped from the DFS stack, but still being
-  // considered as part of SCCs involving streams on the stack.
-  mozilla::LinkedList<MediaStream> sccStack;
-
-  // An index into mStreams for the next stream found with no unsatisfied
+  mozilla::LinkedList<MediaTrack> dfsStack;
+  // and another for tracks popped from the DFS stack, but still being
+  // considered as part of SCCs involving tracks on the stack.
+  mozilla::LinkedList<MediaTrack> sccStack;
+
+  // An index into mTracks for the next track found with no unsatisfied
   // upstream dependencies.
-  uint32_t orderedStreamCount = 0;
-
-  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* s = mStreams[i];
-    ProcessedMediaStream* ps = s->AsProcessedStream();
-    if (ps) {
-      // The dfsStack initially contains a list of all processed streams in
+  uint32_t orderedTrackCount = 0;
+
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    MediaTrack* t = mTracks[i];
+    ProcessedMediaTrack* pt = t->AsProcessedTrack();
+    if (pt) {
+      // The dfsStack initially contains a list of all processed tracks in
       // unchanged order.
-      dfsStack.insertBack(s);
-      ps->mCycleMarker = NOT_VISITED;
+      dfsStack.insertBack(t);
+      pt->mCycleMarker = NOT_VISITED;
     } else {
-      // SourceMediaStreams have no inputs and so can be ordered now.
-      mStreams[orderedStreamCount] = s;
-      ++orderedStreamCount;
+      // SourceMediaTracks have no inputs and so can be ordered now.
+      mTracks[orderedTrackCount] = t;
+      ++orderedTrackCount;
     }
   }
 
   // mNextStackMarker corresponds to "index" in Tarjan's algorithm.  It is a
-  // counter to label mCycleMarker on the next visited stream in the DFS
-  // uniquely in the set of visited streams that are still being considered.
+  // counter to label mCycleMarker on the next visited track in the DFS
+  // uniquely in the set of visited tracks that are still being considered.
   //
   // In this implementation, the counter descends so that the values are
-  // strictly greater than the values that mCycleMarker takes when the stream
+  // strictly greater than the values that mCycleMarker takes when the track
   // has been ordered (0 or IN_MUTED_CYCLE).
   //
-  // Each new stream labelled, as the DFS searches upstream, receives a value
-  // less than those used for all other streams being considered.
+  // Each new track labelled, as the DFS searches upstream, receives a value
+  // less than those used for all other tracks being considered.
   uint32_t nextStackMarker = NOT_VISITED - 1;
-  // Reset list of DelayNodes in cycles stored at the tail of mStreams.
-  mFirstCycleBreaker = mStreams.Length();
-
-  // Rearrange dfsStack order as required to DFS upstream and pop streams
-  // in processing order to place in mStreams.
-  while (auto ps = static_cast<ProcessedMediaStream*>(dfsStack.getFirst())) {
-    const auto& inputs = ps->mInputs;
-    MOZ_ASSERT(ps->AsProcessedStream());
-    if (ps->mCycleMarker == NOT_VISITED) {
+  // Reset list of DelayNodes in cycles stored at the tail of mTracks.
+  mFirstCycleBreaker = mTracks.Length();
+
+  // Rearrange dfsStack order as required to DFS upstream and pop tracks
+  // in processing order to place in mTracks.
+  while (auto pt = static_cast<ProcessedMediaTrack*>(dfsStack.getFirst())) {
+    const auto& inputs = pt->mInputs;
+    MOZ_ASSERT(pt->AsProcessedTrack());
+    if (pt->mCycleMarker == NOT_VISITED) {
       // Record the position on the visited stack, so that any searches
-      // finding this stream again know how much of the stack is in the cycle.
-      ps->mCycleMarker = nextStackMarker;
+      // finding this track again know how much of the stack is in the cycle.
+      pt->mCycleMarker = nextStackMarker;
       --nextStackMarker;
-      // Not-visited input streams should be processed first.
-      // SourceMediaStreams have already been ordered.
+      // Not-visited input tracks should be processed first.
+      // SourceMediaTracks have already been ordered.
       for (uint32_t i = inputs.Length(); i--;) {
         if (inputs[i]->mSource->IsSuspended()) {
           continue;
         }
-        auto input = inputs[i]->mSource->AsProcessedStream();
+        auto input = inputs[i]->mSource->AsProcessedTrack();
         if (input && input->mCycleMarker == NOT_VISITED) {
-          // It can be that this stream has an input which is from a suspended
+          // It can be that this track has an input which is from a suspended
           // AudioContext.
           if (input->isInList()) {
             input->remove();
             dfsStack.insertFront(input);
           }
         }
       }
       continue;
     }
 
     // Returning from DFS.  Pop from dfsStack.
-    ps->remove();
+    pt->remove();
 
     // cycleStackMarker keeps track of the highest marker value on any
-    // upstream stream, if any, found receiving input, directly or indirectly,
+    // upstream track, if any, found receiving input, directly or indirectly,
-    // from the visited stack (and so from |ps|, making a cycle).  In a
-    // variation from Tarjan's SCC algorithm, this does not include |ps|
+    // from the visited stack (and so from |pt|, making a cycle).  In a
+    // variation from Tarjan's SCC algorithm, this does not include |pt|
     // unless it is part of the cycle.
     uint32_t cycleStackMarker = 0;
     for (uint32_t i = inputs.Length(); i--;) {
       if (inputs[i]->mSource->IsSuspended()) {
         continue;
       }
-      auto input = inputs[i]->mSource->AsProcessedStream();
+      auto input = inputs[i]->mSource->AsProcessedTrack();
       if (input) {
         cycleStackMarker = std::max(cycleStackMarker, input->mCycleMarker);
       }
     }
 
     if (cycleStackMarker <= IN_MUTED_CYCLE) {
       // All inputs have been ordered and their stack markers have been removed.
-      // This stream is not part of a cycle.  It can be processed next.
-      ps->mCycleMarker = 0;
-      mStreams[orderedStreamCount] = ps;
-      ++orderedStreamCount;
+      // This track is not part of a cycle.  It can be processed next.
+      pt->mCycleMarker = 0;
+      mTracks[orderedTrackCount] = pt;
+      ++orderedTrackCount;
       continue;
     }
 
-    // A cycle has been found.  Record this stream for ordering when all
-    // streams in this SCC have been popped from the DFS stack.
-    sccStack.insertFront(ps);
-
-    if (cycleStackMarker > ps->mCycleMarker) {
-      // Cycles have been found that involve streams that remain on the stack.
-      // Leave mCycleMarker indicating the most downstream (last) stream on
+    // A cycle has been found.  Record this track for ordering when all
+    // tracks in this SCC have been popped from the DFS stack.
+    sccStack.insertFront(pt);
+
+    if (cycleStackMarker > pt->mCycleMarker) {
+      // Cycles have been found that involve tracks that remain on the stack.
+      // Leave mCycleMarker indicating the most downstream (last) track on
       // the stack known to be part of this SCC.  In this way, any searches on
-      // other paths that find |ps| will know (without having to traverse from
+      // other paths that find |pt| will know (without having to traverse from
-      // this stream again) that they are part of this SCC (i.e. part of an
+      // this track again) that they are part of this SCC (i.e. part of an
       // intersecting cycle).
-      ps->mCycleMarker = cycleStackMarker;
+      pt->mCycleMarker = cycleStackMarker;
       continue;
     }
 
-    // |ps| is the root of an SCC involving no other streams on dfsStack, the
-    // complete SCC has been recorded, and streams in this SCC are part of at
+    // |pt| is the root of an SCC involving no other tracks on dfsStack, the
+    // complete SCC has been recorded, and tracks in this SCC are part of at
     // least one cycle.
-    MOZ_ASSERT(cycleStackMarker == ps->mCycleMarker);
+    MOZ_ASSERT(cycleStackMarker == pt->mCycleMarker);
     // If there are DelayNodes in this SCC, then they may break the cycles.
     bool haveDelayNode = false;
     auto next = sccStack.getFirst();
-    // Streams in this SCC are identified by mCycleMarker <= cycleStackMarker.
-    // (There may be other streams later in sccStack from other incompletely
-    // searched SCCs, involving streams still on dfsStack.)
+    // Tracks in this SCC are identified by mCycleMarker <= cycleStackMarker.
+    // (There may be other tracks later in sccStack from other incompletely
+    // searched SCCs, involving tracks still on dfsStack.)
     //
     // DelayNodes in cycles must behave differently from those not in cycles,
     // so all DelayNodes in the SCC must be identified.
-    while (next && static_cast<ProcessedMediaStream*>(next)->mCycleMarker <=
+    while (next && static_cast<ProcessedMediaTrack*>(next)->mCycleMarker <=
                        cycleStackMarker) {
-      auto ns = next->AsAudioNodeStream();
+      auto nt = next->AsAudioNodeTrack();
       // Get next before perhaps removing from list below.
       next = next->getNext();
-      if (ns && ns->Engine()->AsDelayNodeEngine()) {
+      if (nt && nt->Engine()->AsDelayNodeEngine()) {
         haveDelayNode = true;
         // DelayNodes break cycles by producing their output in a
         // preprocessing phase; they do not need to be ordered before their
-        // consumers.  Order them at the tail of mStreams so that they can be
+        // consumers.  Order them at the tail of mTracks so that they can be
         // handled specially.  Do so now, so that DFS ignores them.
-        ns->remove();
-        ns->mCycleMarker = 0;
+        nt->remove();
+        nt->mCycleMarker = 0;
         --mFirstCycleBreaker;
-        mStreams[mFirstCycleBreaker] = ns;
+        mTracks[mFirstCycleBreaker] = nt;
       }
     }
     auto after_scc = next;
     while ((next = sccStack.getFirst()) != after_scc) {
       next->remove();
-      auto removed = static_cast<ProcessedMediaStream*>(next);
+      auto removed = static_cast<ProcessedMediaTrack*>(next);
       if (haveDelayNode) {
-        // Return streams to the DFS stack again (to order and detect cycles
-        // without delayNodes).  Any of these streams that are still inputs
-        // for streams on the visited stack must be returned to the front of
+        // Return tracks to the DFS stack again (to order and detect cycles
+        // without delayNodes).  Any of these tracks that are still inputs
+        // for tracks on the visited stack must be returned to the front of
         // the stack to be ordered before their dependents.  We know that none
-        // of these streams need input from streams on the visited stack, so
+        // of these tracks need input from tracks on the visited stack, so
         // they can all be searched and ordered before the current stack head
         // is popped.
         removed->mCycleMarker = NOT_VISITED;
         dfsStack.insertFront(removed);
       } else {
-        // Streams in cycles without any DelayNodes must be muted, and so do
+        // Tracks in cycles without any DelayNodes must be muted, and so do
         // not need input and can be ordered now.  They must be ordered before
         // their consumers so that their muted output is available.
         removed->mCycleMarker = IN_MUTED_CYCLE;
-        mStreams[orderedStreamCount] = removed;
-        ++orderedStreamCount;
+        mTracks[orderedTrackCount] = removed;
+        ++orderedTrackCount;
       }
     }
   }
 
-  MOZ_ASSERT(orderedStreamCount == mFirstCycleBreaker);
+  MOZ_ASSERT(orderedTrackCount == mFirstCycleBreaker);
 }
 
-void MediaStreamGraphImpl::CreateOrDestroyAudioStreams(MediaStream* aStream) {
+void MediaTrackGraphImpl::CreateOrDestroyAudioTracks(MediaTrack* aTrack) {
   MOZ_ASSERT(OnGraphThread());
   MOZ_ASSERT(mRealtime,
-             "Should only attempt to create audio streams in real-time mode");
-
-  if (aStream->mAudioOutputs.IsEmpty()) {
-    aStream->mAudioOutputStream = nullptr;
+             "Should only attempt to create audio tracks in real-time mode");
+
+  if (aTrack->mAudioOutputs.IsEmpty()) {
+    aTrack->mAudioOutputStream = nullptr;
     return;
   }
 
-  if (aStream->mAudioOutputStream) {
+  if (aTrack->mAudioOutputStream) {
     return;
   }
 
   LOG(LogLevel::Debug,
-      ("%p: Updating AudioOutputStream for MediaStream %p", this, aStream));
-
-  aStream->mAudioOutputStream = MakeUnique<MediaStream::AudioOutputStream>();
-  aStream->mAudioOutputStream->mAudioPlaybackStartTime = mProcessedTime;
-  aStream->mAudioOutputStream->mBlockedAudioTime = 0;
-  aStream->mAudioOutputStream->mLastTickWritten = 0;
+      ("%p: Updating AudioOutputStream for MediaTrack %p", this, aTrack));
+
+  aTrack->mAudioOutputStream = MakeUnique<MediaTrack::AudioOutputStream>();
+  aTrack->mAudioOutputStream->mAudioPlaybackStartTime = mProcessedTime;
+  aTrack->mAudioOutputStream->mBlockedAudioTime = 0;
+  aTrack->mAudioOutputStream->mLastTickWritten = 0;
 
   bool switching = false;
   {
     MonitorAutoLock lock(mMonitor);
     switching = CurrentDriver()->Switching();
   }
 
   if (!CurrentDriver()->AsAudioCallbackDriver() && !switching) {
@@ -544,114 +542,114 @@ void MediaStreamGraphImpl::CreateOrDestr
     if (LifecycleStateRef() == LIFECYCLE_RUNNING) {
       AudioCallbackDriver* driver = new AudioCallbackDriver(
           this, AudioInputChannelCount(), AudioInputDevicePreference());
       CurrentDriver()->SwitchAtNextIteration(driver);
     }
   }
 }
 
-StreamTime MediaStreamGraphImpl::PlayAudio(MediaStream* aStream) {
+TrackTime MediaTrackGraphImpl::PlayAudio(MediaTrack* aTrack) {
   MOZ_ASSERT(OnGraphThread());
   MOZ_ASSERT(mRealtime, "Should only attempt to play audio in realtime mode");
 
   float volume = 0.0f;
-  for (uint32_t i = 0; i < aStream->mAudioOutputs.Length(); ++i) {
-    volume += aStream->mAudioOutputs[i].mVolume * mGlobalVolume;
+  for (uint32_t i = 0; i < aTrack->mAudioOutputs.Length(); ++i) {
+    volume += aTrack->mAudioOutputs[i].mVolume * mGlobalVolume;
   }
 
-  StreamTime ticksWritten = 0;
-
-  if (aStream->mAudioOutputStream) {
+  TrackTime ticksWritten = 0;
+
+  if (aTrack->mAudioOutputStream) {
     ticksWritten = 0;
 
-    MediaStream::AudioOutputStream& audioOutput = *aStream->mAudioOutputStream;
-    AudioSegment* audio = aStream->GetData<AudioSegment>();
+    MediaTrack::AudioOutputStream& audioOutput = *aTrack->mAudioOutputStream;
+    AudioSegment* audio = aTrack->GetData<AudioSegment>();
     AudioSegment output;
 
-    StreamTime offset = aStream->GraphTimeToStreamTime(mProcessedTime);
-
-    // We don't update aStream->mTracksStartTime here to account for time spent
-    // blocked. Instead, we'll update it in UpdateCurrentTimeForStreams after
+    TrackTime offset = aTrack->GraphTimeToTrackTime(mProcessedTime);
+
+    // We don't update aTrack->mTracksStartTime here to account for time spent
+    // blocked. Instead, we'll update it in UpdateCurrentTimeForTracks after
     // the blocked period has completed. But we do need to make sure we play
-    // from the right offsets in the stream buffer, even if we've already
+    // from the right offsets in the track buffer, even if we've already
     // written silence for some amount of blocked time after the current time.
     GraphTime t = mProcessedTime;
     while (t < mStateComputedTime) {
-      bool blocked = t >= aStream->mStartBlocking;
-      GraphTime end = blocked ? mStateComputedTime : aStream->mStartBlocking;
+      bool blocked = t >= aTrack->mStartBlocking;
+      GraphTime end = blocked ? mStateComputedTime : aTrack->mStartBlocking;
       NS_ASSERTION(end <= mStateComputedTime, "mStartBlocking is wrong!");
 
       // Check how many ticks of sound we can provide if we are blocked some
       // time in the middle of this cycle.
-      StreamTime toWrite = end - t;
+      TrackTime toWrite = end - t;
 
       if (blocked) {
         output.InsertNullDataAtStart(toWrite);
         ticksWritten += toWrite;
         LOG(LogLevel::Verbose,
-            ("%p: MediaStream %p writing %" PRId64
+            ("%p: MediaTrack %p writing %" PRId64
              " blocking-silence samples for "
              "%f to %f (%" PRId64 " to %" PRId64 ")",
-             this, aStream, toWrite, MediaTimeToSeconds(t),
+             this, aTrack, toWrite, MediaTimeToSeconds(t),
              MediaTimeToSeconds(end), offset, offset + toWrite));
       } else {
-        StreamTime endTicksNeeded = offset + toWrite;
-        StreamTime endTicksAvailable = audio->GetDuration();
+        TrackTime endTicksNeeded = offset + toWrite;
+        TrackTime endTicksAvailable = audio->GetDuration();
 
         if (endTicksNeeded <= endTicksAvailable) {
           LOG(LogLevel::Verbose,
-              ("%p: MediaStream %p writing %" PRId64 " samples for %f to %f "
+              ("%p: MediaTrack %p writing %" PRId64 " samples for %f to %f "
                "(samples %" PRId64 " to %" PRId64 ")",
-               this, aStream, toWrite, MediaTimeToSeconds(t),
+               this, aTrack, toWrite, MediaTimeToSeconds(t),
                MediaTimeToSeconds(end), offset, endTicksNeeded));
           output.AppendSlice(*audio, offset, endTicksNeeded);
           ticksWritten += toWrite;
           offset = endTicksNeeded;
         } else {
           // MOZ_ASSERT(track->IsEnded(), "Not enough data, and track not
           // ended."); If we are at the end of the track, maybe write the
           // remaining samples, and pad with/output silence.
           if (endTicksNeeded > endTicksAvailable &&
               offset < endTicksAvailable) {
             output.AppendSlice(*audio, offset, endTicksAvailable);
             LOG(LogLevel::Verbose,
-                ("%p: MediaStream %p writing %" PRId64 " samples for %f to %f "
+                ("%p: MediaTrack %p writing %" PRId64 " samples for %f to %f "
                  "(samples %" PRId64 " to %" PRId64 ")",
-                 this, aStream, toWrite, MediaTimeToSeconds(t),
+                 this, aTrack, toWrite, MediaTimeToSeconds(t),
                  MediaTimeToSeconds(end), offset, endTicksNeeded));
             uint32_t available = endTicksAvailable - offset;
             ticksWritten += available;
             toWrite -= available;
             offset = endTicksAvailable;
           }
           output.AppendNullData(toWrite);
           LOG(LogLevel::Verbose,
-              ("%p MediaStream %p writing %" PRId64
+              ("%p MediaTrack %p writing %" PRId64
                " padding slsamples for %f to "
                "%f (samples %" PRId64 " to %" PRId64 ")",
-               this, aStream, toWrite, MediaTimeToSeconds(t),
+               this, aTrack, toWrite, MediaTimeToSeconds(t),
                MediaTimeToSeconds(end), offset, endTicksNeeded));
           ticksWritten += toWrite;
         }
         output.ApplyVolume(volume);
       }
       t = end;
     }
     audioOutput.mLastTickWritten = offset;
 
     output.WriteTo(mMixer, AudioOutputChannelCount(), mSampleRate);
   }
   return ticksWritten;
 }
 
-void MediaStreamGraphImpl::OpenAudioInputImpl(CubebUtils::AudioDeviceID aID,
-                                              AudioDataListener* aListener) {
+void MediaTrackGraphImpl::OpenAudioInputImpl(CubebUtils::AudioDeviceID aID,
+                                             AudioDataListener* aListener) {
   MOZ_ASSERT(OnGraphThread());
-  // Only allow one device per MSG (hence, per document), but allow opening a
+  // Only allow one device per MTG (hence, per document), but allow opening a
   // device multiple times
   nsTArray<RefPtr<AudioDataListener>>& listeners =
       mInputDeviceUsers.GetOrInsert(aID);
   if (listeners.IsEmpty() && mInputDeviceUsers.Count() > 1) {
     // We don't support opening multiple input device in a graph for now.
     listeners.RemoveElement(aID);
     return;
   }
@@ -673,38 +671,38 @@ void MediaStreamGraphImpl::OpenAudioInpu
       CurrentDriver()->SwitchAtNextIteration(driver);
     } else {
       LOG(LogLevel::Error, ("OpenAudioInput in shutdown!"));
       MOZ_ASSERT_UNREACHABLE("Can't open cubeb inputs in shutdown");
     }
   }
 }
 
-nsresult MediaStreamGraphImpl::OpenAudioInput(CubebUtils::AudioDeviceID aID,
-                                              AudioDataListener* aListener) {
+nsresult MediaTrackGraphImpl::OpenAudioInput(CubebUtils::AudioDeviceID aID,
+                                             AudioDataListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   class Message : public ControlMessage {
    public:
-    Message(MediaStreamGraphImpl* aGraph, CubebUtils::AudioDeviceID aID,
+    Message(MediaTrackGraphImpl* aGraph, CubebUtils::AudioDeviceID aID,
             AudioDataListener* aListener)
         : ControlMessage(nullptr),
           mGraph(aGraph),
           mID(aID),
           mListener(aListener) {}
     void Run() override { mGraph->OpenAudioInputImpl(mID, mListener); }
-    MediaStreamGraphImpl* mGraph;
+    MediaTrackGraphImpl* mGraph;
     CubebUtils::AudioDeviceID mID;
     RefPtr<AudioDataListener> mListener;
   };
   // XXX Check not destroyed!
   this->AppendMessage(MakeUnique<Message>(this, aID, aListener));
   return NS_OK;
 }
 
-void MediaStreamGraphImpl::CloseAudioInputImpl(
+void MediaTrackGraphImpl::CloseAudioInputImpl(
     Maybe<CubebUtils::AudioDeviceID>& aID, AudioDataListener* aListener) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
   // It is possible to not know the ID here, find it first.
   if (aID.isNothing()) {
     for (auto iter = mInputDeviceUsers.Iter(); !iter.Done(); iter.Next()) {
       if (iter.Data().Contains(aListener)) {
         aID = Some(iter.Key());
       }
@@ -714,17 +712,17 @@ void MediaStreamGraphImpl::CloseAudioInp
 
   nsTArray<RefPtr<AudioDataListener>>* listeners =
       mInputDeviceUsers.GetValue(aID.value());
 
   MOZ_ASSERT(listeners);
   DebugOnly<bool> wasPresent = listeners->RemoveElement(aListener);
   MOZ_ASSERT(wasPresent);
 
-  // Breaks the cycle between the MSG and the listener.
+  // Breaks the cycle between the MTG and the listener.
   aListener->Disconnect(this);
 
   if (!listeners->IsEmpty()) {
     // There is still a consumer for this audio input device
     return;
   }
 
   mInputDeviceID = nullptr;  // reset to default
@@ -750,39 +748,39 @@ void MediaStreamGraphImpl::CloseAudioInp
           ("%p: CloseInput: no output present (SystemClockCallback)", this));
 
       driver = new SystemClockDriver(this);
       CurrentDriver()->SwitchAtNextIteration(driver);
     }  // else SystemClockDriver->SystemClockDriver, no switch
   }
 }
 
-void MediaStreamGraphImpl::CloseAudioInput(
-    Maybe<CubebUtils::AudioDeviceID>& aID, AudioDataListener* aListener) {
+void MediaTrackGraphImpl::CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID,
+                                          AudioDataListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   class Message : public ControlMessage {
    public:
-    Message(MediaStreamGraphImpl* aGraph, Maybe<CubebUtils::AudioDeviceID>& aID,
+    Message(MediaTrackGraphImpl* aGraph, Maybe<CubebUtils::AudioDeviceID>& aID,
             AudioDataListener* aListener)
         : ControlMessage(nullptr),
           mGraph(aGraph),
           mID(aID),
           mListener(aListener) {}
     void Run() override { mGraph->CloseAudioInputImpl(mID, mListener); }
-    MediaStreamGraphImpl* mGraph;
+    MediaTrackGraphImpl* mGraph;
     Maybe<CubebUtils::AudioDeviceID> mID;
     RefPtr<AudioDataListener> mListener;
   };
   this->AppendMessage(MakeUnique<Message>(this, aID, aListener));
 }
 
 // All AudioInput listeners get the same speaker data (at least for now).
-void MediaStreamGraphImpl::NotifyOutputData(AudioDataValue* aBuffer,
-                                            size_t aFrames, TrackRate aRate,
-                                            uint32_t aChannels) {
+void MediaTrackGraphImpl::NotifyOutputData(AudioDataValue* aBuffer,
+                                           size_t aFrames, TrackRate aRate,
+                                           uint32_t aChannels) {
 #ifdef ANDROID
   // On Android, mInputDeviceID is always null and represents the default
   // device.
   // The absence of an input consumer is enough to know we need to bail out
   // here.
   if (!mInputDeviceUsers.GetValue(mInputDeviceID)) {
     return;
   }
@@ -796,19 +794,19 @@ void MediaStreamGraphImpl::NotifyOutputD
   nsTArray<RefPtr<AudioDataListener>>* listeners =
       mInputDeviceUsers.GetValue(mInputDeviceID);
   MOZ_ASSERT(listeners);
   for (auto& listener : *listeners) {
     listener->NotifyOutputData(this, aBuffer, aFrames, aRate, aChannels);
   }
 }
 
-void MediaStreamGraphImpl::NotifyInputData(const AudioDataValue* aBuffer,
-                                           size_t aFrames, TrackRate aRate,
-                                           uint32_t aChannels) {
+void MediaTrackGraphImpl::NotifyInputData(const AudioDataValue* aBuffer,
+                                          size_t aFrames, TrackRate aRate,
+                                          uint32_t aChannels) {
 #ifdef ANDROID
   if (!mInputDeviceUsers.GetValue(mInputDeviceID)) {
     return;
   }
 #else
 #  ifdef DEBUG
   {
     MonitorAutoLock lock(mMonitor);
@@ -825,17 +823,17 @@ void MediaStreamGraphImpl::NotifyInputDa
   nsTArray<RefPtr<AudioDataListener>>* listeners =
       mInputDeviceUsers.GetValue(mInputDeviceID);
   MOZ_ASSERT(listeners);
   for (auto& listener : *listeners) {
     listener->NotifyInputData(this, aBuffer, aFrames, aRate, aChannels);
   }
 }
 
-void MediaStreamGraphImpl::DeviceChangedImpl() {
+void MediaTrackGraphImpl::DeviceChangedImpl() {
   MOZ_ASSERT(OnGraphThread());
 
 #ifdef ANDROID
   if (!mInputDeviceUsers.GetValue(mInputDeviceID)) {
     return;
   }
 #else
   if (!mInputDeviceID) {
@@ -845,49 +843,48 @@ void MediaStreamGraphImpl::DeviceChanged
 
   nsTArray<RefPtr<AudioDataListener>>* listeners =
       mInputDeviceUsers.GetValue(mInputDeviceID);
   for (auto& listener : *listeners) {
     listener->DeviceChanged(this);
   }
 }
 
-void MediaStreamGraphImpl::DeviceChanged() {
+void MediaTrackGraphImpl::DeviceChanged() {
   // This is safe to be called from any thread: this message comes from an
   // underlying platform API, and we don't have much guarantees. If it is not
   // called from the main thread (and it probably will rarely be), it will post
   // itself to the main thread, and the actual device change message will be ran
   // and acted upon on the graph thread.
   if (!NS_IsMainThread()) {
-    RefPtr<nsIRunnable> runnable =
-        WrapRunnable(RefPtr<MediaStreamGraphImpl>(this),
-                     &MediaStreamGraphImpl::DeviceChanged);
+    RefPtr<nsIRunnable> runnable = WrapRunnable(
+        RefPtr<MediaTrackGraphImpl>(this), &MediaTrackGraphImpl::DeviceChanged);
     mAbstractMainThread->Dispatch(runnable.forget());
     return;
   }
 
   class Message : public ControlMessage {
    public:
-    explicit Message(MediaStreamGraph* aGraph)
+    explicit Message(MediaTrackGraph* aGraph)
         : ControlMessage(nullptr),
-          mGraphImpl(static_cast<MediaStreamGraphImpl*>(aGraph)) {}
+          mGraphImpl(static_cast<MediaTrackGraphImpl*>(aGraph)) {}
     void Run() override { mGraphImpl->DeviceChangedImpl(); }
     // We know that this is valid, because the graph can't shutdown if it has
     // messages.
-    MediaStreamGraphImpl* mGraphImpl;
+    MediaTrackGraphImpl* mGraphImpl;
   };
 
   // Reset the latency, it will get fetched again next time it's queried.
   MOZ_ASSERT(NS_IsMainThread());
   mAudioOutputLatency = 0.0;
 
   AppendMessage(MakeUnique<Message>(this));
 }
 
-void MediaStreamGraphImpl::ReevaluateInputDevice() {
+void MediaTrackGraphImpl::ReevaluateInputDevice() {
   MOZ_ASSERT(OnGraphThread());
   bool needToSwitch = false;
 
   if (CurrentDriver()->AsAudioCallbackDriver()) {
     AudioCallbackDriver* audioCallbackDriver =
         CurrentDriver()->AsAudioCallbackDriver();
     if (audioCallbackDriver->InputChannelCount() != AudioInputChannelCount()) {
       needToSwitch = true;
@@ -912,38 +909,38 @@ void MediaStreamGraphImpl::ReevaluateInp
         this, AudioInputChannelCount(), AudioInputDevicePreference());
     {
       MonitorAutoLock lock(mMonitor);
       CurrentDriver()->SwitchAtNextIteration(newDriver);
     }
   }
 }
 
-bool MediaStreamGraphImpl::OnGraphThreadOrNotRunning() const {
+bool MediaTrackGraphImpl::OnGraphThreadOrNotRunning() const {
   // either we're on the right thread (and calling CurrentDriver() is safe),
   // or we're going to fail the assert anyway, so don't cross-check
   // via CurrentDriver().
   return mDetectedNotRunning ? NS_IsMainThread() : OnGraphThread();
 }
 
-bool MediaStreamGraphImpl::OnGraphThread() const {
+bool MediaTrackGraphImpl::OnGraphThread() const {
   // we're on the right thread (and calling mDriver is safe),
   MOZ_ASSERT(mDriver);
   if (mGraphRunner && mGraphRunner->OnThread()) {
     return true;
   }
   return mDriver->OnThread();
 }
 
-bool MediaStreamGraphImpl::Destroyed() const {
+bool MediaTrackGraphImpl::Destroyed() const {
   MOZ_ASSERT(NS_IsMainThread());
   return !mSelfRef;
 }
 
-bool MediaStreamGraphImpl::ShouldUpdateMainThread() {
+bool MediaTrackGraphImpl::ShouldUpdateMainThread() {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
   if (mRealtime) {
     return true;
   }
 
   TimeStamp now = TimeStamp::Now();
   // For offline graphs, update now if there is no pending iteration or if it
   // has been long enough since the last update.
@@ -951,300 +948,300 @@ bool MediaStreamGraphImpl::ShouldUpdateM
       ((now - mLastMainThreadUpdate).ToMilliseconds() >
        CurrentDriver()->IterationDuration())) {
     mLastMainThreadUpdate = now;
     return true;
   }
   return false;
 }
 
-void MediaStreamGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate) {
+void MediaTrackGraphImpl::PrepareUpdatesToMainThreadState(bool aFinalUpdate) {
   MOZ_ASSERT(OnGraphThreadOrNotRunning());
   mMonitor.AssertCurrentThreadOwns();
 
   // We don't want to frequently update the main thread about timing update
   // when we are not running in realtime.
   if (aFinalUpdate || ShouldUpdateMainThread()) {
     // Strip updates that will be obsoleted below, so as to keep the length of
-    // mStreamUpdates sane.
+    // mTrackUpdates sane.
     size_t keptUpdateCount = 0;
-    for (size_t i = 0; i < mStreamUpdates.Length(); ++i) {
-      MediaStream* stream = mStreamUpdates[i].mStream;
-      // RemoveStreamGraphThread() clears mStream in updates for
-      // streams that are removed from the graph.
-      MOZ_ASSERT(!stream || stream->GraphImpl() == this);
-      if (!stream || stream->MainThreadNeedsUpdates()) {
-        // Discard this update as it has either been cleared when the stream
+    for (size_t i = 0; i < mTrackUpdates.Length(); ++i) {
+      MediaTrack* track = mTrackUpdates[i].mTrack;
+      // RemoveTrackGraphThread() clears mTrack in updates for
+      // tracks that are removed from the graph.
+      MOZ_ASSERT(!track || track->GraphImpl() == this);
+      if (!track || track->MainThreadNeedsUpdates()) {
+        // Discard this update as it has either been cleared when the track
         // was destroyed or there will be a newer update below.
         continue;
       }
       if (keptUpdateCount != i) {
-        mStreamUpdates[keptUpdateCount] = std::move(mStreamUpdates[i]);
-        MOZ_ASSERT(!mStreamUpdates[i].mStream);
+        mTrackUpdates[keptUpdateCount] = std::move(mTrackUpdates[i]);
+        MOZ_ASSERT(!mTrackUpdates[i].mTrack);
       }
       ++keptUpdateCount;
     }
-    mStreamUpdates.TruncateLength(keptUpdateCount);
-
-    mStreamUpdates.SetCapacity(mStreamUpdates.Length() + mStreams.Length() +
-                               mSuspendedStreams.Length());
-    for (MediaStream* stream : AllStreams()) {
-      if (!stream->MainThreadNeedsUpdates()) {
+    mTrackUpdates.TruncateLength(keptUpdateCount);
+
+    mTrackUpdates.SetCapacity(mTrackUpdates.Length() + mTracks.Length() +
+                              mSuspendedTracks.Length());
+    for (MediaTrack* track : AllTracks()) {
+      if (!track->MainThreadNeedsUpdates()) {
         continue;
       }
-      StreamUpdate* update = mStreamUpdates.AppendElement();
-      update->mStream = stream;
+      TrackUpdate* update = mTrackUpdates.AppendElement();
+      update->mTrack = track;
       // No blocking to worry about here, since we've passed
-      // UpdateCurrentTimeForStreams.
+      // UpdateCurrentTimeForTracks.
       update->mNextMainThreadCurrentTime =
-          stream->GraphTimeToStreamTime(mProcessedTime);
-      update->mNextMainThreadEnded = stream->mNotifiedEnded;
+          track->GraphTimeToTrackTime(mProcessedTime);
+      update->mNextMainThreadEnded = track->mNotifiedEnded;
     }
     mNextMainThreadGraphTime = mProcessedTime;
     if (!mPendingUpdateRunnables.IsEmpty()) {
       mUpdateRunnables.AppendElements(std::move(mPendingUpdateRunnables));
     }
   }
 
   // If this is the final update, then a stable state event will soon be
   // posted just before this thread finishes, and so there is no need to also
   // post here.
   if (!aFinalUpdate &&
       // Don't send the message to the main thread if it's not going to have
       // any work to do.
-      !(mUpdateRunnables.IsEmpty() && mStreamUpdates.IsEmpty())) {
+      !(mUpdateRunnables.IsEmpty() && mTrackUpdates.IsEmpty())) {
     EnsureStableStateEventPosted();
   }
 }
 
-GraphTime MediaStreamGraphImpl::RoundUpToEndOfAudioBlock(GraphTime aTime) {
+GraphTime MediaTrackGraphImpl::RoundUpToEndOfAudioBlock(GraphTime aTime) {
   if (aTime % WEBAUDIO_BLOCK_SIZE == 0) {
     return aTime;
   }
   return RoundUpToNextAudioBlock(aTime);
 }
 
-GraphTime MediaStreamGraphImpl::RoundUpToNextAudioBlock(GraphTime aTime) {
+GraphTime MediaTrackGraphImpl::RoundUpToNextAudioBlock(GraphTime aTime) {
   uint64_t block = aTime >> WEBAUDIO_BLOCK_SIZE_BITS;
   uint64_t nextBlock = block + 1;
   GraphTime nextTime = nextBlock << WEBAUDIO_BLOCK_SIZE_BITS;
   return nextTime;
 }
 
-void MediaStreamGraphImpl::ProduceDataForStreamsBlockByBlock(
-    uint32_t aStreamIndex, TrackRate aSampleRate) {
+void MediaTrackGraphImpl::ProduceDataForTracksBlockByBlock(
+    uint32_t aTrackIndex, TrackRate aSampleRate) {
   MOZ_ASSERT(OnGraphThread());
-  MOZ_ASSERT(aStreamIndex <= mFirstCycleBreaker,
-             "Cycle breaker is not AudioNodeStream?");
+  MOZ_ASSERT(aTrackIndex <= mFirstCycleBreaker,
+             "Cycle breaker is not AudioNodeTrack?");
   GraphTime t = mProcessedTime;
   while (t < mStateComputedTime) {
     GraphTime next = RoundUpToNextAudioBlock(t);
-    for (uint32_t i = mFirstCycleBreaker; i < mStreams.Length(); ++i) {
-      auto ns = static_cast<AudioNodeStream*>(mStreams[i]);
-      MOZ_ASSERT(ns->AsAudioNodeStream());
-      ns->ProduceOutputBeforeInput(t);
+    for (uint32_t i = mFirstCycleBreaker; i < mTracks.Length(); ++i) {
+      auto nt = static_cast<AudioNodeTrack*>(mTracks[i]);
+      MOZ_ASSERT(nt->AsAudioNodeTrack());
+      nt->ProduceOutputBeforeInput(t);
     }
-    for (uint32_t i = aStreamIndex; i < mStreams.Length(); ++i) {
-      ProcessedMediaStream* ps = mStreams[i]->AsProcessedStream();
-      if (ps) {
-        ps->ProcessInput(
+    for (uint32_t i = aTrackIndex; i < mTracks.Length(); ++i) {
+      ProcessedMediaTrack* pt = mTracks[i]->AsProcessedTrack();
+      if (pt) {
+        pt->ProcessInput(
             t, next,
-            (next == mStateComputedTime) ? ProcessedMediaStream::ALLOW_END : 0);
+            (next == mStateComputedTime) ? ProcessedMediaTrack::ALLOW_END : 0);
       }
     }
     t = next;
   }
   NS_ASSERTION(t == mStateComputedTime,
                "Something went wrong with rounding to block boundaries");
 }
 
-void MediaStreamGraphImpl::RunMessageAfterProcessing(
+void MediaTrackGraphImpl::RunMessageAfterProcessing(
     UniquePtr<ControlMessage> aMessage) {
   MOZ_ASSERT(OnGraphThread());
 
   if (mFrontMessageQueue.IsEmpty()) {
     mFrontMessageQueue.AppendElement();
   }
 
   // Only one block is used for messages from the graph thread.
   MOZ_ASSERT(mFrontMessageQueue.Length() == 1);
   mFrontMessageQueue[0].mMessages.AppendElement(std::move(aMessage));
 }
 
-void MediaStreamGraphImpl::RunMessagesInQueue() {
+void MediaTrackGraphImpl::RunMessagesInQueue() {
   TRACE_AUDIO_CALLBACK();
   MOZ_ASSERT(OnGraphThread());
   // Calculate independent action times for each batch of messages (each
   // batch corresponding to an event loop task). This isolates the performance
   // of different scripts to some extent.
   for (uint32_t i = 0; i < mFrontMessageQueue.Length(); ++i) {
     nsTArray<UniquePtr<ControlMessage>>& messages =
         mFrontMessageQueue[i].mMessages;
 
     for (uint32_t j = 0; j < messages.Length(); ++j) {
       messages[j]->Run();
     }
   }
   mFrontMessageQueue.Clear();
 }
 
-void MediaStreamGraphImpl::UpdateGraph(GraphTime aEndBlockingDecisions) {
+void MediaTrackGraphImpl::UpdateGraph(GraphTime aEndBlockingDecisions) {
   TRACE_AUDIO_CALLBACK();
   MOZ_ASSERT(OnGraphThread());
   MOZ_ASSERT(aEndBlockingDecisions >= mProcessedTime);
   // The next state computed time can be the same as the previous: it
   // means the driver would have been blocking indefinitly, but the graph has
   // been woken up right after having been to sleep.
   MOZ_ASSERT(aEndBlockingDecisions >= mStateComputedTime);
 
-  UpdateStreamOrder();
+  UpdateTrackOrder();
 
   bool ensureNextIteration = false;
 
-  for (MediaStream* stream : mStreams) {
-    if (SourceMediaStream* is = stream->AsSourceStream()) {
+  for (MediaTrack* track : mTracks) {
+    if (SourceMediaTrack* is = track->AsSourceTrack()) {
       ensureNextIteration |= is->PullNewData(aEndBlockingDecisions);
       is->ExtractPendingInput(mStateComputedTime, aEndBlockingDecisions);
     }
-    if (stream->mEnded) {
-      // The stream's not suspended, and since it's ended, underruns won't
+    if (track->mEnded) {
+      // The track's not suspended, and since it's ended, underruns won't
       // stop it playing out. So there's no blocking other than what we impose
       // here.
-      GraphTime endTime = stream->GetEnd() + stream->mStartTime;
+      GraphTime endTime = track->GetEnd() + track->mStartTime;
       if (endTime <= mStateComputedTime) {
         LOG(LogLevel::Verbose,
-            ("%p: MediaStream %p is blocked due to being ended", this, stream));
-        stream->mStartBlocking = mStateComputedTime;
+            ("%p: MediaTrack %p is blocked due to being ended", this, track));
+        track->mStartBlocking = mStateComputedTime;
       } else {
         LOG(LogLevel::Verbose,
-            ("%p: MediaStream %p has ended, but is not blocked yet (current "
+            ("%p: MediaTrack %p has ended, but is not blocked yet (current "
              "time %f, end at %f)",
-             this, stream, MediaTimeToSeconds(mStateComputedTime),
+             this, track, MediaTimeToSeconds(mStateComputedTime),
              MediaTimeToSeconds(endTime)));
-        // Data can't be added to a ended stream, so underruns are irrelevant.
+        // Data can't be added to an ended track, so underruns are irrelevant.
         MOZ_ASSERT(endTime <= aEndBlockingDecisions);
-        stream->mStartBlocking = endTime;
+        track->mStartBlocking = endTime;
       }
     } else {
-      stream->mStartBlocking = WillUnderrun(stream, aEndBlockingDecisions);
+      track->mStartBlocking = WillUnderrun(track, aEndBlockingDecisions);
 
 #ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
-      if (SourceMediaStream* s = stream->AsSourceStream()) {
+      if (SourceMediaTrack* s = track->AsSourceTrack()) {
         if (s->Ended()) {
           continue;
         }
         {
           MutexAutoLock lock(s->mMutex);
           if (!s->mUpdateTrack->mPullingEnabled) {
             // The invariant that data must be provided is only enforced when
             // pulling.
             continue;
           }
         }
-        if (stream->GetEnd() <
-            stream->GraphTimeToStreamTime(aEndBlockingDecisions)) {
+        if (track->GetEnd() <
+            track->GraphTimeToTrackTime(aEndBlockingDecisions)) {
           LOG(LogLevel::Error,
-              ("%p: SourceMediaStream %p (%s) is live and pulled, "
+              ("%p: SourceMediaTrack %p (%s) is live and pulled, "
                "but wasn't fed "
                "enough data. TrackListeners=%zu. Track-end=%f, "
                "Iteration-end=%f",
-               this, stream,
-               (stream->mType == MediaSegment::AUDIO ? "audio" : "video"),
-               stream->mTrackListeners.Length(),
-               MediaTimeToSeconds(stream->GetEnd()),
+               this, track,
+               (track->mType == MediaSegment::AUDIO ? "audio" : "video"),
+               track->mTrackListeners.Length(),
+               MediaTimeToSeconds(track->GetEnd()),
                MediaTimeToSeconds(
-                   stream->GraphTimeToStreamTime(aEndBlockingDecisions))));
+                   track->GraphTimeToTrackTime(aEndBlockingDecisions))));
           MOZ_DIAGNOSTIC_ASSERT(false,
-                                "A non-ended SourceMediaStream wasn't fed "
+                                "A non-ended SourceMediaTrack wasn't fed "
                                 "enough data by NotifyPull");
         }
       }
 #endif /* MOZ_DIAGNOSTIC_ASSERT_ENABLED */
     }
   }
 
-  for (MediaStream* stream : mSuspendedStreams) {
-    stream->mStartBlocking = mStateComputedTime;
+  for (MediaTrack* track : mSuspendedTracks) {
+    track->mStartBlocking = mStateComputedTime;
   }
 
   // If the loop is woken up so soon that IterationEnd() barely advances or
   // if an offline graph is not currently rendering, we end up having
   // aEndBlockingDecisions == mStateComputedTime.
   // Since the process interval [mStateComputedTime, aEndBlockingDecision) is
-  // empty, Process() will not find any unblocked stream and so will not
+  // empty, Process() will not find any unblocked track and so will not
   // ensure another iteration.  If the graph should be rendering, then ensure
   // another iteration to render.
   if (ensureNextIteration || (aEndBlockingDecisions == mStateComputedTime &&
                               mStateComputedTime < mEndTime)) {
     EnsureNextIteration();
   }
 }
 
-void MediaStreamGraphImpl::Process() {
+void MediaTrackGraphImpl::Process() {
   TRACE_AUDIO_CALLBACK();
   MOZ_ASSERT(OnGraphThread());
-  // Play stream contents.
+  // Play track contents.
   bool allBlockedForever = true;
-  // True when we've done ProcessInput for all processed streams.
+  // True when we've done ProcessInput for all processed tracks.
   bool doneAllProducing = false;
   // This is the number of frame that are written to the AudioStreams, for
   // this cycle.
-  StreamTime ticksPlayed = 0;
+  TrackTime ticksPlayed = 0;
 
   mMixer.StartMixing();
 
-  // Figure out what each stream wants to do
-  for (uint32_t i = 0; i < mStreams.Length(); ++i) {
-    MediaStream* stream = mStreams[i];
+  // Figure out what each track wants to do
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    MediaTrack* track = mTracks[i];
     if (!doneAllProducing) {
-      ProcessedMediaStream* ps = stream->AsProcessedStream();
-      if (ps) {
-        AudioNodeStream* n = stream->AsAudioNodeStream();
+      ProcessedMediaTrack* pt = track->AsProcessedTrack();
+      if (pt) {
+        AudioNodeTrack* n = track->AsAudioNodeTrack();
         if (n) {
 #ifdef DEBUG
-          // Verify that the sampling rate for all of the following streams is
+          // Verify that the sampling rate for all of the following tracks is
           // the same
-          for (uint32_t j = i + 1; j < mStreams.Length(); ++j) {
-            AudioNodeStream* nextStream = mStreams[j]->AsAudioNodeStream();
-            if (nextStream) {
-              MOZ_ASSERT(n->mSampleRate == nextStream->mSampleRate,
-                         "All AudioNodeStreams in the graph must have the same "
+          for (uint32_t j = i + 1; j < mTracks.Length(); ++j) {
+            AudioNodeTrack* nextTrack = mTracks[j]->AsAudioNodeTrack();
+            if (nextTrack) {
+              MOZ_ASSERT(n->mSampleRate == nextTrack->mSampleRate,
+                         "All AudioNodeTracks in the graph must have the same "
                          "sampling rate");
             }
           }
 #endif
-          // Since an AudioNodeStream is present, go ahead and
-          // produce audio block by block for all the rest of the streams.
-          ProduceDataForStreamsBlockByBlock(i, n->mSampleRate);
+          // Since an AudioNodeTrack is present, go ahead and
+          // produce audio block by block for all the rest of the tracks.
+          ProduceDataForTracksBlockByBlock(i, n->mSampleRate);
           doneAllProducing = true;
         } else {
-          ps->ProcessInput(mProcessedTime, mStateComputedTime,
-                           ProcessedMediaStream::ALLOW_END);
-          // Assert that a live stream produced enough data
-          MOZ_ASSERT_IF(!stream->mEnded,
-                        stream->GetEnd() >= GraphTimeToStreamTimeWithBlocking(
-                                                stream, mStateComputedTime));
+          pt->ProcessInput(mProcessedTime, mStateComputedTime,
+                           ProcessedMediaTrack::ALLOW_END);
+          // Assert that a live track produced enough data
+          MOZ_ASSERT_IF(!track->mEnded,
+                        track->GetEnd() >= GraphTimeToTrackTimeWithBlocking(
+                                               track, mStateComputedTime));
         }
       }
     }
     // Only playback audio and video in real-time mode
     if (mRealtime) {
-      CreateOrDestroyAudioStreams(stream);
+      CreateOrDestroyAudioTracks(track);
       if (CurrentDriver()->AsAudioCallbackDriver()) {
-        StreamTime ticksPlayedForThisStream = PlayAudio(stream);
+        TrackTime ticksPlayedForThisTrack = PlayAudio(track);
         if (!ticksPlayed) {
-          ticksPlayed = ticksPlayedForThisStream;
+          ticksPlayed = ticksPlayedForThisTrack;
         } else {
-          MOZ_ASSERT(!ticksPlayedForThisStream ||
-                         ticksPlayedForThisStream == ticksPlayed,
-                     "Each stream should have the same number of frame.");
+          MOZ_ASSERT(!ticksPlayedForThisTrack ||
+                         ticksPlayedForThisTrack == ticksPlayed,
+                     "Each track should have the same number of frame.");
         }
       }
     }
-    if (stream->mStartBlocking > mProcessedTime) {
+    if (track->mStartBlocking > mProcessedTime) {
       allBlockedForever = false;
     }
   }
 
   if (CurrentDriver()->AsAudioCallbackDriver()) {
     if (!ticksPlayed) {
       // Nothing was played, so the mixer doesn't know how many frames were
       // processed. We still tell it so AudioCallbackDriver knows how much has
@@ -1256,50 +1253,50 @@ void MediaStreamGraphImpl::Process() {
     mMixer.FinishMixing();
   }
 
   if (!allBlockedForever) {
     EnsureNextIteration();
   }
 }
 
-bool MediaStreamGraphImpl::UpdateMainThreadState() {
+bool MediaTrackGraphImpl::UpdateMainThreadState() {
   MOZ_ASSERT(OnGraphThread());
   if (mForceShutDown) {
-    for (MediaStream* stream : AllStreams()) {
-      stream->NotifyForcedShutdown();
+    for (MediaTrack* track : AllTracks()) {
+      track->NotifyForcedShutdown();
     }
   }
 
   MonitorAutoLock lock(mMonitor);
   bool finalUpdate =
       mForceShutDown || (IsEmpty() && mBackMessageQueue.IsEmpty());
   PrepareUpdatesToMainThreadState(finalUpdate);
   if (finalUpdate) {
     // Enter shutdown mode when this iteration is completed.
-    // No need to Destroy streams here. The main-thread owner of each
-    // stream is responsible for calling Destroy on them.
+    // No need to Destroy tracks here. The main-thread owner of each
+    // track is responsible for calling Destroy on them.
     return false;
   }
 
   CurrentDriver()->WaitForNextIteration();
 
   SwapMessageQueues();
   return true;
 }
 
-bool MediaStreamGraphImpl::OneIteration(GraphTime aStateEnd) {
+bool MediaTrackGraphImpl::OneIteration(GraphTime aStateEnd) {
   if (mGraphRunner) {
     return mGraphRunner->OneIteration(aStateEnd);
   }
 
   return OneIterationImpl(aStateEnd);
 }
 
-bool MediaStreamGraphImpl::OneIterationImpl(GraphTime aStateEnd) {
+bool MediaTrackGraphImpl::OneIterationImpl(GraphTime aStateEnd) {
   TRACE_AUDIO_CALLBACK();
 
   // Changes to LIFECYCLE_RUNNING occur before starting or reviving the graph
   // thread, and so the monitor need not be held to check mLifecycleState.
   // LIFECYCLE_THREAD_NOT_STARTED is possible when shutting down offline
   // graphs that have not started.
   MOZ_DIAGNOSTIC_ASSERT(mLifecycleState <= LIFECYCLE_RUNNING);
   MOZ_ASSERT(OnGraphThread());
@@ -1314,130 +1311,130 @@ bool MediaStreamGraphImpl::OneIterationI
 
   mStateComputedTime = stateEnd;
 
   Process();
 
   GraphTime oldProcessedTime = mProcessedTime;
   mProcessedTime = stateEnd;
 
-  UpdateCurrentTimeForStreams(oldProcessedTime);
+  UpdateCurrentTimeForTracks(oldProcessedTime);
 
   ProcessChunkMetadata(oldProcessedTime);
 
   // Process graph messages queued from RunMessageAfterProcessing() on this
   // thread during the iteration.
   RunMessagesInQueue();
 
   return UpdateMainThreadState();
 }
 
-void MediaStreamGraphImpl::ApplyStreamUpdate(StreamUpdate* aUpdate) {
+void MediaTrackGraphImpl::ApplyTrackUpdate(TrackUpdate* aUpdate) {
   MOZ_ASSERT(NS_IsMainThread());
   mMonitor.AssertCurrentThreadOwns();
 
-  MediaStream* stream = aUpdate->mStream;
-  if (!stream) return;
-  stream->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
-  stream->mMainThreadEnded = aUpdate->mNextMainThreadEnded;
-
-  if (stream->ShouldNotifyTrackEnded()) {
-    stream->NotifyMainThreadListeners();
+  MediaTrack* track = aUpdate->mTrack;
+  if (!track) return;
+  track->mMainThreadCurrentTime = aUpdate->mNextMainThreadCurrentTime;
+  track->mMainThreadEnded = aUpdate->mNextMainThreadEnded;
+
+  if (track->ShouldNotifyTrackEnded()) {
+    track->NotifyMainThreadListeners();
   }
 }
 
-void MediaStreamGraphImpl::ForceShutDown() {
+void MediaTrackGraphImpl::ForceShutDown() {
   MOZ_ASSERT(NS_IsMainThread(), "Must be called on main thread");
-  LOG(LogLevel::Debug, ("%p: MediaStreamGraph::ForceShutdown", this));
+  LOG(LogLevel::Debug, ("%p: MediaTrackGraph::ForceShutdown", this));
 
   if (mShutdownBlocker) {
     // Avoid waiting forever for a graph to shut down
     // synchronously.  Reports are that some 3rd-party audio drivers
     // occasionally hang in shutdown (both for us and Chrome).
     NS_NewTimerWithCallback(
         getter_AddRefs(mShutdownTimer), this,
-        MediaStreamGraph::AUDIO_CALLBACK_DRIVER_SHUTDOWN_TIMEOUT,
+        MediaTrackGraph::AUDIO_CALLBACK_DRIVER_SHUTDOWN_TIMEOUT,
         nsITimer::TYPE_ONE_SHOT);
   }
 
   class Message final : public ControlMessage {
    public:
-    explicit Message(MediaStreamGraphImpl* aGraph)
+    explicit Message(MediaTrackGraphImpl* aGraph)
         : ControlMessage(nullptr), mGraph(aGraph) {}
     void Run() override { mGraph->mForceShutDown = true; }
     // The graph owns this message.
-    MediaStreamGraphImpl* MOZ_NON_OWNING_REF mGraph;
+    MediaTrackGraphImpl* MOZ_NON_OWNING_REF mGraph;
   };
 
-  if (mMainThreadStreamCount > 0 || mMainThreadPortCount > 0) {
-    // If both the stream and port counts are zero, the regular shutdown
+  if (mMainThreadTrackCount > 0 || mMainThreadPortCount > 0) {
+    // If both the track and port counts are zero, the regular shutdown
     // sequence will progress shortly to shutdown threads and destroy the graph.
     AppendMessage(MakeUnique<Message>(this));
   }
 }
 
 NS_IMETHODIMP
-MediaStreamGraphImpl::Notify(nsITimer* aTimer) {
+MediaTrackGraphImpl::Notify(nsITimer* aTimer) {
   MOZ_ASSERT(NS_IsMainThread());
   NS_ASSERTION(!mShutdownBlocker,
-               "MediaStreamGraph took too long to shut down!");
+               "MediaTrackGraph took too long to shut down!");
   // Sigh, graph took too long to shut down.  Stop blocking system
   // shutdown and hope all is well.
   RemoveShutdownBlocker();
   return NS_OK;
 }
 
-void MediaStreamGraphImpl::AddShutdownBlocker() {
+void MediaTrackGraphImpl::AddShutdownBlocker() {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(!mShutdownBlocker);
 
   class Blocker : public media::ShutdownBlocker {
-    const RefPtr<MediaStreamGraphImpl> mGraph;
+    const RefPtr<MediaTrackGraphImpl> mGraph;
 
    public:
-    Blocker(MediaStreamGraphImpl* aGraph, const nsString& aName)
+    Blocker(MediaTrackGraphImpl* aGraph, const nsString& aName)
         : media::ShutdownBlocker(aName), mGraph(aGraph) {}
 
     NS_IMETHOD
     BlockShutdown(nsIAsyncShutdownClient* aProfileBeforeChange) override {
       mGraph->ForceShutDown();
       return NS_OK;
     }
   };
 
   // Blocker names must be distinct.
   nsString blockerName;
-  blockerName.AppendPrintf("MediaStreamGraph %p shutdown", this);
+  blockerName.AppendPrintf("MediaTrackGraph %p shutdown", this);
   mShutdownBlocker = MakeAndAddRef<Blocker>(this, blockerName);
   nsresult rv = media::GetShutdownBarrier()->AddBlocker(
       mShutdownBlocker, NS_LITERAL_STRING(__FILE__), __LINE__,
-      NS_LITERAL_STRING("MediaStreamGraph shutdown"));
+      NS_LITERAL_STRING("MediaTrackGraph shutdown"));
   MOZ_RELEASE_ASSERT(NS_SUCCEEDED(rv));
 }
 
-void MediaStreamGraphImpl::RemoveShutdownBlocker() {
+void MediaTrackGraphImpl::RemoveShutdownBlocker() {
   if (!mShutdownBlocker) {
     return;
   }
   media::GetShutdownBarrier()->RemoveBlocker(mShutdownBlocker);
   mShutdownBlocker = nullptr;
 }
 
 NS_IMETHODIMP
-MediaStreamGraphImpl::GetName(nsACString& aName) {
-  aName.AssignLiteral("MediaStreamGraphImpl");
+MediaTrackGraphImpl::GetName(nsACString& aName) {
+  aName.AssignLiteral("MediaTrackGraphImpl");
   return NS_OK;
 }
 
 namespace {
 
-class MediaStreamGraphShutDownRunnable : public Runnable {
+class MediaTrackGraphShutDownRunnable : public Runnable {
  public:
-  explicit MediaStreamGraphShutDownRunnable(MediaStreamGraphImpl* aGraph)
-      : Runnable("MediaStreamGraphShutDownRunnable"), mGraph(aGraph) {}
+  explicit MediaTrackGraphShutDownRunnable(MediaTrackGraphImpl* aGraph)
+      : Runnable("MediaTrackGraphShutDownRunnable"), mGraph(aGraph) {}
   NS_IMETHOD Run() override {
     MOZ_ASSERT(NS_IsMainThread());
     MOZ_ASSERT(mGraph->mDetectedNotRunning && mGraph->mDriver,
                "We should know the graph thread control loop isn't running!");
 
     LOG(LogLevel::Debug, ("%p: Shutting down graph", mGraph.get()));
 
     // We've asserted the graph isn't running.  Use mDriver instead of
@@ -1458,17 +1455,17 @@ class MediaStreamGraphShutDownRunnable :
     mGraph->mDriver
         ->Shutdown();  // This will wait until it's shutdown since
                        // we'll start tearing down the graph after this
 
     // Release the driver now so that an AudioCallbackDriver will release its
     // SharedThreadPool reference.  Each SharedThreadPool reference must be
     // released before SharedThreadPool::SpinUntilEmpty() runs on
     // xpcom-shutdown-threads.  Don't wait for GC/CC to release references to
-    // objects owning streams, or for expiration of mGraph->mShutdownTimer,
+    // objects owning tracks, or for expiration of mGraph->mShutdownTimer,
     // which won't otherwise release its reference on the graph until
     // nsTimerImpl::Shutdown(), which runs after xpcom-shutdown-threads.
     {
       MonitorAutoLock mon(mGraph->mMonitor);
       mGraph->SetCurrentDriver(nullptr);
     }
 
     // Safe to access these without the monitor since the graph isn't running.
@@ -1481,205 +1478,204 @@ class MediaStreamGraphShutDownRunnable :
           " continue - freezing and leaking");
 
       // The timer fired, so we may be deeper in shutdown now.  Block any
       // further teardown and just leak, for safety.
       return NS_OK;
     }
 
     // mGraph's thread is not running so it's OK to do whatever here
-    for (MediaStream* stream : mGraph->AllStreams()) {
+    for (MediaTrack* track : mGraph->AllTracks()) {
       // Clean up all MediaSegments since we cannot release Images too
       // late during shutdown. Also notify listeners that they were removed
       // so they can clean up any gfx resources.
-      stream->RemoveAllResourcesAndListenersImpl();
+      track->RemoveAllResourcesAndListenersImpl();
     }
 
     MOZ_ASSERT(mGraph->mUpdateRunnables.IsEmpty());
     mGraph->mPendingUpdateRunnables.Clear();
 
     mGraph->RemoveShutdownBlocker();
 
-    // We can't block past the final LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION
-    // stage, since completion of that stage requires all streams to be freed,
+    // We can't block past the final LIFECYCLE_WAITING_FOR_TRACK_DESTRUCTION
+    // stage, since completion of that stage requires all tracks to be freed,
     // which requires shutdown to proceed.
 
     if (mGraph->IsEmpty()) {
       // mGraph is no longer needed, so delete it.
       mGraph->Destroy();
     } else {
       // The graph is not empty.  We must be in a forced shutdown, either for
       // process shutdown or a non-realtime graph that has finished
       // processing. Some later AppendMessage will detect that the graph has
       // been emptied, and delete it.
       NS_ASSERTION(mGraph->mForceShutDown, "Not in forced shutdown?");
       mGraph->LifecycleStateRef() =
-          MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION;
+          MediaTrackGraphImpl::LIFECYCLE_WAITING_FOR_TRACK_DESTRUCTION;
     }
     return NS_OK;
   }
 
  private:
-  RefPtr<MediaStreamGraphImpl> mGraph;
+  RefPtr<MediaTrackGraphImpl> mGraph;
 };
 
-class MediaStreamGraphStableStateRunnable : public Runnable {
+class MediaTrackGraphStableStateRunnable : public Runnable {
  public:
-  explicit MediaStreamGraphStableStateRunnable(MediaStreamGraphImpl* aGraph,
-                                               bool aSourceIsMSG)
-      : Runnable("MediaStreamGraphStableStateRunnable"),
+  explicit MediaTrackGraphStableStateRunnable(MediaTrackGraphImpl* aGraph,
+                                              bool aSourceIsMTG)
+      : Runnable("MediaTrackGraphStableStateRunnable"),
         mGraph(aGraph),
-        mSourceIsMSG(aSourceIsMSG) {}
+        mSourceIsMTG(aSourceIsMTG) {}
   NS_IMETHOD Run() override {
     TRACE();
     if (mGraph) {
-      mGraph->RunInStableState(mSourceIsMSG);
+      mGraph->RunInStableState(mSourceIsMTG);
     }
     return NS_OK;
   }
 
  private:
-  RefPtr<MediaStreamGraphImpl> mGraph;
-  bool mSourceIsMSG;
+  RefPtr<MediaTrackGraphImpl> mGraph;
+  bool mSourceIsMTG;
 };
 
 /*
  * Control messages forwarded from main thread to graph manager thread
  */
 class CreateMessage : public ControlMessage {
  public:
-  explicit CreateMessage(MediaStream* aStream) : ControlMessage(aStream) {}
-  void Run() override { mStream->GraphImpl()->AddStreamGraphThread(mStream); }
+  explicit CreateMessage(MediaTrack* aTrack) : ControlMessage(aTrack) {}
+  void Run() override { mTrack->GraphImpl()->AddTrackGraphThread(mTrack); }
   void RunDuringShutdown() override {
     // Make sure to run this message during shutdown too, to make sure
-    // that we balance the number of streams registered with the graph
+    // that we balance the number of tracks registered with the graph
     // as they're destroyed during shutdown.
     Run();
   }
 };
 
 }  // namespace
 
-void MediaStreamGraphImpl::RunInStableState(bool aSourceIsMSG) {
+void MediaTrackGraphImpl::RunInStableState(bool aSourceIsMTG) {
   MOZ_ASSERT(NS_IsMainThread(), "Must be called on main thread");
 
   nsTArray<nsCOMPtr<nsIRunnable>> runnables;
   // When we're doing a forced shutdown, pending control messages may be
   // run on the main thread via RunDuringShutdown. Those messages must
   // run without the graph monitor being held. So, we collect them here.
   nsTArray<UniquePtr<ControlMessage>> controlMessagesToRunDuringShutdown;
 
   {
     MonitorAutoLock lock(mMonitor);
-    if (aSourceIsMSG) {
+    if (aSourceIsMTG) {
       MOZ_ASSERT(mPostedRunInStableStateEvent);
       mPostedRunInStableStateEvent = false;
     }
 
     // This should be kept in sync with the LifecycleState enum in
-    // MediaStreamGraphImpl.h
+    // MediaTrackGraphImpl.h
     const char* LifecycleState_str[] = {
         "LIFECYCLE_THREAD_NOT_STARTED", "LIFECYCLE_RUNNING",
         "LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP",
         "LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN",
-        "LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION"};
+        "LIFECYCLE_WAITING_FOR_TRACK_DESTRUCTION"};
 
     if (LifecycleStateRef() != LIFECYCLE_RUNNING) {
       LOG(LogLevel::Debug,
           ("%p: Running stable state callback. Current state: %s", this,
            LifecycleState_str[LifecycleStateRef()]));
     }
 
     runnables.SwapElements(mUpdateRunnables);
-    for (uint32_t i = 0; i < mStreamUpdates.Length(); ++i) {
-      StreamUpdate* update = &mStreamUpdates[i];
-      if (update->mStream) {
-        ApplyStreamUpdate(update);
+    for (uint32_t i = 0; i < mTrackUpdates.Length(); ++i) {
+      TrackUpdate* update = &mTrackUpdates[i];
+      if (update->mTrack) {
+        ApplyTrackUpdate(update);
       }
     }
-    mStreamUpdates.Clear();
+    mTrackUpdates.Clear();
 
     mMainThreadGraphTime = mNextMainThreadGraphTime;
 
     if (mCurrentTaskMessageQueue.IsEmpty()) {
       if (LifecycleStateRef() == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
           IsEmpty()) {
         // Complete shutdown. First, ensure that this graph is no longer used.
         // A new graph graph will be created if one is needed.
         // Asynchronously clean up old graph. We don't want to do this
         // synchronously because it spins the event loop waiting for threads
         // to shut down, and we don't want to do that in a stable state handler.
         LifecycleStateRef() = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
         LOG(LogLevel::Debug,
-            ("%p: Sending MediaStreamGraphShutDownRunnable", this));
-        nsCOMPtr<nsIRunnable> event =
-            new MediaStreamGraphShutDownRunnable(this);
+            ("%p: Sending MediaTrackGraphShutDownRunnable", this));
+        nsCOMPtr<nsIRunnable> event = new MediaTrackGraphShutDownRunnable(this);
         mAbstractMainThread->Dispatch(event.forget());
       }
     } else {
       if (LifecycleStateRef() <= LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
         MessageBlock* block = mBackMessageQueue.AppendElement();
         block->mMessages.SwapElements(mCurrentTaskMessageQueue);
         EnsureNextIterationLocked();
       }
 
-      // If this MediaStreamGraph has entered regular (non-forced) shutdown it
+      // If this MediaTrackGraph has entered regular (non-forced) shutdown it
       // is not able to process any more messages. Those messages being added to
       // the graph in the first place is an error.
       MOZ_DIAGNOSTIC_ASSERT(mForceShutDown ||
                             LifecycleStateRef() <
                                 LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP);
     }
 
     if (LifecycleStateRef() == LIFECYCLE_THREAD_NOT_STARTED) {
       LifecycleStateRef() = LIFECYCLE_RUNNING;
       // Start the thread now. We couldn't start it earlier because
-      // the graph might exit immediately on finding it has no streams. The
-      // first message for a new graph must create a stream.
+      // the graph might exit immediately on finding it has no tracks. The
+      // first message for a new graph must create a track.
       {
-        // We should exit the monitor for now, because starting a stream might
+        // We should exit the monitor for now, because starting a track might
         // take locks, and we don't want to deadlock.
         LOG(LogLevel::Debug,
             ("%p: Starting a graph with a %s", this,
              CurrentDriver()->AsAudioCallbackDriver() ? "AudioCallbackDriver"
                                                       : "SystemClockDriver"));
         RefPtr<GraphDriver> driver = CurrentDriver();
         MonitorAutoUnlock unlock(mMonitor);
         driver->Start();
         // It's not safe to Shutdown() a thread from StableState, and
         // releasing this may shutdown a SystemClockDriver thread.
         // Proxy the release to outside of StableState.
-        NS_ReleaseOnMainThreadSystemGroup("MediaStreamGraphImpl::CurrentDriver",
+        NS_ReleaseOnMainThreadSystemGroup("MediaTrackGraphImpl::CurrentDriver",
                                           driver.forget(),
                                           true);  // always proxy
       }
     }
 
     if (LifecycleStateRef() == LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP &&
         mForceShutDown) {
       // Defer calls to RunDuringShutdown() to happen while mMonitor is not
       // held.
       for (uint32_t i = 0; i < mBackMessageQueue.Length(); ++i) {
         MessageBlock& mb = mBackMessageQueue[i];
         controlMessagesToRunDuringShutdown.AppendElements(
             std::move(mb.mMessages));
       }
       mBackMessageQueue.Clear();
       MOZ_ASSERT(mCurrentTaskMessageQueue.IsEmpty());
-      // Stop MediaStreamGraph threads.
+      // Stop MediaTrackGraph threads.
       LifecycleStateRef() = LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
-      nsCOMPtr<nsIRunnable> event = new MediaStreamGraphShutDownRunnable(this);
+      nsCOMPtr<nsIRunnable> event = new MediaTrackGraphShutDownRunnable(this);
       mAbstractMainThread->Dispatch(event.forget());
     }
 
     mDetectedNotRunning = LifecycleStateRef() > LIFECYCLE_RUNNING;
   }
 
   // Make sure we get a new current time in the next event loop task
-  if (!aSourceIsMSG) {
+  if (!aSourceIsMTG) {
     MOZ_ASSERT(mPostedRunInStableState);
     mPostedRunInStableState = false;
   }
 
   for (uint32_t i = 0; i < controlMessagesToRunDuringShutdown.Length(); ++i) {
     controlMessagesToRunDuringShutdown[i]->RunDuringShutdown();
   }
 
@@ -1689,55 +1685,55 @@ void MediaStreamGraphImpl::RunInStableSt
       LifecycleStateRef() >= LIFECYCLE_WAITING_FOR_THREAD_SHUTDOWN;
 #endif
 
   for (uint32_t i = 0; i < runnables.Length(); ++i) {
     runnables[i]->Run();
   }
 }
 
-void MediaStreamGraphImpl::EnsureRunInStableState() {
+void MediaTrackGraphImpl::EnsureRunInStableState() {
   MOZ_ASSERT(NS_IsMainThread(), "main thread only");
 
   if (mPostedRunInStableState) return;
   mPostedRunInStableState = true;
   nsCOMPtr<nsIRunnable> event =
-      new MediaStreamGraphStableStateRunnable(this, false);
+      new MediaTrackGraphStableStateRunnable(this, false);
   nsContentUtils::RunInStableState(event.forget());
 }
 
-void MediaStreamGraphImpl::EnsureStableStateEventPosted() {
+void MediaTrackGraphImpl::EnsureStableStateEventPosted() {
   MOZ_ASSERT(OnGraphThread());
   mMonitor.AssertCurrentThreadOwns();
 
   if (mPostedRunInStableStateEvent) return;
   mPostedRunInStableStateEvent = true;
   nsCOMPtr<nsIRunnable> event =
-      new MediaStreamGraphStableStateRunnable(this, true);
+      new MediaTrackGraphStableStateRunnable(this, true);
   mAbstractMainThread->Dispatch(event.forget());
 }
 
-void MediaStreamGraphImpl::SignalMainThreadCleanup() {
+void MediaTrackGraphImpl::SignalMainThreadCleanup() {
   MOZ_ASSERT(mDriver->OnThread());
 
   MonitorAutoLock lock(mMonitor);
   // LIFECYCLE_THREAD_NOT_STARTED is possible when shutting down offline
   // graphs that have not started.
   MOZ_DIAGNOSTIC_ASSERT(mLifecycleState <= LIFECYCLE_RUNNING);
   LOG(LogLevel::Debug,
-      ("%p: MediaStreamGraph waiting for main thread cleanup", this));
+      ("%p: MediaTrackGraph waiting for main thread cleanup", this));
   LifecycleStateRef() =
-      MediaStreamGraphImpl::LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
+      MediaTrackGraphImpl::LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP;
   EnsureStableStateEventPosted();
 }
 
-void MediaStreamGraphImpl::AppendMessage(UniquePtr<ControlMessage> aMessage) {
+void MediaTrackGraphImpl::AppendMessage(UniquePtr<ControlMessage> aMessage) {
   MOZ_ASSERT(NS_IsMainThread(), "main thread only");
-  MOZ_ASSERT_IF(aMessage->GetStream(), !aMessage->GetStream()->IsDestroyed());
-  MOZ_DIAGNOSTIC_ASSERT(mMainThreadStreamCount > 0 || mMainThreadPortCount > 0);
+  MOZ_ASSERT_IF(aMessage->GetTrack(), !aMessage->GetTrack()->IsDestroyed());
+  MOZ_DIAGNOSTIC_ASSERT(mMainThreadTrackCount > 0 || mMainThreadPortCount > 0);
 
   if (mDetectedNotRunning &&
       LifecycleStateRef() > LIFECYCLE_WAITING_FOR_MAIN_THREAD_CLEANUP) {
     // The graph control loop is not running and main thread cleanup has
     // happened. From now on we can't append messages to
     // mCurrentTaskMessageQueue, because that will never be processed again, so
     // just RunDuringShutdown this message. This should only happen during
     // forced shutdown, or after a non-realtime graph has finished processing.
@@ -1745,59 +1741,59 @@ void MediaStreamGraphImpl::AppendMessage
     MOZ_ASSERT(mCanRunMessagesSynchronously);
     mCanRunMessagesSynchronously = false;
 #endif
     aMessage->RunDuringShutdown();
 #ifdef DEBUG
     mCanRunMessagesSynchronously = true;
 #endif
     if (IsEmpty() &&
-        LifecycleStateRef() >= LIFECYCLE_WAITING_FOR_STREAM_DESTRUCTION) {
+        LifecycleStateRef() >= LIFECYCLE_WAITING_FOR_TRACK_DESTRUCTION) {
       Destroy();
     }
     return;
   }
 
   mCurrentTaskMessageQueue.AppendElement(std::move(aMessage));
   EnsureRunInStableState();
 }
 
-void MediaStreamGraphImpl::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable) {
+void MediaTrackGraphImpl::Dispatch(already_AddRefed<nsIRunnable>&& aRunnable) {
   mAbstractMainThread->Dispatch(std::move(aRunnable));
 }
 
-MediaStream::MediaStream(TrackRate aSampleRate, MediaSegment::Type aType,
-                         MediaSegment* aSegment)
+MediaTrack::MediaTrack(TrackRate aSampleRate, MediaSegment::Type aType,
+                       MediaSegment* aSegment)
     : mSampleRate(aSampleRate),
       mType(aType),
       mSegment(aSegment),
       mStartTime(0),
       mForgottenTime(0),
       mEnded(false),
       mNotifiedEnded(false),
       mDisabledMode(DisabledTrackMode::ENABLED),
       mStartBlocking(GRAPH_TIME_MAX),
       mSuspendedCount(0),
       mMainThreadCurrentTime(0),
       mMainThreadEnded(false),
       mEndedNotificationSent(false),
       mMainThreadDestroyed(false),
       mGraph(nullptr) {
-  MOZ_COUNT_CTOR(MediaStream);
+  MOZ_COUNT_CTOR(MediaTrack);
   MOZ_ASSERT_IF(mSegment, mSegment->GetType() == aType);
 }
 
-MediaStream::~MediaStream() {
-  MOZ_COUNT_DTOR(MediaStream);
+MediaTrack::~MediaTrack() {
+  MOZ_COUNT_DTOR(MediaTrack);
   NS_ASSERTION(mMainThreadDestroyed, "Should have been destroyed already");
   NS_ASSERTION(mMainThreadListeners.IsEmpty(),
                "All main thread listeners should have been removed");
 }
 
-size_t MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
+size_t MediaTrack::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const {
   size_t amount = 0;
 
   // Not owned:
   // - mGraph - Not reported here
   // - mConsumers - elements
   // Future:
   // - mLastPlayedVideoFrame
   // - mTrackListeners - elements
@@ -1807,353 +1803,346 @@ size_t MediaStream::SizeOfExcludingThis(
   amount += mTrackListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mMainThreadListeners.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += mConsumers.ShallowSizeOfExcludingThis(aMallocSizeOf);
   amount += aMallocSizeOf(mAudioOutputStream.get());
 
   return amount;
 }
 
-size_t MediaStream::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
+size_t MediaTrack::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
   return aMallocSizeOf(this) + SizeOfExcludingThis(aMallocSizeOf);
 }
 
-void MediaStream::IncrementSuspendCount() {
+void MediaTrack::IncrementSuspendCount() {
   ++mSuspendedCount;
   if (mSuspendedCount == 1) {
     for (uint32_t i = 0; i < mConsumers.Length(); ++i) {
       mConsumers[i]->Suspended();
     }
   }
 }
 
-void MediaStream::DecrementSuspendCount() {
+void MediaTrack::DecrementSuspendCount() {
   NS_ASSERTION(mSuspendedCount > 0, "Suspend count underrun");
   --mSuspendedCount;
   if (mSuspendedCount == 0) {
     for (uint32_t i = 0; i < mConsumers.Length(); ++i) {
       mConsumers[i]->Resumed();
     }
   }
 }
 
-MediaStreamGraphImpl* MediaStream::GraphImpl() { return mGraph; }
-
-const MediaStreamGraphImpl* MediaStream::GraphImpl() const { return mGraph; }
-
-MediaStreamGraph* MediaStream::Graph() { return mGraph; }
-
-const MediaStreamGraph* MediaStream::Graph() const { return mGraph; }
-
-void MediaStream::SetGraphImpl(MediaStreamGraphImpl* aGraph) {
+MediaTrackGraphImpl* MediaTrack::GraphImpl() { return mGraph; }
+
+const MediaTrackGraphImpl* MediaTrack::GraphImpl() const { return mGraph; }
+
+MediaTrackGraph* MediaTrack::Graph() { return mGraph; }
+
+const MediaTrackGraph* MediaTrack::Graph() const { return mGraph; }
+
+void MediaTrack::SetGraphImpl(MediaTrackGraphImpl* aGraph) {
   MOZ_ASSERT(!mGraph, "Should only be called once");
   MOZ_ASSERT(mSampleRate == aGraph->GraphRate());
   mGraph = aGraph;
 }
 
-void MediaStream::SetGraphImpl(MediaStreamGraph* aGraph) {
-  MediaStreamGraphImpl* graph = static_cast<MediaStreamGraphImpl*>(aGraph);
+void MediaTrack::SetGraphImpl(MediaTrackGraph* aGraph) {
+  MediaTrackGraphImpl* graph = static_cast<MediaTrackGraphImpl*>(aGraph);
   SetGraphImpl(graph);
 }
 
-StreamTime MediaStream::GraphTimeToStreamTime(GraphTime aTime) const {
+TrackTime MediaTrack::GraphTimeToTrackTime(GraphTime aTime) const {
   NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
                    aTime <= mStartBlocking,
                "Incorrectly ignoring blocking!");
   return aTime - mStartTime;
 }
 
-GraphTime MediaStream::StreamTimeToGraphTime(StreamTime aTime) const {
+GraphTime MediaTrack::TrackTimeToGraphTime(TrackTime aTime) const {
   NS_ASSERTION(mStartBlocking == GraphImpl()->mStateComputedTime ||
                    aTime + mStartTime <= mStartBlocking,
                "Incorrectly ignoring blocking!");
   return aTime + mStartTime;
 }
 
-StreamTime MediaStream::GraphTimeToStreamTimeWithBlocking(
-    GraphTime aTime) const {
-  return GraphImpl()->GraphTimeToStreamTimeWithBlocking(this, aTime);
+TrackTime MediaTrack::GraphTimeToTrackTimeWithBlocking(GraphTime aTime) const {
+  return GraphImpl()->GraphTimeToTrackTimeWithBlocking(this, aTime);
 }
 
-void MediaStream::RemoveAllResourcesAndListenersImpl() {
+void MediaTrack::RemoveAllResourcesAndListenersImpl() {
   GraphImpl()->AssertOnGraphThreadOrNotRunning();
 
   auto trackListeners(mTrackListeners);
   for (auto& l : trackListeners) {
     l->NotifyRemoved(Graph());
   }
   mTrackListeners.Clear();
 
   RemoveAllDirectListenersImpl();
 
   if (mSegment) {
     mSegment->Clear();
   }
 }
 
-void MediaStream::DestroyImpl() {
+void MediaTrack::DestroyImpl() {
   for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
     mConsumers[i]->Disconnect();
   }
   if (mSegment) {
     mSegment->Clear();
   }
   mGraph = nullptr;
 }
 
-void MediaStream::Destroy() {
-  // Keep this stream alive until we leave this method
-  RefPtr<MediaStream> kungFuDeathGrip = this;
+void MediaTrack::Destroy() {
+  // Keep this track alive until we leave this method
+  RefPtr<MediaTrack> kungFuDeathGrip = this;
 
   class Message : public ControlMessage {
    public:
-    explicit Message(MediaStream* aStream) : ControlMessage(aStream) {}
+    explicit Message(MediaTrack* aTrack) : ControlMessage(aTrack) {}
     void Run() override {
-      mStream->RemoveAllResourcesAndListenersImpl();
-      auto graph = mStream->GraphImpl();
-      mStream->DestroyImpl();
-      graph->RemoveStreamGraphThread(mStream);
+      mTrack->RemoveAllResourcesAndListenersImpl();
+      auto graph = mTrack->GraphImpl();
+      mTrack->DestroyImpl();
+      graph->RemoveTrackGraphThread(mTrack);
     }
     void RunDuringShutdown() override { Run(); }
   };
   // Keep a reference to the graph, since Message might RunDuringShutdown()
   // synchronously and make GraphImpl() invalid.
-  RefPtr<MediaStreamGraphImpl> graph = GraphImpl();
+  RefPtr<MediaTrackGraphImpl> graph = GraphImpl();
   graph->AppendMessage(MakeUnique<Message>(this));
-  graph->RemoveStream(this);
-  // Message::RunDuringShutdown may have removed this stream from the graph,
-  // but our kungFuDeathGrip above will have kept this stream alive if
+  graph->RemoveTrack(this);
+  // Message::RunDuringShutdown may have removed this track from the graph,
+  // but our kungFuDeathGrip above will have kept this track alive if
   // necessary.
   mMainThreadDestroyed = true;
 }
 
-StreamTime MediaStream::GetEnd() const {
+TrackTime MediaTrack::GetEnd() const {
   return mSegment ? mSegment->GetDuration() : 0;
 }
 
-void MediaStream::AddAudioOutput(void* aKey) {
+void MediaTrack::AddAudioOutput(void* aKey) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, void* aKey)
-        : ControlMessage(aStream), mKey(aKey) {}
-    void Run() override { mStream->AddAudioOutputImpl(mKey); }
+    Message(MediaTrack* aTrack, void* aKey)
+        : ControlMessage(aTrack), mKey(aKey) {}
+    void Run() override { mTrack->AddAudioOutputImpl(mKey); }
     void* mKey;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aKey));
 }
 
-void MediaStream::SetAudioOutputVolumeImpl(void* aKey, float aVolume) {
+void MediaTrack::SetAudioOutputVolumeImpl(void* aKey, float aVolume) {
   for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
     if (mAudioOutputs[i].mKey == aKey) {
       mAudioOutputs[i].mVolume = aVolume;
       return;
     }
   }
   NS_ERROR("Audio output key not found");
 }
 
-void MediaStream::SetAudioOutputVolume(void* aKey, float aVolume) {
+void MediaTrack::SetAudioOutputVolume(void* aKey, float aVolume) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, void* aKey, float aVolume)
-        : ControlMessage(aStream), mKey(aKey), mVolume(aVolume) {}
-    void Run() override { mStream->SetAudioOutputVolumeImpl(mKey, mVolume); }
+    Message(MediaTrack* aTrack, void* aKey, float aVolume)
+        : ControlMessage(aTrack), mKey(aKey), mVolume(aVolume) {}
+    void Run() override { mTrack->SetAudioOutputVolumeImpl(mKey, mVolume); }
     void* mKey;
     float mVolume;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aKey, aVolume));
 }
 
-void MediaStream::AddAudioOutputImpl(void* aKey) {
+void MediaTrack::AddAudioOutputImpl(void* aKey) {
   LOG(LogLevel::Info,
-      ("MediaStream %p Adding AudioOutput for key %p", this, aKey));
+      ("MediaTrack %p Adding AudioOutput for key %p", this, aKey));
   mAudioOutputs.AppendElement(AudioOutput(aKey));
 }
 
-void MediaStream::RemoveAudioOutputImpl(void* aKey) {
+void MediaTrack::RemoveAudioOutputImpl(void* aKey) {
   LOG(LogLevel::Info,
-      ("MediaStream %p Removing AudioOutput for key %p", this, aKey));
+      ("MediaTrack %p Removing AudioOutput for key %p", this, aKey));
   for (uint32_t i = 0; i < mAudioOutputs.Length(); ++i) {
     if (mAudioOutputs[i].mKey == aKey) {
       mAudioOutputs.RemoveElementAt(i);
       return;
     }
   }
   NS_ERROR("Audio output key not found");
 }
 
-void MediaStream::RemoveAudioOutput(void* aKey) {
+void MediaTrack::RemoveAudioOutput(void* aKey) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, void* aKey)
-        : ControlMessage(aStream), mKey(aKey) {}
-    void Run() override { mStream->RemoveAudioOutputImpl(mKey); }
+    Message(MediaTrack* aTrack, void* aKey)
+        : ControlMessage(aTrack), mKey(aKey) {}
+    void Run() override { mTrack->RemoveAudioOutputImpl(mKey); }
     void* mKey;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aKey));
 }
 
-void MediaStream::Suspend() {
+void MediaTrack::Suspend() {
   class Message : public ControlMessage {
    public:
-    explicit Message(MediaStream* aStream) : ControlMessage(aStream) {}
-    void Run() override {
-      mStream->GraphImpl()->IncrementSuspendCount(mStream);
-    }
+    explicit Message(MediaTrack* aTrack) : ControlMessage(aTrack) {}
+    void Run() override { mTrack->GraphImpl()->IncrementSuspendCount(mTrack); }
   };
 
   // This can happen if this method has been called asynchronously, and the
-  // stream has been destroyed since then.
+  // track has been destroyed since then.
   if (mMainThreadDestroyed) {
     return;
   }
   GraphImpl()->AppendMessage(MakeUnique<Message>(this));
 }
 
-void MediaStream::Resume() {
+void MediaTrack::Resume() {
   class Message : public ControlMessage {
    public:
-    explicit Message(MediaStream* aStream) : ControlMessage(aStream) {}
-    void Run() override {
-      mStream->GraphImpl()->DecrementSuspendCount(mStream);
-    }
+    explicit Message(MediaTrack* aTrack) : ControlMessage(aTrack) {}
+    void Run() override { mTrack->GraphImpl()->DecrementSuspendCount(mTrack); }
   };
 
   // This can happen if this method has been called asynchronously, and the
-  // stream has been destroyed since then.
+  // track has been destroyed since then.
   if (mMainThreadDestroyed) {
     return;
   }
   GraphImpl()->AppendMessage(MakeUnique<Message>(this));
 }
 
-void MediaStream::AddListenerImpl(
-    already_AddRefed<MediaStreamTrackListener> aListener) {
-  RefPtr<MediaStreamTrackListener> l(aListener);
+void MediaTrack::AddListenerImpl(
+    already_AddRefed<MediaTrackListener> aListener) {
+  RefPtr<MediaTrackListener> l(aListener);
   mTrackListeners.AppendElement(std::move(l));
 
   PrincipalHandle lastPrincipalHandle = mSegment->GetLastPrincipalHandle();
   mTrackListeners.LastElement()->NotifyPrincipalHandleChanged(
       Graph(), lastPrincipalHandle);
   if (mNotifiedEnded) {
     mTrackListeners.LastElement()->NotifyEnded(Graph());
   }
   if (mDisabledMode == DisabledTrackMode::SILENCE_BLACK) {
     mTrackListeners.LastElement()->NotifyEnabledStateChanged(Graph(), false);
   }
 }
 
-void MediaStream::AddListener(MediaStreamTrackListener* aListener) {
+void MediaTrack::AddListener(MediaTrackListener* aListener) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, MediaStreamTrackListener* aListener)
-        : ControlMessage(aStream), mListener(aListener) {}
-    void Run() override { mStream->AddListenerImpl(mListener.forget()); }
-    RefPtr<MediaStreamTrackListener> mListener;
+    Message(MediaTrack* aTrack, MediaTrackListener* aListener)
+        : ControlMessage(aTrack), mListener(aListener) {}
+    void Run() override { mTrack->AddListenerImpl(mListener.forget()); }
+    RefPtr<MediaTrackListener> mListener;
   };
   MOZ_ASSERT(mSegment, "Segment-less tracks do not support listeners");
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aListener));
 }
 
-void MediaStream::RemoveListenerImpl(MediaStreamTrackListener* aListener) {
+void MediaTrack::RemoveListenerImpl(MediaTrackListener* aListener) {
   for (size_t i = 0; i < mTrackListeners.Length(); ++i) {
     if (mTrackListeners[i] == aListener) {
       mTrackListeners[i]->NotifyRemoved(Graph());
       mTrackListeners.RemoveElementAt(i);
       return;
     }
   }
 }
 
-void MediaStream::RemoveListener(MediaStreamTrackListener* aListener) {
+void MediaTrack::RemoveListener(MediaTrackListener* aListener) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, MediaStreamTrackListener* aListener)
-        : ControlMessage(aStream), mListener(aListener) {}
-    void Run() override { mStream->RemoveListenerImpl(mListener); }
+    Message(MediaTrack* aTrack, MediaTrackListener* aListener)
+        : ControlMessage(aTrack), mListener(aListener) {}
+    void Run() override { mTrack->RemoveListenerImpl(mListener); }
     void RunDuringShutdown() override {
       // During shutdown we still want the listener's NotifyRemoved to be
       // called, since not doing that might block shutdown of other modules.
       Run();
     }
-    RefPtr<MediaStreamTrackListener> mListener;
+    RefPtr<MediaTrackListener> mListener;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aListener));
 }
 
-void MediaStream::AddDirectListenerImpl(
-    already_AddRefed<DirectMediaStreamTrackListener> aListener) {
-  // Base implementation, for streams that don't support direct track listeners.
-  RefPtr<DirectMediaStreamTrackListener> listener = aListener;
+void MediaTrack::AddDirectListenerImpl(
+    already_AddRefed<DirectMediaTrackListener> aListener) {
+  // Base implementation, for tracks that don't support direct track listeners.
+  RefPtr<DirectMediaTrackListener> listener = aListener;
   listener->NotifyDirectListenerInstalled(
-      DirectMediaStreamTrackListener::InstallationResult::STREAM_NOT_SUPPORTED);
+      DirectMediaTrackListener::InstallationResult::TRACK_NOT_SUPPORTED);
 }
 
-void MediaStream::AddDirectListener(DirectMediaStreamTrackListener* aListener) {
+void MediaTrack::AddDirectListener(DirectMediaTrackListener* aListener) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, DirectMediaStreamTrackListener* aListener)
-        : ControlMessage(aStream), mListener(aListener) {}
-    void Run() override { mStream->AddDirectListenerImpl(mListener.forget()); }
-    RefPtr<DirectMediaStreamTrackListener> mListener;
+    Message(MediaTrack* aTrack, DirectMediaTrackListener* aListener)
+        : ControlMessage(aTrack), mListener(aListener) {}
+    void Run() override { mTrack->AddDirectListenerImpl(mListener.forget()); }
+    RefPtr<DirectMediaTrackListener> mListener;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aListener));
 }
 
-void MediaStream::RemoveDirectListenerImpl(
-    DirectMediaStreamTrackListener* aListener) {
+void MediaTrack::RemoveDirectListenerImpl(DirectMediaTrackListener* aListener) {
   // Base implementation, the listener was never added so nothing to do.
 }
 
-void MediaStream::RemoveDirectListener(
-    DirectMediaStreamTrackListener* aListener) {
+void MediaTrack::RemoveDirectListener(DirectMediaTrackListener* aListener) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, DirectMediaStreamTrackListener* aListener)
-        : ControlMessage(aStream), mListener(aListener) {}
-    void Run() override { mStream->RemoveDirectListenerImpl(mListener); }
+    Message(MediaTrack* aTrack, DirectMediaTrackListener* aListener)
+        : ControlMessage(aTrack), mListener(aListener) {}
+    void Run() override { mTrack->RemoveDirectListenerImpl(mListener); }
     void RunDuringShutdown() override {
       // During shutdown we still want the listener's
       // NotifyDirectListenerUninstalled to be called, since not doing that
       // might block shutdown of other modules.
       Run();
     }
-    RefPtr<DirectMediaStreamTrackListener> mListener;
+    RefPtr<DirectMediaTrackListener> mListener;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aListener));
 }
 
-void MediaStream::RunAfterPendingUpdates(
+void MediaTrack::RunAfterPendingUpdates(
     already_AddRefed<nsIRunnable> aRunnable) {
   MOZ_ASSERT(NS_IsMainThread());
-  MediaStreamGraphImpl* graph = GraphImpl();
+  MediaTrackGraphImpl* graph = GraphImpl();
   nsCOMPtr<nsIRunnable> runnable(aRunnable);
 
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, already_AddRefed<nsIRunnable> aRunnable)
-        : ControlMessage(aStream), mRunnable(aRunnable) {}
+    Message(MediaTrack* aTrack, already_AddRefed<nsIRunnable> aRunnable)
+        : ControlMessage(aTrack), mRunnable(aRunnable) {}
     void Run() override {
-      mStream->Graph()->DispatchToMainThreadStableState(mRunnable.forget());
+      mTrack->Graph()->DispatchToMainThreadStableState(mRunnable.forget());
     }
     void RunDuringShutdown() override {
       // Don't run mRunnable now as it may call AppendMessage() which would
       // assume that there are no remaining controlMessagesToRunDuringShutdown.
       MOZ_ASSERT(NS_IsMainThread());
-      mStream->GraphImpl()->Dispatch(mRunnable.forget());
+      mTrack->GraphImpl()->Dispatch(mRunnable.forget());
     }
 
    private:
     nsCOMPtr<nsIRunnable> mRunnable;
   };
 
   graph->AppendMessage(MakeUnique<Message>(this, runnable.forget()));
 }
 
-void MediaStream::SetEnabledImpl(DisabledTrackMode aMode) {
+void MediaTrack::SetEnabledImpl(DisabledTrackMode aMode) {
   if (aMode == DisabledTrackMode::ENABLED) {
     mDisabledMode = DisabledTrackMode::ENABLED;
     for (const auto& l : mTrackListeners) {
       l->NotifyEnabledStateChanged(Graph(), true);
     }
   } else {
     MOZ_DIAGNOSTIC_ASSERT(
         mDisabledMode == DisabledTrackMode::ENABLED,
@@ -2162,29 +2151,29 @@ void MediaStream::SetEnabledImpl(Disable
     if (aMode == DisabledTrackMode::SILENCE_BLACK) {
       for (const auto& l : mTrackListeners) {
         l->NotifyEnabledStateChanged(Graph(), false);
       }
     }
   }
 }
 
-void MediaStream::SetEnabled(DisabledTrackMode aMode) {
+void MediaTrack::SetEnabled(DisabledTrackMode aMode) {
   class Message : public ControlMessage {
    public:
-    Message(MediaStream* aStream, DisabledTrackMode aMode)
-        : ControlMessage(aStream), mMode(aMode) {}
-    void Run() override { mStream->SetEnabledImpl(mMode); }
+    Message(MediaTrack* aTrack, DisabledTrackMode aMode)
+        : ControlMessage(aTrack), mMode(aMode) {}
+    void Run() override { mTrack->SetEnabledImpl(mMode); }
     DisabledTrackMode mMode;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aMode));
 }
 
-void MediaStream::ApplyTrackDisabling(MediaSegment* aSegment,
-                                      MediaSegment* aRawSegment) {
+void MediaTrack::ApplyTrackDisabling(MediaSegment* aSegment,
+                                     MediaSegment* aRawSegment) {
   if (mDisabledMode == DisabledTrackMode::ENABLED) {
     return;
   }
   if (mDisabledMode == DisabledTrackMode::SILENCE_BLACK) {
     aSegment->ReplaceWithDisabled();
     if (aRawSegment) {
       aRawSegment->ReplaceWithDisabled();
     }
@@ -2193,197 +2182,196 @@ void MediaStream::ApplyTrackDisabling(Me
     if (aRawSegment) {
       aRawSegment->ReplaceWithNull();
     }
   } else {
     MOZ_CRASH("Unsupported mode");
   }
 }
 
-void MediaStream::AddMainThreadListener(
-    MainThreadMediaStreamListener* aListener) {
+void MediaTrack::AddMainThreadListener(
+    MainThreadMediaTrackListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(aListener);
   MOZ_ASSERT(!mMainThreadListeners.Contains(aListener));
 
   mMainThreadListeners.AppendElement(aListener);
 
   // If it is not yet time to send the notification, then exit here.
   if (!mEndedNotificationSent) {
     return;
   }
 
   class NotifyRunnable final : public Runnable {
    public:
-    explicit NotifyRunnable(MediaStream* aStream)
-        : Runnable("MediaStream::NotifyRunnable"), mStream(aStream) {}
+    explicit NotifyRunnable(MediaTrack* aTrack)
+        : Runnable("MediaTrack::NotifyRunnable"), mTrack(aTrack) {}
 
     NS_IMETHOD Run() override {
       MOZ_ASSERT(NS_IsMainThread());
-      mStream->NotifyMainThreadListeners();
+      mTrack->NotifyMainThreadListeners();
       return NS_OK;
     }
 
    private:
     ~NotifyRunnable() {}
 
-    RefPtr<MediaStream> mStream;
+    RefPtr<MediaTrack> mTrack;
   };
 
   nsCOMPtr<nsIRunnable> runnable = new NotifyRunnable(this);
   GraphImpl()->Dispatch(runnable.forget());
 }
 
-void MediaStream::AdvanceTimeVaryingValuesToCurrentTime(
-    GraphTime aCurrentTime, GraphTime aBlockedTime) {
+void MediaTrack::AdvanceTimeVaryingValuesToCurrentTime(GraphTime aCurrentTime,
+                                                       GraphTime aBlockedTime) {
   mStartTime += aBlockedTime;
 
   if (!mSegment) {
     // No data to be forgotten.
     return;
   }
 
-  StreamTime time = aCurrentTime - mStartTime;
+  TrackTime time = aCurrentTime - mStartTime;
   // Only prune if there is a reasonable chunk (50ms @ 48kHz) to forget, so we
   // don't spend too much time pruning segments.
-  const StreamTime minChunkSize = 2400;
+  const TrackTime minChunkSize = 2400;
   if (time < mForgottenTime + minChunkSize) {
     return;
   }
 
   mForgottenTime = std::min(GetEnd() - 1, time);
   mSegment->ForgetUpTo(mForgottenTime);
 }
 
-SourceMediaStream::SourceMediaStream(MediaSegment::Type aType,
-                                     TrackRate aSampleRate)
-    : MediaStream(aSampleRate, aType,
-                  aType == MediaSegment::AUDIO
-                      ? static_cast<MediaSegment*>(new AudioSegment())
-                      : static_cast<MediaSegment*>(new VideoSegment())),
-      mMutex("mozilla::media::SourceMediaStream") {
+SourceMediaTrack::SourceMediaTrack(MediaSegment::Type aType,
+                                   TrackRate aSampleRate)
+    : MediaTrack(aSampleRate, aType,
+                 aType == MediaSegment::AUDIO
+                     ? static_cast<MediaSegment*>(new AudioSegment())
+                     : static_cast<MediaSegment*>(new VideoSegment())),
+      mMutex("mozilla::media::SourceMediaTrack") {
   mUpdateTrack = MakeUnique<TrackData>();
   mUpdateTrack->mInputRate = aSampleRate;
   mUpdateTrack->mResamplerChannelCount = 0;
   mUpdateTrack->mData = UniquePtr<MediaSegment>(mSegment->CreateEmptyClone());
   mUpdateTrack->mEnded = false;
   mUpdateTrack->mPullingEnabled = false;
 }
 
-nsresult SourceMediaStream::OpenAudioInput(CubebUtils::AudioDeviceID aID,
-                                           AudioDataListener* aListener) {
+nsresult SourceMediaTrack::OpenAudioInput(CubebUtils::AudioDeviceID aID,
+                                          AudioDataListener* aListener) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(GraphImpl());
   MOZ_ASSERT(!mInputListener);
   mInputListener = aListener;
   return GraphImpl()->OpenAudioInput(aID, aListener);
 }
 
-void SourceMediaStream::CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID) {
+void SourceMediaTrack::CloseAudioInput(Maybe<CubebUtils::AudioDeviceID>& aID) {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_ASSERT(GraphImpl());
   if (!mInputListener) {
     return;
   }
   GraphImpl()->CloseAudioInput(aID, mInputListener);
   mInputListener = nullptr;
 }
 
-void SourceMediaStream::Destroy() {
+void SourceMediaTrack::Destroy() {
   MOZ_ASSERT(NS_IsMainThread());
   Maybe<CubebUtils::AudioDeviceID> id = Nothing();
   CloseAudioInput(id);
 
-  MediaStream::Destroy();
+  MediaTrack::Destroy();
 }
 
-void SourceMediaStream::DestroyImpl() {
+void SourceMediaTrack::DestroyImpl() {
   GraphImpl()->AssertOnGraphThreadOrNotRunning();
   for (int32_t i = mConsumers.Length() - 1; i >= 0; --i) {
     // Disconnect before we come under mMutex's lock since it can call back
     // through RemoveDirectListenerImpl() and deadlock.
     mConsumers[i]->Disconnect();
   }
 
  // Hold mMutex while mGraph is reset so that other threads holding mMutex
  // can null-check mGraph and know that the graph will not be destroyed.
   MutexAutoLock lock(mMutex);
   mUpdateTrack = nullptr;
-  MediaStream::DestroyImpl();
+  MediaTrack::DestroyImpl();
 }
 
-void SourceMediaStream::SetPullingEnabled(bool aEnabled) {
+void SourceMediaTrack::SetPullingEnabled(bool aEnabled) {
   class Message : public ControlMessage {
    public:
-    Message(SourceMediaStream* aStream, bool aEnabled)
-        : ControlMessage(nullptr), mStream(aStream), mEnabled(aEnabled) {}
+    Message(SourceMediaTrack* aTrack, bool aEnabled)
+        : ControlMessage(nullptr), mTrack(aTrack), mEnabled(aEnabled) {}
     void Run() override {
-      MutexAutoLock lock(mStream->mMutex);
-      if (!mStream->mUpdateTrack) {
-        // We can't enable pulling for a stream that has ended. We ignore
+      MutexAutoLock lock(mTrack->mMutex);
+      if (!mTrack->mUpdateTrack) {
+        // We can't enable pulling for a track that has ended. We ignore
         // this if we're disabling pulling, since shutdown sequences are
         // complex. If there's truly an issue we'll have issues enabling anyway.
-        MOZ_ASSERT_IF(mEnabled, mStream->mEnded);
+        MOZ_ASSERT_IF(mEnabled, mTrack->mEnded);
         return;
       }
-      MOZ_ASSERT(mStream->mType == MediaSegment::AUDIO,
+      MOZ_ASSERT(mTrack->mType == MediaSegment::AUDIO,
                  "Pulling is not allowed for video");
-      mStream->mUpdateTrack->mPullingEnabled = mEnabled;
+      mTrack->mUpdateTrack->mPullingEnabled = mEnabled;
     }
-    SourceMediaStream* mStream;
+    SourceMediaTrack* mTrack;
     bool mEnabled;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aEnabled));
 }
 
-bool SourceMediaStream::PullNewData(GraphTime aDesiredUpToTime) {
-  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaStream %p", this);
-  StreamTime t;
-  StreamTime current;
+bool SourceMediaTrack::PullNewData(GraphTime aDesiredUpToTime) {
+  TRACE_AUDIO_CALLBACK_COMMENT("SourceMediaTrack %p", this);
+  TrackTime t;
+  TrackTime current;
   {
     if (mEnded) {
       return false;
     }
     MutexAutoLock lock(mMutex);
     if (mUpdateTrack->mEnded) {
       return false;
     }
     if (!mUpdateTrack->mPullingEnabled) {
       return false;
     }
-    // Compute how much stream time we'll need assuming we don't block
-    // the stream at all.
-    t = GraphTimeToStreamTime(aDesiredUpToTime);
+    // Compute how much track time we'll need assuming we don't block
+    // the track at all.
+    t = GraphTimeToTrackTime(aDesiredUpToTime);
     current = GetEnd() + mUpdateTrack->mData->GetDuration();
   }
   if (t <= current) {
     return false;
   }
-  LOG(LogLevel::Verbose,
-      ("%p: Calling NotifyPull stream=%p t=%f current end=%f", GraphImpl(),
-       this, GraphImpl()->MediaTimeToSeconds(t),
-       GraphImpl()->MediaTimeToSeconds(current)));
+  LOG(LogLevel::Verbose, ("%p: Calling NotifyPull track=%p t=%f current end=%f",
+                          GraphImpl(), this, GraphImpl()->MediaTimeToSeconds(t),
+                          GraphImpl()->MediaTimeToSeconds(current)));
   for (auto& l : mTrackListeners) {
     l->NotifyPull(Graph(), current, t);
   }
   return true;
 }
 
 /**
  * This moves chunks from aIn to aOut. For audio this is simple. For video
  * we carry durations over if present, or extend up to aDesiredUpToTime if not.
  *
  * We also handle "resetters" from captured media elements. This type of source
  * pushes future frames into the track, and should it need to remove some, e.g.,
  * because of a seek or pause, it tells us by letting time go backwards. Without
  * this, tracks would be live for too long after a seek or pause.
  */
-static void MoveToSegment(SourceMediaStream* aStream, MediaSegment* aIn,
-                          MediaSegment* aOut, StreamTime aCurrentTime,
-                          StreamTime aDesiredUpToTime) {
+static void MoveToSegment(SourceMediaTrack* aTrack, MediaSegment* aIn,
+                          MediaSegment* aOut, TrackTime aCurrentTime,
+                          TrackTime aDesiredUpToTime) {
   MOZ_ASSERT(aIn->GetType() == aOut->GetType());
   MOZ_ASSERT(aOut->GetDuration() >= aCurrentTime);
   if (aIn->GetType() == MediaSegment::AUDIO) {
     aOut->AppendFrom(aIn);
   } else {
     VideoSegment* in = static_cast<VideoSegment*>(aIn);
     VideoSegment* out = static_cast<VideoSegment*>(aOut);
     for (VideoSegment::ConstChunkIterator c(*in); !c.IsEnded(); c.Next()) {
@@ -2423,52 +2411,52 @@ static void MoveToSegment(SourceMediaStr
     if (out->GetDuration() < aDesiredUpToTime) {
       out->ExtendLastFrameBy(aDesiredUpToTime - out->GetDuration());
     }
     in->Clear();
   }
   MOZ_ASSERT(aIn->GetDuration() == 0, "aIn must be consumed");
 }
 
-void SourceMediaStream::ExtractPendingInput(GraphTime aCurrentTime,
-                                            GraphTime aDesiredUpToTime) {
+void SourceMediaTrack::ExtractPendingInput(GraphTime aCurrentTime,
+                                           GraphTime aDesiredUpToTime) {
   MutexAutoLock lock(mMutex);
 
   if (!mUpdateTrack) {
     MOZ_ASSERT(mEnded);
     return;
   }
 
-  StreamTime streamCurrentTime = GraphTimeToStreamTime(aCurrentTime);
+  TrackTime trackCurrentTime = GraphTimeToTrackTime(aCurrentTime);
 
   ApplyTrackDisabling(mUpdateTrack->mData.get());
 
   if (!mUpdateTrack->mData->IsEmpty()) {
     for (const auto& l : mTrackListeners) {
       l->NotifyQueuedChanges(GraphImpl(), GetEnd(), *mUpdateTrack->mData);
     }
   }
-  StreamTime streamDesiredUpToTime = GraphTimeToStreamTime(aDesiredUpToTime);
-  StreamTime endTime = streamDesiredUpToTime;
+  TrackTime trackDesiredUpToTime = GraphTimeToTrackTime(aDesiredUpToTime);
+  TrackTime endTime = trackDesiredUpToTime;
   if (mUpdateTrack->mEnded) {
-    endTime = std::min(streamDesiredUpToTime,
+    endTime = std::min(trackDesiredUpToTime,
                        GetEnd() + mUpdateTrack->mData->GetDuration());
   }
   LOG(LogLevel::Verbose,
-      ("%p: SourceMediaStream %p advancing end from %" PRId64 " to %" PRId64,
-       GraphImpl(), this, int64_t(streamCurrentTime), int64_t(endTime)));
+      ("%p: SourceMediaTrack %p advancing end from %" PRId64 " to %" PRId64,
+       GraphImpl(), this, int64_t(trackCurrentTime), int64_t(endTime)));
   MoveToSegment(this, mUpdateTrack->mData.get(), mSegment.get(),
-                streamCurrentTime, endTime);
-  if (mUpdateTrack->mEnded && GetEnd() < streamDesiredUpToTime) {
+                trackCurrentTime, endTime);
+  if (mUpdateTrack->mEnded && GetEnd() < trackDesiredUpToTime) {
     mEnded = true;
     mUpdateTrack = nullptr;
   }
 }
 
-void SourceMediaStream::ResampleAudioToGraphSampleRate(MediaSegment* aSegment) {
+void SourceMediaTrack::ResampleAudioToGraphSampleRate(MediaSegment* aSegment) {
   mMutex.AssertCurrentThreadOwns();
   if (aSegment->GetType() != MediaSegment::AUDIO ||
       mUpdateTrack->mInputRate == GraphImpl()->GraphRate()) {
     return;
   }
   AudioSegment* segment = static_cast<AudioSegment*>(aSegment);
   int channels = segment->ChannelCount();
 
@@ -2484,47 +2472,46 @@ void SourceMediaStream::ResampleAudioToG
     }
     mUpdateTrack->mResampler.own(state);
     mUpdateTrack->mResamplerChannelCount = channels;
   }
   segment->ResampleChunks(mUpdateTrack->mResampler, mUpdateTrack->mInputRate,
                           GraphImpl()->GraphRate());
 }
 
-void SourceMediaStream::AdvanceTimeVaryingValuesToCurrentTime(
+void SourceMediaTrack::AdvanceTimeVaryingValuesToCurrentTime(
     GraphTime aCurrentTime, GraphTime aBlockedTime) {
   MutexAutoLock lock(mMutex);
-  MediaStream::AdvanceTimeVaryingValuesToCurrentTime(aCurrentTime,
-                                                     aBlockedTime);
+  MediaTrack::AdvanceTimeVaryingValuesToCurrentTime(aCurrentTime, aBlockedTime);
 }
 
-void SourceMediaStream::SetAppendDataSourceRate(TrackRate aRate) {
+void SourceMediaTrack::SetAppendDataSourceRate(TrackRate aRate) {
   MutexAutoLock lock(mMutex);
   if (!mUpdateTrack) {
     return;
   }
   MOZ_DIAGNOSTIC_ASSERT(mSegment->GetType() == MediaSegment::AUDIO);
   // Set the new input rate and reset the resampler.
   mUpdateTrack->mInputRate = aRate;
   mUpdateTrack->mResampler.own(nullptr);
   mUpdateTrack->mResamplerChannelCount = 0;
 }
 
-StreamTime SourceMediaStream::AppendData(MediaSegment* aSegment,
-                                         MediaSegment* aRawSegment) {
+TrackTime SourceMediaTrack::AppendData(MediaSegment* aSegment,
+                                       MediaSegment* aRawSegment) {
   MutexAutoLock lock(mMutex);
   MOZ_DIAGNOSTIC_ASSERT(aSegment->GetType() == mType);
-  StreamTime appended = 0;
+  TrackTime appended = 0;
   auto graph = GraphImpl();
   if (!mUpdateTrack || mUpdateTrack->mEnded || !graph) {
     aSegment->Clear();
     return appended;
   }
 
-  // Data goes into mData, and on the next iteration of the MSG moves
+  // Data goes into mData, and on the next iteration of the MTG moves
   // into the track's segment after NotifyQueuedTrackChanges().  This adds
   // 0-10ms of delay before data gets to direct listeners.
   // Indirect listeners (via subsequent TrackUnion nodes) are synced to
   // playout time, and so can be delayed by buffering.
 
   // Apply track disabling before notifying any consumers directly
   // or inserting into the graph
   ApplyTrackDisabling(aSegment, aRawSegment);
@@ -2535,65 +2522,65 @@ StreamTime SourceMediaStream::AppendData
   NotifyDirectConsumers(aRawSegment ? aRawSegment : aSegment);
   appended = aSegment->GetDuration();
   mUpdateTrack->mData->AppendFrom(aSegment);  // note: aSegment is now dead
   graph->EnsureNextIteration();
 
   return appended;
 }
 
-void SourceMediaStream::NotifyDirectConsumers(MediaSegment* aSegment) {
+void SourceMediaTrack::NotifyDirectConsumers(MediaSegment* aSegment) {
   mMutex.AssertCurrentThreadOwns();
 
   for (const auto& l : mDirectTrackListeners) {
-    StreamTime offset = 0;  // FIX! need a separate StreamTime.... or the end of
-                            // the internal buffer
+    TrackTime offset = 0;  // FIX! need a separate TrackTime.... or the end of
+                           // the internal buffer
     l->NotifyRealtimeTrackDataAndApplyTrackDisabling(Graph(), offset,
                                                      *aSegment);
   }
 }
 
-void SourceMediaStream::AddDirectListenerImpl(
-    already_AddRefed<DirectMediaStreamTrackListener> aListener) {
+void SourceMediaTrack::AddDirectListenerImpl(
+    already_AddRefed<DirectMediaTrackListener> aListener) {
   MutexAutoLock lock(mMutex);
 
-  RefPtr<DirectMediaStreamTrackListener> listener = aListener;
+  RefPtr<DirectMediaTrackListener> listener = aListener;
   LOG(LogLevel::Debug,
-      ("%p: Adding direct track listener %p to source stream %p", GraphImpl(),
+      ("%p: Adding direct track listener %p to source track %p", GraphImpl(),
        listener.get(), this));
 
   MOZ_ASSERT(mType == MediaSegment::VIDEO);
   for (const auto& l : mDirectTrackListeners) {
     if (l == listener) {
       listener->NotifyDirectListenerInstalled(
-          DirectMediaStreamTrackListener::InstallationResult::ALREADY_EXISTS);
+          DirectMediaTrackListener::InstallationResult::ALREADY_EXISTS);
       return;
     }
   }
 
   mDirectTrackListeners.AppendElement(listener);
 
   LOG(LogLevel::Debug,
       ("%p: Added direct track listener %p", GraphImpl(), listener.get()));
   listener->NotifyDirectListenerInstalled(
-      DirectMediaStreamTrackListener::InstallationResult::SUCCESS);
+      DirectMediaTrackListener::InstallationResult::SUCCESS);
 
   if (mEnded) {
     return;
   }
 
   // Pass buffered data to the listener
   VideoSegment bufferedData;
   size_t videoFrames = 0;
   VideoSegment& segment = *GetData<VideoSegment>();
   for (VideoSegment::ConstChunkIterator iter(segment); !iter.IsEnded();
        iter.Next()) {
     if (iter->mTimeStamp.IsNull()) {
       // No timestamp means this is only for the graph's internal book-keeping,
-      // denoting a late start of the stream.
+      // denoting a late start of the track.
       continue;
     }
     ++videoFrames;
     bufferedData.AppendFrame(do_AddRef(iter->mFrame.GetImage()),
                              iter->mFrame.GetIntrinsicSize(),
                              iter->mFrame.GetPrincipalHandle(),
                              iter->mFrame.GetForceBlack(), iter->mTimeStamp);
   }
@@ -2611,73 +2598,73 @@ void SourceMediaStream::AddDirectListene
 
   LOG(LogLevel::Info,
       ("%p: Notifying direct listener %p of %zu video frames and duration "
        "%" PRId64,
        GraphImpl(), listener.get(), videoFrames, bufferedData.GetDuration()));
   listener->NotifyRealtimeTrackData(Graph(), 0, bufferedData);
 }
 
-void SourceMediaStream::RemoveDirectListenerImpl(
-    DirectMediaStreamTrackListener* aListener) {
+void SourceMediaTrack::RemoveDirectListenerImpl(
+    DirectMediaTrackListener* aListener) {
   MutexAutoLock lock(mMutex);
   for (int32_t i = mDirectTrackListeners.Length() - 1; i >= 0; --i) {
-    const RefPtr<DirectMediaStreamTrackListener>& l = mDirectTrackListeners[i];
+    const RefPtr<DirectMediaTrackListener>& l = mDirectTrackListeners[i];
     if (l == aListener) {
       aListener->NotifyDirectListenerUninstalled();
       mDirectTrackListeners.RemoveElementAt(i);
     }
   }
 }
 
-void SourceMediaStream::End() {
+void SourceMediaTrack::End() {
   MutexAutoLock lock(mMutex);
   if (!mUpdateTrack) {
     // Already ended
     return;
   }
   mUpdateTrack->mEnded = true;
   if (auto graph = GraphImpl()) {
     graph->EnsureNextIteration();
   }
 }
 
-void SourceMediaStream::SetEnabledImpl(DisabledTrackMode aMode) {
+void SourceMediaTrack::SetEnabledImpl(DisabledTrackMode aMode) {
   {
     MutexAutoLock lock(mMutex);
     for (const auto& l : mDirectTrackListeners) {
       DisabledTrackMode oldMode = mDisabledMode;
       bool oldEnabled = oldMode == DisabledTrackMode::ENABLED;
       if (!oldEnabled && aMode == DisabledTrackMode::ENABLED) {
-        LOG(LogLevel::Debug, ("%p: SourceMediaStream %p setting "
+        LOG(LogLevel::Debug, ("%p: SourceMediaTrack %p setting "
                               "direct listener enabled",
                               GraphImpl(), this));
         l->DecreaseDisabled(oldMode);
       } else if (oldEnabled && aMode != DisabledTrackMode::ENABLED) {
-        LOG(LogLevel::Debug, ("%p: SourceMediaStream %p setting "
+        LOG(LogLevel::Debug, ("%p: SourceMediaTrack %p setting "
                               "direct listener disabled",
                               GraphImpl(), this));
         l->IncreaseDisabled(aMode);
       }
     }
   }
-  MediaStream::SetEnabledImpl(aMode);
+  MediaTrack::SetEnabledImpl(aMode);
 }
 
-void SourceMediaStream::RemoveAllDirectListenersImpl() {
+void SourceMediaTrack::RemoveAllDirectListenersImpl() {
   GraphImpl()->AssertOnGraphThreadOrNotRunning();
 
   auto directListeners(mDirectTrackListeners);
   for (auto& l : directListeners) {
     l->NotifyDirectListenerUninstalled();
   }
   mDirectTrackListeners.Clear();
 }
 
-SourceMediaStream::~SourceMediaStream() {}
+SourceMediaTrack::~SourceMediaTrack() {}
 
 void MediaInputPort::Init() {
   LOG(LogLevel::Debug, ("%p: Adding MediaInputPort %p (from %p to %p)",
                         mSource->GraphImpl(), this, mSource, mDest));
   // Only connect the port if it wasn't disconnected on allocation.
   if (mSource) {
     mSource->AddConsumer(this);
     mDest->AddInput(this);
@@ -2692,17 +2679,17 @@ void MediaInputPort::Disconnect() {
                "mSource must either both be null or both non-null");
   if (!mSource) return;
 
   mSource->RemoveConsumer(this);
   mDest->RemoveInput(this);
   mSource = nullptr;
   mDest = nullptr;
 
-  GraphImpl()->SetStreamOrderDirty();
+  GraphImpl()->SetTrackOrderDirty();
 }
 
 MediaInputPort::InputInterval MediaInputPort::GetNextInputInterval(
     MediaInputPort const* aPort, GraphTime aTime) {
   InputInterval result = {GRAPH_TIME_MAX, GRAPH_TIME_MAX, false};
   if (!aPort) {
     result.mStart = aTime;
     result.mInputIsBlocked = true;
@@ -2735,166 +2722,166 @@ void MediaInputPort::Destroy() {
       mPort->SetGraphImpl(nullptr);
       NS_RELEASE(mPort);
     }
     void RunDuringShutdown() override { Run(); }
     MediaInputPort* mPort;
   };
   // Keep a reference to the graph, since Message might RunDuringShutdown()
   // synchronously and make GraphImpl() invalid.
-  RefPtr<MediaStreamGraphImpl> graph = GraphImpl();
+  RefPtr<MediaTrackGraphImpl> graph = GraphImpl();
   graph->AppendMessage(MakeUnique<Message>(this));
   --graph->mMainThreadPortCount;
 }
 
-MediaStreamGraphImpl* MediaInputPort::GraphImpl() { return mGraph; }
-
-MediaStreamGraph* MediaInputPort::Graph() { return mGraph; }
-
-void MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph) {
+MediaTrackGraphImpl* MediaInputPort::GraphImpl() { return mGraph; }
+
+MediaTrackGraph* MediaInputPort::Graph() { return mGraph; }
+
+void MediaInputPort::SetGraphImpl(MediaTrackGraphImpl* aGraph) {
   MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once");
   mGraph = aGraph;
 }
 
-already_AddRefed<MediaInputPort> ProcessedMediaStream::AllocateInputPort(
-    MediaStream* aStream, uint16_t aInputNumber, uint16_t aOutputNumber) {
+already_AddRefed<MediaInputPort> ProcessedMediaTrack::AllocateInputPort(
+    MediaTrack* aTrack, uint16_t aInputNumber, uint16_t aOutputNumber) {
   // This method creates two references to the MediaInputPort: one for
-  // the main thread, and one for the MediaStreamGraph.
+  // the main thread, and one for the MediaTrackGraph.
   class Message : public ControlMessage {
    public:
     explicit Message(MediaInputPort* aPort)
         : ControlMessage(aPort->GetDestination()), mPort(aPort) {}
     void Run() override {
       mPort->Init();
       // The graph holds its reference implicitly
-      mPort->GraphImpl()->SetStreamOrderDirty();
+      mPort->GraphImpl()->SetTrackOrderDirty();
       Unused << mPort.forget();
     }
     void RunDuringShutdown() override { Run(); }
     RefPtr<MediaInputPort> mPort;
   };
 
-  MOZ_DIAGNOSTIC_ASSERT(aStream->mType == mType);
+  MOZ_DIAGNOSTIC_ASSERT(aTrack->mType == mType);
   RefPtr<MediaInputPort> port;
-  if (aStream->IsDestroyed()) {
+  if (aTrack->IsDestroyed()) {
     // Create a port that's disconnected, which is what it'd be after its source
-    // stream is Destroy()ed normally. Disconnect() is idempotent so destroying
+    // track is Destroy()ed normally. Disconnect() is idempotent so destroying
     // this later is fine.
     port = new MediaInputPort(nullptr, nullptr, aInputNumber, aOutputNumber);
   } else {
-    MOZ_ASSERT(aStream->GraphImpl() == GraphImpl());
-    port = new MediaInputPort(aStream, this, aInputNumber, aOutputNumber);
+    MOZ_ASSERT(aTrack->GraphImpl() == GraphImpl());
+    port = new MediaInputPort(aTrack, this, aInputNumber, aOutputNumber);
   }
   port->SetGraphImpl(GraphImpl());
   ++GraphImpl()->mMainThreadPortCount;
   GraphImpl()->AppendMessage(MakeUnique<Message>(port));
   return port.forget();
 }
 
-void ProcessedMediaStream::QueueSetAutoend(bool aAutoend) {
+void ProcessedMediaTrack::QueueSetAutoend(bool aAutoend) {
   class Message : public ControlMessage {
    public:
-    Message(ProcessedMediaStream* aStream, bool aAutoend)
-        : ControlMessage(aStream), mAutoend(aAutoend) {}
+    Message(ProcessedMediaTrack* aTrack, bool aAutoend)
+        : ControlMessage(aTrack), mAutoend(aAutoend) {}
     void Run() override {
-      static_cast<ProcessedMediaStream*>(mStream)->SetAutoendImpl(mAutoend);
+      static_cast<ProcessedMediaTrack*>(mTrack)->SetAutoendImpl(mAutoend);
     }
     bool mAutoend;
   };
   GraphImpl()->AppendMessage(MakeUnique<Message>(this, aAutoend));
 }
 
-void ProcessedMediaStream::DestroyImpl() {
+void ProcessedMediaTrack::DestroyImpl() {
   for (int32_t i = mInputs.Length() - 1; i >= 0; --i) {
     mInputs[i]->Disconnect();
   }
 
   for (int32_t i = mSuspendedInputs.Length() - 1; i >= 0; --i) {
     mSuspendedInputs[i]->Disconnect();
   }
 
-  MediaStream::DestroyImpl();
-  // The stream order is only important if there are connections, in which
-  // case MediaInputPort::Disconnect() called SetStreamOrderDirty().
-  // MediaStreamGraphImpl::RemoveStreamGraphThread() will also call
-  // SetStreamOrderDirty(), for other reasons.
+  MediaTrack::DestroyImpl();
+  // The track order is only important if there are connections, in which
+  // case MediaInputPort::Disconnect() called SetTrackOrderDirty().