Backed out 9 changesets (bug 1170958) for frequent test_getUserMedia_addTrackRemoveTrack.html failures
author Wes Kocher <wkocher@mozilla.com>
Fri, 25 Sep 2015 13:08:55 -0700
changeset 264604 277eb06d67b5c277618d25d2d8082640fc167a68
parent 264603 5ab984256b2db37b6273a06bd0b6f579ce022dbe
child 264605 3260a147595a19c87aedb267b3e24e512f6737e1
push id 15391
push user cbook@mozilla.com
push date Mon, 28 Sep 2015 12:19:39 +0000
treeherder fx-team@43af8bb24e9a
bugs 1170958
milestone 44.0a1
backs out 277c1f8098d1db77778798a3ee6dfaaecdab7010
aa86bb9eea959cc7864766697b33b6ad51ec7f6c
8af8b85a4b26e5a2ed65de5eb6756d8d191c9a5f
ec1bf225e9cbd1423882443afe09f02ca8129ec2
4a04ddca2b6bdb518f8818671ab506f1db1a4e04
e85c9977a3113226187108dbae33b39652278131
16b40ff04e8f103309db1513e2504ea632f8b367
ad206925c84a82d21db31e13a14267e9952adfb0
2106eccec79b991466e59a9e5ab9e8ce9bfc63b2
Backed out 9 changesets (bug 1170958) for frequent test_getUserMedia_addTrackRemoveTrack.html failures

Backed out changeset 277c1f8098d1 (bug 1170958)
Backed out changeset aa86bb9eea95 (bug 1170958)
Backed out changeset 8af8b85a4b26 (bug 1170958)
Backed out changeset ec1bf225e9cb (bug 1170958)
Backed out changeset 4a04ddca2b6b (bug 1170958)
Backed out changeset e85c9977a311 (bug 1170958)
Backed out changeset 16b40ff04e8f (bug 1170958)
Backed out changeset ad206925c84a (bug 1170958)
Backed out changeset 2106eccec79b (bug 1170958)
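
The description above lists the nine reversed changesets but not the commands that produced this backout, so the following is only an illustrative sketch of the usual sheriff workflow, not a record of what was actually run. Mercurial's hg backout reverses one changeset at a time, so a multi-changeset backout like this is typically assembled by backing out each listed revision in turn (usually in reverse landing order so each reversal applies cleanly) and then pushing the result:

    hg backout -r 277c1f8098d1   # one invocation per backed-out changeset;
    hg backout -r aa86bb9eea95   # repeat for the rest of the series
    hg push

Each hg backout invocation applies the reverse of the named changeset to the working directory and, by default, commits it, which is why the resulting push carries a "Backed out changeset ..." message per reversed revision.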
dom/camera/CameraPreviewMediaStream.h
dom/camera/DOMCameraControl.cpp
dom/camera/DOMCameraControl.h
dom/html/HTMLCanvasElement.cpp
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/AudioCaptureStream.cpp
dom/media/AudioCaptureStream.h
dom/media/CanvasCaptureMediaStream.cpp
dom/media/DOMMediaStream.cpp
dom/media/DOMMediaStream.h
dom/media/MediaManager.cpp
dom/media/MediaRecorder.cpp
dom/media/MediaStreamGraph.cpp
dom/media/MediaStreamGraph.h
dom/media/MediaStreamTrack.cpp
dom/media/MediaStreamTrack.h
dom/media/StreamBuffer.h
dom/media/TrackUnionStream.cpp
dom/media/TrackUnionStream.h
dom/media/encoder/MediaEncoder.cpp
dom/media/encoder/MediaEncoder.h
dom/media/imagecapture/CaptureTask.cpp
dom/media/imagecapture/CaptureTask.h
dom/media/webaudio/AudioNode.cpp
dom/media/webaudio/AudioNodeExternalInputStream.cpp
dom/media/webaudio/AudioParam.cpp
dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
dom/media/webaudio/MediaStreamAudioSourceNode.cpp
dom/media/webaudio/MediaStreamAudioSourceNode.h
dom/media/webspeech/recognition/SpeechRecognition.cpp
dom/media/webspeech/recognition/SpeechStreamListener.cpp
dom/media/webspeech/recognition/SpeechStreamListener.h
dom/media/webspeech/synth/nsSpeechTask.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
media/webrtc/signaling/test/FakeMediaStreams.h
media/webrtc/signaling/test/FakeMediaStreamsImpl.h
--- a/dom/camera/CameraPreviewMediaStream.h
+++ b/dom/camera/CameraPreviewMediaStream.h
@@ -37,16 +37,17 @@ protected:
  */
 class CameraPreviewMediaStream : public MediaStream
 {
   typedef mozilla::layers::Image Image;
 
 public:
   explicit CameraPreviewMediaStream(DOMMediaStream* aWrapper);
 
+  virtual CameraPreviewMediaStream* AsCameraPreviewStream() override { return this; };
   virtual void AddAudioOutput(void* aKey) override;
   virtual void SetAudioOutputVolume(void* aKey, float aVolume) override;
   virtual void RemoveAudioOutput(void* aKey) override;
   virtual void AddVideoOutput(VideoFrameContainer* aContainer) override;
   virtual void RemoveVideoOutput(VideoFrameContainer* aContainer) override;
   virtual void Suspend() override {}
   virtual void Resume() override {}
   virtual void AddListener(MediaStreamListener* aListener) override;
--- a/dom/camera/DOMCameraControl.cpp
+++ b/dom/camera/DOMCameraControl.cpp
@@ -274,23 +274,18 @@ nsDOMCameraControl::nsDOMCameraControl(u
     sCachedCameraControl = nullptr;
 #endif
     mCameraControl = ICameraControl::Create(aCameraId);
 #ifdef MOZ_WIDGET_GONK
   }
 #endif
   mCurrentConfiguration = initialConfig.forget();
 
-  // Register the playback listener directly on the camera input stream.
-  // We want as low latency as possible for the camera, thus avoiding
-  // MediaStreamGraph altogether. Don't do the regular InitStreamCommon()
-  // to avoid initializing the Owned and Playback streams. This is OK since
-  // we are not user/DOM facing anyway.
-  CreateAndAddPlaybackStreamListener(mInput);
-
+  // Attach our DOM-facing media stream to our viewfinder stream.
+  InitStreamCommon(mInput);
   MOZ_ASSERT(mWindow, "Shouldn't be created with a null window!");
   if (mWindow->GetExtantDoc()) {
     CombineWithPrincipal(mWindow->GetExtantDoc()->NodePrincipal());
   }
 
   // Register a listener for camera events.
   mListener = new DOMCameraControlListener(this, mInput);
   mCameraControl->AddListener(mListener);
@@ -324,21 +319,16 @@ nsDOMCameraControl::nsDOMCameraControl(u
   }
 }
 
 nsDOMCameraControl::~nsDOMCameraControl()
 {
   DOM_CAMERA_LOGT("%s:%d : this=%p\n", __func__, __LINE__, this);
   /*invoke DOMMediaStream destroy*/
   Destroy();
-
-  if (mInput) {
-    mInput->Destroy();
-    mInput = nullptr;
-  }
 }
 
 JSObject*
 nsDOMCameraControl::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
 {
   return CameraControlBinding::Wrap(aCx, this, aGivenProto);
 }
 
@@ -464,22 +454,16 @@ nsDOMCameraControl::Get(uint32_t aKey, n
       v.mRight,
       v.mWeight
     );
   }
 
   return NS_OK;
 }
 
-MediaStream*
-nsDOMCameraControl::GetCameraStream() const
-{
-  return mInput;
-}
-
 #define THROW_IF_NO_CAMERACONTROL(...)                                          \
   do {                                                                          \
     if (!mCameraControl) {                                                      \
       DOM_CAMERA_LOGW("mCameraControl is null at %s:%d\n", __func__, __LINE__); \
       aRv = NS_ERROR_NOT_AVAILABLE;                                             \
       return __VA_ARGS__;                                                       \
     }                                                                           \
   } while (0)
--- a/dom/camera/DOMCameraControl.h
+++ b/dom/camera/DOMCameraControl.h
@@ -66,18 +66,16 @@ public:
                      const dom::CameraConfiguration& aInitialConfig,
                      dom::Promise* aPromise,
                      nsPIDOMWindow* aWindow);
 
   void Shutdown();
 
   nsPIDOMWindow* GetParentObject() const { return mWindow; }
 
-  MediaStream* GetCameraStream() const override;
-
   // Attributes.
   void GetEffect(nsString& aEffect, ErrorResult& aRv);
   void SetEffect(const nsAString& aEffect, ErrorResult& aRv);
   void GetWhiteBalanceMode(nsString& aMode, ErrorResult& aRv);
   void SetWhiteBalanceMode(const nsAString& aMode, ErrorResult& aRv);
   void GetSceneMode(nsString& aMode, ErrorResult& aRv);
   void SetSceneMode(const nsAString& aMode, ErrorResult& aRv);
   void GetFlashMode(nsString& aMode, ErrorResult& aRv);
--- a/dom/html/HTMLCanvasElement.cpp
+++ b/dom/html/HTMLCanvasElement.cpp
@@ -537,17 +537,17 @@ HTMLCanvasElement::CaptureStream(const O
 
   TrackID videoTrackId = 1;
   nsresult rv = stream->Init(aFrameRate, videoTrackId);
   if (NS_FAILED(rv)) {
     aRv.Throw(rv);
     return nullptr;
   }
 
-  stream->CreateOwnDOMTrack(videoTrackId, MediaSegment::VIDEO);
+  stream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
   RegisterFrameCaptureListener(stream->FrameCaptureListener());
   return stream.forget();
 }
 
 nsresult
 HTMLCanvasElement::ExtractData(nsAString& aType,
                                const nsAString& aOptions,
                                nsIInputStream** aStream)
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -556,17 +556,17 @@ HTMLMediaElement::GetMozMediaSourceObjec
 {
   nsRefPtr<MediaSource> source = mMediaSource;
   return source.forget();
 }
 
 already_AddRefed<DOMMediaStream>
 HTMLMediaElement::GetSrcObject() const
 {
-  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(),
+  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetStream(),
                "MediaStream should have been set up properly");
   nsRefPtr<DOMMediaStream> stream = mSrcAttrStream;
   return stream.forget();
 }
 
 void
 HTMLMediaElement::SetSrcObject(DOMMediaStream& aValue)
 {
@@ -580,17 +580,17 @@ HTMLMediaElement::SetSrcObject(DOMMediaS
   DoLoad();
 }
 
 // TODO: Remove prefixed versions soon (1183495)
 
 already_AddRefed<DOMMediaStream>
 HTMLMediaElement::GetMozSrcObject() const
 {
-  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetPlaybackStream(),
+  NS_ASSERTION(!mSrcAttrStream || mSrcAttrStream->GetStream(),
                "MediaStream should have been set up properly");
   nsRefPtr<DOMMediaStream> stream = mSrcAttrStream;
   return stream.forget();
 }
 
 void
 HTMLMediaElement::SetMozSrcObject(DOMMediaStream& aValue)
 {
@@ -974,18 +974,16 @@ void HTMLMediaElement::NotifyLoadError()
 }
 
 void HTMLMediaElement::NotifyMediaTrackEnabled(MediaTrack* aTrack)
 {
   if (!aTrack) {
     return;
   }
 
-  LOG(LogLevel::Debug, ("MediaElement %p MediaStreamTrack %p enabled", this));
-
   // TODO: We are dealing with single audio track and video track for now.
   if (AudioTrack* track = aTrack->AsAudioTrack()) {
     if (!track->Enabled()) {
       SetMutedInternal(mMuted | MUTED_BY_AUDIO_TRACK);
     } else {
       SetMutedInternal(mMuted & ~MUTED_BY_AUDIO_TRACK);
     }
   } else if (VideoTrack* track = aTrack->AsVideoTrack()) {
@@ -994,18 +992,16 @@ void HTMLMediaElement::NotifyMediaTrackE
 }
 
 void HTMLMediaElement::NotifyMediaStreamTracksAvailable(DOMMediaStream* aStream)
 {
   if (!mSrcStream || mSrcStream != aStream) {
     return;
   }
 
-  LOG(LogLevel::Debug, ("MediaElement %p MediaStream tracks available", this));
-
   bool videoHasChanged = IsVideo() && HasVideo() != !VideoTracks()->IsEmpty();
 
   if (videoHasChanged) {
     // We are a video element and HasVideo() changed so update the screen
     // wakelock
     NotifyOwnerDocumentActivityChangedInternal();
   }
 
@@ -1875,27 +1871,27 @@ HTMLMediaElement::CaptureStreamInternal(
   out->mStream = DOMMediaStream::CreateTrackUnionStream(window, aGraph);
   nsRefPtr<nsIPrincipal> principal = GetCurrentPrincipal();
   out->mStream->CombineWithPrincipal(principal);
   out->mStream->SetCORSMode(mCORSMode);
   out->mFinishWhenEnded = aFinishWhenEnded;
 
   mAudioCaptured = true;
   if (mDecoder) {
-    mDecoder->AddOutputStream(out->mStream->GetInputStream()->AsProcessedStream(),
+    mDecoder->AddOutputStream(out->mStream->GetStream()->AsProcessedStream(),
                               aFinishWhenEnded);
     if (mReadyState >= HAVE_METADATA) {
       // Expose the tracks to JS directly.
       if (HasAudio()) {
         TrackID audioTrackId = mMediaInfo.mAudio.mTrackId;
-        out->mStream->CreateOwnDOMTrack(audioTrackId, MediaSegment::AUDIO);
+        out->mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO);
       }
       if (HasVideo()) {
         TrackID videoTrackId = mMediaInfo.mVideo.mTrackId;
-        out->mStream->CreateOwnDOMTrack(videoTrackId, MediaSegment::VIDEO);
+        out->mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
       }
     }
   }
   nsRefPtr<DOMMediaStream> result = out->mStream;
   return result.forget();
 }
 
 already_AddRefed<DOMMediaStream>
@@ -2445,17 +2441,17 @@ bool HTMLMediaElement::ParseAttribute(in
       // We cannot change the AudioChannel of a decoder.
       if (mDecoder) {
         return true;
       }
 
       mAudioChannel = audioChannel;
 
       if (mSrcStream) {
-        nsRefPtr<MediaStream> stream = GetSrcMediaStream();
+        nsRefPtr<MediaStream> stream = mSrcStream->GetStream();
         if (stream) {
           stream->SetAudioChannelType(mAudioChannel);
         }
       }
 
       return true;
     }
   }
@@ -2860,17 +2856,17 @@ nsresult HTMLMediaElement::FinishDecoder
   if (NS_FAILED(rv)) {
     ShutdownDecoder();
     LOG(LogLevel::Debug, ("%p Failed to load for decoder %p", this, aDecoder));
     return rv;
   }
 
   for (uint32_t i = 0; i < mOutputStreams.Length(); ++i) {
     OutputMediaStream* ms = &mOutputStreams[i];
-    aDecoder->AddOutputStream(ms->mStream->GetInputStream()->AsProcessedStream(),
+    aDecoder->AddOutputStream(ms->mStream->GetStream()->AsProcessedStream(),
                               ms->mFinishWhenEnded);
   }
 
 #ifdef MOZ_EME
   if (mMediaKeys) {
     mDecoder->SetCDMProxy(mMediaKeys->GetCDMProxy());
   }
 #endif
@@ -3052,19 +3048,17 @@ public:
       size = mInitialSize;
     }
     nsRefPtr<HTMLMediaElement> deathGrip = mElement;
     mElement->UpdateInitialMediaSize(size);
   }
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream,
-                                        TrackID aInputTrackID) override
+                                        const MediaSegment& aQueuedMedia) override
   {
     MutexAutoLock lock(mMutex);
     if (mInitialSize != gfx::IntSize(0,0) ||
         aQueuedMedia.GetType() != MediaSegment::VIDEO) {
       return;
     }
     const VideoSegment& video = static_cast<const VideoSegment&>(aQueuedMedia);
     for (VideoSegment::ConstChunkIterator c(video); !c.IsEnded(); c.Next()) {
@@ -3104,31 +3098,27 @@ private:
   HTMLMediaElement* mElement;
 };
 
 void HTMLMediaElement::UpdateSrcMediaStreamPlaying(uint32_t aFlags)
 {
   if (!mSrcStream) {
     return;
   }
-  // We might be in cycle collection with mSrcStream->GetPlaybackStream() already
+  // We might be in cycle collection with mSrcStream->GetStream() already
   // returning null due to unlinking.
 
-  MediaStream* stream = GetSrcMediaStream();
+  MediaStream* stream = mSrcStream->GetStream();
   bool shouldPlay = !(aFlags & REMOVING_SRC_STREAM) && !mPaused &&
       !mPausedForInactiveDocumentOrChannel && stream;
   if (shouldPlay == mSrcStreamIsPlaying) {
     return;
   }
   mSrcStreamIsPlaying = shouldPlay;
 
-  LOG(LogLevel::Debug, ("MediaElement %p %s playback of DOMMediaStream %p",
-                        this, shouldPlay ? "Setting up" : "Removing",
-                        mSrcStream.get()));
-
   if (shouldPlay) {
     mSrcStreamPausedCurrentTime = -1;
 
     mMediaStreamListener = new StreamListener(this,
         "HTMLMediaElement::mMediaStreamListener");
     mMediaStreamSizeListener = new StreamSizeListener(this);
     stream->AddListener(mMediaStreamListener);
     stream->AddListener(mMediaStreamSizeListener);
@@ -3186,17 +3176,17 @@ void HTMLMediaElement::SetupSrcMediaStre
 
   mSrcStream = aStream;
 
   nsIDOMWindow* window = OwnerDoc()->GetInnerWindow();
   if (!window) {
     return;
   }
 
-  nsRefPtr<MediaStream> stream = GetSrcMediaStream();
+  nsRefPtr<MediaStream> stream = mSrcStream->GetStream();
   if (stream) {
     stream->SetAudioChannelType(mAudioChannel);
   }
 
   UpdateSrcMediaStreamPlaying();
 
   // Note: we must call DisconnectTrackListListeners(...)  before dropping
   // mSrcStream.
@@ -3281,16 +3271,28 @@ void HTMLMediaElement::MetadataLoaded(co
     // Dispatch a distinct 'encrypted' event for each initData we have.
     for (const auto& initData : mPendingEncryptedInitData.mInitDatas) {
       DispatchEncrypted(initData.mInitData, initData.mType);
     }
     mPendingEncryptedInitData.mInitDatas.Clear();
 #endif // MOZ_EME
   }
 
+  // Expose the tracks to JS directly.
+  for (OutputMediaStream& out : mOutputStreams) {
+    if (aInfo->HasAudio()) {
+      TrackID audioTrackId = aInfo->mAudio.mTrackId;
+      out.mStream->CreateDOMTrack(audioTrackId, MediaSegment::AUDIO);
+    }
+    if (aInfo->HasVideo()) {
+      TrackID videoTrackId = aInfo->mVideo.mTrackId;
+      out.mStream->CreateDOMTrack(videoTrackId, MediaSegment::VIDEO);
+    }
+  }
+
   // If this element had a video track, but consists only of an audio track now,
   // delete the VideoFrameContainer. This happens when the src is changed to an
   // audio only file.
   // Else update its dimensions.
   if (!aInfo->HasVideo()) {
     ResetState();
   } else {
     mWatchManager.ManualNotify(&HTMLMediaElement::UpdateReadyStateInternal);
@@ -3575,134 +3577,104 @@ bool HTMLMediaElement::IsCORSSameOrigin(
     ShouldCheckAllowOrigin();
 }
 
 void
 HTMLMediaElement::UpdateReadyStateInternal()
 {
   if (!mDecoder && !mSrcStream) {
     // Not initialized - bail out.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Not initialized", this));
     return;
   }
 
   if (mDecoder && mReadyState < nsIDOMHTMLMediaElement::HAVE_METADATA) {
     // aNextFrame might have a next frame because the decoder can advance
     // on its own thread before MetadataLoaded gets a chance to run.
     // The arrival of more data can't change us out of this readyState.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder ready state < HAVE_METADATA", this));
     return;
   }
 
   if (mSrcStream && mReadyState < nsIDOMHTMLMediaElement::HAVE_METADATA) {
     bool hasAudio = !AudioTracks()->IsEmpty();
     bool hasVideo = !VideoTracks()->IsEmpty();
 
-    if (!hasAudio && !hasVideo) {
-      LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                            "Stream with no tracks", this));
+    if ((!hasAudio && !hasVideo) ||
+        (IsVideo() && hasVideo && !HasVideo())) {
       return;
     }
 
-    if (IsVideo() && hasVideo && !HasVideo()) {
-      LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                            "Stream waiting for video", this));
-      return;
-    }
-
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() Stream has "
-                          "metadata; audioTracks=%d, videoTracks=%d, "
-                          "hasVideoFrame=%d", this, AudioTracks()->Length(),
-                          VideoTracks()->Length(), HasVideo()));
-
     // We are playing a stream that has video and a video frame is now set.
     // This means we have all metadata needed to change ready state.
     MediaInfo mediaInfo = mMediaInfo;
     if (hasAudio) {
       mediaInfo.EnableAudio();
     }
     if (hasVideo) {
       mediaInfo.EnableVideo();
     }
     MetadataLoaded(&mediaInfo, nsAutoPtr<const MetadataTags>(nullptr));
   }
 
   if (NextFrameStatus() == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_SEEKING) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "NEXT_FRAME_UNAVAILABLE_SEEKING; Forcing HAVE_METADATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
     return;
   }
 
   if (IsVideo() && HasVideo() && !IsPlaybackEnded() &&
         GetImageContainer() && !GetImageContainer()->HasCurrentImage()) {
     // Don't advance if we are playing video, but don't have a video frame.
     // Also, if video became available after advancing to HAVE_CURRENT_DATA
     // while we are still playing, we need to revert to HAVE_METADATA until
     // a video frame is available.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Playing video but no video frame; Forcing HAVE_METADATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_METADATA);
     return;
   }
 
   if (mDownloadSuspendedByCache && mDecoder && !mDecoder->IsEndedOrShutdown()) {
     // The decoder has signaled that the download has been suspended by the
     // media cache. So move readyState into HAVE_ENOUGH_DATA, in case there's
     // script waiting for a "canplaythrough" event; without this forced
     // transition, we will never fire the "canplaythrough" event if the
     // media cache is too small, and scripts are bound to fail. Don't force
     // this transition if the decoder is in ended state; the readyState
     // should remain at HAVE_CURRENT_DATA in this case.
     // Note that this state transition includes the case where we finished
     // downloaded the whole data stream.
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder download suspended by cache", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }
 
   if (NextFrameStatus() != MediaDecoderOwner::NEXT_FRAME_AVAILABLE) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Next frame not available", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_CURRENT_DATA);
     if (!mWaitingFired && NextFrameStatus() == MediaDecoderOwner::NEXT_FRAME_UNAVAILABLE_BUFFERING) {
       FireTimeUpdate(false);
       DispatchAsyncEvent(NS_LITERAL_STRING("waiting"));
       mWaitingFired = true;
     }
     return;
   }
 
   if (mSrcStream) {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Stream HAVE_ENOUGH_DATA", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }
 
   // Now see if we should set HAVE_ENOUGH_DATA.
   // If it's something we don't know the size of, then we can't
   // make a real estimate, so we go straight to HAVE_ENOUGH_DATA once
   // we've downloaded enough data that our download rate is considered
   // reliable. We have to move to HAVE_ENOUGH_DATA at some point or
   // autoplay elements for live streams will never play. Otherwise we
   // move to HAVE_ENOUGH_DATA if we can play through the entire media
   // without stopping to buffer.
   if (mDecoder->CanPlayThrough())
   {
-    LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                          "Decoder can play through", this));
     ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_ENOUGH_DATA);
     return;
   }
-  LOG(LogLevel::Debug, ("MediaElement %p UpdateReadyStateInternal() "
-                        "Default; Decoder has future data", this));
   ChangeReadyState(nsIDOMHTMLMediaElement::HAVE_FUTURE_DATA);
 }
 
 static const char* const gReadyStateToString[] = {
   "HAVE_NOTHING",
   "HAVE_METADATA",
   "HAVE_CURRENT_DATA",
   "HAVE_FUTURE_DATA",
@@ -4810,31 +4782,31 @@ NS_IMETHODIMP HTMLMediaElement::WindowAu
       mAudioCapturedByWindow = true;
       nsCOMPtr<nsPIDOMWindow> window =
         do_QueryInterface(OwnerDoc()->GetParentObject());
       uint64_t id = window->WindowID();
       MediaStreamGraph* msg =
         MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER,
                                       AudioChannel::Normal);
 
-      if (GetSrcMediaStream()) {
-        mCaptureStreamPort = msg->ConnectToCaptureStream(id, GetSrcMediaStream());
+      if (mSrcStream) {
+        mCaptureStreamPort = msg->ConnectToCaptureStream(id, mSrcStream->GetStream());
       } else {
         nsRefPtr<DOMMediaStream> stream = CaptureStreamInternal(false, msg);
-        mCaptureStreamPort = msg->ConnectToCaptureStream(id, stream->GetPlaybackStream());
+        mCaptureStreamPort = msg->ConnectToCaptureStream(id, stream->GetStream());
       }
     } else {
       mAudioCapturedByWindow = false;
       if (mDecoder) {
         ProcessedMediaStream* ps =
           mCaptureStreamPort->GetSource()->AsProcessedStream();
         MOZ_ASSERT(ps);
 
         for (uint32_t i = 0; i < mOutputStreams.Length(); i++) {
-          if (mOutputStreams[i].mStream->GetPlaybackStream() == ps) {
+          if (mOutputStreams[i].mStream->GetStream() == ps) {
             mOutputStreams.RemoveElementAt(i);
             break;
           }
         }
 
         mDecoder->RemoveOutputStream(ps);
       }
       mCaptureStreamPort->Destroy();
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -344,29 +344,22 @@ public:
    * be fired if we've not fired a timeupdate event (for any reason) in the
    * last 250ms, as required by the spec when the current time is periodically
    * increasing during playback.
    */
   virtual void FireTimeUpdate(bool aPeriodic) final override;
 
   /**
    * This will return null if mSrcStream is null, or if mSrcStream is not
-   * null but its GetPlaybackStream() returns null --- which can happen during
+   * null but its GetStream() returns null --- which can happen during
    * cycle collection unlinking!
    */
   MediaStream* GetSrcMediaStream() const
   {
-    if (!mSrcStream) {
-      return nullptr;
-    }
-    if (mSrcStream->GetCameraStream()) {
-      // XXX Remove this check with CameraPreviewMediaStream per bug 1124630.
-      return mSrcStream->GetCameraStream();
-    }
-    return mSrcStream->GetPlaybackStream();
+    return mSrcStream ? mSrcStream->GetStream() : nullptr;
   }
 
   // WebIDL
 
   MediaError* GetError() const
   {
     return mError;
   }
@@ -1095,26 +1088,31 @@ protected:
 
   // If non-negative, the time we should return for currentTime while playing
   // mSrcStream.
   double mSrcStreamPausedCurrentTime;
 
   // Holds a reference to the stream connecting this stream to the capture sink.
   nsRefPtr<MediaInputPort> mCaptureStreamPort;
 
+  // Holds a reference to a stream with mSrcStream as input but intended for
+  // playback. Used so we don't block playback of other video elements
+  // playing the same mSrcStream.
+  nsRefPtr<DOMMediaStream> mPlaybackStream;
+
   // Holds references to the DOM wrappers for the MediaStreams that we're
   // writing to.
   struct OutputMediaStream {
     nsRefPtr<DOMMediaStream> mStream;
     bool mFinishWhenEnded;
   };
   nsTArray<OutputMediaStream> mOutputStreams;
 
-  // Holds a reference to the MediaStreamListener attached to mSrcStream's
-  // playback stream.
+  // Holds a reference to the MediaStreamListener attached to mPlaybackStream
+  // (or mSrcStream if mPlaybackStream is null).
   nsRefPtr<StreamListener> mMediaStreamListener;
   // Holds a reference to the size-getting MediaStreamListener attached to
   // mSrcStream.
   nsRefPtr<StreamSizeListener> mMediaStreamSizeListener;
 
   // Holds a reference to the MediaSource, if any, referenced by the src
   // attribute on the media element.
   nsRefPtr<MediaSource> mSrcMediaSource;
--- a/dom/media/AudioCaptureStream.cpp
+++ b/dom/media/AudioCaptureStream.cpp
@@ -24,18 +24,18 @@ using namespace mozilla::dom;
 using namespace mozilla::gfx;
 
 namespace mozilla
 {
 
 // We are mixing to mono until PeerConnection can accept stereo
 static const uint32_t MONO = 1;
 
-AudioCaptureStream::AudioCaptureStream(DOMMediaStream* aWrapper, TrackID aTrackId)
-  : ProcessedMediaStream(aWrapper), mTrackId(aTrackId), mTrackCreated(false)
+AudioCaptureStream::AudioCaptureStream(DOMMediaStream* aWrapper)
+  : ProcessedMediaStream(aWrapper), mTrackCreated(false)
 {
   MOZ_ASSERT(NS_IsMainThread());
   MOZ_COUNT_CTOR(AudioCaptureStream);
   mMixer.AddCallback(this);
 }
 
 AudioCaptureStream::~AudioCaptureStream()
 {
@@ -43,24 +43,24 @@ AudioCaptureStream::~AudioCaptureStream(
   mMixer.RemoveCallback(this);
 }
 
 void
 AudioCaptureStream::ProcessInput(GraphTime aFrom, GraphTime aTo,
                                  uint32_t aFlags)
 {
   uint32_t inputCount = mInputs.Length();
-  StreamBuffer::Track* track = EnsureTrack(mTrackId);
+  StreamBuffer::Track* track = EnsureTrack(AUDIO_TRACK);
   // Notify the DOM everything is in order.
   if (!mTrackCreated) {
     for (uint32_t i = 0; i < mListeners.Length(); i++) {
       MediaStreamListener* l = mListeners[i];
       AudioSegment tmp;
       l->NotifyQueuedTrackChanges(
-        Graph(), mTrackId, 0, MediaStreamListener::TRACK_EVENT_CREATED, tmp);
+        Graph(), AUDIO_TRACK, 0, MediaStreamListener::TRACK_EVENT_CREATED, tmp);
       l->NotifyFinishedTrackCreation(Graph());
     }
     mTrackCreated = true;
   }
 
   // If the captured stream is connected back to a object on the page (be it an
   // HTMLMediaElement with a stream as source, or an AudioContext), a cycle
   // situation occur. This can work if it's an AudioContext with at least one
@@ -122,11 +122,11 @@ AudioCaptureStream::MixerCallback(AudioD
   chunk.mBufferFormat = aFormat;
   chunk.mVolume = 1.0f;
   chunk.mChannelData.SetLength(MONO);
   for (uint32_t channel = 0; channel < aChannels; channel++) {
     chunk.mChannelData[channel] = bufferPtrs[channel];
   }
 
   // Now we have mixed data, simply append it to out track.
-  EnsureTrack(mTrackId)->Get<AudioSegment>()->AppendAndConsumeChunk(&chunk);
+  EnsureTrack(AUDIO_TRACK)->Get<AudioSegment>()->AppendAndConsumeChunk(&chunk);
 }
 }
--- a/dom/media/AudioCaptureStream.h
+++ b/dom/media/AudioCaptureStream.h
@@ -3,39 +3,38 @@
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #ifndef MOZILLA_AUDIOCAPTURESTREAM_H_
 #define MOZILLA_AUDIOCAPTURESTREAM_H_
 
 #include "MediaStreamGraph.h"
 #include "AudioMixer.h"
-#include "StreamBuffer.h"
 #include <algorithm>
 
 namespace mozilla
 {
 
 class DOMMediaStream;
 
 /**
  * See MediaStreamGraph::CreateAudioCaptureStream.
  */
 class AudioCaptureStream : public ProcessedMediaStream,
                            public MixerCallbackReceiver
 {
 public:
-  explicit AudioCaptureStream(DOMMediaStream* aWrapper, TrackID aTrackId);
+  explicit AudioCaptureStream(DOMMediaStream* aWrapper);
   virtual ~AudioCaptureStream();
 
   void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
 protected:
+  enum { AUDIO_TRACK = 1 };
   void MixerCallback(AudioDataValue* aMixedBuffer, AudioSampleFormat aFormat,
                      uint32_t aChannels, uint32_t aFrames,
                      uint32_t aSampleRate) override;
   AudioMixer mMixer;
-  TrackID mTrackId;
   bool mTrackCreated;
 };
 }
 
 #endif /* MOZILLA_AUDIOCAPTURESTREAM_H_ */
--- a/dom/media/CanvasCaptureMediaStream.cpp
+++ b/dom/media/CanvasCaptureMediaStream.cpp
@@ -236,24 +236,24 @@ CanvasCaptureMediaStream::RequestFrame()
 }
 
 nsresult
 CanvasCaptureMediaStream::Init(const dom::Optional<double>& aFPS,
                                const TrackID& aTrackId)
 {
   if (!aFPS.WasPassed()) {
     mOutputStreamDriver =
-      new AutoDriver(GetInputStream()->AsSourceStream(), aTrackId);
+      new AutoDriver(GetStream()->AsSourceStream(), aTrackId);
   } else if (aFPS.Value() < 0) {
     return NS_ERROR_ILLEGAL_VALUE;
   } else {
     // Cap frame rate to 60 FPS for sanity
     double fps = std::min(60.0, aFPS.Value());
     mOutputStreamDriver =
-      new TimerDriver(GetInputStream()->AsSourceStream(), fps, aTrackId);
+      new TimerDriver(GetStream()->AsSourceStream(), fps, aTrackId);
   }
   return NS_OK;
 }
 
 already_AddRefed<CanvasCaptureMediaStream>
 CanvasCaptureMediaStream::CreateSourceStream(nsIDOMWindow* aWindow,
                                              HTMLCanvasElement* aCanvas)
 {
--- a/dom/media/DOMMediaStream.cpp
+++ b/dom/media/DOMMediaStream.cpp
@@ -16,305 +16,146 @@
 #include "mozilla/dom/VideoTrack.h"
 #include "mozilla/dom/VideoTrackList.h"
 #include "mozilla/dom/HTMLCanvasElement.h"
 #include "MediaStreamGraph.h"
 #include "AudioStreamTrack.h"
 #include "VideoStreamTrack.h"
 #include "Layers.h"
 
-#ifdef LOG
-#undef LOG
-#endif
-
-static PRLogModuleInfo* gMediaStreamLog;
-#define LOG(type, msg) MOZ_LOG(gMediaStreamLog, type, msg)
-
 using namespace mozilla;
 using namespace mozilla::dom;
 using namespace mozilla::layers;
 
 const TrackID TRACK_VIDEO_PRIMARY = 1;
 
-/**
- * TrackPort is a representation of a MediaStreamTrack-MediaInputPort pair
- * that make up a link between the Owned stream and the Playback stream.
- *
- * Semantically, the track is the identifier/key and the port the value of this
- * connection.
- *
- * The input port can be shared between several TrackPorts. This is the case
- * for DOMMediaStream's mPlaybackPort which forwards all tracks in its
- * mOwnedStream automatically.
- *
- * If the MediaStreamTrack is owned by another DOMMediaStream (called A) than
- * the one owning the TrackPort (called B), the input port (locked to the
- * MediaStreamTrack's TrackID) connects A's mOwnedStream to B's mPlaybackStream.
- *
- * A TrackPort may never leave the DOMMediaStream it was created in. Internal
- * use only.
- */
-class DOMMediaStream::TrackPort
-{
+class DOMMediaStream::StreamListener : public MediaStreamListener {
 public:
-  NS_INLINE_DECL_CYCLE_COLLECTING_NATIVE_REFCOUNTING(TrackPort)
-  NS_DECL_CYCLE_COLLECTION_NATIVE_CLASS(TrackPort)
-
-  enum class InputPortOwnership {
-    OWNED = 1,
-    EXTERNAL
-  };
-
-  TrackPort(MediaInputPort* aInputPort,
-            MediaStreamTrack* aTrack,
-            const InputPortOwnership aOwnership)
-    : mInputPort(aInputPort)
-    , mTrack(aTrack)
-    , mOwnership(aOwnership)
-  {
-    MOZ_ASSERT(mInputPort);
-    MOZ_ASSERT(mTrack);
-
-    MOZ_COUNT_CTOR(TrackPort);
-  }
-
-protected:
-  virtual ~TrackPort()
-  {
-    MOZ_COUNT_DTOR(TrackPort);
-
-    if (mOwnership == InputPortOwnership::OWNED && mInputPort) {
-      mInputPort->Destroy();
-      mInputPort = nullptr;
-    }
-  }
-
-public:
-  void DestroyInputPort()
-  {
-    if (mInputPort) {
-      mInputPort->Destroy();
-      mInputPort = nullptr;
-    }
-  }
-
-  /**
-   * Returns the source stream of the input port.
-   */
-  MediaStream* GetSource() const { return mInputPort ? mInputPort->GetSource()
-                                                     : nullptr; }
-
-  /**
-   * Returns the track ID this track is locked to in the source stream of the
-   * input port.
-   */
-  TrackID GetSourceTrackId() const { return mInputPort ? mInputPort->GetSourceTrackId()
-                                                       : TRACK_INVALID; }
-
-  MediaInputPort* GetInputPort() const { return mInputPort; }
-  MediaStreamTrack* GetTrack() const { return mTrack; }
-
-private:
-  nsRefPtr<MediaInputPort> mInputPort;
-  nsRefPtr<MediaStreamTrack> mTrack;
-
-  // Defines if we've been given ownership of the input port or if it's owned
-  // externally. The owner is responsible for destroying the port.
-  const InputPortOwnership mOwnership;
-};
-
-NS_IMPL_CYCLE_COLLECTION(DOMMediaStream::TrackPort, mTrack)
-NS_IMPL_CYCLE_COLLECTION_ROOT_NATIVE(DOMMediaStream::TrackPort, AddRef)
-NS_IMPL_CYCLE_COLLECTION_UNROOT_NATIVE(DOMMediaStream::TrackPort, Release)
-
-/**
- * Listener registered on the Owned stream to detect added and ended owned
- * tracks for keeping the list of MediaStreamTracks in sync with the tracks
- * added and ended directly at the source.
- */
-class DOMMediaStream::OwnedStreamListener : public MediaStreamListener {
-public:
-  explicit OwnedStreamListener(DOMMediaStream* aStream)
+  explicit StreamListener(DOMMediaStream* aStream)
     : mStream(aStream)
   {}
 
+  // Main thread only
   void Forget() { mStream = nullptr; }
-
-  void DoNotifyTrackCreated(TrackID aTrackId, MediaSegment::Type aType)
-  {
-    MOZ_ASSERT(NS_IsMainThread());
+  DOMMediaStream* GetStream() { return mStream; }
 
-    if (!mStream) {
-      return;
-    }
-
-    MediaStreamTrack* track = mStream->FindOwnedDOMTrack(
-      mStream->GetOwnedStream(), aTrackId);
-    if (track) {
-      // This track has already been manually created. Abort.
-      return;
+  class TrackChange : public nsRunnable {
+  public:
+    TrackChange(StreamListener* aListener,
+                TrackID aID, StreamTime aTrackOffset,
+                uint32_t aEvents, MediaSegment::Type aType)
+      : mListener(aListener), mID(aID), mEvents(aEvents), mType(aType)
+    {
     }
 
-    NS_WARN_IF_FALSE(!mStream->mTracks.IsEmpty(),
-                     "A new track was detected on the input stream; creating a corresponding MediaStreamTrack. "
-                     "Initial tracks should be added manually to immediately and synchronously be available to JS.");
-    mStream->CreateOwnDOMTrack(aTrackId, aType);
-  }
+    NS_IMETHOD Run()
+    {
+      NS_ASSERTION(NS_IsMainThread(), "main thread only");
+
+      DOMMediaStream* stream = mListener->GetStream();
+      if (!stream) {
+        return NS_OK;
+      }
 
-  void DoNotifyTrackEnded(TrackID aTrackId)
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-
-    if (!mStream) {
-      return;
+      nsRefPtr<MediaStreamTrack> track;
+      if (mEvents & MediaStreamListener::TRACK_EVENT_CREATED) {
+        track = stream->BindDOMTrack(mID, mType);
+        if (!track) {
+          stream->CreateDOMTrack(mID, mType);
+          track = stream->BindDOMTrack(mID, mType);
+        }
+        stream->NotifyMediaStreamTrackCreated(track);
+      } else {
+        track = stream->GetDOMTrackFor(mID);
+      }
+      if (mEvents & MediaStreamListener::TRACK_EVENT_ENDED) {
+        if (track) {
+          track->NotifyEnded();
+          stream->NotifyMediaStreamTrackEnded(track);
+        } else {
+          NS_ERROR("track ended but not found");
+        }
+      }
+      return NS_OK;
     }
 
-    nsRefPtr<MediaStreamTrack> track =
-      mStream->FindOwnedDOMTrack(mStream->GetOwnedStream(), aTrackId);
-    NS_ASSERTION(track, "Owned MediaStreamTracks must be known by the DOMMediaStream");
-    if (track) {
-      LOG(LogLevel::Debug, ("DOMMediaStream %p MediaStreamTrack %p ended at the source. Marking it ended.",
-                            mStream, track.get()));
-      track->NotifyEnded();
-    }
-  }
+    StreamTime mEndTime;
+    nsRefPtr<StreamListener> mListener;
+    TrackID mID;
+    uint32_t mEvents;
+    MediaSegment::Type mType;
+  };
 
-  void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
-                                StreamTime aTrackOffset, uint32_t aTrackEvents,
-                                const MediaSegment& aQueuedMedia,
-                                MediaStream* aInputStream,
-                                TrackID aInputTrackID) override
+  /**
+   * Notify that changes to one of the stream tracks have been queued.
+   * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
+   * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
+   * at aTrackOffset (relative to the start of the stream).
+   * aQueuedMedia can be null if there is no output.
+   */
+  virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
+                                        StreamTime aTrackOffset,
+                                        uint32_t aTrackEvents,
+                                        const MediaSegment& aQueuedMedia) override
   {
-    if (aTrackEvents & TRACK_EVENT_CREATED) {
-      nsCOMPtr<nsIRunnable> runnable =
-        NS_NewRunnableMethodWithArgs<TrackID, MediaSegment::Type>(
-          this, &OwnedStreamListener::DoNotifyTrackCreated,
-          aID, aQueuedMedia.GetType());
-      aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
-    } else if (aTrackEvents & TRACK_EVENT_ENDED) {
-      nsCOMPtr<nsIRunnable> runnable =
-        NS_NewRunnableMethodWithArgs<TrackID>(
-          this, &OwnedStreamListener::DoNotifyTrackEnded, aID);
+    if (aTrackEvents & (TRACK_EVENT_CREATED | TRACK_EVENT_ENDED)) {
+      nsRefPtr<TrackChange> runnable =
+        new TrackChange(this, aID, aTrackOffset, aTrackEvents,
+                        aQueuedMedia.GetType());
       aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
     }
   }
 
-private:
-  // These fields may only be accessed on the main thread
-  DOMMediaStream* mStream;
-};
-
-/**
- * Listener registered on the Playback stream to detect when tracks end and when
- * all new tracks this iteration have been created - for when several tracks are
- * queued by the source and committed all at once.
- */
-class DOMMediaStream::PlaybackStreamListener : public MediaStreamListener {
-public:
-  explicit PlaybackStreamListener(DOMMediaStream* aStream)
-    : mStream(aStream)
-  {}
-
-  void Forget()
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-    mStream = nullptr;
-  }
-
-  void DoNotifyTrackEnded(MediaStream* aInputStream,
-                          TrackID aInputTrackID)
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-
-    if (!mStream) {
-      return;
-    }
-
-    LOG(LogLevel::Debug, ("DOMMediaStream %p Track %u of stream %p ended",
-                          mStream, aInputTrackID, aInputStream));
-
-    nsRefPtr<MediaStreamTrack> track =
-      mStream->FindPlaybackDOMTrack(aInputStream, aInputTrackID);
-    if (!track) {
-      LOG(LogLevel::Debug, ("DOMMediaStream %p Not a playback track.", mStream));
-      return;
+  class TracksCreatedRunnable : public nsRunnable {
+  public:
+    explicit TracksCreatedRunnable(StreamListener* aListener)
+      : mListener(aListener)
+    {
     }
 
-    LOG(LogLevel::Debug, ("DOMMediaStream %p Playback track; notifying stream listeners.",
-                           mStream));
-    mStream->NotifyTrackRemoved(track);
+    NS_IMETHOD Run()
+    {
+      MOZ_ASSERT(NS_IsMainThread());
 
-    nsRefPtr<TrackPort> endedPort = mStream->FindPlaybackTrackPort(*track);
-    NS_ASSERTION(endedPort, "Playback track should have a TrackPort");
-    if (endedPort &&
-        endedPort->GetSourceTrackId() != TRACK_ANY &&
-        endedPort->GetSourceTrackId() != TRACK_INVALID &&
-        endedPort->GetSourceTrackId() != TRACK_NONE) {
-      // If a track connected to a locked-track input port ends, we destroy the
-      // port to allow our playback stream to finish.
-      // XXX (bug 1208316) This should not be necessary when MediaStreams don't
-      // finish but instead become inactive.
-      endedPort->DestroyInputPort();
-    }
-  }
+      DOMMediaStream* stream = mListener->GetStream();
+      if (!stream) {
+        return NS_OK;
+      }
 
-  void DoNotifyFinishedTrackCreation()
-  {
-    MOZ_ASSERT(NS_IsMainThread());
-
-    if (!mStream) {
-      return;
+      stream->TracksCreated();
+      return NS_OK;
     }
 
-    mStream->NotifyTracksCreated();
-  }
-
-  // The methods below are called on the MediaStreamGraph thread.
+    nsRefPtr<StreamListener> mListener;
+  };
 
-  void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
-                                StreamTime aTrackOffset, uint32_t aTrackEvents,
-                                const MediaSegment& aQueuedMedia,
-                                MediaStream* aInputStream,
-                                TrackID aInputTrackID) override
+  virtual void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) override
   {
-    if (aTrackEvents & TRACK_EVENT_ENDED) {
-      nsCOMPtr<nsIRunnable> runnable =
-        NS_NewRunnableMethodWithArgs<StorensRefPtrPassByPtr<MediaStream>, TrackID>(
-          this, &PlaybackStreamListener::DoNotifyTrackEnded, aInputStream, aInputTrackID);
-      aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
-    }
-  }
-
-  void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) override
-  {
-    nsCOMPtr<nsIRunnable> runnable =
-      NS_NewRunnableMethod(this, &PlaybackStreamListener::DoNotifyFinishedTrackCreation);
+    nsRefPtr<TracksCreatedRunnable> runnable = new TracksCreatedRunnable(this);
     aGraph->DispatchToMainThreadAfterStreamStateUpdate(runnable.forget());
   }
 
 private:
   // These fields may only be accessed on the main thread
   DOMMediaStream* mStream;
 };
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(DOMMediaStream)
 
 NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN_INHERITED(DOMMediaStream,
                                                 DOMEventTargetHelper)
   tmp->Destroy();
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mWindow)
-  NS_IMPL_CYCLE_COLLECTION_UNLINK(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mTracks)
   NS_IMPL_CYCLE_COLLECTION_UNLINK(mConsumersToKeepAlive)
 NS_IMPL_CYCLE_COLLECTION_UNLINK_END
 
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(DOMMediaStream,
                                                   DOMEventTargetHelper)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mWindow)
-  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mOwnedTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mTracks)
   NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mConsumersToKeepAlive)
 NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
 
 NS_IMPL_ADDREF_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(DOMMediaStream, DOMEventTargetHelper)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(DOMMediaStream)
@@ -333,29 +174,24 @@ NS_IMPL_CYCLE_COLLECTION_INHERITED(DOMAu
 
 NS_IMPL_ADDREF_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
 NS_IMPL_RELEASE_INHERITED(DOMAudioNodeMediaStream, DOMMediaStream)
 
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(DOMAudioNodeMediaStream)
 NS_INTERFACE_MAP_END_INHERITING(DOMMediaStream)
 
 DOMMediaStream::DOMMediaStream()
-  : mLogicalStreamStartTime(0), mInputStream(nullptr), mOwnedStream(nullptr),
-    mPlaybackStream(nullptr), mOwnedPort(nullptr), mPlaybackPort(nullptr),
-    mTracksCreated(false), mNotifiedOfMediaStreamGraphShutdown(false),
-    mCORSMode(CORS_NONE)
+  : mLogicalStreamStartTime(0),
+    mStream(nullptr), mTracksCreated(false),
+    mNotifiedOfMediaStreamGraphShutdown(false), mCORSMode(CORS_NONE)
 {
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
     do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
-  if (!gMediaStreamLog) {
-    gMediaStreamLog = PR_NewLogModule("MediaStream");
-  }
-
   if (NS_SUCCEEDED(rv) && uuidgen) {
     nsID uuid;
     memset(&uuid, 0, sizeof(uuid));
     rv = uuidgen->GenerateUUIDInPlace(&uuid);
     if (NS_SUCCEEDED(rv)) {
       char buffer[NSID_LENGTH];
       uuid.ToProvidedString(buffer);
       mID = NS_ConvertASCIItoUTF16(buffer);
@@ -366,169 +202,122 @@ DOMMediaStream::DOMMediaStream()
 DOMMediaStream::~DOMMediaStream()
 {
   Destroy();
 }
 
 void
 DOMMediaStream::Destroy()
 {
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Being destroyed.", this));
-  if (mOwnedListener) {
-    mOwnedListener->Forget();
-    mOwnedListener = nullptr;
-  }
-  if (mPlaybackListener) {
-    mPlaybackListener->Forget();
-    mPlaybackListener = nullptr;
-  }
-  if (mPlaybackPort) {
-    mPlaybackPort->Destroy();
-    mPlaybackPort = nullptr;
+  if (mListener) {
+    mListener->Forget();
+    mListener = nullptr;
   }
-  if (mOwnedPort) {
-    mOwnedPort->Destroy();
-    mOwnedPort = nullptr;
-  }
-  if (mPlaybackStream) {
-    mPlaybackStream->Destroy();
-    mPlaybackStream = nullptr;
-  }
-  if (mOwnedStream) {
-    mOwnedStream->Destroy();
-    mOwnedStream = nullptr;
-  }
-  if (mInputStream) {
-    mInputStream->Destroy();
-    mInputStream = nullptr;
+  if (mStream) {
+    mStream->Destroy();
+    mStream = nullptr;
   }
 }
 
 JSObject*
 DOMMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
 {
   return dom::MediaStreamBinding::Wrap(aCx, this, aGivenProto);
 }
 
 double
 DOMMediaStream::CurrentTime()
 {
-  if (!mPlaybackStream) {
+  if (!mStream) {
     return 0.0;
   }
-  return mPlaybackStream->
-    StreamTimeToSeconds(mPlaybackStream->GetCurrentTime() - mLogicalStreamStartTime);
+  return mStream->
+    StreamTimeToSeconds(mStream->GetCurrentTime() - mLogicalStreamStartTime);
 }
 
 void
 DOMMediaStream::GetId(nsAString& aID) const
 {
   aID = mID;
 }
 
 void
 DOMMediaStream::GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks)
 {
-  for (const nsRefPtr<TrackPort>& info : mTracks) {
-    AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack();
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    AudioStreamTrack* t = mTracks[i]->AsAudioStreamTrack();
     if (t) {
       aTracks.AppendElement(t);
     }
   }
 }
 
 void
 DOMMediaStream::GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks)
 {
-  for (const nsRefPtr<TrackPort>& info : mTracks) {
-    VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack();
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    VideoStreamTrack* t = mTracks[i]->AsVideoStreamTrack();
     if (t) {
       aTracks.AppendElement(t);
     }
   }
 }
 
 void
 DOMMediaStream::GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks)
 {
-  for (const nsRefPtr<TrackPort>& info : mTracks) {
-    aTracks.AppendElement(info->GetTrack());
-  }
+  aTracks.AppendElements(mTracks);
 }
 
 bool
 DOMMediaStream::HasTrack(const MediaStreamTrack& aTrack) const
 {
-  return !!FindPlaybackDOMTrack(aTrack.GetStream()->GetOwnedStream(), aTrack.GetTrackID());
-}
-
-bool
-DOMMediaStream::OwnsTrack(const MediaStreamTrack& aTrack) const
-{
-  return (aTrack.GetStream() == this) && HasTrack(aTrack);
+  return mTracks.Contains(&aTrack);
 }
 
 bool
 DOMMediaStream::IsFinished()
 {
-  return !mPlaybackStream || mPlaybackStream->IsFinished();
+  return !mStream || mStream->IsFinished();
 }
 
 void
 DOMMediaStream::InitSourceStream(nsIDOMWindow* aWindow,
                                  MediaStreamGraph* aGraph)
 {
   mWindow = aWindow;
-  InitStreamCommon(aGraph->CreateSourceStream(nullptr), aGraph);
+  InitStreamCommon(aGraph->CreateSourceStream(this));
 }
 
 void
 DOMMediaStream::InitTrackUnionStream(nsIDOMWindow* aWindow,
                                      MediaStreamGraph* aGraph)
 {
   mWindow = aWindow;
-  InitStreamCommon(aGraph->CreateTrackUnionStream(nullptr), aGraph);
+
+  InitStreamCommon(aGraph->CreateTrackUnionStream(this));
 }
 
 void
 DOMMediaStream::InitAudioCaptureStream(nsIDOMWindow* aWindow,
                                        MediaStreamGraph* aGraph)
 {
   mWindow = aWindow;
 
-  const TrackID AUDIO_TRACK = 1;
-
-  InitStreamCommon(aGraph->CreateAudioCaptureStream(this, AUDIO_TRACK), aGraph);
-  CreateOwnDOMTrack(AUDIO_TRACK, MediaSegment::AUDIO);
+  InitStreamCommon(aGraph->CreateAudioCaptureStream(this));
 }
 
 void
-DOMMediaStream::InitStreamCommon(MediaStream* aStream,
-                                 MediaStreamGraph* aGraph)
+DOMMediaStream::InitStreamCommon(MediaStream* aStream)
 {
-  mInputStream = aStream;
-
-  // We pass null as the wrapper since it is only used to signal finished
-  // streams. This is only needed for the playback stream.
-  mOwnedStream = aGraph->CreateTrackUnionStream(nullptr);
-  mOwnedStream->SetAutofinish(true);
-  mOwnedPort = mOwnedStream->AllocateInputPort(mInputStream);
+  mStream = aStream;
 
-  mPlaybackStream = aGraph->CreateTrackUnionStream(this);
-  mPlaybackStream->SetAutofinish(true);
-  mPlaybackPort = mPlaybackStream->AllocateInputPort(mOwnedStream);
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Initiated with mInputStream=%p, mOwnedStream=%p, mPlaybackStream=%p",
-                        this, mInputStream, mOwnedStream, mPlaybackStream));
-
-  // Setup track listeners
-  mOwnedListener = new OwnedStreamListener(this);
-  mOwnedStream->AddListener(mOwnedListener);
-  mPlaybackListener = new PlaybackStreamListener(this);
-  mPlaybackStream->AddListener(mPlaybackListener);
+  // Setup track listener
+  mListener = new StreamListener(this);
+  aStream->AddListener(mListener);
 }
 
 already_AddRefed<DOMMediaStream>
 DOMMediaStream::CreateSourceStream(nsIDOMWindow* aWindow,
                                    MediaStreamGraph* aGraph)
 {
   nsRefPtr<DOMMediaStream> stream = new DOMMediaStream();
   stream->InitSourceStream(aWindow, aGraph);
@@ -551,26 +340,26 @@ DOMMediaStream::CreateAudioCaptureStream
   nsRefPtr<DOMMediaStream> stream = new DOMMediaStream();
   stream->InitAudioCaptureStream(aWindow, aGraph);
   return stream.forget();
 }
 
 void
 DOMMediaStream::SetTrackEnabled(TrackID aTrackID, bool aEnabled)
 {
-  if (mOwnedStream) {
-    mOwnedStream->SetTrackEnabled(aTrackID, aEnabled);
+  if (mStream) {
+    mStream->SetTrackEnabled(aTrackID, aEnabled);
   }
 }
 
 void
 DOMMediaStream::StopTrack(TrackID aTrackID)
 {
-  if (mInputStream && mInputStream->AsSourceStream()) {
-    mInputStream->AsSourceStream()->EndTrack(aTrackID);
+  if (mStream && mStream->AsSourceStream()) {
+    mStream->AsSourceStream()->EndTrack(aTrackID);
   }
 }
 
 already_AddRefed<Promise>
 DOMMediaStream::ApplyConstraintsToTrack(TrackID aTrackID,
                                         const MediaTrackConstraints& aConstraints,
                                         ErrorResult &aRv)
 {
@@ -626,78 +415,75 @@ DOMMediaStream::AddPrincipalChangeObserv
 
 bool
 DOMMediaStream::RemovePrincipalChangeObserver(PrincipalChangeObserver* aObserver)
 {
   return mPrincipalChangeObservers.RemoveElement(aObserver);
 }
 
 MediaStreamTrack*
-DOMMediaStream::CreateOwnDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
+DOMMediaStream::CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
 {
-  MOZ_ASSERT(FindOwnedDOMTrack(GetOwnedStream(), aTrackID) == nullptr);
-
   MediaStreamTrack* track;
   switch (aType) {
   case MediaSegment::AUDIO:
     track = new AudioStreamTrack(this, aTrackID);
     break;
   case MediaSegment::VIDEO:
     track = new VideoStreamTrack(this, aTrackID);
     break;
   default:
     MOZ_CRASH("Unhandled track type");
   }
-
-  LOG(LogLevel::Debug, ("DOMMediaStream %p Created new track %p with ID %u", this, track, aTrackID));
+  mTracks.AppendElement(track);
 
-  nsRefPtr<TrackPort> ownedTrackPort =
-    new TrackPort(mOwnedPort, track, TrackPort::InputPortOwnership::EXTERNAL);
-  mOwnedTracks.AppendElement(ownedTrackPort.forget());
-
-  nsRefPtr<TrackPort> playbackTrackPort =
-    new TrackPort(mPlaybackPort, track, TrackPort::InputPortOwnership::EXTERNAL);
-  mTracks.AppendElement(playbackTrackPort.forget());
-
-  NotifyMediaStreamTrackCreated(track);
   return track;
 }
 
 MediaStreamTrack*
-DOMMediaStream::FindOwnedDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const
+DOMMediaStream::BindDOMTrack(TrackID aTrackID, MediaSegment::Type aType)
 {
-  if (aOwningStream != mOwnedStream) {
-    return nullptr;
+  MediaStreamTrack* track = nullptr;
+  bool bindSuccess = false;
+  switch (aType) {
+  case MediaSegment::AUDIO: {
+    for (size_t i = 0; i < mTracks.Length(); ++i) {
+      track = mTracks[i]->AsAudioStreamTrack();
+      if (track && track->GetTrackID() == aTrackID) {
+        bindSuccess = true;
+        break;
+      }
+    }
+    break;
   }
-
-  for (const nsRefPtr<TrackPort>& info : mOwnedTracks) {
-    if (info->GetTrack()->GetTrackID() == aTrackID) {
-      return info->GetTrack();
+  case MediaSegment::VIDEO: {
+    for (size_t i = 0; i < mTracks.Length(); ++i) {
+      track = mTracks[i]->AsVideoStreamTrack();
+      if (track && track->GetTrackID() == aTrackID) {
+        bindSuccess = true;
+        break;
+      }
     }
+    break;
   }
-  return nullptr;
+  default:
+    MOZ_CRASH("Unhandled track type");
+  }
+  return bindSuccess ? track : nullptr;
 }
 
 MediaStreamTrack*
-DOMMediaStream::FindPlaybackDOMTrack(MediaStream* aInputStream, TrackID aInputTrackID) const
+DOMMediaStream::GetDOMTrackFor(TrackID aTrackID)
 {
-  for (const nsRefPtr<TrackPort>& info : mTracks) {
-    if (info->GetInputPort() == mPlaybackPort &&
-        aInputStream == mOwnedStream &&
-        aInputTrackID == info->GetTrack()->GetTrackID()) {
-      // This track is in our owned and playback streams.
-      return info->GetTrack();
-    }
-    if (info->GetInputPort()->GetSource() == aInputStream &&
-        info->GetSourceTrackId() == aInputTrackID) {
-      // This track is owned externally but in our playback stream.
-      MOZ_ASSERT(aInputTrackID != TRACK_NONE);
-      MOZ_ASSERT(aInputTrackID != TRACK_INVALID);
-      MOZ_ASSERT(aInputTrackID != TRACK_ANY);
-      return info->GetTrack();
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    MediaStreamTrack* t = mTracks[i];
+    // We may add streams to our track list that are actually owned by
+    // a different DOMMediaStream. Ignore those.
+    if (t->GetTrackID() == aTrackID && t->GetStream() == this) {
+      return t;
     }
   }
   return nullptr;
 }
 
 void
 DOMMediaStream::NotifyMediaStreamGraphShutdown()
 {
@@ -726,16 +512,17 @@ DOMMediaStream::OnTracksAvailable(OnTrac
   }
   mRunOnTracksAvailable.AppendElement(aRunnable);
   CheckTracksAvailable();
 }
 
 void
 DOMMediaStream::TracksCreated()
 {
+  MOZ_ASSERT(!mTracks.IsEmpty());
   mTracksCreated = true;
   CheckTracksAvailable();
 }
 
 void
 DOMMediaStream::CheckTracksAvailable()
 {
   if (!mTracksCreated) {
@@ -744,24 +531,16 @@ DOMMediaStream::CheckTracksAvailable()
   nsTArray<nsAutoPtr<OnTracksAvailableCallback> > callbacks;
   callbacks.SwapElements(mRunOnTracksAvailable);
 
   for (uint32_t i = 0; i < callbacks.Length(); ++i) {
     callbacks[i]->NotifyTracksAvailable(this);
   }
 }
 
-void
-DOMMediaStream::CreateAndAddPlaybackStreamListener(MediaStream* aStream)
-{
-  MOZ_ASSERT(GetCameraStream(), "I'm a hack. Only DOMCameraControl may use me.");
-  mPlaybackListener = new PlaybackStreamListener(this);
-  aStream->AddListener(mPlaybackListener);
-}
-
 already_AddRefed<AudioTrack>
 DOMMediaStream::CreateAudioTrack(AudioStreamTrack* aStreamTrack)
 {
   nsAutoString id;
   nsAutoString label;
   aStreamTrack->GetId(id);
   aStreamTrack->GetLabel(label);
 
@@ -787,21 +566,21 @@ DOMMediaStream::ConstructMediaTracks(Aud
                                      VideoTrackList* aVideoTrackList)
 {
   MediaTrackListListener audioListener(aAudioTrackList);
   mMediaTrackListListeners.AppendElement(audioListener);
   MediaTrackListListener videoListener(aVideoTrackList);
   mMediaTrackListListeners.AppendElement(videoListener);
 
   int firstEnabledVideo = -1;
-  for (const nsRefPtr<TrackPort>& info : mTracks) {
-    if (AudioStreamTrack* t = info->GetTrack()->AsAudioStreamTrack()) {
+  for (uint32_t i = 0; i < mTracks.Length(); ++i) {
+    if (AudioStreamTrack* t = mTracks[i]->AsAudioStreamTrack()) {
       nsRefPtr<AudioTrack> track = CreateAudioTrack(t);
       aAudioTrackList->AddTrack(track);
-    } else if (VideoStreamTrack* t = info->GetTrack()->AsVideoStreamTrack()) {
+    } else if (VideoStreamTrack* t = mTracks[i]->AsVideoStreamTrack()) {
       nsRefPtr<VideoTrack> track = CreateVideoTrack(t);
       aVideoTrackList->AddTrack(track);
       firstEnabledVideo = (t->Enabled() && firstEnabledVideo < 0)
                           ? (aVideoTrackList->Length() - 1)
                           : firstEnabledVideo;
     }
   }
 
@@ -852,33 +631,33 @@ DOMMediaStream::NotifyMediaStreamTrackEn
   aTrack->GetId(id);
   for (uint32_t i = 0; i < mMediaTrackListListeners.Length(); ++i) {
     mMediaTrackListListeners[i].NotifyMediaTrackEnded(id);
   }
 }
 
 DOMLocalMediaStream::~DOMLocalMediaStream()
 {
-  if (mInputStream) {
+  if (mStream) {
     // Make sure Listeners of this stream know it's going away
     Stop();
   }
 }
 
 JSObject*
 DOMLocalMediaStream::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto)
 {
   return dom::LocalMediaStreamBinding::Wrap(aCx, this, aGivenProto);
 }
 
 void
 DOMLocalMediaStream::Stop()
 {
-  if (mInputStream && mInputStream->AsSourceStream()) {
-    mInputStream->AsSourceStream()->EndAllTrackAndFinish();
+  if (mStream && mStream->AsSourceStream()) {
+    mStream->AsSourceStream()->EndAllTrackAndFinish();
   }
 }
 
 already_AddRefed<DOMLocalMediaStream>
 DOMLocalMediaStream::CreateSourceStream(nsIDOMWindow* aWindow,
                                         MediaStreamGraph* aGraph)
 {
   nsRefPtr<DOMLocalMediaStream> stream = new DOMLocalMediaStream();
@@ -943,17 +722,17 @@ already_AddRefed<DOMHwMediaStream>
 DOMHwMediaStream::CreateHwStream(nsIDOMWindow* aWindow)
 {
   nsRefPtr<DOMHwMediaStream> stream = new DOMHwMediaStream();
 
   MediaStreamGraph* graph =
     MediaStreamGraph::GetInstance(MediaStreamGraph::SYSTEM_THREAD_DRIVER,
                                   AudioChannel::Normal);
   stream->InitSourceStream(aWindow, graph);
-  stream->Init(stream->GetInputStream());
+  stream->Init(stream->GetStream());
 
   return stream.forget();
 }
 
 void
 DOMHwMediaStream::Init(MediaStream* stream)
 {
   SourceMediaStream* srcStream = stream->AsSourceStream();
@@ -996,17 +775,17 @@ DOMHwMediaStream::SetImageSize(uint32_t 
 #ifdef MOZ_WIDGET_GONK
   OverlayImage::Data imgData;
 
   imgData.mOverlayId = mOverlayImage->GetOverlayId();
   imgData.mSize = IntSize(width, height);
   mOverlayImage->SetData(imgData);
 #endif
 
-  SourceMediaStream* srcStream = GetInputStream()->AsSourceStream();
+  SourceMediaStream* srcStream = GetStream()->AsSourceStream();
   StreamBuffer::Track* track = srcStream->FindTrack(TRACK_VIDEO_PRIMARY);
 
   if (!track || !track->GetSegment()) {
     return;
   }
 
 #ifdef MOZ_WIDGET_GONK
   // Clear the old segment.
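
A minimal standalone sketch of the lookup that the restored BindDOMTrack/GetDOMTrackFor perform above, assuming simplified stand-in types (Track, SegmentType and BindTrack are illustrative only, not Gecko APIs):

#include <cstdint>
#include <vector>

using TrackID = int32_t;
enum class SegmentType { AUDIO, VIDEO };

struct Track {
  TrackID id;
  SegmentType type;
};

// Returns the first track matching both the requested segment type and ID, or
// nullptr when none exists -- the same "bindSuccess" bookkeeping as above.
const Track* BindTrack(const std::vector<Track>& tracks,
                       TrackID aTrackID, SegmentType aType) {
  for (const Track& t : tracks) {
    if (t.type == aType && t.id == aTrackID) {
      return &t;
    }
  }
  return nullptr;
}
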
--- a/dom/media/DOMMediaStream.h
+++ b/dom/media/DOMMediaStream.h
@@ -30,19 +30,17 @@
 #endif
 
 namespace mozilla {
 
 class DOMHwMediaStream;
 class DOMLocalMediaStream;
 class MediaStream;
 class MediaEngineSource;
-class MediaInputPort;
 class MediaStreamGraph;
-class ProcessedMediaStream;
 
 namespace dom {
 class AudioNode;
 class HTMLCanvasElement;
 class MediaStreamTrack;
 class AudioStreamTrack;
 class VideoStreamTrack;
 class AudioTrack;
@@ -61,124 +59,19 @@ class OverlayImage;
 class MediaStreamDirectListener;
 
 #define NS_DOMMEDIASTREAM_IID \
 { 0x8cb65468, 0x66c0, 0x444e, \
   { 0x89, 0x9f, 0x89, 0x1d, 0x9e, 0xd2, 0xbe, 0x7c } }
 
 /**
  * DOM wrapper for MediaStreams.
- *
- * To account for track operations such as clone(), addTrack() and
- * removeTrack(), a DOMMediaStream wraps three internal (and chained)
- * MediaStreams:
- *   1. mInputStream
- *      - Controlled by the owner/source of the DOMMediaStream.
- *        It's a stream of the type indicated by
- *      - DOMMediaStream::CreateSourceStream/CreateTrackUnionStream. A source
- *        typically creates its DOMMediaStream, creates the MediaStreamTracks
- *        owned by said stream, then gets the internal input stream to which it
- *        feeds data for the previously created tracks.
- *      - When necessary it can create tracks on the internal stream only and
- *        their corresponding MediaStreamTracks will be asynchronously created.
- *   2. mOwnedStream
- *      - A TrackUnionStream containing tracks owned by this stream.
- *      - The internal model of a MediaStreamTrack consists of its owning
- *        DOMMediaStream and the TrackID of the corresponding internal track in
- *        the owning DOMMediaStream's mOwnedStream.
- *      - The owned stream is different from the input stream since a cloned
- *        DOMMediaStream is also the owner of its (cloned) MediaStreamTracks.
- *      - Stopping an original track shall not stop its clone. This is
- *        solved by stopping it at the owned stream, while the clone's owned
- *        stream gets data directly from the original input stream.
- *      - A DOMMediaStream (original or clone) gets all tracks dynamically
- *        added by the source automatically forwarded by having a TRACK_ANY
- *        MediaInputPort set up from the owning DOMMediaStream's input stream
- *        to this DOMMediaStream's owned stream.
- *   3. mPlaybackStream
- *      - A TrackUnionStream containing the tracks corresponding to the
- *        MediaStreamTracks currently in this DOMMediaStream (per getTracks()).
- *      - Similarly as for mOwnedStream, there's a TRACK_ANY MediaInputPort set
- *        up from the owned stream to the playback stream to allow tracks
- *        dynamically added by the source to be automatically forwarded to any
- *        audio or video sinks.
- *      - MediaStreamTracks added by addTrack() are set up with a MediaInputPort
- *        locked to their internal TrackID, from their owning DOMMediaStream's
- *        owned stream to this playback stream.
- *
- *
- * A graphical representation of how tracks are connected in various cases as
- * follows:
- *
- *                     addTrack()ed case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack X
- *                                                     (pointing to t1 in A)
- *                                 --------> t2     <- MediaStreamTrack Y
- *                                /                    (pointing to t1 in B)
- * DOMStream B                   /
- *           Input        Owned /        Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack Y
- *                                                     (pointing to t1 in B)
- *
- *                     removeTrack()ed case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1                      <- No tracks
- *
- *
- *                     clone()d case:
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1 ------------> t1     <- MediaStreamTrack X
- *               \                                     (pointing to t1 in A)
- *                -----
- * DOMStream B         \
- *           Input      \ Owned          Playback
- *                       -> t1 ------------> t1     <- MediaStreamTrack Y
- *                                                     (pointing to t1 in B)
- *
- *
- *            addTrack()ed, removeTrack()ed and clone()d case:
- *
- *  Here we have done the following:
- *    var A = someStreamWithTwoTracks;
- *    var B = someStreamWithOneTrack;
- *    var X = A.getTracks()[0];
- *    var Y = A.getTracks()[1];
- *    var Z = B.getTracks()[0];
- *    A.addTrack(Z);
- *    A.removeTrack(X);
- *    B.removeTrack(Z);
- *    var A' = A.clone();
- *
- * DOMStream A
- *           Input        Owned          Playback
- *            t1 ---------> t1                      <- MediaStreamTrack X (removed)
- *                                                     (pointing to t1 in A)
- *            t2 ---------> t2 ------------> t2     <- MediaStreamTrack Y
- *             \                                       (pointing to t2 in A)
- *              \                    ------> t3     <- MediaStreamTrack Z
- *               \                  /                  (pointing to t1 in B)
- * DOMStream B    \                /
- *           Input \      Owned   /      Playback
- *            t1 ---^-----> t1 ---                  <- MediaStreamTrack Z (removed)
- *              \    \                                 (pointing to t1 in B)
- *               \    \
- * DOMStream A'   \    \
- *           Input \    \ Owned          Playback
- *                  \    -> t1 ------------> t1     <- MediaStreamTrack Y'
- *                   \                                 (pointing to t1 in A')
- *                    ----> t2 ------------> t2     <- MediaStreamTrack Z'
- *                                                     (pointing to t2 in A')
  */
 class DOMMediaStream : public DOMEventTargetHelper
 {
-  class TrackPort;
   friend class DOMLocalMediaStream;
   typedef dom::MediaStreamTrack MediaStreamTrack;
   typedef dom::AudioStreamTrack AudioStreamTrack;
   typedef dom::VideoStreamTrack VideoStreamTrack;
   typedef dom::AudioTrack AudioTrack;
   typedef dom::VideoTrack VideoTrack;
   typedef dom::AudioTrackList AudioTrackList;
   typedef dom::VideoTrackList VideoTrackList;
@@ -205,48 +98,19 @@ public:
   // WebIDL
   double CurrentTime();
 
   void GetId(nsAString& aID) const;
 
   void GetAudioTracks(nsTArray<nsRefPtr<AudioStreamTrack> >& aTracks);
   void GetVideoTracks(nsTArray<nsRefPtr<VideoStreamTrack> >& aTracks);
   void GetTracks(nsTArray<nsRefPtr<MediaStreamTrack> >& aTracks);
-
-  // NON-WebIDL
-
-  /**
-   * Returns true if this DOMMediaStream has aTrack in its mPlaybackStream.
-   */
   bool HasTrack(const MediaStreamTrack& aTrack) const;
 
-  /**
-   * Returns true if this DOMMediaStream owns aTrack.
-   */
-  bool OwnsTrack(const MediaStreamTrack& aTrack) const;
-
-  /**
-   * Returns the corresponding MediaStreamTrack if it's in our mOwnedStream.
-   */
-  MediaStreamTrack* FindOwnedDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const;
-
-  /**
-   * Returns the corresponding MediaStreamTrack if it's in our mPlaybackStream.
-   */
-  MediaStreamTrack* FindPlaybackDOMTrack(MediaStream* aOwningStream, TrackID aTrackID) const;
-
-  MediaStream* GetInputStream() const { return mInputStream; }
-  ProcessedMediaStream* GetOwnedStream() const { return mOwnedStream; }
-  ProcessedMediaStream* GetPlaybackStream() const { return mPlaybackStream; }
-
-  /**
-   * Allows a video element to identify this stream as a camera stream, which
-   * needs special treatment.
-   */
-  virtual MediaStream* GetCameraStream() const { return nullptr; }
+  MediaStream* GetStream() const { return mStream; }
 
   /**
    * Overridden in DOMLocalMediaStreams to allow getUserMedia to pass
    * data directly to RTCPeerConnection without going through graph queuing.
    * Returns a bool to let us know if direct data will be delivered.
    */
   virtual bool AddDirectListener(MediaStreamDirectListener *aListener) { return false; }
   virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) {}
@@ -348,23 +212,21 @@ public:
   static already_AddRefed<DOMMediaStream> CreateAudioCaptureStream(
     nsIDOMWindow* aWindow, MediaStreamGraph* aGraph);
 
   void SetLogicalStreamStartTime(StreamTime aTime)
   {
     mLogicalStreamStartTime = aTime;
   }
 
-  /**
-   * Called for each track in our owned stream to indicate to JS that we
-   * are carrying that track.
-   *
-   * Creates a MediaStreamTrack, adds it to mTracks and returns it.
-   */
-  MediaStreamTrack* CreateOwnDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
+  // Notifications from StreamListener.
+  // BindDOMTrack should only be called when it's safe to run script.
+  MediaStreamTrack* BindDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
+  MediaStreamTrack* CreateDOMTrack(TrackID aTrackID, MediaSegment::Type aType);
+  MediaStreamTrack* GetDOMTrackFor(TrackID aTrackID);
 
   class OnTracksAvailableCallback {
   public:
     virtual ~OnTracksAvailableCallback() {}
     virtual void NotifyTracksAvailable(DOMMediaStream* aStream) = 0;
   };
   // When the initial set of tracks has been added, run
   // aCallback->NotifyTracksAvailable.
@@ -404,79 +266,51 @@ public:
   virtual void NotifyMediaStreamTrackCreated(MediaStreamTrack* aTrack);
 
   virtual void NotifyMediaStreamTrackEnded(MediaStreamTrack* aTrack);
 
 protected:
   virtual ~DOMMediaStream();
 
   void Destroy();
-  void InitSourceStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph);
-  void InitTrackUnionStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph);
-  void InitAudioCaptureStream(nsIDOMWindow* aWindow, MediaStreamGraph* aGraph);
-  void InitStreamCommon(MediaStream* aStream, MediaStreamGraph* aGraph);
+  void InitSourceStream(nsIDOMWindow* aWindow,
+                        MediaStreamGraph* aGraph);
+  void InitTrackUnionStream(nsIDOMWindow* aWindow,
+                            MediaStreamGraph* aGraph);
+  void InitAudioCaptureStream(nsIDOMWindow* aWindow,
+                              MediaStreamGraph* aGraph);
+  void InitStreamCommon(MediaStream* aStream);
   already_AddRefed<AudioTrack> CreateAudioTrack(AudioStreamTrack* aStreamTrack);
   already_AddRefed<VideoTrack> CreateVideoTrack(VideoStreamTrack* aStreamTrack);
 
   // Called when MediaStreamGraph has finished an iteration where tracks were
   // created.
   void TracksCreated();
 
   void CheckTracksAvailable();
 
-  class OwnedStreamListener;
-  friend class OwnedStreamListener;
-
-  class PlaybackStreamListener;
-  friend class PlaybackStreamListener;
-
-  // XXX Bug 1124630. Remove with CameraPreviewMediaStream.
-  void CreateAndAddPlaybackStreamListener(MediaStream*);
+  class StreamListener;
+  friend class StreamListener;
 
   // StreamTime at which the currentTime attribute would return 0.
   StreamTime mLogicalStreamStartTime;
 
   // We need this to track our parent object.
   nsCOMPtr<nsIDOMWindow> mWindow;
 
-  // MediaStreams are owned by the graph, but we tell them when to die,
-  // and they won't die until we let them.
-
-  // This stream contains tracks used as input by us. Cloning happens from this
-  // stream. Tracks may exist in these stream but not in |mOwnedStream| if they
-  // have been stopped.
-  MediaStream* mInputStream;
-
-  // This stream contains tracks owned by us (if we were created directly from
-  // source, or cloned from some other stream). Tracks map to |mOwnedTracks|.
-  ProcessedMediaStream* mOwnedStream;
-
-  // This stream contains tracks currently played by us, despite of owner.
-  // Tracks map to |mTracks|.
-  ProcessedMediaStream* mPlaybackStream;
+  // MediaStream is owned by the graph, but we tell it when to die, and it won't
+  // die until we let it.
+  MediaStream* mStream;
 
-  // This port connects mInputStream to mOwnedStream. All tracks forwarded.
-  nsRefPtr<MediaInputPort> mOwnedPort;
-
-  // This port connects mOwnedStream to mPlaybackStream. All tracks not
-  // explicitly blocked due to removal are forwarded.
-  nsRefPtr<MediaInputPort> mPlaybackPort;
-
-  // MediaStreamTracks corresponding to tracks in our mOwnedStream.
-  nsAutoTArray<nsRefPtr<TrackPort>, 2> mOwnedTracks;
-
-  // MediaStreamTracks corresponding to tracks in our mPlaybackStream.
-  nsAutoTArray<nsRefPtr<TrackPort>, 2> mTracks;
-
-  nsRefPtr<OwnedStreamListener> mOwnedListener;
-  nsRefPtr<PlaybackStreamListener> mPlaybackListener;
+  nsAutoTArray<nsRefPtr<MediaStreamTrack>, 2> mTracks;
+  nsRefPtr<StreamListener> mListener;
 
   nsTArray<nsAutoPtr<OnTracksAvailableCallback> > mRunOnTracksAvailable;
 
-  // Set to true after MediaStreamGraph has created tracks for mPlaybackStream.
+  // Set to true after MediaStreamGraph has created tracks for mStream.
   bool mTracksCreated;
 
   nsString mID;
 
   // Keep these alive until the stream finishes
   nsTArray<nsCOMPtr<nsISupports> > mConsumersToKeepAlive;
 
   bool mNotifiedOfMediaStreamGraphShutdown;
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -632,26 +632,26 @@ nsresult AudioDevice::Restart(const dom:
 /**
  * A subclass that we only use to stash internal pointers to MediaStreamGraph objects
  * that need to be cleaned up.
  */
 class nsDOMUserMediaStream : public DOMLocalMediaStream
 {
 public:
   static already_AddRefed<nsDOMUserMediaStream>
-  CreateSourceStream(nsIDOMWindow* aWindow,
+  CreateTrackUnionStream(nsIDOMWindow* aWindow,
                          GetUserMediaCallbackMediaStreamListener* aListener,
                          AudioDevice* aAudioDevice,
                          VideoDevice* aVideoDevice,
                          MediaStreamGraph* aMSG)
   {
     nsRefPtr<nsDOMUserMediaStream> stream = new nsDOMUserMediaStream(aListener,
                                                                      aAudioDevice,
                                                                      aVideoDevice);
-    stream->InitSourceStream(aWindow, aMSG);
+    stream->InitTrackUnionStream(aWindow, aMSG);
     return stream.forget();
   }
 
   nsDOMUserMediaStream(GetUserMediaCallbackMediaStreamListener* aListener,
                        AudioDevice *aAudioDevice,
                        VideoDevice *aVideoDevice) :
     mListener(aListener),
     mAudioDevice(aAudioDevice),
@@ -670,42 +670,45 @@ public:
 #endif
     mPlayoutDelay(20)
   {}
 
   virtual ~nsDOMUserMediaStream()
   {
     Stop();
 
-    if (GetSourceStream()) {
-      GetSourceStream()->Destroy();
+    if (mPort) {
+      mPort->Destroy();
+    }
+    if (mSourceStream) {
+      mSourceStream->Destroy();
     }
   }
 
   virtual void Stop() override
   {
-    if (GetSourceStream()) {
-      GetSourceStream()->EndAllTrackAndFinish();
+    if (mSourceStream) {
+      mSourceStream->EndAllTrackAndFinish();
     }
   }
 
   // For gUM streams, we have a TrackUnion which assigns TrackIDs. However, for a
   // single-source TrackUnion like we have here, the TrackUnion will assign TrackIDs
   // that match the source's TrackIDs, so we can avoid needing a mapping function.
   // XXX This will not handle more complex cases well.
   virtual void StopTrack(TrackID aTrackID) override
   {
-    if (GetSourceStream()) {
-      GetSourceStream()->EndTrack(aTrackID);
+    if (mSourceStream) {
+      mSourceStream->EndTrack(aTrackID);
       // We could override NotifyMediaStreamTrackEnded(), and maybe should, but it's
       // risky to do late in a release since that will affect all track ends, and not
       // just StopTrack()s.
-      nsRefPtr<dom::MediaStreamTrack> ownedTrack = FindOwnedDOMTrack(mOwnedStream, aTrackID);
-      if (ownedTrack) {
-        mListener->StopTrack(aTrackID, !!ownedTrack->AsAudioStreamTrack());
+      if (GetDOMTrackFor(aTrackID)) {
+        mListener->StopTrack(aTrackID,
+                             !!GetDOMTrackFor(aTrackID)->AsAudioStreamTrack());
       } else {
         LOG(("StopTrack(%d) on non-existent track", aTrackID));
       }
     }
   }
 
   virtual already_AddRefed<Promise>
   ApplyConstraintsToTrack(TrackID aTrackID,
@@ -718,25 +721,25 @@ public:
 
     if (sInShutdown) {
       nsRefPtr<MediaStreamError> error = new MediaStreamError(window,
           NS_LITERAL_STRING("AbortError"),
           NS_LITERAL_STRING("In shutdown"));
       promise->MaybeReject(error);
       return promise.forget();
     }
-    if (!GetSourceStream()) {
+    if (!mSourceStream) {
       nsRefPtr<MediaStreamError> error = new MediaStreamError(window,
           NS_LITERAL_STRING("InternalError"),
           NS_LITERAL_STRING("No stream."));
       promise->MaybeReject(error);
       return promise.forget();
     }
 
-    nsRefPtr<dom::MediaStreamTrack> track = FindOwnedDOMTrack(mOwnedStream, aTrackID);
+    nsRefPtr<dom::MediaStreamTrack> track = GetDOMTrackFor(aTrackID);
     if (!track) {
       LOG(("ApplyConstraintsToTrack(%d) on non-existent track", aTrackID));
       nsRefPtr<MediaStreamError> error = new MediaStreamError(window,
           NS_LITERAL_STRING("InternalError"),
           NS_LITERAL_STRING("No track."));
       promise->MaybeReject(error);
       return promise.forget();
     }
@@ -766,18 +769,18 @@ public:
     // forward to superclass
     DOMLocalMediaStream::NotifyMediaStreamTrackEnded(aTrack);
   }
 #endif
 
   // Allow getUserMedia to pass input data directly to PeerConnection/MediaPipeline
   virtual bool AddDirectListener(MediaStreamDirectListener *aListener) override
   {
-    if (GetSourceStream()) {
-      GetSourceStream()->AddDirectListener(aListener);
+    if (mSourceStream) {
+      mSourceStream->AddDirectListener(aListener);
       return true; // application should ignore NotifyQueuedTrackData
     }
     return false;
   }
 
   virtual void
   AudioConfig(bool aEchoOn, uint32_t aEcho,
               bool aAgcOn, uint32_t aAgc,
@@ -790,21 +793,32 @@ public:
     mAgc = aAgc;
     mNoiseOn = aNoiseOn;
     mNoise = aNoise;
     mPlayoutDelay = aPlayoutDelay;
   }
 
   virtual void RemoveDirectListener(MediaStreamDirectListener *aListener) override
   {
-    if (GetSourceStream()) {
-      GetSourceStream()->RemoveDirectListener(aListener);
+    if (mSourceStream) {
+      mSourceStream->RemoveDirectListener(aListener);
     }
   }
 
+  // Let us intervene for direct listeners when someone does track.enabled = false.
+  virtual void SetTrackEnabled(TrackID aTrackID, bool aEnabled) override
+  {
+    // We encapsulate the SourceMediaStream and TrackUnion into one entity, so
+    // we can handle the disabling at the SourceMediaStream.
+
+    // We need to find the input track ID for output ID aTrackID, so we let the
+    // TrackUnion forward the request to the source and translate the ID.
+    GetStream()->AsProcessedStream()->ForwardTrackEnabled(aTrackID, aEnabled);
+  }
+
   virtual DOMLocalMediaStream* AsDOMLocalMediaStream() override
   {
     return this;
   }
 
   virtual MediaEngineSource* GetMediaEngine(TrackID aTrackID) override
   {
     // MediaEngine supports only one video and one audio track now and TrackID is
@@ -814,24 +828,20 @@ public:
     }
     else if (aTrackID == kAudioTrack) {
       return mAudioDevice ? mAudioDevice->GetSource() : nullptr;
     }
 
     return nullptr;
   }
 
-  SourceMediaStream* GetSourceStream()
-  {
-    if (GetInputStream()) {
-      return GetInputStream()->AsSourceStream();
-    }
-    return nullptr;
-  }
-
+  // The actual MediaStream is a TrackUnionStream. But these resources need to be
+  // explicitly destroyed too.
+  nsRefPtr<SourceMediaStream> mSourceStream;
+  nsRefPtr<MediaInputPort> mPort;
   nsRefPtr<GetUserMediaCallbackMediaStreamListener> mListener;
   nsRefPtr<AudioDevice> mAudioDevice; // so we can turn on AEC
   nsRefPtr<VideoDevice> mVideoDevice;
   bool mEchoOn;
   bool mAgcOn;
   bool mNoiseOn;
   uint32_t mEcho;
   uint32_t mAgc;
@@ -912,17 +922,17 @@ public:
     {
       // We're in the main thread, so no worries here.
       if (!(mManager->IsWindowStillActive(mWindowID))) {
         return;
       }
 
       // Start currentTime from the point where this stream was successfully
       // returned.
-      aStream->SetLogicalStreamStartTime(aStream->GetPlaybackStream()->GetCurrentTime());
+      aStream->SetLogicalStreamStartTime(aStream->GetStream()->GetCurrentTime());
 
       // This is safe since we're on main-thread, and the windowlist can only
       // be invalidated from the main-thread (see OnNavigation)
       LOG(("Returning success for getUserMedia()"));
       mOnSuccess->OnSuccess(aStream);
     }
     uint64_t mWindowID;
     nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> mOnSuccess;
@@ -984,55 +994,61 @@ public:
 
     MediaStreamGraph::GraphDriverType graphDriverType =
       mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
                    : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
     MediaStreamGraph* msg =
       MediaStreamGraph::GetInstance(graphDriverType,
                                     dom::AudioChannel::Normal);
 
+    nsRefPtr<SourceMediaStream> stream = msg->CreateSourceStream(nullptr);
+
     nsRefPtr<DOMLocalMediaStream> domStream;
-    nsRefPtr<SourceMediaStream> stream;
     // AudioCapture is a special case, here, in the sense that we're not really
     // using the audio source and the SourceMediaStream, which acts as
     // placeholders. We re-route a number of stream internaly in the MSG and mix
     // them down instead.
     if (mAudioDevice &&
         mAudioDevice->GetMediaSource() == dom::MediaSourceEnum::AudioCapture) {
       domStream = DOMLocalMediaStream::CreateAudioCaptureStream(window, msg);
       // It should be possible to pipe the capture stream to anything. CORS is
       // not a problem here, we got explicit user content.
       domStream->SetPrincipal(window->GetExtantDoc()->NodePrincipal());
-      stream = msg->CreateSourceStream(nullptr); // Placeholder
       msg->RegisterCaptureStreamForWindow(
-            mWindowID, domStream->GetInputStream()->AsProcessedStream());
+            mWindowID, domStream->GetStream()->AsProcessedStream());
       window->SetAudioCapture(true);
     } else {
       // Normal case, connect the source stream to the track union stream to
       // avoid us blocking
-      domStream = nsDOMUserMediaStream::CreateSourceStream(window, mListener,
-                                                           mAudioDevice, mVideoDevice,
-                                                           msg);
-
-      if (mAudioDevice) {
-        domStream->CreateOwnDOMTrack(kAudioTrack, MediaSegment::AUDIO);
-      }
-      if (mVideoDevice) {
-        domStream->CreateOwnDOMTrack(kVideoTrack, MediaSegment::VIDEO);
-      }
+      nsRefPtr<nsDOMUserMediaStream> trackunion =
+        nsDOMUserMediaStream::CreateTrackUnionStream(window, mListener,
+                                                     mAudioDevice, mVideoDevice,
+                                                     msg);
+      trackunion->GetStream()->AsProcessedStream()->SetAutofinish(true);
+      nsRefPtr<MediaInputPort> port = trackunion->GetStream()->AsProcessedStream()->
+        AllocateInputPort(stream);
+      trackunion->mSourceStream = stream;
+      trackunion->mPort = port.forget();
+      // Log the relationship between SourceMediaStream and TrackUnion stream
+      // Make sure logger starts before capture
+      AsyncLatencyLogger::Get(true);
+      LogLatency(AsyncLatencyLogger::MediaStreamCreate,
+          reinterpret_cast<uint64_t>(stream.get()),
+          reinterpret_cast<int64_t>(trackunion->GetStream()));
 
       nsCOMPtr<nsIPrincipal> principal;
       if (mPeerIdentity) {
         principal = nsNullPrincipal::Create();
-        domStream->SetPeerIdentity(mPeerIdentity.forget());
+        trackunion->SetPeerIdentity(mPeerIdentity.forget());
       } else {
         principal = window->GetExtantDoc()->NodePrincipal();
       }
-      domStream->CombineWithPrincipal(principal);
-      stream = domStream->GetInputStream()->AsSourceStream();
+      trackunion->CombineWithPrincipal(principal);
+
+      domStream = trackunion.forget();
     }
 
     if (!domStream || sInShutdown) {
       nsCOMPtr<nsIDOMGetUserMediaErrorCallback> onFailure = mOnFailure.forget();
       LOG(("Returning error for getUserMedia() - no stream"));
 
       nsGlobalWindow* window = nsGlobalWindow::GetInnerWindowWithId(mWindowID);
       if (window) {
@@ -1044,31 +1060,30 @@ public:
       }
       return NS_OK;
     }
 
     // The listener was added at the beginning in an inactive state.
     // Activate our listener. We'll call Start() on the source when we get a callback
     // that the MediaStream has started consuming. The listener is freed
     // when the page is invalidated (on navigation or close).
-    MOZ_ASSERT(stream);
     mListener->Activate(stream.forget(), mAudioDevice, mVideoDevice);
 
     // Note: includes JS callbacks; must be released on MainThread
     TracksAvailableCallback* tracksAvailableCallback =
       new TracksAvailableCallback(mManager, mOnSuccess, mWindowID, domStream);
 
     mListener->AudioConfig(aec_on, (uint32_t) aec,
                            agc_on, (uint32_t) agc,
                            noise_on, (uint32_t) noise,
                            playout_delay);
 
     // Dispatch to the media thread to ask it to start the sources,
     // because that can take a while.
-    // Pass ownership of domStream to the MediaOperationTask
+    // Pass ownership of trackunion to the MediaOperationTask
     // to ensure it's kept alive until the MediaOperationTask runs (at least).
     MediaManager::PostTask(FROM_HERE,
         new MediaOperationTask(MEDIA_START, mListener, domStream,
                                tracksAvailableCallback,
                                mAudioDevice, mVideoDevice,
                                false, mWindowID, mOnFailure.forget()));
     // We won't need mOnFailure now.
     mOnFailure = nullptr;
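
A simplified, standalone sketch of the gUM wiring restored above (SourceStream, InputPort, UnionStream and UserMediaStream are stand-ins for illustration, not the Gecko classes): the track-union stream autofinishes, pulls from the source through one input port, and the DOM wrapper keeps both alive so it can tear them down explicitly, port before source.

#include <memory>

struct SourceStream { void Destroy() {} };
struct InputPort    { void Destroy() {} };

struct UnionStream {
  void SetAutofinish(bool) {}
  std::shared_ptr<InputPort> AllocateInputPort(const std::shared_ptr<SourceStream>&) {
    return std::make_shared<InputPort>();
  }
};

struct UserMediaStream {
  std::shared_ptr<SourceStream> mSourceStream;
  std::shared_ptr<InputPort> mPort;

  void Wire(const std::shared_ptr<SourceStream>& aSource, UnionStream& aUnion) {
    aUnion.SetAutofinish(true);            // finish when all inputs finish
    mPort = aUnion.AllocateInputPort(aSource);
    mSourceStream = aSource;               // kept so it can be destroyed later
  }

  ~UserMediaStream() {
    // Mirror ~nsDOMUserMediaStream(): destroy the port first, then the source.
    if (mPort) { mPort->Destroy(); }
    if (mSourceStream) { mSourceStream->Destroy(); }
  }
};
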
--- a/dom/media/MediaRecorder.cpp
+++ b/dom/media/MediaRecorder.cpp
@@ -535,17 +535,18 @@ private:
     // Create a Track Union Stream
     MediaStreamGraph* gm = mRecorder->GetSourceMediaStream()->Graph();
     mTrackUnionStream = gm->CreateTrackUnionStream(nullptr);
     MOZ_ASSERT(mTrackUnionStream, "CreateTrackUnionStream failed");
 
     mTrackUnionStream->SetAutofinish(true);
 
     // Bind this Track Union Stream with Source Media.
-    mInputPort = mTrackUnionStream->AllocateInputPort(mRecorder->GetSourceMediaStream());
+    mInputPort = mTrackUnionStream->AllocateInputPort(mRecorder->GetSourceMediaStream(),
+                                                      0);
 
     DOMMediaStream* domStream = mRecorder->Stream();
     if (domStream) {
       // Get the track type hint from DOM media stream.
       TracksAvailableCallback* tracksAvailableCallback = new TracksAvailableCallback(this);
       domStream->OnTracksAvailable(tracksAvailableCallback);
     } else {
       // Web Audio node has only audio.
@@ -794,17 +795,17 @@ MediaRecorder::MediaRecorder(AudioNode& 
     AudioNodeEngine* engine = new AudioNodeEngine(nullptr);
     AudioNodeStream::Flags flags =
       AudioNodeStream::EXTERNAL_OUTPUT |
       AudioNodeStream::NEED_MAIN_THREAD_FINISHED;
     mPipeStream = AudioNodeStream::Create(ctx, engine, flags);
     AudioNodeStream* ns = aSrcAudioNode.GetStream();
     if (ns) {
       mInputPort = mPipeStream->AllocateInputPort(aSrcAudioNode.GetStream(),
-                                                  TRACK_ANY, 0, aSrcOutput);
+                                                  0, aSrcOutput);
     }
   }
   mAudioNode = &aSrcAudioNode;
   if (!gMediaRecorderLog) {
     gMediaRecorderLog = PR_NewLogModule("MediaRecorder");
   }
   RegisterActivityObserver();
 }
@@ -1168,17 +1169,17 @@ MediaRecorder::NotifyOwnerDocumentActivi
     Stop(result);
   }
 }
 
 MediaStream*
 MediaRecorder::GetSourceMediaStream()
 {
   if (mDOMStream != nullptr) {
-    return mDOMStream->GetPlaybackStream();
+    return mDOMStream->GetStream();
   }
   MOZ_ASSERT(mAudioNode != nullptr);
   return mPipeStream ? mPipeStream.get() : mAudioNode->GetStream();
 }
 
 nsIPrincipal*
 MediaRecorder::GetSourcePrincipal()
 {
--- a/dom/media/MediaStreamGraph.cpp
+++ b/dom/media/MediaStreamGraph.cpp
@@ -1596,17 +1596,17 @@ MediaStream::MediaStream(DOMMediaStream*
   , mMainThreadDestroyed(false)
   , mGraph(nullptr)
   , mAudioChannelType(dom::AudioChannel::Normal)
 {
   MOZ_COUNT_CTOR(MediaStream);
   // aWrapper should not already be connected to a MediaStream! It needs
   // to be hooked up to this stream, and since this stream is only just
   // being created now, aWrapper must not be connected to anything.
-  NS_ASSERTION(!aWrapper || !aWrapper->GetPlaybackStream(),
+  NS_ASSERTION(!aWrapper || !aWrapper->GetStream(),
                "Wrapper already has another media stream hooked up to it!");
 }
 
 size_t
 MediaStream::SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const
 {
   size_t amount = 0;
 
@@ -2425,49 +2425,18 @@ MediaInputPort::Graph()
 
 void
 MediaInputPort::SetGraphImpl(MediaStreamGraphImpl* aGraph)
 {
   MOZ_ASSERT(!mGraph || !aGraph, "Should only be set once");
   mGraph = aGraph;
 }
 
-void
-MediaInputPort::BlockTrackIdImpl(TrackID aTrackId)
-{
-  mBlockedTracks.AppendElement(aTrackId);
-}
-
-void
-MediaInputPort::BlockTrackId(TrackID aTrackId)
-{
-  class Message : public ControlMessage {
-  public:
-    explicit Message(MediaInputPort* aPort, TrackID aTrackId)
-      : ControlMessage(aPort->GetDestination()),
-        mPort(aPort), mTrackId(aTrackId) {}
-    virtual void Run()
-    {
-      mPort->BlockTrackIdImpl(mTrackId);
-    }
-    virtual void RunDuringShutdown()
-    {
-      Run();
-    }
-    nsRefPtr<MediaInputPort> mPort;
-    TrackID mTrackId;
-  };
-
-  MOZ_ASSERT(aTrackId != TRACK_NONE && aTrackId != TRACK_INVALID && aTrackId != TRACK_ANY,
-             "Only explicit TrackID is allowed");
-  GraphImpl()->AppendMessage(new Message(this, aTrackId));
-}
-
 already_AddRefed<MediaInputPort>
-ProcessedMediaStream::AllocateInputPort(MediaStream* aStream, TrackID aTrackID,
+ProcessedMediaStream::AllocateInputPort(MediaStream* aStream,
                                         uint16_t aInputNumber, uint16_t aOutputNumber)
 {
   // This method creates two references to the MediaInputPort: one for
   // the main thread, and one for the MediaStreamGraph.
   class Message : public ControlMessage {
   public:
     explicit Message(MediaInputPort* aPort)
       : ControlMessage(aPort->GetDestination()),
@@ -2480,20 +2449,17 @@ ProcessedMediaStream::AllocateInputPort(
       unused << mPort.forget();
     }
     virtual void RunDuringShutdown()
     {
       Run();
     }
     nsRefPtr<MediaInputPort> mPort;
   };
-
-  MOZ_ASSERT(aTrackID != TRACK_NONE && aTrackID != TRACK_INVALID,
-             "Only TRACK_ANY and explicit ID are allowed");
-  nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, aTrackID, this,
+  nsRefPtr<MediaInputPort> port = new MediaInputPort(aStream, this,
                                                      aInputNumber, aOutputNumber);
   port->SetGraphImpl(GraphImpl());
   GraphImpl()->AppendMessage(new Message(port));
   return port.forget();
 }
 
 void
 ProcessedMediaStream::Finish()
@@ -2794,20 +2760,19 @@ ProcessedMediaStream*
 MediaStreamGraph::CreateTrackUnionStream(DOMMediaStream* aWrapper)
 {
   TrackUnionStream* stream = new TrackUnionStream(aWrapper);
   AddStream(stream);
   return stream;
 }
 
 ProcessedMediaStream*
-MediaStreamGraph::CreateAudioCaptureStream(DOMMediaStream* aWrapper,
-                                           TrackID aTrackId)
+MediaStreamGraph::CreateAudioCaptureStream(DOMMediaStream* aWrapper)
 {
-  AudioCaptureStream* stream = new AudioCaptureStream(aWrapper, aTrackId);
+  AudioCaptureStream* stream = new AudioCaptureStream(aWrapper);
   AddStream(stream);
   return stream;
 }
 
 void
 MediaStreamGraph::AddStream(MediaStream* aStream, uint32_t aFlags)
 {
   NS_ADDREF(aStream);
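
The comment in ProcessedMediaStream::AllocateInputPort() above notes that the port ends up with two references: one returned to the main thread and one carried by a queued control message for the graph. A rough standalone sketch of that hand-off, assuming a hypothetical message queue (gGraphMessages, Port and AllocatePort are illustrative, not Gecko APIs):

#include <functional>
#include <memory>
#include <queue>

struct Port {};

// Hypothetical stand-in for the graph's control-message queue.
std::queue<std::function<void()>> gGraphMessages;

std::shared_ptr<Port> AllocatePort() {
  auto port = std::make_shared<Port>();
  // The queued message captures a second reference; in the real code this is
  // the reference forgotten into the graph inside Message::Run().
  gGraphMessages.push([graphRef = port]() { (void)graphRef; });
  return port;  // main-thread reference
}
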
--- a/dom/media/MediaStreamGraph.h
+++ b/dom/media/MediaStreamGraph.h
@@ -156,26 +156,21 @@ public:
     TRACK_EVENT_CREATED = 0x01,
     TRACK_EVENT_ENDED = 0x02
   };
   /**
    * Notify that changes to one of the stream tracks have been queued.
    * aTrackEvents can be any combination of TRACK_EVENT_CREATED and
    * TRACK_EVENT_ENDED. aQueuedMedia is the data being added to the track
    * at aTrackOffset (relative to the start of the stream).
-   * aInputStream and aInputTrackID will be set if the changes originated
-   * from an input stream's track. In practice they will only be used for
-   * ProcessedMediaStreams.
    */
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream = nullptr,
-                                        TrackID aInputTrackID = TRACK_INVALID) {}
+                                        const MediaSegment& aQueuedMedia) {}
 
   /**
    * Notify that all new tracks this iteration have been created.
    * This is to ensure that tracks added atomically to MediaStreamGraph
    * are also notified atomically to MediaStreamListeners.
    */
   virtual void NotifyFinishedTrackCreation(MediaStreamGraph* aGraph) {}
 };
@@ -419,16 +414,17 @@ public:
 
   friend class MediaStreamGraphImpl;
   friend class MediaInputPort;
   friend class AudioNodeExternalInputStream;
 
   virtual SourceMediaStream* AsSourceStream() { return nullptr; }
   virtual ProcessedMediaStream* AsProcessedStream() { return nullptr; }
   virtual AudioNodeStream* AsAudioNodeStream() { return nullptr; }
+  virtual CameraPreviewMediaStream* AsCameraPreviewStream() { return nullptr; }
 
   // These Impl methods perform the core functionality of the control methods
   // above, on the media graph thread.
   /**
    * Stop all stream activity and disconnect it from all inputs and outputs.
    * This must be idempotent.
    */
   virtual void DestroyImpl();
@@ -904,40 +900,34 @@ protected:
 };
 
 /**
  * Represents a connection between a ProcessedMediaStream and one of its
  * input streams.
  * We make these refcounted so that stream-related messages with MediaInputPort*
  * pointers can be sent to the main thread safely.
  *
- * A port can be locked to a specific track in the source stream, in which case
- * only this track will be forwarded to the destination stream. TRACK_ANY
- * can used to signal that all tracks shall be forwarded.
- *
  * When a port's source or destination stream dies, the stream's DestroyImpl
  * calls MediaInputPort::Disconnect to disconnect the port from
  * the source and destination streams.
  *
  * The lifetimes of MediaInputPort are controlled from the main thread.
  * The media graph adds a reference to the port. When a MediaInputPort is no
  * longer needed, main-thread code sends a Destroy message for the port and
  * clears its reference (the last main-thread reference to the object). When
  * the Destroy message is processed on the graph manager thread we disconnect
  * the port and drop the graph's reference, destroying the object.
  */
 class MediaInputPort final
 {
 private:
   // Do not call this constructor directly. Instead call aDest->AllocateInputPort.
-  MediaInputPort(MediaStream* aSource, TrackID& aSourceTrack,
-                 ProcessedMediaStream* aDest,
+  MediaInputPort(MediaStream* aSource, ProcessedMediaStream* aDest,
                  uint16_t aInputNumber, uint16_t aOutputNumber)
     : mSource(aSource)
-    , mSourceTrack(aSourceTrack)
     , mDest(aDest)
     , mInputNumber(aInputNumber)
     , mOutputNumber(aOutputNumber)
     , mGraph(nullptr)
   {
     MOZ_COUNT_CTOR(MediaInputPort);
   }
 
@@ -960,32 +950,18 @@ public:
   /**
    * Disconnects and destroys the port. The caller must not reference this
    * object again.
    */
   void Destroy();
 
   // Any thread
   MediaStream* GetSource() { return mSource; }
-  TrackID GetSourceTrackId() { return mSourceTrack; }
   ProcessedMediaStream* GetDestination() { return mDest; }
 
-  // Block aTrackId in the port. Consumers will interpret this track as ended.
-  void BlockTrackId(TrackID aTrackId);
-private:
-  void BlockTrackIdImpl(TrackID aTrackId);
-
-public:
-  // Returns true if aTrackId has not been blocked and this port has not
-  // been locked to another track.
-  bool PassTrackThrough(TrackID aTrackId) {
-    return !mBlockedTracks.Contains(aTrackId) &&
-           (mSourceTrack == TRACK_ANY || mSourceTrack == aTrackId);
-  }
-
   uint16_t InputNumber() const { return mInputNumber; }
   uint16_t OutputNumber() const { return mOutputNumber; }
 
   // Call on graph manager thread
   struct InputInterval {
     GraphTime mStart;
     GraphTime mEnd;
     bool mInputIsBlocked;
@@ -1021,23 +997,21 @@ public:
   }
 
 private:
   friend class MediaStreamGraphImpl;
   friend class MediaStream;
   friend class ProcessedMediaStream;
   // Never modified after Init()
   MediaStream* mSource;
-  TrackID mSourceTrack;
   ProcessedMediaStream* mDest;
   // The input and output numbers are optional, and are currently only used by
   // Web Audio.
   const uint16_t mInputNumber;
   const uint16_t mOutputNumber;
-  nsTArray<TrackID> mBlockedTracks;
 
   // Our media stream graph
   MediaStreamGraphImpl* mGraph;
 };
 
 /**
  * This stream processes zero or more input streams in parallel to produce
  * its output. The details of how the output is produced are handled by
@@ -1049,23 +1023,18 @@ public:
   explicit ProcessedMediaStream(DOMMediaStream* aWrapper)
     : MediaStream(aWrapper), mAutofinish(false)
   {}
 
   // Control API.
   /**
    * Allocates a new input port attached to source aStream.
    * This stream can be removed by calling MediaInputPort::Remove().
-   * The input port is tied to aTrackID in the source stream.
-   * aTrackID can be set to TRACK_ANY to automatically forward all tracks from
-   * aStream. To end a track in the destination stream forwarded with TRACK_ANY,
-   * it can be blocked in the input port through MediaInputPort::BlockTrackId().
    */
   already_AddRefed<MediaInputPort> AllocateInputPort(MediaStream* aStream,
-                                                     TrackID aTrackID = TRACK_ANY,
                                                      uint16_t aInputNumber = 0,
                                                      uint16_t aOutputNumber = 0);
   /**
    * Force this stream into the finished state.
    */
   void Finish();
   /**
    * Set the autofinish flag on this stream (defaults to false). When this flag
@@ -1110,16 +1079,21 @@ public:
    * mStateComputedTime, violating the invariant that finished streams are blocked.
    */
   enum {
     ALLOW_FINISH = 0x01
   };
   virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) = 0;
   void SetAutofinishImpl(bool aAutofinish) { mAutofinish = aAutofinish; }
 
+  /**
+   * Forward SetTrackEnabled() to the input MediaStream(s) and translate the ID
+   */
+  virtual void ForwardTrackEnabled(TrackID aOutputID, bool aEnabled) {}
+
   // Only valid after MediaStreamGraphImpl::UpdateStreamOrder() has run.
   // A DelayNode is considered to break a cycle and so this will not return
   // true for echo loops, only for muted cycles.
   bool InMutedCycle() const { return mCycleMarker; }
 
   virtual size_t SizeOfExcludingThis(MallocSizeOf aMallocSizeOf) const override
   {
     size_t amount = MediaStream::SizeOfExcludingThis(aMallocSizeOf);
@@ -1197,18 +1171,17 @@ public:
    * tracks, whichever is greater.
    * TODO at some point we will probably need to add API to select
    * particular tracks of each input stream.
    */
   ProcessedMediaStream* CreateTrackUnionStream(DOMMediaStream* aWrapper);
   /**
    * Create a stream that will mix all its audio input.
    */
-  ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper,
-                                                 TrackID aTrackId);
+  ProcessedMediaStream* CreateAudioCaptureStream(DOMMediaStream* aWrapper);
 
   enum {
     ADD_STREAM_SUSPENDED = 0x01
   };
   /**
    * Add a new stream to the graph.  Main thread.
    */
   void AddStream(MediaStream* aStream, uint32_t aFlags = 0);
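
With the aInputStream/aInputTrackID arguments removed, listeners again see only the output track and the aTrackEvents bitmask. A small standalone sketch of consuming that bitmask (HandleTrackEvents is illustrative; the event values mirror TRACK_EVENT_CREATED/TRACK_EVENT_ENDED above):

#include <cstdint>
#include <cstdio>

enum : uint32_t {
  TRACK_EVENT_CREATED = 0x01,
  TRACK_EVENT_ENDED   = 0x02,
};

void HandleTrackEvents(int aTrackID, uint32_t aTrackEvents) {
  if (aTrackEvents & TRACK_EVENT_CREATED) {
    std::printf("track %d created\n", aTrackID);
  }
  if (aTrackEvents & TRACK_EVENT_ENDED) {
    std::printf("track %d ended\n", aTrackID);
  }
}
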
--- a/dom/media/MediaStreamTrack.cpp
+++ b/dom/media/MediaStreamTrack.cpp
@@ -8,17 +8,17 @@
 #include "DOMMediaStream.h"
 #include "nsIUUIDGenerator.h"
 #include "nsServiceManagerUtils.h"
 
 namespace mozilla {
 namespace dom {
 
 MediaStreamTrack::MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID)
-  : mOwningStream(aStream), mTrackID(aTrackID), mEnded(false), mEnabled(true)
+  : mStream(aStream), mTrackID(aTrackID), mEnded(false), mEnabled(true)
 {
 
   nsresult rv;
   nsCOMPtr<nsIUUIDGenerator> uuidgen =
     do_GetService("@mozilla.org/uuid-generator;1", &rv);
 
   nsID uuid;
   memset(&uuid, 0, sizeof(uuid));
@@ -31,43 +31,43 @@ MediaStreamTrack::MediaStreamTrack(DOMMe
   mID = NS_ConvertASCIItoUTF16(chars);
 }
 
 MediaStreamTrack::~MediaStreamTrack()
 {
 }
 
 NS_IMPL_CYCLE_COLLECTION_INHERITED(MediaStreamTrack, DOMEventTargetHelper,
-                                   mOwningStream)
+                                   mStream)
 
 NS_IMPL_ADDREF_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
 NS_IMPL_RELEASE_INHERITED(MediaStreamTrack, DOMEventTargetHelper)
 NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION_INHERITED(MediaStreamTrack)
 NS_INTERFACE_MAP_END_INHERITING(DOMEventTargetHelper)
 
 void
 MediaStreamTrack::GetId(nsAString& aID) const
 {
   aID = mID;
 }
 
 void
 MediaStreamTrack::SetEnabled(bool aEnabled)
 {
   mEnabled = aEnabled;
-  mOwningStream->SetTrackEnabled(mTrackID, aEnabled);
+  mStream->SetTrackEnabled(mTrackID, aEnabled);
 }
 
 void
 MediaStreamTrack::Stop()
 {
-  mOwningStream->StopTrack(mTrackID);
+  mStream->StopTrack(mTrackID);
 }
 
 already_AddRefed<Promise>
 MediaStreamTrack::ApplyConstraints(const MediaTrackConstraints& aConstraints,
                                    ErrorResult &aRv)
 {
-  return GetStream()->ApplyConstraintsToTrack(mTrackID, aConstraints, aRv);
+  return mStream->ApplyConstraintsToTrack(mTrackID, aConstraints, aRv);
 }
 
 } // namespace dom
 } // namespace mozilla
--- a/dom/media/MediaStreamTrack.h
+++ b/dom/media/MediaStreamTrack.h
@@ -30,28 +30,20 @@ public:
    * MediaStream owned by aStream.
    */
   MediaStreamTrack(DOMMediaStream* aStream, TrackID aTrackID);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamTrack,
                                            DOMEventTargetHelper)
 
-  DOMMediaStream* GetParentObject() const { return mOwningStream; }
+  DOMMediaStream* GetParentObject() const { return mStream; }
   virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override = 0;
 
-  /**
-   * Returns the DOMMediaStream owning this track.
-   */
-  DOMMediaStream* GetStream() const { return mOwningStream; }
-
-  /**
-   * Returns the TrackID this stream has in its owning DOMMediaStream's Owned
-   * stream.
-   */
+  DOMMediaStream* GetStream() const { return mStream; }
   TrackID GetTrackID() const { return mTrackID; }
   virtual AudioStreamTrack* AsAudioStreamTrack() { return nullptr; }
   virtual VideoStreamTrack* AsVideoStreamTrack() { return nullptr; }
 
   // WebIDL
   virtual void GetKind(nsAString& aKind) = 0;
   void GetId(nsAString& aID) const;
   void GetLabel(nsAString& aLabel) { aLabel.Truncate(); }
@@ -66,17 +58,17 @@ public:
 
   // Webrtc allows the remote side to name tracks whatever it wants, and we
   // need to surface this to content.
   void AssignId(const nsAString& aID) { mID = aID; }
 
 protected:
   virtual ~MediaStreamTrack();
 
-  nsRefPtr<DOMMediaStream> mOwningStream;
+  nsRefPtr<DOMMediaStream> mStream;
   TrackID mTrackID;
   nsString mID;
   bool mEnded;
   bool mEnabled;
 };
 
 } // namespace dom
 } // namespace mozilla
--- a/dom/media/StreamBuffer.h
+++ b/dom/media/StreamBuffer.h
@@ -14,17 +14,16 @@ namespace mozilla {
 /**
  * Unique ID for track within a StreamBuffer. Tracks from different
  * StreamBuffers may have the same ID; this matters when appending StreamBuffers,
  * since tracks with the same ID are matched. Only IDs greater than 0 are allowed.
  */
 typedef int32_t TrackID;
 const TrackID TRACK_NONE = 0;
 const TrackID TRACK_INVALID = -1;
-const TrackID TRACK_ANY = -2;
 
 inline TrackTicks RateConvertTicksRoundDown(TrackRate aOutRate,
                                             TrackRate aInRate,
                                             TrackTicks aTicks)
 {
   NS_ASSERTION(0 < aOutRate && aOutRate <= TRACK_RATE_MAX, "Bad out rate");
   NS_ASSERTION(0 < aInRate && aInRate <= TRACK_RATE_MAX, "Bad in rate");
   NS_WARN_IF_FALSE(0 <= aTicks && aTicks <= TRACK_TICKS_MAX, "Bad ticks"); // bug 957691
--- a/dom/media/TrackUnionStream.cpp
+++ b/dom/media/TrackUnionStream.cpp
@@ -50,20 +50,18 @@ TrackUnionStream::TrackUnionStream(DOMMe
 {
   if (!gTrackUnionStreamLog) {
     gTrackUnionStreamLog = PR_NewLogModule("TrackUnionStream");
   }
 }
 
   void TrackUnionStream::RemoveInput(MediaInputPort* aPort)
   {
-    STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing input %p", this, aPort));
     for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
       if (mTrackMap[i].mInputPort == aPort) {
-        STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p removing trackmap entry %d", this, i));
         EndTrack(i);
         mTrackMap.RemoveElementAt(i);
       }
     }
     ProcessedMediaStream::RemoveInput(aPort);
   }
   void TrackUnionStream::ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags)
   {
@@ -93,29 +91,28 @@ TrackUnionStream::TrackUnionStream(DOMMe
       for (StreamBuffer::TrackIter tracks(stream->GetStreamBuffer());
            !tracks.IsEnded(); tracks.Next()) {
         bool found = false;
         for (uint32_t j = 0; j < mTrackMap.Length(); ++j) {
           TrackMapEntry* map = &mTrackMap[j];
           if (map->mInputPort == mInputs[i] && map->mInputTrackID == tracks->GetID()) {
             bool trackFinished;
             StreamBuffer::Track* outputTrack = mBuffer.FindTrack(map->mOutputTrackID);
-            found = true;
-            if (!outputTrack || outputTrack->IsEnded() ||
-                !mInputs[i]->PassTrackThrough(tracks->GetID())) {
+            if (!outputTrack || outputTrack->IsEnded()) {
               trackFinished = true;
             } else {
               CopyTrackData(tracks.get(), j, aFrom, aTo, &trackFinished);
             }
             mappedTracksFinished[j] = trackFinished;
             mappedTracksWithMatchingInputTracks[j] = true;
+            found = true;
             break;
           }
         }
-        if (!found && mInputs[i]->PassTrackThrough(tracks->GetID())) {
+        if (!found) {
           bool trackFinished = false;
           trackAdded = true;
           uint32_t mapIndex = AddTrack(mInputs[i], tracks.get(), aFrom);
           CopyTrackData(tracks.get(), mapIndex, aFrom, aTo, &trackFinished);
           mappedTracksFinished.AppendElement(trackFinished);
           mappedTracksWithMatchingInputTracks.AppendElement(true);
         }
       }
@@ -144,16 +141,28 @@ TrackUnionStream::TrackUnionStream(DOMMe
       mBuffer.AdvanceKnownTracksTime(GraphTimeToStreamTimeWithBlocking(aTo));
     }
     if (allHaveCurrentData) {
       // We can make progress if we're not blocked
       mHasCurrentData = true;
     }
   }
 
+  // Forward SetTrackEnabled(output_track_id, enabled) to the Source MediaStream,
+  // translating the output track ID into the correct ID in the source.
+  void TrackUnionStream::ForwardTrackEnabled(TrackID aOutputID, bool aEnabled)
+  {
+    for (int32_t i = mTrackMap.Length() - 1; i >= 0; --i) {
+      if (mTrackMap[i].mOutputTrackID == aOutputID) {
+        mTrackMap[i].mInputPort->GetSource()->
+          SetTrackEnabled(mTrackMap[i].mInputTrackID, aEnabled);
+      }
+    }
+  }
+
   uint32_t TrackUnionStream::AddTrack(MediaInputPort* aPort, StreamBuffer::Track* aTrack,
                     GraphTime aFrom)
   {
     TrackID id = aTrack->GetID();
     if (id > mNextAvailableTrackID &&
        mUsedTracks.BinaryIndexOf(id) == mUsedTracks.NoIndex) {
       // Input id available. Mark it used in mUsedTracks.
       mUsedTracks.InsertElementSorted(id);
@@ -177,18 +186,17 @@ TrackUnionStream::TrackUnionStream(DOMMe
     StreamTime outputStart = GraphTimeToStreamTimeWithBlocking(aFrom);
 
     nsAutoPtr<MediaSegment> segment;
     segment = aTrack->GetSegment()->CreateEmptyClone();
     for (uint32_t j = 0; j < mListeners.Length(); ++j) {
       MediaStreamListener* l = mListeners[j];
       l->NotifyQueuedTrackChanges(Graph(), id, outputStart,
                                   MediaStreamListener::TRACK_EVENT_CREATED,
-                                  *segment,
-                                  aPort->GetSource(), aTrack->GetID());
+                                  *segment);
     }
     segment->AppendNullData(outputStart);
     StreamBuffer::Track* track =
       &mBuffer.AddTrack(id, outputStart, segment.forget());
     STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p adding track %d for input stream %p track %d, start ticks %lld",
                               this, id, aPort->GetSource(), aTrack->GetID(),
                               (long long)outputStart));
 
@@ -203,27 +211,24 @@ TrackUnionStream::TrackUnionStream(DOMMe
     return mTrackMap.Length() - 1;
   }
 
   void TrackUnionStream::EndTrack(uint32_t aIndex)
   {
     StreamBuffer::Track* outputTrack = mBuffer.FindTrack(mTrackMap[aIndex].mOutputTrackID);
     if (!outputTrack || outputTrack->IsEnded())
       return;
-    STREAM_LOG(LogLevel::Debug, ("TrackUnionStream %p ending track %d", this, outputTrack->GetID()));
     for (uint32_t j = 0; j < mListeners.Length(); ++j) {
       MediaStreamListener* l = mListeners[j];
       StreamTime offset = outputTrack->GetSegment()->GetDuration();
       nsAutoPtr<MediaSegment> segment;
       segment = outputTrack->GetSegment()->CreateEmptyClone();
       l->NotifyQueuedTrackChanges(Graph(), outputTrack->GetID(), offset,
                                   MediaStreamListener::TRACK_EVENT_ENDED,
-                                  *segment,
-                                  mTrackMap[aIndex].mInputPort->GetSource(),
-                                  mTrackMap[aIndex].mInputTrackID);
+                                  *segment);
     }
     outputTrack->SetEnded();
   }
 
   void TrackUnionStream::CopyTrackData(StreamBuffer::Track* aInputTrack,
                      uint32_t aMapIndex, GraphTime aFrom, GraphTime aTo,
                      bool* aOutputTrackFinished)
   {
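
A standalone sketch of the TrackID translation that ForwardTrackEnabled() performs above, with simplified stand-ins (SourceStream and TrackMapEntry here are illustrative, not the Gecko types): the union's output track ID is mapped back to the input track ID on the source stream before forwarding the enable/disable request.

#include <cstdint>
#include <vector>

using TrackID = int32_t;

struct SourceStream { void SetTrackEnabled(TrackID, bool) {} };

struct TrackMapEntry {
  SourceStream* mSource;
  TrackID mInputTrackID;
  TrackID mOutputTrackID;
};

void ForwardTrackEnabled(std::vector<TrackMapEntry>& aMap,
                         TrackID aOutputID, bool aEnabled) {
  for (TrackMapEntry& entry : aMap) {
    if (entry.mOutputTrackID == aOutputID) {
      entry.mSource->SetTrackEnabled(entry.mInputTrackID, aEnabled);
    }
  }
}
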
--- a/dom/media/TrackUnionStream.h
+++ b/dom/media/TrackUnionStream.h
@@ -16,16 +16,20 @@ namespace mozilla {
  */
 class TrackUnionStream : public ProcessedMediaStream {
 public:
   explicit TrackUnionStream(DOMMediaStream* aWrapper);
 
   virtual void RemoveInput(MediaInputPort* aPort) override;
   virtual void ProcessInput(GraphTime aFrom, GraphTime aTo, uint32_t aFlags) override;
 
+  // Forward SetTrackEnabled(output_track_id, enabled) to the Source MediaStream,
+  // translating the output track ID into the correct ID in the source.
+  virtual void ForwardTrackEnabled(TrackID aOutputID, bool aEnabled) override;
+
 protected:
   // Only non-ended tracks are allowed to persist in this map.
   struct TrackMapEntry {
     // mEndOfConsumedInputTicks is the end of the input ticks that we've consumed.
     // 0 if we haven't consumed any yet.
     StreamTime mEndOfConsumedInputTicks;
     // mEndOfLastInputIntervalInInputStream is the timestamp for the end of the
     // previous interval which was unblocked for both the input and output
--- a/dom/media/encoder/MediaEncoder.cpp
+++ b/dom/media/encoder/MediaEncoder.cpp
@@ -36,19 +36,17 @@ PRLogModuleInfo* gMediaEncoderLog;
 
 namespace mozilla {
 
 void
 MediaEncoder::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                        TrackID aID,
                                        StreamTime aTrackOffset,
                                        uint32_t aTrackEvents,
-                                       const MediaSegment& aQueuedMedia,
-                                       MediaStream* aInputStream,
-                                       TrackID aInputTrackID)
+                                       const MediaSegment& aQueuedMedia)
 {
   // Process the incoming raw track data from MediaStreamGraph, called on the
   // thread of MediaStreamGraph.
   if (mAudioEncoder && aQueuedMedia.GetType() == MediaSegment::AUDIO) {
     mAudioEncoder->NotifyQueuedTrackChanges(aGraph, aID,
                                             aTrackOffset, aTrackEvents,
                                             aQueuedMedia);
 
--- a/dom/media/encoder/MediaEncoder.h
+++ b/dom/media/encoder/MediaEncoder.h
@@ -80,19 +80,17 @@ public :
 
   /**
    * Notified by the control loop of MediaStreamGraph; aQueueMedia is the raw
    * track data in form of MediaSegment.
    */
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream,
-                                        TrackID aInputTrackID) override;
+                                        const MediaSegment& aQueuedMedia) override;
 
   /**
    * Notified the stream is being removed.
    */
   virtual void NotifyEvent(MediaStreamGraph* aGraph,
                            MediaStreamListener::MediaStreamGraphEvent event) override;
 
   /**
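
The doc comment above says NotifyQueuedTrackChanges() receives raw track data from the MediaStreamGraph control loop; MediaEncoder.cpp routes it to the matching track encoder by segment type. A rough standalone sketch of that dispatch, with stand-in types (TrackEncoder and AppendQueuedData are illustrative names, not the real API):

#include <cstdint>

using TrackID = int32_t;
using StreamTime = int64_t;

struct MediaSegment {
  enum Type { AUDIO, VIDEO };
  Type type;
};

struct TrackEncoder {
  virtual ~TrackEncoder() = default;
  virtual void AppendQueuedData(TrackID aID, StreamTime aOffset,
                                uint32_t aEvents,
                                const MediaSegment& aQueued) = 0;
};

struct EncoderSketch {
  TrackEncoder* audioEncoder = nullptr;
  TrackEncoder* videoEncoder = nullptr;

  // Called with raw track data; pick the encoder that matches the segment
  // type and hand the data over unchanged.
  void NotifyQueuedTrackChanges(TrackID aID, StreamTime aOffset,
                                uint32_t aEvents, const MediaSegment& aQueued) {
    if (audioEncoder && aQueued.type == MediaSegment::AUDIO) {
      audioEncoder->AppendQueuedData(aID, aOffset, aEvents, aQueued);
    } else if (videoEncoder && aQueued.type == MediaSegment::VIDEO) {
      videoEncoder->AppendQueuedData(aID, aOffset, aEvents, aQueued);
    }
  }
};
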
--- a/dom/media/imagecapture/CaptureTask.cpp
+++ b/dom/media/imagecapture/CaptureTask.cpp
@@ -52,48 +52,46 @@ CaptureTask::AttachStream()
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   nsRefPtr<dom::VideoStreamTrack> track = mImageCapture->GetVideoStreamTrack();
 
   nsRefPtr<DOMMediaStream> domStream = track->GetStream();
   domStream->AddPrincipalChangeObserver(this);
 
-  nsRefPtr<MediaStream> stream = domStream->GetPlaybackStream();
+  nsRefPtr<MediaStream> stream = domStream->GetStream();
   stream->AddListener(this);
 }
 
 void
 CaptureTask::DetachStream()
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   nsRefPtr<dom::VideoStreamTrack> track = mImageCapture->GetVideoStreamTrack();
 
   nsRefPtr<DOMMediaStream> domStream = track->GetStream();
   domStream->RemovePrincipalChangeObserver(this);
 
-  nsRefPtr<MediaStream> stream = domStream->GetPlaybackStream();
+  nsRefPtr<MediaStream> stream = domStream->GetStream();
   stream->RemoveListener(this);
 }
 
 void
 CaptureTask::PrincipalChanged(DOMMediaStream* aMediaStream)
 {
   MOZ_ASSERT(NS_IsMainThread());
   mPrincipalChanged = true;
 }
 
 void
 CaptureTask::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                       StreamTime aTrackOffset,
                                       uint32_t aTrackEvents,
-                                      const MediaSegment& aQueuedMedia,
-                                      MediaStream* aInputStream,
-                                      TrackID aInputTrackID)
+                                      const MediaSegment& aQueuedMedia)
 {
   if (mImageGrabbedOrTrackEnd) {
     return;
   }
 
   if (aTrackEvents == MediaStreamListener::TRACK_EVENT_ENDED) {
     PostTrackEndEvent();
     return;
--- a/dom/media/imagecapture/CaptureTask.h
+++ b/dom/media/imagecapture/CaptureTask.h
@@ -30,19 +30,17 @@ class ImageCapture;
 class CaptureTask : public MediaStreamListener,
                     public DOMMediaStream::PrincipalChangeObserver
 {
 public:
   // MediaStreamListener methods.
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream,
-                                        TrackID aInputTrackID) override;
+                                        const MediaSegment& aQueuedMedia) override;
 
   virtual void NotifyEvent(MediaStreamGraph* aGraph,
                            MediaStreamGraphEvent aEvent) override;
 
   // DOMMediaStream::PrincipalChangeObserver method.
   virtual void PrincipalChanged(DOMMediaStream* aMediaStream) override;
 
   // CaptureTask methods.
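
CaptureTask listens on the stream it attached to and stops once it has grabbed an image or the track ends. A small sketch of that guard, under the assumption of a simplified listener interface (the event flag and method shape below are stand-ins):

#include <cstdint>

struct MediaSegment {};

// Sketch of the "grab once, then ignore" guard in CaptureTask above.
struct CaptureTaskSketch {
  static constexpr uint32_t TRACK_EVENT_ENDED = 1;
  bool imageGrabbedOrTrackEnd = false;

  void NotifyQueuedTrackChanges(uint32_t aTrackEvents,
                                const MediaSegment& /*aQueuedMedia*/) {
    if (imageGrabbedOrTrackEnd) {
      return;  // already grabbed a frame or saw the end; ignore further data
    }
    if (aTrackEvents == TRACK_EVENT_ENDED) {
      imageGrabbedOrTrackEnd = true;  // the real code posts a track-end event
      return;
    }
    // ... otherwise attempt to grab one frame from the queued video data ...
  }
};
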
--- a/dom/media/webaudio/AudioNode.cpp
+++ b/dom/media/webaudio/AudioNode.cpp
@@ -220,18 +220,17 @@ AudioNode::Connect(AudioNode& aDestinati
   input->mInputPort = aInput;
   input->mOutputPort = aOutput;
   AudioNodeStream* destinationStream = aDestination.mStream;
   if (mStream && destinationStream) {
     // Connect streams in the MediaStreamGraph
     MOZ_ASSERT(aInput <= UINT16_MAX, "Unexpected large input port number");
     MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
     input->mStreamPort = destinationStream->
-      AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK,
-                        static_cast<uint16_t>(aInput),
+      AllocateInputPort(mStream, static_cast<uint16_t>(aInput),
                         static_cast<uint16_t>(aOutput));
   }
   aDestination.NotifyInputsChanged();
 
   // This connection may have connected a panner and a source.
   Context()->UpdatePannerSource();
 }
 
@@ -263,18 +262,17 @@ AudioNode::Connect(AudioParam& aDestinat
 
   MediaStream* stream = aDestination.Stream();
   MOZ_ASSERT(stream->AsProcessedStream());
   ProcessedMediaStream* ps = static_cast<ProcessedMediaStream*>(stream);
   if (mStream) {
     // Setup our stream as an input to the AudioParam's stream
     MOZ_ASSERT(aOutput <= UINT16_MAX, "Unexpected large output port number");
     input->mStreamPort =
-      ps->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK,
-                            0, static_cast<uint16_t>(aOutput));
+      ps->AllocateInputPort(mStream, 0, static_cast<uint16_t>(aOutput));
   }
 }
 
 void
 AudioNode::SendDoubleParameterToStream(uint32_t aIndex, double aValue)
 {
   MOZ_ASSERT(mStream, "How come we don't have a stream here?");
   mStream->SetDoubleParameter(aIndex, aValue);
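
The AudioNode::Connect() hunks above reduce AllocateInputPort() to the source stream plus input/output port numbers. A minimal sketch of that connect pattern with a hypothetical, simplified stream type (the real MediaStreamGraph port bookkeeping is far more involved):

#include <cstdint>
#include <vector>

struct MediaStream;

// One allocated connection from a source stream into a processed stream.
struct InputPort {
  MediaStream* source;
  uint16_t inputPort;
  uint16_t outputPort;
};

struct MediaStream {
  std::vector<InputPort> inputs;

  // Register aSource as an input of this stream and return the new port.
  InputPort* AllocateInputPort(MediaStream* aSource, uint16_t aInput = 0,
                               uint16_t aOutput = 0) {
    inputs.push_back(InputPort{aSource, aInput, aOutput});
    return &inputs.back();
  }
};

int main() {
  MediaStream source, destination;
  // Mirrors the Connect() call sites above: destination pulls from source,
  // defaulting both port numbers to 0 as AudioParam::Stream() does.
  InputPort* port = destination.AllocateInputPort(&source);
  (void)port;
  return 0;
}
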
--- a/dom/media/webaudio/AudioNodeExternalInputStream.cpp
+++ b/dom/media/webaudio/AudioNodeExternalInputStream.cpp
@@ -137,20 +137,16 @@ AudioNodeExternalInputStream::ProcessInp
   MOZ_ASSERT(mInputs.Length() == 1);
 
   MediaStream* source = mInputs[0]->GetSource();
   nsAutoTArray<AudioSegment,1> audioSegments;
   uint32_t inputChannels = 0;
   for (StreamBuffer::TrackIter tracks(source->mBuffer, MediaSegment::AUDIO);
        !tracks.IsEnded(); tracks.Next()) {
     const StreamBuffer::Track& inputTrack = *tracks;
-    if (!mInputs[0]->PassTrackThrough(tracks->GetID())) {
-      continue;
-    }
-
     const AudioSegment& inputSegment =
         *static_cast<AudioSegment*>(inputTrack.GetSegment());
     if (inputSegment.IsNull()) {
       continue;
     }
 
     AudioSegment& segment = *audioSegments.AppendElement();
     GraphTime next;
--- a/dom/media/webaudio/AudioParam.cpp
+++ b/dom/media/webaudio/AudioParam.cpp
@@ -109,18 +109,17 @@ AudioParam::Stream()
   // Mark as an AudioParam helper stream
   stream->SetAudioParamHelperStream();
 
   mStream = stream.forget();
 
   // Setup the AudioParam's stream as an input to the owner AudioNode's stream
   AudioNodeStream* nodeStream = mNode->GetStream();
   if (nodeStream) {
-    mNodeStreamPort =
-      nodeStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK);
+    mNodeStreamPort = nodeStream->AllocateInputPort(mStream);
   }
 
   // Send the stream to the timeline on the MSG side.
   AudioTimelineEvent event(mStream);
 
   mCallback(mNode, event);
 
   return mStream;
--- a/dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioDestinationNode.cpp
@@ -29,24 +29,24 @@ MediaStreamAudioDestinationNode::MediaSt
               ChannelCountMode::Explicit,
               ChannelInterpretation::Speakers)
   , mDOMStream(
       DOMAudioNodeMediaStream::CreateTrackUnionStream(GetOwner(),
                                                       this,
                                                       aContext->Graph()))
 {
   // Ensure an audio track with the correct ID is exposed to JS
-  mDOMStream->CreateOwnDOMTrack(AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO);
+  mDOMStream->CreateDOMTrack(AudioNodeStream::AUDIO_TRACK, MediaSegment::AUDIO);
 
-  ProcessedMediaStream* outputStream = mDOMStream->GetInputStream()->AsProcessedStream();
+  ProcessedMediaStream* outputStream = mDOMStream->GetStream()->AsProcessedStream();
   MOZ_ASSERT(!!outputStream);
   AudioNodeEngine* engine = new AudioNodeEngine(this);
   mStream = AudioNodeStream::Create(aContext, engine,
                                     AudioNodeStream::EXTERNAL_OUTPUT);
-  mPort = outputStream->AllocateInputPort(mStream, AudioNodeStream::AUDIO_TRACK);
+  mPort = outputStream->AllocateInputPort(mStream);
 
   nsIDocument* doc = aContext->GetParentObject()->GetExtantDoc();
   if (doc) {
     mDOMStream->CombineWithPrincipal(doc->NodePrincipal());
   }
 }
 
 MediaStreamAudioDestinationNode::~MediaStreamAudioDestinationNode()
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.cpp
@@ -3,17 +3,16 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this
  * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "MediaStreamAudioSourceNode.h"
 #include "mozilla/dom/MediaStreamAudioSourceNodeBinding.h"
 #include "AudioNodeEngine.h"
 #include "AudioNodeExternalInputStream.h"
-#include "AudioStreamTrack.h"
 #include "nsIDocument.h"
 #include "mozilla/CORSMode.h"
 
 namespace mozilla {
 namespace dom {
 
 NS_IMPL_CYCLE_COLLECTION_CLASS(MediaStreamAudioSourceNode)
 
@@ -37,17 +36,17 @@ MediaStreamAudioSourceNode::MediaStreamA
               2,
               ChannelCountMode::Max,
               ChannelInterpretation::Speakers),
     mInputStream(aMediaStream)
 {
   AudioNodeEngine* engine = new MediaStreamAudioSourceNodeEngine(this);
   mStream = AudioNodeExternalInputStream::Create(aContext->Graph(), engine);
   ProcessedMediaStream* outputStream = static_cast<ProcessedMediaStream*>(mStream.get());
-  mInputPort = outputStream->AllocateInputPort(aMediaStream->GetPlaybackStream());
+  mInputPort = outputStream->AllocateInputPort(aMediaStream->GetStream());
   mInputStream->AddConsumerToKeepAlive(static_cast<nsIDOMEventTarget*>(this));
 
   PrincipalChanged(mInputStream); // trigger enabling/disabling of the connector
   mInputStream->AddPrincipalChangeObserver(this);
 }
 
 MediaStreamAudioSourceNode::~MediaStreamAudioSourceNode()
 {
--- a/dom/media/webaudio/MediaStreamAudioSourceNode.h
+++ b/dom/media/webaudio/MediaStreamAudioSourceNode.h
@@ -39,18 +39,17 @@ public:
 private:
   bool mEnabled;
 };
 
 class MediaStreamAudioSourceNode : public AudioNode,
                                    public DOMMediaStream::PrincipalChangeObserver
 {
 public:
-  MediaStreamAudioSourceNode(AudioContext* aContext,
-                             DOMMediaStream* aMediaStream);
+  MediaStreamAudioSourceNode(AudioContext* aContext, DOMMediaStream* aMediaStream);
 
   NS_DECL_ISUPPORTS_INHERITED
   NS_DECL_CYCLE_COLLECTION_CLASS_INHERITED(MediaStreamAudioSourceNode, AudioNode)
 
   virtual JSObject* WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto) override;
 
   virtual void DestroyMediaStream() override;
 
--- a/dom/media/webspeech/recognition/SpeechRecognition.cpp
+++ b/dom/media/webspeech/recognition/SpeechRecognition.cpp
@@ -554,35 +554,35 @@ SpeechRecognition::NotifyError(SpeechEve
  **************************************/
 NS_IMETHODIMP
 SpeechRecognition::StartRecording(DOMMediaStream* aDOMStream)
 {
   // hold a reference so that the underlying stream
   // doesn't get Destroy()'ed
   mDOMStream = aDOMStream;
 
-  if (NS_WARN_IF(!mDOMStream->GetPlaybackStream())) {
+  if (NS_WARN_IF(!mDOMStream->GetStream())) {
     return NS_ERROR_UNEXPECTED;
   }
   mSpeechListener = new SpeechStreamListener(this);
-  mDOMStream->GetPlaybackStream()->AddListener(mSpeechListener);
+  mDOMStream->GetStream()->AddListener(mSpeechListener);
 
   mEndpointer.StartSession();
 
   return mSpeechDetectionTimer->Init(this, kSPEECH_DETECTION_TIMEOUT_MS,
                                      nsITimer::TYPE_ONE_SHOT);
 }
 
 NS_IMETHODIMP
 SpeechRecognition::StopRecording()
 {
   // we only really need to remove the listener explicitly when testing,
   // as our JS code still holds a reference to mDOMStream and only assigning
   // it to nullptr isn't guaranteed to free the stream and the listener.
-  mDOMStream->GetPlaybackStream()->RemoveListener(mSpeechListener);
+  mDOMStream->GetStream()->RemoveListener(mSpeechListener);
   mSpeechListener = nullptr;
   mDOMStream = nullptr;
 
   mEndpointer.EndSession();
   DispatchTrustedEvent(NS_LITERAL_STRING("audioend"));
 
   return NS_OK;
 }
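
StartRecording()/StopRecording() above attach a listener to the stream when recording begins and remove it explicitly when recording stops, since dropping the JS reference alone does not guarantee the listener is freed. A sketch of that lifecycle with stand-in types (timers and error handling from the real code are omitted):

#include <memory>

struct Listener {};

struct Stream {
  Listener* listener = nullptr;
  void AddListener(Listener* aListener) { listener = aListener; }
  void RemoveListener(Listener* aListener) {
    if (listener == aListener) {
      listener = nullptr;
    }
  }
};

struct RecognizerSketch {
  Stream* stream = nullptr;
  std::unique_ptr<Listener> speechListener;

  void StartRecording(Stream* aStream) {
    stream = aStream;  // keep the stream alive for the session
    speechListener.reset(new Listener());
    stream->AddListener(speechListener.get());
  }

  void StopRecording() {
    stream->RemoveListener(speechListener.get());  // explicit detach
    speechListener.reset();
    stream = nullptr;
  }
};
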
--- a/dom/media/webspeech/recognition/SpeechStreamListener.cpp
+++ b/dom/media/webspeech/recognition/SpeechStreamListener.cpp
@@ -28,19 +28,17 @@ SpeechStreamListener::~SpeechStreamListe
                   static_cast<DOMEventTargetHelper*>(forgottenRecognition));
 }
 
 void
 SpeechStreamListener::NotifyQueuedTrackChanges(MediaStreamGraph* aGraph,
                                                TrackID aID,
                                                StreamTime aTrackOffset,
                                                uint32_t aTrackEvents,
-                                               const MediaSegment& aQueuedMedia,
-                                               MediaStream* aInputStream,
-                                               TrackID aInputTrackID)
+                                               const MediaSegment& aQueuedMedia)
 {
   AudioSegment* audio = const_cast<AudioSegment*>(
     static_cast<const AudioSegment*>(&aQueuedMedia));
 
   AudioSegment::ChunkIterator iterator(*audio);
   while (!iterator.IsEnded()) {
     // Skip over-large chunks so we don't crash!
     if (iterator->GetDuration() > INT_MAX) {
--- a/dom/media/webspeech/recognition/SpeechStreamListener.h
+++ b/dom/media/webspeech/recognition/SpeechStreamListener.h
@@ -22,19 +22,17 @@ class SpeechStreamListener : public Medi
 {
 public:
   explicit SpeechStreamListener(SpeechRecognition* aRecognition);
   ~SpeechStreamListener();
 
   virtual void NotifyQueuedTrackChanges(MediaStreamGraph* aGraph, TrackID aID,
                                         StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const MediaSegment& aQueuedMedia,
-                                        MediaStream* aInputStream,
-                                        TrackID aInputTrackID) override;
+                                        const MediaSegment& aQueuedMedia) override;
 
   virtual void NotifyEvent(MediaStreamGraph* aGraph,
                            MediaStreamListener::MediaStreamGraphEvent event) override;
 
 private:
   template<typename SampleFormatType>
   void ConvertAndDispatchAudioChunk(int aDuration, float aVolume, SampleFormatType* aData, TrackRate aTrackRate);
   nsRefPtr<SpeechRecognition> mRecognition;
--- a/dom/media/webspeech/synth/nsSpeechTask.cpp
+++ b/dom/media/webspeech/synth/nsSpeechTask.cpp
@@ -16,18 +16,16 @@
 #ifdef GetCurrentTime
 #undef GetCurrentTime
 #endif
 
 #undef LOG
 extern PRLogModuleInfo* GetSpeechSynthLog();
 #define LOG(type, msg) MOZ_LOG(GetSpeechSynthLog(), type, msg)
 
-#define AUDIO_TRACK 1
-
 namespace mozilla {
 namespace dom {
 
 class SynthStreamListener : public MediaStreamListener
 {
 public:
   explicit SynthStreamListener(nsSpeechTask* aSpeechTask) :
     mSpeechTask(aSpeechTask),
@@ -190,17 +188,17 @@ nsSpeechTask::Setup(nsISpeechTaskCallbac
   // XXX: Support more than one channel
   if(NS_WARN_IF(!(aChannels == 1))) {
     return NS_ERROR_FAILURE;
   }
 
   mChannels = aChannels;
 
   AudioSegment* segment = new AudioSegment();
-  mStream->AddAudioTrack(AUDIO_TRACK, aRate, 0, segment);
+  mStream->AddAudioTrack(1, aRate, 0, segment);
   mStream->AddAudioOutput(this);
   mStream->SetAudioOutputVolume(this, mVolume);
 
   return NS_OK;
 }
 
 static nsRefPtr<mozilla::SharedBuffer>
 makeSamples(int16_t* aData, uint32_t aDataLen)
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -678,25 +678,25 @@ nsresult MediaPipelineTransmit::Transpor
 
   return NS_OK;
 }
 
 nsresult MediaPipelineTransmit::ReplaceTrack(DOMMediaStream *domstream,
                                              const std::string& track_id) {
   // MainThread, checked in calls we make
   MOZ_MTLOG(ML_DEBUG, "Reattaching pipeline " << description_ << " to stream "
-            << static_cast<void *>(domstream->GetOwnedStream())
+            << static_cast<void *>(domstream->GetStream())
             << " track " << track_id << " conduit type=" <<
             (conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
 
   if (domstream_) { // may be excessive paranoia
     DetachMediaStream();
   }
   domstream_ = domstream; // Detach clears it
-  stream_ = domstream->GetOwnedStream();
+  stream_ = domstream->GetStream();
   // Unsets the track id after RemoveListener() takes effect.
   listener_->UnsetTrackId(stream_->GraphImpl());
   track_id_ = track_id;
   AttachToTrack(track_id);
   return NS_OK;
 }
 
 void MediaPipeline::DisconnectTransport_s(TransportInfo &info) {
@@ -863,19 +863,17 @@ NotifyRealtimeData(MediaStreamGraph* gra
 
   NewData(graph, tid, offset, events, media);
 }
 
 void MediaPipelineTransmit::PipelineListener::
 NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
                          StreamTime offset,
                          uint32_t events,
-                         const MediaSegment& queued_media,
-                         MediaStream* aInputStream,
-                         TrackID aInputTrackID) {
+                         const MediaSegment& queued_media) {
   MOZ_MTLOG(ML_DEBUG, "MediaPipeline::NotifyQueuedTrackChanges()");
 
   // ignore non-direct data if we're also getting direct data
   if (!direct_connect_) {
     NewData(graph, tid, offset, events, queued_media);
   }
 }
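
The comment restored above explains that queued notifications are ignored whenever the pipeline also receives direct (real-time) data, so the conduit is not fed the same media twice. A stand-in sketch of that guard:

struct MediaSegment {};

struct PipelineListenerSketch {
  bool directConnect = false;

  void NewData(const MediaSegment& /*aMedia*/) {
    // ... hand the samples or frames to the conduit ...
  }

  // Real-time path: always forwarded.
  void NotifyRealtimeData(const MediaSegment& aMedia) { NewData(aMedia); }

  // Queued path: only used as a fallback when no direct connection exists.
  void NotifyQueuedTrackChanges(const MediaSegment& aQueued) {
    if (!directConnect) {
      NewData(aQueued);
    }
  }
};
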
 
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
@@ -383,17 +383,17 @@ public:
                         const std::string& track_id,
                         int level,
                         bool is_video,
                         RefPtr<MediaSessionConduit> conduit,
                         RefPtr<TransportFlow> rtp_transport,
                         RefPtr<TransportFlow> rtcp_transport,
                         nsAutoPtr<MediaPipelineFilter> filter) :
       MediaPipeline(pc, TRANSMIT, main_thread, sts_thread,
-                    domstream->GetOwnedStream(), track_id, level,
+                    domstream->GetStream(), track_id, level,
                     conduit, rtp_transport, rtcp_transport, filter),
       listener_(new PipelineListener(conduit)),
       domstream_(domstream),
       is_video_(is_video)
   {}
 
   // Initialize (stuff here may fail)
   virtual nsresult Init() override;
@@ -477,19 +477,17 @@ public:
       MutexAutoLock lock(mMutex);
       return track_id_external_;
     }
 
     // Implement MediaStreamListener
     virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
                                           StreamTime offset,
                                           uint32_t events,
-                                          const MediaSegment& queued_media,
-                                          MediaStream* input_stream,
-                                          TrackID input_tid) override;
+                                          const MediaSegment& queued_media) override;
     virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime) override {}
 
     // Implement MediaStreamDirectListener
     virtual void NotifyRealtimeData(MediaStreamGraph* graph, TrackID tid,
                                     StreamTime offset,
                                     uint32_t events,
                                     const MediaSegment& media) override;
 
@@ -626,19 +624,17 @@ class MediaPipelineReceiveAudio : public
         MOZ_CRASH();
       }
     }
 
     // Implement MediaStreamListener
     virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
                                           StreamTime offset,
                                           uint32_t events,
-                                          const MediaSegment& queued_media,
-                                          MediaStream* input_stream,
-                                          TrackID input_tid) override {}
+                                          const MediaSegment& queued_media) override {}
     virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
 
    private:
     RefPtr<MediaSessionConduit> conduit_;
   };
 
   RefPtr<PipelineListener> listener_;
 };
@@ -724,19 +720,17 @@ class MediaPipelineReceiveVideo : public
    public:
     PipelineListener(SourceMediaStream * source, TrackID track_id,
                      bool queue_track);
 
     // Implement MediaStreamListener
     virtual void NotifyQueuedTrackChanges(MediaStreamGraph* graph, TrackID tid,
                                           StreamTime offset,
                                           uint32_t events,
-                                          const MediaSegment& queued_media,
-                                          MediaStream* input_stream,
-                                          TrackID input_tid) override {}
+                                          const MediaSegment& queued_media) override {}
     virtual void NotifyPull(MediaStreamGraph* graph, StreamTime desired_time) override;
 
     // Accessors for external writes from the renderer
     void FrameSizeChange(unsigned int width,
                          unsigned int height,
                          unsigned int number_of_streams) {
       ReentrantMonitorAutoEnter enter(monitor_);
 
--- a/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
+++ b/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
@@ -456,31 +456,31 @@ MediaPipelineFactory::CreateMediaPipelin
   MOZ_MTLOG(ML_DEBUG, __FUNCTION__ << ": Creating pipeline for "
             << numericTrackId << " -> " << aTrack.GetTrackId());
 
   if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
     pipeline = new MediaPipelineReceiveAudio(
         mPC->GetHandle(),
         mPC->GetMainThread().get(),
         mPC->GetSTSThread(),
-        stream->GetMediaStream()->GetInputStream(),
+        stream->GetMediaStream()->GetStream(),
         aTrack.GetTrackId(),
         numericTrackId,
         aLevel,
         static_cast<AudioSessionConduit*>(aConduit.get()), // Ugly downcast.
         aRtpFlow,
         aRtcpFlow,
         aFilter,
         queue_track);
   } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
     pipeline = new MediaPipelineReceiveVideo(
         mPC->GetHandle(),
         mPC->GetMainThread().get(),
         mPC->GetSTSThread(),
-        stream->GetMediaStream()->GetInputStream(),
+        stream->GetMediaStream()->GetStream(),
         aTrack.GetTrackId(),
         numericTrackId,
         aLevel,
         static_cast<VideoSessionConduit*>(aConduit.get()), // Ugly downcast.
         aRtpFlow,
         aRtcpFlow,
         aFilter,
         queue_track);
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -237,17 +237,17 @@ public:
                     jrv.ErrorCodeAsInt());
       }
     }
 
     if (notifyStream) {
       // Start currentTime from the point where this stream was successfully
       // returned.
       aStream->SetLogicalStreamStartTime(
-          aStream->GetPlaybackStream()->GetCurrentTime());
+          aStream->GetStream()->GetCurrentTime());
 
       JSErrorResult rv;
       CSFLogInfo(logTag, "Calling OnAddStream(%s)", streamId.c_str());
       mObserver->OnAddStream(*aStream, rv);
       if (rv.Failed()) {
         CSFLogError(logTag, ": OnAddStream() failed! Error: %u",
                     rv.ErrorCodeAsInt());
       }
@@ -466,17 +466,17 @@ PeerConnectionImpl::MakeMediaStream()
     // we're either certain that we need isolation for the streams, OR
     // we're not sure and we can fix the stream in SetDtlsConnected
     nsCOMPtr<nsIPrincipal> principal =
       do_CreateInstance(NS_NULLPRINCIPAL_CONTRACTID);
     stream->CombineWithPrincipal(principal);
   }
 #endif
 
-  CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
+  CSFLogDebug(logTag, "Created media stream %p, inner: %p", stream.get(), stream->GetStream());
 
   return stream.forget();
 }
 
 nsresult
 PeerConnectionImpl::CreateRemoteSourceStreamInfo(nsRefPtr<RemoteSourceStreamInfo>*
                                                  aInfo,
                                                  const std::string& aStreamID)
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
@@ -1299,17 +1299,17 @@ void
 RemoteSourceStreamInfo::StartReceiving()
 {
   if (mReceiving || mPipelines.empty()) {
     return;
   }
 
   mReceiving = true;
 
-  SourceMediaStream* source = GetMediaStream()->GetInputStream()->AsSourceStream();
+  SourceMediaStream* source = GetMediaStream()->GetStream()->AsSourceStream();
   source->FinishAddTracks();
   source->SetPullEnabled(true);
   // AdvanceKnownTracksTicksTime(HEAT_DEATH_OF_UNIVERSE) means that in
   // theory per the API, we can't add more tracks before that
   // time. However, the impl actually allows it, and it avoids a whole
   // bunch of locking that would be required (and potential blocking)
   // if we used smaller values and updated them on each NotifyPull.
   source->AdvanceKnownTracksTime(STREAM_TIME_MAX);
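
StartReceiving() above finishes track registration, enables pulling, and advances the known-tracks time to the maximum so no per-pull bookkeeping (or the locking it would need) is required. A standalone sketch of that sequence with stand-in types (STREAM_TIME_MAX here is a local constant, not the Gecko macro):

#include <cstdint>
#include <limits>

using StreamTime = int64_t;
const StreamTime STREAM_TIME_MAX = std::numeric_limits<StreamTime>::max();

struct SourceStreamSketch {
  bool tracksFinished = false;
  bool pullEnabled = false;
  StreamTime knownTracksTime = 0;

  void FinishAddTracks() { tracksFinished = true; }
  void SetPullEnabled(bool aEnabled) { pullEnabled = aEnabled; }
  void AdvanceKnownTracksTime(StreamTime aTime) { knownTracksTime = aTime; }
};

struct ReceiverSketch {
  bool receiving = false;

  void StartReceiving(SourceStreamSketch& aSource) {
    if (receiving) {
      return;
    }
    receiving = true;
    aSource.FinishAddTracks();
    aSource.SetPullEnabled(true);
    // Declaring "no more tracks before STREAM_TIME_MAX" avoids per-pull
    // updates at the cost of strictness, as the original comment notes.
    aSource.AdvanceKnownTracksTime(STREAM_TIME_MAX);
  }
};
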
--- a/media/webrtc/signaling/test/FakeMediaStreams.h
+++ b/media/webrtc/signaling/test/FakeMediaStreams.h
@@ -69,31 +69,28 @@ class Fake_VideoSink {
 public:
   Fake_VideoSink() {}
   virtual void SegmentReady(mozilla::MediaSegment* aSegment) = 0;
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Fake_VideoSink)
 protected:
   virtual ~Fake_VideoSink() {}
 };
 
-class Fake_MediaStream;
 class Fake_SourceMediaStream;
 
 class Fake_MediaStreamListener
 {
 protected:
   virtual ~Fake_MediaStreamListener() {}
 
 public:
   virtual void NotifyQueuedTrackChanges(mozilla::MediaStreamGraph* aGraph, mozilla::TrackID aID,
                                         mozilla::StreamTime aTrackOffset,
                                         uint32_t aTrackEvents,
-                                        const mozilla::MediaSegment& aQueuedMedia,
-                                        Fake_MediaStream* aInputStream,
-                                        mozilla::TrackID aInputTrackID) = 0;
+                                        const mozilla::MediaSegment& aQueuedMedia)  = 0;
   virtual void NotifyPull(mozilla::MediaStreamGraph* aGraph, mozilla::StreamTime aDesiredTime) = 0;
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(Fake_MediaStreamListener)
 };
 
 class Fake_MediaStreamDirectListener : public Fake_MediaStreamListener
 {
 protected:
@@ -361,19 +358,16 @@ public:
     return ds.forget();
   }
 
   virtual void Stop() {} // Really DOMLocalMediaStream
 
   virtual bool AddDirectListener(Fake_MediaStreamListener *aListener) { return false; }
   virtual void RemoveDirectListener(Fake_MediaStreamListener *aListener) {}
 
-  Fake_MediaStream *GetInputStream() { return mMediaStream; }
-  Fake_MediaStream *GetOwnedStream() { return mMediaStream; }
-  Fake_MediaStream *GetPlaybackStream() { return mMediaStream; }
   Fake_MediaStream *GetStream() { return mMediaStream; }
   std::string GetId() const { return mID; }
   void AssignId(const std::string& id) { mID = id; }
 
   // Hints to tell the SDP generator about whether this
   // MediaStream probably has audio and/or video
   typedef uint8_t TrackTypeHints;
   enum {
@@ -409,32 +403,16 @@ public:
   HasTrack(const Fake_MediaStreamTrack& aTrack) const
   {
     return ((mHintContents & HINT_CONTENTS_AUDIO) && aTrack.AsAudioStreamTrack()) ||
            ((mHintContents & HINT_CONTENTS_VIDEO) && aTrack.AsVideoStreamTrack());
   }
 
   void SetTrackEnabled(mozilla::TrackID aTrackID, bool aEnabled) {}
 
-  Fake_MediaStreamTrack*
-  CreateOwnDOMTrack(mozilla::TrackID aTrackID, mozilla::MediaSegment::Type aType)
-  {
-    switch(aType) {
-      case mozilla::MediaSegment::AUDIO: {
-        return mAudioTrack;
-      }
-      case mozilla::MediaSegment::VIDEO: {
-        return mVideoTrack;
-      }
-      default: {
-        MOZ_CRASH("Unkown media type");
-      }
-    }
-  }
-
   class PrincipalChangeObserver
   {
   public:
     virtual void PrincipalChanged(Fake_DOMMediaStream* aMediaStream) = 0;
   };
   void AddPrincipalChangeObserver(void* ignoredObserver) {}
   void RemovePrincipalChangeObserver(void* ignoredObserver) {}
 
--- a/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
+++ b/media/webrtc/signaling/test/FakeMediaStreamsImpl.h
@@ -111,19 +111,17 @@ void Fake_AudioStreamSource::Periodic() 
   segment.AppendFrames(samples.forget(), channels, AUDIO_BUFFER_SIZE);
 
   for(std::set<Fake_MediaStreamListener *>::iterator it = mListeners.begin();
        it != mListeners.end(); ++it) {
     (*it)->NotifyQueuedTrackChanges(nullptr, // Graph
                                     0, // TrackID
                                     0, // Offset TODO(ekr@rtfm.com) fix
                                     0, // ???
-                                    segment,
-                                    nullptr, // Input stream
-                                    -1);     // Input track id
+                                    segment);
   }
 }
 
 
 // Fake_MediaPeriodic
 NS_IMPL_ISUPPORTS(Fake_MediaPeriodic, nsITimerCallback)
 
 NS_IMETHODIMP