Bug 1290948 - Part 5: TransceiverImpl and some major refactoring. r+drno r=drno
backed out by changeset e077a6e6e842
author Byron Campen [:bwc] <docfaraday@gmail.com>
Wed, 23 Aug 2017 16:12:43 -0500
changeset 443517 1a5f090502b0a27641a0866513a82aa8c545fc14
parent 443516 ffb6e6da955fa81fc4faca06210aa2e5764ed205
child 443518 314675023cd5dd745f7f1b9cccb537b762b495f2
push id 8527
push user Callek@gmail.com
push date Thu, 11 Jan 2018 21:05:50 +0000
treeherder mozilla-beta@95342d212a7a
reviewers drno
bugs 1290948
milestone 59.0a1
Bug 1290948 - Part 5: TransceiverImpl and some major refactoring. r+drno r=drno MozReview-Commit-ID: 3IBAch7xVNG
media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
media/webrtc/signaling/src/mediapipeline/MediaPipelineFilter.cpp
media/webrtc/signaling/src/mediapipeline/moz.build
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.h
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
media/webrtc/signaling/src/peerconnection/moz.build
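
The diff below reworks the MediaPipeline lifecycle: constructors no longer take a track id, m-line level, or transport flows; pipelines are created bare, started and stopped explicitly, and transports are attached or swapped later via UpdateTransport_m. As a minimal sketch only (not part of the patch; the pc/thread/track/conduit names are placeholders inferred from the unit-test changes), a caller of the refactored API would look roughly like this:

    // Hypothetical caller illustrating the post-refactor lifecycle.
    RefPtr<MediaPipelineTransmit> pipeline = new MediaPipelineTransmit(
        pc_handle, main_thread, sts_thread,
        /* is_video = */ false, audio_track, audio_conduit);
    pipeline->Start();                      // begin transmitting on the conduit
    pipeline->UpdateTransport_m(            // attach (or later replace) transports
        rtp_flow, rtcp_flow, nsAutoPtr<MediaPipelineFilter>(nullptr));
    // ... later, e.g. on renegotiation or close:
    pipeline->Stop();                       // stop without tearing anything down
    pipeline->Shutdown_m();                 // detach media, then transport on STS
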
--- a/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
+++ b/media/webrtc/signaling/gtest/mediapipeline_unittest.cpp
@@ -238,17 +238,18 @@ class TransportInfo {
   TransportLayerDtls *dtls_;
 };
 
 class TestAgent {
  public:
   TestAgent() :
       audio_config_(109, "opus", 48000, 960, 2, 64000, false),
       audio_conduit_(mozilla::AudioSessionConduit::Create()),
-      audio_pipeline_() {
+      audio_pipeline_(),
+      use_bundle_(false) {
   }
 
   static void ConnectRtp(TestAgent *client, TestAgent *server) {
     TransportInfo::InitAndConnect(client->audio_rtp_transport_,
                                   server->audio_rtp_transport_);
   }
 
   static void ConnectRtcp(TestAgent *client, TestAgent *server) {
@@ -256,48 +257,34 @@ class TestAgent {
                                   server->audio_rtcp_transport_);
   }
 
   static void ConnectBundle(TestAgent *client, TestAgent *server) {
     TransportInfo::InitAndConnect(client->bundle_transport_,
                                   server->bundle_transport_);
   }
 
-  virtual void CreatePipelines_s(bool aIsRtcpMux) = 0;
-
-  void Start() {
-    MOZ_MTLOG(ML_DEBUG, "Starting");
-    audio_pipeline_->Init();
-  }
-
-  void StopInt() {
-  }
+  virtual void CreatePipeline(bool aIsRtcpMux) = 0;
 
   void Stop() {
     MOZ_MTLOG(ML_DEBUG, "Stopping");
 
     if (audio_pipeline_)
-      audio_pipeline_->ShutdownMedia_m();
-
-    mozilla::SyncRunnable::DispatchToThread(
-      test_utils->sts_target(),
-      WrapRunnable(this, &TestAgent::StopInt));
+      audio_pipeline_->Stop();
   }
 
   void Shutdown_s() {
     audio_rtp_transport_.Shutdown();
     audio_rtcp_transport_.Shutdown();
     bundle_transport_.Shutdown();
-    if (audio_pipeline_)
-      audio_pipeline_->DetachTransport_s();
   }
 
   void Shutdown() {
     if (audio_pipeline_)
-      audio_pipeline_->ShutdownMedia_m();
+      audio_pipeline_->Shutdown_m();
     if (audio_stream_track_)
       audio_stream_track_->Stop();
 
     mozilla::SyncRunnable::DispatchToThread(
       test_utils->sts_target(),
       WrapRunnable(this, &TestAgent::Shutdown_s));
   }
 
@@ -324,131 +311,140 @@ class TestAgent {
   int GetAudioRtcpCountSent() {
     return audio_pipeline_->rtcp_packets_sent();
   }
 
   int GetAudioRtcpCountReceived() {
     return audio_pipeline_->rtcp_packets_received();
   }
 
+
+  void SetUsingBundle(bool use_bundle) {
+    use_bundle_ = use_bundle;
+  }
+
  protected:
   mozilla::AudioCodecConfig audio_config_;
   RefPtr<mozilla::MediaSessionConduit> audio_conduit_;
   RefPtr<FakeAudioStreamTrack> audio_stream_track_;
   // TODO(bcampen@mozilla.com): Right now this does not let us test RTCP in
   // both directions; only the sender's RTCP is sent, but the receiver should
   // be sending it too.
   RefPtr<mozilla::MediaPipeline> audio_pipeline_;
   TransportInfo audio_rtp_transport_;
   TransportInfo audio_rtcp_transport_;
   TransportInfo bundle_transport_;
+  bool use_bundle_;
 };
 
 class TestAgentSend : public TestAgent {
  public:
-  TestAgentSend() : use_bundle_(false) {
+  TestAgentSend() {
     mozilla::MediaConduitErrorCode err =
         static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get())->
         ConfigureSendMediaCodec(&audio_config_);
     EXPECT_EQ(mozilla::kMediaConduitNoError, err);
 
     audio_stream_track_ = new FakeAudioStreamTrack();
   }
 
-  virtual void CreatePipelines_s(bool aIsRtcpMux) {
+  virtual void CreatePipeline(bool aIsRtcpMux) {
 
     std::string test_pc;
 
     if (aIsRtcpMux) {
       ASSERT_FALSE(audio_rtcp_transport_.flow_);
     }
 
+    RefPtr<MediaPipelineTransmit> audio_pipeline =
+      new mozilla::MediaPipelineTransmit(
+        test_pc,
+        nullptr,
+        test_utils->sts_target(),
+        false,
+        audio_stream_track_.get(),
+        audio_conduit_);
+
+    audio_pipeline->Start();
+
+    audio_pipeline_ = audio_pipeline;
+
     RefPtr<TransportFlow> rtp(audio_rtp_transport_.flow_);
     RefPtr<TransportFlow> rtcp(audio_rtcp_transport_.flow_);
 
     if (use_bundle_) {
       rtp = bundle_transport_.flow_;
       rtcp = nullptr;
     }
 
-    audio_pipeline_ = new mozilla::MediaPipelineTransmit(
-        test_pc,
-        nullptr,
-        test_utils->sts_target(),
-        audio_stream_track_.get(),
-        "audio_track_fake_uuid",
-        1,
-        audio_conduit_,
-        rtp,
-        rtcp,
-        nsAutoPtr<MediaPipelineFilter>());
+    audio_pipeline_->UpdateTransport_m(
+        rtp, rtcp, nsAutoPtr<MediaPipelineFilter>(nullptr));
   }
-
-  void SetUsingBundle(bool use_bundle) {
-    use_bundle_ = use_bundle;
-  }
-
- private:
-  bool use_bundle_;
 };
 
 
 class TestAgentReceive : public TestAgent {
  public:
 
   TestAgentReceive() {
     std::vector<mozilla::AudioCodecConfig *> codecs;
     codecs.push_back(&audio_config_);
 
     mozilla::MediaConduitErrorCode err =
         static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get())->
         ConfigureRecvMediaCodecs(codecs);
     EXPECT_EQ(mozilla::kMediaConduitNoError, err);
   }
 
-  virtual void CreatePipelines_s(bool aIsRtcpMux) {
-      std::string test_pc;
+  virtual void CreatePipeline(bool aIsRtcpMux) {
+    std::string test_pc;
 
     if (aIsRtcpMux) {
       ASSERT_FALSE(audio_rtcp_transport_.flow_);
     }
 
     audio_pipeline_ = new mozilla::MediaPipelineReceiveAudio(
         test_pc,
         nullptr,
         test_utils->sts_target(),
-        new FakeSourceMediaStream(), "audio_track_fake_uuid", 1, 1,
         static_cast<mozilla::AudioSessionConduit *>(audio_conduit_.get()),
-        audio_rtp_transport_.flow_,
-        audio_rtcp_transport_.flow_,
-        bundle_filter_);
+        nullptr);
+
+    audio_pipeline_->Start();
+
+    RefPtr<TransportFlow> rtp(audio_rtp_transport_.flow_);
+    RefPtr<TransportFlow> rtcp(audio_rtcp_transport_.flow_);
+
+    if (use_bundle_) {
+      rtp = bundle_transport_.flow_;
+      rtcp = nullptr;
+    }
+
+    audio_pipeline_->UpdateTransport_m(rtp, rtcp, bundle_filter_);
   }
 
   void SetBundleFilter(nsAutoPtr<MediaPipelineFilter> filter) {
     bundle_filter_ = filter;
   }
 
   void UpdateFilter_s(
       nsAutoPtr<MediaPipelineFilter> filter) {
-    audio_pipeline_->UpdateTransport_s(1,
-                                       audio_rtp_transport_.flow_,
+    audio_pipeline_->UpdateTransport_s(audio_rtp_transport_.flow_,
                                        audio_rtcp_transport_.flow_,
                                        filter);
   }
 
  private:
   nsAutoPtr<MediaPipelineFilter> bundle_filter_;
 };
 
 
 class MediaPipelineTest : public ::testing::Test {
  public:
   ~MediaPipelineTest() {
-    p1_.Stop();
-    p2_.Stop();
     p1_.Shutdown();
     p2_.Shutdown();
   }
 
   static void SetUpTestCase() {
     test_utils = new MtransportTestUtils();
     NSS_NoDB_Init(nullptr);
     NSS_SetDomesticPolicy();
@@ -489,26 +485,18 @@ class MediaPipelineTest : public ::testi
     // make any sense.
     ASSERT_FALSE(!aIsRtcpMux && bundle);
 
     p2_.SetBundleFilter(initialFilter);
 
     // Setup transport flows
     InitTransports(aIsRtcpMux);
 
-    mozilla::SyncRunnable::DispatchToThread(
-      test_utils->sts_target(),
-      WrapRunnable(&p1_, &TestAgent::CreatePipelines_s, aIsRtcpMux), NS_DISPATCH_SYNC);
-
-    mozilla::SyncRunnable::DispatchToThread(
-      test_utils->sts_target(),
-      WrapRunnable(&p2_, &TestAgent::CreatePipelines_s, aIsRtcpMux), NS_DISPATCH_SYNC);
-
-    p2_.Start();
-    p1_.Start();
+    p1_.CreatePipeline(aIsRtcpMux);
+    p2_.CreatePipeline(aIsRtcpMux);
 
     if (bundle) {
       PR_Sleep(ms_until_filter_update);
 
       // Leaving refinedFilter not set implies we want to just update with
       // the other side's SSRC
       if (!refinedFilter) {
         refinedFilter = new MediaPipelineFilter;
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -21,16 +21,17 @@
 #include "ImageTypes.h"
 #include "ImageContainer.h"
 #include "DOMMediaStream.h"
 #include "MediaStreamTrack.h"
 #include "MediaStreamListener.h"
 #include "MediaStreamVideoSink.h"
 #include "VideoUtils.h"
 #include "VideoStreamTrack.h"
+#include "MediaEngine.h"
 
 #include "nsError.h"
 #include "AudioSegment.h"
 #include "MediaSegment.h"
 #include "MediaPipelineFilter.h"
 #include "RtpLogger.h"
 #include "databuffer.h"
 #include "transportflow.h"
@@ -559,84 +560,66 @@ protected:
 };
 
 static char kDTLSExporterLabel[] = "EXTRACTOR-dtls_srtp";
 
 MediaPipeline::MediaPipeline(const std::string& pc,
                              Direction direction,
                              nsCOMPtr<nsIEventTarget> main_thread,
                              nsCOMPtr<nsIEventTarget> sts_thread,
-                             const std::string& track_id,
-                             int level,
-                             RefPtr<MediaSessionConduit> conduit,
-                             RefPtr<TransportFlow> rtp_transport,
-                             RefPtr<TransportFlow> rtcp_transport,
-                             nsAutoPtr<MediaPipelineFilter> filter)
+                             RefPtr<MediaSessionConduit> conduit)
   : direction_(direction),
-    track_id_(track_id),
-    level_(level),
+    level_(0),
     conduit_(conduit),
-    rtp_(rtp_transport, rtcp_transport ? RTP : MUX),
-    rtcp_(rtcp_transport ? rtcp_transport : rtp_transport,
-          rtcp_transport ? RTCP : MUX),
+    rtp_(nullptr, RTP),
+    rtcp_(nullptr, RTCP),
     main_thread_(main_thread),
     sts_thread_(sts_thread),
     rtp_packets_sent_(0),
     rtcp_packets_sent_(0),
     rtp_packets_received_(0),
     rtcp_packets_received_(0),
     rtp_bytes_sent_(0),
     rtp_bytes_received_(0),
     pc_(pc),
     description_(),
-    filter_(filter),
     rtp_parser_(webrtc::RtpHeaderParser::Create()){
-  // To indicate rtcp-mux rtcp_transport should be nullptr.
-  // Therefore it's an error to send in the same flow for
-  // both rtp and rtcp.
-  MOZ_ASSERT(rtp_transport != rtcp_transport);
-
   // PipelineTransport() will access this->sts_thread_; moved here for safety
   transport_ = new PipelineTransport(this);
-}
-
-MediaPipeline::~MediaPipeline() {
-  ASSERT_ON_THREAD(main_thread_);
-  CSFLogInfo(LOGTAG, "Destroying MediaPipeline: %s", description_.c_str());
-}
-
-nsresult MediaPipeline::Init() {
-  ASSERT_ON_THREAD(main_thread_);
   packet_dumper_ = new PacketDumper(pc_);
 
   if (direction_ == RECEIVE) {
     conduit_->SetReceiverTransport(transport_);
   } else {
     conduit_->SetTransmitterTransport(transport_);
   }
+}
+
+MediaPipeline::~MediaPipeline() {
+  CSFLogInfo(LOGTAG, "Destroying MediaPipeline: %s", description_.c_str());
+  // MediaSessionConduit insists that it be released on main.
+  RUN_ON_THREAD(main_thread_, WrapRelease(conduit_.forget()),
+      NS_DISPATCH_NORMAL);
+}
+
+void
+MediaPipeline::Shutdown_m()
+{
+  CSFLogInfo(LOGTAG, "%s in %s", description_.c_str(), __FUNCTION__);
+
+  Stop();
+  DetachMedia();
 
   RUN_ON_THREAD(sts_thread_,
                 WrapRunnable(
                     RefPtr<MediaPipeline>(this),
-                    &MediaPipeline::Init_s),
+                    &MediaPipeline::DetachTransport_s),
                 NS_DISPATCH_NORMAL);
-
-  return NS_OK;
 }
 
-nsresult MediaPipeline::Init_s() {
-  ASSERT_ON_THREAD(sts_thread_);
-
-  return AttachTransport_s();
-}
-
-
-// Disconnect us from the transport so that we can cleanly destruct the
-// pipeline on the main thread.  ShutdownMedia_m() must have already been
-// called
 void
 MediaPipeline::DetachTransport_s()
 {
   ASSERT_ON_THREAD(sts_thread_);
 
   disconnect_all();
   transport_->Detach();
   rtp_.Detach();
@@ -666,57 +649,54 @@ MediaPipeline::AttachTransport_s()
   }
 
   transport_->Attach(this);
 
   return NS_OK;
 }
 
 void
-MediaPipeline::UpdateTransport_m(int level,
-                                 RefPtr<TransportFlow> rtp_transport,
+MediaPipeline::UpdateTransport_m(RefPtr<TransportFlow> rtp_transport,
                                  RefPtr<TransportFlow> rtcp_transport,
                                  nsAutoPtr<MediaPipelineFilter> filter)
 {
   RUN_ON_THREAD(sts_thread_,
                 WrapRunnable(
-                    this,
+                    RefPtr<MediaPipeline>(this),
                     &MediaPipeline::UpdateTransport_s,
-                    level,
                     rtp_transport,
                     rtcp_transport,
                     filter),
                 NS_DISPATCH_NORMAL);
 }
 
 void
-MediaPipeline::UpdateTransport_s(int level,
-                                 RefPtr<TransportFlow> rtp_transport,
+MediaPipeline::UpdateTransport_s(RefPtr<TransportFlow> rtp_transport,
                                  RefPtr<TransportFlow> rtcp_transport,
                                  nsAutoPtr<MediaPipelineFilter> filter)
 {
   bool rtcp_mux = false;
   if (!rtcp_transport) {
     rtcp_transport = rtp_transport;
     rtcp_mux = true;
   }
 
   if ((rtp_transport != rtp_.transport_) ||
       (rtcp_transport != rtcp_.transport_)) {
     disconnect_all();
     transport_->Detach();
     rtp_.Detach();
     rtcp_.Detach();
-    rtp_ = TransportInfo(rtp_transport, rtcp_mux ? MUX : RTP);
-    rtcp_ = TransportInfo(rtcp_transport, rtcp_mux ? MUX : RTCP);
-    AttachTransport_s();
+    if (rtp_transport && rtcp_transport) {
+      rtp_ = TransportInfo(rtp_transport, rtcp_mux ? MUX : RTP);
+      rtcp_ = TransportInfo(rtcp_transport, rtcp_mux ? MUX : RTCP);
+      AttachTransport_s();
+    }
   }
 
-  level_ = level;
-
   if (filter_ && filter) {
     // Use the new filter, but don't forget any remote SSRCs that we've learned
     // by receiving traffic.
     filter_->Update(*filter);
   } else {
     filter_ = filter;
   }
 }
@@ -882,37 +862,39 @@ nsresult MediaPipeline::TransportReady_s
   if (!info.send_srtp_ || !info.recv_srtp_) {
     CSFLogError(LOGTAG, "Couldn't create SRTP flow for %s",
                 ToString(info.type_));
     info.state_ = MP_CLOSED;
     UpdateRtcpMuxState(info);
     return NS_ERROR_FAILURE;
   }
 
-  CSFLogInfo(LOGTAG, "Listening for %s packets received on %p",
-             ToString(info.type_), dtls->downward());
+  if (direction_ == RECEIVE) {
+    CSFLogInfo(LOGTAG, "Listening for %s packets received on %p",
+               ToString(info.type_), dtls->downward());
 
-  switch (info.type_) {
-    case RTP:
-      dtls->downward()->SignalPacketReceived.connect(
-          this,
-          &MediaPipeline::RtpPacketReceived);
-      break;
-    case RTCP:
-      dtls->downward()->SignalPacketReceived.connect(
-          this,
-          &MediaPipeline::RtcpPacketReceived);
-      break;
-    case MUX:
-      dtls->downward()->SignalPacketReceived.connect(
-          this,
-          &MediaPipeline::PacketReceived);
-      break;
-    default:
-      MOZ_CRASH();
+    switch (info.type_) {
+      case RTP:
+        dtls->downward()->SignalPacketReceived.connect(
+            this,
+            &MediaPipeline::RtpPacketReceived);
+        break;
+      case RTCP:
+        dtls->downward()->SignalPacketReceived.connect(
+            this,
+            &MediaPipeline::RtcpPacketReceived);
+        break;
+      case MUX:
+        dtls->downward()->SignalPacketReceived.connect(
+            this,
+            &MediaPipeline::PacketReceived);
+        break;
+      default:
+        MOZ_CRASH();
+    }
   }
 
   info.state_ = MP_OPEN;
   UpdateRtcpMuxState(info);
   return NS_OK;
 }
 
 nsresult MediaPipeline::TransportFailed_s(TransportInfo &info) {
@@ -1007,16 +989,20 @@ void MediaPipeline::increment_rtcp_packe
                description_.c_str(), this, static_cast<void *>(rtp_.transport_),
                rtcp_packets_received_);
   }
 }
 
 void MediaPipeline::RtpPacketReceived(TransportLayer *layer,
                                       const unsigned char *data,
                                       size_t len) {
+  if (direction_ == TRANSMIT) {
+    return;
+  }
+
   if (!transport_->pipeline()) {
     CSFLogError(LOGTAG, "Discarding incoming packet; transport disconnected");
     return;
   }
 
   if (!conduit_) {
     CSFLogDebug(LOGTAG, "Discarding incoming packet; media disconnected");
     return;
@@ -1030,20 +1016,16 @@ void MediaPipeline::RtpPacketReceived(Tr
   if (rtp_.transport_->state() != TransportLayer::TS_OPEN) {
     CSFLogError(LOGTAG, "Discarding incoming packet; transport not open");
     return;
   }
 
   // This should never happen.
   MOZ_ASSERT(rtp_.recv_srtp_);
 
-  if (direction_ == TRANSMIT) {
-    return;
-  }
-
   if (!len) {
     return;
   }
 
   // Filter out everything but RTP/RTCP
   if (data[0] < 128 || data[0] > 191) {
     return;
   }
@@ -1156,24 +1138,22 @@ void MediaPipeline::RtcpPacketReceived(T
     return;
   }
 
   // Filter out everything but RTP/RTCP
   if (data[0] < 128 || data[0] > 191) {
     return;
   }
 
-  // We do not filter RTCP for send pipelines, since the webrtc.org code for
+  // We do not filter receiver reports, since the webrtc.org code for
   // senders already has logic to ignore RRs that do not apply.
   // TODO bug 1279153: remove SR check for reduced size RTCP
-  if (filter_ && direction_ == RECEIVE) {
-    if (!filter_->FilterSenderReport(data, len)) {
-      CSFLogWarn(LOGTAG, "Dropping incoming RTCP packet; filtered out");
-      return;
-    }
+  if (filter_ && !filter_->FilterSenderReport(data, len)) {
+    CSFLogWarn(LOGTAG, "Dropping incoming RTCP packet; filtered out");
+    return;
   }
 
   packet_dumper_->Dump(
       level_, dom::mozPacketDumpType::Srtcp, false, data, len);
 
   // Make a copy rather than cast away constness
   auto inner_data = MakeUnique<unsigned char[]>(len);
   memcpy(inner_data.get(), data, len);
@@ -1305,24 +1285,24 @@ public:
 
   void OnVideoFrameConverted(unsigned char* aVideoFrame,
                              unsigned int aVideoFrameLength,
                              unsigned short aWidth,
                              unsigned short aHeight,
                              VideoType aVideoType,
                              uint64_t aCaptureTime)
   {
-    MOZ_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
+    MOZ_RELEASE_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
     static_cast<VideoSessionConduit*>(conduit_.get())->SendVideoFrame(
       aVideoFrame, aVideoFrameLength, aWidth, aHeight, aVideoType, aCaptureTime);
   }
 
   void OnVideoFrameConverted(webrtc::VideoFrame& aVideoFrame)
   {
-    MOZ_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
+    MOZ_RELEASE_ASSERT(conduit_->type() == MediaSessionConduit::VIDEO);
     static_cast<VideoSessionConduit*>(conduit_.get())->SendVideoFrame(aVideoFrame);
   }
 
   // Implement MediaStreamTrackListener
   void NotifyQueuedChanges(MediaStreamGraph* aGraph,
                            StreamTime aTrackOffset,
                            const MediaSegment& aQueuedMedia) override;
 
@@ -1427,28 +1407,26 @@ protected:
   RefPtr<PipelineListener> listener_;
   Mutex mutex_;
 };
 
 MediaPipelineTransmit::MediaPipelineTransmit(
     const std::string& pc,
     nsCOMPtr<nsIEventTarget> main_thread,
     nsCOMPtr<nsIEventTarget> sts_thread,
+    bool is_video,
     dom::MediaStreamTrack* domtrack,
-    const std::string& track_id,
-    int level,
-    RefPtr<MediaSessionConduit> conduit,
-    RefPtr<TransportFlow> rtp_transport,
-    RefPtr<TransportFlow> rtcp_transport,
-    nsAutoPtr<MediaPipelineFilter> filter) :
-  MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, track_id, level,
-                conduit, rtp_transport, rtcp_transport, filter),
+    RefPtr<MediaSessionConduit> conduit) :
+  MediaPipeline(pc, TRANSMIT, main_thread, sts_thread, conduit),
   listener_(new PipelineListener(conduit)),
-  domtrack_(domtrack)
+  is_video_(is_video),
+  domtrack_(domtrack),
+  transmitting_(false)
 {
+  SetDescription();
   if (!IsVideo()) {
     audio_processing_ = MakeAndAddRef<AudioProxyThread>(static_cast<AudioSessionConduit*>(conduit.get()));
     listener_->SetAudioProxy(audio_processing_);
   }
   else { // Video
     // For video we send frames to an async VideoFrameConverter that calls
     // back to a VideoFrameFeeder that feeds I420 frames to VideoConduit.
 
@@ -1461,32 +1439,68 @@ MediaPipelineTransmit::MediaPipelineTran
   }
 }
 
 MediaPipelineTransmit::~MediaPipelineTransmit()
 {
   if (feeder_) {
     feeder_->Detach();
   }
+
+  MOZ_ASSERT(!domtrack_);
 }
 
-nsresult MediaPipelineTransmit::Init() {
-  AttachToTrack(track_id_);
-
-  return MediaPipeline::Init();
-}
-
-void MediaPipelineTransmit::AttachToTrack(const std::string& track_id) {
-  ASSERT_ON_THREAD(main_thread_);
-
+void MediaPipelineTransmit::SetDescription() {
   description_ = pc_ + "| ";
   description_ += conduit_->type() == MediaSessionConduit::AUDIO ?
       "Transmit audio[" : "Transmit video[";
+
+  if (!domtrack_) {
+    description_ += "no track]";
+    return;
+  }
+
+  nsString nsTrackId;
+  domtrack_->GetId(nsTrackId);
+  std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
   description_ += track_id;
   description_ += "]";
+}
+
+void MediaPipelineTransmit::Stop() {
+  ASSERT_ON_THREAD(main_thread_);
+
+  if (!domtrack_ || !transmitting_) {
+    return;
+  }
+
+  transmitting_ = false;
+
+  if (domtrack_->AsAudioStreamTrack()) {
+    domtrack_->RemoveDirectListener(listener_);
+    domtrack_->RemoveListener(listener_);
+  } else if (VideoStreamTrack* video = domtrack_->AsVideoStreamTrack()) {
+    video->RemoveVideoOutput(listener_);
+  } else {
+    MOZ_ASSERT(false, "Unknown track type");
+  }
+
+  conduit_->StopTransmitting();
+}
+
+void MediaPipelineTransmit::Start() {
+  ASSERT_ON_THREAD(main_thread_);
+
+  if (!domtrack_ || transmitting_) {
+    return;
+  }
+
+  transmitting_ = true;
+
+  conduit_->StartTransmitting();
 
   // TODO(ekr@rtfm.com): Check for errors
   CSFLogDebug(LOGTAG, "Attaching pipeline to track %p conduit type=%s", this,
               (conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
 
 #if !defined(MOZILLA_EXTERNAL_LINKAGE)
   // With full duplex we don't risk having audio come in late to the MSG
   // so we won't need a direct listener.
@@ -1510,17 +1524,17 @@ void MediaPipelineTransmit::AttachToTrac
   } else {
     MOZ_ASSERT(false, "Unknown track type");
   }
 }
 
 bool
 MediaPipelineTransmit::IsVideo() const
 {
-  return !!domtrack_->AsVideoStreamTrack();
+  return is_video_;
 }
 
 void MediaPipelineTransmit::UpdateSinkIdentity_m(MediaStreamTrack* track,
                                                  nsIPrincipal* principal,
                                                  const PeerIdentity* sinkIdentity) {
   ASSERT_ON_THREAD(main_thread_);
 
   if (track != nullptr && track != domtrack_) {
@@ -1544,58 +1558,58 @@ void MediaPipelineTransmit::UpdateSinkId
 
   listener_->SetEnabled(enableTrack);
 }
 
 void
 MediaPipelineTransmit::DetachMedia()
 {
   ASSERT_ON_THREAD(main_thread_);
-  if (domtrack_) {
-    if (domtrack_->AsAudioStreamTrack()) {
-      domtrack_->RemoveDirectListener(listener_);
-      domtrack_->RemoveListener(listener_);
-    } else if (VideoStreamTrack* video = domtrack_->AsVideoStreamTrack()) {
-      video->RemoveVideoOutput(listener_);
-    } else {
-      MOZ_ASSERT(false, "Unknown track type");
-    }
-    domtrack_ = nullptr;
-  }
+  domtrack_ = nullptr;
   // Let the listener be destroyed with the pipeline (or later).
 }
 
 nsresult MediaPipelineTransmit::TransportReady_s(TransportInfo &info) {
   ASSERT_ON_THREAD(sts_thread_);
   // Call base ready function.
   MediaPipeline::TransportReady_s(info);
 
   // Should not be set for a transmitter
   if (&info == &rtp_) {
     listener_->SetActive(true);
   }
 
   return NS_OK;
 }
 
-nsresult MediaPipelineTransmit::ReplaceTrack(MediaStreamTrack& domtrack) {
+nsresult MediaPipelineTransmit::ReplaceTrack(RefPtr<MediaStreamTrack>& domtrack) {
   // MainThread, checked in calls we make
-  nsString nsTrackId;
-  domtrack.GetId(nsTrackId);
-  std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
-  CSFLogDebug(LOGTAG, "Reattaching pipeline %s to track %p track %s conduit type: %s",
-              description_.c_str(), &domtrack, track_id.c_str(),
-              (conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
+  if (domtrack) {
+    nsString nsTrackId;
+    domtrack->GetId(nsTrackId);
+    std::string track_id(NS_ConvertUTF16toUTF8(nsTrackId).get());
+    CSFLogDebug(LOGTAG, "Reattaching pipeline %s to track %p track %s conduit type: %s",
+                description_.c_str(), &domtrack, track_id.c_str(),
+                (conduit_->type() == MediaSessionConduit::AUDIO ?"audio":"video"));
+  }
 
-  DetachMedia();
-  domtrack_ = &domtrack; // Detach clears it
-  // Unsets the track id after RemoveListener() takes effect.
-  listener_->UnsetTrackId(domtrack_->GraphImpl());
-  track_id_ = track_id;
-  AttachToTrack(track_id);
+  RefPtr<dom::MediaStreamTrack> oldTrack = domtrack_;
+  bool wasTransmitting = oldTrack && transmitting_;
+  Stop();
+  domtrack_ = domtrack;
+  SetDescription();
+
+  if (oldTrack) {
+    // Unsets the track id after RemoveListener() takes effect.
+    listener_->UnsetTrackId(oldTrack->GraphImpl());
+  }
+
+  if (wasTransmitting) {
+    Start();
+  }
   return NS_OK;
 }
 
 void MediaPipeline::DisconnectTransport_s(TransportInfo &info) {
   MOZ_ASSERT(info.transport_);
   ASSERT_ON_THREAD(sts_thread_);
 
   info.transport_->SignalStateChange.disconnect(this);
@@ -1883,58 +1897,74 @@ class GenericReceiveCallback : public Tr
     : listener_(listener) {}
 
   void TrackAdded(TrackTicks time);
 
  private:
   RefPtr<GenericReceiveListener> listener_;
 };
 
-// Add a listener on the MSG thread using the MSG command queue
-static void AddListener(MediaStream* source, MediaStreamListener* listener) {
-  class Message : public ControlMessage {
-   public:
-    Message(MediaStream* stream, MediaStreamListener* listener)
-      : ControlMessage(stream),
-        listener_(listener) {}
-
-    virtual void Run() override {
-      mStream->AddListenerImpl(listener_.forget());
-    }
-   private:
-    RefPtr<MediaStreamListener> listener_;
-  };
-
-  MOZ_ASSERT(listener);
-
-  if (source->GraphImpl()) {
-    source->GraphImpl()->AppendMessage(MakeUnique<Message>(source, listener));
-  }
-}
-
 class GenericReceiveListener : public MediaStreamListener
 {
  public:
   GenericReceiveListener(SourceMediaStream *source, TrackID track_id)
     : source_(source),
       track_id_(track_id),
       played_ticks_(0),
       last_log_(0),
-      principal_handle_(PRINCIPAL_HANDLE_NONE) {}
+      principal_handle_(PRINCIPAL_HANDLE_NONE),
+      listening_(false)
+  {
+    MOZ_ASSERT(source);
+  }
 
   virtual ~GenericReceiveListener() {}
 
   void AddSelf()
   {
-    AddListener(source_, this);
+    if (!listening_) {
+      listening_ = true;
+      source_->AddListener(this);
+    }
+  }
+
+  void RemoveSelf()
+  {
+    if (listening_) {
+      listening_ = false;
+      source_->RemoveListener(this);
+    }
   }
 
   void EndTrack()
   {
-    source_->EndTrack(track_id_);
+    CSFLogDebug(LOGTAG, "GenericReceiveListener ending track");
+
+    // We do this on MSG to avoid it racing against StartTrack.
+    class Message : public ControlMessage
+    {
+    public:
+      Message(SourceMediaStream* stream,
+              TrackID track_id)
+        : ControlMessage(stream),
+          source_(stream),
+          track_id_(track_id)
+      {}
+
+      void Run() override {
+        source_->EndTrack(track_id_);
+      }
+
+      RefPtr<SourceMediaStream> source_;
+      const TrackID track_id_;
+    };
+
+    source_->GraphImpl()->AppendMessage(MakeUnique<Message>(source_, track_id_));
+    // This breaks the cycle with source_
+    source_->RemoveListener(this);
   }
 
   // Must be called on the main thread
   void SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
   {
     class Message : public ControlMessage
     {
     public:
@@ -1959,55 +1989,45 @@ class GenericReceiveListener : public Me
 
   // Must be called on the MediaStreamGraph thread
   void SetPrincipalHandle_msg(const PrincipalHandle& principal_handle)
   {
     principal_handle_ = principal_handle;
   }
 
  protected:
-  SourceMediaStream *source_;
+  RefPtr<SourceMediaStream> source_;
   const TrackID track_id_;
   TrackTicks played_ticks_;
   TrackTicks last_log_; // played_ticks_ when we last logged
   PrincipalHandle principal_handle_;
+  bool listening_;
 };
 
 MediaPipelineReceive::MediaPipelineReceive(
     const std::string& pc,
     nsCOMPtr<nsIEventTarget> main_thread,
     nsCOMPtr<nsIEventTarget> sts_thread,
-    SourceMediaStream *stream,
-    const std::string& track_id,
-    int level,
-    RefPtr<MediaSessionConduit> conduit,
-    RefPtr<TransportFlow> rtp_transport,
-    RefPtr<TransportFlow> rtcp_transport,
-    nsAutoPtr<MediaPipelineFilter> filter) :
-  MediaPipeline(pc, RECEIVE, main_thread, sts_thread,
-                track_id, level, conduit, rtp_transport,
-                rtcp_transport, filter),
-  stream_(stream),
+    RefPtr<MediaSessionConduit> conduit) :
+  MediaPipeline(pc, RECEIVE, main_thread, sts_thread, conduit),
   segments_added_(0)
 {
-  MOZ_ASSERT(stream_);
 }
 
 MediaPipelineReceive::~MediaPipelineReceive()
 {
-  MOZ_ASSERT(!stream_);  // Check that we have shut down already.
 }
 
 class MediaPipelineReceiveAudio::PipelineListener
   : public GenericReceiveListener
 {
 public:
-  PipelineListener(SourceMediaStream * source, TrackID track_id,
+  PipelineListener(SourceMediaStream * source,
                    const RefPtr<MediaSessionConduit>& conduit)
-    : GenericReceiveListener(source, track_id),
+    : GenericReceiveListener(source, kAudioTrack),
       conduit_(conduit)
   {
   }
 
   ~PipelineListener()
   {
     if (!NS_IsMainThread()) {
       // release conduit on mainthread.  Must use forget()!
@@ -2113,67 +2133,63 @@ public:
 private:
   RefPtr<MediaSessionConduit> conduit_;
 };
 
 MediaPipelineReceiveAudio::MediaPipelineReceiveAudio(
     const std::string& pc,
     nsCOMPtr<nsIEventTarget> main_thread,
     nsCOMPtr<nsIEventTarget> sts_thread,
-    SourceMediaStream* stream,
-    const std::string& media_stream_track_id,
-    TrackID numeric_track_id,
-    int level,
     RefPtr<AudioSessionConduit> conduit,
-    RefPtr<TransportFlow> rtp_transport,
-    RefPtr<TransportFlow> rtcp_transport,
-    nsAutoPtr<MediaPipelineFilter> filter) :
-  MediaPipelineReceive(pc, main_thread, sts_thread,
-                       stream, media_stream_track_id, level, conduit,
-                       rtp_transport, rtcp_transport, filter),
-  listener_(new PipelineListener(stream, numeric_track_id, conduit))
-{}
+    SourceMediaStream* aStream) :
+  MediaPipelineReceive(pc, main_thread, sts_thread, conduit),
+  listener_(aStream ? new PipelineListener(aStream, conduit_) : nullptr)
+{
+  description_ = pc_ + "| Receive audio";
+}
 
 void MediaPipelineReceiveAudio::DetachMedia()
 {
   ASSERT_ON_THREAD(main_thread_);
-  if (stream_ && listener_) {
+  if (listener_) {
     listener_->EndTrack();
-
-    if (stream_->GraphImpl()) {
-      stream_->RemoveListener(listener_);
-    }
-    stream_ = nullptr;
+    listener_ = nullptr;
   }
 }
 
-nsresult MediaPipelineReceiveAudio::Init()
-{
-  ASSERT_ON_THREAD(main_thread_);
-  CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
-
-  description_ = pc_ + "| Receive audio[";
-  description_ += track_id_;
-  description_ += "]";
-
-  listener_->AddSelf();
-
-  return MediaPipelineReceive::Init();
-}
-
 void MediaPipelineReceiveAudio::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
 {
-  listener_->SetPrincipalHandle_m(principal_handle);
+  if (listener_) {
+    listener_->SetPrincipalHandle_m(principal_handle);
+  }
+}
+
+void
+MediaPipelineReceiveAudio::Start()
+{
+  conduit_->StartReceiving();
+  if (listener_) {
+    listener_->AddSelf();
+  }
+}
+
+void
+MediaPipelineReceiveAudio::Stop()
+{
+  if (listener_) {
+    listener_->RemoveSelf();
+  }
+  conduit_->StopReceiving();
 }
 
 class MediaPipelineReceiveVideo::PipelineListener
   : public GenericReceiveListener {
 public:
-  PipelineListener(SourceMediaStream* source, TrackID track_id)
-    : GenericReceiveListener(source, track_id)
+  explicit PipelineListener(SourceMediaStream* source)
+    : GenericReceiveListener(source, kVideoTrack)
     , image_container_()
     , image_()
     , mutex_("Video PipelineListener")
   {
     image_container_ =
       LayerManager::CreateImageContainer(ImageContainer::ASYNCHRONOUS);
   }
 
@@ -2292,67 +2308,64 @@ private:
   MediaPipelineReceiveVideo *pipeline_;  // Raw pointer to avoid cycles
 };
 
 
 MediaPipelineReceiveVideo::MediaPipelineReceiveVideo(
     const std::string& pc,
     nsCOMPtr<nsIEventTarget> main_thread,
     nsCOMPtr<nsIEventTarget> sts_thread,
-    SourceMediaStream *stream,
-    const std::string& media_stream_track_id,
-    TrackID numeric_track_id,
-    int level,
     RefPtr<VideoSessionConduit> conduit,
-    RefPtr<TransportFlow> rtp_transport,
-    RefPtr<TransportFlow> rtcp_transport,
-    nsAutoPtr<MediaPipelineFilter> filter) :
-  MediaPipelineReceive(pc, main_thread, sts_thread,
-                       stream, media_stream_track_id, level, conduit,
-                       rtp_transport, rtcp_transport, filter),
+    SourceMediaStream* aStream) :
+  MediaPipelineReceive(pc, main_thread, sts_thread, conduit),
   renderer_(new PipelineRenderer(this)),
-  listener_(new PipelineListener(stream, numeric_track_id))
-{}
+  listener_(aStream ? new PipelineListener(aStream) : nullptr)
+{
+  description_ = pc_ + "| Receive video";
+  conduit->AttachRenderer(renderer_);
+}
 
 void MediaPipelineReceiveVideo::DetachMedia()
 {
   ASSERT_ON_THREAD(main_thread_);
 
   // stop generating video and thus stop invoking the PipelineRenderer
   // and PipelineListener - the renderer has a raw ptr to the Pipeline to
   // avoid cycles, and the render callbacks are invoked from a different
   // thread so simple null-checks would cause TSAN bugs without locks.
   static_cast<VideoSessionConduit*>(conduit_.get())->DetachRenderer();
-  if (stream_ && listener_) {
+  if (listener_) {
     listener_->EndTrack();
-    stream_->RemoveListener(listener_);
-    stream_ = nullptr;
+    listener_ = nullptr;
   }
 }
 
-nsresult MediaPipelineReceiveVideo::Init() {
-  ASSERT_ON_THREAD(main_thread_);
-  CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
-
-  description_ = pc_ + "| Receive video[";
-  description_ += track_id_;
-  description_ += "]";
-
-  listener_->AddSelf();
-
-  // Always happens before we can DetachMedia()
-  static_cast<VideoSessionConduit *>(conduit_.get())->
-      AttachRenderer(renderer_);
-
-  return MediaPipelineReceive::Init();
-}
-
 void MediaPipelineReceiveVideo::SetPrincipalHandle_m(const PrincipalHandle& principal_handle)
 {
-  listener_->SetPrincipalHandle_m(principal_handle);
+  if (listener_) {
+    listener_->SetPrincipalHandle_m(principal_handle);
+  }
+}
+
+void
+MediaPipelineReceiveVideo::Start()
+{
+  conduit_->StartReceiving();
+  if (listener_) {
+    listener_->AddSelf();
+  }
+}
+
+void
+MediaPipelineReceiveVideo::Stop()
+{
+  if (listener_) {
+    listener_->RemoveSelf();
+  }
+  conduit_->StopReceiving();
 }
 
 DOMHighResTimeStamp MediaPipeline::GetNow() {
   return webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
 }
 
 DOMHighResTimeStamp
 MediaPipeline::RtpCSRCStats::GetExpiryFromTime(
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.h
@@ -7,23 +7,23 @@
 
 #ifndef mediapipeline_h__
 #define mediapipeline_h__
 
 #include <map>
 
 #include "sigslot.h"
 
-#include "MediaConduitInterface.h"
+#include "signaling/src/media-conduit/MediaConduitInterface.h"
 #include "mozilla/ReentrantMonitor.h"
 #include "mozilla/Atomics.h"
 #include "SrtpFlow.h"
 #include "databuffer.h"
-#include "runnable_utils.h"
-#include "transportflow.h"
+#include "mtransport/runnable_utils.h"
+#include "mtransport/transportflow.h"
 #include "AudioPacketizer.h"
 #include "StreamTracks.h"
 #include "signaling/src/peerconnection/PacketDumper.h"
 
 #include "webrtc/modules/rtp_rtcp/include/rtp_header_parser.h"
 
 // Should come from MediaEngine.h, but that's a pain to include here
 // because of the MOZILLA_EXTERNAL_LINKAGE stuff.
@@ -78,63 +78,46 @@ class SourceMediaStream;
 class MediaPipeline : public sigslot::has_slots<> {
  public:
   enum Direction { TRANSMIT, RECEIVE };
   enum State { MP_CONNECTING, MP_OPEN, MP_CLOSED };
   MediaPipeline(const std::string& pc,
                 Direction direction,
                 nsCOMPtr<nsIEventTarget> main_thread,
                 nsCOMPtr<nsIEventTarget> sts_thread,
-                const std::string& track_id,
-                int level,
-                RefPtr<MediaSessionConduit> conduit,
-                RefPtr<TransportFlow> rtp_transport,
-                RefPtr<TransportFlow> rtcp_transport,
-                nsAutoPtr<MediaPipelineFilter> filter);
+                RefPtr<MediaSessionConduit> conduit);
 
-  // Must be called on the STS thread.  Must be called after ShutdownMedia_m().
-  void DetachTransport_s();
+  virtual void Start() = 0;
+  virtual void Stop() = 0;
+  virtual void DetachMedia() {}
+
+  void SetLevel(size_t level) { level_ = level; }
 
   // Must be called on the main thread.
-  void ShutdownMedia_m()
-  {
-    ASSERT_ON_THREAD(main_thread_);
+  void Shutdown_m();
 
-    if (direction_ == RECEIVE) {
-      conduit_->StopReceiving();
-    } else {
-      conduit_->StopTransmitting();
-    }
-    DetachMedia();
-  }
-
-  virtual nsresult Init();
-
-  void UpdateTransport_m(int level,
-                         RefPtr<TransportFlow> rtp_transport,
+  void UpdateTransport_m(RefPtr<TransportFlow> rtp_transport,
                          RefPtr<TransportFlow> rtcp_transport,
                          nsAutoPtr<MediaPipelineFilter> filter);
 
-  void UpdateTransport_s(int level,
-                         RefPtr<TransportFlow> rtp_transport,
+  void UpdateTransport_s(RefPtr<TransportFlow> rtp_transport,
                          RefPtr<TransportFlow> rtcp_transport,
                          nsAutoPtr<MediaPipelineFilter> filter);
 
   // Used only for testing; adds RTP header extension for RTP Stream Id with
   // the given id.
   void AddRIDExtension_m(size_t extension_id);
   void AddRIDExtension_s(size_t extension_id);
   // Used only for testing; installs a MediaPipelineFilter that filters
   // everything but the given RID
   void AddRIDFilter_m(const std::string& rid);
   void AddRIDFilter_s(const std::string& rid);
 
   virtual Direction direction() const { return direction_; }
-  virtual const std::string& trackid() const { return track_id_; }
-  virtual int level() const { return level_; }
+  int level() const { return level_; }
   virtual bool IsVideo() const = 0;
 
   bool IsDoingRtcpMux() const {
     return (rtp_.type_ == MUX);
   }
 
   class RtpCSRCStats {
   public:
@@ -208,37 +191,32 @@ class MediaPipeline : public sigslot::ha
 
     virtual nsresult SendRtpPacket(const uint8_t* data, size_t len);
     virtual nsresult SendRtcpPacket(const uint8_t* data, size_t len);
 
    private:
     nsresult SendRtpRtcpPacket_s(nsAutoPtr<DataBuffer> data,
                                  bool is_rtp);
 
-    MediaPipeline *pipeline_;  // Raw pointer to avoid cycles
+    // Creates a cycle, which we break with Detach
+    RefPtr<MediaPipeline> pipeline_;
     nsCOMPtr<nsIEventTarget> sts_thread_;
   };
 
-  RefPtr<PipelineTransport> GetPiplelineTransport() {
-    return transport_;
-  }
-
  protected:
   virtual ~MediaPipeline();
-  virtual void DetachMedia() {}
   nsresult AttachTransport_s();
   friend class PipelineTransport;
 
   class TransportInfo {
     public:
       TransportInfo(RefPtr<TransportFlow> flow, RtpType type) :
         transport_(flow),
         state_(MP_CONNECTING),
         type_(type) {
-        MOZ_ASSERT(flow);
       }
 
       void Detach()
       {
         transport_ = nullptr;
         send_srtp_ = nullptr;
         recv_srtp_ = nullptr;
       }
@@ -274,68 +252,61 @@ class MediaPipeline : public sigslot::ha
   void RtpPacketReceived(TransportLayer *layer, const unsigned char *data,
                          size_t len);
   void RtcpPacketReceived(TransportLayer *layer, const unsigned char *data,
                           size_t len);
   void PacketReceived(TransportLayer *layer, const unsigned char *data,
                       size_t len);
 
   Direction direction_;
-  std::string track_id_;        // The track on the stream.
-                                // Written on the main thread.
-                                // Used on STS and MediaStreamGraph threads.
-                                // Not used outside initialization in MediaPipelineTransmit
-  // The m-line index (starting at 0, to match convention) Atomic because
-  // this value is updated from STS, but read on main, and we don't want to
-  // bother with dispatches just to get an int occasionally.
-  Atomic<int> level_;
+  size_t level_;
   RefPtr<MediaSessionConduit> conduit_;  // Our conduit. Written on the main
                                          // thread. Read on STS thread.
 
   // The transport objects. Read/written on STS thread.
   TransportInfo rtp_;
   TransportInfo rtcp_;
 
   // Pointers to the threads we need. Initialized at creation
   // and used all over the place.
   nsCOMPtr<nsIEventTarget> main_thread_;
   nsCOMPtr<nsIEventTarget> sts_thread_;
 
-  // Created on Init. Referenced by the conduit and eventually
-  // destroyed on the STS thread.
+  // Created in c'tor. Referenced by the conduit.
   RefPtr<PipelineTransport> transport_;
 
   // Only safe to access from STS thread.
   // Build into TransportInfo?
   int32_t rtp_packets_sent_;
   int32_t rtcp_packets_sent_;
   int32_t rtp_packets_received_;
   int32_t rtcp_packets_received_;
   int64_t rtp_bytes_sent_;
   int64_t rtp_bytes_received_;
 
   // Only safe to access from STS thread.
   std::map<uint32_t, RtpCSRCStats> csrc_stats_;
 
-  // Written on Init. Read on STS thread.
+  // Written in c'tor. Read on STS thread.
   std::string pc_;
   std::string description_;
 
-  // Written on Init, all following accesses are on the STS thread.
+  // Written in c'tor, all following accesses are on the STS thread.
   nsAutoPtr<MediaPipelineFilter> filter_;
   nsAutoPtr<webrtc::RtpHeaderParser> rtp_parser_;
 
   nsAutoPtr<PacketDumper> packet_dumper_;
 
  private:
   // Gets the current time as a DOMHighResTimeStamp
   static DOMHighResTimeStamp GetNow();
-  nsresult Init_s();
 
   bool IsRtp(const unsigned char *data, size_t len);
+  // Must be called on the STS thread.  Must be called after DetachMedia().
+  void DetachTransport_s();
 };
 
 class ConduitDeleteEvent: public Runnable
 {
 public:
   explicit ConduitDeleteEvent(already_AddRefed<MediaSessionConduit> aConduit) :
     Runnable("ConduitDeleteEvent"),
     mConduit(aConduit) {}
@@ -349,28 +320,22 @@ private:
 // A specialization of pipeline for reading from an input device
 // and transmitting to the network.
 class MediaPipelineTransmit : public MediaPipeline {
 public:
   // Set rtcp_transport to nullptr to use rtcp-mux
   MediaPipelineTransmit(const std::string& pc,
                         nsCOMPtr<nsIEventTarget> main_thread,
                         nsCOMPtr<nsIEventTarget> sts_thread,
+                        bool is_video,
                         dom::MediaStreamTrack* domtrack,
-                        const std::string& track_id,
-                        int level,
-                        RefPtr<MediaSessionConduit> conduit,
-                        RefPtr<TransportFlow> rtp_transport,
-                        RefPtr<TransportFlow> rtcp_transport,
-                        nsAutoPtr<MediaPipelineFilter> filter);
+                        RefPtr<MediaSessionConduit> conduit);
 
-  // Initialize (stuff here may fail)
-  nsresult Init() override;
-
-  virtual void AttachToTrack(const std::string& track_id);
+  void Start() override;
+  void Stop() override;
 
   // written and used from MainThread
   bool IsVideo() const override;
 
   // When the principal of the domtrack changes, it calls through to here
   // so that we can determine whether to enable track transmission.
   // `track` has to be null or equal `domtrack_` for us to apply the update.
   virtual void UpdateSinkIdentity_m(dom::MediaStreamTrack* track,
@@ -382,130 +347,110 @@ public:
 
   // Override MediaPipeline::TransportReady.
   nsresult TransportReady_s(TransportInfo &info) override;
 
   // Replace a track with a different one
   // In non-compliance with the likely final spec, allow the new
   // track to be part of a different stream (since we don't support
   // multiple tracks of a type in a stream yet).  bug 1056650
-  virtual nsresult ReplaceTrack(dom::MediaStreamTrack& domtrack);
+  virtual nsresult ReplaceTrack(RefPtr<dom::MediaStreamTrack>& domtrack);
 
   // Separate classes to allow ref counting
   class PipelineListener;
   class VideoFrameFeeder;
 
  protected:
   ~MediaPipelineTransmit();
 
+  void SetDescription();
+
  private:
   RefPtr<PipelineListener> listener_;
   RefPtr<AudioProxyThread> audio_processing_;
   RefPtr<VideoFrameFeeder> feeder_;
   RefPtr<VideoFrameConverter> converter_;
-  dom::MediaStreamTrack* domtrack_;
+  bool is_video_;
+  RefPtr<dom::MediaStreamTrack> domtrack_;
+  bool transmitting_;
 };
 
 
 // A specialization of pipeline for reading from the network and
-// rendering video.
+// rendering media.
 class MediaPipelineReceive : public MediaPipeline {
  public:
   // Set rtcp_transport to nullptr to use rtcp-mux
   MediaPipelineReceive(const std::string& pc,
                        nsCOMPtr<nsIEventTarget> main_thread,
                        nsCOMPtr<nsIEventTarget> sts_thread,
-                       SourceMediaStream *stream,
-                       const std::string& track_id,
-                       int level,
-                       RefPtr<MediaSessionConduit> conduit,
-                       RefPtr<TransportFlow> rtp_transport,
-                       RefPtr<TransportFlow> rtcp_transport,
-                       nsAutoPtr<MediaPipelineFilter> filter);
+                       RefPtr<MediaSessionConduit> conduit);
 
   int segments_added() const { return segments_added_; }
 
   // Sets the PrincipalHandle we set on the media chunks produced by this
   // pipeline. Must be called on the main thread.
   virtual void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) = 0;
+
  protected:
   ~MediaPipelineReceive();
 
-  RefPtr<SourceMediaStream> stream_;
   int segments_added_;
 
  private:
 };
 
 
 // A specialization of pipeline for reading from the network and
 // rendering audio.
 class MediaPipelineReceiveAudio : public MediaPipelineReceive {
  public:
   MediaPipelineReceiveAudio(const std::string& pc,
                             nsCOMPtr<nsIEventTarget> main_thread,
                             nsCOMPtr<nsIEventTarget> sts_thread,
-                            SourceMediaStream* stream,
-                            // This comes from an msid attribute. Everywhere
-                            // but MediaStreamGraph uses this.
-                            const std::string& media_stream_track_id,
-                            // This is an integer identifier that is only
-                            // unique within a single DOMMediaStream, which is
-                            // used by MediaStreamGraph
-                            TrackID numeric_track_id,
-                            int level,
                             RefPtr<AudioSessionConduit> conduit,
-                            RefPtr<TransportFlow> rtp_transport,
-                            RefPtr<TransportFlow> rtcp_transport,
-                            nsAutoPtr<MediaPipelineFilter> filter);
+                            SourceMediaStream* aStream);
 
   void DetachMedia() override;
 
-  nsresult Init() override;
   bool IsVideo() const override { return false; }
 
   void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
 
+  void Start() override;
+  void Stop() override;
+
  private:
   // Separate class to allow ref counting
   class PipelineListener;
 
   RefPtr<PipelineListener> listener_;
 };
 
 
 // A specialization of pipeline for reading from the network and
 // rendering video.
 class MediaPipelineReceiveVideo : public MediaPipelineReceive {
  public:
   MediaPipelineReceiveVideo(const std::string& pc,
                             nsCOMPtr<nsIEventTarget> main_thread,
                             nsCOMPtr<nsIEventTarget> sts_thread,
-                            SourceMediaStream *stream,
-                            // This comes from an msid attribute. Everywhere
-                            // but MediaStreamGraph uses this.
-                            const std::string& media_stream_track_id,
-                            // This is an integer identifier that is only
-                            // unique within a single DOMMediaStream, which is
-                            // used by MediaStreamGraph
-                            TrackID numeric_track_id,
-                            int level,
                             RefPtr<VideoSessionConduit> conduit,
-                            RefPtr<TransportFlow> rtp_transport,
-                            RefPtr<TransportFlow> rtcp_transport,
-                            nsAutoPtr<MediaPipelineFilter> filter);
+                            SourceMediaStream* aStream);
 
   // Called on the main thread.
   void DetachMedia() override;
 
-  nsresult Init() override;
   bool IsVideo() const override { return true; }
 
   void SetPrincipalHandle_m(const PrincipalHandle& principal_handle) override;
 
+  void Start() override;
+  void Stop() override;
+
  private:
   class PipelineRenderer;
   friend class PipelineRenderer;
 
   // Separate class to allow ref counting
   class PipelineListener;
 
   RefPtr<PipelineRenderer> renderer_;
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipelineFilter.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipelineFilter.cpp
@@ -104,17 +104,18 @@ MediaPipelineFilter::FilterSenderReport(
 
   if (len < FIRST_SSRC_OFFSET + 4) {
     return false;
   }
 
   uint8_t payload_type = data[PT_OFFSET];
 
   if (payload_type != SENDER_REPORT_T) {
-    return false;
+    // Not a sender report, let it through
+    return true;
   }
 
   uint32_t ssrc = 0;
   ssrc += (uint32_t)data[FIRST_SSRC_OFFSET] << 24;
   ssrc += (uint32_t)data[FIRST_SSRC_OFFSET + 1] << 16;
   ssrc += (uint32_t)data[FIRST_SSRC_OFFSET + 2] << 8;
   ssrc += (uint32_t)data[FIRST_SSRC_OFFSET + 3];
 
--- a/media/webrtc/signaling/src/mediapipeline/moz.build
+++ b/media/webrtc/signaling/src/mediapipeline/moz.build
@@ -1,17 +1,19 @@
 # -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*-
 # vim: set filetype=python:
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 include('/media/webrtc/webrtc.mozbuild')
 
 LOCAL_INCLUDES += [
+    '!/ipc/ipdl/_ipdlheaders',
     '/dom/media',
+    '/ipc/chromium/src',
     '/media/libyuv/libyuv/include',
     '/media/mtransport',
     '/media/webrtc',
     '/media/webrtc/signaling/src/common/browser_logging',
     '/media/webrtc/signaling/src/media-conduit',
     '/media/webrtc/trunk',
     '/netwerk/srtp/src/crypto/include',
     '/netwerk/srtp/src/include',
deleted file mode 100644
--- a/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
+++ /dev/null
@@ -1,954 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-
-#include "logging.h"
-#include "nsIGfxInfo.h"
-#include "nsServiceManagerUtils.h"
-
-#include "PeerConnectionImpl.h"
-#include "PeerConnectionMedia.h"
-#include "MediaPipelineFactory.h"
-#include "MediaPipelineFilter.h"
-#include "transportflow.h"
-#include "transportlayer.h"
-#include "transportlayerdtls.h"
-#include "transportlayerice.h"
-
-#include "signaling/src/jsep/JsepTrack.h"
-#include "signaling/src/jsep/JsepTransport.h"
-#include "signaling/src/common/PtrVector.h"
-
-#include "MediaStreamTrack.h"
-#include "nsIPrincipal.h"
-#include "nsIDocument.h"
-#include "mozilla/Preferences.h"
-#include "MediaEngine.h"
-
-#include "mozilla/Preferences.h"
-
-#include "WebrtcGmpVideoCodec.h"
-
-#include <stdlib.h>
-
-namespace mozilla {
-
-MOZ_MTLOG_MODULE("MediaPipelineFactory")
-
-static nsresult
-JsepCodecDescToCodecConfig(const JsepCodecDescription& aCodec,
-                           AudioCodecConfig** aConfig)
-{
-  MOZ_ASSERT(aCodec.mType == SdpMediaSection::kAudio);
-  if (aCodec.mType != SdpMediaSection::kAudio)
-    return NS_ERROR_INVALID_ARG;
-
-  const JsepAudioCodecDescription& desc =
-      static_cast<const JsepAudioCodecDescription&>(aCodec);
-
-  uint16_t pt;
-
-  if (!desc.GetPtAsInt(&pt)) {
-    MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  *aConfig = new AudioCodecConfig(pt,
-                                  desc.mName,
-                                  desc.mClock,
-                                  desc.mPacketSize,
-                                  desc.mForceMono ? 1 : desc.mChannels,
-                                  desc.mBitrate,
-                                  desc.mFECEnabled);
-  (*aConfig)->mMaxPlaybackRate = desc.mMaxPlaybackRate;
-  (*aConfig)->mDtmfEnabled = desc.mDtmfEnabled;
-
-  return NS_OK;
-}
-
-static std::vector<JsepCodecDescription*>
-GetCodecs(const JsepTrackNegotiatedDetails& aDetails)
-{
-  // We do not try to handle cases where a codec is not used on the primary
-  // encoding.
-  if (aDetails.GetEncodingCount()) {
-    return aDetails.GetEncoding(0).GetCodecs();
-  }
-  return std::vector<JsepCodecDescription*>();
-}
-
-static nsresult
-NegotiatedDetailsToAudioCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
-                                     PtrVector<AudioCodecConfig>* aConfigs)
-{
-  std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
-  for (const JsepCodecDescription* codec : codecs) {
-    AudioCodecConfig* config;
-    if (NS_FAILED(JsepCodecDescToCodecConfig(*codec, &config))) {
-      return NS_ERROR_INVALID_ARG;
-    }
-    aConfigs->values.push_back(config);
-  }
-  return NS_OK;
-}
-
-static nsresult
-JsepCodecDescToCodecConfig(const JsepCodecDescription& aCodec,
-                           VideoCodecConfig** aConfig)
-{
-  MOZ_ASSERT(aCodec.mType == SdpMediaSection::kVideo);
-  if (aCodec.mType != SdpMediaSection::kVideo) {
-    MOZ_ASSERT(false, "JsepCodecDescription has wrong type");
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  const JsepVideoCodecDescription& desc =
-      static_cast<const JsepVideoCodecDescription&>(aCodec);
-
-  uint16_t pt;
-
-  if (!desc.GetPtAsInt(&pt)) {
-    MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  UniquePtr<VideoCodecConfigH264> h264Config;
-
-  if (desc.mName == "H264") {
-    h264Config = MakeUnique<VideoCodecConfigH264>();
-    size_t spropSize = sizeof(h264Config->sprop_parameter_sets);
-    strncpy(h264Config->sprop_parameter_sets,
-            desc.mSpropParameterSets.c_str(),
-            spropSize);
-    h264Config->sprop_parameter_sets[spropSize - 1] = '\0';
-    h264Config->packetization_mode = desc.mPacketizationMode;
-    h264Config->profile_level_id = desc.mProfileLevelId;
-    h264Config->tias_bw = 0; // TODO. Issue 165.
-  }
-
-  VideoCodecConfig* configRaw;
-  configRaw = new VideoCodecConfig(
-      pt, desc.mName, desc.mConstraints, h264Config.get());
-
-  configRaw->mAckFbTypes = desc.mAckFbTypes;
-  configRaw->mNackFbTypes = desc.mNackFbTypes;
-  configRaw->mCcmFbTypes = desc.mCcmFbTypes;
-  configRaw->mRembFbSet = desc.RtcpFbRembIsSet();
-  configRaw->mFECFbSet = desc.mFECEnabled;
-  if (desc.mFECEnabled) {
-    configRaw->mREDPayloadType = desc.mREDPayloadType;
-    configRaw->mULPFECPayloadType = desc.mULPFECPayloadType;
-  }
-
-  *aConfig = configRaw;
-  return NS_OK;
-}
-
-static nsresult
-NegotiatedDetailsToVideoCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
-                                     PtrVector<VideoCodecConfig>* aConfigs)
-{
-  std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
-  for (const JsepCodecDescription* codec : codecs) {
-    VideoCodecConfig* config;
-    if (NS_FAILED(JsepCodecDescToCodecConfig(*codec, &config))) {
-      return NS_ERROR_INVALID_ARG;
-    }
-
-    config->mTias = aDetails.GetTias();
-
-    for (size_t i = 0; i < aDetails.GetEncodingCount(); ++i) {
-      const JsepTrackEncoding& jsepEncoding(aDetails.GetEncoding(i));
-      if (jsepEncoding.HasFormat(codec->mDefaultPt)) {
-        VideoCodecConfig::SimulcastEncoding encoding;
-        encoding.rid = jsepEncoding.mRid;
-        encoding.constraints = jsepEncoding.mConstraints;
-        config->mSimulcastEncodings.push_back(encoding);
-      }
-    }
-
-    aConfigs->values.push_back(config);
-  }
-
-  return NS_OK;
-}
-
-// Accessing the PCMedia should be safe here because we shouldn't
-// have enqueued this function unless it was still active and
-// the ICE data is destroyed on the STS.
-static void
-FinalizeTransportFlow_s(RefPtr<PeerConnectionMedia> aPCMedia,
-                        RefPtr<TransportFlow> aFlow, size_t aLevel,
-                        bool aIsRtcp,
-                        nsAutoPtr<PtrVector<TransportLayer> > aLayerList)
-{
-  TransportLayerIce* ice =
-      static_cast<TransportLayerIce*>(aLayerList->values.front());
-  ice->SetParameters(aPCMedia->ice_ctx(),
-                     aPCMedia->ice_media_stream(aLevel),
-                     aIsRtcp ? 2 : 1);
-  nsAutoPtr<std::queue<TransportLayer*> > layerQueue(
-      new std::queue<TransportLayer*>);
-  for (auto& value : aLayerList->values) {
-    layerQueue->push(value);
-  }
-  aLayerList->values.clear();
-  (void)aFlow->PushLayers(layerQueue); // TODO(bug 854518): Process errors.
-}
-
-static void
-AddNewIceStreamForRestart_s(RefPtr<PeerConnectionMedia> aPCMedia,
-                            RefPtr<TransportFlow> aFlow,
-                            size_t aLevel,
-                            bool aIsRtcp)
-{
-  TransportLayerIce* ice =
-      static_cast<TransportLayerIce*>(aFlow->GetLayer("ice"));
-  ice->SetParameters(aPCMedia->ice_ctx(),
-                     aPCMedia->ice_media_stream(aLevel),
-                     aIsRtcp ? 2 : 1);
-}
-
-nsresult
-MediaPipelineFactory::CreateOrGetTransportFlow(
-    size_t aLevel,
-    bool aIsRtcp,
-    const JsepTransport& aTransport,
-    RefPtr<TransportFlow>* aFlowOutparam)
-{
-  nsresult rv;
-  RefPtr<TransportFlow> flow;
-
-  flow = mPCMedia->GetTransportFlow(aLevel, aIsRtcp);
-  if (flow) {
-    if (mPCMedia->IsIceRestarting()) {
-      MOZ_MTLOG(ML_INFO, "Flow[" << flow->id() << "]: "
-                                 << "detected ICE restart - level: "
-                                 << aLevel << " rtcp: " << aIsRtcp);
-
-      rv = mPCMedia->GetSTSThread()->Dispatch(
-          WrapRunnableNM(AddNewIceStreamForRestart_s,
-                         mPCMedia, flow, aLevel, aIsRtcp),
-          NS_DISPATCH_NORMAL);
-      if (NS_FAILED(rv)) {
-        MOZ_MTLOG(ML_ERROR, "Failed to dispatch AddNewIceStreamForRestart_s");
-        return rv;
-      }
-    }
-
-    *aFlowOutparam = flow;
-    return NS_OK;
-  }
-
-  std::ostringstream osId;
-  osId << mPC->GetHandle() << ":" << aLevel << ","
-       << (aIsRtcp ? "rtcp" : "rtp");
-  flow = new TransportFlow(osId.str());
-
-  // The media streams are made on STS so we need to defer setup.
-  auto ice = MakeUnique<TransportLayerIce>(mPC->GetHandle());
-  auto dtls = MakeUnique<TransportLayerDtls>();
-  dtls->SetRole(aTransport.mDtls->GetRole() ==
-                        JsepDtlsTransport::kJsepDtlsClient
-                    ? TransportLayerDtls::CLIENT
-                    : TransportLayerDtls::SERVER);
-
-  RefPtr<DtlsIdentity> pcid = mPC->Identity();
-  if (!pcid) {
-    MOZ_MTLOG(ML_ERROR, "Failed to get DTLS identity.");
-    return NS_ERROR_FAILURE;
-  }
-  dtls->SetIdentity(pcid);
-
-  const SdpFingerprintAttributeList& fingerprints =
-      aTransport.mDtls->GetFingerprints();
-  for (const auto& fingerprint : fingerprints.mFingerprints) {
-    std::ostringstream ss;
-    ss << fingerprint.hashFunc;
-    rv = dtls->SetVerificationDigest(ss.str(), &fingerprint.fingerprint[0],
-                                     fingerprint.fingerprint.size());
-    if (NS_FAILED(rv)) {
-      MOZ_MTLOG(ML_ERROR, "Could not set fingerprint");
-      return rv;
-    }
-  }
-
-  std::vector<uint16_t> srtpCiphers;
-  srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_80);
-  srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_32);
-
-  rv = dtls->SetSrtpCiphers(srtpCiphers);
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't set SRTP ciphers");
-    return rv;
-  }
-
-  // Always permits negotiation of the confidential mode.
-  // Only allow non-confidential (which is an allowed default),
-  // if we aren't confidential.
-  std::set<std::string> alpn;
-  std::string alpnDefault = "";
-  alpn.insert("c-webrtc");
-  if (!mPC->PrivacyRequested()) {
-    alpnDefault = "webrtc";
-    alpn.insert(alpnDefault);
-  }
-  rv = dtls->SetAlpn(alpn, alpnDefault);
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't set ALPN");
-    return rv;
-  }
-
-  nsAutoPtr<PtrVector<TransportLayer> > layers(new PtrVector<TransportLayer>);
-  layers->values.push_back(ice.release());
-  layers->values.push_back(dtls.release());
-
-  rv = mPCMedia->GetSTSThread()->Dispatch(
-      WrapRunnableNM(FinalizeTransportFlow_s, mPCMedia, flow, aLevel, aIsRtcp,
-                     layers),
-      NS_DISPATCH_NORMAL);
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Failed to dispatch FinalizeTransportFlow_s");
-    return rv;
-  }
-
-  mPCMedia->AddTransportFlow(aLevel, aIsRtcp, flow);
-
-  *aFlowOutparam = flow;
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::GetTransportParameters(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack,
-    size_t* aLevelOut,
-    RefPtr<TransportFlow>* aRtpOut,
-    RefPtr<TransportFlow>* aRtcpOut,
-    nsAutoPtr<MediaPipelineFilter>* aFilterOut)
-{
-  *aLevelOut = aTrackPair.mLevel;
-
-  size_t transportLevel = aTrackPair.HasBundleLevel() ?
-                          aTrackPair.BundleLevel() :
-                          aTrackPair.mLevel;
-
-  nsresult rv = CreateOrGetTransportFlow(
-      transportLevel, false, *aTrackPair.mRtpTransport, aRtpOut);
-  if (NS_FAILED(rv)) {
-    return rv;
-  }
-  MOZ_ASSERT(aRtpOut);
-
-  if (aTrackPair.mRtcpTransport) {
-    rv = CreateOrGetTransportFlow(
-        transportLevel, true, *aTrackPair.mRtcpTransport, aRtcpOut);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    MOZ_ASSERT(aRtcpOut);
-  }
-
-  if (aTrackPair.HasBundleLevel()) {
-    bool receiving = aTrack.GetDirection() == sdp::kRecv;
-
-    *aFilterOut = new MediaPipelineFilter;
-
-    if (receiving) {
-      // Add remote SSRCs so we can distinguish which RTP packets actually
-      // belong to this pipeline (also RTCP sender reports).
-      for (unsigned int ssrc : aTrack.GetSsrcs()) {
-        (*aFilterOut)->AddRemoteSSRC(ssrc);
-      }
-
-      // TODO(bug 1105005): Tell the filter about the mid for this track
-
-      // Add unique payload types as a last-ditch fallback
-      auto uniquePts = aTrack.GetNegotiatedDetails()->GetUniquePayloadTypes();
-      for (unsigned char& uniquePt : uniquePts) {
-        (*aFilterOut)->AddUniquePT(uniquePt);
-      }
-    }
-  }
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::CreateOrUpdateMediaPipeline(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack)
-{
-  // The GMP code is all the way on the other side of webrtc.org, and it is not
-  // feasible to plumb this information all the way through. So, we set it (for
-  // the duration of this call) in a global variable. This allows the GMP code
-  // to report errors to the PC.
-  WebrtcGmpPCHandleSetter setter(mPC->GetHandle());
-
-  MOZ_ASSERT(aTrackPair.mRtpTransport);
-
-  bool receiving = aTrack.GetDirection() == sdp::kRecv;
-
-  size_t level;
-  RefPtr<TransportFlow> rtpFlow;
-  RefPtr<TransportFlow> rtcpFlow;
-  nsAutoPtr<MediaPipelineFilter> filter;
-
-  nsresult rv = GetTransportParameters(aTrackPair,
-                                       aTrack,
-                                       &level,
-                                       &rtpFlow,
-                                       &rtcpFlow,
-                                       &filter);
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Failed to get transport parameters for pipeline, rv="
-              << static_cast<unsigned>(rv));
-    return rv;
-  }
-
-  if (aTrack.GetMediaType() == SdpMediaSection::kApplication) {
-    // GetTransportParameters has already done everything we need for
-    // datachannel.
-    return NS_OK;
-  }
-
-  // Find the stream we need
-  SourceStreamInfo* stream;
-  if (receiving) {
-    stream = mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
-  } else {
-    stream = mPCMedia->GetLocalStreamById(aTrack.GetStreamId());
-  }
-
-  if (!stream) {
-    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
-              << " stream id " << aTrack.GetStreamId() << " was never added");
-    MOZ_ASSERT(false);
-    return NS_ERROR_FAILURE;
-  }
-
-  if (!stream->HasTrack(aTrack.GetTrackId())) {
-    MOZ_MTLOG(ML_ERROR, "Negotiated " << (receiving ? "recv" : "send")
-              << " track id " << aTrack.GetTrackId() << " was never added");
-    MOZ_ASSERT(false);
-    return NS_ERROR_FAILURE;
-  }
-
-  RefPtr<MediaSessionConduit> conduit;
-  if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
-    rv = GetOrCreateAudioConduit(aTrackPair, aTrack, &conduit);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-  } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
-    rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-    conduit->SetPCHandle(mPC->GetHandle());
-  } else {
-    // We've created the TransportFlow, nothing else to do here.
-    return NS_OK;
-  }
-
-  if (aTrack.GetActive()) {
-    if (receiving) {
-      auto error = conduit->StartReceiving();
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "StartReceiving failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    } else {
-      auto error = conduit->StartTransmitting();
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "StartTransmitting failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    }
-  } else {
-    if (receiving) {
-      auto error = conduit->StopReceiving();
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "StopReceiving failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    } else {
-      auto error = conduit->StopTransmitting();
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "StopTransmitting failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    }
-  }
-
-  RefPtr<MediaPipeline> pipeline =
-    stream->GetPipelineByTrackId_m(aTrack.GetTrackId());
-
-  if (pipeline && pipeline->level() != static_cast<int>(level)) {
-    MOZ_MTLOG(ML_WARNING, "Track " << aTrack.GetTrackId() <<
-                          " has moved from level " << pipeline->level() <<
-                          " to level " << level <<
-                          ". This requires re-creating the MediaPipeline.");
-    RefPtr<dom::MediaStreamTrack> domTrack =
-      stream->GetTrackById(aTrack.GetTrackId());
-    MOZ_ASSERT(domTrack, "MediaPipeline existed for a track, but no MediaStreamTrack");
-
-    // Since we do not support changing the conduit on a pre-existing
-    // MediaPipeline
-    pipeline = nullptr;
-    stream->RemoveTrack(aTrack.GetTrackId());
-    stream->AddTrack(aTrack.GetTrackId(), domTrack);
-  }
-
-  if (pipeline) {
-    pipeline->UpdateTransport_m(level, rtpFlow, rtcpFlow, filter);
-    return NS_OK;
-  }
-
-  MOZ_MTLOG(ML_DEBUG,
-            "Creating media pipeline"
-                << " m-line index=" << aTrackPair.mLevel
-                << " type=" << aTrack.GetMediaType()
-                << " direction=" << aTrack.GetDirection());
-
-  if (receiving) {
-    rv = CreateMediaPipelineReceiving(aTrackPair, aTrack,
-                                      level, rtpFlow, rtcpFlow, filter,
-                                      conduit);
-    if (NS_FAILED(rv))
-      return rv;
-  } else {
-    rv = CreateMediaPipelineSending(aTrackPair, aTrack,
-                                    level, rtpFlow, rtcpFlow, filter,
-                                    conduit);
-    if (NS_FAILED(rv))
-      return rv;
-  }
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::CreateMediaPipelineReceiving(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack,
-    size_t aLevel,
-    RefPtr<TransportFlow> aRtpFlow,
-    RefPtr<TransportFlow> aRtcpFlow,
-    nsAutoPtr<MediaPipelineFilter> aFilter,
-    const RefPtr<MediaSessionConduit>& aConduit)
-{
-  // We will error out earlier if this isn't here.
-  RefPtr<RemoteSourceStreamInfo> stream =
-      mPCMedia->GetRemoteStreamById(aTrack.GetStreamId());
-
-  RefPtr<MediaPipelineReceive> pipeline;
-
-  TrackID numericTrackId = stream->GetNumericTrackId(aTrack.GetTrackId());
-  MOZ_ASSERT(IsTrackIDExplicit(numericTrackId));
-
-  MOZ_MTLOG(ML_DEBUG, __FUNCTION__ << ": Creating pipeline for "
-            << numericTrackId << " -> " << aTrack.GetTrackId());
-
-  if (aTrack.GetMediaType() == SdpMediaSection::kAudio) {
-    pipeline = new MediaPipelineReceiveAudio(
-        mPC->GetHandle(),
-        mPC->GetMainThread().get(),
-        mPC->GetSTSThread(),
-        stream->GetMediaStream()->GetInputStream()->AsSourceStream(),
-        aTrack.GetTrackId(),
-        numericTrackId,
-        aLevel,
-        static_cast<AudioSessionConduit*>(aConduit.get()), // Ugly downcast.
-        aRtpFlow,
-        aRtcpFlow,
-        aFilter);
-  } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
-    pipeline = new MediaPipelineReceiveVideo(
-        mPC->GetHandle(),
-        mPC->GetMainThread().get(),
-        mPC->GetSTSThread(),
-        stream->GetMediaStream()->GetInputStream()->AsSourceStream(),
-        aTrack.GetTrackId(),
-        numericTrackId,
-        aLevel,
-        static_cast<VideoSessionConduit*>(aConduit.get()), // Ugly downcast.
-        aRtpFlow,
-        aRtcpFlow,
-        aFilter);
-  } else {
-    MOZ_ASSERT(false);
-    MOZ_MTLOG(ML_ERROR, "Invalid media type in CreateMediaPipelineReceiving");
-    return NS_ERROR_FAILURE;
-  }
-
-  nsresult rv = pipeline->Init();
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't initialize receiving pipeline");
-    return rv;
-  }
-
-  rv = stream->StorePipeline(aTrack.GetTrackId(),
-                             RefPtr<MediaPipeline>(pipeline));
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't store receiving pipeline " <<
-                        static_cast<unsigned>(rv));
-    return rv;
-  }
-
-  stream->SyncPipeline(pipeline);
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::CreateMediaPipelineSending(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack,
-    size_t aLevel,
-    RefPtr<TransportFlow> aRtpFlow,
-    RefPtr<TransportFlow> aRtcpFlow,
-    nsAutoPtr<MediaPipelineFilter> aFilter,
-    const RefPtr<MediaSessionConduit>& aConduit)
-{
-  nsresult rv;
-
-  // This is checked earlier
-  RefPtr<LocalSourceStreamInfo> stream =
-      mPCMedia->GetLocalStreamById(aTrack.GetStreamId());
-
-  dom::MediaStreamTrack* track =
-    stream->GetTrackById(aTrack.GetTrackId());
-  MOZ_ASSERT(track);
-
-  // Now we have all the pieces, create the pipeline
-  RefPtr<MediaPipelineTransmit> pipeline = new MediaPipelineTransmit(
-      mPC->GetHandle(),
-      mPC->GetMainThread().get(),
-      mPC->GetSTSThread(),
-      track,
-      aTrack.GetTrackId(),
-      aLevel,
-      aConduit,
-      aRtpFlow,
-      aRtcpFlow,
-      aFilter);
-
-  // implement checking for peerIdentity (where failure == black/silence)
-  nsIDocument* doc = mPC->GetWindow()->GetExtantDoc();
-  if (doc) {
-    pipeline->UpdateSinkIdentity_m(track,
-                                   doc->NodePrincipal(),
-                                   mPC->GetPeerIdentity());
-  } else {
-    MOZ_MTLOG(ML_ERROR, "Cannot initialize pipeline without attached doc");
-    return NS_ERROR_FAILURE; // Don't remove this till we know it's safe.
-  }
-
-  rv = pipeline->Init();
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't initialize sending pipeline");
-    return rv;
-  }
-
-  rv = stream->StorePipeline(aTrack.GetTrackId(),
-                             RefPtr<MediaPipeline>(pipeline));
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Couldn't store receiving pipeline " <<
-                        static_cast<unsigned>(rv));
-    return rv;
-  }
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::GetOrCreateAudioConduit(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack,
-    RefPtr<MediaSessionConduit>* aConduitp)
-{
-
-  if (!aTrack.GetNegotiatedDetails()) {
-    MOZ_ASSERT(false, "Track is missing negotiated details");
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  bool receiving = aTrack.GetDirection() == sdp::kRecv;
-
-  RefPtr<AudioSessionConduit> conduit =
-    mPCMedia->GetAudioConduit(aTrackPair.mLevel);
-
-  if (!conduit) {
-    conduit = AudioSessionConduit::Create();
-    if (!conduit) {
-      MOZ_MTLOG(ML_ERROR, "Could not create audio conduit");
-      return NS_ERROR_FAILURE;
-    }
-
-    mPCMedia->AddAudioConduit(aTrackPair.mLevel, conduit);
-  }
-
-  PtrVector<AudioCodecConfig> configs;
-  nsresult rv = NegotiatedDetailsToAudioCodecConfigs(
-      *aTrack.GetNegotiatedDetails(), &configs);
-
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Failed to convert JsepCodecDescriptions to "
-                        "AudioCodecConfigs.");
-    return rv;
-  }
-
-  if (configs.values.empty()) {
-    MOZ_MTLOG(ML_ERROR, "Can't set up a conduit with 0 codecs");
-    return NS_ERROR_FAILURE;
-  }
-
-  if (receiving) {
-    auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
-
-    if (error) {
-      MOZ_MTLOG(ML_ERROR, "ConfigureRecvMediaCodecs failed: " << error);
-      return NS_ERROR_FAILURE;
-    }
-
-    if (!aTrackPair.mSending) {
-      // No send track, but we still need to configure an SSRC for receiver
-      // reports.
-      if (!conduit->SetLocalSSRCs(std::vector<unsigned int>(1,aTrackPair.mRecvonlySsrc))) {
-        MOZ_MTLOG(ML_ERROR, "SetLocalSSRC failed");
-        return NS_ERROR_FAILURE;
-      }
-    }
-  } else {
-    auto ssrcs = aTrack.GetSsrcs();
-    if (!ssrcs.empty()) {
-      if (!conduit->SetLocalSSRCs(ssrcs)) {
-        MOZ_MTLOG(ML_ERROR, "SetLocalSSRCs failed");
-        return NS_ERROR_FAILURE;
-      }
-    }
-
-    conduit->SetLocalCNAME(aTrack.GetCNAME().c_str());
-    conduit->SetLocalMID(aTrackPair.mRtpTransport->mTransportId);
-
-    for (auto value: configs.values) {
-      if (value->mName == "telephone-event") {
-        // we have a telephone event codec, so we need to make sure
-        // the dynamic pt is set properly
-        conduit->SetDtmfPayloadType(value->mType, value->mFreq);
-        break;
-      }
-    }
-
-    auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
-    if (error) {
-      MOZ_MTLOG(ML_ERROR, "ConfigureSendMediaCodec failed: " << error);
-      return NS_ERROR_FAILURE;
-    }
-
-    // Should these be genericized like they are in the video conduit case?
-    const SdpExtmapAttributeList::Extmap* audioLevelExt =
-        aTrack.GetNegotiatedDetails()->GetExt(
-            webrtc::RtpExtension::kAudioLevelUri);
-
-    if (audioLevelExt) {
-      MOZ_MTLOG(ML_DEBUG, "Calling EnableAudioLevelExtension");
-      error = conduit->EnableAudioLevelExtension(true, audioLevelExt->entry);
-
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "EnableAudioLevelExtension failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    }
-
-    const SdpExtmapAttributeList::Extmap* midExt =
-        aTrack.GetNegotiatedDetails()->GetExt(webrtc::RtpExtension::kMIdUri);
-
-    if (midExt) {
-      MOZ_MTLOG(ML_DEBUG, "Calling EnableMIDExtension");
-      error = conduit->EnableMIDExtension(true, midExt->entry);
-
-      if (error) {
-        MOZ_MTLOG(ML_ERROR, "EnableMIDExtension failed: " << error);
-        return NS_ERROR_FAILURE;
-      }
-    }
-  }
-
-  *aConduitp = conduit;
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::GetOrCreateVideoConduit(
-    const JsepTrackPair& aTrackPair,
-    const JsepTrack& aTrack,
-    RefPtr<MediaSessionConduit>* aConduitp)
-{
-  if (!aTrack.GetNegotiatedDetails()) {
-    MOZ_ASSERT(false, "Track is missing negotiated details");
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  bool receiving = aTrack.GetDirection() == sdp::kRecv;
-
-  RefPtr<VideoSessionConduit> conduit =
-    mPCMedia->GetVideoConduit(aTrackPair.mLevel);
-
-  if (!conduit) {
-    conduit = VideoSessionConduit::Create(mPCMedia->mCall);
-    if (!conduit) {
-      MOZ_MTLOG(ML_ERROR, "Could not create video conduit");
-      return NS_ERROR_FAILURE;
-    }
-
-    mPCMedia->AddVideoConduit(aTrackPair.mLevel, conduit);
-  }
-
-  PtrVector<VideoCodecConfig> configs;
-  nsresult rv = NegotiatedDetailsToVideoCodecConfigs(
-      *aTrack.GetNegotiatedDetails(), &configs);
-
-  if (NS_FAILED(rv)) {
-    MOZ_MTLOG(ML_ERROR, "Failed to convert JsepCodecDescriptions to "
-                        "VideoCodecConfigs.");
-    return rv;
-  }
-
-  if (configs.values.empty()) {
-    MOZ_MTLOG(ML_ERROR, "Can't set up a conduit with 0 codecs");
-    return NS_ERROR_FAILURE;
-  }
-
-  const std::vector<uint32_t>* ssrcs;
-
-  const JsepTrackNegotiatedDetails* details = aTrack.GetNegotiatedDetails();
-  std::vector<webrtc::RtpExtension> extmaps;
-  if (details) {
-    // @@NG read extmap from track
-    details->ForEachRTPHeaderExtension(
-      [&extmaps](const SdpExtmapAttributeList::Extmap& extmap)
-    {
-      extmaps.emplace_back(extmap.extensionname,extmap.entry);
-    });
-  }
-
-  if (receiving) {
-    // NOTE(pkerr) - the Call API requires the both local_ssrc and remote_ssrc be
-    // set to a non-zero value or the CreateVideo...Stream call will fail.
-    if (aTrackPair.mSending) {
-      ssrcs = &aTrackPair.mSending->GetSsrcs();
-      if (!ssrcs->empty()) {
-        conduit->SetLocalSSRCs(*ssrcs);
-      }
-    } else {
-      // No send track, but we still need to configure an SSRC for receiver
-      // reports.
-      if (!conduit->SetLocalSSRCs(std::vector<unsigned int>(1,aTrackPair.mRecvonlySsrc))) {
-        MOZ_MTLOG(ML_ERROR, "SetLocalSSRCs failed");
-        return NS_ERROR_FAILURE;
-      }
-    }
-
-    ssrcs = &aTrack.GetSsrcs();
-    // NOTE(pkerr) - this is new behavior. Needed because the CreateVideoReceiveStream
-    // method of the Call API will assert (in debug) and fail if a value is not provided
-    // for the remote_ssrc that will be used by the far-end sender.
-    if (!ssrcs->empty()) {
-      conduit->SetRemoteSSRC(ssrcs->front());
-    }
-
-    if (!extmaps.empty()) {
-      conduit->SetLocalRTPExtensions(false, extmaps);
-    }
-    auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
-    if (error) {
-      MOZ_MTLOG(ML_ERROR, "ConfigureRecvMediaCodecs failed: " << error);
-      return NS_ERROR_FAILURE;
-    }
-  } else { //Create a send side
-    // For now we only expect to have one ssrc per local track.
-    ssrcs = &aTrack.GetSsrcs();
-    if (ssrcs->empty()) {
-      MOZ_MTLOG(ML_ERROR, "No SSRC set for send track");
-      return NS_ERROR_FAILURE;
-    }
-
-    if (!conduit->SetLocalSSRCs(*ssrcs)) {
-      MOZ_MTLOG(ML_ERROR, "SetLocalSSRC failed");
-      return NS_ERROR_FAILURE;
-    }
-
-    conduit->SetLocalCNAME(aTrack.GetCNAME().c_str());
-    conduit->SetLocalMID(aTrackPair.mRtpTransport->mTransportId);
-
-    rv = ConfigureVideoCodecMode(aTrack, *conduit);
-    if (NS_FAILED(rv)) {
-      return rv;
-    }
-
-    if (!extmaps.empty()) {
-      conduit->SetLocalRTPExtensions(true, extmaps);
-    }
-
-    auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
-    if (error) {
-      MOZ_MTLOG(ML_ERROR, "ConfigureSendMediaCodec failed: " << error);
-      return NS_ERROR_FAILURE;
-    }
-  }
-
-  *aConduitp = conduit;
-
-  return NS_OK;
-}
-
-nsresult
-MediaPipelineFactory::ConfigureVideoCodecMode(const JsepTrack& aTrack,
-                                              VideoSessionConduit& aConduit)
-{
-  RefPtr<LocalSourceStreamInfo> stream =
-    mPCMedia->GetLocalStreamByTrackId(aTrack.GetTrackId());
-
-  //get video track
-  RefPtr<mozilla::dom::MediaStreamTrack> track =
-    stream->GetTrackById(aTrack.GetTrackId());
-
-  RefPtr<mozilla::dom::VideoStreamTrack> videotrack =
-    track->AsVideoStreamTrack();
-
-  if (!videotrack) {
-    MOZ_MTLOG(ML_ERROR, "video track not available");
-    return NS_ERROR_FAILURE;
-  }
-
-  dom::MediaSourceEnum source = videotrack->GetSource().GetMediaSource();
-  webrtc::VideoCodecMode mode = webrtc::kRealtimeVideo;
-  switch (source) {
-    case dom::MediaSourceEnum::Browser:
-    case dom::MediaSourceEnum::Screen:
-    case dom::MediaSourceEnum::Application:
-    case dom::MediaSourceEnum::Window:
-      mode = webrtc::kScreensharing;
-      break;
-
-    case dom::MediaSourceEnum::Camera:
-    default:
-      mode = webrtc::kRealtimeVideo;
-      break;
-  }
-
-  auto error = aConduit.ConfigureCodecMode(mode);
-  if (error) {
-    MOZ_MTLOG(ML_ERROR, "ConfigureCodecMode failed: " << error);
-    return NS_ERROR_FAILURE;
-  }
-
-  return NS_OK;
-}
-
-
-} // namespace mozilla
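
The deleted factory above owned the per-m-section glue: converting negotiated JsepCodecDescriptions into conduit configs, creating or reusing TransportFlows, and building SSRC-based filters for bundled transports. Since this changeset moves that logic rather than discarding it, here is a minimal sketch of the filter population that the removed GetTransportParameters() performed for a receiving track; the helper name is hypothetical, but the types and accessors are the ones used in the code above.

    #include "MediaPipelineFilter.h"
    #include "signaling/src/jsep/JsepTrack.h"

    // Hypothetical standalone helper, not part of this patch.
    static void
    AddBundleFilterCriteria(const mozilla::JsepTrack& aTrack,
                            mozilla::MediaPipelineFilter* aFilter)
    {
      // Remote SSRCs identify which RTP packets (and RTCP sender reports)
      // belong to this pipeline when several m-sections share one transport.
      for (unsigned int ssrc : aTrack.GetSsrcs()) {
        aFilter->AddRemoteSSRC(ssrc);
      }

      // Unique payload types are the last-ditch fallback when no SSRC matches.
      if (aTrack.GetNegotiatedDetails()) {
        for (unsigned char pt :
             aTrack.GetNegotiatedDetails()->GetUniquePayloadTypes()) {
          aFilter->AddUniquePT(pt);
        }
      }
    }
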
deleted file mode 100644
--- a/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.h
+++ /dev/null
@@ -1,78 +0,0 @@
-/* This Source Code Form is subject to the terms of the Mozilla Public
- * License, v. 2.0. If a copy of the MPL was not distributed with this file,
- * You can obtain one at http://mozilla.org/MPL/2.0/. */
-#ifndef _MEDIAPIPELINEFACTORY_H_
-#define _MEDIAPIPELINEFACTORY_H_
-
-#include "MediaConduitInterface.h"
-#include "PeerConnectionMedia.h"
-#include "transportflow.h"
-
-#include "signaling/src/jsep/JsepTrack.h"
-#include "mozilla/RefPtr.h"
-#include "mozilla/UniquePtr.h"
-
-namespace mozilla {
-
-class MediaPipelineFactory
-{
-public:
-  explicit MediaPipelineFactory(PeerConnectionMedia* aPCMedia)
-      : mPCMedia(aPCMedia), mPC(aPCMedia->GetPC())
-  {
-  }
-
-  nsresult CreateOrUpdateMediaPipeline(const JsepTrackPair& aTrackPair,
-                                       const JsepTrack& aTrack);
-
-private:
-  nsresult CreateMediaPipelineReceiving(
-      const JsepTrackPair& aTrackPair,
-      const JsepTrack& aTrack,
-      size_t level,
-      RefPtr<TransportFlow> aRtpFlow,
-      RefPtr<TransportFlow> aRtcpFlow,
-      nsAutoPtr<MediaPipelineFilter> filter,
-      const RefPtr<MediaSessionConduit>& aConduit);
-
-  nsresult CreateMediaPipelineSending(
-      const JsepTrackPair& aTrackPair,
-      const JsepTrack& aTrack,
-      size_t level,
-      RefPtr<TransportFlow> aRtpFlow,
-      RefPtr<TransportFlow> aRtcpFlow,
-      nsAutoPtr<MediaPipelineFilter> filter,
-      const RefPtr<MediaSessionConduit>& aConduit);
-
-  nsresult GetOrCreateAudioConduit(const JsepTrackPair& aTrackPair,
-                                   const JsepTrack& aTrack,
-                                   RefPtr<MediaSessionConduit>* aConduitp);
-
-  nsresult GetOrCreateVideoConduit(const JsepTrackPair& aTrackPair,
-                                   const JsepTrack& aTrack,
-                                   RefPtr<MediaSessionConduit>* aConduitp);
-
-  nsresult CreateOrGetTransportFlow(size_t aLevel, bool aIsRtcp,
-                                    const JsepTransport& transport,
-                                    RefPtr<TransportFlow>* out);
-
-  nsresult GetTransportParameters(const JsepTrackPair& aTrackPair,
-                                  const JsepTrack& aTrack,
-                                  size_t* aLevelOut,
-                                  RefPtr<TransportFlow>* aRtpOut,
-                                  RefPtr<TransportFlow>* aRtcpOut,
-                                  nsAutoPtr<MediaPipelineFilter>* aFilterOut);
-
-  nsresult ConfigureVideoCodecMode(const JsepTrack& aTrack,
-                                   VideoSessionConduit& aConduit);
-
-private:
-  // Not owned, and assumed to exist as long as the factory.
-  // The factory is a transient object, so this is fairly easy.
-  PeerConnectionMedia* mPCMedia;
-  PeerConnectionImpl* mPC;
-};
-
-} // namespace mozilla
-
-#endif
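
With the header gone too, the factory interface disappears entirely. It was a transient object built per negotiation update; reconstructed from the interface above, its driver looked roughly like the sketch below (the loop and variable names are assumptions, not code from this tree), and the equivalent wiring now happens per TransceiverImpl elsewhere in this changeset.

    // Sketch only: how PeerConnectionMedia drove the removed factory.
    // negotiatedPairs stands in for the JSEP session's negotiated track pairs.
    mozilla::MediaPipelineFactory factory(pcMedia);  // pcMedia: PeerConnectionMedia*
    for (const mozilla::JsepTrackPair& pair : negotiatedPairs) {
      if (pair.mSending) {
        factory.CreateOrUpdateMediaPipeline(pair, *pair.mSending);
      }
      if (pair.mReceiving) {
        factory.CreateOrUpdateMediaPipeline(pair, *pair.mReceiving);
      }
    }
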
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.cpp
@@ -28,31 +28,35 @@
 #include "nsISocketTransportService.h"
 #include "nsIConsoleService.h"
 #include "nsThreadUtils.h"
 #include "nsIPrefService.h"
 #include "nsIPrefBranch.h"
 #include "nsProxyRelease.h"
 #include "nsQueryObject.h"
 #include "prtime.h"
+#include "MediaEngine.h"
 
 #include "AudioConduit.h"
 #include "VideoConduit.h"
 #include "runnable_utils.h"
 #include "PeerConnectionCtx.h"
 #include "PeerConnectionImpl.h"
 #include "PeerConnectionMedia.h"
+#include "RemoteTrackSource.h"
 #include "nsDOMDataChannelDeclarations.h"
 #include "dtlsidentity.h"
 #include "signaling/src/sdp/SdpAttribute.h"
 
 #include "signaling/src/jsep/JsepTrack.h"
 #include "signaling/src/jsep/JsepSession.h"
 #include "signaling/src/jsep/JsepSessionImpl.h"
 
+#include "signaling/src/mediapipeline/MediaPipeline.h"
+
 #include "mozilla/IntegerPrintfMacros.h"
 #include "mozilla/Sprintf.h"
 
 #ifdef XP_WIN
 // We need to undef the MS macro for nsIDocument::CreateEvent
 #ifdef CreateEvent
 #undef CreateEvent
 #endif
@@ -77,24 +81,24 @@
 #include "nsIURLParser.h"
 #include "nsIDOMDataChannel.h"
 #include "NullPrincipal.h"
 #include "mozilla/PeerIdentity.h"
 #include "mozilla/dom/RTCCertificate.h"
 #include "mozilla/dom/RTCConfigurationBinding.h"
 #include "mozilla/dom/RTCDTMFSenderBinding.h"
 #include "mozilla/dom/RTCDTMFToneChangeEvent.h"
+#include "mozilla/dom/RTCRtpReceiverBinding.h"
 #include "mozilla/dom/RTCRtpSenderBinding.h"
 #include "mozilla/dom/RTCStatsReportBinding.h"
 #include "mozilla/dom/RTCPeerConnectionBinding.h"
 #include "mozilla/dom/PeerConnectionImplBinding.h"
 #include "mozilla/dom/DataChannelBinding.h"
 #include "mozilla/dom/PerformanceTiming.h"
 #include "mozilla/dom/PluginCrashedEvent.h"
-#include "MediaStreamList.h"
 #include "MediaStreamTrack.h"
 #include "AudioStreamTrack.h"
 #include "VideoStreamTrack.h"
 #include "nsIScriptGlobalObject.h"
 #include "MediaStreamGraph.h"
 #include "DOMMediaStream.h"
 #include "rlogconnector.h"
 #include "WebrtcGlobalInformation.h"
@@ -241,16 +245,35 @@ RTCStatsQuery::RTCStatsQuery(bool intern
 
 RTCStatsQuery::~RTCStatsQuery() {
   MOZ_ASSERT(NS_IsMainThread());
 }
 
 
 NS_IMPL_ISUPPORTS0(PeerConnectionImpl)
 
+already_AddRefed<PeerConnectionImpl>
+PeerConnectionImpl::Constructor(const dom::GlobalObject& aGlobal, ErrorResult& rv)
+{
+  RefPtr<PeerConnectionImpl> pc = new PeerConnectionImpl(&aGlobal);
+
+  CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc.get());
+
+  return pc.forget();
+}
+
+PeerConnectionImpl* PeerConnectionImpl::CreatePeerConnection()
+{
+  PeerConnectionImpl *pc = new PeerConnectionImpl();
+
+  CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc);
+
+  return pc;
+}
+
 bool
 PeerConnectionImpl::WrapObject(JSContext* aCx,
                                JS::Handle<JSObject*> aGivenProto,
                                JS::MutableHandle<JSObject*> aReflector)
 {
   return PeerConnectionImplBinding::Wrap(aCx, this, aGivenProto, aReflector);
 }
 
@@ -311,17 +334,16 @@ PeerConnectionImpl::PeerConnectionImpl(c
   , mMedia(nullptr)
   , mUuidGen(MakeUnique<PCUuidGenerator>())
   , mIceRestartCount(0)
   , mIceRollbackCount(0)
   , mHaveConfiguredCodecs(false)
   , mHaveDataStream(false)
   , mAddCandidateErrorCount(0)
   , mTrickle(true) // TODO(ekr@rtfm.com): Use pref
-  , mNegotiationNeeded(false)
   , mPrivateWindow(false)
   , mActiveOnWindow(false)
   , mPacketDumpEnabled(false)
   , mPacketDumpFlagsMutex("Packet dump flags mutex")
 {
   MOZ_ASSERT(NS_IsMainThread());
   auto log = RLogConnector::CreateInstance();
   if (aGlobal) {
@@ -386,48 +408,28 @@ PeerConnectionImpl::~PeerConnectionImpl(
   // running at once
 
   // Right now, we delete PeerConnectionCtx at XPCOM shutdown only, but we
   // probably want to shut it down more aggressively to save memory.  We
   // could shut down here when there are no uses.  It might be more optimal
   // to release off a timer (and XPCOM Shutdown) to avoid churn
 }
 
-already_AddRefed<DOMMediaStream>
+OwningNonNull<DOMMediaStream>
 PeerConnectionImpl::MakeMediaStream()
 {
   MediaStreamGraph* graph =
     MediaStreamGraph::GetInstance(MediaStreamGraph::AUDIO_THREAD_DRIVER, GetWindow());
 
   RefPtr<DOMMediaStream> stream =
     DOMMediaStream::CreateSourceStreamAsInput(GetWindow(), graph);
 
   CSFLogDebug(LOGTAG, "Created media stream %p, inner: %p", stream.get(), stream->GetInputStream());
 
-  return stream.forget();
-}
-
-nsresult
-PeerConnectionImpl::CreateRemoteSourceStreamInfo(RefPtr<RemoteSourceStreamInfo>*
-                                                 aInfo,
-                                                 const std::string& aStreamID)
-{
-  MOZ_ASSERT(aInfo);
-  PC_AUTO_ENTER_API_CALL_NO_CHECK();
-
-  RefPtr<DOMMediaStream> stream = MakeMediaStream();
-  if (!stream) {
-    return NS_ERROR_FAILURE;
-  }
-
-  RefPtr<RemoteSourceStreamInfo> remote;
-  remote = new RemoteSourceStreamInfo(stream.forget(), mMedia, aStreamID);
-  *aInfo = remote;
-
-  return NS_OK;
+  return *stream;
 }
 
 /**
  * In JS, an RTCConfiguration looks like this:
  *
  * { "iceServers": [ { url:"stun:stun.example.org" },
  *                   { url:"turn:turn.example.org?transport=udp",
  *                     username: "jib", credential:"mypass"} ] }
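
One easy-to-misread change in the hunk above is MakeMediaStream switching its return type from already_AddRefed<DOMMediaStream> to OwningNonNull<DOMMediaStream>. A minimal sketch of the two ownership conventions, using a hypothetical refcounted Widget type (OwningNonNull, RefPtr and the refcounting macro are real; everything else is made up):

    #include "mozilla/OwningNonNull.h"
    #include "mozilla/RefPtr.h"
    #include "nsISupportsImpl.h"

    // Hypothetical refcounted type, for illustration only.
    class Widget final {
     public:
      NS_INLINE_DECL_REFCOUNTING(Widget)
     private:
      ~Widget() = default;
    };

    // Old convention: hand the caller an already-held reference to adopt.
    already_AddRefed<Widget> MakeWidgetOld() {
      RefPtr<Widget> w = new Widget();
      return w.forget();
    }

    // New convention (as in MakeMediaStream above): OwningNonNull takes its
    // own strong reference when constructed from a T&, so returning *w is
    // safe even though the local RefPtr is released on return.
    mozilla::OwningNonNull<Widget> MakeWidgetNew() {
      RefPtr<Widget> w = new Widget();
      return *w;
    }
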
@@ -1106,31 +1108,24 @@ nsresult
 PeerConnectionImpl::GetDatachannelParameters(
     uint32_t* channels,
     uint16_t* localport,
     uint16_t* remoteport,
     uint32_t* remotemaxmessagesize,
     bool*     mmsset,
     uint16_t* level) const {
 
-  auto trackPairs = mJsepSession->GetNegotiatedTrackPairs();
-  for (auto& trackPair : trackPairs) {
-    bool sendDataChannel =
-      trackPair.mSending &&
-      trackPair.mSending->GetMediaType() == SdpMediaSection::kApplication;
-    bool recvDataChannel =
-      trackPair.mReceiving &&
-      trackPair.mReceiving->GetMediaType() == SdpMediaSection::kApplication;
-    (void)recvDataChannel;
-    MOZ_ASSERT(sendDataChannel == recvDataChannel);
-
-    if (sendDataChannel) {
+  for (const auto& transceiver : mJsepSession->GetTransceivers()) {
+    bool dataChannel =
+      transceiver->GetMediaType() == SdpMediaSection::kApplication;
+
+    if (dataChannel && transceiver->mSendTrack.GetNegotiatedDetails()) {
       // This will release assert if there is no such index, and that's ok
       const JsepTrackEncoding& encoding =
-        trackPair.mSending->GetNegotiatedDetails()->GetEncoding(0);
+        transceiver->mSendTrack.GetNegotiatedDetails()->GetEncoding(0);
 
       if (encoding.GetCodecs().empty()) {
         CSFLogError(LOGTAG, "%s: Negotiated m=application with no codec. "
                             "This is likely to be broken.",
                             __FUNCTION__);
         return NS_ERROR_FAILURE;
       }
 
@@ -1160,81 +1155,134 @@ PeerConnectionImpl::GetDatachannelParame
         *localport =
           static_cast<const JsepApplicationCodecDescription*>(codec)->mLocalPort;
         *remoteport =
           static_cast<const JsepApplicationCodecDescription*>(codec)->mRemotePort;
         *remotemaxmessagesize = static_cast<const JsepApplicationCodecDescription*>
           (codec)->mRemoteMaxMessageSize;
         *mmsset = static_cast<const JsepApplicationCodecDescription*>
           (codec)->mRemoteMMSSet;
-        if (trackPair.HasBundleLevel()) {
-          *level = static_cast<uint16_t>(trackPair.BundleLevel());
+        if (transceiver->HasBundleLevel()) {
+          *level = static_cast<uint16_t>(transceiver->BundleLevel());
         } else {
-          *level = static_cast<uint16_t>(trackPair.mLevel);
+          *level = static_cast<uint16_t>(transceiver->GetLevel());
         }
         return NS_OK;
       }
     }
   }
 
   *channels = 0;
   *localport = 0;
   *remoteport = 0;
   *remotemaxmessagesize = 0;
   *mmsset = false;
   *level = 0;
   return NS_ERROR_FAILURE;
 }
 
-/* static */
-void
-PeerConnectionImpl::DeferredAddTrackToJsepSession(
-    const std::string& pcHandle,
-    SdpMediaSection::MediaType type,
-    const std::string& streamId,
-    const std::string& trackId)
-{
-  PeerConnectionWrapper wrapper(pcHandle);
-
-  if (wrapper.impl()) {
-    if (!PeerConnectionCtx::GetInstance()->isReady()) {
-      MOZ_CRASH("Why is DeferredAddTrackToJsepSession being executed when the "
-                "PeerConnectionCtx isn't ready?");
-    }
-    wrapper.impl()->AddTrackToJsepSession(type, streamId, trackId);
-  }
-}
-
 nsresult
-PeerConnectionImpl::AddTrackToJsepSession(SdpMediaSection::MediaType type,
-                                          const std::string& streamId,
-                                          const std::string& trackId)
+PeerConnectionImpl::AddRtpTransceiverToJsepSession(
+    RefPtr<JsepTransceiver>& transceiver)
 {
   nsresult res = ConfigureJsepSessionCodecs();
   if (NS_FAILED(res)) {
     CSFLogError(LOGTAG, "Failed to configure codecs");
     return res;
   }
 
-  res = mJsepSession->AddTrack(
-      new JsepTrack(type, streamId, trackId, sdp::kSend));
+  res = mJsepSession->AddTransceiver(transceiver);
 
   if (NS_FAILED(res)) {
     std::string errorString = mJsepSession->GetLastError();
     CSFLogError(LOGTAG, "%s (%s) : pc = %s, error = %s",
                 __FUNCTION__,
-                type == SdpMediaSection::kAudio ? "audio" : "video",
+                transceiver->GetMediaType() == SdpMediaSection::kAudio ?
+                  "audio" : "video",
                 mHandle.c_str(),
                 errorString.c_str());
     return NS_ERROR_FAILURE;
   }
 
   return NS_OK;
 }
 
+already_AddRefed<TransceiverImpl>
+PeerConnectionImpl::CreateTransceiverImpl(
+    JsepTransceiver* aJsepTransceiver,
+    dom::MediaStreamTrack* aSendTrack,
+    ErrorResult& aRv)
+{
+  // TODO: Maybe this should be done in PeerConnectionMedia?
+  if (aSendTrack) {
+    aSendTrack->AddPrincipalChangeObserver(this);
+  }
+
+  OwningNonNull<DOMMediaStream> receiveStream =
+    CreateReceiveStreamWithTrack(aJsepTransceiver->GetMediaType());
+
+  RefPtr<TransceiverImpl> transceiverImpl;
+
+  aRv = mMedia->AddTransceiver(aJsepTransceiver,
+                               *receiveStream,
+                               aSendTrack,
+                               &transceiverImpl);
+
+  return transceiverImpl.forget();
+}
+
+already_AddRefed<TransceiverImpl>
+PeerConnectionImpl::CreateTransceiverImpl(
+    const nsAString& aKind,
+    dom::MediaStreamTrack* aSendTrack,
+    ErrorResult& jrv)
+{
+  SdpMediaSection::MediaType type;
+  if (aKind.EqualsASCII("audio")) {
+    type = SdpMediaSection::MediaType::kAudio;
+  } else if (aKind.EqualsASCII("video")) {
+    type = SdpMediaSection::MediaType::kVideo;
+  } else {
+    MOZ_ASSERT(false);
+    jrv = NS_ERROR_INVALID_ARG;
+    return nullptr;
+  }
+
+  RefPtr<JsepTransceiver> jsepTransceiver = new JsepTransceiver(type);
+
+  RefPtr<TransceiverImpl> transceiverImpl =
+    CreateTransceiverImpl(jsepTransceiver, aSendTrack, jrv);
+
+  if (jrv.Failed()) {
+    // Would be nice if we could peek at the rv without stealing it, so we
+    // could log...
+    CSFLogError(LOGTAG, "%s: failed", __FUNCTION__);
+    return nullptr;
+  }
+
+  // Do this last, since it is not possible to roll back.
+  nsresult rv = AddRtpTransceiverToJsepSession(jsepTransceiver);
+  if (NS_FAILED(rv)) {
+    CSFLogError(LOGTAG, "%s: AddRtpTransceiverToJsepSession failed, res=%u",
+                         __FUNCTION__,
+                         static_cast<unsigned>(rv));
+    jrv = rv;
+    return nullptr;
+  }
+
+  return transceiverImpl.forget();
+}
+
+bool
+PeerConnectionImpl::CheckNegotiationNeeded(ErrorResult &rv)
+{
+  MOZ_ASSERT(mSignalingState == PCImplSignalingState::SignalingStable);
+  return mJsepSession->CheckNegotiationNeeded();
+}
+
 nsresult
 PeerConnectionImpl::InitializeDataChannel()
 {
   PC_AUTO_ENTER_API_CALL(false);
   CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
 
   uint32_t channels = 0;
   uint16_t localport = 0;
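
CreateTransceiverImpl (added in the hunk above) is now the single entry point for minting a transceiver: it builds the JsepTransceiver and the TransceiverImpl first, where failure can still be reported cleanly, and only then commits the transceiver to the JSEP session, because that last step cannot be rolled back. The kind handling at the top reduces to a small mapping; as a standalone sketch (the helper name is hypothetical, the enum values come from the code above, and the includes already present in PeerConnectionImpl.cpp are assumed):

    // Illustrative helper: map the WebIDL "kind" string onto the SDP media
    // type that CreateTransceiverImpl needs; anything else is a caller bug.
    static bool
    KindToMediaType(const nsAString& aKind,
                    mozilla::SdpMediaSection::MediaType* aType)
    {
      if (aKind.EqualsASCII("audio")) {
        *aType = mozilla::SdpMediaSection::MediaType::kAudio;
        return true;
      }
      if (aKind.EqualsASCII("video")) {
        *aType = mozilla::SdpMediaSection::MediaType::kVideo;
        return true;
      }
      return false;
    }
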
@@ -1324,42 +1372,19 @@ PeerConnectionImpl::CreateDataChannel(co
     (aType == DataChannelConnection::PARTIAL_RELIABLE_TIMED ? aMaxTime : 0),
     nullptr, nullptr, aExternalNegotiated, aStream
   );
   NS_ENSURE_TRUE(dataChannel,NS_ERROR_FAILURE);
 
   CSFLogDebug(LOGTAG, "%s: making DOMDataChannel", __FUNCTION__);
 
   if (!mHaveDataStream) {
-
-    std::string streamId;
-    std::string trackId;
-
-    // Generate random ids because these aren't linked to any local streams.
-    if (!mUuidGen->Generate(&streamId)) {
-      return NS_ERROR_FAILURE;
-    }
-    if (!mUuidGen->Generate(&trackId)) {
-      return NS_ERROR_FAILURE;
-    }
-
-    RefPtr<JsepTrack> track(new JsepTrack(
-          mozilla::SdpMediaSection::kApplication,
-          streamId,
-          trackId,
-          sdp::kSend));
-
-    rv = mJsepSession->AddTrack(track);
-    if (NS_FAILED(rv)) {
-      CSFLogError(LOGTAG, "%s: Failed to add application track.",
-                          __FUNCTION__);
-      return rv;
-    }
+    mJsepSession->AddTransceiver(
+        new JsepTransceiver(SdpMediaSection::MediaType::kApplication));
     mHaveDataStream = true;
-    OnNegotiationNeeded();
   }
   nsIDOMDataChannel *retval;
   rv = NS_NewDOMDataChannel(dataChannel.forget(), mWindow, &retval);
   if (NS_FAILED(rv)) {
     return rv;
   }
   *aRetval = static_cast<nsDOMDataChannel*>(retval);
   return NS_OK;
@@ -1545,20 +1570,20 @@ PeerConnectionImpl::CreateOffer(const Js
         error = kInternalError;
     }
     std::string errorString = mJsepSession->GetLastError();
 
     CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
                 __FUNCTION__, mHandle.c_str(), errorString.c_str());
     pco->OnCreateOfferError(error, ObString(errorString.c_str()), rv);
   } else {
+    UpdateSignalingState();
     pco->OnCreateOfferSuccess(ObString(offer.c_str()), rv);
   }
 
-  UpdateSignalingState();
   return NS_OK;
 }
 
 NS_IMETHODIMP
 PeerConnectionImpl::CreateAnswer()
 {
   PC_AUTO_ENTER_API_CALL(true);
 
@@ -1604,21 +1629,20 @@ PeerConnectionImpl::CreateAnswer()
         error = kInternalError;
     }
     std::string errorString = mJsepSession->GetLastError();
 
     CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
                 __FUNCTION__, mHandle.c_str(), errorString.c_str());
     pco->OnCreateAnswerError(error, ObString(errorString.c_str()), rv);
   } else {
+    UpdateSignalingState();
     pco->OnCreateAnswerSuccess(ObString(answer.c_str()), rv);
   }
 
-  UpdateSignalingState();
-
   return NS_OK;
 }
 
 nsresult
 PeerConnectionImpl::SetupIceRestart()
 {
   if (mMedia->IsIceRestarting()) {
     CSFLogError(LOGTAG, "%s: ICE already restarting",
@@ -1738,20 +1762,20 @@ PeerConnectionImpl::SetLocalDescription(
         error = kInternalError;
     }
 
     std::string errorString = mJsepSession->GetLastError();
     CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
                 __FUNCTION__, mHandle.c_str(), errorString.c_str());
     pco->OnSetLocalDescriptionError(error, ObString(errorString.c_str()), rv);
   } else {
+    UpdateSignalingState(sdpType == mozilla::kJsepSdpRollback);
     pco->OnSetLocalDescriptionSuccess(rv);
   }
 
-  UpdateSignalingState(sdpType == mozilla::kJsepSdpRollback);
   return NS_OK;
 }
 
 static void DeferredSetRemote(const std::string& aPcHandle,
                               int32_t aAction,
                               const std::string& aSdp) {
   PeerConnectionWrapper wrapper(aPcHandle);
 
@@ -1759,244 +1783,42 @@ static void DeferredSetRemote(const std:
     if (!PeerConnectionCtx::GetInstance()->isReady()) {
       MOZ_CRASH("Why is DeferredSetRemote being executed when the "
                 "PeerConnectionCtx isn't ready?");
     }
     wrapper.impl()->SetRemoteDescription(aAction, aSdp.c_str());
   }
 }
 
-static void StartTrack(MediaStream* aSource,
-                       TrackID aTrackId,
-                       nsAutoPtr<MediaSegment>&& aSegment) {
-  class Message : public ControlMessage {
-   public:
-    Message(MediaStream* aStream,
-            TrackID aTrack,
-            nsAutoPtr<MediaSegment>&& aSegment)
-      : ControlMessage(aStream),
-        track_id_(aTrack),
-        segment_(aSegment) {}
-
-    virtual void Run() override {
-      TrackRate track_rate = segment_->GetType() == MediaSegment::AUDIO ?
-        WEBRTC_DEFAULT_SAMPLE_RATE : mStream->GraphRate();
-      StreamTime current_end = mStream->GetTracksEnd();
-      TrackTicks current_ticks =
-        mStream->TimeToTicksRoundUp(track_rate, current_end);
-
-      // Add a track 'now' to avoid possible underrun, especially if we add
-      // a track "later".
-
-      if (current_end != 0L) {
-        CSFLogDebug(LOGTAG, "added track @ %u -> %f",
-                    static_cast<unsigned>(current_end),
-                    mStream->StreamTimeToSeconds(current_end));
-      }
-
-      // To avoid assertions, we need to insert a dummy segment that covers up
-      // to the "start" time for the track
-      segment_->AppendNullData(current_ticks);
-      if (segment_->GetType() == MediaSegment::AUDIO) {
-        mStream->AsSourceStream()->AddAudioTrack(
-            track_id_,
-            WEBRTC_DEFAULT_SAMPLE_RATE,
-            0,
-            static_cast<AudioSegment*>(segment_.forget()));
-      } else {
-        mStream->AsSourceStream()->AddTrack(track_id_, 0, segment_.forget());
-      }
-    }
-   private:
-    TrackID track_id_;
-    nsAutoPtr<MediaSegment> segment_;
-  };
-
-  aSource->GraphImpl()->AppendMessage(
-      MakeUnique<Message>(aSource, aTrackId, Move(aSegment)));
-  CSFLogInfo(LOGTAG, "Dispatched track-add for track id %u on stream %p",
-             aTrackId, aSource);
-}
-
-
-nsresult
-PeerConnectionImpl::CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
+void
+PeerConnectionImpl::FireOnTrackEvents(RefPtr<PeerConnectionObserver>& aPco)
 {
-  JSErrorResult jrv;
-
-  std::vector<RefPtr<JsepTrack>> newTracks =
-    mJsepSession->GetRemoteTracksAdded();
-
-  // Group new tracks by stream id
-  std::map<std::string, std::vector<RefPtr<JsepTrack>>> tracksByStreamId;
-  for (auto track : newTracks) {
-    if (track->GetMediaType() == mozilla::SdpMediaSection::kApplication) {
+  for (auto& track : mJsepSession->GetRemoteTracksAdded()) {
+    if (track.GetMediaType() == mozilla::SdpMediaSection::kApplication) {
       // Ignore datachannel
       continue;
     }
 
-    tracksByStreamId[track->GetStreamId()].push_back(track);
-  }
-
-  for (auto& id : tracksByStreamId) {
-    std::string streamId = id.first;
-    std::vector<RefPtr<JsepTrack>>& tracks = id.second;
-
-    bool newStream = false;
-    RefPtr<RemoteSourceStreamInfo> info =
-      mMedia->GetRemoteStreamById(streamId);
-    if (!info) {
-      newStream = true;
-      nsresult nrv = CreateRemoteSourceStreamInfo(&info, streamId);
-      if (NS_FAILED(nrv)) {
-        aPco->OnSetRemoteDescriptionError(
-            kInternalError,
-            ObString("CreateRemoteSourceStreamInfo failed"),
-            jrv);
-        return nrv;
-      }
-
-      nrv = mMedia->AddRemoteStream(info);
-      if (NS_FAILED(nrv)) {
-        aPco->OnSetRemoteDescriptionError(
-            kInternalError,
-            ObString("AddRemoteStream failed"),
-            jrv);
-        return nrv;
-      }
-
-      CSFLogDebug(LOGTAG, "Added remote stream %s", info->GetId().c_str());
-
-      info->GetMediaStream()->AssignId(NS_ConvertUTF8toUTF16(streamId.c_str()));
-      info->GetMediaStream()->SetLogicalStreamStartTime(
-          info->GetMediaStream()->GetPlaybackStream()->GetCurrentTime());
-    }
-
-    Sequence<OwningNonNull<DOMMediaStream>> streams;
-    if (!streams.AppendElement(OwningNonNull<DOMMediaStream>(
-            *info->GetMediaStream()),
-            fallible)) {
-      MOZ_ASSERT(false);
-      return NS_ERROR_FAILURE;
-    }
-
-    // Set the principal used for creating the tracks. This makes the stream
-    // data (audio/video samples) accessible to the receiving page. We're
-    // only certain that privacy hasn't been requested if we're connected.
-    nsCOMPtr<nsIPrincipal> principal;
-    nsIDocument* doc = GetWindow()->GetExtantDoc();
-    MOZ_ASSERT(doc);
-    if (mDtlsConnected && !PrivacyRequested()) {
-      principal = doc->NodePrincipal();
-    } else {
-      // we're either certain that we need isolation for the streams, OR
-      // we're not sure and we can fix the stream in SetDtlsConnected
-      principal =  NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
-    }
-
-    // We need to select unique ids, just use max + 1
-    TrackID maxTrackId = 0;
-    {
-      nsTArray<RefPtr<dom::MediaStreamTrack>> domTracks;
-      info->GetMediaStream()->GetTracks(domTracks);
-      for (auto& track : domTracks) {
-        maxTrackId = std::max(maxTrackId, track->mTrackID);
-      }
+    MOZ_ASSERT(!track.GetTrackId().empty());
+
+    nsString trackId = NS_ConvertUTF8toUTF16(track.GetTrackId().c_str());
+
+    dom::Sequence<nsString> streamIds;
+    for (const std::string& streamId : track.GetStreamIds()) {
+      // If this fails, oh well.
+      streamIds.AppendElement(
+          NS_ConvertASCIItoUTF16(streamId.c_str()), fallible);
     }
 
-    for (RefPtr<JsepTrack>& track : tracks) {
-      std::string webrtcTrackId(track->GetTrackId());
-      if (!info->HasTrack(webrtcTrackId)) {
-        RefPtr<RemoteTrackSource> source =
-          new RemoteTrackSource(principal, nsString());
-        TrackID trackID = ++maxTrackId;
-        RefPtr<MediaStreamTrack> domTrack;
-        nsAutoPtr<MediaSegment> segment;
-        if (track->GetMediaType() == SdpMediaSection::kAudio) {
-          domTrack =
-            info->GetMediaStream()->CreateDOMTrack(trackID,
-                                                   MediaSegment::AUDIO,
-                                                   source);
-          info->GetMediaStream()->AddTrackInternal(domTrack);
-          segment = new AudioSegment;
-        } else {
-          domTrack =
-            info->GetMediaStream()->CreateDOMTrack(trackID,
-                                                   MediaSegment::VIDEO,
-                                                   source);
-          info->GetMediaStream()->AddTrackInternal(domTrack);
-          segment = new VideoSegment;
-        }
-
-        StartTrack(info->GetMediaStream()->GetInputStream()->AsSourceStream(),
-                   trackID, Move(segment));
-        info->AddTrack(webrtcTrackId, domTrack);
-        CSFLogDebug(LOGTAG, "Added remote track %s/%s",
-                    info->GetId().c_str(), webrtcTrackId.c_str());
-
-        domTrack->AssignId(NS_ConvertUTF8toUTF16(webrtcTrackId.c_str()));
-        aPco->OnAddTrack(*domTrack, streams, jrv);
-        if (jrv.Failed()) {
-          CSFLogError(LOGTAG, ": OnAddTrack(%s) failed! Error: %u",
-                      webrtcTrackId.c_str(),
-                      jrv.ErrorCodeAsInt());
-        }
-      }
-    }
-
-    if (newStream) {
-      aPco->OnAddStream(*info->GetMediaStream(), jrv);
-      if (jrv.Failed()) {
-        CSFLogError(LOGTAG, ": OnAddStream() failed! Error: %u",
-                    jrv.ErrorCodeAsInt());
-      }
-    }
-  }
-  return NS_OK;
-}
-
-void
-PeerConnectionImpl::RemoveOldRemoteTracks(RefPtr<PeerConnectionObserver>& aPco)
-{
-  JSErrorResult jrv;
-
-  std::vector<RefPtr<JsepTrack>> removedTracks =
-    mJsepSession->GetRemoteTracksRemoved();
-
-  for (auto& removedTrack : removedTracks) {
-    const std::string& streamId = removedTrack->GetStreamId();
-    const std::string& trackId = removedTrack->GetTrackId();
-
-    if (removedTrack->GetMediaType() == SdpMediaSection::kApplication) {
-      // TODO do we need to notify content somehow here?
-      continue;
-    }
-
-    RefPtr<RemoteSourceStreamInfo> info = mMedia->GetRemoteStreamById(streamId);
-    if (!info) {
-      MOZ_ASSERT(false, "A stream/track was removed that wasn't in PCMedia. "
-                        "This is a bug.");
-      continue;
-    }
-
-    mMedia->RemoveRemoteTrack(streamId, trackId);
-
-    DOMMediaStream* stream = info->GetMediaStream();
-    nsTArray<RefPtr<MediaStreamTrack>> tracks;
-    stream->GetTracks(tracks);
-    for (auto& track : tracks) {
-      if (PeerConnectionImpl::GetTrackId(*track) == trackId) {
-        aPco->OnRemoveTrack(*track, jrv);
-        break;
-      }
-    }
-
-    // We might be holding the last ref, but that's ok.
-    if (!info->GetTrackCount()) {
-      aPco->OnRemoveStream(*stream, jrv);
+    JSErrorResult jrv;
+    aPco->OnTrack(trackId, streamIds, jrv);
+    if (jrv.Failed()) {
+      CSFLogError(LOGTAG, ": OnTrack(%s) failed! Error: %u",
+          track.GetTrackId().c_str(),
+          jrv.ErrorCodeAsInt());
     }
   }
 }
 
 NS_IMETHODIMP
 PeerConnectionImpl::SetRemoteDescription(int32_t action, const char* aSDP)
 {
   PC_AUTO_ENTER_API_CALL(true);
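
FireOnTrackEvents above replaces the old per-stream DOM plumbing: the JSEP engine reports which remote tracks appeared, and the only work left here is converting the UTF-8 JSEP ids into DOM strings and handing them to the observer. The fallible append is also what the "oh well" comment refers to; a minimal sketch of that conversion (the id value is made up):

    // Sketch: JSEP ids are std::string; the observer API wants nsString.
    // AppendElement(..., fallible) returns false on OOM instead of aborting,
    // so a failed append just means the event carries one stream id fewer.
    std::string webrtcStreamId = "example-stream-id";  // hypothetical value
    mozilla::dom::Sequence<nsString> streamIds;
    if (!streamIds.AppendElement(NS_ConvertUTF8toUTF16(webrtcStreamId.c_str()),
                                 mozilla::fallible)) {
      // Out of memory; proceed with whatever made it into the sequence.
    }
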
@@ -2049,16 +1871,17 @@ PeerConnectionImpl::SetRemoteDescription
     case IPeerConnection::kActionRollback:
       sdpType = mozilla::kJsepSdpRollback;
       break;
     default:
       MOZ_ASSERT(false);
       return NS_ERROR_FAILURE;
   }
 
+  size_t originalTransceiverCount = mJsepSession->GetTransceivers().size();
   nsresult nrv = mJsepSession->SetRemoteDescription(sdpType,
                                                     mRemoteRequestedSDP);
   if (NS_FAILED(nrv)) {
     Error error;
     switch (nrv) {
       case NS_ERROR_INVALID_ARG:
         error = kInvalidSessionDescription;
         break;
@@ -2069,29 +1892,73 @@ PeerConnectionImpl::SetRemoteDescription
         error = kInternalError;
     }
 
     std::string errorString = mJsepSession->GetLastError();
     CSFLogError(LOGTAG, "%s: pc = %s, error = %s",
                 __FUNCTION__, mHandle.c_str(), errorString.c_str());
     pco->OnSetRemoteDescriptionError(error, ObString(errorString.c_str()), jrv);
   } else {
-    nrv = CreateNewRemoteTracks(pco);
-    if (NS_FAILED(nrv)) {
-      // aPco was already notified, just return early.
-      return NS_OK;
+    // Iterate over the JSEP transceivers that were just created
+    for (size_t i = originalTransceiverCount;
+         i < mJsepSession->GetTransceivers().size();
+         ++i) {
+      RefPtr<JsepTransceiver> jsepTransceiver =
+        mJsepSession->GetTransceivers()[i];
+
+      if (jsepTransceiver->GetMediaType() ==
+          SdpMediaSection::MediaType::kApplication) {
+        continue;
+      }
+
+      // Audio or video transceiver, need to tell JS about it.
+      RefPtr<TransceiverImpl> transceiverImpl =
+        CreateTransceiverImpl(jsepTransceiver, nullptr, jrv);
+      if (jrv.Failed()) {
+        return NS_ERROR_FAILURE;
+      }
+
+      const JsepTrack& receiving(jsepTransceiver->mRecvTrack);
+      CSFLogInfo(LOGTAG, "%s: pc = %s, asking JS to create transceiver for %s",
+                  __FUNCTION__, mHandle.c_str(), receiving.GetTrackId().c_str());
+      switch (receiving.GetMediaType()) {
+        case SdpMediaSection::MediaType::kAudio:
+          pco->OnTransceiverNeeded(
+              NS_ConvertASCIItoUTF16("audio"), *transceiverImpl, jrv);
+          break;
+        case SdpMediaSection::MediaType::kVideo:
+          pco->OnTransceiverNeeded(
+              NS_ConvertASCIItoUTF16("video"), *transceiverImpl, jrv);
+          break;
+        default:
+          MOZ_RELEASE_ASSERT(false);
+      }
+
+      if (jrv.Failed()) {
+        nsresult rv = jrv.StealNSResult();
+        CSFLogError(LOGTAG, "%s: pc = %s, OnTransceiverNeeded failed. "
+                    "This should never happen. rv = %d",
+                    __FUNCTION__, mHandle.c_str(), static_cast<int>(rv));
+        MOZ_CRASH();
+        return NS_ERROR_FAILURE;
+      }
     }
 
-    RemoveOldRemoteTracks(pco);
+    UpdateSignalingState(sdpType == mozilla::kJsepSdpRollback);
+
+    // This needs to be done before we fire ontrack events
+    pco->SyncTransceivers(jrv);
+
+    FireOnTrackEvents(pco);
 
     pco->OnSetRemoteDescriptionSuccess(jrv);
+
     startCallTelem();
   }
 
-  UpdateSignalingState(sdpType == mozilla::kJsepSdpRollback);
   return NS_OK;
 }
 
 // WebRTC uses highres time relative to the UNIX epoch (Jan 1, 1970, UTC).
 
 nsresult
 PeerConnectionImpl::GetTimeSinceEpoch(DOMHighResTimeStamp *result) {
   MOZ_ASSERT(NS_IsMainThread());
@@ -2281,32 +2148,16 @@ PeerConnectionImpl::PrincipalChanged(Med
   nsIDocument* doc = GetWindow()->GetExtantDoc();
   if (doc) {
     mMedia->UpdateSinkIdentity_m(aTrack, doc->NodePrincipal(), mPeerIdentity);
   } else {
     CSFLogInfo(LOGTAG, "Can't update sink principal; document gone");
   }
 }
 
-std::string
-PeerConnectionImpl::GetTrackId(const MediaStreamTrack& aTrack)
-{
-  nsString wideTrackId;
-  aTrack.GetId(wideTrackId);
-  return NS_ConvertUTF16toUTF8(wideTrackId).get();
-}
-
-std::string
-PeerConnectionImpl::GetStreamId(const DOMMediaStream& aStream)
-{
-  nsString wideStreamId;
-  aStream.GetId(wideStreamId);
-  return NS_ConvertUTF16toUTF8(wideStreamId).get();
-}
-
 void
 PeerConnectionImpl::OnMediaError(const std::string& aError)
 {
   CSFLogError(LOGTAG, "Encountered media error! %s", aError.c_str());
   // TODO: Let content know about this somehow.
 }
 
 bool
@@ -2368,107 +2219,27 @@ PeerConnectionImpl::DumpPacket_m(size_t 
     return;
   }
 
   JSErrorResult jrv;
   pco->OnPacket(level, type, sending, arrayBuffer, jrv);
 }
 
 nsresult
-PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
-                             const Sequence<OwningNonNull<DOMMediaStream>>& aStreams)
-{
-  PC_AUTO_ENTER_API_CALL(true);
-
-  if (!aStreams.Length()) {
-    CSFLogError(LOGTAG, "%s: At least one stream arg required", __FUNCTION__);
-    return NS_ERROR_FAILURE;
-  }
-
-  return AddTrack(aTrack, aStreams[0]);
-}
-
-nsresult
-PeerConnectionImpl::AddTrack(MediaStreamTrack& aTrack,
-                             DOMMediaStream& aMediaStream)
-{
-  std::string streamId = PeerConnectionImpl::GetStreamId(aMediaStream);
-  std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
-  nsresult res = mMedia->AddTrack(aMediaStream, streamId, aTrack, trackId);
-  if (NS_FAILED(res)) {
-    return res;
-  }
-
-  CSFLogDebug(LOGTAG, "Added track (%s) to stream %s",
-                      trackId.c_str(), streamId.c_str());
-
-  aTrack.AddPrincipalChangeObserver(this);
-  PrincipalChanged(&aTrack);
-
-  if (aTrack.AsAudioStreamTrack()) {
-    res = AddTrackToJsepSession(SdpMediaSection::kAudio, streamId, trackId);
-    if (NS_FAILED(res)) {
-      return res;
-    }
-  }
-
-  if (aTrack.AsVideoStreamTrack()) {
-    if (!Preferences::GetBool("media.peerconnection.video.enabled", true)) {
-      // Before this code was moved, this would silently ignore just like it
-      // does now. Is this actually what we want to do?
-      return NS_OK;
-    }
-
-    res = AddTrackToJsepSession(SdpMediaSection::kVideo, streamId, trackId);
-    if (NS_FAILED(res)) {
-      return res;
-    }
-  }
-  OnNegotiationNeeded();
-  return NS_OK;
-}
-
-RefPtr<MediaPipeline>
-PeerConnectionImpl::GetMediaPipelineForTrack(MediaStreamTrack& aRecvTrack)
-{
-  for (size_t i = 0; i < mMedia->RemoteStreamsLength(); ++i) {
-    if (mMedia->GetRemoteStreamByIndex(i)->GetMediaStream()->
-        HasTrack(aRecvTrack)) {
-      auto& pipelines = mMedia->GetRemoteStreamByIndex(i)->GetPipelines();
-      std::string trackId = PeerConnectionImpl::GetTrackId(aRecvTrack);
-      auto it = pipelines.find(trackId);
-      if (it != pipelines.end()) {
-        return it->second;
-      }
-    }
-  }
-
-  return nullptr;
-}
-
-nsresult
 PeerConnectionImpl::AddRIDExtension(MediaStreamTrack& aRecvTrack,
                                     unsigned short aExtensionId)
 {
-  RefPtr<MediaPipeline> pipeline = GetMediaPipelineForTrack(aRecvTrack);
-  if (pipeline) {
-    pipeline->AddRIDExtension_m(aExtensionId);
-  }
-  return NS_OK;
+  return mMedia->AddRIDExtension(aRecvTrack, aExtensionId);
 }
 
 nsresult
 PeerConnectionImpl::AddRIDFilter(MediaStreamTrack& aRecvTrack,
                                  const nsAString& aRid)
 {
-  RefPtr<MediaPipeline> pipeline = GetMediaPipelineForTrack(aRecvTrack);
-  if (pipeline) {
-    pipeline->AddRIDFilter_m(NS_ConvertUTF16toUTF8(aRid).get());
-  }
-  return NS_OK;
+  return mMedia->AddRIDFilter(aRecvTrack, aRid);
 }
 
 nsresult
 PeerConnectionImpl::EnablePacketDump(unsigned long level,
                                      dom::mozPacketDumpType type,
                                      bool sending)
 {
   mPacketDumpEnabled = true;
@@ -2511,52 +2282,44 @@ PeerConnectionImpl::DisablePacketDump(un
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
 PeerConnectionImpl::RemoveTrack(MediaStreamTrack& aTrack) {
   PC_AUTO_ENTER_API_CALL(true);
 
-  std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
-
-  nsString wideTrackId;
-  aTrack.GetId(wideTrackId);
-  for (size_t i = 0; i < mDTMFStates.Length(); ++i) {
-    if (mDTMFStates[i].mTrackId == wideTrackId) {
-      mDTMFStates[i].mSendTimer->Cancel();
-      mDTMFStates.RemoveElementAt(i);
+  std::vector<RefPtr<TransceiverImpl>>& transceivers =
+    mMedia->GetTransceivers();
+
+  nsresult rv = NS_ERROR_INVALID_ARG;
+
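+  // Find the transceiver currently sending this track; if none matches, the
+  // NS_ERROR_INVALID_ARG default above is what gets returned.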
+  for (RefPtr<TransceiverImpl>& transceiver : transceivers) {
+    if (transceiver->HasSendTrack(&aTrack)) {
+      // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
+      for (size_t i = 0; i < mDTMFStates.Length(); ++i) {
+        if (mDTMFStates[i].mTransceiver.get() == transceiver.get()) {
+          mDTMFStates[i].mSendTimer->Cancel();
+          mDTMFStates.RemoveElementAt(i);
+          break;
+        }
+      }
+
+      rv = transceiver->UpdateSendTrack(nullptr);
       break;
     }
   }
 
-  RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
-
-  if (!info) {
-    CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  nsresult rv =
-    mJsepSession->RemoveTrack(info->GetId(), trackId);
-
   if (NS_FAILED(rv)) {
-    CSFLogError(LOGTAG, "%s: Unknown stream/track ids %s %s",
-                __FUNCTION__,
-                info->GetId().c_str(),
-                trackId.c_str());
+    CSFLogError(LOGTAG, "Error updating send track on transceiver");
     return rv;
   }
 
-  media()->RemoveLocalTrack(info->GetId(), trackId);
-
   aTrack.RemovePrincipalChangeObserver(this);
 
-  OnNegotiationNeeded();
-
   return NS_OK;
 }
 
 static int GetDTMFToneCode(uint16_t c)
 {
   const char* DTMF_TONECODES = "0123456789*#ABCD";
 
   if (c == ',') {
@@ -2564,70 +2327,92 @@ static int GetDTMFToneCode(uint16_t c)
     return -1;
   }
 
   const char* i = strchr(DTMF_TONECODES, c);
   MOZ_ASSERT(i);
   return i - DTMF_TONECODES;
 }
 
+OwningNonNull<DOMMediaStream>
+PeerConnectionImpl::CreateReceiveStreamWithTrack(
+    SdpMediaSection::MediaType type) {
+
+  OwningNonNull<DOMMediaStream> stream = MakeMediaStream();
+
+  // Set the principal used for creating the tracks. This makes the stream
+  // data (audio/video samples) accessible to the receiving page. We're
+  // only certain that privacy hasn't been requested if we're connected.
+  nsCOMPtr<nsIPrincipal> principal;
+  nsIDocument* doc = GetWindow()->GetExtantDoc();
+  MOZ_ASSERT(doc);
+  if (mDtlsConnected && !PrivacyRequested()) {
+    principal = doc->NodePrincipal();
+  } else {
+    // we're either certain that we need isolation for the streams, OR
+    // we're not sure and we can fix the stream in SetDtlsConnected
+    principal = NullPrincipal::CreateWithInheritedAttributes(doc->NodePrincipal());
+  }
+
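+  // Create a single receive track of the requested kind, backed by a
+  // RemoteTrackSource, and add it to the new stream.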
+  RefPtr<MediaStreamTrack> track;
+  switch (type) {
+    case SdpMediaSection::MediaType::kAudio:
+      track = stream->CreateDOMTrack(
+          kAudioTrack,
+          MediaSegment::AUDIO,
+          new RemoteTrackSource(principal,
+                                NS_ConvertASCIItoUTF16("remote audio")));
+      break;
+    case SdpMediaSection::MediaType::kVideo:
+      track = stream->CreateDOMTrack(
+          kVideoTrack,
+          MediaSegment::VIDEO,
+          new RemoteTrackSource(principal,
+                                NS_ConvertASCIItoUTF16("remote video")));
+      break;
+    default:
+      MOZ_ASSERT(false, "Bad media kind; our JS passed some garbage");
+  }
+  stream->AddTrackInternal(track);
+
+  return stream;
+}
+
 NS_IMETHODIMP
-PeerConnectionImpl::InsertDTMF(mozilla::dom::RTCRtpSender& sender,
+PeerConnectionImpl::InsertDTMF(TransceiverImpl& transceiver,
                                const nsAString& tones, uint32_t duration,
                                uint32_t interToneGap) {
   PC_AUTO_ENTER_API_CALL(false);
 
   // Check values passed in from PeerConnection.js
   MOZ_ASSERT(duration >= 40, "duration must be at least 40");
   MOZ_ASSERT(duration <= 6000, "duration must be at most 6000");
   MOZ_ASSERT(interToneGap >= 30, "interToneGap must be at least 30");
 
   JSErrorResult jrv;
 
-  // Retrieve track
-  RefPtr<MediaStreamTrack> mst = sender.GetTrack(jrv);
-  if (jrv.Failed()) {
-    NS_WARNING("Failed to retrieve track for RTCRtpSender!");
-    return jrv.StealNSResult();
-  }
-
-  nsString senderTrackId;
-  mst->GetId(senderTrackId);
-
+  // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
   // Attempt to locate state for the DTMFSender
   DTMFState* state = nullptr;
   for (auto& dtmfState : mDTMFStates) {
-    if (dtmfState.mTrackId == senderTrackId) {
+    if (dtmfState.mTransceiver.get() == &transceiver) {
       state = &dtmfState;
       break;
     }
   }
 
   // No state yet, create a new one
   if (!state) {
     state = mDTMFStates.AppendElement();
-    state->mPeerConnectionImpl = this;
-    state->mTrackId = senderTrackId;
+    state->mPCObserver = mPCObserver;
+    state->mTransceiver = &transceiver;
     state->mSendTimer = NS_NewTimer();
   }
   MOZ_ASSERT(state);
 
-  auto trackPairs = mJsepSession->GetNegotiatedTrackPairs();
-  state->mLevel = -1;
-  for (auto& trackPair : trackPairs) {
-    if (state->mTrackId.EqualsASCII(trackPair.mSending->GetTrackId().c_str())) {
-      if (trackPair.HasBundleLevel()) {
-        state->mLevel = trackPair.BundleLevel();
-      } else {
-        state->mLevel = trackPair.mLevel;
-      }
-      break;
-    }
-  }
-
   state->mTones = tones;
   state->mDuration = duration;
   state->mInterToneGap = interToneGap;
   if (!state->mTones.IsEmpty()) {
     state->mSendTimer->InitWithNamedFuncCallback(DTMFSendTimerCallback_m, state, 0,
                                                  nsITimer::TYPE_ONE_SHOT,
                                                  "DTMFSendTimerCallback_m");
   }
@@ -2643,220 +2428,72 @@ PeerConnectionImpl::GetDTMFToneBuffer(mo
 
   // Retrieve track
   RefPtr<MediaStreamTrack> mst = sender.GetTrack(jrv);
   if (jrv.Failed()) {
     NS_WARNING("Failed to retrieve track for RTCRtpSender!");
     return jrv.StealNSResult();
   }
 
-  nsString senderTrackId;
-  mst->GetId(senderTrackId);
-
+  // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
   // Attempt to locate state for the DTMFSender
   for (auto& dtmfState : mDTMFStates) {
-    if (dtmfState.mTrackId == senderTrackId) {
+    if (dtmfState.mTransceiver->HasSendTrack(mst)) {
       outToneBuffer = dtmfState.mTones;
       break;
     }
   }
 
   return NS_OK;
 }
 
 NS_IMETHODIMP
-PeerConnectionImpl::ReplaceTrack(MediaStreamTrack& aThisTrack,
-                                 MediaStreamTrack& aWithTrack) {
+PeerConnectionImpl::ReplaceTrackNoRenegotiation(TransceiverImpl& aTransceiver,
+                                                MediaStreamTrack* aWithTrack) {
   PC_AUTO_ENTER_API_CALL(true);
 
-  nsString trackId;
-  aThisTrack.GetId(trackId);
-
+  RefPtr<dom::MediaStreamTrack> oldSendTrack(aTransceiver.GetSendTrack());
+  if (oldSendTrack) {
+    oldSendTrack->RemovePrincipalChangeObserver(this);
+  }
+
+  nsresult rv = aTransceiver.UpdateSendTrack(aWithTrack);
+
+  if (NS_FAILED(rv)) {
+    CSFLogError(LOGTAG,
+                "Failed to update transceiver: %d", static_cast<int>(rv));
+    return rv;
+  }
+
+  // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
   for (size_t i = 0; i < mDTMFStates.Length(); ++i) {
-    if (mDTMFStates[i].mTrackId == trackId) {
+    if (mDTMFStates[i].mTransceiver.get() == &aTransceiver) {
       mDTMFStates[i].mSendTimer->Cancel();
       mDTMFStates.RemoveElementAt(i);
       break;
     }
   }
 
-  RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
-  if (!pco) {
-    return NS_ERROR_UNEXPECTED;
-  }
-  JSErrorResult jrv;
-
-  if (&aThisTrack == &aWithTrack) {
-    pco->OnReplaceTrackSuccess(jrv);
-    if (jrv.Failed()) {
-      CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
-      return NS_ERROR_UNEXPECTED;
-    }
-    return NS_OK;
-  }
-
-  nsString thisKind;
-  aThisTrack.GetKind(thisKind);
-  nsString withKind;
-  aWithTrack.GetKind(withKind);
-
-  if (thisKind != withKind) {
-    pco->OnReplaceTrackError(kIncompatibleMediaStreamTrack,
-                             ObString(mJsepSession->GetLastError().c_str()),
-                             jrv);
-    if (jrv.Failed()) {
-      CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
-      return NS_ERROR_UNEXPECTED;
-    }
-    return NS_OK;
-  }
-  std::string origTrackId = PeerConnectionImpl::GetTrackId(aThisTrack);
-  std::string newTrackId = PeerConnectionImpl::GetTrackId(aWithTrack);
-
-  RefPtr<LocalSourceStreamInfo> info =
-    media()->GetLocalStreamByTrackId(origTrackId);
-  if (!info) {
-    CSFLogError(LOGTAG, "Could not find stream from trackId");
-    return NS_ERROR_UNEXPECTED;
+  if (aWithTrack) {
+    aWithTrack->AddPrincipalChangeObserver(this);
+    PrincipalChanged(aWithTrack);
   }
 
-  std::string origStreamId = info->GetId();
-  std::string newStreamId =
-    PeerConnectionImpl::GetStreamId(*aWithTrack.mOwningStream);
-
-  nsresult rv = mJsepSession->ReplaceTrack(origStreamId,
-                                           origTrackId,
-                                           newStreamId,
-                                           newTrackId);
-  if (NS_FAILED(rv)) {
-    pco->OnReplaceTrackError(kInvalidMediastreamTrack,
-                             ObString(mJsepSession->GetLastError().c_str()),
-                             jrv);
-    if (jrv.Failed()) {
-      CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
-      return NS_ERROR_UNEXPECTED;
-    }
-    return NS_OK;
-  }
-
-  rv = media()->ReplaceTrack(origStreamId,
-                             origTrackId,
-                             aWithTrack,
-                             newStreamId,
-                             newTrackId);
-
-  if (NS_FAILED(rv)) {
-    CSFLogError(LOGTAG, "Unexpected error in ReplaceTrack: %d",
-                        static_cast<int>(rv));
-    pco->OnReplaceTrackError(kInvalidMediastreamTrack,
-                             ObString("Failed to replace track"),
-                             jrv);
-    if (jrv.Failed()) {
-      CSFLogError(LOGTAG, "Error firing replaceTrack error callback");
-      return NS_ERROR_UNEXPECTED;
-    }
-    return NS_OK;
-  }
-  aThisTrack.RemovePrincipalChangeObserver(this);
-  aWithTrack.AddPrincipalChangeObserver(this);
-  PrincipalChanged(&aWithTrack);
-
   // We update the media pipelines here so we can apply different codec
   // settings for different sources (e.g. screensharing as opposed to camera.)
   // TODO: We should probably only do this if the source has in fact changed.
 
-  if (NS_FAILED((rv = mMedia->UpdateMediaPipelines(*mJsepSession)))) {
+  if (NS_FAILED((rv = mMedia->UpdateMediaPipelines()))) {
     CSFLogError(LOGTAG, "Error Updating MediaPipelines");
     return rv;
   }
 
-  pco->OnReplaceTrackSuccess(jrv);
-  if (jrv.Failed()) {
-    CSFLogError(LOGTAG, "Error firing replaceTrack success callback");
-    return NS_ERROR_UNEXPECTED;
-  }
-
   return NS_OK;
 }
 
-NS_IMETHODIMP
-PeerConnectionImpl::SetParameters(MediaStreamTrack& aTrack,
-                                  const RTCRtpParameters& aParameters) {
-  PC_AUTO_ENTER_API_CALL(true);
-
-  std::vector<JsepTrack::JsConstraints> constraints;
-  if (aParameters.mEncodings.WasPassed()) {
-    for (auto& encoding : aParameters.mEncodings.Value()) {
-      JsepTrack::JsConstraints constraint;
-      if (encoding.mRid.WasPassed()) {
-        constraint.rid = NS_ConvertUTF16toUTF8(encoding.mRid.Value()).get();
-      }
-      if (encoding.mMaxBitrate.WasPassed()) {
-        constraint.constraints.maxBr = encoding.mMaxBitrate.Value();
-      }
-      constraint.constraints.scaleDownBy = encoding.mScaleResolutionDownBy;
-      constraints.push_back(constraint);
-    }
-  }
-  return SetParameters(aTrack, constraints);
-}
-
-nsresult
-PeerConnectionImpl::SetParameters(
-    MediaStreamTrack& aTrack,
-    const std::vector<JsepTrack::JsConstraints>& aConstraints)
-{
-  std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
-  RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
-  if (!info) {
-    CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
-    return NS_ERROR_INVALID_ARG;
-  }
-  std::string streamId = info->GetId();
-
-  return mJsepSession->SetParameters(streamId, trackId, aConstraints);
-}
-
-NS_IMETHODIMP
-PeerConnectionImpl::GetParameters(MediaStreamTrack& aTrack,
-                                  RTCRtpParameters& aOutParameters) {
-  PC_AUTO_ENTER_API_CALL(true);
-
-  std::vector<JsepTrack::JsConstraints> constraints;
-  nsresult rv = GetParameters(aTrack, &constraints);
-  if (NS_FAILED(rv)) {
-    return rv;
-  }
-  aOutParameters.mEncodings.Construct();
-  for (auto& constraint : constraints) {
-    RTCRtpEncodingParameters encoding;
-    encoding.mRid.Construct(NS_ConvertASCIItoUTF16(constraint.rid.c_str()));
-    encoding.mMaxBitrate.Construct(constraint.constraints.maxBr);
-    encoding.mScaleResolutionDownBy = constraint.constraints.scaleDownBy;
-    aOutParameters.mEncodings.Value().AppendElement(Move(encoding), fallible);
-  }
-  return NS_OK;
-}
-
-nsresult
-PeerConnectionImpl::GetParameters(
-    MediaStreamTrack& aTrack,
-    std::vector<JsepTrack::JsConstraints>* aOutConstraints)
-{
-  std::string trackId = PeerConnectionImpl::GetTrackId(aTrack);
-  RefPtr<LocalSourceStreamInfo> info = media()->GetLocalStreamByTrackId(trackId);
-  if (!info) {
-    CSFLogError(LOGTAG, "%s: Unknown stream", __FUNCTION__);
-    return NS_ERROR_INVALID_ARG;
-  }
-  std::string streamId = info->GetId();
-
-  return mJsepSession->GetParameters(streamId, trackId, aOutConstraints);
-}
-
 nsresult
 PeerConnectionImpl::CalculateFingerprint(
     const std::string& algorithm,
     std::vector<uint8_t>* fingerprint) const {
   uint8_t buf[DtlsIdentity::HASH_ALGORITHM_MAX_LENGTH];
   size_t len = 0;
 
   MOZ_ASSERT(fingerprint);
@@ -3111,16 +2748,17 @@ PeerConnectionImpl::RecordEndOfCallTelem
                         type);
 }
 
 nsresult
 PeerConnectionImpl::CloseInt()
 {
   PC_AUTO_ENTER_API_CALL_NO_CHECK();
 
+  // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
   for (auto& dtmfState : mDTMFStates) {
     dtmfState.mSendTimer->Cancel();
   }
 
   // We do this at the end of the call because we want to make sure we've waited
   // for all trickle ICE candidates to come in; this can happen well after we've
   // transitioned to connected. As a bonus, this allows us to detect race
   // conditions where a stats dispatch happens right as the PC closes.
@@ -3150,20 +2788,20 @@ void
 PeerConnectionImpl::ShutdownMedia()
 {
   PC_AUTO_ENTER_API_CALL_NO_CHECK();
 
   if (!mMedia)
     return;
 
   // before we destroy references to local tracks, detach from them
-  for(uint32_t i = 0; i < media()->LocalStreamsLength(); ++i) {
-    LocalSourceStreamInfo *info = media()->GetLocalStreamByIndex(i);
-    for (const auto& pair : info->GetMediaStreamTracks()) {
-      pair.second->RemovePrincipalChangeObserver(this);
+  for(RefPtr<TransceiverImpl>& transceiver : mMedia->GetTransceivers()) {
+    RefPtr<dom::MediaStreamTrack> track = transceiver->GetSendTrack();
+    if (track) {
+      track->RemovePrincipalChangeObserver(this);
     }
   }
 
   // End of call to be recorded in Telemetry
   if (!mStartTime.IsNull()){
     TimeDuration timeDelta = TimeStamp::Now() - mStartTime;
     Telemetry::Accumulate(Telemetry::WEBRTC_CALL_DURATION,
                           timeDelta.ToSeconds());
@@ -3188,53 +2826,42 @@ PeerConnectionImpl::SetSignalingState_m(
       (aSignalingState == PCImplSignalingState::SignalingStable &&
        mSignalingState == PCImplSignalingState::SignalingHaveRemoteOffer &&
        !rollback)) {
     mMedia->EnsureTransports(*mJsepSession);
   }
 
   mSignalingState = aSignalingState;
 
-  bool fireNegotiationNeeded = false;
   if (mSignalingState == PCImplSignalingState::SignalingStable) {
     if (mMedia->GetIceRestartState() ==
             PeerConnectionMedia::ICE_RESTART_PROVISIONAL) {
       if (rollback) {
         RollbackIceRestart();
       } else {
         mMedia->CommitIceRestart();
       }
     }
 
-    // Either negotiation is done, or we've rolled back. In either case, we
-    // need to re-evaluate whether further negotiation is required.
-    mNegotiationNeeded = false;
     // If we're rolling back a local offer, we might need to remove some
-    // transports, but nothing further needs to be done.
+    // transports, and stomp some MediaPipeline setup, but nothing further
+    // needs to be done.
     mMedia->ActivateOrRemoveTransports(*mJsepSession, mForceIceTcp);
+    mMedia->UpdateTransceiverTransports(*mJsepSession);
+    if (NS_FAILED(mMedia->UpdateMediaPipelines())) {
+      CSFLogError(LOGTAG, "Error Updating MediaPipelines");
+      NS_ASSERTION(false, "Error Updating MediaPipelines in SetSignalingState_m()");
+      // XXX what now?  Not much we can do but keep going, without major restructuring
+    }
+
     if (!rollback) {
-      if (NS_FAILED(mMedia->UpdateMediaPipelines(*mJsepSession))) {
-        CSFLogError(LOGTAG, "Error Updating MediaPipelines");
-        NS_ASSERTION(false, "Error Updating MediaPipelines in SetSignalingState_m()");
-        // XXX what now?  Not much we can do but keep going, without major restructuring
-      }
       InitializeDataChannel();
       mMedia->StartIceChecks(*mJsepSession);
     }
 
-    if (!mJsepSession->AllLocalTracksAreAssigned()) {
-      CSFLogInfo(LOGTAG, "Not all local tracks were assigned to an "
-                 "m-section, either because the offerer did not offer"
-                 " to receive enough tracks, or because tracks were "
-                 "added after CreateOffer/Answer, but before "
-                 "offer/answer completed. This requires "
-                 "renegotiation.");
-      fireNegotiationNeeded = true;
-    }
-
     // Telemetry: record info on the current state of streams/renegotiations/etc
     // Note: this code gets run on rollbacks as well!
 
     // Update the max channels used with each direction for each type
     uint16_t receiving[SdpMediaSection::kMediaTypes];
     uint16_t sending[SdpMediaSection::kMediaTypes];
     mJsepSession->CountTracks(receiving, sending);
     for (size_t i = 0; i < SdpMediaSection::kMediaTypes; i++) {
@@ -3257,28 +2884,21 @@ PeerConnectionImpl::SetSignalingState_m(
   }
 
   RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
   if (!pco) {
     return;
   }
   JSErrorResult rv;
   pco->OnStateChange(PCObserverStateType::SignalingState, rv);
-
-  if (fireNegotiationNeeded) {
-    // We don't use MaybeFireNegotiationNeeded here, since content might have
-    // already caused a transition from stable.
-    OnNegotiationNeeded();
-  }
 }
 
 void
 PeerConnectionImpl::UpdateSignalingState(bool rollback) {
-  mozilla::JsepSignalingState state =
-      mJsepSession->GetState();
+  mozilla::JsepSignalingState state = mJsepSession->GetState();
 
   PCImplSignalingState newState;
 
   switch(state) {
     case kJsepStateStable:
       newState = PCImplSignalingState::SignalingStable;
       break;
     case kJsepStateHaveLocalOffer:
@@ -3672,36 +3292,18 @@ PeerConnectionImpl::BuildStatsQuery_m(
       query->report->mLocalSdp.Construct(
           NS_ConvertASCIItoUTF16(localDescription.c_str()));
       query->report->mRemoteSdp.Construct(
           NS_ConvertASCIItoUTF16(remoteDescription.c_str()));
     }
   }
 
   // Gather up pipelines from mMedia so they may be inspected on STS
-
-  std::string trackId;
-  if (aSelector) {
-    trackId = PeerConnectionImpl::GetTrackId(*aSelector);
-  }
-
-  for (int i = 0, len = mMedia->LocalStreamsLength(); i < len; i++) {
-    for (auto pipeline : mMedia->GetLocalStreamByIndex(i)->GetPipelines()) {
-      if (!aSelector || pipeline.second->trackid() == trackId) {
-        query->pipelines.AppendElement(pipeline.second);
-      }
-    }
-  }
-  for (int i = 0, len = mMedia->RemoteStreamsLength(); i < len; i++) {
-    for (auto pipeline : mMedia->GetRemoteStreamByIndex(i)->GetPipelines()) {
-      if (!aSelector || pipeline.second->trackid() == trackId) {
-        query->pipelines.AppendElement(pipeline.second);
-      }
-    }
-  }
+  mMedia->GetTransmitPipelinesMatching(aSelector, &query->pipelines);
+  mMedia->GetReceivePipelinesMatching(aSelector, &query->pipelines);
 
   if (!aSelector) {
     query->grabAllLevels = true;
   }
 
   return rv;
 }
 
@@ -3811,17 +3413,17 @@ PeerConnectionImpl::ExecuteStatsQuery_s(
       continue;
     }
     const MediaPipeline& mp = *query->pipelines[p];
     bool isAudio = (mp.Conduit()->type() == MediaSessionConduit::AUDIO);
     nsString mediaType = isAudio ?
         NS_LITERAL_STRING("audio") : NS_LITERAL_STRING("video");
     nsString idstr = mediaType;
     idstr.AppendLiteral("_");
-    idstr.AppendInt(mp.level());
+    idstr.AppendInt((uint32_t)p);
 
     // TODO(@@NG):ssrcs handle Conduits having multiple stats at the same level
     // This is pending spec work
     // Gather pipeline stats.
     switch (mp.direction()) {
       case MediaPipeline::TRANSMIT: {
         nsString localId = NS_LITERAL_STRING("outbound_rtp_") + idstr;
         nsString remoteId;
@@ -4101,64 +3703,16 @@ void PeerConnectionImpl::DeliverStatsRep
 }
 
 void
 PeerConnectionImpl::RecordLongtermICEStatistics() {
   WebrtcGlobalInformation::StoreLongTermICEStatistics(*this);
 }
 
 void
-PeerConnectionImpl::OnNegotiationNeeded()
-{
-  if (mSignalingState != PCImplSignalingState::SignalingStable) {
-    // We will check whether we need to renegotiate when we reach stable again
-    return;
-  }
-
-  if (mNegotiationNeeded) {
-    return;
-  }
-
-  mNegotiationNeeded = true;
-
-  RUN_ON_THREAD(mThread,
-                WrapRunnableNM(&MaybeFireNegotiationNeeded_static, mHandle),
-                NS_DISPATCH_NORMAL);
-}
-
-/* static */
-void
-PeerConnectionImpl::MaybeFireNegotiationNeeded_static(
-    const std::string& pcHandle)
-{
-  PeerConnectionWrapper wrapper(pcHandle);
-  if (!wrapper.impl()) {
-    return;
-  }
-
-  wrapper.impl()->MaybeFireNegotiationNeeded();
-}
-
-void
-PeerConnectionImpl::MaybeFireNegotiationNeeded()
-{
-  if (!mNegotiationNeeded) {
-    return;
-  }
-
-  RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(mPCObserver);
-  if (!pco) {
-    return;
-  }
-
-  JSErrorResult rv;
-  pco->OnNegotiationNeeded(rv);
-}
-
-void
 PeerConnectionImpl::IceStreamReady(NrIceMediaStream *aStream)
 {
   PC_AUTO_ENTER_API_CALL_NO_CHECK();
   MOZ_ASSERT(aStream);
 
   CSFLogDebug(LOGTAG, "%s: %s", __FUNCTION__, aStream->name().c_str());
 }
 
@@ -4172,40 +3726,16 @@ PeerConnectionImpl::startCallTelem() {
   // Start time for calls
   mStartTime = TimeStamp::Now();
 
   // Increment session call counter
   // If we want to track Loop calls independently here, we need two histograms.
   Telemetry::Accumulate(Telemetry::WEBRTC_CALL_COUNT_2, 1);
 }
 
-NS_IMETHODIMP
-PeerConnectionImpl::GetLocalStreams(nsTArray<RefPtr<DOMMediaStream > >& result)
-{
-  PC_AUTO_ENTER_API_CALL_NO_CHECK();
-  for(uint32_t i=0; i < media()->LocalStreamsLength(); i++) {
-    LocalSourceStreamInfo *info = media()->GetLocalStreamByIndex(i);
-    NS_ENSURE_TRUE(info, NS_ERROR_UNEXPECTED);
-    result.AppendElement(info->GetMediaStream());
-  }
-  return NS_OK;
-}
-
-NS_IMETHODIMP
-PeerConnectionImpl::GetRemoteStreams(nsTArray<RefPtr<DOMMediaStream > >& result)
-{
-  PC_AUTO_ENTER_API_CALL_NO_CHECK();
-  for(uint32_t i=0; i < media()->RemoteStreamsLength(); i++) {
-    RemoteSourceStreamInfo *info = media()->GetRemoteStreamByIndex(i);
-    NS_ENSURE_TRUE(info, NS_ERROR_UNEXPECTED);
-    result.AppendElement(info->GetMediaStream());
-  }
-  return NS_OK;
-}
-
 void
 PeerConnectionImpl::DTMFSendTimerCallback_m(nsITimer* timer, void* closure)
 {
   MOZ_ASSERT(NS_IsMainThread());
 
   auto state = static_cast<DTMFState*>(closure);
 
   nsString eventTone;
@@ -4223,41 +3753,33 @@ PeerConnectionImpl::DTMFSendTimerCallbac
                                                    "DTMFSendTimerCallback_m");
     } else {
       // Reset delay if necessary
       state->mSendTimer->InitWithNamedFuncCallback(DTMFSendTimerCallback_m, state,
                                                    state->mDuration + state->mInterToneGap,
                                                    nsITimer::TYPE_ONE_SHOT,
                                                    "DTMFSendTimerCallback_m");
 
-      RefPtr<AudioSessionConduit> conduit =
-        state->mPeerConnectionImpl->mMedia->GetAudioConduit(state->mLevel);
-
-      if (conduit) {
-        uint32_t duration = state->mDuration;
-        state->mPeerConnectionImpl->mSTSThread->Dispatch(WrapRunnableNM([conduit, tone, duration] () {
-            //Note: We default to channel 0, not inband, and 6dB attenuation.
-            //      here. We might want to revisit these choices in the future.
-            conduit->InsertDTMFTone(0, tone, true, duration, 6);
-          }), NS_DISPATCH_NORMAL);
-      }
-
+      state->mTransceiver->InsertDTMFTone(tone, state->mDuration);
     }
   } else {
     state->mSendTimer->Cancel();
   }
 
-  RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(state->mPeerConnectionImpl->mPCObserver);
+  RefPtr<PeerConnectionObserver> pco = do_QueryObjectReferent(state->mPCObserver);
   if (!pco) {
     NS_WARNING("Failed to dispatch the RTCDTMFToneChange event!");
     return;
   }
 
   JSErrorResult jrv;
-  pco->OnDTMFToneChange(state->mTrackId, eventTone, jrv);
+  pco->OnDTMFToneChange(*state->mTransceiver->GetSendTrack(), eventTone, jrv);
 
   if (jrv.Failed()) {
     NS_WARNING("Failed to dispatch the RTCDTMFToneChange event!");
     return;
   }
 }
 
+PeerConnectionImpl::DTMFState::DTMFState() = default;
+PeerConnectionImpl::DTMFState::~DTMFState() = default;
+
 }  // end mozilla namespace
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionImpl.h
@@ -33,16 +33,17 @@
 
 #include "signaling/src/jsep/JsepSession.h"
 #include "signaling/src/jsep/JsepSessionImpl.h"
 #include "signaling/src/sdp/SdpMediaSection.h"
 
 #include "mozilla/ErrorResult.h"
 #include "mozilla/dom/PeerConnectionImplEnumsBinding.h"
 #include "mozilla/dom/RTCPeerConnectionBinding.h" // mozPacketDumpType, maybe move?
+#include "mozilla/dom/RTCRtpTransceiverBinding.h"
 #include "PrincipalChangeObserver.h"
 #include "StreamTracks.h"
 
 #include "mozilla/TimeStamp.h"
 #include "mozilla/net/DataChannel.h"
 #include "VideoUtils.h"
 #include "VideoSegment.h"
 #include "mozilla/dom/RTCStatsReportBinding.h"
@@ -60,16 +61,17 @@ class nsDOMDataChannel;
 namespace mozilla {
 class DataChannel;
 class DtlsIdentity;
 class NrIceCtx;
 class NrIceMediaStream;
 class NrIceStunServer;
 class NrIceTurnServer;
 class MediaPipeline;
+class TransceiverImpl;
 
 class DOMMediaStream;
 
 namespace dom {
 class RTCCertificate;
 struct RTCConfiguration;
 class RTCDTMFSender;
 struct RTCIceServer;
@@ -250,17 +252,17 @@ public:
 
   NS_DECL_THREADSAFE_ISUPPORTS
 
   bool WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto, JS::MutableHandle<JSObject*> aReflector);
 
   static already_AddRefed<PeerConnectionImpl>
       Constructor(const mozilla::dom::GlobalObject& aGlobal, ErrorResult& rv);
   static PeerConnectionImpl* CreatePeerConnection();
-  already_AddRefed<DOMMediaStream> MakeMediaStream();
+  OwningNonNull<DOMMediaStream> MakeMediaStream();
 
   nsresult CreateRemoteSourceStreamInfo(RefPtr<RemoteSourceStreamInfo>* aInfo,
                                         const std::string& aId);
 
   // DataConnection observers
   void NotifyDataChannel(already_AddRefed<mozilla::DataChannel> aChannel)
     // PeerConnectionImpl only inherits from mozilla::DataChannelConnection
     // inside libxul.
@@ -357,19 +359,17 @@ public:
 
   NS_IMETHODIMP SetLocalDescription (int32_t aAction, const char* aSDP);
 
   void SetLocalDescription (int32_t aAction, const nsAString& aSDP, ErrorResult &rv)
   {
     rv = SetLocalDescription(aAction, NS_ConvertUTF16toUTF8(aSDP).get());
   }
 
-  nsresult CreateNewRemoteTracks(RefPtr<PeerConnectionObserver>& aPco);
-
-  void RemoveOldRemoteTracks(RefPtr<PeerConnectionObserver>& aPco);
+  void FireOnTrackEvents(RefPtr<PeerConnectionObserver>& aPco);
 
   NS_IMETHODIMP SetRemoteDescription (int32_t aAction, const char* aSDP);
 
   void SetRemoteDescription (int32_t aAction, const nsAString& aSDP, ErrorResult &rv)
   {
     rv = SetRemoteDescription(aAction, NS_ConvertUTF16toUTF8(aSDP).get());
   }
 
@@ -393,50 +393,50 @@ public:
 
   NS_IMETHODIMP CloseStreams();
 
   void CloseStreams(ErrorResult &rv)
   {
     rv = CloseStreams();
   }
 
-  NS_IMETHODIMP_TO_ERRORRESULT(AddTrack, ErrorResult &rv,
-      mozilla::dom::MediaStreamTrack& aTrack,
-      const mozilla::dom::Sequence<mozilla::OwningNonNull<DOMMediaStream>>& aStreams)
-  {
-    rv = AddTrack(aTrack, aStreams);
-  }
-
   NS_IMETHODIMP_TO_ERRORRESULT(RemoveTrack, ErrorResult &rv,
                                mozilla::dom::MediaStreamTrack& aTrack)
   {
     rv = RemoveTrack(aTrack);
   }
 
-  nsresult
-  AddTrack(mozilla::dom::MediaStreamTrack& aTrack, DOMMediaStream& aStream);
+  already_AddRefed<TransceiverImpl> CreateTransceiverImpl(
+      const nsAString& aKind,
+      dom::MediaStreamTrack* aSendTrack,
+      ErrorResult& rv);
+
+  OwningNonNull<DOMMediaStream> CreateReceiveStreamWithTrack(
+      SdpMediaSection::MediaType type);
+
+  bool CheckNegotiationNeeded(ErrorResult &rv);
 
   NS_IMETHODIMP_TO_ERRORRESULT(InsertDTMF, ErrorResult &rv,
-                               dom::RTCRtpSender& sender,
+                               TransceiverImpl& transceiver,
                                const nsAString& tones,
                                uint32_t duration, uint32_t interToneGap) {
-    rv = InsertDTMF(sender, tones, duration, interToneGap);
+    rv = InsertDTMF(transceiver, tones, duration, interToneGap);
   }
 
   NS_IMETHODIMP_TO_ERRORRESULT(GetDTMFToneBuffer, ErrorResult &rv,
                                dom::RTCRtpSender& sender,
                                nsAString& outToneBuffer) {
     rv = GetDTMFToneBuffer(sender, outToneBuffer);
   }
 
-  NS_IMETHODIMP_TO_ERRORRESULT(ReplaceTrack, ErrorResult &rv,
-                               mozilla::dom::MediaStreamTrack& aThisTrack,
-                               mozilla::dom::MediaStreamTrack& aWithTrack)
+  NS_IMETHODIMP_TO_ERRORRESULT(ReplaceTrackNoRenegotiation, ErrorResult &rv,
+                               TransceiverImpl& aTransceiver,
+                               mozilla::dom::MediaStreamTrack* aWithTrack)
   {
-    rv = ReplaceTrack(aThisTrack, aWithTrack);
+    rv = ReplaceTrackNoRenegotiation(aTransceiver, aWithTrack);
   }
 
   NS_IMETHODIMP_TO_ERRORRESULT(SetParameters, ErrorResult &rv,
                                dom::MediaStreamTrack& aTrack,
                                const dom::RTCRtpParameters& aParameters)
   {
     rv = SetParameters(aTrack, aParameters);
   }
@@ -590,28 +590,16 @@ public:
                                       const nsAString& aProtocol,
                                       uint16_t aType,
                                       bool outOfOrderAllowed,
                                       uint16_t aMaxTime,
                                       uint16_t aMaxNum,
                                       bool aExternalNegotiated,
                                       uint16_t aStream);
 
-  NS_IMETHODIMP_TO_ERRORRESULT(GetLocalStreams, ErrorResult &rv,
-                               nsTArray<RefPtr<DOMMediaStream > >& result)
-  {
-    rv = GetLocalStreams(result);
-  }
-
-  NS_IMETHODIMP_TO_ERRORRESULT(GetRemoteStreams, ErrorResult &rv,
-                               nsTArray<RefPtr<DOMMediaStream > >& result)
-  {
-    rv = GetRemoteStreams(result);
-  }
-
   // Called whenever something is unrecognized by the parser
   // May be called more than once and does not necessarily mean
   // that parsing was stopped, only that something was unrecognized.
   void OnSdpParseError(const char* errorMessage);
 
   // Called when OnLocal/RemoteDescriptionSuccess/Error
   // is called to start the list over.
   void ClearSdpParseErrorMessages();
@@ -640,19 +628,16 @@ public:
       RTCStatsQuery *query);
 
   static nsresult ExecuteStatsQuery_s(RTCStatsQuery *query);
 
   // for monitoring changes in track ownership
   // PeerConnectionMedia can't do it because it doesn't know about principals
   virtual void PrincipalChanged(dom::MediaStreamTrack* aTrack) override;
 
-  static std::string GetStreamId(const DOMMediaStream& aStream);
-  static std::string GetTrackId(const dom::MediaStreamTrack& track);
-
   void OnMediaError(const std::string& aError);
 
   bool ShouldDumpPacket(size_t level, dom::mozPacketDumpType type,
                         bool sending) const;
 
   void DumpPacket_m(size_t level, dom::mozPacketDumpType type, bool sending,
                     UniquePtr<uint8_t[]>& packet, size_t size);
 
@@ -697,24 +682,21 @@ private:
   nsresult GetDatachannelParameters(
       uint32_t* channels,
       uint16_t* localport,
       uint16_t* remoteport,
       uint32_t* maxmessagesize,
       bool*     mmsset,
       uint16_t* level) const;
 
-  static void DeferredAddTrackToJsepSession(const std::string& pcHandle,
-                                            SdpMediaSection::MediaType type,
-                                            const std::string& streamId,
-                                            const std::string& trackId);
-
-  nsresult AddTrackToJsepSession(SdpMediaSection::MediaType type,
-                                 const std::string& streamId,
-                                 const std::string& trackId);
+  nsresult AddRtpTransceiverToJsepSession(RefPtr<JsepTransceiver>& transceiver);
+  already_AddRefed<TransceiverImpl> CreateTransceiverImpl(
+      JsepTransceiver* aJsepTransceiver,
+      dom::MediaStreamTrack* aSendTrack,
+      ErrorResult& aRv);
 
   nsresult SetupIceRestart();
   nsresult RollbackIceRestart();
   void FinalizeIceRestart();
 
   static void GetStatsForPCObserver_s(
       const std::string& pcHandle,
       nsAutoPtr<RTCStatsQuery> query);
@@ -727,20 +709,16 @@ private:
 
   // When ICE completes, we record a bunch of statistics that outlive the
   // PeerConnection. This is just telemetry right now, but this can also
   // include things like dumping the RLogConnector somewhere, saving away
   // an RTCStatsReport somewhere so it can be inspected after the call is over,
   // or other things.
   void RecordLongtermICEStatistics();
 
-  void OnNegotiationNeeded();
-  static void MaybeFireNegotiationNeeded_static(const std::string& pcHandle);
-  void MaybeFireNegotiationNeeded();
-
   // Timecard used to measure processing time. This should be the first class
   // attribute so that we accurately measure the time required to instantiate
   // any other attributes of this class.
   Timecard *mTimeCard;
 
   mozilla::dom::PCImplSignalingState mSignalingState;
 
   // ICE State
@@ -812,41 +790,41 @@ private:
   bool mHaveConfiguredCodecs;
 
   bool mHaveDataStream;
 
   unsigned int mAddCandidateErrorCount;
 
   bool mTrickle;
 
-  bool mNegotiationNeeded;
-
   bool mPrivateWindow;
 
   // Whether this PeerConnection is being counted as active by mWindow
   bool mActiveOnWindow;
 
   // storage for Telemetry data
   uint16_t mMaxReceiving[SdpMediaSection::kMediaTypes];
   uint16_t mMaxSending[SdpMediaSection::kMediaTypes];
 
   // DTMF
   struct DTMFState {
-    PeerConnectionImpl* mPeerConnectionImpl;
+    DTMFState();
+    ~DTMFState();
+    nsWeakPtr mPCObserver;
+    RefPtr<TransceiverImpl> mTransceiver;
     nsCOMPtr<nsITimer> mSendTimer;
-    nsString mTrackId;
     nsString mTones;
-    size_t mLevel;
     uint32_t mDuration;
     uint32_t mInterToneGap;
   };
 
   static void
   DTMFSendTimerCallback_m(nsITimer* timer, void*);
 
+  // TODO(bug 1401983): Move DTMF stuff to TransceiverImpl
   nsTArray<DTMFState> mDTMFStates;
 
   std::vector<unsigned> mSendPacketDumpFlags;
   std::vector<unsigned> mRecvPacketDumpFlags;
   Atomic<bool> mPacketDumpEnabled;
   mutable Mutex mPacketDumpFlagsMutex;
 
 public:
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.cpp
@@ -7,56 +7,44 @@
 #include <vector>
 
 #include "CSFLog.h"
 
 #include "nspr.h"
 
 #include "nricectx.h"
 #include "nricemediastream.h"
-#include "MediaPipelineFactory.h"
+#include "MediaPipelineFilter.h"
+#include "MediaPipeline.h"
 #include "PeerConnectionImpl.h"
 #include "PeerConnectionMedia.h"
-#include "AudioConduit.h"
-#include "VideoConduit.h"
 #include "runnable_utils.h"
 #include "transportlayerice.h"
 #include "transportlayerdtls.h"
 #include "signaling/src/jsep/JsepSession.h"
 #include "signaling/src/jsep/JsepTransport.h"
 
-#include "MediaSegment.h"
-#include "MediaStreamGraph.h"
-
-#include "MediaStreamGraphImpl.h"
-
 #include "nsContentUtils.h"
 #include "nsNetCID.h"
 #include "nsNetUtil.h"
 #include "nsIURI.h"
 #include "nsIScriptSecurityManager.h"
 #include "nsICancelable.h"
 #include "nsILoadInfo.h"
 #include "nsIContentPolicy.h"
 #include "nsIProxyInfo.h"
 #include "nsIProtocolProxyService.h"
 
 #include "nsProxyRelease.h"
 
-#include "MediaStreamList.h"
 #include "nsIScriptGlobalObject.h"
 #include "mozilla/Preferences.h"
 #include "mozilla/Telemetry.h"
-#include "mozilla/dom/RTCStatsReportBinding.h"
-#include "MediaStreamTrack.h"
-#include "VideoStreamTrack.h"
-#include "MediaStreamError.h"
 #include "MediaManager.h"
-
-
+#include "WebrtcGmpVideoCodec.h"
 
 namespace mozilla {
 using namespace dom;
 
 static const char* pcmLogTag = "PeerConnectionMedia";
 #ifdef LOGTAG
 #undef LOGTAG
 #endif
@@ -64,146 +52,16 @@ static const char* pcmLogTag = "PeerConn
 
 //XXX(pkerr) What about bitrate settings? Going with the defaults for now.
 RefPtr<WebRtcCallWrapper>
 CreateCall()
 {
   return WebRtcCallWrapper::Create();
 }
 
-nsresult
-PeerConnectionMedia::ReplaceTrack(const std::string& aOldStreamId,
-                                  const std::string& aOldTrackId,
-                                  MediaStreamTrack& aNewTrack,
-                                  const std::string& aNewStreamId,
-                                  const std::string& aNewTrackId)
-{
-  RefPtr<LocalSourceStreamInfo> oldInfo(GetLocalStreamById(aOldStreamId));
-
-  if (!oldInfo) {
-    CSFLogError(LOGTAG, "Failed to find stream id %s", aOldStreamId.c_str());
-    return NS_ERROR_NOT_AVAILABLE;
-  }
-
-  nsresult rv = AddTrack(*aNewTrack.mOwningStream, aNewStreamId,
-                         aNewTrack, aNewTrackId);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  RefPtr<LocalSourceStreamInfo> newInfo(GetLocalStreamById(aNewStreamId));
-
-  if (!newInfo) {
-    CSFLogError(LOGTAG, "Failed to add track id %s", aNewTrackId.c_str());
-    MOZ_ASSERT(false);
-    return NS_ERROR_FAILURE;
-  }
-
-  rv = newInfo->TakePipelineFrom(oldInfo, aOldTrackId, aNewTrack, aNewTrackId);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  return RemoveLocalTrack(aOldStreamId, aOldTrackId);
-}
-
-static void
-PipelineReleaseRef_m(RefPtr<MediaPipeline> pipeline)
-{}
-
-static void
-PipelineDetachTransport_s(RefPtr<MediaPipeline> pipeline,
-                          nsCOMPtr<nsIThread> mainThread)
-{
-  pipeline->DetachTransport_s();
-  mainThread->Dispatch(
-      // Make sure we let go of our reference before dispatching
-      // If the dispatch fails, well, we're hosed anyway.
-      WrapRunnableNM(PipelineReleaseRef_m, pipeline.forget()),
-      NS_DISPATCH_NORMAL);
-}
-
-void
-SourceStreamInfo::EndTrack(MediaStream* stream, dom::MediaStreamTrack* track)
-{
-  if (!stream || !stream->AsSourceStream()) {
-    return;
-  }
-
-  class Message : public ControlMessage {
-   public:
-    Message(MediaStream* stream, TrackID track)
-      : ControlMessage(stream),
-        track_id_(track) {}
-
-    virtual void Run() override {
-      mStream->AsSourceStream()->EndTrack(track_id_);
-    }
-   private:
-    TrackID track_id_;
-  };
-
-  stream->GraphImpl()->AppendMessage(
-      MakeUnique<Message>(stream, track->mTrackID));
-}
-
-void
-SourceStreamInfo::RemoveTrack(const std::string& trackId)
-{
-  mTracks.erase(trackId);
-
-  RefPtr<MediaPipeline> pipeline = GetPipelineByTrackId_m(trackId);
-  if (pipeline) {
-    mPipelines.erase(trackId);
-    pipeline->ShutdownMedia_m();
-    mParent->GetSTSThread()->Dispatch(
-        WrapRunnableNM(PipelineDetachTransport_s,
-                       pipeline.forget(),
-                       mParent->GetMainThread()),
-        NS_DISPATCH_NORMAL);
-  }
-}
-
-void SourceStreamInfo::DetachTransport_s()
-{
-  ASSERT_ON_THREAD(mParent->GetSTSThread());
-  // walk through all the MediaPipelines and call the shutdown
-  // transport functions. Must be on the STS thread.
-  for (auto& pipeline : mPipelines) {
-    pipeline.second->DetachTransport_s();
-  }
-}
-
-void SourceStreamInfo::DetachMedia_m()
-{
-  ASSERT_ON_THREAD(mParent->GetMainThread());
-
-  // walk through all the MediaPipelines and call the shutdown
-  // media functions. Must be on the main thread.
-  for (auto& pipeline : mPipelines) {
-    pipeline.second->ShutdownMedia_m();
-  }
-  mMediaStream = nullptr;
-}
-
-already_AddRefed<PeerConnectionImpl>
-PeerConnectionImpl::Constructor(const dom::GlobalObject& aGlobal, ErrorResult& rv)
-{
-  RefPtr<PeerConnectionImpl> pc = new PeerConnectionImpl(&aGlobal);
-
-  CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc.get());
-
-  return pc.forget();
-}
-
-PeerConnectionImpl* PeerConnectionImpl::CreatePeerConnection()
-{
-  PeerConnectionImpl *pc = new PeerConnectionImpl();
-
-  CSFLogDebug(LOGTAG, "Created PeerConnection: %p", pc);
-
-  return pc;
-}
-
 NS_IMETHODIMP PeerConnectionMedia::ProtocolProxyQueryHandler::
 OnProxyAvailable(nsICancelable *request,
                  nsIChannel *aChannel,
                  nsIProxyInfo *proxyinfo,
                  nsresult result) {
 
   if (!pcm_->mProxyRequest) {
     // PeerConnectionMedia is no longer waiting
@@ -294,16 +152,21 @@ PeerConnectionMedia::PeerConnectionMedia
       mUuidGen(MakeUnique<PCUuidGenerator>()),
       mMainThread(mParent->GetMainThread()),
       mSTSThread(mParent->GetSTSThread()),
       mProxyResolveCompleted(false),
       mIceRestartState(ICE_RESTART_NONE),
       mLocalAddrsCompleted(false) {
 }
 
+PeerConnectionMedia::~PeerConnectionMedia()
+{
+  MOZ_RELEASE_ASSERT(!mMainThread);
+}
+
 void
 PeerConnectionMedia::InitLocalAddrs()
 {
   if (XRE_IsContentProcess()) {
     CSFLogDebug(LOGTAG, "%s: Get stun addresses via IPC",
                 mParentHandle.c_str());
 
     nsCOMPtr<nsIEventTarget> target = mParent->GetWindow()
@@ -435,24 +298,27 @@ nsresult PeerConnectionMedia::Init(const
   mCall = CreateCall();
 
   return NS_OK;
 }
 
 void
 PeerConnectionMedia::EnsureTransports(const JsepSession& aSession)
 {
-  auto transports = aSession.GetTransports();
-  for (size_t i = 0; i < transports.size(); ++i) {
-    RefPtr<JsepTransport> transport = transports[i];
+  for (const auto& transceiver : aSession.GetTransceivers()) {
+    if (!transceiver->HasLevel()) {
+      continue;
+    }
+
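+    // Only transceivers that have been associated with an m-section (and so
+    // have a level) get an ICE stream set up for them on the STS thread.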
+    RefPtr<JsepTransport> transport = transceiver->mTransport;
     RUN_ON_THREAD(
         GetSTSThread(),
         WrapRunnable(RefPtr<PeerConnectionMedia>(this),
                      &PeerConnectionMedia::EnsureTransport_s,
-                     i,
+                     transceiver->GetLevel(),
                      transport->mComponents),
         NS_DISPATCH_NORMAL);
   }
 
   GatherIfReady();
 }
 
 void
@@ -479,69 +345,92 @@ PeerConnectionMedia::EnsureTransport_s(s
     stream->SetLevel(aLevel);
     stream->SignalReady.connect(this, &PeerConnectionMedia::IceStreamReady_s);
     stream->SignalCandidate.connect(this,
                                     &PeerConnectionMedia::OnCandidateFound_s);
     mIceCtxHdlr->ctx()->SetStream(aLevel, stream);
   }
 }
 
-void
+nsresult
 PeerConnectionMedia::ActivateOrRemoveTransports(const JsepSession& aSession,
                                                 const bool forceIceTcp)
 {
-  auto transports = aSession.GetTransports();
-  for (size_t i = 0; i < transports.size(); ++i) {
-    RefPtr<JsepTransport> transport = transports[i];
+  for (const auto& transceiver : aSession.GetTransceivers()) {
+    if (!transceiver->HasLevel()) {
+      continue;
+    }
 
     std::string ufrag;
     std::string pwd;
     std::vector<std::string> candidates;
+    size_t components = 0;
 
-    if (transport->mComponents) {
-      MOZ_ASSERT(transport->mIce);
-      CSFLogDebug(LOGTAG, "Transport %u is active", static_cast<unsigned>(i));
+    RefPtr<JsepTransport> transport = transceiver->mTransport;
+    unsigned level = transceiver->GetLevel();
+
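+    // A transport is only activated if it is actually in use: it must have
+    // components, and for bundled transceivers only the bundle level's
+    // transport counts. Otherwise ufrag/password/candidates are left empty
+    // so that ActivateOrRemoveTransport_s removes the transport instead.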
+    if (transport->mComponents &&
+        (!transceiver->HasBundleLevel() ||
+         (transceiver->BundleLevel() == level))) {
+      CSFLogDebug(LOGTAG, "ACTIVATING TRANSPORT! - PC %s: level=%u components=%u",
+                  mParentHandle.c_str(), (unsigned)level,
+                  (unsigned)transport->mComponents);
+
       ufrag = transport->mIce->GetUfrag();
       pwd = transport->mIce->GetPassword();
       candidates = transport->mIce->GetCandidates();
-    } else {
-      CSFLogDebug(LOGTAG, "Transport %u is disabled", static_cast<unsigned>(i));
-      // Make sure the MediaPipelineFactory doesn't try to use these.
-      RemoveTransportFlow(i, false);
-      RemoveTransportFlow(i, true);
-    }
-
-    if (forceIceTcp) {
-      candidates.erase(std::remove_if(candidates.begin(),
-                                      candidates.end(),
-                                      [](const std::string & s) {
-                                        return s.find(" UDP ") != std::string::npos ||
-                                               s.find(" udp ") != std::string::npos; }),
-                       candidates.end());
+      components = transport->mComponents;
+      if (forceIceTcp) {
+        candidates.erase(std::remove_if(candidates.begin(),
+                                        candidates.end(),
+                                        [](const std::string & s) {
+                                          return s.find(" UDP ") != std::string::npos ||
+                                                 s.find(" udp ") != std::string::npos; }),
+                         candidates.end());
+      }
     }
 
     RUN_ON_THREAD(
         GetSTSThread(),
         WrapRunnable(RefPtr<PeerConnectionMedia>(this),
                      &PeerConnectionMedia::ActivateOrRemoveTransport_s,
-                     i,
-                     transport->mComponents,
+                     transceiver->GetLevel(),
+                     components,
                      ufrag,
                      pwd,
                      candidates),
         NS_DISPATCH_NORMAL);
   }
 
   // We can have more streams than m-lines due to rollback.
   RUN_ON_THREAD(
       GetSTSThread(),
       WrapRunnable(RefPtr<PeerConnectionMedia>(this),
                    &PeerConnectionMedia::RemoveTransportsAtOrAfter_s,
-                   transports.size()),
+                   aSession.GetTransceivers().size()),
       NS_DISPATCH_NORMAL);
+
+  return NS_OK;
+}
+
+nsresult
+PeerConnectionMedia::UpdateTransceiverTransports(const JsepSession& aSession)
+{
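+  // First make sure the per-level transport flows match what was negotiated,
+  // then hand the (possibly new) flows to each TransceiverImpl.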
+  for (const auto& transceiver : aSession.GetTransceivers()) {
+    nsresult rv = UpdateTransportFlows(*transceiver);
+    if (NS_FAILED(rv)) {
+      return rv;
+    }
+  }
+
+  for (const auto& transceiverImpl : mTransceivers) {
+    transceiverImpl->UpdateTransport(*this);
+  }
+
+  return NS_OK;
 }
 
 void
 PeerConnectionMedia::ActivateOrRemoveTransport_s(
     size_t aMLine,
     size_t aComponentCount,
     const std::string& aUfrag,
     const std::string& aPassword,
@@ -591,42 +480,210 @@ PeerConnectionMedia::ActivateOrRemoveTra
 void
 PeerConnectionMedia::RemoveTransportsAtOrAfter_s(size_t aMLine)
 {
   for (size_t i = aMLine; i < mIceCtxHdlr->ctx()->GetStreamCount(); ++i) {
     mIceCtxHdlr->ctx()->SetStream(i, nullptr);
   }
 }
 
-nsresult PeerConnectionMedia::UpdateMediaPipelines(
-    const JsepSession& session) {
-  auto trackPairs = session.GetNegotiatedTrackPairs();
-  MediaPipelineFactory factory(this);
+nsresult
+PeerConnectionMedia::UpdateMediaPipelines()
+{
+  // The GMP code is all the way on the other side of webrtc.org, and it is not
+  // feasible to plumb error information all the way back. So, we set up a
+  // handle to the PC (for the duration of this call) in a global variable.
+  // This allows the GMP code to report errors to the PC.
+  WebrtcGmpPCHandleSetter setter(mParentHandle);
+
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    nsresult rv = transceiver->UpdateConduit();
+    if (NS_FAILED(rv)) {
+      MOZ_CRASH();
+      return rv;
+    }
+
+    if (!transceiver->IsVideo()) {
+      rv = transceiver->SyncWithMatchingVideoConduits(mTransceivers);
+      if (NS_FAILED(rv)) {
+        MOZ_CRASH();
+        return rv;
+      }
+      // TODO: If there is no audio, we should probably de-sync. However, this
+      // has never been done before, and it is unclear whether it is safe...
+    }
+  }
+
+  return NS_OK;
+}
+
+nsresult
+PeerConnectionMedia::UpdateTransportFlows(const JsepTransceiver& aTransceiver)
+{
+  if (!aTransceiver.HasLevel()) {
+    // Nothing to do
+    return NS_OK;
+  }
+
+  size_t transportLevel = aTransceiver.GetTransportLevel();
+
+  nsresult rv =
+    UpdateTransportFlow(transportLevel, false, *aTransceiver.mTransport);
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
+  return UpdateTransportFlow(transportLevel, true, *aTransceiver.mTransport);
+}
+
+// Accessing the PCMedia should be safe here because we shouldn't
+// have enqueued this function unless it was still active and
+// the ICE data is destroyed on the STS.
+static void
+FinalizeTransportFlow_s(RefPtr<PeerConnectionMedia> aPCMedia,
+                        RefPtr<TransportFlow> aFlow, size_t aLevel,
+                        bool aIsRtcp,
+                        nsAutoPtr<PtrVector<TransportLayer> > aLayerList)
+{
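+  // The ICE layer sits at the front of the layer list; point it at the right
+  // NrIceMediaStream and component before pushing the whole stack onto the
+  // flow.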
+  TransportLayerIce* ice =
+      static_cast<TransportLayerIce*>(aLayerList->values.front());
+  ice->SetParameters(aPCMedia->ice_media_stream(aLevel),
+                     aIsRtcp ? 2 : 1);
+  nsAutoPtr<std::queue<TransportLayer*> > layerQueue(
+      new std::queue<TransportLayer*>);
+  for (auto& value : aLayerList->values) {
+    layerQueue->push(value);
+  }
+  aLayerList->values.clear();
+  (void)aFlow->PushLayers(layerQueue); // TODO(bug 854518): Process errors.
+}
+
+static void
+AddNewIceStreamForRestart_s(RefPtr<PeerConnectionMedia> aPCMedia,
+                            RefPtr<TransportFlow> aFlow,
+                            size_t aLevel,
+                            bool aIsRtcp)
+{
+  TransportLayerIce* ice =
+      static_cast<TransportLayerIce*>(aFlow->GetLayer("ice"));
+  ice->SetParameters(aPCMedia->ice_media_stream(aLevel),
+                     aIsRtcp ? 2 : 1);
+}
+
+nsresult
+PeerConnectionMedia::UpdateTransportFlow(
+    size_t aLevel,
+    bool aIsRtcp,
+    const JsepTransport& aTransport)
+{
+  if (aIsRtcp && aTransport.mComponents < 2) {
+    RemoveTransportFlow(aLevel, aIsRtcp);
+    return NS_OK;
+  }
+
+  if (!aIsRtcp && !aTransport.mComponents) {
+    RemoveTransportFlow(aLevel, aIsRtcp);
+    return NS_OK;
+  }
+
   nsresult rv;
 
-  for (auto pair : trackPairs) {
-    if (pair.mReceiving) {
+  RefPtr<TransportFlow> flow = GetTransportFlow(aLevel, aIsRtcp);
+  if (flow) {
+    if (IsIceRestarting()) {
+      CSFLogInfo(LOGTAG, "Flow[%s]: detected ICE restart - level: %u rtcp: %d",
+                 flow->id().c_str(), (unsigned)aLevel, aIsRtcp);
 
-      rv = factory.CreateOrUpdateMediaPipeline(pair, *pair.mReceiving);
+      RefPtr<PeerConnectionMedia> pcMedia(this);
+      rv = GetSTSThread()->Dispatch(
+          WrapRunnableNM(AddNewIceStreamForRestart_s,
+                         pcMedia, flow, aLevel, aIsRtcp),
+          NS_DISPATCH_NORMAL);
       if (NS_FAILED(rv)) {
+        CSFLogError(LOGTAG, "Failed to dispatch AddNewIceStreamForRestart_s");
         return rv;
       }
     }
 
-    if (pair.mSending) {
-      rv = factory.CreateOrUpdateMediaPipeline(pair, *pair.mSending);
-      if (NS_FAILED(rv)) {
-        return rv;
-      }
+    return NS_OK;
+  }
+
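+  // No existing flow for this level/component; build a fresh ICE + DTLS stack.
+  // The id encodes the PC handle, level, and component for logging purposes.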
+  std::ostringstream osId;
+  osId << mParentHandle << ":" << aLevel << "," << (aIsRtcp ? "rtcp" : "rtp");
+  flow = new TransportFlow(osId.str());
+
+  // The media streams are made on STS so we need to defer setup.
+  auto ice = MakeUnique<TransportLayerIce>();
+  auto dtls = MakeUnique<TransportLayerDtls>();
+  dtls->SetRole(aTransport.mDtls->GetRole() ==
+                        JsepDtlsTransport::kJsepDtlsClient
+                    ? TransportLayerDtls::CLIENT
+                    : TransportLayerDtls::SERVER);
+
+  RefPtr<DtlsIdentity> pcid = mParent->Identity();
+  if (!pcid) {
+    CSFLogError(LOGTAG, "Failed to get DTLS identity.");
+    return NS_ERROR_FAILURE;
+  }
+  dtls->SetIdentity(pcid);
+
+  const SdpFingerprintAttributeList& fingerprints =
+      aTransport.mDtls->GetFingerprints();
+  for (const auto& fingerprint : fingerprints.mFingerprints) {
+    std::ostringstream ss;
+    ss << fingerprint.hashFunc;
+    rv = dtls->SetVerificationDigest(ss.str(), &fingerprint.fingerprint[0],
+                                     fingerprint.fingerprint.size());
+    if (NS_FAILED(rv)) {
+      CSFLogError(LOGTAG, "Could not set fingerprint");
+      return rv;
     }
   }
 
-  for (auto& stream : mRemoteSourceStreams) {
-    stream->StartReceiving();
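+  // Offer the two standard SRTP protection profiles; the 80-bit auth tag
+  // variant is listed first.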
+  std::vector<uint16_t> srtpCiphers;
+  srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_80);
+  srtpCiphers.push_back(SRTP_AES128_CM_HMAC_SHA1_32);
+
+  rv = dtls->SetSrtpCiphers(srtpCiphers);
+  if (NS_FAILED(rv)) {
+    CSFLogError(LOGTAG, "Couldn't set SRTP ciphers");
+    return rv;
+  }
+
+  // Always permit negotiation of the confidential mode.
+  // Only allow non-confidential (which is an allowed default)
+  // if we aren't confidential.
+  std::set<std::string> alpn;
+  std::string alpnDefault = "";
+  alpn.insert("c-webrtc");
+  if (!mParent->PrivacyRequested()) {
+    alpnDefault = "webrtc";
+    alpn.insert(alpnDefault);
   }
+  rv = dtls->SetAlpn(alpn, alpnDefault);
+  if (NS_FAILED(rv)) {
+    CSFLogError(LOGTAG, "Couldn't set ALPN");
+    return rv;
+  }
+
+  nsAutoPtr<PtrVector<TransportLayer> > layers(new PtrVector<TransportLayer>);
+  layers->values.push_back(ice.release());
+  layers->values.push_back(dtls.release());
+
+  RefPtr<PeerConnectionMedia> pcMedia(this);
+  rv = GetSTSThread()->Dispatch(
+      WrapRunnableNM(FinalizeTransportFlow_s, pcMedia, flow, aLevel, aIsRtcp,
+                     layers),
+      NS_DISPATCH_NORMAL);
+  if (NS_FAILED(rv)) {
+    CSFLogError(LOGTAG, "Failed to dispatch FinalizeTransportFlow_s");
+    return rv;
+  }
+
+  AddTransportFlow(aLevel, aIsRtcp, flow);
 
   return NS_OK;
 }
 
 void
 PeerConnectionMedia::StartIceChecks(const JsepSession& aSession)
 {
   nsCOMPtr<nsIRunnable> runnable(
@@ -995,152 +1052,69 @@ PeerConnectionMedia::EnsureIceGathering_
 
   // If there are no streams, we're probably in a situation where we've rolled
   // back while still waiting for our proxy configuration to come back. Make
   // sure content knows that the rollback has stuck wrt gathering.
   IceGatheringStateChange_s(mIceCtxHdlr->ctx().get(),
                             NrIceCtx::ICE_CTX_GATHER_COMPLETE);
 }
 
-nsresult
-PeerConnectionMedia::AddTrack(DOMMediaStream& aMediaStream,
-                              const std::string& streamId,
-                              MediaStreamTrack& aTrack,
-                              const std::string& trackId)
-{
-  ASSERT_ON_THREAD(mMainThread);
-
-  CSFLogDebug(LOGTAG, "%s: MediaStream: %p", __FUNCTION__, &aMediaStream);
-
-  RefPtr<LocalSourceStreamInfo> localSourceStream =
-    GetLocalStreamById(streamId);
-
-  if (!localSourceStream) {
-    localSourceStream = new LocalSourceStreamInfo(&aMediaStream, this, streamId);
-    mLocalSourceStreams.AppendElement(localSourceStream);
-  }
-
-  localSourceStream->AddTrack(trackId, &aTrack);
-  return NS_OK;
-}
-
-nsresult
-PeerConnectionMedia::RemoveLocalTrack(const std::string& streamId,
-                                      const std::string& trackId)
-{
-  ASSERT_ON_THREAD(mMainThread);
-
-  CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
-                      streamId.c_str(), trackId.c_str());
-
-  RefPtr<LocalSourceStreamInfo> localSourceStream =
-    GetLocalStreamById(streamId);
-  if (!localSourceStream) {
-    return NS_ERROR_ILLEGAL_VALUE;
-  }
-
-  localSourceStream->RemoveTrack(trackId);
-  if (!localSourceStream->GetTrackCount()) {
-    mLocalSourceStreams.RemoveElement(localSourceStream);
-  }
-  return NS_OK;
-}
-
-nsresult
-PeerConnectionMedia::RemoveRemoteTrack(const std::string& streamId,
-                                       const std::string& trackId)
-{
-  ASSERT_ON_THREAD(mMainThread);
-
-  CSFLogDebug(LOGTAG, "%s: stream: %s track: %s", __FUNCTION__,
-                      streamId.c_str(), trackId.c_str());
-
-  RefPtr<RemoteSourceStreamInfo> remoteSourceStream =
-    GetRemoteStreamById(streamId);
-  if (!remoteSourceStream) {
-    return NS_ERROR_ILLEGAL_VALUE;
-  }
-
-  remoteSourceStream->RemoveTrack(trackId);
-  if (!remoteSourceStream->GetTrackCount()) {
-    mRemoteSourceStreams.RemoveElement(remoteSourceStream);
-  }
-  return NS_OK;
-}
-
 void
 PeerConnectionMedia::SelfDestruct()
 {
   ASSERT_ON_THREAD(mMainThread);
 
   CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
 
-  // Shut down the media
-  for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
-    mLocalSourceStreams[i]->DetachMedia_m();
-  }
-
-  for (uint32_t i=0; i < mRemoteSourceStreams.Length(); ++i) {
-    mRemoteSourceStreams[i]->DetachMedia_m();
-  }
-
   if (mStunAddrsRequest) {
     mStunAddrsRequest->Cancel();
     mStunAddrsRequest = nullptr;
   }
 
   if (mProxyRequest) {
     mProxyRequest->Cancel(NS_ERROR_ABORT);
     mProxyRequest = nullptr;
   }
 
+  for (auto& transceiver : mTransceivers) {
+    // Transceivers are garbage-collected, so we need to poke them to perform
+    // cleanup right now so that the appropriate events fire.
+    transceiver->Shutdown_m();
+  }
+
+  mTransceivers.clear();
+
   // Shutdown the transport (async)
   RUN_ON_THREAD(mSTSThread, WrapRunnable(
       this, &PeerConnectionMedia::ShutdownMediaTransport_s),
                 NS_DISPATCH_NORMAL);
 
   CSFLogDebug(LOGTAG, "%s: Media shut down", __FUNCTION__);
 }
 
 void
 PeerConnectionMedia::SelfDestruct_m()
 {
   CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
 
   ASSERT_ON_THREAD(mMainThread);
 
-  mLocalSourceStreams.Clear();
-  mRemoteSourceStreams.Clear();
-
   mMainThread = nullptr;
 
   // Final self-destruct.
   this->Release();
 }
 
 void
 PeerConnectionMedia::ShutdownMediaTransport_s()
 {
   ASSERT_ON_THREAD(mSTSThread);
 
   CSFLogDebug(LOGTAG, "%s: ", __FUNCTION__);
 
-  // Here we access m{Local|Remote}SourceStreams off the main thread.
-  // That's OK because by here PeerConnectionImpl has forgotten about us,
-  // so there is no chance of getting a call in here from outside.
-  // The dispatches from SelfDestruct() and to SelfDestruct_m() provide
-  // memory barriers that protect us from badness.
-  for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
-    mLocalSourceStreams[i]->DetachTransport_s();
-  }
-
-  for (uint32_t i=0; i < mRemoteSourceStreams.Length(); ++i) {
-    mRemoteSourceStreams[i]->DetachTransport_s();
-  }
-
   disconnect_all();
   mTransportFlows.clear();
 
 #if !defined(MOZILLA_EXTERNAL_LINKAGE)
   NrIceStats stats = mIceCtxHdlr->Destroy();
 
   CSFLogDebug(LOGTAG, "Ice Telemetry: stun (retransmits: %d)"
                       "   turn (401s: %d   403s: %d   438s: %d)",
@@ -1159,96 +1133,104 @@ PeerConnectionMedia::ShutdownMediaTransp
 
   mIceCtxHdlr = nullptr;
 
   // we're holding a ref to 'this' that's released by SelfDestruct_m
   mMainThread->Dispatch(WrapRunnable(this, &PeerConnectionMedia::SelfDestruct_m),
                         NS_DISPATCH_NORMAL);
 }
 
-LocalSourceStreamInfo*
-PeerConnectionMedia::GetLocalStreamByIndex(int aIndex)
-{
-  ASSERT_ON_THREAD(mMainThread);
-  if(aIndex < 0 || aIndex >= (int) mLocalSourceStreams.Length()) {
-    return nullptr;
-  }
-
-  MOZ_ASSERT(mLocalSourceStreams[aIndex]);
-  return mLocalSourceStreams[aIndex];
-}
-
-LocalSourceStreamInfo*
-PeerConnectionMedia::GetLocalStreamById(const std::string& id)
+nsresult
+PeerConnectionMedia::AddTransceiver(
+    JsepTransceiver* aJsepTransceiver,
+    DOMMediaStream& aReceiveStream,
+    dom::MediaStreamTrack* aSendTrack,
+    RefPtr<TransceiverImpl>* aTransceiverImpl)
 {
-  ASSERT_ON_THREAD(mMainThread);
-  for (size_t i = 0; i < mLocalSourceStreams.Length(); ++i) {
-    if (id == mLocalSourceStreams[i]->GetId()) {
-      return mLocalSourceStreams[i];
-    }
-  }
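+  // The TransceiverImpl creates its conduit and its send/receive pipelines in
+  // its constructor.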
+  RefPtr<TransceiverImpl> transceiver = new TransceiverImpl(
+      mParent->GetHandle(),
+      aJsepTransceiver,
+      mMainThread.get(),
+      mSTSThread.get(),
+      aReceiveStream,
+      aSendTrack,
+      mCall.get());
 
-  return nullptr;
-}
-
-LocalSourceStreamInfo*
-PeerConnectionMedia::GetLocalStreamByTrackId(const std::string& id)
-{
-  ASSERT_ON_THREAD(mMainThread);
-  for (RefPtr<LocalSourceStreamInfo>& info : mLocalSourceStreams) {
-    if (info->HasTrack(id)) {
-      return info;
+  if (aSendTrack) {
+    // Hook up peerIdentity checking (where failure == black/silence)
+    nsIDocument* doc = mParent->GetWindow()->GetExtantDoc();
+    if (doc) {
+      transceiver->UpdateSinkIdentity(nullptr,
+                                      doc->NodePrincipal(),
+                                      mParent->GetPeerIdentity());
+    } else {
+      MOZ_CRASH();
+      return NS_ERROR_FAILURE; // Don't remove this till we know it's safe.
     }
   }
 
-  return nullptr;
+  mTransceivers.push_back(transceiver);
+  *aTransceiverImpl = transceiver;
+
+  return NS_OK;
 }
 
-RemoteSourceStreamInfo*
-PeerConnectionMedia::GetRemoteStreamByIndex(size_t aIndex)
+void
+PeerConnectionMedia::GetTransmitPipelinesMatching(
+    MediaStreamTrack* aTrack,
+    nsTArray<RefPtr<MediaPipeline>>* aPipelines)
 {
-  ASSERT_ON_THREAD(mMainThread);
-  MOZ_ASSERT(mRemoteSourceStreams.SafeElementAt(aIndex));
-  return mRemoteSourceStreams.SafeElementAt(aIndex);
-}
-
-RemoteSourceStreamInfo*
-PeerConnectionMedia::GetRemoteStreamById(const std::string& id)
-{
-  ASSERT_ON_THREAD(mMainThread);
-  for (size_t i = 0; i < mRemoteSourceStreams.Length(); ++i) {
-    if (id == mRemoteSourceStreams[i]->GetId()) {
-      return mRemoteSourceStreams[i];
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->HasSendTrack(aTrack)) {
+      aPipelines->AppendElement(transceiver->GetSendPipeline());
     }
   }
 
-  return nullptr;
+  if (!aPipelines->Length()) {
+    CSFLogWarn(LOGTAG, "%s: none found for %p", __FUNCTION__, aTrack);
+  }
 }
 
-RemoteSourceStreamInfo*
-PeerConnectionMedia::GetRemoteStreamByTrackId(const std::string& id)
+void
+PeerConnectionMedia::GetReceivePipelinesMatching(
+    MediaStreamTrack* aTrack,
+    nsTArray<RefPtr<MediaPipeline>>* aPipelines)
 {
-  ASSERT_ON_THREAD(mMainThread);
-  for (RefPtr<RemoteSourceStreamInfo>& info : mRemoteSourceStreams) {
-    if (info->HasTrack(id)) {
-      return info;
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->HasReceiveTrack(aTrack)) {
+      aPipelines->AppendElement(transceiver->GetReceivePipeline());
     }
   }
 
-  return nullptr;
+  if (!aPipelines->Length()) {
+    CSFLogWarn(LOGTAG, "%s: none found for %p", __FUNCTION__, aTrack);
+  }
 }
 
-
 nsresult
-PeerConnectionMedia::AddRemoteStream(RefPtr<RemoteSourceStreamInfo> aInfo)
+PeerConnectionMedia::AddRIDExtension(MediaStreamTrack& aRecvTrack,
+                                     unsigned short aExtensionId)
 {
-  ASSERT_ON_THREAD(mMainThread);
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->HasReceiveTrack(&aRecvTrack)) {
+      transceiver->AddRIDExtension(aExtensionId);
+    }
+  }
+  return NS_OK;
+}
 
-  mRemoteSourceStreams.AppendElement(aInfo);
-
+nsresult
+PeerConnectionMedia::AddRIDFilter(MediaStreamTrack& aRecvTrack,
+                                  const nsAString& aRid)
+{
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->HasReceiveTrack(&aRecvTrack)) {
+      transceiver->AddRIDFilter(aRid);
+    }
+  }
   return NS_OK;
 }
 
 void
 PeerConnectionMedia::IceGatheringStateChange_s(NrIceCtx* ctx,
                                                NrIceCtx::GatheringState state)
 {
   ASSERT_ON_THREAD(mSTSThread);
@@ -1484,282 +1466,65 @@ void
 PeerConnectionMedia::ConnectDtlsListener_s(const RefPtr<TransportFlow>& aFlow)
 {
   TransportLayer* dtls = aFlow->GetLayer(TransportLayerDtls::ID());
   if (dtls) {
     dtls->SignalStateChange.connect(this, &PeerConnectionMedia::DtlsConnected_s);
   }
 }
 
-nsresult
-LocalSourceStreamInfo::TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
-                                        const std::string& oldTrackId,
-                                        MediaStreamTrack& aNewTrack,
-                                        const std::string& newTrackId)
-{
-  if (mPipelines.count(newTrackId)) {
-    CSFLogError(LOGTAG, "%s: Pipeline already exists for %s/%s",
-                __FUNCTION__, mId.c_str(), newTrackId.c_str());
-    return NS_ERROR_INVALID_ARG;
-  }
-
-  RefPtr<MediaPipeline> pipeline(info->ForgetPipelineByTrackId_m(oldTrackId));
-
-  if (!pipeline) {
-    // Replacetrack can potentially happen in the middle of offer/answer, before
-    // the pipeline has been created.
-    CSFLogInfo(LOGTAG, "%s: Replacing track before the pipeline has been "
-                       "created, nothing to do.", __FUNCTION__);
-    return NS_OK;
-  }
-
-  nsresult rv =
-    static_cast<MediaPipelineTransmit*>(pipeline.get())->ReplaceTrack(aNewTrack);
-  NS_ENSURE_SUCCESS(rv, rv);
-
-  mPipelines[newTrackId] = pipeline;
-
-  return NS_OK;
-}
-
 /**
  * Tells you if any local track is isolated to a specific peer identity.
  * Obviously, we want all the tracks to be isolated equally so that they can
  * all be sent or not.  We check once when we are setting a local description
  * and that determines if we flip the "privacy requested" bit on.  Once the bit
  * is on, all media originating from this peer connection is isolated.
  *
  * @returns true if any track has a peerIdentity set on it
  */
 bool
 PeerConnectionMedia::AnyLocalTrackHasPeerIdentity() const
 {
   ASSERT_ON_THREAD(mMainThread);
 
-  for (uint32_t u = 0; u < mLocalSourceStreams.Length(); u++) {
-    for (auto pair : mLocalSourceStreams[u]->GetMediaStreamTracks()) {
-      if (pair.second->GetPeerIdentity() != nullptr) {
-        return true;
-      }
+  for (const RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->GetSendTrack() &&
+        transceiver->GetSendTrack()->GetPeerIdentity()) {
+      return true;
     }
   }
   return false;
 }
 
 void
 PeerConnectionMedia::UpdateRemoteStreamPrincipals_m(nsIPrincipal* aPrincipal)
 {
   ASSERT_ON_THREAD(mMainThread);
 
-  for (uint32_t u = 0; u < mRemoteSourceStreams.Length(); u++) {
-    mRemoteSourceStreams[u]->UpdatePrincipal_m(aPrincipal);
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    transceiver->UpdatePrincipal(aPrincipal);
   }
 }
 
 void
 PeerConnectionMedia::UpdateSinkIdentity_m(MediaStreamTrack* aTrack,
                                           nsIPrincipal* aPrincipal,
                                           const PeerIdentity* aSinkIdentity)
 {
   ASSERT_ON_THREAD(mMainThread);
 
-  for (uint32_t u = 0; u < mLocalSourceStreams.Length(); u++) {
-    mLocalSourceStreams[u]->UpdateSinkIdentity_m(aTrack, aPrincipal,
-                                                 aSinkIdentity);
-  }
-}
-
-void
-LocalSourceStreamInfo::UpdateSinkIdentity_m(MediaStreamTrack* aTrack,
-                                            nsIPrincipal* aPrincipal,
-                                            const PeerIdentity* aSinkIdentity)
-{
-  for (auto& pipeline_ : mPipelines) {
-    MediaPipelineTransmit* pipeline =
-      static_cast<MediaPipelineTransmit*>(pipeline_.second.get());
-    pipeline->UpdateSinkIdentity_m(aTrack, aPrincipal, aSinkIdentity);
-  }
-}
-
-void RemoteSourceStreamInfo::UpdatePrincipal_m(nsIPrincipal* aPrincipal)
-{
-  // This blasts away the existing principal.
-  // We only do this when we become certain that the all tracks are safe to make
-  // accessible to the script principal.
-  for (auto& trackPair : mTracks) {
-    MOZ_RELEASE_ASSERT(trackPair.second);
-    RemoteTrackSource& source =
-      static_cast<RemoteTrackSource&>(trackPair.second->GetSource());
-    source.SetPrincipal(aPrincipal);
-
-    RefPtr<MediaPipeline> pipeline = GetPipelineByTrackId_m(trackPair.first);
-    if (pipeline) {
-      MOZ_ASSERT(pipeline->direction() == MediaPipeline::RECEIVE);
-      static_cast<MediaPipelineReceive*>(pipeline.get())
-        ->SetPrincipalHandle_m(MakePrincipalHandle(aPrincipal));
-    }
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    transceiver->UpdateSinkIdentity(aTrack, aPrincipal, aSinkIdentity);
   }
 }
 
 bool
 PeerConnectionMedia::AnyCodecHasPluginID(uint64_t aPluginID)
 {
-  for (uint32_t i=0; i < mLocalSourceStreams.Length(); ++i) {
-    if (mLocalSourceStreams[i]->AnyCodecHasPluginID(aPluginID)) {
-      return true;
-    }
-  }
-  for (uint32_t i=0; i < mRemoteSourceStreams.Length(); ++i) {
-    if (mRemoteSourceStreams[i]->AnyCodecHasPluginID(aPluginID)) {
-      return true;
-    }
-  }
-  return false;
-}
-
-bool
-SourceStreamInfo::AnyCodecHasPluginID(uint64_t aPluginID)
-{
-  // Scan the videoConduits for this plugin ID
-  for (auto& pipeline : mPipelines) {
-    if (pipeline.second->Conduit()->CodecPluginID() == aPluginID) {
+  for (RefPtr<TransceiverImpl>& transceiver : mTransceivers) {
+    if (transceiver->ConduitHasPluginID(aPluginID)) {
       return true;
     }
   }
   return false;
 }
 
-nsresult
-SourceStreamInfo::StorePipeline(
-    const std::string& trackId,
-    const RefPtr<mozilla::MediaPipeline>& aPipeline)
-{
-  MOZ_ASSERT(mPipelines.find(trackId) == mPipelines.end());
-  if (mPipelines.find(trackId) != mPipelines.end()) {
-    CSFLogError(LOGTAG, "%s: Storing duplicate track", __FUNCTION__);
-    return NS_ERROR_FAILURE;
-  }
-
-  mPipelines[trackId] = aPipeline;
-  return NS_OK;
-}
-
-void
-RemoteSourceStreamInfo::DetachMedia_m()
-{
-  for (auto& webrtcIdAndTrack : mTracks) {
-    EndTrack(mMediaStream->GetInputStream(), webrtcIdAndTrack.second);
-  }
-  SourceStreamInfo::DetachMedia_m();
-}
-
-void
-RemoteSourceStreamInfo::RemoveTrack(const std::string& trackId)
-{
-  auto it = mTracks.find(trackId);
-  if (it != mTracks.end()) {
-    EndTrack(mMediaStream->GetInputStream(), it->second);
-  }
-
-  SourceStreamInfo::RemoveTrack(trackId);
-}
-
-void
-RemoteSourceStreamInfo::SyncPipeline(
-  RefPtr<MediaPipelineReceive> aPipeline)
-{
-  // See if we have both audio and video here, and if so cross the streams and
-  // sync them
-  // TODO: Do we need to prevent multiple syncs if there is more than one audio
-  // or video track in a single media stream? What are we supposed to do in this
-  // case?
-  for (auto i = mPipelines.begin(); i != mPipelines.end(); ++i) {
-    if (i->second->IsVideo() != aPipeline->IsVideo()) {
-      // Ok, we have one video, one non-video - cross the streams!
-      WebrtcAudioConduit *audio_conduit =
-        static_cast<WebrtcAudioConduit*>(aPipeline->IsVideo() ?
-                                                  i->second->Conduit() :
-                                                  aPipeline->Conduit());
-      WebrtcVideoConduit *video_conduit =
-        static_cast<WebrtcVideoConduit*>(aPipeline->IsVideo() ?
-                                                  aPipeline->Conduit() :
-                                                  i->second->Conduit());
-      video_conduit->SyncTo(audio_conduit);
-      CSFLogDebug(LOGTAG, "Syncing %p to %p, %s to %s",
-                          video_conduit, audio_conduit,
-                          i->first.c_str(), aPipeline->trackid().c_str());
-    }
-  }
-}
-
-void
-RemoteSourceStreamInfo::StartReceiving()
-{
-  if (mReceiving || mPipelines.empty()) {
-    return;
-  }
-
-  mReceiving = true;
-
-  SourceMediaStream* source = GetMediaStream()->GetInputStream()->AsSourceStream();
-  source->SetPullEnabled(true);
-  // AdvanceKnownTracksTicksTime(HEAT_DEATH_OF_UNIVERSE) means that in
-  // theory per the API, we can't add more tracks before that
-  // time. However, the impl actually allows it, and it avoids a whole
-  // bunch of locking that would be required (and potential blocking)
-  // if we used smaller values and updated them on each NotifyPull.
-  source->AdvanceKnownTracksTime(STREAM_TIME_MAX);
-  CSFLogDebug(LOGTAG, "Finished adding tracks to MediaStream %p", source);
-}
-
-RefPtr<MediaPipeline> SourceStreamInfo::GetPipelineByTrackId_m(
-    const std::string& trackId) {
-  ASSERT_ON_THREAD(mParent->GetMainThread());
-
-  // Refuse to hand out references if we're tearing down.
-  // (Since teardown involves a dispatch to and from STS before MediaPipelines
-  // are released, it is safe to start other dispatches to and from STS with a
-  // RefPtr<MediaPipeline>, since that reference won't be the last one
-  // standing)
-  if (mMediaStream) {
-    if (mPipelines.count(trackId)) {
-      return mPipelines[trackId];
-    }
-  }
-
-  return nullptr;
-}
-
-already_AddRefed<MediaPipeline>
-LocalSourceStreamInfo::ForgetPipelineByTrackId_m(const std::string& trackId)
-{
-  ASSERT_ON_THREAD(mParent->GetMainThread());
-
-  // Refuse to hand out references if we're tearing down.
-  // (Since teardown involves a dispatch to and from STS before MediaPipelines
-  // are released, it is safe to start other dispatches to and from STS with a
-  // RefPtr<MediaPipeline>, since that reference won't be the last one
-  // standing)
-  if (mMediaStream) {
-    if (mPipelines.count(trackId)) {
-      RefPtr<MediaPipeline> pipeline(mPipelines[trackId]);
-      mPipelines.erase(trackId);
-      return pipeline.forget();
-    }
-  }
-
-  return nullptr;
-}
-
-auto
-RemoteTrackSource::ApplyConstraints(
-    nsPIDOMWindowInner* aWindow,
-    const dom::MediaTrackConstraints& aConstraints,
-    dom::CallerType aCallerType) -> already_AddRefed<PledgeVoid>
-{
-  RefPtr<PledgeVoid> p = new PledgeVoid();
-  p->Reject(new dom::MediaStreamError(aWindow,
-                                      NS_LITERAL_STRING("OverconstrainedError"),
-                                      NS_LITERAL_STRING("")));
-  return p.forget();
-}
-
 } // namespace mozilla
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
@@ -4,239 +4,53 @@
 
 #ifndef _PEER_CONNECTION_MEDIA_H_
 #define _PEER_CONNECTION_MEDIA_H_
 
 #include <string>
 #include <vector>
 #include <map>
 
-#include "nspr.h"
-#include "prlock.h"
-
 #include "mozilla/RefPtr.h"
 #include "mozilla/UniquePtr.h"
 #include "mozilla/net/StunAddrsRequestChild.h"
-#include "nsComponentManagerUtils.h"
 #include "nsIProtocolProxyCallback.h"
 
-#include "signaling/src/jsep/JsepSession.h"
-#include "AudioSegment.h"
-
-#include "Layers.h"
-#include "VideoUtils.h"
-#include "ImageLayers.h"
-#include "VideoSegment.h"
-#include "MediaStreamTrack.h"
+#include "TransceiverImpl.h"
 
 class nsIPrincipal;
 
 namespace mozilla {
 class DataChannel;
 class PeerIdentity;
-class MediaPipelineFactory;
 namespace dom {
 struct RTCInboundRTPStreamStats;
 struct RTCOutboundRTPStreamStats;
+class MediaStreamTrack;
 }
 }
 
 #include "nricectxhandler.h"
 #include "nriceresolver.h"
 #include "nricemediastream.h"
-#include "MediaPipeline.h"
 
 namespace mozilla {
 
 class PeerConnectionImpl;
 class PeerConnectionMedia;
 class PCUuidGenerator;
-
-class SourceStreamInfo {
-public:
-  SourceStreamInfo(DOMMediaStream* aMediaStream,
-                   PeerConnectionMedia *aParent,
-                   const std::string& aId)
-      : mMediaStream(aMediaStream),
-        mParent(aParent),
-        mId(aId) {
-    MOZ_ASSERT(mMediaStream);
-  }
-
-  SourceStreamInfo(already_AddRefed<DOMMediaStream>& aMediaStream,
-                   PeerConnectionMedia *aParent,
-                   const std::string& aId)
-      : mMediaStream(aMediaStream),
-        mParent(aParent),
-        mId(aId) {
-    MOZ_ASSERT(mMediaStream);
-  }
-
-  virtual ~SourceStreamInfo() {}
-
-  DOMMediaStream* GetMediaStream() const {
-    return mMediaStream;
-  }
-
-  nsresult StorePipeline(const std::string& trackId,
-                         const RefPtr<MediaPipeline>& aPipeline);
-
-  virtual void AddTrack(const std::string& trackId,
-                        const RefPtr<dom::MediaStreamTrack>& aTrack)
-  {
-    mTracks.insert(std::make_pair(trackId, aTrack));
-  }
-  virtual void RemoveTrack(const std::string& trackId);
-  bool HasTrack(const std::string& trackId) const
-  {
-    return !!mTracks.count(trackId);
-  }
-  size_t GetTrackCount() const { return mTracks.size(); }
-
-  // This method exists for stats and the unittests.
-  // It allows visibility into the pipelines and flows.
-  const std::map<std::string, RefPtr<MediaPipeline>>&
-  GetPipelines() const { return mPipelines; }
-  RefPtr<MediaPipeline> GetPipelineByTrackId_m(const std::string& trackId);
-  // This is needed so PeerConnectionImpl can unregister itself as
-  // PrincipalChangeObserver from each track.
-  const std::map<std::string, RefPtr<dom::MediaStreamTrack>>&
-  GetMediaStreamTracks() const { return mTracks; }
-  dom::MediaStreamTrack* GetTrackById(const std::string& trackId) const
-  {
-    auto it = mTracks.find(trackId);
-    if (it == mTracks.end()) {
-      return nullptr;
-    }
-
-    return it->second;
-  }
-  const std::string& GetId() const { return mId; }
-
-  void DetachTransport_s();
-  virtual void DetachMedia_m();
-  bool AnyCodecHasPluginID(uint64_t aPluginID);
-protected:
-  void EndTrack(MediaStream* stream, dom::MediaStreamTrack* track);
-  RefPtr<DOMMediaStream> mMediaStream;
-  PeerConnectionMedia *mParent;
-  const std::string mId;
-  // These get set up before we generate our local description, the pipelines
-  // and conduits are set up once offer/answer completes.
-  std::map<std::string, RefPtr<dom::MediaStreamTrack>> mTracks;
-  std::map<std::string, RefPtr<MediaPipeline>> mPipelines;
-};
-
-// TODO(ekr@rtfm.com): Refactor {Local,Remote}SourceStreamInfo
-// bug 837539.
-class LocalSourceStreamInfo : public SourceStreamInfo {
-  ~LocalSourceStreamInfo() {
-    mMediaStream = nullptr;
-  }
-public:
-  LocalSourceStreamInfo(DOMMediaStream *aMediaStream,
-                        PeerConnectionMedia *aParent,
-                        const std::string& aId)
-     : SourceStreamInfo(aMediaStream, aParent, aId) {}
+class MediaPipeline;
+class MediaPipelineFilter;
+class JsepSession;
 
-  nsresult TakePipelineFrom(RefPtr<LocalSourceStreamInfo>& info,
-                            const std::string& oldTrackId,
-                            dom::MediaStreamTrack& aNewTrack,
-                            const std::string& newTrackId);
-
-  void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
-                            nsIPrincipal* aPrincipal,
-                            const PeerIdentity* aSinkIdentity);
-
-  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(LocalSourceStreamInfo)
-
-private:
-  already_AddRefed<MediaPipeline> ForgetPipelineByTrackId_m(
-      const std::string& trackId);
-};
-
-class RemoteTrackSource : public dom::MediaStreamTrackSource
-{
-public:
-  explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
-    : dom::MediaStreamTrackSource(aPrincipal, aLabel) {}
-
-  dom::MediaSourceEnum GetMediaSource() const override
-  {
-    return dom::MediaSourceEnum::Other;
-  }
-
-  already_AddRefed<PledgeVoid>
-  ApplyConstraints(nsPIDOMWindowInner* aWindow,
-                   const dom::MediaTrackConstraints& aConstraints,
-                   dom::CallerType aCallerType) override;
-
-  void Stop() override
-  {
-    // XXX (Bug 1314270): Implement rejection logic if necessary when we have
-    //                    clarity in the spec.
-  }
-
-  void SetPrincipal(nsIPrincipal* aPrincipal)
-  {
-    mPrincipal = aPrincipal;
-    PrincipalChanged();
-  }
-
-protected:
-  virtual ~RemoteTrackSource() {}
-};
-
-class RemoteSourceStreamInfo : public SourceStreamInfo {
-  ~RemoteSourceStreamInfo() {}
- public:
-  RemoteSourceStreamInfo(already_AddRefed<DOMMediaStream> aMediaStream,
-                         PeerConnectionMedia *aParent,
-                         const std::string& aId)
-    : SourceStreamInfo(aMediaStream, aParent, aId),
-      mReceiving(false)
-  {
-  }
-
-  void DetachMedia_m() override;
-  void RemoveTrack(const std::string& trackId) override;
-  void SyncPipeline(RefPtr<MediaPipelineReceive> aPipeline);
-
-  void UpdatePrincipal_m(nsIPrincipal* aPrincipal);
-
-  NS_INLINE_DECL_THREADSAFE_REFCOUNTING(RemoteSourceStreamInfo)
-
-  void AddTrack(const std::string& trackId,
-                const RefPtr<dom::MediaStreamTrack>& aTrack) override
-  {
-    SourceStreamInfo::AddTrack(trackId, aTrack);
-  }
-
-  TrackID GetNumericTrackId(const std::string& trackId) const
-  {
-    dom::MediaStreamTrack* track = GetTrackById(trackId);
-    if (!track) {
-      return TRACK_INVALID;
-    }
-    return track->mTrackID;
-  }
-
-  void StartReceiving();
-
- private:
-  // True iff SetPullEnabled(true) has been called on the DOMMediaStream. This
-  // happens when offer/answer concludes.
-  bool mReceiving;
-};
-
+// TODO(bug 1402997): If we move the TransceiverImpl stuff out of here, this
+// will be a class that handles just the transport stuff, and we can rename it
+// to something more explanatory (say, PeerConnectionTransportManager).
 class PeerConnectionMedia : public sigslot::has_slots<> {
-  ~PeerConnectionMedia()
-  {
-    MOZ_RELEASE_ASSERT(!mMainThread);
-  }
+  ~PeerConnectionMedia();
 
  public:
   explicit PeerConnectionMedia(PeerConnectionImpl *parent);
 
   enum IceRestartState { ICE_RESTART_NONE,
                          ICE_RESTART_PROVISIONAL,
                          ICE_RESTART_COMMITTED
   };
@@ -259,18 +73,21 @@ class PeerConnectionMedia : public sigsl
     return mIceCtxHdlr->ctx()->GetStreamCount();
   }
 
   // Ensure ICE transports exist that we might need when offer/answer concludes
   void EnsureTransports(const JsepSession& aSession);
 
   // Activate or remove ICE transports at the conclusion of offer/answer,
   // or when rollback occurs.
-  void ActivateOrRemoveTransports(const JsepSession& aSession,
-                                  const bool forceIceTcp);
+  nsresult ActivateOrRemoveTransports(const JsepSession& aSession,
+                                      const bool forceIceTcp);
+
+  // Update the transports on the TransceiverImpls
+  nsresult UpdateTransceiverTransports(const JsepSession& aSession);
 
   // Start ICE checks.
   void StartIceChecks(const JsepSession& session);
 
   bool IsIceRestarting() const;
   IceRestartState GetIceRestartState() const;
 
   // Begin ICE restart
@@ -286,61 +103,48 @@ class PeerConnectionMedia : public sigsl
   // Process a trickle ICE candidate.
   void AddIceCandidate(const std::string& candidate, const std::string& mid,
                        uint32_t aMLine);
 
   // Handle notifications of network online/offline events.
   void UpdateNetworkState(bool online);
 
   // Handle complete media pipelines.
-  nsresult UpdateMediaPipelines(const JsepSession& session);
+  // This updates codec parameters, starts/stops send/receive, and other
+  // stuff that doesn't necessarily require negotiation. This can be called at
+  // any time, not just when an offer/answer exchange completes.
+  // TODO: Let's move this to PeerConnectionImpl
+  nsresult UpdateMediaPipelines();
 
-  // Add a track (main thread only)
-  nsresult AddTrack(DOMMediaStream& aMediaStream,
-                    const std::string& streamId,
-                    dom::MediaStreamTrack& aTrack,
-                    const std::string& trackId);
-
-  nsresult RemoveLocalTrack(const std::string& streamId,
-                            const std::string& trackId);
-  nsresult RemoveRemoteTrack(const std::string& streamId,
-                            const std::string& trackId);
+  // TODO: Let's move the TransceiverImpl stuff to PeerConnectionImpl.
+  nsresult AddTransceiver(
+      JsepTransceiver* aJsepTransceiver,
+      DOMMediaStream& aReceiveStream,
+      dom::MediaStreamTrack* aSendTrack,
+      RefPtr<TransceiverImpl>* aTransceiverImpl);
 
-  // Get a specific local stream
-  uint32_t LocalStreamsLength()
-  {
-    return mLocalSourceStreams.Length();
-  }
-  LocalSourceStreamInfo* GetLocalStreamByIndex(int index);
-  LocalSourceStreamInfo* GetLocalStreamById(const std::string& id);
-  LocalSourceStreamInfo* GetLocalStreamByTrackId(const std::string& id);
+  void GetTransmitPipelinesMatching(
+      dom::MediaStreamTrack* aTrack,
+      nsTArray<RefPtr<MediaPipeline>>* aPipelines);
 
-  // Get a specific remote stream
-  uint32_t RemoteStreamsLength()
-  {
-    return mRemoteSourceStreams.Length();
-  }
+  void GetReceivePipelinesMatching(
+      dom::MediaStreamTrack* aTrack,
+      nsTArray<RefPtr<MediaPipeline>>* aPipelines);
 
-  RemoteSourceStreamInfo* GetRemoteStreamByIndex(size_t index);
-  RemoteSourceStreamInfo* GetRemoteStreamById(const std::string& id);
-  RemoteSourceStreamInfo* GetRemoteStreamByTrackId(const std::string& id);
+  nsresult AddRIDExtension(dom::MediaStreamTrack& aRecvTrack,
+                           unsigned short aExtensionId);
 
-  // Add a remote stream.
-  nsresult AddRemoteStream(RefPtr<RemoteSourceStreamInfo> aInfo);
-
-  nsresult ReplaceTrack(const std::string& aOldStreamId,
-                        const std::string& aOldTrackId,
-                        dom::MediaStreamTrack& aNewTrack,
-                        const std::string& aNewStreamId,
-                        const std::string& aNewTrackId);
+  nsresult AddRIDFilter(dom::MediaStreamTrack& aRecvTrack,
+                        const nsAString& aRid);
 
   // In cases where the peer isn't yet identified, we disable the pipeline (not
   // the stream, that would potentially affect others), so that it sends
   // black/silence.  Once the peer is identified, re-enable those streams.
   // aTrack will be set if this update came from a principal change on aTrack.
+  // TODO: Move to PeerConnectionImpl
   void UpdateSinkIdentity_m(dom::MediaStreamTrack* aTrack,
                             nsIPrincipal* aPrincipal,
                             const PeerIdentity* aSinkIdentity);
   // this determines if any track is peerIdentity constrained
   bool AnyLocalTrackHasPeerIdentity() const;
   // When we finally learn who is on the other end, we need to change the ownership
   // on streams
   void UpdateRemoteStreamPrincipals_m(nsIPrincipal* aPrincipal);
@@ -361,79 +165,48 @@ class PeerConnectionMedia : public sigsl
     int index_inner = GetTransportFlowIndex(aStreamIndex, aIsRtcp);
 
     if (mTransportFlows.find(index_inner) == mTransportFlows.end())
       return nullptr;
 
     return mTransportFlows[index_inner];
   }
 
+  // Used by PCImpl in a couple of places. Might be good to move that code in
+  // here.
+  std::vector<RefPtr<TransceiverImpl>>& GetTransceivers()
+  {
+    return mTransceivers;
+  }
+
   // Add a transport flow
   void AddTransportFlow(int aIndex, bool aRtcp,
                         const RefPtr<TransportFlow> &aFlow);
   void RemoveTransportFlow(int aIndex, bool aRtcp);
   void ConnectDtlsListener_s(const RefPtr<TransportFlow>& aFlow);
   void DtlsConnected_s(TransportLayer* aFlow,
                        TransportLayer::State state);
   static void DtlsConnected_m(const std::string& aParentHandle,
                               bool aPrivacyRequested);
 
-  RefPtr<AudioSessionConduit> GetAudioConduit(size_t level) {
-    auto it = mConduits.find(level);
-    if (it == mConduits.end()) {
-      return nullptr;
-    }
-
-    if (it->second.first) {
-      MOZ_ASSERT(false, "In GetAudioConduit, we found a video conduit!");
-      return nullptr;
-    }
-
-    return RefPtr<AudioSessionConduit>(
-        static_cast<AudioSessionConduit*>(it->second.second.get()));
-  }
-
-  RefPtr<VideoSessionConduit> GetVideoConduit(size_t level) {
-    auto it = mConduits.find(level);
-    if (it == mConduits.end()) {
-      return nullptr;
-    }
-
-    if (!it->second.first) {
-      MOZ_ASSERT(false, "In GetVideoConduit, we found an audio conduit!");
-      return nullptr;
-    }
-
-    return RefPtr<VideoSessionConduit>(
-        static_cast<VideoSessionConduit*>(it->second.second.get()));
-  }
-
-  void AddVideoConduit(size_t level, const RefPtr<VideoSessionConduit> &aConduit) {
-    mConduits[level] = std::make_pair(true, aConduit);
-  }
-
-  // Add a conduit
-  void AddAudioConduit(size_t level, const RefPtr<AudioSessionConduit> &aConduit) {
-    mConduits[level] = std::make_pair(false, aConduit);
-  }
-
   // ICE state signals
   sigslot::signal2<NrIceCtx*, NrIceCtx::GatheringState>
       SignalIceGatheringStateChange;
   sigslot::signal2<NrIceCtx*, NrIceCtx::ConnectionState>
       SignalIceConnectionStateChange;
   // This passes a candidate:... attribute  and level
   sigslot::signal2<const std::string&, uint16_t> SignalCandidate;
   // This passes address, port, level of the default candidate.
   sigslot::signal5<const std::string&, uint16_t,
                    const std::string&, uint16_t, uint16_t>
       SignalUpdateDefaultCandidate;
   sigslot::signal1<uint16_t>
       SignalEndOfLocalCandidates;
 
+  // TODO: Move to PeerConnectionImpl
   RefPtr<WebRtcCallWrapper> mCall;
 
  private:
   void InitLocalAddrs(); // for stun local address IPC request
   nsresult InitProxy();
   class ProtocolProxyQueryHandler : public nsIProtocolProxyCallback {
    public:
     explicit ProtocolProxyQueryHandler(PeerConnectionMedia *pcm) :
@@ -473,16 +246,20 @@ class PeerConnectionMedia : public sigsl
   void EnsureTransport_s(size_t aLevel, size_t aComponentCount);
   void ActivateOrRemoveTransport_s(
       size_t aMLine,
       size_t aComponentCount,
       const std::string& aUfrag,
       const std::string& aPassword,
       const std::vector<std::string>& aCandidateList);
   void RemoveTransportsAtOrAfter_s(size_t aMLine);
+  nsresult UpdateTransportFlows(const JsepTransceiver& transceiver);
+  nsresult UpdateTransportFlow(size_t aLevel,
+                               bool aIsRtcp,
+                               const JsepTransport& aTransport);
 
   void GatherIfReady();
   void FlushIceCtxOperationQueueIfReady();
   void PerformOrEnqueueIceCtxOperation(nsIRunnable* runnable);
   void EnsureIceGathering_s(bool aDefaultRouteOnly, bool aProxyOnly);
   void StartIceChecks_s(bool aIsControlling,
                         bool aIsOfferer,
                         bool aIsIceLite,
@@ -539,25 +316,17 @@ class PeerConnectionMedia : public sigsl
   }
 
   // The parent PC
   PeerConnectionImpl *mParent;
   // and a loose handle on it for event driven stuff
   std::string mParentHandle;
   std::string mParentName;
 
-  // A list of streams returned from GetUserMedia
-  // This is only accessed on the main thread (with one special exception)
-  nsTArray<RefPtr<LocalSourceStreamInfo> > mLocalSourceStreams;
-
-  // A list of streams provided by the other side
-  // This is only accessed on the main thread (with one special exception)
-  nsTArray<RefPtr<RemoteSourceStreamInfo> > mRemoteSourceStreams;
-
-  std::map<size_t, std::pair<bool, RefPtr<MediaSessionConduit>>> mConduits;
+  std::vector<RefPtr<TransceiverImpl>> mTransceivers;
 
   // ICE objects
   RefPtr<NrIceCtxHandler> mIceCtxHdlr;
 
   // DNS
   RefPtr<NrIceResolver> mDNSResolver;
 
   // Transport flows: even is RTP, odd is RTCP
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/peerconnection/RemoteTrackSource.h
@@ -0,0 +1,56 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#ifndef _REMOTE_TRACK_SOURCE_H_
+#define _REMOTE_TRACK_SOURCE_H_
+
+#include "MediaStreamTrack.h"
+#include "MediaStreamError.h"
+
+namespace mozilla {
+
+class RemoteTrackSource : public dom::MediaStreamTrackSource
+{
+public:
+  explicit RemoteTrackSource(nsIPrincipal* aPrincipal, const nsString& aLabel)
+    : dom::MediaStreamTrackSource(aPrincipal, aLabel) {}
+
+  dom::MediaSourceEnum GetMediaSource() const override
+  {
+    return dom::MediaSourceEnum::Other;
+  }
+
+  already_AddRefed<PledgeVoid>
+  ApplyConstraints(nsPIDOMWindowInner* aWindow,
+                   const dom::MediaTrackConstraints& aConstraints,
+                   dom::CallerType aCallerType) override
+  {
+    RefPtr<PledgeVoid> p = new PledgeVoid();
+    p->Reject(
+        new dom::MediaStreamError(aWindow,
+                                  NS_LITERAL_STRING("OverconstrainedError"),
+                                  NS_LITERAL_STRING("")));
+    return p.forget();
+  }
+
+  void Stop() override
+  {
+    // XXX (Bug 1314270): Implement rejection logic if necessary when we have
+    //                    clarity in the spec.
+  }
+
+  void SetPrincipal(nsIPrincipal* aPrincipal)
+  {
+    mPrincipal = aPrincipal;
+    PrincipalChanged();
+  }
+
+protected:
+  virtual ~RemoteTrackSource() {}
+};
+
+} // namespace mozilla
+
+#endif // _REMOTE_TRACK_SOURCE_H_
+
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
@@ -0,0 +1,1075 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+
+#include "TransceiverImpl.h"
+#include "mtransport/runnable_utils.h"
+#include "mozilla/UniquePtr.h"
+#include <sstream>
+#include <string>
+#include <vector>
+#include <queue>
+#include "AudioConduit.h"
+#include "VideoConduit.h"
+#include "MediaStreamGraph.h"
+#include "MediaPipeline.h"
+#include "MediaPipelineFilter.h"
+#include "signaling/src/jsep/JsepTrack.h"
+#include "MediaStreamGraphImpl.h"
+#include "logging.h"
+#include "MediaEngine.h"
+#include "nsIPrincipal.h"
+#include "MediaSegment.h"
+#include "RemoteTrackSource.h"
+#include "MediaConduitInterface.h"
+#include "PeerConnectionMedia.h"
+#include "mozilla/dom/RTCRtpReceiverBinding.h"
+#include "mozilla/dom/RTCRtpSenderBinding.h"
+#include "mozilla/dom/RTCRtpTransceiverBinding.h"
+#include "mozilla/dom/TransceiverImplBinding.h"
+
+namespace mozilla {
+
+MOZ_MTLOG_MODULE("transceiverimpl")
+
+TransceiverImpl::TransceiverImpl(
+    const std::string& aPCHandle,
+    JsepTransceiver* aJsepTransceiver,
+    nsIEventTarget* aMainThread,
+    nsIEventTarget* aStsThread,
+    DOMMediaStream& aReceiveStream,
+    dom::MediaStreamTrack* aSendTrack,
+    WebRtcCallWrapper* aCallWrapper) :
+  mPCHandle(aPCHandle),
+  mJsepTransceiver(aJsepTransceiver),
+  mHaveStartedReceiving(false),
+  mHaveSetupTransport(false),
+  mMainThread(aMainThread),
+  mStsThread(aStsThread),
+  mReceiveStream(&aReceiveStream),
+  mSendTrack(aSendTrack),
+  mCallWrapper(aCallWrapper)
+{
+  if (IsVideo()) {
+    InitVideo();
+  } else {
+    InitAudio();
+  }
+
+  mConduit->SetPCHandle(mPCHandle);
+
+  StartReceiveStream();
+
+  mTransmitPipeline = new MediaPipelineTransmit(
+      mPCHandle,
+      mMainThread.get(),
+      mStsThread.get(),
+      IsVideo(),
+      mSendTrack,
+      mConduit);
+}
+
+TransceiverImpl::~TransceiverImpl() = default;
+
+NS_IMPL_ISUPPORTS0(TransceiverImpl)
+
+void
+TransceiverImpl::InitAudio()
+{
+  mConduit = AudioSessionConduit::Create();
+
+  mReceivePipeline = new MediaPipelineReceiveAudio(
+      mPCHandle,
+      mMainThread.get(),
+      mStsThread.get(),
+      static_cast<AudioSessionConduit*>(mConduit.get()),
+      mReceiveStream->GetInputStream()->AsSourceStream());
+}
+
+void
+TransceiverImpl::InitVideo()
+{
+  mConduit = VideoSessionConduit::Create(mCallWrapper);
+
+  mReceivePipeline = new MediaPipelineReceiveVideo(
+      mPCHandle,
+      mMainThread.get(),
+      mStsThread.get(),
+      static_cast<VideoSessionConduit*>(mConduit.get()),
+      mReceiveStream->GetInputStream()->AsSourceStream());
+}
+
+nsresult
+TransceiverImpl::UpdateSinkIdentity(dom::MediaStreamTrack* aTrack,
+                                    nsIPrincipal* aPrincipal,
+                                    const PeerIdentity* aSinkIdentity)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return NS_OK;
+  }
+
+  mTransmitPipeline->UpdateSinkIdentity_m(aTrack, aPrincipal, aSinkIdentity);
+  return NS_OK;
+}
+
+void
+TransceiverImpl::Shutdown_m()
+{
+  mReceivePipeline->Shutdown_m();
+  mTransmitPipeline->Shutdown_m();
+  mReceivePipeline = nullptr;
+  mTransmitPipeline = nullptr;
+  mSendTrack = nullptr;
+  mConduit = nullptr;
+  RUN_ON_THREAD(mStsThread, WrapRelease(mRtpFlow.forget()), NS_DISPATCH_NORMAL);
+  RUN_ON_THREAD(mStsThread, WrapRelease(mRtcpFlow.forget()), NS_DISPATCH_NORMAL);
+}
+
+nsresult
+TransceiverImpl::UpdateSendTrack(dom::MediaStreamTrack* aSendTrack)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return NS_ERROR_UNEXPECTED;
+  }
+
+  MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                      "(" << aSendTrack << ")");
+  mSendTrack = aSendTrack;
+  return mTransmitPipeline->ReplaceTrack(mSendTrack);
+}
+
+nsresult
+TransceiverImpl::UpdateTransport(PeerConnectionMedia& aTransportManager)
+{
+  if (!mJsepTransceiver->HasLevel()) {
+    return NS_OK;
+  }
+
+  if (!mHaveSetupTransport) {
+    mReceivePipeline->SetLevel(mJsepTransceiver->GetLevel());
+    mTransmitPipeline->SetLevel(mJsepTransceiver->GetLevel());
+    mHaveSetupTransport = true;
+  }
+
+  ASSERT_ON_THREAD(mMainThread);
+  nsAutoPtr<MediaPipelineFilter> filter;
+
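+  // Look up the flows for this transceiver's transport level; with rtcp-mux
+  // there is no separate RTCP flow, so mRtcpFlow may be null.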
+  mRtpFlow = aTransportManager.GetTransportFlow(
+      mJsepTransceiver->GetTransportLevel(), false);
+  mRtcpFlow = aTransportManager.GetTransportFlow(
+      mJsepTransceiver->GetTransportLevel(), true);
+
+  if (mJsepTransceiver->HasBundleLevel() &&
+      mJsepTransceiver->mRecvTrack.GetNegotiatedDetails()) {
+    filter = new MediaPipelineFilter;
+
+    // Add remote SSRCs so we can distinguish which RTP packets actually
+    // belong to this pipeline (also RTCP sender reports).
+    for (unsigned int ssrc : mJsepTransceiver->mRecvTrack.GetSsrcs()) {
+      filter->AddRemoteSSRC(ssrc);
+    }
+
+    // TODO(bug 1105005): Tell the filter about the mid for this track
+
+    // Add unique payload types as a last-ditch fallback
+    auto uniquePts =
+      mJsepTransceiver->mRecvTrack.GetNegotiatedDetails()->GetUniquePayloadTypes();
+    for (unsigned char& uniquePt : uniquePts) {
+      filter->AddUniquePT(uniquePt);
+    }
+  }
+
+  mReceivePipeline->UpdateTransport_m(mRtpFlow, mRtcpFlow, filter);
+  mTransmitPipeline->UpdateTransport_m(mRtpFlow, mRtcpFlow, nsAutoPtr<MediaPipelineFilter>());
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::UpdateConduit()
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return NS_OK;
+  }
+
+  if (mJsepTransceiver->IsAssociated()) {
+    mMid = mJsepTransceiver->GetMid();
+  } else {
+    mMid.clear();
+  }
+
+  mReceivePipeline->Stop();
+  mTransmitPipeline->Stop();
+
+  // NOTE(pkerr) - the Call API requires that both local_ssrc and remote_ssrc be
+  // set to a non-zero value or the CreateVideo...Stream call will fail.
+  if (mJsepTransceiver->mSendTrack.GetSsrcs().empty()) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " No local SSRC set! (Should be set regardless of "
+                        "whether we're sending RTP; we need a local SSRC in "
+                        "all cases)");
+    return NS_ERROR_FAILURE;
+  }
+
+  if(!mConduit->SetLocalSSRCs(mJsepTransceiver->mSendTrack.GetSsrcs())) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " SetLocalSSRCs failed");
+    return NS_ERROR_FAILURE;
+  }
+
+  mConduit->SetLocalCNAME(mJsepTransceiver->mSendTrack.GetCNAME().c_str());
+  mConduit->SetLocalMID(mJsepTransceiver->mTransport->mTransportId);
+
+  nsresult rv;
+
+  if (IsVideo()) {
+    rv = UpdateVideoConduit();
+  } else {
+    rv = UpdateAudioConduit();
+  }
+
+  if (NS_FAILED(rv)) {
+    return rv;
+  }
+
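+  // Conduit configuration is done; (re)start whichever directions are active.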
+  if (mJsepTransceiver->mRecvTrack.GetActive()) {
+    MOZ_ASSERT(mReceiveStream);
+    mReceivePipeline->Start();
+    mHaveStartedReceiving = true;
+  }
+
+  if (mJsepTransceiver->mSendTrack.GetActive()) {
+    if (!mSendTrack) {
+      MOZ_MTLOG(ML_WARNING, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                            " Starting transmit conduit without send track!");
+    }
+    mTransmitPipeline->Start();
+  }
+
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::UpdatePrincipal(nsIPrincipal* aPrincipal)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return NS_OK;
+  }
+
+  nsTArray<RefPtr<dom::MediaStreamTrack>> receiveTracks;
+  mReceiveStream->GetTracks(receiveTracks);
+  if (receiveTracks.Length() != 1) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " mReceiveStream doesn't have exactly one track "
+                        "(it has " << receiveTracks.Length() << ")");
+    MOZ_CRASH();
+    return NS_ERROR_FAILURE;
+  }
+
+  // This blasts away the existing principal.
+  // We only do this when we become certain that all the tracks are safe to
+  // make accessible to the script principal.
+  RemoteTrackSource& source =
+    static_cast<RemoteTrackSource&>(receiveTracks[0]->GetSource());
+  source.SetPrincipal(aPrincipal);
+
+  mReceivePipeline->SetPrincipalHandle_m(MakePrincipalHandle(aPrincipal));
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::SyncWithMatchingVideoConduits(
+    std::vector<RefPtr<TransceiverImpl>>& transceivers)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return NS_OK;
+  }
+
+  if (IsVideo()) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " called when transceiver is not "
+                        "video! This should never happen.");
+    MOZ_CRASH();
+    return NS_ERROR_UNEXPECTED;
+  }
+
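+  // Gather the stream ids this audio transceiver receives on; any video
+  // transceiver receiving on one of the same streams has its video conduit
+  // synced to our audio conduit.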
+  std::set<std::string> myReceiveStreamIds;
+  myReceiveStreamIds.insert(mJsepTransceiver->mRecvTrack.GetStreamIds().begin(),
+                            mJsepTransceiver->mRecvTrack.GetStreamIds().end());
+
+  for (RefPtr<TransceiverImpl>& transceiver : transceivers) {
+    if (!transceiver->IsVideo()) {
+      // |this| is an audio transceiver, so we skip other audio transceivers
+      continue;
+    }
+
+    // We could make this more efficient by caching this set, but it is
+    // probably not worth it.
+    for (const std::string& streamId :
+         transceiver->mJsepTransceiver->mRecvTrack.GetStreamIds()) {
+      if (myReceiveStreamIds.count(streamId)) {
+        // Ok, we have one video, one non-video - cross the streams!
+        WebrtcAudioConduit *audio_conduit =
+          static_cast<WebrtcAudioConduit*>(mConduit.get());
+        WebrtcVideoConduit *video_conduit =
+          static_cast<WebrtcVideoConduit*>(transceiver->mConduit.get());
+
+        video_conduit->SyncTo(audio_conduit);
+        MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                            " Syncing " << video_conduit << " to "
+                            << audio_conduit);
+      }
+    }
+  }
+
+  return NS_OK;
+}
+
+bool
+TransceiverImpl::ConduitHasPluginID(uint64_t aPluginID)
+{
+  return mConduit->CodecPluginID() == aPluginID;
+}
+
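+// A null aSendTrack acts as a wildcard: it matches any transceiver that has a
+// send track at all.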
+bool
+TransceiverImpl::HasSendTrack(const dom::MediaStreamTrack* aSendTrack) const
+{
+  if (!mSendTrack) {
+    return false;
+  }
+
+  if (!aSendTrack) {
+    return true;
+  }
+
+  return mSendTrack.get() == aSendTrack;
+}
+
+void
+TransceiverImpl::SyncWithJS(dom::RTCRtpTransceiver& aJsTransceiver,
+                            ErrorResult& aRv)
+{
+  MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__
+                      << " Syncing with JS transceiver");
+
+  // Update stopped, both ways, since either JSEP or JS can stop these
+  if (mJsepTransceiver->IsStopped()) {
+    // We don't call RTCRtpTransceiver::Stop(), because that causes another sync
+    aJsTransceiver.SetStopped(aRv);
+    Stop();
+  } else if (aJsTransceiver.GetStopped(aRv)) {
+    mJsepTransceiver->Stop();
+    Stop();
+  }
+
+  // Lots of aRv checking in here for simple getters that should never fail.
+  // Just propagate the exception and let JS log it.
+  if (aRv.Failed()) {
+    return;
+  }
+
+  // Update direction from JS only
+  dom::RTCRtpTransceiverDirection direction = aJsTransceiver.GetDirection(aRv);
+
+  if (aRv.Failed()) {
+    return;
+  }
+
+  switch (direction) {
+    case dom::RTCRtpTransceiverDirection::Sendrecv:
+      mJsepTransceiver->mJsDirection =
+        SdpDirectionAttribute::Direction::kSendrecv;
+      break;
+    case dom::RTCRtpTransceiverDirection::Sendonly:
+      mJsepTransceiver->mJsDirection =
+        SdpDirectionAttribute::Direction::kSendonly;
+      break;
+    case dom::RTCRtpTransceiverDirection::Recvonly:
+      mJsepTransceiver->mJsDirection =
+        SdpDirectionAttribute::Direction::kRecvonly;
+      break;
+    case dom::RTCRtpTransceiverDirection::Inactive:
+      mJsepTransceiver->mJsDirection =
+        SdpDirectionAttribute::Direction::kInactive;
+      break;
+    default:
+      MOZ_ASSERT(false);
+      aRv = NS_ERROR_INVALID_ARG;
+      return;
+  }
+
+  // Update send track ids in JSEP
+  RefPtr<dom::RTCRtpSender> sender = aJsTransceiver.GetSender(aRv);
+  if (aRv.Failed()) {
+    return;
+  }
+
+  RefPtr<dom::MediaStreamTrack> sendTrack = sender->GetTrack(aRv);
+  if (aRv.Failed()) {
+    return;
+  }
+
+  if (sendTrack) {
+    nsString wideTrackId;
+    sendTrack->GetId(wideTrackId);
+    std::string trackId = NS_ConvertUTF16toUTF8(wideTrackId).get();
+    MOZ_ASSERT(!trackId.empty());
+
+    nsTArray<RefPtr<DOMMediaStream>> streams;
+    sender->GetStreams(streams, aRv);
+    if (aRv.Failed()) {
+      return;
+    }
+
+    std::vector<std::string> streamIds;
+    for (const auto& stream : streams) {
+      nsString wideStreamId;
+      stream->GetId(wideStreamId);
+      std::string streamId = NS_ConvertUTF16toUTF8(wideStreamId).get();
+      MOZ_ASSERT(!streamId.empty());
+      streamIds.push_back(streamId);
+    }
+
+    mJsepTransceiver->mSendTrack.UpdateTrackIds(streamIds, trackId);
+  }
+
+  // Update RTCRtpParameters
+  // TODO: Both ways for things like ssrc, codecs, header extensions, etc
+
+  dom::RTCRtpParameters parameters;
+  sender->GetParameters(parameters, aRv);
+
+  if (aRv.Failed()) {
+    return;
+  }
+
+  std::vector<JsepTrack::JsConstraints> constraints;
+
+  if (parameters.mEncodings.WasPassed()) {
+    for (auto& encoding : parameters.mEncodings.Value()) {
+      JsepTrack::JsConstraints constraint;
+      if (encoding.mRid.WasPassed()) {
+        // TODO: Either turn on the RID RTP header extension in JsepSession, or
+        // just leave that extension on all the time?
+        constraint.rid = NS_ConvertUTF16toUTF8(encoding.mRid.Value()).get();
+      }
+      if (encoding.mMaxBitrate.WasPassed()) {
+        constraint.constraints.maxBr = encoding.mMaxBitrate.Value();
+      }
+      constraint.constraints.scaleDownBy = encoding.mScaleResolutionDownBy;
+      constraints.push_back(constraint);
+    }
+  }
+
+  // TODO: Update conduits?
+
+  mJsepTransceiver->mSendTrack.SetJsConstraints(constraints);
+
+  // Update the webrtc track id in JS; the ids in SDP are not surfaced to
+  // content, because they do not follow the rules that track/stream ids must
+  // follow. Our JS code must be able to map the SDP ids to the actual
+  // tracks/streams, and this is how the mapping for track ids is updated.
+  nsString webrtcTrackId =
+    NS_ConvertUTF8toUTF16(mJsepTransceiver->mRecvTrack.GetTrackId().c_str());
+  MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__
+                      << " Setting webrtc track id: "
+                      << mJsepTransceiver->mRecvTrack.GetTrackId().c_str());
+  aJsTransceiver.SetRemoteTrackId(webrtcTrackId, aRv);
+
+  if (aRv.Failed()) {
+    return;
+  }
+
+  // mid from JSEP
+  if (mJsepTransceiver->IsAssociated()) {
+    aJsTransceiver.SetMid(
+        NS_ConvertUTF8toUTF16(mJsepTransceiver->GetMid().c_str()),
+        aRv);
+  } else {
+    aJsTransceiver.UnsetMid(aRv);
+  }
+
+  if (aRv.Failed()) {
+    return;
+  }
+
+  // currentDirection from JSEP, but not if "this transceiver has never been
+  // represented in an offer/answer exchange"
+  if (mJsepTransceiver->HasLevel()) {
+    dom::RTCRtpTransceiverDirection currentDirection;
+    if (mJsepTransceiver->mSendTrack.GetActive()) {
+      if (mJsepTransceiver->mRecvTrack.GetActive()) {
+        currentDirection = dom::RTCRtpTransceiverDirection::Sendrecv;
+      } else {
+        currentDirection = dom::RTCRtpTransceiverDirection::Sendonly;
+      }
+    } else {
+      if (mJsepTransceiver->mRecvTrack.GetActive()) {
+        currentDirection = dom::RTCRtpTransceiverDirection::Recvonly;
+      } else {
+        currentDirection = dom::RTCRtpTransceiverDirection::Inactive;
+      }
+    }
+
+    if (mJsepTransceiver->IsNegotiated()) {
+      aJsTransceiver.SetCurrentDirection(currentDirection, aRv);
+    }
+
+    if (aRv.Failed()) {
+      return;
+    }
+  }
+
+  // AddTrack magic from JS
+  if (aJsTransceiver.GetAddTrackMagic(aRv)) {
+    mJsepTransceiver->SetAddTrackMagic();
+  }
+
+  if (aRv.Failed()) {
+    return;
+  }
+
+  if (mJsepTransceiver->IsRemoved()) {
+    aJsTransceiver.Remove(aRv);
+  }
+}
+
+void
+TransceiverImpl::InsertDTMFTone(int tone, uint32_t duration)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return;
+  }
+
+  RefPtr<AudioSessionConduit> conduit(static_cast<AudioSessionConduit*>(
+        mConduit.get()));
+  mStsThread->Dispatch(WrapRunnableNM([conduit, tone, duration] () {
+        // Note: We default to channel 0, not inband, and 6 dB attenuation
+        //       here. We might want to revisit these choices in the future.
+        conduit->InsertDTMFTone(0, tone, true, duration, 6);
+        }), NS_DISPATCH_NORMAL);
+}
+
+bool
+TransceiverImpl::HasReceiveTrack(const dom::MediaStreamTrack* aRecvTrack) const
+{
+  if (!mHaveStartedReceiving) {
+    return false;
+  }
+
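+  // A null aRecvTrack matches any receive track.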
+  if (!aRecvTrack) {
+    return true;
+  }
+
+  return mReceiveStream->HasTrack(*aRecvTrack);
+}
+
+bool
+TransceiverImpl::WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto,
+                            JS::MutableHandle<JSObject*> aReflector)
+{
+  return dom::TransceiverImplBinding::Wrap(aCx, this, aGivenProto, aReflector);
+}
+
+already_AddRefed<dom::MediaStreamTrack>
+TransceiverImpl::GetReceiveTrack()
+{
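+  // The receive stream is expected to carry exactly one track.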
+  nsTArray<RefPtr<dom::MediaStreamTrack>> receiveTracks;
+  mReceiveStream->GetTracks(receiveTracks);
+  if (receiveTracks.Length() != 1) {
+    return nullptr;
+  }
+
+  return receiveTracks[0].forget();
+}
+
+RefPtr<MediaPipeline>
+TransceiverImpl::GetSendPipeline()
+{
+  return mTransmitPipeline;
+}
+
+RefPtr<MediaPipeline>
+TransceiverImpl::GetReceivePipeline()
+{
+  return mReceivePipeline;
+}
+
+void
+TransceiverImpl::AddRIDExtension(unsigned short aExtensionId)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return;
+  }
+
+  mReceivePipeline->AddRIDExtension_m(aExtensionId);
+}
+
+void
+TransceiverImpl::AddRIDFilter(const nsAString& aRid)
+{
+  if (mJsepTransceiver->IsStopped()) {
+    return;
+  }
+
+  mReceivePipeline->AddRIDFilter_m(NS_ConvertUTF16toUTF8(aRid).get());
+}
+
+static std::vector<JsepCodecDescription*>
+GetCodecs(const JsepTrackNegotiatedDetails& aDetails)
+{
+  // We do not try to handle cases where a codec is not used on the primary
+  // encoding.
+  if (aDetails.GetEncodingCount()) {
+    return aDetails.GetEncoding(0).GetCodecs();
+  }
+  return std::vector<JsepCodecDescription*>();
+}
+
+static nsresult
+JsepCodecDescToAudioCodecConfig(const JsepCodecDescription& aCodec,
+                                AudioCodecConfig** aConfig)
+{
+  MOZ_ASSERT(aCodec.mType == SdpMediaSection::kAudio);
+  if (aCodec.mType != SdpMediaSection::kAudio) {
+    return NS_ERROR_INVALID_ARG;
+  }
+
+  const JsepAudioCodecDescription& desc =
+      static_cast<const JsepAudioCodecDescription&>(aCodec);
+
+  uint16_t pt;
+
+  if (!desc.GetPtAsInt(&pt)) {
+    MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
+    return NS_ERROR_INVALID_ARG;
+  }
+
+  *aConfig = new AudioCodecConfig(pt,
+                                  desc.mName,
+                                  desc.mClock,
+                                  desc.mPacketSize,
+                                  desc.mForceMono ? 1 : desc.mChannels,
+                                  desc.mBitrate,
+                                  desc.mFECEnabled);
+  (*aConfig)->mMaxPlaybackRate = desc.mMaxPlaybackRate;
+  (*aConfig)->mDtmfEnabled = desc.mDtmfEnabled;
+
+  return NS_OK;
+}
+
+static nsresult
+NegotiatedDetailsToAudioCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
+                                     PtrVector<AudioCodecConfig>* aConfigs)
+{
+  std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
+  for (const JsepCodecDescription* codec : codecs) {
+    AudioCodecConfig* config;
+    if (NS_FAILED(JsepCodecDescToAudioCodecConfig(*codec, &config))) {
+      return NS_ERROR_INVALID_ARG;
+    }
+    aConfigs->values.push_back(config);
+  }
+
+  if (aConfigs->values.empty()) {
+    MOZ_MTLOG(ML_ERROR, "Can't set up a conduit with 0 codecs");
+    return NS_ERROR_FAILURE;
+  }
+
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::UpdateAudioConduit()
+{
+  RefPtr<AudioSessionConduit> conduit = static_cast<AudioSessionConduit*>(
+      mConduit.get());
+
+  if (mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() &&
+      mJsepTransceiver->mRecvTrack.GetActive()) {
+    const auto& details(*mJsepTransceiver->mRecvTrack.GetNegotiatedDetails());
+    PtrVector<AudioCodecConfig> configs;
+    nsresult rv = NegotiatedDetailsToAudioCodecConfigs(details, &configs);
+
+    if (NS_FAILED(rv)) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " Failed to convert JsepCodecDescriptions to "
+                          "AudioCodecConfigs (recv).");
+      return rv;
+    }
+
+    auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
+
+    if (error) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " ConfigureRecvMediaCodecs failed: " << error);
+      return NS_ERROR_FAILURE;
+    }
+  }
+
+  if (mJsepTransceiver->mSendTrack.GetNegotiatedDetails() &&
+      mJsepTransceiver->mSendTrack.GetActive()) {
+    const auto& details(*mJsepTransceiver->mSendTrack.GetNegotiatedDetails());
+    PtrVector<AudioCodecConfig> configs;
+    nsresult rv = NegotiatedDetailsToAudioCodecConfigs(details, &configs);
+
+    if (NS_FAILED(rv)) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " Failed to convert JsepCodecDescriptions to "
+                          "AudioCodecConfigs (send).");
+      return rv;
+    }
+
+    for (auto value: configs.values) {
+      if (value->mName == "telephone-event") {
+        // We have a telephone-event codec, so we need to make sure the
+        // dynamic payload type is set properly.
+        conduit->SetDtmfPayloadType(value->mType, value->mFreq);
+        break;
+      }
+    }
+
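+    // Only the first codec in the negotiated list is used for sending.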
+    auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
+    if (error) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " ConfigureSendMediaCodec failed: " << error);
+      return NS_ERROR_FAILURE;
+    }
+
+    // Should these be genericized like they are in the video conduit case?
+    const SdpExtmapAttributeList::Extmap* audioLevelExt =
+        details.GetExt(webrtc::RtpExtension::kAudioLevelUri);
+
+    if (audioLevelExt) {
+      MOZ_MTLOG(ML_DEBUG, "Calling EnableAudioLevelExtension");
+      error = conduit->EnableAudioLevelExtension(true, audioLevelExt->entry);
+
+      if (error) {
+        MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                            " EnableAudioLevelExtension failed: " << error);
+        return NS_ERROR_FAILURE;
+      }
+    }
+
+    const SdpExtmapAttributeList::Extmap* midExt =
+        details.GetExt(webrtc::RtpExtension::kMIdUri);
+
+    if (midExt) {
+      MOZ_MTLOG(ML_DEBUG, "Calling EnableMIDExtension");
+      error = conduit->EnableMIDExtension(true, midExt->entry);
+
+      if (error) {
+        MOZ_MTLOG(ML_ERROR, "EnableMIDExtension failed: " << error);
+        return NS_ERROR_FAILURE;
+      }
+    }
+  }
+
+  return NS_OK;
+}
+
+static nsresult
+JsepCodecDescToVideoCodecConfig(const JsepCodecDescription& aCodec,
+                                VideoCodecConfig** aConfig)
+{
+  MOZ_ASSERT(aCodec.mType == SdpMediaSection::kVideo);
+  if (aCodec.mType != SdpMediaSection::kVideo) {
+    MOZ_ASSERT(false, "JsepCodecDescription has wrong type");
+    return NS_ERROR_INVALID_ARG;
+  }
+
+  const JsepVideoCodecDescription& desc =
+      static_cast<const JsepVideoCodecDescription&>(aCodec);
+
+  uint16_t pt;
+
+  if (!desc.GetPtAsInt(&pt)) {
+    MOZ_MTLOG(ML_ERROR, "Invalid payload type: " << desc.mDefaultPt);
+    return NS_ERROR_INVALID_ARG;
+  }
+
+  UniquePtr<VideoCodecConfigH264> h264Config;
+
+  if (desc.mName == "H264") {
+    h264Config = MakeUnique<VideoCodecConfigH264>();
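+    // Copy sprop-parameter-sets into the fixed-size buffer, making sure the
+    // result is null-terminated even if the source string is too long.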
+    size_t spropSize = sizeof(h264Config->sprop_parameter_sets);
+    strncpy(h264Config->sprop_parameter_sets,
+            desc.mSpropParameterSets.c_str(),
+            spropSize);
+    h264Config->sprop_parameter_sets[spropSize - 1] = '\0';
+    h264Config->packetization_mode = desc.mPacketizationMode;
+    h264Config->profile_level_id = desc.mProfileLevelId;
+    h264Config->tias_bw = 0; // TODO(bug 1403206)
+  }
+
+  VideoCodecConfig* configRaw = new VideoCodecConfig(
+      pt, desc.mName, desc.mConstraints, h264Config.get());
+
+  configRaw->mAckFbTypes = desc.mAckFbTypes;
+  configRaw->mNackFbTypes = desc.mNackFbTypes;
+  configRaw->mCcmFbTypes = desc.mCcmFbTypes;
+  configRaw->mRembFbSet = desc.RtcpFbRembIsSet();
+  configRaw->mFECFbSet = desc.mFECEnabled;
+  if (desc.mFECEnabled) {
+    configRaw->mREDPayloadType = desc.mREDPayloadType;
+    configRaw->mULPFECPayloadType = desc.mULPFECPayloadType;
+  }
+
+  *aConfig = configRaw;
+  return NS_OK;
+}
+
+static nsresult
+NegotiatedDetailsToVideoCodecConfigs(const JsepTrackNegotiatedDetails& aDetails,
+                                     PtrVector<VideoCodecConfig>* aConfigs)
+{
+  std::vector<JsepCodecDescription*> codecs(GetCodecs(aDetails));
+  for (const JsepCodecDescription* codec : codecs) {
+    VideoCodecConfig* config;
+    if (NS_FAILED(JsepCodecDescToVideoCodecConfig(*codec, &config))) {
+      return NS_ERROR_INVALID_ARG;
+    }
+
+    config->mTias = aDetails.GetTias();
+
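+    // Record the simulcast encodings (rid and constraints) that include this
+    // codec.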
+    for (size_t i = 0; i < aDetails.GetEncodingCount(); ++i) {
+      const JsepTrackEncoding& jsepEncoding(aDetails.GetEncoding(i));
+      if (jsepEncoding.HasFormat(codec->mDefaultPt)) {
+        VideoCodecConfig::SimulcastEncoding encoding;
+        encoding.rid = jsepEncoding.mRid;
+        encoding.constraints = jsepEncoding.mConstraints;
+        config->mSimulcastEncodings.push_back(encoding);
+      }
+    }
+
+    aConfigs->values.push_back(config);
+  }
+
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::UpdateVideoConduit()
+{
+  RefPtr<VideoSessionConduit> conduit = static_cast<VideoSessionConduit*>(
+      mConduit.get());
+
+  // NOTE(pkerr) - this is new behavior. Needed because the CreateVideoReceiveStream
+  // method of the Call API will assert (in debug) and fail if a value is not provided
+  // for the remote_ssrc that will be used by the far-end sender.
+  if (!mJsepTransceiver->mRecvTrack.GetSsrcs().empty()) {
+    MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+              " Setting remote SSRC " <<
+              mJsepTransceiver->mRecvTrack.GetSsrcs().front());
+    conduit->SetRemoteSSRC(mJsepTransceiver->mRecvTrack.GetSsrcs().front());
+  }
+
+  if (mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() &&
+      mJsepTransceiver->mRecvTrack.GetActive()) {
+    const auto& details(*mJsepTransceiver->mRecvTrack.GetNegotiatedDetails());
+
+    UpdateVideoExtmap(details, false);
+
+    PtrVector<VideoCodecConfig> configs;
+    nsresult rv = NegotiatedDetailsToVideoCodecConfigs(details, &configs);
+
+    if (NS_FAILED(rv)) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " Failed to convert JsepCodecDescriptions to "
+                          "VideoCodecConfigs (recv).");
+      return rv;
+    }
+
+    auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
+
+    if (error) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " ConfigureRecvMediaCodecs failed: " << error);
+      return NS_ERROR_FAILURE;
+    }
+  }
+
+  // According to the specification, it is possible for SDP to signal a send
+  // track even though no send track is actually present; all that needs to
+  // happen is for the transceiver to be configured to send.
+  if (mJsepTransceiver->mSendTrack.GetNegotiatedDetails() &&
+      mJsepTransceiver->mSendTrack.GetActive() &&
+      mSendTrack) {
+    const auto& details(*mJsepTransceiver->mSendTrack.GetNegotiatedDetails());
+
+    UpdateVideoExtmap(details, true);
+
+    nsresult rv = ConfigureVideoCodecMode(*conduit);
+    if (NS_FAILED(rv)) {
+      return rv;
+    }
+
+    PtrVector<VideoCodecConfig> configs;
+    rv = NegotiatedDetailsToVideoCodecConfigs(details, &configs);
+
+    if (NS_FAILED(rv)) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " Failed to convert JsepCodecDescriptions to "
+                          "VideoCodecConfigs (send).");
+      return rv;
+    }
+
+    auto error = conduit->ConfigureSendMediaCodec(configs.values[0]);
+    if (error) {
+      MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                          " ConfigureSendMediaCodec failed: " << error);
+      return NS_ERROR_FAILURE;
+    }
+  }
+
+  return NS_OK;
+}
+
+nsresult
+TransceiverImpl::ConfigureVideoCodecMode(VideoSessionConduit& aConduit)
+{
+  RefPtr<mozilla::dom::VideoStreamTrack> videotrack =
+    mSendTrack->AsVideoStreamTrack();
+
+  if (!videotrack) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " mSendTrack is not video! This should never happen!");
+    MOZ_CRASH();
+    return NS_ERROR_FAILURE;
+  }
+
+  dom::MediaSourceEnum source = videotrack->GetSource().GetMediaSource();
+  webrtc::VideoCodecMode mode = webrtc::kRealtimeVideo;
+  switch (source) {
+    case dom::MediaSourceEnum::Browser:
+    case dom::MediaSourceEnum::Screen:
+    case dom::MediaSourceEnum::Application:
+    case dom::MediaSourceEnum::Window:
+      mode = webrtc::kScreensharing;
+      break;
+
+    case dom::MediaSourceEnum::Camera:
+    default:
+      mode = webrtc::kRealtimeVideo;
+      break;
+  }
+
+  auto error = aConduit.ConfigureCodecMode(mode);
+  if (error) {
+    MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
+                        " ConfigureCodecMode failed: " << error);
+    return NS_ERROR_FAILURE;
+  }
+
+  return NS_OK;
+}
+
+void
+TransceiverImpl::UpdateVideoExtmap(const JsepTrackNegotiatedDetails& aDetails,
+                                   bool aSending)
+{
+  std::vector<webrtc::RtpExtension> extmaps;
+  // TODO(@@NG): Read the extmap from the track.
+  aDetails.ForEachRTPHeaderExtension(
+    [&extmaps](const SdpExtmapAttributeList::Extmap& extmap)
+  {
+    extmaps.emplace_back(extmap.extensionname, extmap.entry);
+  });
+
+  RefPtr<VideoSessionConduit> conduit = static_cast<VideoSessionConduit*>(
+      mConduit.get());
+
+  if (!extmaps.empty()) {
+    conduit->SetLocalRTPExtensions(aSending, extmaps);
+  }
+}
+
+static void StartTrack(MediaStream* aSource,
+                       nsAutoPtr<MediaSegment>&& aSegment)
+{
+  class Message : public ControlMessage {
+   public:
+    Message(MediaStream* aStream, nsAutoPtr<MediaSegment>&& aSegment)
+      : ControlMessage(aStream),
+        segment_(aSegment) {}
+
+    void Run() override {
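+      // Audio track timing uses the webrtc.org default sample rate; video
+      // uses the graph rate.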
+      TrackRate track_rate = segment_->GetType() == MediaSegment::AUDIO ?
+        WEBRTC_DEFAULT_SAMPLE_RATE : mStream->GraphRate();
+      StreamTime current_end = mStream->GetTracksEnd();
+      MOZ_MTLOG(ML_DEBUG, "current_end = " << current_end);
+      TrackTicks current_ticks =
+        mStream->TimeToTicksRoundUp(track_rate, current_end);
+
+      // Add a track 'now' to avoid possible underrun, especially if we add
+      // a track "later".
+
+      if (current_end != 0L) {
+        MOZ_MTLOG(ML_DEBUG, "added track @ " << current_end << " -> "
+                  << mStream->StreamTimeToSeconds(current_end));
+      }
+
+      // To avoid assertions, we need to insert a dummy segment that covers up
+      // to the "start" time for the track
+      segment_->AppendNullData(current_ticks);
+      MOZ_MTLOG(ML_DEBUG, "segment_->GetDuration() = " << segment_->GetDuration());
+      if (segment_->GetType() == MediaSegment::AUDIO) {
+        MOZ_MTLOG(ML_DEBUG, "Calling AddAudioTrack");
+        mStream->AsSourceStream()->AddAudioTrack(
+            kAudioTrack,
+            WEBRTC_DEFAULT_SAMPLE_RATE,
+            0,
+            static_cast<AudioSegment*>(segment_.forget()));
+      } else {
+        mStream->AsSourceStream()->AddTrack(kVideoTrack, 0, segment_.forget());
+      }
+
+      mStream->AsSourceStream()->SetPullEnabled(true);
+      mStream->AsSourceStream()->AdvanceKnownTracksTime(STREAM_TIME_MAX);
+    }
+   private:
+    nsAutoPtr<MediaSegment> segment_;
+  };
+
+  aSource->GraphImpl()->AppendMessage(
+      MakeUnique<Message>(aSource, Move(aSegment)));
+  MOZ_MTLOG(ML_INFO, "Dispatched track-add on stream " << aSource);
+}
+
+void
+TransceiverImpl::StartReceiveStream()
+{
+  MOZ_MTLOG(ML_DEBUG, mPCHandle << "[" << mMid << "]: " << __FUNCTION__);
+  // TODO: Can this be simplified? There are an awful lot of moving pieces here.
+  SourceMediaStream* source(mReceiveStream->GetInputStream()->AsSourceStream());
+  mReceiveStream->SetLogicalStreamStartTime(
+      mReceiveStream->GetPlaybackStream()->GetCurrentTime());
+
+  nsAutoPtr<MediaSegment> segment;
+  if (IsVideo()) {
+    segment = new VideoSegment;
+  } else {
+    segment = new AudioSegment;
+  }
+
+  StartTrack(source, Move(segment));
+}
+
+void
+TransceiverImpl::Stop()
+{
+  mTransmitPipeline->Stop();
+  mTransmitPipeline->DetachMedia();
+  mReceivePipeline->Stop();
+  mReceivePipeline->DetachMedia();
+  // Make sure that stats queries stop working on this transceiver.
+  UpdateSendTrack(nullptr);
+  mHaveStartedReceiving = false;
+}
+
+bool
+TransceiverImpl::IsVideo() const
+{
+  return mJsepTransceiver->GetMediaType() == SdpMediaSection::MediaType::kVideo;
+}
+
+} // namespace mozilla
new file mode 100644
--- /dev/null
+++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.h
@@ -0,0 +1,148 @@
+/* This Source Code Form is subject to the terms of the Mozilla Public
+ * License, v. 2.0. If a copy of the MPL was not distributed with this file,
+ * You can obtain one at http://mozilla.org/MPL/2.0/. */
+#ifndef _TRANSCEIVERIMPL_H_
+#define _TRANSCEIVERIMPL_H_
+
+#include <string>
+#include "mozilla/RefPtr.h"
+#include "nsCOMPtr.h"
+#include "nsIEventTarget.h"
+#include "nsTArray.h"
+#include "DOMMediaStream.h"
+#include "mozilla/OwningNonNull.h"
+#include "mozilla/dom/MediaStreamTrack.h"
+#include "ErrorList.h"
+#include "mtransport/transportflow.h"
+#include "signaling/src/jsep/JsepTransceiver.h"
+
+class nsIPrincipal;
+
+namespace mozilla {
+class PeerIdentity;
+class PeerConnectionMedia;
+class JsepTransceiver;
+class MediaSessionConduit;
+class VideoSessionConduit;
+class MediaPipelineReceive;
+class MediaPipelineTransmit;
+class MediaPipeline;
+class MediaPipelineFilter;
+class WebRtcCallWrapper;
+class JsepTrackNegotiatedDetails;
+
+namespace dom {
+class RTCRtpTransceiver;
+}
+
+/**
+ * This is what ties together all the various pieces that make up a
+ * transceiver. These include:
+ *  - DOMMediaStream, MediaStreamTrack, and SourceMediaStream, for rendering
+ *    and capture
+ *  - TransportFlow, for RTP transmission/reception
+ *  - Audio/VideoConduit, for feeding RTP/RTCP into webrtc.org for decoding,
+ *    and for feeding audio/video frames into webrtc.org for encoding into
+ *    RTP/RTCP
+ */
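+/*
+ * A rough usage sketch, illustrative only; the call sequence and variable
+ * names below are assumptions, not part of this patch:
+ *
+ *   RefPtr<TransceiverImpl> transceiver =
+ *     new TransceiverImpl(pcHandle, jsepTransceiver, mainThread, stsThread,
+ *                         *receiveStream, sendTrack, callWrapper);
+ *   transceiver->UpdateTransport(pcMedia);  // wire up RTP/RTCP transport
+ *   transceiver->UpdateConduit();           // apply negotiated codecs
+ *   transceiver->SyncWithJS(jsTransceiver, rv);
+ *   ...
+ *   transceiver->Shutdown_m();
+ */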
+class TransceiverImpl : public nsISupports {
+public:
+  /**
+   * |aReceiveStream| is always set; this holds even if the remote end has not
+   * negotiated one for this transceiver. |aSendTrack| might or might not be
+   * set.
+   */
+  TransceiverImpl(const std::string& aPCHandle,
+                  JsepTransceiver* aJsepTransceiver,
+                  nsIEventTarget* aMainThread,
+                  nsIEventTarget* aStsThread,
+                  DOMMediaStream& aReceiveStream,
+                  dom::MediaStreamTrack* aSendTrack,
+                  WebRtcCallWrapper* aCallWrapper);
+
+  nsresult UpdateSendTrack(dom::MediaStreamTrack* aSendTrack);
+
+  nsresult UpdateSinkIdentity(dom::MediaStreamTrack* aTrack,
+                              nsIPrincipal* aPrincipal,
+                              const PeerIdentity* aSinkIdentity);
+
+  nsresult UpdateTransport(PeerConnectionMedia& aTransportManager);
+
+  nsresult UpdateConduit();
+
+  nsresult UpdatePrincipal(nsIPrincipal* aPrincipal);
+
+  // TODO: We probably need to de-Sync when transceivers are stopped.
+  nsresult SyncWithMatchingVideoConduits(
+      std::vector<RefPtr<TransceiverImpl>>& transceivers);
+
+  void Shutdown_m();
+
+  bool ConduitHasPluginID(uint64_t aPluginID);
+
+  bool HasSendTrack(const dom::MediaStreamTrack* aSendTrack) const;
+
+  // This is so PCImpl can unregister from PrincipalChanged callbacks; maybe we
+  // should have TransceiverImpl handle these callbacks instead? It would need
+  // to be able to get a ref to PCImpl though.
+  RefPtr<dom::MediaStreamTrack> GetSendTrack()
+  {
+    return mSendTrack;
+  }
+
+  // for webidl
+  bool WrapObject(JSContext* aCx, JS::Handle<JSObject*> aGivenProto,
+                  JS::MutableHandle<JSObject*> aReflector);
+  already_AddRefed<dom::MediaStreamTrack> GetReceiveTrack();
+  void SyncWithJS(dom::RTCRtpTransceiver& aJsTransceiver, ErrorResult& aRv);
+
+  void InsertDTMFTone(int tone, uint32_t duration);
+
+  bool HasReceiveTrack(const dom::MediaStreamTrack* aReceiveTrack) const;
+
+  // TODO: These are for stats; try to find a cleaner way.
+  RefPtr<MediaPipeline> GetSendPipeline();
+
+  RefPtr<MediaPipeline> GetReceivePipeline();
+
+  void AddRIDExtension(unsigned short aExtensionId);
+
+  void AddRIDFilter(const nsAString& aRid);
+
+  bool IsVideo() const;
+
+  NS_DECL_THREADSAFE_ISUPPORTS
+
+private:
+  virtual ~TransceiverImpl();
+  void InitAudio();
+  void InitVideo();
+  nsresult UpdateAudioConduit();
+  nsresult UpdateVideoConduit();
+  nsresult ConfigureVideoCodecMode(VideoSessionConduit& aConduit);
+  // This will eventually update audio extmap too
+  void UpdateVideoExtmap(const JsepTrackNegotiatedDetails& aDetails,
+                         bool aSending);
+  void StartReceiveStream();
+  void Stop();
+
+  const std::string mPCHandle;
+  RefPtr<JsepTransceiver> mJsepTransceiver;
+  std::string mMid;
+  bool mHaveStartedReceiving;
+  bool mHaveSetupTransport;
+  nsCOMPtr<nsIEventTarget> mMainThread;
+  nsCOMPtr<nsIEventTarget> mStsThread;
+  RefPtr<DOMMediaStream> mReceiveStream;
+  RefPtr<dom::MediaStreamTrack> mSendTrack;
+  // state for webrtc.org that is shared between all transceivers
+  RefPtr<WebRtcCallWrapper> mCallWrapper;
+  RefPtr<TransportFlow> mRtpFlow;
+  RefPtr<TransportFlow> mRtcpFlow;
+  RefPtr<MediaSessionConduit> mConduit;
+  RefPtr<MediaPipelineReceive> mReceivePipeline;
+  RefPtr<MediaPipelineTransmit> mTransmitPipeline;
+};
+
+} // namespace mozilla
+
+#endif // _TRANSCEIVERIMPL_H_
+
--- a/media/webrtc/signaling/src/peerconnection/moz.build
+++ b/media/webrtc/signaling/src/peerconnection/moz.build
@@ -16,18 +16,17 @@ LOCAL_INCLUDES += [
     '/media/webrtc/signaling/src/common/browser_logging',
     '/media/webrtc/signaling/src/common/time_profiling',
     '/media/webrtc/signaling/src/media-conduit',
     '/media/webrtc/signaling/src/mediapipeline',
     '/media/webrtc/trunk',
 ]
 
 UNIFIED_SOURCES += [
-    'MediaPipelineFactory.cpp',
-    'MediaStreamList.cpp',
     'PacketDumper.cpp',
     'PeerConnectionCtx.cpp',
     'PeerConnectionImpl.cpp',
     'PeerConnectionMedia.cpp',
+    'TransceiverImpl.cpp',
     'WebrtcGlobalInformation.cpp',
 ]
 
 FINAL_LIBRARY = 'xul'