Backed out 3 changesets (bug 1337777) for bustage.
author       Ryan VanderMeulen <ryanvm@gmail.com>
date         Fri, 10 Mar 2017 18:19:21 -0500
changeset    379020 e5eb0121a580e4b8acf53886861363f669a864ad
parent       379019 9e4af52826967f7cf49cac1d94a5afdbe1700f48
child        379021 c32166f8c0c41147e320418ae7ce359a4edb94e3
push id      1419
push user    jlund@mozilla.com
push date    Mon, 10 Apr 2017 20:44:07 +0000
treeherder   mozilla-release@5e6801b73ef6
bugs         1337777
milestone    53.0
backs out    7b7a3daa9f812554d4efed205bb8afceedcbaae0
             eb7181c6f54fe13d3e58979b820396b2feca9394
             5094bc55377553ea338e993f3d24508440a6ea67
Backed out 3 changesets (bug 1337777) for bustage.

Backed out changeset 7b7a3daa9f81 (bug 1337777)
Backed out changeset eb7181c6f54f (bug 1337777)
Backed out changeset 5094bc553775 (bug 1337777)
dom/media/systemservices/MediaUtils.h
dom/media/tests/mochitest/mochitest.ini
dom/media/tests/mochitest/pc.js
dom/media/tests/mochitest/sdpUtils.js
dom/media/tests/mochitest/templates.js
dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp
media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
--- a/dom/media/systemservices/MediaUtils.h
+++ b/dom/media/systemservices/MediaUtils.h
@@ -168,17 +168,17 @@ private:
  *
  * It's worse with more variables. Lambdas have a leg up with variable capture:
  *
  *   void Foo()
  *   {
  *     RefPtr<Bar> bar = new Bar();
  *     NS_DispatchToMainThread(media::NewRunnableFrom([bar]() mutable {
  *       // use bar
- *     }));
+ *     });
  *   }
  *
  * Capture is by-copy by default, so the nsRefPtr 'bar' is safely copied for
  * access on the other thread (threadsafe refcounting in bar is assumed).
  *
  * The 'mutable' keyword is only needed for non-const access to bar.
  */
 
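
The MediaUtils.h hunk only reverts a doc-comment fix, but the pattern that comment documents -- capturing a ref-counted object by copy into a lambda and dispatching it to another thread -- is the backbone of the conduit code being backed out below. A minimal standard-C++ analogue of the idea, using std::shared_ptr and std::thread in place of Gecko's RefPtr, NS_DispatchToMainThread and media::NewRunnableFrom (which are not reproduced here):

```cpp
#include <cstdio>
#include <memory>
#include <thread>

struct Bar {
  void Use() { std::printf("using Bar on another thread\n"); }
};

int main() {
  // By-copy capture: the shared_ptr is copied into the lambda, so the object
  // stays alive for as long as the worker thread needs it.
  auto bar = std::make_shared<Bar>();
  std::thread worker([bar]() mutable {
    bar->Use();  // safe: the lambda holds its own reference
  });
  worker.join();
  return 0;  // the last remaining reference releases Bar here
}
```
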
--- a/dom/media/tests/mochitest/mochitest.ini
+++ b/dom/media/tests/mochitest/mochitest.ini
@@ -136,18 +136,16 @@ skip-if = (android_version == '18') # an
 [test_peerConnection_bug1042791.html]
 skip-if = os == 'android' # bug 1043403
 [test_peerConnection_bug1064223.html]
 [test_peerConnection_capturedVideo.html]
 tags=capturestream
 skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_captureStream_canvas_2d.html]
 skip-if = android_version == '18' # android(Bug 1319019, timeouts on 4.3 emulator)
-[test_peerConnection_captureStream_canvas_2d_noSSRC.html]
-skip-if = android_version == '18' # android(Bug 1319019, timeouts on 4.3 emulator)
 [test_peerConnection_multiple_captureStream_canvas_2d.html]
 skip-if = (android_version == '18' && debug) # android(Bug 1189784, timeouts on 4.3 emulator)
 [test_peerConnection_captureStream_canvas_webgl.html]
 # [test_peerConnection_certificates.html] # bug 1180968
 [test_peerConnection_close.html]
 [test_peerConnection_closeDuringIce.html]
 [test_peerConnection_constructedStream.html]
 skip-if = android_version == '18' # android(Bug 1189784, timeouts on 4.3 emulator)
--- a/dom/media/tests/mochitest/pc.js
+++ b/dom/media/tests/mochitest/pc.js
@@ -58,17 +58,16 @@ function PeerConnectionTest(options) {
   options.commands = options.commands || makeDefaultCommands();
   options.is_local = "is_local" in options ? options.is_local : true;
   options.is_remote = "is_remote" in options ? options.is_remote : true;
 
   options.h264 = "h264" in options ? options.h264 : false;
   options.bundle = "bundle" in options ? options.bundle : true;
   options.rtcpmux = "rtcpmux" in options ? options.rtcpmux : true;
   options.opus = "opus" in options ? options.opus : true;
-  options.ssrc = "ssrc" in options ? options.ssrc : true;
 
   if (iceServersArray.length) {
     if (!options.turn_disabled_local) {
       options.config_local = options.config_local || {}
       options.config_local.iceServers = iceServersArray;
     }
     if (!options.turn_disabled_remote) {
       options.config_remote = options.config_remote || {}
@@ -447,24 +446,16 @@ PeerConnectionTest.prototype.updateChain
       'PC_LOCAL_CREATE_OFFER',
       [PC_LOCAL_REMOVE_BUNDLE_FROM_OFFER]);
   }
   if (!this.testOptions.rtcpmux) {
     this.chain.insertAfterEach(
       'PC_LOCAL_CREATE_OFFER',
       [PC_LOCAL_REMOVE_RTCPMUX_FROM_OFFER]);
   }
-  if (!this.testOptions.ssrc) {
-    this.chain.insertAfterEach(
-      'PC_LOCAL_CREATE_OFFER',
-      [PC_LOCAL_REMOVE_SSRC_FROM_OFFER]);
-    this.chain.insertAfterEach(
-      'PC_REMOTE_CREATE_ANSWER',
-      [PC_REMOTE_REMOVE_SSRC_FROM_ANSWER]);
-  }
   if (!this.testOptions.is_local) {
     this.chain.filterOut(/^PC_LOCAL/);
   }
   if (!this.testOptions.is_remote) {
     this.chain.filterOut(/^PC_REMOTE/);
   }
 };
 
--- a/dom/media/tests/mochitest/sdpUtils.js
+++ b/dom/media/tests/mochitest/sdpUtils.js
@@ -10,21 +10,16 @@ checkSdpAfterEndOfTrickle: function(sdp,
   ok(sdp.sdp.includes("a=end-of-candidates"), label + ": SDP contains end-of-candidates");
   sdputils.checkSdpCLineNotDefault(sdp.sdp, label);
 
   if (testOptions.rtcpmux) {
     ok(sdp.sdp.includes("a=rtcp-mux"), label + ": SDP contains rtcp-mux");
   } else {
     ok(sdp.sdp.includes("a=rtcp:"), label + ": SDP contains rtcp port");
   }
-  if (testOptions.ssrc) {
-    ok(sdp.sdp.includes("a=ssrc"), label + ": SDP contains a=ssrc");
-  } else {
-    ok(!sdp.sdp.includes("a=ssrc"), label + ": SDP does not contain a=ssrc");
-  }
 },
 
 // takes sdp in string form (or possibly a fragment, say an m-section), and
 // verifies that the default 0.0.0.0 addr is not present.
 checkSdpCLineNotDefault: function(sdpStr, label) {
   info("CLINE-NO-DEFAULT-ADDR-SDP: " + JSON.stringify(sdpStr));
   ok(!sdpStr.includes("c=IN IP4 0.0.0.0"), label + ": SDP contains non-zero IP c line");
 },
@@ -39,20 +34,16 @@ removeCodec: function(sdp, codec) {
     updated_sdp = updated_sdp.replace(new RegExp("a=rtcp-fb:" + codec + " ccm fir\\r\\n",""),"");
   return updated_sdp;
 },
 
 removeRtcpMux: function(sdp) {
   return sdp.replace(/a=rtcp-mux\r\n/g,"");
 },
 
-removeSSRCs: function(sdp) {
-  return sdp.replace(/a=ssrc.*\r\n/g,"");
-},
-
 removeBundle: function(sdp) {
   return sdp.replace(/a=group:BUNDLE .*\r\n/g, "");
 },
 
 reduceAudioMLineToPcmuPcma: function(sdp) {
   return sdp.replace(/m=audio .*\r\n/g, "m=audio 9 UDP/TLS/RTP/SAVPF 0 8\r\n");
 },
 
@@ -131,17 +122,16 @@ verifySdp: function(desc, expectedType, 
     ok(desc.sdp.includes("m=video"), "video m-line is present in SDP");
     if (testOptions.h264) {
       ok(desc.sdp.includes("a=rtpmap:126 H264/90000"), "H.264 codec is present in SDP");
     } else {
 	ok(desc.sdp.includes("a=rtpmap:120 VP8/90000") ||
 	   desc.sdp.includes("a=rtpmap:121 VP9/90000"), "VP8 or VP9 codec is present in SDP");
     }
     is(testOptions.rtcpmux, desc.sdp.includes("a=rtcp-mux"), "RTCP Mux is offered in SDP");
-    is(testOptions.ssrc, desc.sdp.includes("a=ssrc"), "a=ssrc signaled in SDP");
   }
 
   return requiresTrickleIce;
 },
 
 /**
  * Counts the amount of audio tracks in a given media constraint.
  *
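
The sdpUtils.js helpers removed above stripped every a=ssrc attribute line out of an SDP blob so the tests could simulate a sender that never signals its SSRCs. A rough standalone C++ analogue of that one-line regex replace (function and variable names here are illustrative, not part of the tree):

```cpp
#include <iostream>
#include <regex>
#include <string>

// Remove every CRLF-terminated "a=ssrc..." attribute line from an SDP string,
// mirroring what the deleted removeSSRCs() test helper did with a JS regex.
static std::string RemoveSsrcLines(const std::string& sdp) {
  static const std::regex kSsrcLine("a=ssrc.*\r\n");
  return std::regex_replace(sdp, kSsrcLine, "");
}

int main() {
  std::string sdp =
      "m=video 9 UDP/TLS/RTP/SAVPF 120\r\n"
      "a=ssrc:12345 cname:foo\r\n"
      "a=rtcp-mux\r\n";
  std::cout << RemoveSsrcLines(sdp);  // prints only the m= and a=rtcp-mux lines
  return 0;
}
```
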
--- a/dom/media/tests/mochitest/templates.js
+++ b/dom/media/tests/mochitest/templates.js
@@ -504,26 +504,16 @@ function PC_LOCAL_REMOVE_BUNDLE_FROM_OFF
   info("Updated no bundle offer: " + JSON.stringify(test.originalOffer));
 };
 
 function PC_LOCAL_REMOVE_RTCPMUX_FROM_OFFER(test) {
   test.originalOffer.sdp = sdputils.removeRtcpMux(test.originalOffer.sdp);
   info("Updated no RTCP-Mux offer: " + JSON.stringify(test.originalOffer));
 };
 
-function PC_LOCAL_REMOVE_SSRC_FROM_OFFER(test) {
-  test.originalOffer.sdp = sdputils.removeSSRCs(test.originalOffer.sdp);
-  info("Updated no SSRCs offer: " + JSON.stringify(test.originalOffer));
-};
-
-function PC_REMOTE_REMOVE_SSRC_FROM_ANSWER(test) {
-  test.originalAnswer.sdp = sdputils.removeSSRCs(test.originalAnswer.sdp);
-  info("Updated no SSRCs answer: " + JSON.stringify(test.originalAnswerr));
-};
-
 var addRenegotiation = (chain, commands, checks) => {
   chain.append(commands);
   chain.append(commandsPeerConnectionOfferAnswer);
   if (checks) {
     chain.append(checks);
   }
 };
 
deleted file mode 100644
--- a/dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d_noSSRC.html
+++ /dev/null
@@ -1,74 +0,0 @@
-<!DOCTYPE HTML>
-<html>
-<head>
-  <script type="application/javascript" src="pc.js"></script>
-  <script type="application/javascript" src="/tests/dom/canvas/test/captureStream_common.js"></script>
-</head>
-<body>
-<pre id="test">
-<script type="application/javascript">
-createHTML({
-  title: "Canvas(2D)::CaptureStream as video-only input to peerconnection with no a=ssrc",
-  visible: true
-});
-
-var test;
-runNetworkTest((options) => {
-  options = options || { };
-  options.ssrc = false;
-  test = new PeerConnectionTest(options);
-  var mediaElement;
-  var h = new CaptureStreamTestHelper2D();
-  var canvas = document.createElement('canvas');
-  var stream;
-  canvas.id = 'source_canvas';
-  canvas.width = canvas.height = 10;
-  document.getElementById('content').appendChild(canvas);
-
-  test.setMediaConstraints([{video: true}], []);
-  test.chain.replace("PC_LOCAL_GUM", [
-    function PC_LOCAL_DRAW_INITIAL_LOCAL_GREEN(test) {
-      h.drawColor(canvas, h.green);
-    },
-    function PC_LOCAL_CANVAS_CAPTURESTREAM(test) {
-      stream = canvas.captureStream(0);
-      test.pcLocal.attachLocalStream(stream);
-    }
-  ]);
-  test.chain.append([
-    function PC_REMOTE_WAIT_FOR_REMOTE_GREEN() {
-      mediaElement = test.pcRemote.remoteMediaElements[0];
-      ok(!!mediaElement, "Should have remote video element for pcRemote");
-      return h.waitForPixelColor(mediaElement, h.green, 128,
-                                 "pcRemote's remote should become green");
-    },
-    function PC_LOCAL_DRAW_LOCAL_RED() {
-      // After requesting a frame it will be captured at the time of next render.
-      // Next render will happen at next stable state, at the earliest,
-      // i.e., this order of `requestFrame(); draw();` should work.
-      stream.requestFrame();
-      h.drawColor(canvas, h.red);
-      var i = 0;
-      return setInterval(function() {
-        try {
-          info("draw " + i ? "green" : "red");
-          h.drawColor(canvas, i ? h.green : h.red);
-          i = 1 - i;
-          stream.requestFrame();
-        } catch (e) {
-          // ignore; stream might have shut down, and we don't bother clearing
-          // the setInterval.
-        }
-      }, 500);
-    },
-    function PC_REMOTE_WAIT_FOR_REMOTE_RED() {
-      return h.waitForPixelColor(mediaElement, h.red, 128,
-                                 "pcRemote's remote should become red");
-    }
-  ]);
-  test.run();
-});
-</script>
-</pre>
-</body>
-</html>
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -759,17 +759,17 @@ WebrtcAudioConduit::GetAudioFrame(int16_
 #endif
   CSFLogDebug(logTag,"%s GetAudioFrame:Got samples: length %d ",__FUNCTION__,
                                                                lengthSamples);
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 MediaConduitErrorCode
-WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len, uint32_t ssrc)
+WebrtcAudioConduit::ReceivedRTPPacket(const void *data, int len)
 {
   CSFLogDebug(logTag,  "%s : channel %d", __FUNCTION__, mChannel);
 
   if(mEngineReceiving)
   {
 #if !defined(MOZILLA_EXTERNAL_LINKAGE)
     if (MOZ_LOG_TEST(GetLatencyLog(), LogLevel::Debug)) {
       // timestamp is at 32 bits in ([1])
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -52,17 +52,17 @@ class WebrtcAudioConduit: public AudioSe
 public:
   //VoiceEngine defined constant for Payload Name Size.
   static const unsigned int CODEC_PLNAME_SIZE;
 
   /**
    * APIs used by the registered external transport to this Conduit to
    * feed in received RTP Frames to the VoiceEngine for decoding
    */
-  virtual MediaConduitErrorCode ReceivedRTPPacket(const void *data, int len, uint32_t ssrc) override;
+  virtual MediaConduitErrorCode ReceivedRTPPacket(const void *data, int len) override;
 
   /**
    * APIs used by the registered external transport to this Conduit to
    * feed in received RTCP Frames to the VoiceEngine for decoding
    */
   virtual MediaConduitErrorCode ReceivedRTCPPacket(const void *data, int len) override;
 
   virtual MediaConduitErrorCode StopTransmitting() override;
@@ -158,17 +158,16 @@ public:
   /**
    * Webrtc transport implementation to send and receive RTCP packet.
    * AudioConduit registers itself as ExternalTransport to the VoiceEngine
    */
   virtual bool SendRtcp(const uint8_t *data,
                         size_t len) override;
 
   virtual uint64_t CodecPluginID() override { return 0; }
-  virtual void SetPCHandle(const std::string& aPCHandle) {}
 
   explicit WebrtcAudioConduit():
                       mVoiceEngine(nullptr),
                       mTransportMonitor("WebrtcAudioConduit"),
                       mTransmitterTransport(nullptr),
                       mReceiverTransport(nullptr),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -191,17 +191,17 @@ public:
   /**
    * Function triggered on Incoming RTP packet from the remote
    * endpoint by the transport implementation.
    * @param data : RTP Packet (audio/video) to be processed
    * @param len  : Length of the media packet
    * Obtained packets are passed to the Media-Engine for further
    * processing , say, decoding
    */
-  virtual MediaConduitErrorCode ReceivedRTPPacket(const void *data, int len, uint32_t ssrc) = 0;
+  virtual MediaConduitErrorCode ReceivedRTPPacket(const void *data, int len) = 0;
 
   /**
    * Function triggered on Incoming RTCP packet from the remote
    * endpoint by the transport implementation.
    * @param data : RTCP Packet (audio/video) to be processed
    * @param len  : Length of the media packet
    * Obtained packets are passed to the Media-Engine for further
    * processing , say, decoding
@@ -273,18 +273,16 @@ public:
                                      uint32_t* cumulativeLost,
                                      int32_t* rttMs) = 0;
   virtual bool GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                                    unsigned int* packetsSent,
                                    uint64_t* bytesSent) = 0;
 
   virtual uint64_t CodecPluginID() = 0;
 
-  virtual void SetPCHandle(const std::string& aPCHandle) = 0;
-
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(MediaSessionConduit)
 
 };
 
 // Abstract base classes for external encoder/decoder.
 class CodecPluginID
 {
 public:
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -175,18 +175,16 @@ VideoSessionConduit::Create(RefPtr<WebRt
 WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall)
   : mTransportMonitor("WebrtcVideoConduit")
   , mRenderer(nullptr)
   , mEngineTransmitting(false)
   , mEngineReceiving(false)
   , mCapId(-1)
   , mCodecMutex("VideoConduit codec db")
   , mInReconfig(false)
-  , mRecvStream(nullptr)
-  , mSendStream(nullptr)
   , mLastWidth(0)
   , mLastHeight(0) // initializing as 0 forces a check for reconfig at start
   , mSendingWidth(0)
   , mSendingHeight(0)
   , mReceivingWidth(0)
   , mReceivingHeight(0)
   , mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE)
   , mLastFramerateTenths(DEFAULT_VIDEO_MAX_FRAMERATE * 10)
@@ -195,20 +193,20 @@ WebrtcVideoConduit::WebrtcVideoConduit(R
   , mVideoLatencyAvg(0)
   , mMinBitrate(0)
   , mStartBitrate(0)
   , mPrefMaxBitrate(0)
   , mNegotiatedMaxBitrate(0)
   , mMinBitrateEstimate(0)
   , mCodecMode(webrtc::kRealtimeVideo)
   , mCall(aCall) // refcounted store of the call object
+  , mSendStream(nullptr)
   , mSendStreamConfig(this) // 'this' is stored but not  dereferenced in the constructor.
+  , mRecvStream(nullptr)
   , mRecvStreamConfig(this) // 'this' is stored but not  dereferenced in the constructor.
-  , mRecvSSRCSet(false)
-  , mRecvSSRCSetInProgress(false)
   , mSendCodecPlugin(nullptr)
   , mRecvCodecPlugin(nullptr)
   , mVideoStatsTimer(do_CreateInstance(NS_TIMER_CONTRACTID))
 {
   mRecvStreamConfig.renderer = this;
 
   // Video Stats Callback
   nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
@@ -270,17 +268,16 @@ bool WebrtcVideoConduit::SetLocalSSRCs(c
   mSendStreamConfig.rtp.ssrcs = aSSRCs;
 
   bool wasTransmitting = mEngineTransmitting;
   if (StopTransmitting() != kMediaConduitNoError) {
     return false;
   }
 
   if (wasTransmitting) {
-    MutexAutoLock lock(mCodecMutex);
     DeleteSendStream();
     if (StartTransmitting() != kMediaConduitNoError) {
       return false;
     }
   }
 
   return true;
 }
@@ -323,34 +320,31 @@ PayloadNameToEncoderType(const std::stri
   }
 
   return webrtc::VideoEncoder::EncoderType::kUnsupportedCodec;
 }
 
 void
 WebrtcVideoConduit::DeleteSendStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
   if (mSendStream) {
 
     if (mLoadManager && mSendStream->LoadStateObserver()) {
       mLoadManager->RemoveObserver(mSendStream->LoadStateObserver());
     }
 
     mCall->Call()->DestroyVideoSendStream(mSendStream);
     mSendStream = nullptr;
     mEncoder = nullptr;
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateSendStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
-
   webrtc::VideoEncoder::EncoderType encoder_type =
     PayloadNameToEncoderType(mSendStreamConfig.encoder_settings.payload_name);
   if (encoder_type == webrtc::VideoEncoder::EncoderType::kUnsupportedCodec) {
     return kMediaConduitInvalidSendCodec;
   }
 
   nsAutoPtr<webrtc::VideoEncoder> encoder(
     CreateEncoder(encoder_type, mEncoderConfig.StreamCount() > 0));
@@ -395,29 +389,26 @@ PayloadNameToDecoderType(const std::stri
   }
 
   return webrtc::VideoDecoder::DecoderType::kUnsupportedCodec;
 }
 
 void
 WebrtcVideoConduit::DeleteRecvStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
   if (mRecvStream) {
     mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
     mRecvStream = nullptr;
     mDecoders.clear();
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateRecvStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
-
   webrtc::VideoReceiveStream::Decoder decoder_desc;
   std::unique_ptr<webrtc::VideoDecoder> decoder;
   webrtc::VideoDecoder::DecoderType decoder_type;
 
   mRecvStreamConfig.decoders.clear();
   for (auto& config : mRecvCodecList) {
     decoder_type = PayloadNameToDecoderType(config->mName);
     if (decoder_type == webrtc::VideoDecoder::DecoderType::kUnsupportedCodec) {
@@ -658,17 +649,16 @@ WebrtcVideoConduit::ConfigureSendMediaCo
     }
 
     condError = StopTransmitting();
     if (condError != kMediaConduitNoError) {
       return condError;
     }
 
     // This will cause a new encoder to be created by StartTransmitting()
-    MutexAutoLock lock(mCodecMutex);
     DeleteSendStream();
   }
 
   mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
   mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
   mSendStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
   mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
   mSendStreamConfig.overuse_callback = mLoadManager.get();
@@ -692,44 +682,37 @@ WebrtcVideoConduit::ConfigureSendMediaCo
 
   return condError;
 }
 
 bool
 WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
 {
   mRecvStreamConfig.rtp.remote_ssrc = ssrc;
+  unsigned int current_ssrc;
 
-  unsigned int current_ssrc;
   if (!GetRemoteSSRC(&current_ssrc)) {
     return false;
   }
-  mRecvSSRCSet = true;
 
   if (current_ssrc == ssrc || !mEngineReceiving) {
     return true;
   }
 
   if (StopReceiving() != kMediaConduitNoError) {
     return false;
   }
 
-  // This will destroy mRecvStream and create a new one (argh, why can't we change
-  // it without a full destroy?)
-  // We're going to modify mRecvStream, we must lock.  Only modified on MainThread.
-  // All non-MainThread users must lock before reading/using
-  {
-    MutexAutoLock lock(mCodecMutex);
-    DeleteRecvStream();
-    MediaConduitErrorCode rval = CreateRecvStream();
-    if (rval != kMediaConduitNoError) {
-      CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
-      return false;
-    }
+  DeleteRecvStream();
+  MediaConduitErrorCode rval = CreateRecvStream();
+  if (rval != kMediaConduitNoError) {
+    CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
+    return false;
   }
+
   return (StartReceiving() == kMediaConduitNoError);
 }
 
 bool
 WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
 {
   {
     MutexAutoLock lock(mCodecMutex);
@@ -963,17 +946,16 @@ WebrtcVideoConduit::Init()
 }
 
 void
 WebrtcVideoConduit::Destroy()
 {
   // We can't delete the VideoEngine until all these are released!
   // And we can't use a Scoped ptr, since the order is arbitrary
 
-  MutexAutoLock lock(mCodecMutex);
   DeleteSendStream();
   DeleteRecvStream();
 }
 
 void
 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
 {
   CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
@@ -1157,62 +1139,44 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
     mRecvStreamConfig.rtp.keyframe_method = kf_request_method;
 
     if (use_fec) {
       mRecvStreamConfig.rtp.fec.ulpfec_payload_type = ulpfec_payload_type;
       mRecvStreamConfig.rtp.fec.red_payload_type = red_payload_type;
       mRecvStreamConfig.rtp.fec.red_rtx_payload_type = -1;
     }
 
-    if (!mRecvSSRCSet) {
-      // Handle un-signalled SSRCs by creating a random one and then when it actually gets set,
-      // we'll destroy and recreate.  Simpler than trying to unwind all the logic that assumes
-      // the receive stream is created and started when we ConfigureRecvMediaCodecs()
-      unsigned int ssrc;
-      do {
-        SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc), sizeof(ssrc));
-        if (rv != SECSuccess) {
-          return kMediaConduitUnknownError;
-        }
-      } while (ssrc == 0); // webrtc.org code has fits if you select an SSRC of 0
-
-      mRecvStreamConfig.rtp.remote_ssrc = ssrc;
-    }
     // FIXME(jesup) - Bug 1325447 -- SSRCs configured here are a problem.
     // 0 isn't allowed.  Would be best to ask for a random SSRC from the RTP code.
     // Would need to call rtp_sender.cc -- GenerateSSRC(), which isn't exposed.  It's called on
     // collision, or when we decide to send.  it should be called on receiver creation.
     // Here, we're generating the SSRC value - but this causes ssrc_forced in set in rtp_sender,
     // which locks us into the SSRC - even a collision won't change it!!!
     auto ssrc = mRecvStreamConfig.rtp.remote_ssrc;
     do {
       SECStatus rv = PK11_GenerateRandom(reinterpret_cast<unsigned char*>(&ssrc), sizeof(ssrc));
       if (rv != SECSuccess) {
         return kMediaConduitUnknownError;
       }
-    } while (ssrc == mRecvStreamConfig.rtp.remote_ssrc || ssrc == 0);
-    // webrtc.org code has fits if you select an SSRC of 0
+    } while (ssrc == mRecvStreamConfig.rtp.remote_ssrc);
 
     mRecvStreamConfig.rtp.local_ssrc = ssrc;
 
     // XXX Copy over those that are the same and don't rebuild them
     mRecvCodecList.SwapElements(recv_codecs);
     recv_codecs.Clear();
     mRecvStreamConfig.rtp.rtx.clear();
+    // Rebuilds mRecvStream from mRecvStreamConfig
+    DeleteRecvStream();
+    MediaConduitErrorCode rval = CreateRecvStream();
+    if (rval != kMediaConduitNoError) {
+      CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
+      return rval;
+    }
 
-    {
-      MutexAutoLock lock(mCodecMutex);
-      DeleteRecvStream();
-      // Rebuilds mRecvStream from mRecvStreamConfig
-      MediaConduitErrorCode rval = CreateRecvStream();
-      if (rval != kMediaConduitNoError) {
-        CSFLogError(logTag, "%s Start Receive Error %d ", __FUNCTION__, rval);
-        return rval;
-      }
-    }
     return StartReceiving();
   }
   return kMediaConduitNoError;
 }
 
 webrtc::VideoDecoder*
 WebrtcVideoConduit::CreateDecoder(webrtc::VideoDecoder::DecoderType aType)
 {
@@ -1776,71 +1740,18 @@ WebrtcVideoConduit::DeliverPacket(const 
     CSFLogError(logTag, "%s DeliverPacket Failed, %d", __FUNCTION__, status);
     return kMediaConduitRTPProcessingFailed;
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
-WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
+WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len)
 {
-  bool queue = mRecvSSRCSetInProgress;
-  if (!mRecvSSRCSet && !mRecvSSRCSetInProgress) {
-    mRecvSSRCSetInProgress = true;
-    queue = true;
-    // Handle the ssrc-not-signaled case; lock onto first ssrc
-    // We can't just do this here; it has to happen on MainThread :-(
-    // We also don't want to drop the packet, nor stall this thread, so we hold
-    // the packet (and any following) for inserting once the SSRC is set.
-
-    // Ensure lamba captures refs
-    RefPtr<WebrtcVideoConduit> self = this;
-    nsCOMPtr<nsIThread> thread;
-    if (NS_WARN_IF(NS_FAILED(NS_GetCurrentThread(getter_AddRefs(thread))))) {
-      return kMediaConduitRTPProcessingFailed;
-    }
-    NS_DispatchToMainThread(media::NewRunnableFrom([self, thread, ssrc]() mutable {
-          // Normally this is done in CreateOrUpdateMediaPipeline() for
-          // initial creation and renegotiation, but here we're rebuilding the
-          // Receive channel at a lower level.  This is needed whenever we're
-          // creating a GMPVideoCodec (in particular, H264) so it can communicate
-          // errors to the PC.
-          WebrtcGmpPCHandleSetter setter(self->mPCHandle);
-          self->SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream
-          // We want to unblock the queued packets on the original thread
-          thread->Dispatch(media::NewRunnableFrom([self]() mutable {
-                self->mRecvSSRCSetInProgress = false;
-                // SSRC is set; insert queued packets
-                for (auto& packet : self->mQueuedPackets) {
-                  CSFLogDebug(logTag, "%s: seq# %u, Len %d ", __FUNCTION__,
-                              (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
-
-                  if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
-                    CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
-                    // Keep delivering and then clear the queue
-                  }
-                }
-                self->mQueuedPackets.Clear();
-
-                return NS_OK;
-              }), NS_DISPATCH_NORMAL);
-          return NS_OK;
-        }));
-    // we'll return after queuing
-  }
-  if (queue) {
-    // capture packet for insertion after ssrc is set
-    UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
-    packet->mLen = len;
-    memcpy(packet->mData, data, len);
-    mQueuedPackets.AppendElement(Move(packet));
-    return kMediaConduitNoError;
-  }
-
   CSFLogDebug(logTag, "%s: seq# %u, Len %d ", __FUNCTION__,
               (uint16_t)ntohs(((uint16_t*) data)[1]), len);
 
   if (DeliverPacket(data, len) != kMediaConduitNoError) {
     CSFLogError(logTag, "%s RTP Processing Failed", __FUNCTION__);
     return kMediaConduitRTPProcessingFailed;
   }
   return kMediaConduitNoError;
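
The large block removed from ReceivedRTPPacket() implemented "lock onto the first unsignalled SSRC": the first packet kicked off an asynchronous SetRemoteSSRC() on the main thread, and every packet arriving in the meantime was copied into a queue and delivered once the receive stream had been rebuilt. A heavily reduced, single-threaded C++ sketch of that queue-then-flush shape (the thread dispatch, locking and Gecko types are omitted; all names are illustrative):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

// Minimal stand-in for the conduit's "queue packets until the remote SSRC is
// known, then flush" behavior removed by this backout.
class ReceiverSketch {
 public:
  void OnRtpPacket(const uint8_t* data, size_t len, uint32_t ssrc) {
    if (!mSsrcSet) {
      if (!mSsrcSetInProgress) {
        mSsrcSetInProgress = true;
        // The real code dispatched SetRemoteSSRC(ssrc) to the main thread here.
        BeginSetRemoteSsrc(ssrc);
      }
      // Hold on to the packet until the receive stream exists.
      mQueued.emplace_back(data, data + len);
      return;
    }
    Deliver(data, len);
  }

  // Called once the (asynchronous) SSRC setup has finished.
  void OnSsrcSet() {
    mSsrcSet = true;
    mSsrcSetInProgress = false;
    for (const auto& pkt : mQueued) {
      Deliver(pkt.data(), pkt.size());  // keep delivering even on errors
    }
    mQueued.clear();
  }

 private:
  void BeginSetRemoteSsrc(uint32_t ssrc) {
    std::printf("locking onto SSRC %u\n", ssrc);
  }
  void Deliver(const uint8_t* data, size_t len) {
    std::printf("delivering %zu bytes (first byte 0x%02x)\n", len,
                len ? static_cast<unsigned>(data[0]) : 0u);
  }

  bool mSsrcSet = false;
  bool mSsrcSetInProgress = false;
  std::vector<std::vector<uint8_t>> mQueued;
};

int main() {
  ReceiverSketch r;
  const uint8_t pkt[] = {0x80, 0x60, 0x00, 0x01};
  r.OnRtpPacket(pkt, sizeof(pkt), 0xdeadbeef);  // queued
  r.OnRtpPacket(pkt, sizeof(pkt), 0xdeadbeef);  // queued
  r.OnSsrcSet();                                // flushes the queue
  r.OnRtpPacket(pkt, sizeof(pkt), 0xdeadbeef);  // delivered immediately
  return 0;
}
```
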
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -97,17 +97,17 @@ public:
    */
   virtual MediaConduitErrorCode AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer) override;
   virtual void DetachRenderer() override;
 
   /**
    * APIs used by the registered external transport to this Conduit to
    * feed in received RTP Frames to the VideoEngine for decoding
    */
-  virtual MediaConduitErrorCode ReceivedRTPPacket(const void* data, int len, uint32_t ssrc) override;
+  virtual MediaConduitErrorCode ReceivedRTPPacket(const void* data, int len) override;
 
   /**
    * APIs used by the registered external transport to this Conduit to
    * feed in received RTP Frames to the VideoEngine for decoding
    */
   virtual MediaConduitErrorCode ReceivedRTCPPacket(const void* data, int len) override;
 
   virtual MediaConduitErrorCode StopTransmitting() override;
@@ -256,20 +256,16 @@ public:
    * ------------------------------------
    */
   virtual bool SmoothsRenderedFrames() const override {
     return false;
   }
 
   virtual uint64_t CodecPluginID() override;
 
-  virtual void SetPCHandle(const std::string& aPCHandle) override {
-    mPCHandle = aPCHandle;
-  }
-
   unsigned short SendingWidth() override {
     return mSendingWidth;
   }
 
   unsigned short SendingHeight() override {
     return mSendingHeight;
   }
 
@@ -451,39 +447,32 @@ private:
   // Engine state we are concerned with.
   mozilla::Atomic<bool> mEngineTransmitting; // If true ==> Transmit Subsystem is up and running
   mozilla::Atomic<bool> mEngineReceiving;    // if true ==> Receive Subsystem up and running
 
   int mCapId;   // Capturer for this conduit
   //Local database of currently applied receive codecs
   nsTArray<UniquePtr<VideoCodecConfig>> mRecvCodecList;
 
-  // protects mCurrSendCodecConfig, mInReconfig,mVideoSend/RecvStreamStats, mSend/RecvStreams
-  Mutex mCodecMutex;
+  Mutex mCodecMutex; // protects mCurrSendCodecConfig, mVideoSend/RecvStreamStats
   nsAutoPtr<VideoCodecConfig> mCurSendCodecConfig;
   bool mInReconfig;
-  SendStreamStatistics mSendStreamStats;
-  ReceiveStreamStatistics mRecvStreamStats;
-  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete these:
-  webrtc::VideoReceiveStream* mRecvStream;
-  webrtc::VideoSendStream* mSendStream;
 
   unsigned short mLastWidth;
   unsigned short mLastHeight;
   unsigned short mSendingWidth;
   unsigned short mSendingHeight;
   unsigned short mReceivingWidth;
   unsigned short mReceivingHeight;
   unsigned int   mSendingFramerate;
   // scaled by *10 because Atomic<double/float> isn't supported
   mozilla::Atomic<int32_t, mozilla::Relaxed> mLastFramerateTenths;
   unsigned short mNumReceivingStreams;
   bool mVideoLatencyTestEnable;
   uint64_t mVideoLatencyAvg;
-  // all in bps!
   int mMinBitrate;
   int mStartBitrate;
   int mPrefMaxBitrate;
   int mNegotiatedMaxBitrate;
   int mMinBitrateEstimate;
 
   bool mRtpStreamIdEnabled;
   uint8_t mRtpStreamIdExtId;
@@ -495,40 +484,33 @@ private:
   RefPtr<WebrtcAudioConduit> mSyncedTo;
 
   nsAutoPtr<LoadManager> mLoadManager;
   webrtc::VideoCodecMode mCodecMode;
 
   // WEBRTC.ORG Call API
   RefPtr<WebRtcCallWrapper> mCall;
 
+  webrtc::VideoSendStream* mSendStream;
+  // Must call webrtc::Call::DestroyVideoSendStream to delete
   webrtc::VideoSendStream::Config mSendStreamConfig;
   VideoEncoderConfigBuilder mEncoderConfig;
   webrtc::VideoCodecH264 mEncoderSpecificH264;
 
+  webrtc::VideoReceiveStream* mRecvStream;
+  // Must call webrtc::Call::DestroyVideoReceiveStream to delete
   webrtc::VideoReceiveStream::Config mRecvStreamConfig;
-  // We can't create mRecvStream without knowing the remote SSRC
-  // Atomic since we key off this on packet insertion, which happens
-  // on a different thread.
-  Atomic<bool> mRecvSSRCSet;
-  // The runnable to set the SSRC is in-flight; queue packets until it's done.
-  bool mRecvSSRCSetInProgress;
-  struct QueuedPacket {
-    int mLen;
-    uint8_t mData[1];
-  };
-  nsTArray<UniquePtr<QueuedPacket>> mQueuedPackets;
 
   // The lifetime of these codecs are maintained by the VideoConduit instance.
   // They are passed to the webrtc::VideoSendStream or VideoReceiveStream,
   // on construction.
   nsAutoPtr<webrtc::VideoEncoder> mEncoder; // only one encoder for now
   std::vector<std::unique_ptr<webrtc::VideoDecoder>> mDecoders;
   WebrtcVideoEncoder* mSendCodecPlugin;
   WebrtcVideoDecoder* mRecvCodecPlugin;
 
   nsCOMPtr<nsITimer> mVideoStatsTimer;
-
-  std::string mPCHandle;
+  SendStreamStatistics mSendStreamStats;
+  ReceiveStreamStatistics mRecvStreamStats;
 };
 } // end namespace
 
 #endif
--- a/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp
+++ b/media/webrtc/signaling/src/media-conduit/WebrtcGmpVideoCodec.cpp
@@ -566,17 +566,17 @@ WebrtcGmpVideoEncoder::Encoded(GMPVideoE
         return;
     }
 
     struct nal_entry {
       uint32_t offset;
       uint32_t size;
     };
     AutoTArray<nal_entry, 1> nals;
-    uint32_t size = 0;
+    uint32_t size;
     // make sure we don't read past the end of the buffer getting the size
     while (buffer+size_bytes < end) {
       switch (aEncodedFrame->BufferType()) {
         case GMP_BufferSingle:
           size = aEncodedFrame->Size();
           break;
         case GMP_BufferLength8:
           size = *buffer++;
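
The WebrtcGmpVideoCodec.cpp hunk reverts `size` to being uninitialized before the loop that walks length-prefixed NAL units out of an encoded GMP frame. A simplified, self-contained illustration of that kind of bounds-checked length-prefix walk (fixed 1-byte prefixes only; the real code also handles 2-byte, 4-byte and single-buffer modes, and this is not the tree's implementation):

```cpp
#include <cstdint>
#include <cstdio>
#include <vector>

struct NalEntry {
  uint32_t offset;
  uint32_t size;
};

// Walk a buffer of 1-byte-length-prefixed NAL units, never reading past 'end'.
static std::vector<NalEntry> ParseNals(const uint8_t* buffer, size_t len) {
  std::vector<NalEntry> nals;
  const uint8_t* end = buffer + len;
  const uint8_t* p = buffer;
  while (p < end) {
    uint32_t size = *p++;  // initialize the size from the prefix before use
    if (size == 0 || size > static_cast<size_t>(end - p)) {
      break;  // empty or truncated entry: stop instead of reading past 'end'
    }
    nals.push_back({static_cast<uint32_t>(p - buffer), size});
    p += size;
  }
  return nals;
}

int main() {
  // Two NALs: 3 bytes and 2 bytes, each preceded by a 1-byte length.
  const uint8_t frame[] = {3, 0xAA, 0xBB, 0xCC, 2, 0x11, 0x22};
  for (const auto& n : ParseNals(frame, sizeof(frame))) {
    std::printf("NAL at offset %u, size %u\n", n.offset, n.size);
  }
  return 0;
}
```
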
--- a/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
+++ b/media/webrtc/signaling/src/mediapipeline/MediaPipeline.cpp
@@ -1081,22 +1081,23 @@ void MediaPipeline::RtpPacketReceived(Tr
     SprintfLiteral(tmp, "%.2x %.2x %.2x %.2x",
                    inner_data[0],
                    inner_data[1],
                    inner_data[2],
                    inner_data[3]);
 
     MOZ_MTLOG(ML_NOTICE, "Error unprotecting RTP in " << description_
               << "len= " << len << "[" << tmp << "...]");
+
     return;
   }
   MOZ_MTLOG(ML_DEBUG, description_ << " received RTP packet.");
   increment_rtp_packets_received(out_len);
 
-  (void)conduit_->ReceivedRTPPacket(inner_data.get(), out_len, header.ssrc);  // Ignore error codes
+  (void)conduit_->ReceivedRTPPacket(inner_data.get(), out_len);  // Ignore error codes
 }
 
 void MediaPipeline::RtcpPacketReceived(TransportLayer *layer,
                                        const unsigned char *data,
                                        size_t len) {
   if (!transport_->pipeline()) {
     MOZ_MTLOG(ML_DEBUG, "Discarding incoming packet; transport disconnected");
     return;
--- a/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
+++ b/media/webrtc/signaling/src/peerconnection/MediaPipelineFactory.cpp
@@ -449,17 +449,16 @@ MediaPipelineFactory::CreateOrUpdateMedi
     if (NS_FAILED(rv)) {
       return rv;
     }
   } else if (aTrack.GetMediaType() == SdpMediaSection::kVideo) {
     rv = GetOrCreateVideoConduit(aTrackPair, aTrack, &conduit);
     if (NS_FAILED(rv)) {
       return rv;
     }
-    conduit->SetPCHandle(mPC->GetHandle());
   } else {
     // We've created the TransportFlow, nothing else to do here.
     return NS_OK;
   }
 
   if (aTrack.GetActive()) {
     if (receiving) {
       auto error = conduit->StartReceiving();
@@ -834,19 +833,21 @@ MediaPipelineFactory::GetOrCreateVideoCo
         return NS_ERROR_FAILURE;
       }
     }
 
     ssrcs = &aTrack.GetSsrcs();
     // NOTE(pkerr) - this is new behavior. Needed because the CreateVideoReceiveStream
     // method of the Call API will assert (in debug) and fail if a value is not provided
     // for the remote_ssrc that will be used by the far-end sender.
-    if (!ssrcs->empty()) {
-      conduit->SetRemoteSSRC(ssrcs->front());
+    if (ssrcs->empty()) {
+      MOZ_MTLOG(ML_ERROR, "No SSRC set for receive track");
+      return NS_ERROR_FAILURE;
     }
+    conduit->SetRemoteSSRC(ssrcs->front());
 
     auto error = conduit->ConfigureRecvMediaCodecs(configs.values);
     if (error) {
       MOZ_MTLOG(ML_ERROR, "ConfigureRecvMediaCodecs failed: " << error);
       return NS_ERROR_FAILURE;
     }
   } else { //Create a send side
     // For now we only expect to have one ssrc per local track.
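
After this backout, GetOrCreateVideoConduit again treats a receive track with no signalled SSRC as a hard error, and ConfigureRecvMediaCodecs() simply retries a random local SSRC until it differs from the remote one. A small standard-C++ sketch of that retry-until-acceptable loop (here also rejecting 0, as the surrounding FIXME comment requires; std::random_device stands in for NSS's PK11_GenerateRandom used in the tree):

```cpp
#include <cstdint>
#include <cstdio>
#include <random>

// Pick a random 32-bit SSRC that is neither 0 nor equal to 'avoid'
// (e.g. the already-known remote SSRC).
static uint32_t GenerateLocalSsrc(uint32_t avoid) {
  std::random_device rd;
  std::mt19937 gen(rd());
  std::uniform_int_distribution<uint32_t> dist;
  uint32_t ssrc;
  do {
    ssrc = dist(gen);
  } while (ssrc == 0 || ssrc == avoid);  // webrtc.org rejects an SSRC of 0
  return ssrc;
}

int main() {
  uint32_t remote = 0x01020304;
  std::printf("local ssrc: %u\n", GenerateLocalSsrc(remote));
  return 0;
}
```
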