Bug 1404992 - Audit, document, assert and fix threading policy of VideoConduit. r=dminor, r=bwc
authorAndreas Pehrson <pehrsons@mozilla.com>
Tue, 11 Sep 2018 14:51:32 +0200
changeset 494655 43ee5a35eaf11ca626a468b3aa32ff408d7769a2
parent 494654 dbd6b33843148b1434525875ae23e21767a0afc6
child 494656 b86237ac88552125c60871afe4f94a517bf88b35
push id1864
push userffxbld-merge
push dateMon, 03 Dec 2018 15:51:40 +0000
treeherdermozilla-release@f040763d99ad [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersdminor, bwc
bugs1404992
milestone64.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1404992 - Audit, document, assert and fix threading policy of VideoConduit. r=dminor, r=bwc
media/webrtc/signaling/gtest/mediaconduit_unittests.cpp
media/webrtc/signaling/gtest/videoconduit_unittests.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
--- a/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp
+++ b/media/webrtc/signaling/gtest/mediaconduit_unittests.cpp
@@ -434,17 +434,18 @@ class TransportConduitTest : public ::te
     cerr << "   ******************************************************** " << endl;
   }
 
   void TestVideoConduitCodecAPI()
   {
     int err = 0;
     RefPtr<mozilla::VideoSessionConduit> videoSession;
     //get pointer to VideoSessionConduit
-    videoSession = VideoSessionConduit::Create(WebRtcCallWrapper::Create());
+    videoSession = VideoSessionConduit::Create(
+      WebRtcCallWrapper::Create(), GetCurrentThreadEventTarget());
     if( !videoSession ) {
       ASSERT_NE(videoSession, (void*)nullptr);
     }
 
     std::vector<unsigned int> ssrcs = {SSRC};
     videoSession->SetLocalSSRCs(ssrcs);
 
     //Test Configure Recv Codec APIS
--- a/media/webrtc/signaling/gtest/videoconduit_unittests.cpp
+++ b/media/webrtc/signaling/gtest/videoconduit_unittests.cpp
@@ -85,18 +85,20 @@ class VideoConduitTest : public ::testin
 public:
 
   VideoConduitTest()
     : mCall(new MockCall())
     , mAdapter(new MockVideoAdapter)
   {
     NSS_NoDB_Init(nullptr);
 
-    mVideoConduit = new WebrtcVideoConduit(WebRtcCallWrapper::Create(UniquePtr<MockCall>(mCall)),
-                                           UniquePtr<cricket::VideoAdapter>(mAdapter));
+    mVideoConduit = new WebrtcVideoConduit(
+      WebRtcCallWrapper::Create(UniquePtr<MockCall>(mCall)),
+      UniquePtr<cricket::VideoAdapter>(mAdapter),
+      GetCurrentThreadEventTarget());
     std::vector<unsigned int> ssrcs = {42};
     mVideoConduit->SetLocalSSRCs(ssrcs);
   }
 
   ~VideoConduitTest() override
   {
     mVideoConduit->DeleteStreams();
   }
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -137,17 +137,17 @@ bool WebrtcAudioConduit::SetLocalSSRCs(c
   if (wasTransmitting) {
     if (StartTransmitting() != kMediaConduitNoError) {
       return false;
     }
   }
   return true;
 }
 
-std::vector<unsigned int> WebrtcAudioConduit::GetLocalSSRCs() const {
+std::vector<unsigned int> WebrtcAudioConduit::GetLocalSSRCs() {
   unsigned int ssrc;
   if (!mPtrRTP->GetLocalSSRC(mChannel, ssrc)) {
     return std::vector<unsigned int>(1,ssrc);
   }
   return std::vector<unsigned int>();
 }
 
 bool WebrtcAudioConduit::GetRemoteSSRC(unsigned int* ssrc) {
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -198,17 +198,17 @@ public:
   int GetChannel() { return mChannel; }
   webrtc::VoiceEngine* GetVoiceEngine() { return mVoiceEngine; }
 
   /* Set Local SSRC list.
    * Note: Until the refactor of the VoE into the call API is complete
    *   this list should contain only a single ssrc.
    */
   bool SetLocalSSRCs(const std::vector<unsigned int>& aSSRCs) override;
-  std::vector<unsigned int> GetLocalSSRCs() const override;
+  std::vector<unsigned int> GetLocalSSRCs() override;
   bool SetRemoteSSRC(unsigned int ssrc) override
   {
     return false;
   }
   bool UnsetRemoteSSRC(uint32_t ssrc) override
   {
     return true;
   }
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -187,17 +187,17 @@ public:
    */
   virtual MediaConduitErrorCode SetReceiverTransport(RefPtr<TransportInterface> aTransport) = 0;
 
   /* Sets the local SSRCs
    * @return true iff the local ssrcs == aSSRCs upon return
    * Note: this is an ordered list and {a,b,c} != {b,a,c}
    */
   virtual bool SetLocalSSRCs(const std::vector<unsigned int>& aSSRCs) = 0;
-  virtual std::vector<unsigned int> GetLocalSSRCs() const = 0;
+  virtual std::vector<unsigned int> GetLocalSSRCs() = 0;
 
   /**
   * Adds negotiated RTP header extensions to the the conduit. Unknown extensions
   * are ignored.
   * @param aDirection the local direction to set the RTP header extensions for
   * @param aExtensions the RTP header extensions to set
   * @return if all extensions were set it returns a success code,
   *         if an extension fails to set it may immediately return an error code
@@ -373,17 +373,18 @@ class VideoSessionConduit : public Media
 public:
   /**
    * Factory function to create and initialize a Video Conduit Session
    * @param  webrtc::Call instance shared by paired audio and video
    *         media conduits
    * @result Concrete VideoSessionConduitObject or nullptr in the case
    *         of failure
    */
-  static RefPtr<VideoSessionConduit> Create(RefPtr<WebRtcCallWrapper> aCall);
+  static RefPtr<VideoSessionConduit> Create(
+    RefPtr<WebRtcCallWrapper> aCall, nsCOMPtr<nsIEventTarget> aStsThread);
 
   enum FrameRequestType
   {
     FrameRequestNone,
     FrameRequestFir,
     FrameRequestPli,
     FrameRequestUnknown
   };
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -132,250 +132,473 @@ ConstrainPreservingAspectRatio(uint16_t 
     (*height) = max_width * (*height) / (*width);
     (*width) = max_width;
   } else {
     (*width) = max_height * (*width) / (*height);
     (*height) = max_height;
   }
 }
 
+/**
+ * Function to select and change the encoding frame rate based on incoming frame rate
+ * and max-mbps setting.
+ * @param current framerate
+ * @result new framerate
+ */
+static unsigned int
+SelectSendFrameRate(const VideoCodecConfig* codecConfig,
+                    unsigned int old_framerate,
+                    unsigned short sending_width,
+                    unsigned short sending_height)
+{
+  unsigned int new_framerate = old_framerate;
+
+  // Limit frame rate based on max-mbps
+  if (codecConfig && codecConfig->mEncodingConstraints.maxMbps)
+  {
+    unsigned int cur_fs, mb_width, mb_height;
+
+    mb_width = (sending_width + 15) >> 4;
+    mb_height = (sending_height + 15) >> 4;
+
+    cur_fs = mb_width * mb_height;
+    if (cur_fs > 0) { // in case no frames have been sent
+      new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs;
+
+      new_framerate = MinIgnoreZero(new_framerate, codecConfig->mEncodingConstraints.maxFps);
+    }
+  }
+  return new_framerate;
+}
+
+/**
+ * Perform validation on the codecConfig to be applied
+ */
+static MediaConduitErrorCode
+ValidateCodecConfig(const VideoCodecConfig* codecInfo)
+{
+  if(!codecInfo) {
+    CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
+    return kMediaConduitMalformedArgument;
+  }
+
+  if((codecInfo->mName.empty()) ||
+     (codecInfo->mName.length() >= WebrtcVideoConduit::CODEC_PLNAME_SIZE)) {
+    CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
+    return kMediaConduitMalformedArgument;
+  }
+
+  return kMediaConduitNoError;
+}
+
+void
+WebrtcVideoConduit::CallStatistics::Update(
+  const webrtc::Call::Stats& aStats)
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  int64_t rtt = aStats.rtt_ms;
+#ifdef DEBUG
+  if (rtt > INT32_MAX) {
+    CSFLogError(LOGTAG,
+      "%s for VideoConduit:%p RTT is larger than the"
+      " maximum size of an RTCP RTT.", __FUNCTION__, this);
+  }
+#endif
+  if (rtt > 0) {
+    mRttMs = rtt;
+  } else {
+    mRttMs = 0;
+  }
+}
+
+int32_t
+WebrtcVideoConduit::CallStatistics::RttMs() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mRttMs;
+}
+
 void
 WebrtcVideoConduit::StreamStatistics::Update(const double aFrameRate,
-                                             const double aBitrate)
+                                             const double aBitrate,
+                                             const webrtc::RtcpPacketTypeCounter& aPacketCounts)
 {
+  ASSERT_ON_THREAD(mStatsThread);
+
   mFrameRate.Push(aFrameRate);
   mBitrate.Push(aBitrate);
+  mPacketCounts = aPacketCounts;
 }
 
 bool
 WebrtcVideoConduit::StreamStatistics::GetVideoStreamStats(
     double& aOutFrMean, double& aOutFrStdDev, double& aOutBrMean,
     double& aOutBrStdDev) const
 {
+  ASSERT_ON_THREAD(mStatsThread);
+
   if (mFrameRate.NumDataValues() && mBitrate.NumDataValues()) {
     aOutFrMean = mFrameRate.Mean();
     aOutFrStdDev = mFrameRate.StandardDeviation();
     aOutBrMean = mBitrate.Mean();
     aOutBrStdDev = mBitrate.StandardDeviation();
     return true;
   }
   return false;
 }
 
+const webrtc::RtcpPacketTypeCounter&
+WebrtcVideoConduit::StreamStatistics::PacketCounts() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mPacketCounts;
+}
+
+bool
+WebrtcVideoConduit::StreamStatistics::Active() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mActive;
+}
+
 void
-WebrtcVideoConduit::SendStreamStatistics::DroppedFrames(
-  uint32_t& aOutDroppedFrames) const
+WebrtcVideoConduit::StreamStatistics::SetActive(bool aActive)
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  mActive = aActive;
+}
+
+uint32_t
+WebrtcVideoConduit::SendStreamStatistics::DroppedFrames() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mDroppedFrames;
+}
+
+uint32_t
+WebrtcVideoConduit::SendStreamStatistics::FramesEncoded() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mFramesEncoded;
+}
+
+void
+WebrtcVideoConduit::SendStreamStatistics::FrameDeliveredToEncoder()
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  ++mFramesDeliveredToEncoder;
+}
+
+bool
+WebrtcVideoConduit::SendStreamStatistics::SsrcFound() const
 {
-      aOutDroppedFrames = mDroppedFrames;
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mSsrcFound;
+}
+
+uint32_t
+WebrtcVideoConduit::SendStreamStatistics::JitterMs() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mJitterMs;
+}
+
+uint32_t
+WebrtcVideoConduit::SendStreamStatistics::CumulativeLost() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mCumulativeLost;
+}
+
+uint64_t
+WebrtcVideoConduit::SendStreamStatistics::BytesReceived() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mBytesReceived;
+}
+
+uint32_t
+WebrtcVideoConduit::SendStreamStatistics::PacketsReceived() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mPacketsReceived;
 }
 
 void
 WebrtcVideoConduit::SendStreamStatistics::Update(
-  const webrtc::VideoSendStream::Stats& aStats)
+  const webrtc::VideoSendStream::Stats& aStats,
+  uint32_t aConfiguredSsrc)
 {
-  StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps);
-  if (!aStats.substreams.empty()) {
-    const webrtc::FrameCounts& fc =
-      aStats.substreams.begin()->second.frame_counts;
-    mFramesEncoded = fc.key_frames + fc.delta_frames;
-    CSFLogVerbose(LOGTAG,
-                  "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
-                  __FUNCTION__, aStats.encode_frame_rate,
-                  aStats.media_bitrate_bps,
-                  mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
-    mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
-  } else {
+  ASSERT_ON_THREAD(mStatsThread);
+
+  mSsrcFound = false;
+
+  if (aStats.substreams.empty()) {
     CSFLogVerbose(LOGTAG, "%s stats.substreams is empty", __FUNCTION__);
+    return;
   }
+
+  auto ind = aStats.substreams.find(aConfiguredSsrc);
+  if (ind == aStats.substreams.end()) {
+    CSFLogError(LOGTAG,
+      "%s for VideoConduit:%p ssrc not found in SendStream stats.",
+      __FUNCTION__, this);
+    return;
+  }
+
+  mSsrcFound = true;
+
+  StreamStatistics::Update(aStats.encode_frame_rate, aStats.media_bitrate_bps,
+                           ind->second.rtcp_packet_type_counts);
+
+  const webrtc::FrameCounts& fc = ind->second.frame_counts;
+  mFramesEncoded = fc.key_frames + fc.delta_frames;
+  CSFLogVerbose(LOGTAG,
+                "%s: framerate: %u, bitrate: %u, dropped frames delta: %u",
+                __FUNCTION__, aStats.encode_frame_rate,
+                aStats.media_bitrate_bps,
+                mFramesDeliveredToEncoder - mFramesEncoded - mDroppedFrames);
+  mDroppedFrames = mFramesDeliveredToEncoder - mFramesEncoded;
+  mJitterMs = ind->second.rtcp_stats.jitter /
+      (webrtc::kVideoPayloadTypeFrequency / 1000);
+  mCumulativeLost = ind->second.rtcp_stats.cumulative_lost;
+  mBytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
+  mPacketsReceived = ind->second.rtp_stats.transmitted.packets;
 }
 
-void
-WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets(
-  uint32_t& aOutDiscPackets) const
+uint32_t
+WebrtcVideoConduit::ReceiveStreamStatistics::DiscardedPackets() const
 {
-  aOutDiscPackets = mDiscardedPackets;
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mDiscardedPackets;
+}
+
+uint32_t
+WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mFramesDecoded;
 }
 
-void
-WebrtcVideoConduit::ReceiveStreamStatistics::FramesDecoded(
-  uint32_t& aFramesDecoded) const
+uint32_t
+WebrtcVideoConduit::ReceiveStreamStatistics::JitterMs() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mJitterMs;
+}
+
+uint32_t
+WebrtcVideoConduit::ReceiveStreamStatistics::CumulativeLost() const
 {
-  aFramesDecoded = mFramesDecoded;
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mCumulativeLost;
+}
+
+uint32_t
+WebrtcVideoConduit::ReceiveStreamStatistics::Ssrc() const
+{
+  ASSERT_ON_THREAD(mStatsThread);
+
+  return mSsrc;
 }
 
 void
 WebrtcVideoConduit::ReceiveStreamStatistics::Update(
   const webrtc::VideoReceiveStream::Stats& aStats)
 {
+  ASSERT_ON_THREAD(mStatsThread);
+
   CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
-  StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps);
+  StreamStatistics::Update(aStats.decode_frame_rate, aStats.total_bitrate_bps,
+                           aStats.rtcp_packet_type_counts);
   mDiscardedPackets = aStats.discarded_packets;
-  mFramesDecoded = aStats.frame_counts.key_frames
-                   + aStats.frame_counts.delta_frames;
+  mFramesDecoded =
+    aStats.frame_counts.key_frames + aStats.frame_counts.delta_frames;
+  mJitterMs =
+    aStats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000);
+  mCumulativeLost = aStats.rtcp_stats.cumulative_lost;
+  mSsrc = aStats.ssrc;
 }
 
 /**
  * Factory Method for VideoConduit
  */
 RefPtr<VideoSessionConduit>
-VideoSessionConduit::Create(RefPtr<WebRtcCallWrapper> aCall)
+VideoSessionConduit::Create(
+  RefPtr<WebRtcCallWrapper> aCall,
+  nsCOMPtr<nsIEventTarget> aStsThread)
 {
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
-  NS_ASSERTION(aCall, "missing required parameter: aCall");
+  MOZ_ASSERT(NS_IsMainThread());
+  MOZ_ASSERT(aCall, "missing required parameter: aCall");
   CSFLogVerbose(LOGTAG, "%s", __FUNCTION__);
 
   if (!aCall) {
     return nullptr;
   }
 
   UniquePtr<cricket::VideoAdapter> videoAdapter(new cricket::VideoAdapter(1));
   nsAutoPtr<WebrtcVideoConduit> obj(new WebrtcVideoConduit(aCall,
-                                    std::move(videoAdapter)));
+                                    std::move(videoAdapter),
+                                    aStsThread));
   if(obj->Init() != kMediaConduitNoError) {
     CSFLogError(LOGTAG, "%s VideoConduit Init Failed ", __FUNCTION__);
     return nullptr;
   }
   CSFLogVerbose(LOGTAG, "%s Successfully created VideoConduit ", __FUNCTION__);
   return obj.forget();
 }
 
 WebrtcVideoConduit::WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall,
-                                       UniquePtr<cricket::VideoAdapter>&& aVideoAdapter)
+                                       UniquePtr<cricket::VideoAdapter>&& aVideoAdapter,
+                                       nsCOMPtr<nsIEventTarget> aStsThread)
   : mTransportMonitor("WebrtcVideoConduit")
+  , mStsThread(aStsThread)
+  , mMutex("WebrtcVideoConduit::mMutex")
   , mVideoAdapter(std::move(aVideoAdapter))
   , mBufferPool(false, SCALER_BUFFER_POOL_SIZE)
   , mEngineTransmitting(false)
   , mEngineReceiving(false)
-  , mCodecMutex("VideoConduit codec db")
+  , mSendStreamStats(aStsThread)
+  , mRecvStreamStats(aStsThread)
+  , mCallStats(aStsThread)
   , mSendingFramerate(DEFAULT_VIDEO_MAX_FRAMERATE)
   , mCodecMode(webrtc::kRealtimeVideo)
-  , mCall(aCall) // refcounted store of the call object
+  , mCall(aCall)
   , mSendStreamConfig(this) // 'this' is stored but not dereferenced in the constructor.
   , mRecvStreamConfig(this) // 'this' is stored but not dereferenced in the constructor.
+  , mRecvSSRC(0)
   , mVideoStatsTimer(NS_NewTimer())
 {
   mCall->RegisterConduit(this);
   mRecvStreamConfig.renderer = this;
-
-  // Video Stats Callback
-  nsTimerCallbackFunc callback = [](nsITimer* aTimer, void* aClosure) {
-    CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
-    auto self = static_cast<WebrtcVideoConduit*>(aClosure);
-    MutexAutoLock lock(self->mCodecMutex);
-    if (self->mEngineTransmitting && self->mSendStream) {
-      const auto& stats = self->mSendStream->GetStats();
-      self->mSendStreamStats.Update(stats);
-      if (!stats.substreams.empty()) {
-          self->mSendPacketCounts =
-            stats.substreams.begin()->second.rtcp_packet_type_counts;
-      }
-    }
-    if (self->mEngineReceiving && self->mRecvStream) {
-      const auto& stats = self->mRecvStream->GetStats();
-      self->mRecvStreamStats.Update(stats);
-      self->mRecvPacketCounts = stats.rtcp_packet_type_counts;
-    }
-  };
-  mVideoStatsTimer->InitWithNamedFuncCallback(
-    callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
-    "WebrtcVideoConduit::WebrtcVideoConduit");
 }
 
 WebrtcVideoConduit::~WebrtcVideoConduit()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
   mCall->UnregisterConduit(this);
-  if (mVideoStatsTimer) {
-    CSFLogDebug(LOGTAG, "canceling StreamStats for VideoConduit: %p", this);
-    MutexAutoLock lock(mCodecMutex);
-    CSFLogDebug(LOGTAG, "StreamStats cancelled for VideoConduit: %p", this);
-    mVideoStatsTimer->Cancel();
-  }
 
   // Release AudioConduit first by dropping reference on MainThread, where it expects to be
   SyncTo(nullptr);
   MOZ_ASSERT(!mSendStream && !mRecvStream, "Call DeleteStreams prior to ~WebrtcVideoConduit.");
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::SetLocalRTPExtensions(LocalDirection aDirection,
                                           const RtpExtList& aExtensions)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   auto& extList = aDirection == LocalDirection::kSend ?
                                     mSendStreamConfig.rtp.extensions :
                                     mRecvStreamConfig.rtp.extensions;
   extList = aExtensions;
   return kMediaConduitNoError;
 }
 
 bool WebrtcVideoConduit::SetLocalSSRCs(const std::vector<unsigned int> & aSSRCs)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   // Special case: the local SSRCs are the same - do nothing.
   if (mSendStreamConfig.rtp.ssrcs == aSSRCs) {
     return true;
   }
 
-  // Update the value of the ssrcs in the config structure.
-  mSendStreamConfig.rtp.ssrcs = aSSRCs;
-
-  bool wasTransmitting = mEngineTransmitting;
-  if (StopTransmitting() != kMediaConduitNoError) {
-    return false;
-  }
-
-  MutexAutoLock lock(mCodecMutex);
-  // On the next StartTransmitting() or ConfigureSendMediaCodec, force
-  // building a new SendStream to switch SSRCs.
-  DeleteSendStream();
-  if (wasTransmitting) {
-    if (StartTransmitting() != kMediaConduitNoError) {
+  {
+    MutexAutoLock lock(mMutex);
+    // Update the value of the ssrcs in the config structure.
+    mSendStreamConfig.rtp.ssrcs = aSSRCs;
+
+    bool wasTransmitting = mEngineTransmitting;
+    if (StopTransmittingLocked() != kMediaConduitNoError) {
       return false;
     }
+
+    // On the next StartTransmitting() or ConfigureSendMediaCodec, force
+    // building a new SendStream to switch SSRCs.
+    DeleteSendStream();
+
+    if (wasTransmitting) {
+      if (StartTransmittingLocked() != kMediaConduitNoError) {
+        return false;
+      }
+    }
   }
 
   return true;
 }
 
 std::vector<unsigned int>
-WebrtcVideoConduit::GetLocalSSRCs() const
+WebrtcVideoConduit::GetLocalSSRCs()
 {
+  MutexAutoLock lock(mMutex);
+
   return mSendStreamConfig.rtp.ssrcs;
 }
 
 bool
 WebrtcVideoConduit::SetLocalCNAME(const char* cname)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
   mSendStreamConfig.rtp.c_name = cname;
   return true;
 }
 
 bool WebrtcVideoConduit::SetLocalMID(const std::string& mid)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
   mSendStreamConfig.rtp.mid = mid;
   return true;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ConfigureCodecMode(webrtc::VideoCodecMode mode)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogVerbose(LOGTAG, "%s ", __FUNCTION__);
   if (mode == webrtc::VideoCodecMode::kRealtimeVideo ||
       mode == webrtc::VideoCodecMode::kScreensharing) {
     mCodecMode = mode;
     return kMediaConduitNoError;
   }
 
   return kMediaConduitMalformedArgument;
 }
 
 void
 WebrtcVideoConduit::DeleteSendStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   if (mSendStream) {
     mCall->Call()->DestroyVideoSendStream(mSendStream);
     mSendStream = nullptr;
     mEncoder = nullptr;
   }
 }
 
 webrtc::VideoCodecType
@@ -390,17 +613,18 @@ SupportedCodecType(webrtc::VideoCodecTyp
       return webrtc::VideoCodecType::kVideoCodecUnknown;
   }
   // NOTREACHED
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateSendStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
 
   webrtc::VideoCodecType encoder_type =
     SupportedCodecType(
       webrtc::PayloadNameToCodecType(mSendStreamConfig.encoder_settings.payload_name)
         .value_or(webrtc::VideoCodecType::kVideoCodecUnknown));
   if (encoder_type == webrtc::VideoCodecType::kVideoCodecUnknown) {
     return kMediaConduitInvalidSendCodec;
   }
@@ -428,28 +652,31 @@ WebrtcVideoConduit::CreateSendStream()
   mEncoder = encoder;
 
   return kMediaConduitNoError;
 }
 
 void
 WebrtcVideoConduit::DeleteRecvStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   if (mRecvStream) {
     mCall->Call()->DestroyVideoReceiveStream(mRecvStream);
     mRecvStream = nullptr;
     mDecoders.clear();
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::CreateRecvStream()
 {
-  mCodecMutex.AssertCurrentThreadOwns();
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
 
   webrtc::VideoReceiveStream::Decoder decoder_desc;
   std::unique_ptr<webrtc::VideoDecoder> decoder;
   webrtc::VideoCodecType decoder_type;
 
   mRecvStreamConfig.decoders.clear();
   for (auto& config : mRecvCodecList) {
     decoder_type = SupportedCodecType(webrtc::PayloadNameToCodecType(config->mName)
@@ -488,19 +715,21 @@ WebrtcVideoConduit::CreateRecvStream()
   }
   CSFLogDebug(LOGTAG, "Created VideoReceiveStream %p for SSRC %u (0x%x)",
               mRecvStream, mRecvStreamConfig.rtp.remote_ssrc, mRecvStreamConfig.rtp.remote_ssrc);
 
   return kMediaConduitNoError;
 }
 
 static rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings>
-ConfigureVideoEncoderSettings(const VideoCodecConfig* aConfig,
-                              const WebrtcVideoConduit* aConduit)
+ConfigureVideoEncoderSettings(
+  const VideoCodecConfig* aConfig, const WebrtcVideoConduit* aConduit)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   bool is_screencast = aConduit->CodecMode() == webrtc::VideoCodecMode::kScreensharing;
   // No automatic resizing when using simulcast or screencast.
   bool automatic_resize = !is_screencast && aConfig->mSimulcastEncodings.size() <= 1;
   bool frame_dropping = !is_screencast;
   bool denoising;
   bool codec_default_denoising = false;
   if (is_screencast) {
     denoising = false;
@@ -544,38 +773,62 @@ ConfigureVideoEncoderSettings(const Vide
     vp9_settings.denoisingOn = codec_default_denoising ? false : denoising;
     vp9_settings.frameDroppingOn = frame_dropping;
     return new rtc::RefCountedObject<
         webrtc::VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
   }
   return nullptr;
 }
 
+// Compare lists of codecs
+static bool
+CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
+                const nsTArray<UniquePtr<VideoCodecConfig>>& b)
+{
+  // return a != b;
+  // would work if UniquePtr<> operator== compared contents!
+  auto len = a.Length();
+  if (len != b.Length()) {
+    return true;
+  }
+
+  // XXX std::equal would work, if we could use it on this - fails for the
+  // same reason as above.  c++14 would let us pass a comparator function.
+  for (uint32_t i = 0; i < len; ++i) {
+    if (!(*a[i] == *b[i])) {
+      return true;
+    }
+  }
+
+  return false;
+}
+
 std::vector<webrtc::VideoStream>
-WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(int width, int height,
-                                                             const webrtc::VideoEncoderConfig& config)
+WebrtcVideoConduit::VideoStreamFactory::CreateEncoderStreams(
+  int width, int height, const webrtc::VideoEncoderConfig& config)
 {
   size_t streamCount = config.number_of_streams;
+  webrtc::VideoCodecMode codecMode = mConduit->mCodecMode;
 
   // Disallow odd width and height, they will cause aspect ratio checks to
   // fail in the webrtc.org code. We can hit transient states after window
   // sharing ends where odd resolutions are requested for the camera.
   streamCount = std::min(streamCount, static_cast<size_t>(
                          1 + std::min(CountTrailingZeroes32(width),
                                       CountTrailingZeroes32(height))));
 
   // We only allow one layer when screensharing
-  if (mConduit->mCodecMode == webrtc::VideoCodecMode::kScreensharing) {
+  if (codecMode == webrtc::VideoCodecMode::kScreensharing) {
     streamCount = 1;
   }
 
   std::vector<webrtc::VideoStream> streams;
   streams.reserve(streamCount);
   MOZ_ASSERT(mConduit);
-  MutexAutoLock lock(mConduit->mCodecMutex); // for mCurSendCodecConfig
+  MutexAutoLock lock(mConduit->mMutex);
 
   // XXX webrtc.org code has a restriction on simulcast layers that each
   // layer must be 1/2 the dimension of the previous layer - not sure why.
   // This means we can't use scaleResolutionBy/scaleDownBy (yet), even if
   // the user specified it.  The one exception is that we can apply it on
   // the full-resolution stream (which also happens to handle the
   // non-simulcast usage case). NOTE: we make an assumption here, not in the
   // spec, that the first stream is the full-resolution stream.
@@ -633,17 +886,17 @@ WebrtcVideoConduit::VideoStreamFactory::
       // which use 1 (i.e 2 layers).
 
       // Oddly, though this is a 'bps' array, nothing really looks at the
       // values for normal video, just the size of the array to know the
       // number of temporal layers.
       // For VideoEncoderConfig::ContentType::kScreen, though, in
       // video_codec_initializer.cc it uses [0] to set the target bitrate
       // for the screenshare.
-      if (mConduit->mCodecMode == webrtc::VideoCodecMode::kScreensharing) {
+      if (codecMode == webrtc::VideoCodecMode::kScreensharing) {
         video_stream.temporal_layer_thresholds_bps.push_back(video_stream.target_bitrate_bps);
       } else {
         video_stream.temporal_layer_thresholds_bps.resize(2);
       }
       // XXX Bug 1390215 investigate using more of
       // simulcast.cc:GetSimulcastConfig() or our own algorithm to replace it
     }
 
@@ -666,16 +919,19 @@ WebrtcVideoConduit::VideoStreamFactory::
  * videoframe delivery threads (i.e in SendVideoFrame().  With
  * renegotiation/reconfiguration, this now needs a lock!  Alternatively
  * changes could be queued until the next frame is delivered using an
  * Atomic pointer and swaps.
  */
 MediaConduitErrorCode
 WebrtcVideoConduit::ConfigureSendMediaCodec(const VideoCodecConfig* codecConfig)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
   CSFLogDebug(LOGTAG, "%s for %s", __FUNCTION__,
     codecConfig ? codecConfig->mName.c_str() : "<null>");
 
   MediaConduitErrorCode condError = kMediaConduitNoError;
 
   // validate basic params
   if ((condError = ValidateCodecConfig(codecConfig)) != kMediaConduitNoError) {
     return condError;
@@ -743,32 +999,28 @@ WebrtcVideoConduit::ConfigureSendMediaCo
   // Expected max number of encodings
   mEncoderConfig.SetMaxEncodings(codecConfig->mSimulcastEncodings.size());
 
   // If only encoder stream attibutes have been changed, there is no need to stop,
   // create a new webrtc::VideoSendStream, and restart.
   // Recreating on PayloadType change may be overkill, but is safe.
   if (mSendStream) {
     if (!RequiresNewSendStream(*codecConfig)) {
-      {
-        MutexAutoLock lock(mCodecMutex);
-        mCurSendCodecConfig->mEncodingConstraints = codecConfig->mEncodingConstraints;
-        mCurSendCodecConfig->mSimulcastEncodings = codecConfig->mSimulcastEncodings;
-      }
+      mCurSendCodecConfig->mEncodingConstraints = codecConfig->mEncodingConstraints;
+      mCurSendCodecConfig->mSimulcastEncodings = codecConfig->mSimulcastEncodings;
       mSendStream->ReconfigureVideoEncoder(mEncoderConfig.CopyConfig());
       return kMediaConduitNoError;
     }
 
-    condError = StopTransmitting();
+    condError = StopTransmittingLocked();
     if (condError != kMediaConduitNoError) {
       return condError;
     }
 
     // This will cause a new encoder to be created by StartTransmitting()
-    MutexAutoLock lock(mCodecMutex);
     DeleteSendStream();
   }
 
   mSendStreamConfig.encoder_settings.payload_name = codecConfig->mName;
   mSendStreamConfig.encoder_settings.payload_type = codecConfig->mType;
   mSendStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
   mSendStreamConfig.rtp.max_packet_size = kVideoMtu;
 
@@ -783,21 +1035,18 @@ WebrtcVideoConduit::ConfigureSendMediaCo
     mSendStreamConfig.rtp.ulpfec.ulpfec_payload_type = -1;
     mSendStreamConfig.rtp.ulpfec.red_payload_type = -1;
     mSendStreamConfig.rtp.ulpfec.red_rtx_payload_type = -1;
   }
 
   mSendStreamConfig.rtp.nack.rtp_history_ms =
     codecConfig->RtcpFbNackIsSet("") ? 1000 : 0;
 
-  {
-    MutexAutoLock lock(mCodecMutex);
-    // Copy the applied config for future reference.
-    mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
-  }
+  // Copy the applied config for future reference.
+  mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
 
   mSendStreamConfig.rtp.rids.clear();
   bool has_rid = false;
   for (size_t idx = 0; idx < streamCount; idx++) {
     auto& simulcastEncoding = mCurSendCodecConfig->mSimulcastEncodings[idx];
     if (simulcastEncoding.rid[0]) {
       has_rid = true;
       break;
@@ -827,65 +1076,79 @@ GenerateRandomSSRC()
   } while (ssrc == 0); // webrtc.org code has fits if you select an SSRC of 0
 
   return ssrc;
 }
 
 bool
 WebrtcVideoConduit::SetRemoteSSRC(unsigned int ssrc)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
+  return SetRemoteSSRCLocked(ssrc);
+}
+
+bool
+WebrtcVideoConduit::SetRemoteSSRCLocked(unsigned int ssrc)
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   unsigned int current_ssrc;
-  if (!GetRemoteSSRC(&current_ssrc)) {
+  if (!GetRemoteSSRCLocked(&current_ssrc)) {
     return false;
   }
 
   if (current_ssrc == ssrc) {
     return true;
   }
 
   bool wasReceiving = mEngineReceiving;
-  if (StopReceiving() != kMediaConduitNoError) {
-    return false;
-  }
-
-  CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
-  if (!mCall->UnsetRemoteSSRC(ssrc)) {
-    CSFLogError(LOGTAG, "%s: Failed to unset SSRC %u (0x%x) on other conduits,"
-                " bailing", __FUNCTION__, ssrc, ssrc);
+  if (StopReceivingLocked() != kMediaConduitNoError) {
     return false;
   }
-  mRecvStreamConfig.rtp.remote_ssrc = ssrc;
-  mWaitingForInitialSsrc = false;
-
-  // This will destroy mRecvStream and create a new one (argh, why can't we change
-  // it without a full destroy?)
-  // We're going to modify mRecvStream, we must lock.  Only modified on MainThread.
-  // All non-MainThread users must lock before reading/using
+
   {
-    MutexAutoLock lock(mCodecMutex);
-    // On the next StartReceiving() or ConfigureRecvMediaCodec, force
-    // building a new RecvStream to switch SSRCs.
-    DeleteRecvStream();
-    if (!wasReceiving) {
-      return true;
-    }
-    MediaConduitErrorCode rval = CreateRecvStream();
-    if (rval != kMediaConduitNoError) {
-      CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
+    CSFLogDebug(LOGTAG, "%s: SSRC %u (0x%x)", __FUNCTION__, ssrc, ssrc);
+    MutexAutoUnlock unlock(mMutex);
+    if (!mCall->UnsetRemoteSSRC(ssrc)) {
+      CSFLogError(LOGTAG, "%s: Failed to unset SSRC %u (0x%x) on other conduits,"
+                  " bailing", __FUNCTION__, ssrc, ssrc);
       return false;
     }
   }
-  return (StartReceiving() == kMediaConduitNoError);
+
+  mRecvStreamConfig.rtp.remote_ssrc = ssrc;
+  mStsThread->Dispatch(NS_NewRunnableFunction(
+    "WebrtcVideoConduit::WaitingForInitialSsrcNoMore",
+    [this, self = RefPtr<WebrtcVideoConduit>(this)]() {
+      mWaitingForInitialSsrc = false;
+    }));
+  // On the next StartReceiving() or ConfigureRecvMediaCodec, force
+  // building a new RecvStream to switch SSRCs.
+  DeleteRecvStream();
+
+  if (wasReceiving) {
+    if (StartReceivingLocked() != kMediaConduitNoError) {
+      return false;
+    }
+  }
+
+  return true;
 }
 
 bool
 WebrtcVideoConduit::UnsetRemoteSSRC(uint32_t ssrc)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
   unsigned int our_ssrc;
-  if (!GetRemoteSSRC(&our_ssrc)) {
+  if (!GetRemoteSSRCLocked(&our_ssrc)) {
     // This only fails when we aren't sending, which isn't really an error here
     return true;
   }
 
   if (our_ssrc != ssrc) {
     return true;
   }
 
@@ -894,204 +1157,271 @@ WebrtcVideoConduit::UnsetRemoteSSRC(uint
     if (our_ssrc == 0) {
       return false;
     }
   }
 
   // There is a (tiny) chance that this new random ssrc will collide with some
   // other conduit's remote ssrc, in which case that conduit will choose a new
   // one.
-  SetRemoteSSRC(our_ssrc);
+  SetRemoteSSRCLocked(our_ssrc);
   return true;
 }
 
 bool
 WebrtcVideoConduit::GetRemoteSSRC(unsigned int* ssrc)
 {
-  {
-    MutexAutoLock lock(mCodecMutex);
+  MutexAutoLock lock(mMutex);
+
+  return GetRemoteSSRCLocked(ssrc);
+}
+
+bool
+WebrtcVideoConduit::GetRemoteSSRCLocked(unsigned int* ssrc)
+{
+  mMutex.AssertCurrentThreadOwns();
+
+  if (NS_IsMainThread()) {
     if (!mRecvStream) {
       return false;
     }
-
-    const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
-    *ssrc = stats.ssrc;
+    *ssrc = mRecvStream->GetStats().ssrc;
+  } else {
+    ASSERT_ON_THREAD(mStsThread);
+    *ssrc = mRecvStreamStats.Ssrc();
   }
-
   return true;
 }
 
 bool
 WebrtcVideoConduit::GetSendPacketTypeStats(
     webrtc::RtcpPacketTypeCounter* aPacketCounts)
 {
-  MutexAutoLock lock(mCodecMutex);
-  if (!mEngineTransmitting || !mSendStream) { // Not transmitting
+  ASSERT_ON_THREAD(mStsThread);
+
+  MutexAutoLock lock(mMutex);
+  if (!mSendStreamStats.Active()) {
     return false;
   }
-  *aPacketCounts = mSendPacketCounts;
+  *aPacketCounts = mSendStreamStats.PacketCounts();
   return true;
 }
 
 bool
 WebrtcVideoConduit::GetRecvPacketTypeStats(
     webrtc::RtcpPacketTypeCounter* aPacketCounts)
 {
-  MutexAutoLock lock(mCodecMutex);
-  if (!mEngineReceiving || !mRecvStream) { // Not receiving
+  ASSERT_ON_THREAD(mStsThread);
+
+  if (!mRecvStreamStats.Active()) {
     return false;
   }
-  *aPacketCounts = mRecvPacketCounts;
+  *aPacketCounts = mRecvStreamStats.PacketCounts();
   return true;
 }
 
+void
+WebrtcVideoConduit::PollStats()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  nsTArray<RefPtr<Runnable>> runnables(2);
+  if (mEngineTransmitting) {
+    MOZ_RELEASE_ASSERT(mSendStream);
+    if (!mSendStreamConfig.rtp.ssrcs.empty()) {
+      uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
+      webrtc::VideoSendStream::Stats stats = mSendStream->GetStats();
+      runnables.AppendElement(NS_NewRunnableFunction(
+        "WebrtcVideoConduit::SendStreamStatistics::Update",
+        [this, self = RefPtr<WebrtcVideoConduit>(this),
+         stats = std::move(stats), ssrc]()
+        {
+          mSendStreamStats.Update(stats, ssrc);
+        }));
+    }
+  }
+  if (mEngineReceiving) {
+    MOZ_RELEASE_ASSERT(mRecvStream);
+    webrtc::VideoReceiveStream::Stats stats = mRecvStream->GetStats();
+    runnables.AppendElement(NS_NewRunnableFunction(
+      "WebrtcVideoConduit::RecvStreamStatistics::Update",
+      [this, self = RefPtr<WebrtcVideoConduit>(this),
+       stats = std::move(stats)]()
+      {
+        mRecvStreamStats.Update(stats);
+      }));
+  }
+  webrtc::Call::Stats stats = mCall->Call()->GetStats();
+  mStsThread->Dispatch(NS_NewRunnableFunction(
+    "WebrtcVideoConduit::UpdateStreamStatistics",
+    [this, self = RefPtr<WebrtcVideoConduit>(this),
+     stats = std::move(stats), runnables = std::move(runnables)]()
+    {
+      mCallStats.Update(stats);
+      for (const auto& runnable : runnables) {
+        runnable->Run();
+      }
+    }));
+}
+
+void
+WebrtcVideoConduit::UpdateVideoStatsTimer()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+
+  bool transmitting = mEngineTransmitting;
+  bool receiving = mEngineReceiving;
+  mStsThread->Dispatch(NS_NewRunnableFunction(
+    "WebrtcVideoConduit::SetSendStreamStatsActive",
+    [this, self = RefPtr<WebrtcVideoConduit>(this), transmitting, receiving]()
+    {
+      mSendStreamStats.SetActive(transmitting);
+      mRecvStreamStats.SetActive(receiving);
+    }));
+
+  bool shouldBeActive = transmitting || receiving;
+  if (mVideoStatsTimerActive == shouldBeActive) {
+    return;
+  }
+  mVideoStatsTimerActive = shouldBeActive;
+  if (shouldBeActive) {
+    nsTimerCallbackFunc callback = [](nsITimer*, void* aClosure)
+    {
+      CSFLogDebug(LOGTAG, "StreamStats polling scheduled for VideoConduit: %p", aClosure);
+      static_cast<WebrtcVideoConduit*>(aClosure)->PollStats();
+    };
+    mVideoStatsTimer->InitWithNamedFuncCallback(
+      callback, this, 1000, nsITimer::TYPE_REPEATING_PRECISE_CAN_SKIP,
+      "WebrtcVideoConduit::SendStreamStatsUpdater");
+  } else {
+    mVideoStatsTimer->Cancel();
+  }
+}
+
 bool
 WebrtcVideoConduit::GetVideoEncoderStats(double* framerateMean,
                                          double* framerateStdDev,
                                          double* bitrateMean,
                                          double* bitrateStdDev,
                                          uint32_t* droppedFrames,
                                          uint32_t* framesEncoded)
 {
-  {
-    MutexAutoLock lock(mCodecMutex);
-    if (!mEngineTransmitting || !mSendStream) {
-      return false;
-    }
-    mSendStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
-      *bitrateMean, *bitrateStdDev);
-    mSendStreamStats.DroppedFrames(*droppedFrames);
-    *framesEncoded = mSendStreamStats.FramesEncoded();
-    return true;
+  ASSERT_ON_THREAD(mStsThread);
+
+  MutexAutoLock lock(mMutex);
+  if (!mEngineTransmitting || !mSendStream) {
+    return false;
   }
+  mSendStreamStats.GetVideoStreamStats(
+    *framerateMean, *framerateStdDev, *bitrateMean, *bitrateStdDev);
+  *droppedFrames = mSendStreamStats.DroppedFrames();
+  *framesEncoded = mSendStreamStats.FramesEncoded();
+  return true;
 }
 
 bool
 WebrtcVideoConduit::GetVideoDecoderStats(double* framerateMean,
                                          double* framerateStdDev,
                                          double* bitrateMean,
                                          double* bitrateStdDev,
                                          uint32_t* discardedPackets,
                                          uint32_t* framesDecoded)
 {
-  {
-    MutexAutoLock lock(mCodecMutex);
-    if (!mEngineReceiving || !mRecvStream) {
-      return false;
-    }
-    mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
-      *bitrateMean, *bitrateStdDev);
-    mRecvStreamStats.DiscardedPackets(*discardedPackets);
-    mRecvStreamStats.FramesDecoded(*framesDecoded);
-    return true;
+  ASSERT_ON_THREAD(mStsThread);
+
+  MutexAutoLock lock(mMutex);
+  if (!mEngineReceiving || !mRecvStream) {
+    return false;
   }
+  mRecvStreamStats.GetVideoStreamStats(*framerateMean, *framerateStdDev,
+    *bitrateMean, *bitrateStdDev);
+  *discardedPackets = mRecvStreamStats.DiscardedPackets();
+  *framesDecoded = mRecvStreamStats.FramesDecoded();
+  return true;
 }
 
 bool
 WebrtcVideoConduit::GetAVStats(int32_t* jitterBufferDelayMs,
                                int32_t* playoutBufferDelayMs,
                                int32_t* avSyncOffsetMs)
 {
+  ASSERT_ON_THREAD(mStsThread);
+
   return false;
 }
 
 bool
-WebrtcVideoConduit::GetRTPStats(unsigned int* jitterMs,
-                                unsigned int* cumulativeLost)
+WebrtcVideoConduit::GetRTPStats(uint32_t* jitterMs,
+                                uint32_t* cumulativeLost)
 {
+  ASSERT_ON_THREAD(mStsThread);
+
   CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
-  {
-    MutexAutoLock lock(mCodecMutex);
-    if (!mRecvStream) {
-      return false;
-    }
-
-    const webrtc::VideoReceiveStream::Stats& stats = mRecvStream->GetStats();
-    *jitterMs =
-        stats.rtcp_stats.jitter / (webrtc::kVideoPayloadTypeFrequency / 1000);
-    *cumulativeLost = stats.rtcp_stats.cumulative_lost;
+  MutexAutoLock lock(mMutex);
+  if (!mRecvStream) {
+    return false;
   }
+
+  *jitterMs = mRecvStreamStats.JitterMs();
+  *cumulativeLost = mRecvStreamStats.CumulativeLost();
   return true;
 }
 
 bool WebrtcVideoConduit::GetRTCPReceiverReport(DOMHighResTimeStamp* timestamp,
                                                uint32_t* jitterMs,
                                                uint32_t* packetsReceived,
                                                uint64_t* bytesReceived,
                                                uint32_t* cumulativeLost,
                                                int32_t* rttMs)
 {
-  {
-    CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
-    MutexAutoLock lock(mCodecMutex);
-    if (!mSendStream) {
-      return false;
-    }
-    const webrtc::VideoSendStream::Stats& sendStats = mSendStream->GetStats();
-    if (sendStats.substreams.empty()
-        || mSendStreamConfig.rtp.ssrcs.empty()) {
-      return false;
-    }
-    uint32_t ssrc = mSendStreamConfig.rtp.ssrcs.front();
-    auto ind = sendStats.substreams.find(ssrc);
-    if (ind == sendStats.substreams.end()) {
-      CSFLogError(LOGTAG,
-        "%s for VideoConduit:%p ssrc not found in SendStream stats.",
-        __FUNCTION__, this);
-      return false;
-    }
-    *jitterMs = ind->second.rtcp_stats.jitter
-        / (webrtc::kVideoPayloadTypeFrequency / 1000);
-    *cumulativeLost = ind->second.rtcp_stats.cumulative_lost;
-    *bytesReceived = ind->second.rtp_stats.MediaPayloadBytes();
-    *packetsReceived = ind->second.rtp_stats.transmitted.packets;
-    auto stats = mCall->Call()->GetStats();
-    int64_t rtt = stats.rtt_ms;
-#ifdef DEBUG
-    if (rtt > INT32_MAX) {
-      CSFLogError(LOGTAG,
-        "%s for VideoConduit:%p RTT is larger than the"
-        " maximum size of an RTCP RTT.", __FUNCTION__, this);
-    }
-#endif
-    if (rtt > 0) {
-      *rttMs = rtt;
-    } else {
-      *rttMs = 0;
-    }
-    // Note: timestamp is not correct per the spec... should be time the rtcp
-    // was received (remote) or sent (local)
-    *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
+  ASSERT_ON_THREAD(mStsThread);
+
+  CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
+  if (!mSendStreamStats.Active()) {
+    return false;
+  }
+  if (!mSendStreamStats.SsrcFound()) {
+    return false;
   }
+  *jitterMs = mSendStreamStats.JitterMs();
+  *packetsReceived = mSendStreamStats.PacketsReceived();
+  *bytesReceived = mSendStreamStats.BytesReceived();
+  *cumulativeLost = mSendStreamStats.CumulativeLost();
+  *rttMs = mCallStats.RttMs();
+  // Note: timestamp is not correct per the spec... should be time the rtcp
+  // was received (remote) or sent (local)
+  *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
+
   return true;
 }
 
 bool
 WebrtcVideoConduit::GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                                         unsigned int* packetsSent,
                                         uint64_t* bytesSent)
 {
+  ASSERT_ON_THREAD(mStsThread);
+
   CSFLogVerbose(LOGTAG, "%s for VideoConduit:%p", __FUNCTION__, this);
   webrtc::RTCPSenderInfo senderInfo;
   {
-    MutexAutoLock lock(mCodecMutex);
+    MutexAutoLock lock(mMutex);
     if (!mRecvStream || !mRecvStream->GetRemoteRTCPSenderInfo(&senderInfo)) {
       return false;
     }
   }
   *timestamp = webrtc::Clock::GetRealTimeClock()->TimeInMilliseconds();
   *packetsSent = senderInfo.sendPacketCount;
   *bytesSent = senderInfo.sendOctetCount;
   return true;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::InitMain()
 {
-  // already know we must be on MainThread barring unit test weirdness
   MOZ_ASSERT(NS_IsMainThread());
 
   nsresult rv;
   nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
   if (!NS_WARN_IF(NS_FAILED(rv))) {
     nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
 
     if (branch) {
@@ -1169,66 +1499,70 @@ WebrtcVideoConduit::InitMain()
 }
 
 /**
  * Performs initialization of the MANDATORY components of the Video Engine
  */
 MediaConduitErrorCode
 WebrtcVideoConduit::Init()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s this=%p", __FUNCTION__, this);
   MediaConduitErrorCode result;
-  // Run code that must run on MainThread first
-  MOZ_ASSERT(NS_IsMainThread());
   result = InitMain();
   if (result != kMediaConduitNoError) {
     return result;
   }
 
   CSFLogDebug(LOGTAG, "%s Initialization Done", __FUNCTION__);
   return kMediaConduitNoError;
 }
 
 void
 WebrtcVideoConduit::DeleteStreams()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   // We can't delete the VideoEngine until all these are released!
   // And we can't use a Scoped ptr, since the order is arbitrary
 
-  MutexAutoLock lock(mCodecMutex);
+  MutexAutoLock lock(mMutex);
   DeleteSendStream();
   DeleteRecvStream();
 }
 
 void
 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit* aConduit)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s Synced to %p", __FUNCTION__, aConduit);
-  {
-    MutexAutoLock lock(mCodecMutex);
-
-    if (!mRecvStream) {
-      CSFLogError(LOGTAG, "SyncTo called with no receive stream");
-      return;
-    }
-
-    if (aConduit) {
-      mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(),
-                                  aConduit->GetChannel());
-    } else if (mSyncedTo) {
-      mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1);
-    }
+
+  if (!mRecvStream) {
+    CSFLogError(LOGTAG, "SyncTo called with no receive stream");
+    return;
+  }
+
+  MutexAutoLock lock(mMutex);
+  if (aConduit) {
+    mRecvStream->SetSyncChannel(aConduit->GetVoiceEngine(),
+                                aConduit->GetChannel());
+  } else if (mSyncedTo) {
+    mRecvStream->SetSyncChannel(mSyncedTo->GetVoiceEngine(), -1);
   }
 
   mSyncedTo = aConduit;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::AttachRenderer(RefPtr<mozilla::VideoRenderer> aVideoRenderer)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s", __FUNCTION__);
 
   // null renderer
   if (!aVideoRenderer) {
     CSFLogError(LOGTAG, "%s NULL Renderer", __FUNCTION__);
     MOZ_ASSERT(false);
     return kMediaConduitInvalidRenderer;
   }
@@ -1244,51 +1578,57 @@ WebrtcVideoConduit::AttachRenderer(RefPt
   }
 
   return kMediaConduitNoError;
 }
 
 void
 WebrtcVideoConduit::DetachRenderer()
 {
-  {
-    ReentrantMonitorAutoEnter enter(mTransportMonitor);
-    if (mRenderer) {
-      mRenderer = nullptr;
-    }
+  MOZ_ASSERT(NS_IsMainThread());
+
+  ReentrantMonitorAutoEnter enter(mTransportMonitor);
+  if (mRenderer) {
+    mRenderer = nullptr;
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::SetTransmitterTransport(
   RefPtr<TransportInterface> aTransport)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
 
   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   // set the transport
   mTransmitterTransport = aTransport;
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::SetReceiverTransport(RefPtr<TransportInterface> aTransport)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
 
   ReentrantMonitorAutoEnter enter(mTransportMonitor);
   // set the transport
   mReceiverTransport = aTransport;
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ConfigureRecvMediaCodecs(
   const std::vector<VideoCodecConfig* >& codecConfigList)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   CSFLogDebug(LOGTAG, "%s ", __FUNCTION__);
   MediaConduitErrorCode condError = kMediaConduitNoError;
   std::string payloadName;
 
   if (codecConfigList.empty()) {
     CSFLogError(LOGTAG, "%s Zero number of codecs to configure", __FUNCTION__);
     return kMediaConduitMalformedArgument;
   }
@@ -1367,18 +1707,19 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
       CodecsDifferent(recv_codecs, mRecvCodecList) ||
       mRecvStreamConfig.rtp.nack.rtp_history_ms != (use_nack_basic ? 1000 : 0) ||
       mRecvStreamConfig.rtp.remb != use_remb ||
       mRecvStreamConfig.rtp.tmmbr != use_tmmbr ||
       mRecvStreamConfig.rtp.keyframe_method != kf_request_method ||
       (use_fec &&
        (mRecvStreamConfig.rtp.ulpfec.ulpfec_payload_type != ulpfec_payload_type ||
         mRecvStreamConfig.rtp.ulpfec.red_payload_type != red_payload_type))) {
-
-    condError = StopReceiving();
+    MutexAutoLock lock(mMutex);
+
+    condError = StopReceivingLocked();
     if (condError != kMediaConduitNoError) {
       return condError;
     }
 
     // If we fail after here things get ugly
     mRecvStreamConfig.rtp.rtcp_mode = webrtc::RtcpMode::kCompound;
     mRecvStreamConfig.rtp.nack.rtp_history_ms = use_nack_basic ? 1000 : 0;
     mRecvStreamConfig.rtp.remb = use_remb;
@@ -1439,34 +1780,27 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
                 (uint32_t) mSendStreamConfig.rtp.ssrcs.size(),
                 mRecvStreamConfig.rtp.remote_ssrc);
 
     // XXX Copy over those that are the same and don't rebuild them
     mRecvCodecList.SwapElements(recv_codecs);
     recv_codecs.Clear();
     mRecvStreamConfig.rtp.rtx.clear();
 
-    {
-      MutexAutoLock lock(mCodecMutex);
-      DeleteRecvStream();
-      // Rebuilds mRecvStream from mRecvStreamConfig
-      MediaConduitErrorCode rval = CreateRecvStream();
-      if (rval != kMediaConduitNoError) {
-        CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
-        return rval;
-      }
-    }
-    return StartReceiving();
+    DeleteRecvStream();
+    return StartReceivingLocked();
   }
   return kMediaConduitNoError;
 }
 
 webrtc::VideoDecoder*
 WebrtcVideoConduit::CreateDecoder(webrtc::VideoCodecType aType)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   webrtc::VideoDecoder* decoder = nullptr;
 #ifdef MOZ_WEBRTC_MEDIACODEC
   bool enabled = false;
 #endif
 
   // Attempt to create a decoder using MediaDataDecoder.
   decoder = MediaDataDecoderCodec::CreateDecoder(aType);
   if (decoder) {
@@ -1524,16 +1858,18 @@ WebrtcVideoConduit::CreateDecoder(webrtc
 
   return decoder;
 }
 
 webrtc::VideoEncoder*
 WebrtcVideoConduit::CreateEncoder(webrtc::VideoCodecType aType,
                                   bool enable_simulcast)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   webrtc::VideoEncoder* encoder = nullptr;
 #ifdef MOZ_WEBRTC_MEDIACODEC
   bool enabled = false;
 #endif
 
   switch (aType) {
     case webrtc::VideoCodecType::kVideoCodecH264:
       // get an external encoder
@@ -1623,16 +1959,18 @@ GetLimitsFor(unsigned int aWidth, unsign
   MOZ_CRASH("Loop should have handled fallback");
 }
 
 void
 WebrtcVideoConduit::SelectBitrates(
   unsigned short width, unsigned short height, int cap,
   webrtc::VideoStream& aVideoStream)
 {
+  mMutex.AssertCurrentThreadOwns();
+
   int& out_min = aVideoStream.min_bitrate_bps;
   int& out_start = aVideoStream.target_bitrate_bps;
   int& out_max = aVideoStream.max_bitrate_bps;
 
   ResolutionAndBitrateLimits resAndLimits = GetLimitsFor(width, height);
   out_min = MinIgnoreZero(resAndLimits.min_bitrate_bps, cap);
   out_start = MinIgnoreZero(resAndLimits.start_bitrate_bps, cap);
   out_max = MinIgnoreZero(resAndLimits.max_bitrate_bps, cap);
@@ -1663,38 +2001,39 @@ WebrtcVideoConduit::SelectBitrates(
   out_start = std::min(out_max, std::max(out_start, out_min));
 
   MOZ_ASSERT(mPrefMaxBitrate == 0 || out_max <= mPrefMaxBitrate);
 }
 
 // XXX we need to figure out how to feed back changes in preferred capture
 // resolution to the getUserMedia source.
 void
-WebrtcVideoConduit::SelectSendResolution(unsigned short width,
-                                         unsigned short height)
+WebrtcVideoConduit::SelectSendResolution(
+  unsigned short width, unsigned short height)
 {
-  mCodecMutex.AssertCurrentThreadOwns();
+  mMutex.AssertCurrentThreadOwns();
   // XXX This will do bandwidth-resolution adaptation as well - bug 877954
 
   // Enforce constraints
   if (mCurSendCodecConfig) {
     uint16_t max_width = mCurSendCodecConfig->mEncodingConstraints.maxWidth;
     uint16_t max_height = mCurSendCodecConfig->mEncodingConstraints.maxHeight;
     if (max_width || max_height) {
       max_width = max_width ? max_width : UINT16_MAX;
       max_height = max_height ? max_height : UINT16_MAX;
       ConstrainPreservingAspectRatio(max_width, max_height, &width, &height);
     }
 
     // Limit resolution to max-fs
+    const auto& wants = mVideoBroadcaster.wants();
     if (mCurSendCodecConfig->mEncodingConstraints.maxFs) {
       // max-fs is in macroblocks, convert to pixels
       int max_fs(mCurSendCodecConfig->mEncodingConstraints.maxFs*(16*16));
-      if (max_fs > mLastSinkWanted.max_pixel_count.value_or(max_fs)) {
-        max_fs = mLastSinkWanted.max_pixel_count.value_or(max_fs);
+      if (max_fs > wants.max_pixel_count.value_or(max_fs)) {
+        max_fs = wants.max_pixel_count.value_or(max_fs);
       }
       mVideoAdapter->OnResolutionRequest(
         rtc::Optional<int>(max_fs), rtc::Optional<int>());
     }
   }
 
   unsigned int framerate = SelectSendFrameRate(mCurSendCodecConfig,
                                                mSendingFramerate,
@@ -1702,85 +2041,68 @@ WebrtcVideoConduit::SelectSendResolution
                                                height);
   if (mSendingFramerate != framerate) {
     CSFLogDebug(LOGTAG, "%s: framerate changing to %u (from %u)",
                 __FUNCTION__, framerate, mSendingFramerate);
     mSendingFramerate = framerate;
   }
 }
 
-unsigned int
-WebrtcVideoConduit::SelectSendFrameRate(const VideoCodecConfig* codecConfig,
-                                        unsigned int old_framerate,
-                                        unsigned short sending_width,
-                                        unsigned short sending_height) const
-{
-  unsigned int new_framerate = old_framerate;
-
-  // Limit frame rate based on max-mbps
-  if (codecConfig && codecConfig->mEncodingConstraints.maxMbps)
-  {
-    unsigned int cur_fs, mb_width, mb_height;
-
-    mb_width = (sending_width + 15) >> 4;
-    mb_height = (sending_height + 15) >> 4;
-
-    cur_fs = mb_width * mb_height;
-    if (cur_fs > 0) { // in case no frames have been sent
-      new_framerate = codecConfig->mEncodingConstraints.maxMbps / cur_fs;
-
-      new_framerate = MinIgnoreZero(new_framerate, codecConfig->mEncodingConstraints.maxFps);
-    }
-  }
-  return new_framerate;
-}
-
 void
 WebrtcVideoConduit::AddOrUpdateSink(
   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink,
   const rtc::VideoSinkWants& wants)
 {
-  CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
-              mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
-              wants.max_pixel_count ? *wants.max_pixel_count : -1,
-              wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
-
-  // MUST run on the same thread as first call (MainThread)
   if (!NS_IsMainThread()) {
-    // This can be asynchronous
-    RefPtr<WebrtcVideoConduit> self(this);
-    NS_DispatchToMainThread(media::NewRunnableFrom([self, sink, wants]() {
-          self->mVideoBroadcaster.AddOrUpdateSink(sink, wants);
-          self->OnSinkWantsChanged(self->mVideoBroadcaster.wants());
-          return NS_OK;
-        }));
-  } else {
-    mVideoBroadcaster.AddOrUpdateSink(sink, wants);
-    OnSinkWantsChanged(mVideoBroadcaster.wants());
+    // This may be called off main thread, but only to update an already added
+    // sink. If we add it after the dispatch we're at risk of a UAF.
+    NS_DispatchToMainThread(NS_NewRunnableFunction(
+      "WebrtcVideoConduit::UpdateSink",
+      [this, self = RefPtr<WebrtcVideoConduit>(this),
+       sink, wants = std::move(wants)]()
+      {
+        if (mRegisteredSinks.Contains(sink)) {
+          AddOrUpdateSink(sink, wants);
+        }
+      }));
+    return;
   }
+
+  if (!mRegisteredSinks.Contains(sink)) {
+    mRegisteredSinks.AppendElement(sink);
+  }
+  mVideoBroadcaster.AddOrUpdateSink(sink, wants);
+  OnSinkWantsChanged(mVideoBroadcaster.wants());
 }
 
 void
 WebrtcVideoConduit::RemoveSink(
   rtc::VideoSinkInterface<webrtc::VideoFrame>* sink)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
+  mRegisteredSinks.RemoveElement(sink);
   mVideoBroadcaster.RemoveSink(sink);
   OnSinkWantsChanged(mVideoBroadcaster.wants());
 }
 
 void
 WebrtcVideoConduit::OnSinkWantsChanged(
   const rtc::VideoSinkWants& wants)
 {
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+  MOZ_ASSERT(NS_IsMainThread());
 
   if (mLockScaling) {
     return;
   }
-  mLastSinkWanted = wants;
+
+  CSFLogDebug(LOGTAG, "%s (send SSRC %u (0x%x)) - wants pixels = %d/%d", __FUNCTION__,
+              mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front(),
+              wants.max_pixel_count ? *wants.max_pixel_count : -1,
+              wants.max_pixel_count_step_up ? *wants.max_pixel_count_step_up : -1);
 
   if (!mCurSendCodecConfig) {
     return;
   }
 
   // limit sink wants based upon max-fs constraint
   int max_fs = mCurSendCodecConfig->mEncodingConstraints.maxFs*(16*16);
   rtc::Optional<int> max_pixel_count = wants.max_pixel_count;
@@ -1805,47 +2127,51 @@ WebrtcVideoConduit::OnSinkWantsChanged(
 MediaConduitErrorCode
 WebrtcVideoConduit::SendVideoFrame(const webrtc::VideoFrame& frame)
 {
   // XXX Google uses a "timestamp_aligner" to translate timestamps from the
   // camera via TranslateTimestamp(); we should look at doing the same.  This
   // avoids sampling error when capturing frames, but google had to deal with some
   // broken cameras, include Logitech c920's IIRC.
 
-  CSFLogVerbose(LOGTAG, "%s (send SSRC %u (0x%x))", __FUNCTION__,
-                mSendStreamConfig.rtp.ssrcs.front(), mSendStreamConfig.rtp.ssrcs.front());
-
-  if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
-  // See if we need to recalculate what we're sending.
-    CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
-                  __FUNCTION__, frame.width(), frame.height());
-    MOZ_ASSERT(frame.width() != 0 && frame.height() != 0);
-    // Note coverity will flag this since it thinks they can be 0
-
-    MutexAutoLock lock(mCodecMutex);
-    mLastWidth = frame.width();
-    mLastHeight = frame.height();
-    SelectSendResolution(frame.width(), frame.height());
-  }
-
-  // adapt input video to wants of sink
-  if (!mVideoBroadcaster.frame_wanted()) {
-    return kMediaConduitNoError;
-  }
-
   int cropWidth;
   int cropHeight;
   int adaptedWidth;
   int adaptedHeight;
-  if (!mVideoAdapter->AdaptFrameResolution(
-        frame.width(), frame.height(),
-        frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
-        &cropWidth, &cropHeight, &adaptedWidth, &adaptedHeight)) {
-    // VideoAdapter dropped the frame.
-    return kMediaConduitNoError;
+  {
+    MutexAutoLock lock(mMutex);
+    CSFLogVerbose(LOGTAG, "WebrtcVideoConduit %p %s (send SSRC %u (0x%x))",
+                  this, __FUNCTION__,
+                  mSendStreamConfig.rtp.ssrcs.front(),
+                  mSendStreamConfig.rtp.ssrcs.front());
+
+    if (frame.width() != mLastWidth || frame.height() != mLastHeight) {
+    // See if we need to recalculate what we're sending.
+      CSFLogVerbose(LOGTAG, "%s: call SelectSendResolution with %ux%u",
+                    __FUNCTION__, frame.width(), frame.height());
+      MOZ_ASSERT(frame.width() != 0 && frame.height() != 0);
+      // Note coverity will flag this since it thinks they can be 0
+
+      mLastWidth = frame.width();
+      mLastHeight = frame.height();
+      SelectSendResolution(frame.width(), frame.height());
+    }
+
+    // adapt input video to wants of sink
+    if (!mVideoBroadcaster.frame_wanted()) {
+      return kMediaConduitNoError;
+    }
+
+    if (!mVideoAdapter->AdaptFrameResolution(
+          frame.width(), frame.height(),
+          frame.timestamp_us() * rtc::kNumNanosecsPerMicrosec,
+          &cropWidth, &cropHeight, &adaptedWidth, &adaptedHeight)) {
+      // VideoAdapter dropped the frame.
+      return kMediaConduitNoError;
+    }
   }
 
   int cropX = (frame.width() - cropWidth) / 2;
   int cropY = (frame.height() - cropHeight) / 2;
 
   rtc::scoped_refptr<webrtc::VideoFrameBuffer> buffer;
   if (adaptedWidth == frame.width() && adaptedHeight == frame.height()) {
     // No adaption - optimized path.
@@ -1860,30 +2186,31 @@ WebrtcVideoConduit::SendVideoFrame(const
     }
     i420Buffer->CropAndScaleFrom(*frame.video_frame_buffer(), cropX, cropY, cropWidth, cropHeight);
     buffer = i420Buffer;
   }
 
   mVideoBroadcaster.OnFrame(webrtc::VideoFrame(
       buffer, frame.timestamp(), frame.render_time_ms(), frame.rotation()));
 
-  mSendStreamStats.FrameDeliveredToEncoder();
+  mStsThread->Dispatch(NS_NewRunnableFunction(
+    "SendStreamStatistics::FrameDeliveredToEncoder",
+    [self = RefPtr<WebrtcVideoConduit>(this), this]()
+    {
+      mSendStreamStats.FrameDeliveredToEncoder();
+    }));
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 
 MediaConduitErrorCode
 WebrtcVideoConduit::DeliverPacket(const void* data, int len)
 {
-  // Media Engine should be receiving already.
-  if (!mCall) {
-    CSFLogError(LOGTAG, "Error: %s when not receiving", __FUNCTION__);
-    return kMediaConduitSessionNotInited;
-  }
+  ASSERT_ON_THREAD(mStsThread);
 
   // XXX we need to get passed the time the packet was received
   webrtc::PacketReceiver::DeliveryStatus status =
     mCall->Call()->Receiver()->DeliverPacket(webrtc::MediaType::VIDEO,
                                              static_cast<const uint8_t*>(data),
                                              len, webrtc::PacketTime());
 
   if (status != webrtc::PacketReceiver::DELIVERY_OK) {
@@ -1892,86 +2219,85 @@ WebrtcVideoConduit::DeliverPacket(const 
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ReceivedRTPPacket(const void* data, int len, uint32_t ssrc)
 {
+  ASSERT_ON_THREAD(mStsThread);
+
   if (mAllowSsrcChange || mWaitingForInitialSsrc) {
     // Handle the unknown ssrc (and ssrc-not-signaled case).
     // We can't just do this here; it has to happen on MainThread :-(
     // We also don't want to drop the packet, nor stall this thread, so we hold
     // the packet (and any following) for inserting once the SSRC is set.
-    bool queue = mRecvSSRCSetInProgress;
-    if (queue || mRecvSSRC != ssrc) {
+    if (mRecvSsrcSetInProgress || mRecvSSRC != ssrc) {
       // capture packet for insertion after ssrc is set -- do this before
       // sending the runnable, since it may pull from this.  Since it
       // dispatches back to us, it's less critial to do this here, but doesn't
       // hurt.
       UniquePtr<QueuedPacket> packet((QueuedPacket*) malloc(sizeof(QueuedPacket) + len-1));
       packet->mLen = len;
       memcpy(packet->mData, data, len);
       CSFLogDebug(LOGTAG, "queuing packet: seq# %u, Len %d ",
                   (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
-      if (queue) {
+      if (mRecvSsrcSetInProgress) {
         mQueuedPackets.AppendElement(std::move(packet));
         return kMediaConduitNoError;
       }
       // a new switch needs to be done
       // any queued packets are from a previous switch that hasn't completed
       // yet; drop them and only process the latest SSRC
       mQueuedPackets.Clear();
       mQueuedPackets.AppendElement(std::move(packet));
 
       CSFLogDebug(LOGTAG, "%s: switching from SSRC %u to %u", __FUNCTION__,
-                  mRecvSSRC, ssrc);
+                  static_cast<uint32_t>(mRecvSSRC), ssrc);
       // we "switch" here immediately, but buffer until the queue is released
       mRecvSSRC = ssrc;
-      mRecvSSRCSetInProgress = true;
-      queue = true;
+      mRecvSsrcSetInProgress = true;
 
       // Ensure lamba captures refs
-      RefPtr<WebrtcVideoConduit> self = this;
-      nsCOMPtr<nsIThread> thread;
-      if (NS_WARN_IF(NS_FAILED(NS_GetCurrentThread(getter_AddRefs(thread))))) {
-        return kMediaConduitRTPProcessingFailed;
-      }
-      NS_DispatchToMainThread(media::NewRunnableFrom([self, thread, ssrc]() mutable {
-            // Normally this is done in CreateOrUpdateMediaPipeline() for
-            // initial creation and renegotiation, but here we're rebuilding the
-            // Receive channel at a lower level.  This is needed whenever we're
-            // creating a GMPVideoCodec (in particular, H264) so it can communicate
-            // errors to the PC.
-            WebrtcGmpPCHandleSetter setter(self->mPCHandle);
-            self->SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream
-            // We want to unblock the queued packets on the original thread
-            thread->Dispatch(media::NewRunnableFrom([self, ssrc]() mutable {
-                  if (ssrc == self->mRecvSSRC) {
-                    // SSRC is set; insert queued packets
-                    for (auto& packet : self->mQueuedPackets) {
-                      CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ",
-                                  (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
-
-                      if (self->DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
-                        CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
-                        // Keep delivering and then clear the queue
-                      }
-                    }
-                    self->mQueuedPackets.Clear();
-                    // we don't leave inprogress until there are no changes in-flight
-                    self->mRecvSSRCSetInProgress = false;
-                  }
-                  // else this is an intermediate switch; another is in-flight
-
-                  return NS_OK;
-                }), NS_DISPATCH_NORMAL);
-            return NS_OK;
-          }));
+      NS_DispatchToMainThread(NS_NewRunnableFunction(
+        "WebrtcVideoConduit::WebrtcGmpPCHandleSetter",
+        [this, self = RefPtr<WebrtcVideoConduit>(this), ssrc]() mutable
+        {
+          // Normally this is done in CreateOrUpdateMediaPipeline() for
+          // initial creation and renegotiation, but here we're rebuilding the
+          // Receive channel at a lower level.  This is needed whenever we're
+          // creating a GMPVideoCodec (in particular, H264) so it can communicate
+          // errors to the PC.
+          WebrtcGmpPCHandleSetter setter(mPCHandle);
+          SetRemoteSSRC(ssrc); // this will likely re-create the VideoReceiveStream
+          // We want to unblock the queued packets on the original thread
+          mStsThread->Dispatch(NS_NewRunnableFunction(
+            "WebrtcVideoConduit::QueuedPacketsHandler",
+            [this, self, ssrc]() mutable
+            {
+              if (ssrc != mRecvSSRC) {
+                // this is an intermediate switch; another is in-flight
+                return;
+              }
+              // SSRC is set; insert queued packets
+              for (auto& packet : mQueuedPackets) {
+                CSFLogDebug(LOGTAG, "Inserting queued packets: seq# %u, Len %d ",
+                            (uint16_t)ntohs(((uint16_t*) packet->mData)[1]), packet->mLen);
+
+                if (DeliverPacket(packet->mData, packet->mLen) != kMediaConduitNoError) {
+                  CSFLogError(LOGTAG, "%s RTP Processing Failed", __FUNCTION__);
+                  // Keep delivering and then clear the queue
+                }
+              }
+              mQueuedPackets.Clear();
+              // we don't leave in-progress until there are no changes in-flight
+              mRecvSsrcSetInProgress = false;
+            }));
+        }));
       return kMediaConduitNoError;
     }
   }
 
   CSFLogVerbose(LOGTAG, "%s: seq# %u, Len %d, SSRC %u (0x%x) ", __FUNCTION__,
                 (uint16_t)ntohs(((uint16_t*) data)[1]), len,
                 (uint32_t) ntohl(((uint32_t*) data)[2]),
                 (uint32_t) ntohl(((uint32_t*) data)[2]));
@@ -1981,106 +2307,156 @@ WebrtcVideoConduit::ReceivedRTPPacket(co
     return kMediaConduitRTPProcessingFailed;
   }
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ReceivedRTCPPacket(const void* data, int len)
 {
+  ASSERT_ON_THREAD(mStsThread);
+
   CSFLogVerbose(LOGTAG, " %s Len %d ", __FUNCTION__, len);
 
   if (DeliverPacket(data, len) != kMediaConduitNoError) {
     CSFLogError(LOGTAG, "%s RTCP Processing Failed", __FUNCTION__);
     return kMediaConduitRTPProcessingFailed;
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::StopTransmitting()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
+  return StopTransmittingLocked();
+}
+
+MediaConduitErrorCode
+WebrtcVideoConduit::StartTransmitting()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
+  return StartTransmittingLocked();
+}
+
+MediaConduitErrorCode
+WebrtcVideoConduit::StopReceiving()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
+  return StopReceivingLocked();
+}
+
+MediaConduitErrorCode
+WebrtcVideoConduit::StartReceiving()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  MutexAutoLock lock(mMutex);
+
+  return StartReceivingLocked();
+}
+
+MediaConduitErrorCode
+WebrtcVideoConduit::StopTransmittingLocked()
+{
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   if (mEngineTransmitting) {
-    {
-      MutexAutoLock lock(mCodecMutex);
-      if (mSendStream) {
-          CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
-          mSendStream->Stop();
-      }
+    if (mSendStream) {
+        CSFLogDebug(LOGTAG, "%s Engine Already Sending. Attemping to Stop ", __FUNCTION__);
+        mSendStream->Stop();
     }
 
     mEngineTransmitting = false;
+    UpdateVideoStatsTimer();
   }
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
-WebrtcVideoConduit::StartTransmitting()
+WebrtcVideoConduit::StartTransmittingLocked()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   if (mEngineTransmitting) {
     return kMediaConduitNoError;
   }
 
   CSFLogDebug(LOGTAG, "%s Attemping to start... ", __FUNCTION__);
-  {
-    // Start Transmitting on the video engine
-    MutexAutoLock lock(mCodecMutex);
-
-    if (!mSendStream) {
-      MediaConduitErrorCode rval = CreateSendStream();
-      if (rval != kMediaConduitNoError) {
-        CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
-        return rval;
-      }
+  // Start Transmitting on the video engine
+  if (!mSendStream) {
+    MediaConduitErrorCode rval = CreateSendStream();
+    if (rval != kMediaConduitNoError) {
+      CSFLogError(LOGTAG, "%s Start Send Error %d ", __FUNCTION__, rval);
+      return rval;
     }
-
-    mSendStream->Start();
-    // XXX File a bug to consider hooking this up to the state of mtransport
-    mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
-    mEngineTransmitting = true;
   }
 
+  mSendStream->Start();
+  // XXX File a bug to consider hooking this up to the state of mtransport
+  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
+  mEngineTransmitting = true;
+  UpdateVideoStatsTimer();
+
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
-WebrtcVideoConduit::StopReceiving()
+WebrtcVideoConduit::StopReceivingLocked()
 {
-  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   // Are we receiving already? If so, stop receiving and playout
   // since we can't apply new recv codec when the engine is playing.
   if (mEngineReceiving && mRecvStream) {
     CSFLogDebug(LOGTAG, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
     mRecvStream->Stop();
   }
 
   mEngineReceiving = false;
+  UpdateVideoStatsTimer();
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
-WebrtcVideoConduit::StartReceiving()
+WebrtcVideoConduit::StartReceivingLocked()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+  mMutex.AssertCurrentThreadOwns();
+
   if (mEngineReceiving) {
     return kMediaConduitNoError;
   }
 
-  CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__, mRecvSSRC, mRecvSSRC);
-  {
-    // Start Receive on the video engine
-    MutexAutoLock lock(mCodecMutex);
-    MOZ_ASSERT(mRecvStream);
-
-    mRecvStream->Start();
-    // XXX File a bug to consider hooking this up to the state of mtransport
-    mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
-    mEngineReceiving = true;
+  CSFLogDebug(LOGTAG, "%s Attemping to start... (SSRC %u (0x%x))", __FUNCTION__,
+              static_cast<uint32_t>(mRecvSSRC), static_cast<uint32_t>(mRecvSSRC));
+  // Start Receiving on the video engine
+  if (!mRecvStream) {
+    MediaConduitErrorCode rval = CreateRecvStream();
+    if (rval != kMediaConduitNoError) {
+      CSFLogError(LOGTAG, "%s Start Receive Error %d ", __FUNCTION__, rval);
+      return rval;
+    }
   }
 
+  mRecvStream->Start();
+  // XXX File a bug to consider hooking this up to the state of mtransport
+  mCall->Call()->SignalChannelNetworkState(webrtc::MediaType::VIDEO, webrtc::kNetworkUp);
+  mEngineReceiving = true;
+  UpdateVideoStatsTimer();
+
   return kMediaConduitNoError;
 }
 
 // WebRTC::RTP Callback Implementation
 // Called on MSG thread
 bool
 WebrtcVideoConduit::SendRtp(const uint8_t* packet, size_t length,
                             const webrtc::PacketOptions& options)
@@ -2129,17 +2505,18 @@ WebrtcVideoConduit::SendRtcp(const uint8
   CSFLogError(LOGTAG, "%s RTCP Packet Send Failed ", __FUNCTION__);
   return false;
 }
 
 void
 WebrtcVideoConduit::OnFrame(const webrtc::VideoFrame& video_frame)
 {
   CSFLogVerbose(LOGTAG, "%s: recv SSRC %u (0x%x), size %ux%u", __FUNCTION__,
-                mRecvSSRC, mRecvSSRC, video_frame.width(), video_frame.height());
+                static_cast<uint32_t>(mRecvSSRC), static_cast<uint32_t>(mRecvSSRC),
+                video_frame.width(), video_frame.height());
   ReentrantMonitorAutoEnter enter(mTransportMonitor);
 
   if (!mRenderer) {
     CSFLogError(LOGTAG, "%s Renderer is NULL  ", __FUNCTION__);
     return;
   }
 
   if (mReceivingWidth != video_frame.width() ||
@@ -2162,99 +2539,65 @@ WebrtcVideoConduit::OnFrame(const webrtc
     }
   }
 
   mRenderer->RenderVideoFrame(*video_frame.video_frame_buffer(),
                               video_frame.timestamp(),
                               video_frame.render_time_ms());
 }
 
-// Compare lists of codecs
-bool
-WebrtcVideoConduit::CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
-                                    const nsTArray<UniquePtr<VideoCodecConfig>>& b)
-{
-  // return a != b;
-  // would work if UniquePtr<> operator== compared contents!
-  auto len = a.Length();
-  if (len != b.Length()) {
-    return true;
-  }
-
-  // XXX std::equal would work, if we could use it on this - fails for the
-  // same reason as above.  c++14 would let us pass a comparator function.
-  for (uint32_t i = 0; i < len; ++i) {
-    if (!(*a[i] == *b[i])) {
-      return true;
-    }
-  }
-
-  return false;
-}
-
-/**
- * Perform validation on the codecConfig to be applied
- * Verifies if the codec is already applied.
- */
-MediaConduitErrorCode
-WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo)
-{
-  if(!codecInfo) {
-    CSFLogError(LOGTAG, "%s Null CodecConfig ", __FUNCTION__);
-    return kMediaConduitMalformedArgument;
-  }
-
-  if((codecInfo->mName.empty()) ||
-     (codecInfo->mName.length() >= CODEC_PLNAME_SIZE)) {
-    CSFLogError(LOGTAG, "%s Invalid Payload Name Length ", __FUNCTION__);
-    return kMediaConduitMalformedArgument;
-  }
-
-  return kMediaConduitNoError;
-}
-
 void
 WebrtcVideoConduit::DumpCodecDB() const
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   for (auto& entry : mRecvCodecList) {
     CSFLogDebug(LOGTAG, "Payload Name: %s", entry->mName.c_str());
     CSFLogDebug(LOGTAG, "Payload Type: %d", entry->mType);
     CSFLogDebug(LOGTAG, "Payload Max Frame Size: %d", entry->mEncodingConstraints.maxFs);
     CSFLogDebug(LOGTAG, "Payload Max Frame Rate: %d", entry->mEncodingConstraints.maxFps);
   }
 }
 
 void
 WebrtcVideoConduit::VideoLatencyUpdate(uint64_t newSample)
 {
+  mTransportMonitor.AssertCurrentThreadIn();
+
   mVideoLatencyAvg = (sRoundingPadding * newSample + sAlphaNum * mVideoLatencyAvg) / sAlphaDen;
 }
 
 uint64_t
 WebrtcVideoConduit::MozVideoLatencyAvg()
 {
+  mTransportMonitor.AssertCurrentThreadIn();
+
   return mVideoLatencyAvg / sRoundingPadding;
 }
 
 uint64_t
 WebrtcVideoConduit::CodecPluginID()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   if (mSendCodecPlugin) {
     return mSendCodecPlugin->PluginID();
   }
   if (mRecvCodecPlugin) {
     return mRecvCodecPlugin->PluginID();
   }
 
   return 0;
 }
 
 bool
 WebrtcVideoConduit::RequiresNewSendStream(const VideoCodecConfig& newConfig) const
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   return !mCurSendCodecConfig
     || mCurSendCodecConfig->mName != newConfig.mName
     || mCurSendCodecConfig->mType != newConfig.mType
     || mCurSendCodecConfig->RtcpFbNackIsSet("") != newConfig.RtcpFbNackIsSet("")
     || mCurSendCodecConfig->RtcpFbFECIsSet() != newConfig.RtcpFbFECIsSet()
 #if 0
     // XXX Do we still want/need to do this?
     || (newConfig.mName == "H264" &&
@@ -2262,74 +2605,94 @@ WebrtcVideoConduit::RequiresNewSendStrea
 #endif
     ;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetEncoderSpecificSettings(
   rtc::scoped_refptr<webrtc::VideoEncoderConfig::EncoderSpecificSettings> aSettings)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.encoder_specific_settings = aSettings;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetVideoStreamFactory(rtc::scoped_refptr<WebrtcVideoConduit::VideoStreamFactory> aFactory)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.video_stream_factory = aFactory;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMinTransmitBitrateBps(
   int aXmitMinBps)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.min_transmit_bitrate_bps = aXmitMinBps;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetContentType(
   webrtc::VideoEncoderConfig::ContentType aContentType)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.content_type = aContentType;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetResolutionDivisor(
   unsigned char aDivisor)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.resolution_divisor = aDivisor;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::SetMaxEncodings(
   size_t aMaxStreams)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mConfig.number_of_streams = aMaxStreams;
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream(
   webrtc::VideoStream aStream)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mSimulcastStreams.push_back(SimulcastStreamConfig());
   MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams);
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::AddStream(
   webrtc::VideoStream aStream, const SimulcastStreamConfig& aSimulcastConfig)
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mSimulcastStreams.push_back(aSimulcastConfig);
   MOZ_ASSERT(mSimulcastStreams.size() <= mConfig.number_of_streams);
 }
 
 size_t
 WebrtcVideoConduit::VideoEncoderConfigBuilder::StreamCount() const
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   return mSimulcastStreams.size();
 }
 
 void
 WebrtcVideoConduit::VideoEncoderConfigBuilder::ClearStreams()
 {
+  MOZ_ASSERT(NS_IsMainThread());
+
   mSimulcastStreams.clear();
 }
 
 } // end namespace
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -109,16 +109,21 @@ public:
    */
   MediaConduitErrorCode ReceivedRTCPPacket(const void* data, int len) override;
 
   MediaConduitErrorCode StopTransmitting() override;
   MediaConduitErrorCode StartTransmitting() override;
   MediaConduitErrorCode StopReceiving() override;
   MediaConduitErrorCode StartReceiving() override;
 
+  MediaConduitErrorCode StopTransmittingLocked();
+  MediaConduitErrorCode StartTransmittingLocked();
+  MediaConduitErrorCode StopReceivingLocked();
+  MediaConduitErrorCode StartReceivingLocked();
+
   /**
    * Function to configure sending codec mode for different content
    */
   MediaConduitErrorCode ConfigureCodecMode(webrtc::VideoCodecMode) override;
 
    /**
    * Function to configure send codec for the video session
    * @param sendSessionConfig: CodecConfiguration
@@ -165,27 +170,16 @@ public:
    * and current available bandwidth.
    * @param width, height: dimensions of the frame
    * @param frame: optional frame to submit for encoding after reconfig
    */
   void SelectSendResolution(unsigned short width,
                             unsigned short height);
 
   /**
-   * Function to select and change the encoding frame rate based on incoming frame rate
-   * and max-mbps setting.
-   * @param current framerate
-   * @result new framerate
-   */
-  unsigned int SelectSendFrameRate(const VideoCodecConfig* codecConfig,
-                                   unsigned int old_framerate,
-                                   unsigned short sending_width,
-                                   unsigned short sending_height) const;
-
-  /**
    * Function to deliver a capture video frame for encoding and transport.
    * If the frame's timestamp is 0, it will be automatically generated.
    *
    * NOTE: ConfigureSendMediaCodec() must be called before this function can
    *       be invoked. This ensures the inserted video-frames can be
    *       transmitted by the conduit.
    */
   MediaConduitErrorCode SendVideoFrame(
@@ -224,16 +218,17 @@ public:
   void RemoveSink(rtc::VideoSinkInterface<webrtc::VideoFrame>* sink) override;
 
   void OnSinkWantsChanged(const rtc::VideoSinkWants& wants);
 
   uint64_t CodecPluginID() override;
 
   void SetPCHandle(const std::string& aPCHandle) override
   {
+    MOZ_ASSERT(NS_IsMainThread());
     mPCHandle = aPCHandle;
   }
 
   void DeleteStreams() override;
 
   bool Denoising() const {
     return mDenoising;
   }
@@ -246,36 +241,42 @@ public:
     return mTemporalLayers;
   }
 
   webrtc::VideoCodecMode CodecMode() const {
     return mCodecMode;
   }
 
   WebrtcVideoConduit(RefPtr<WebRtcCallWrapper> aCall,
-                     UniquePtr<cricket::VideoAdapter>&& aVideoAdapter);
+                     UniquePtr<cricket::VideoAdapter>&& aVideoAdapter,
+                     nsCOMPtr<nsIEventTarget> aStsThread);
   virtual ~WebrtcVideoConduit();
 
   MediaConduitErrorCode InitMain();
   virtual MediaConduitErrorCode Init();
 
-  std::vector<unsigned int> GetLocalSSRCs() const override;
+  std::vector<unsigned int> GetLocalSSRCs() override;
   bool SetLocalSSRCs(const std::vector<unsigned int>& ssrcs) override;
   bool GetRemoteSSRC(unsigned int* ssrc) override;
   bool SetRemoteSSRC(unsigned int ssrc) override;
   bool UnsetRemoteSSRC(uint32_t ssrc) override;
   bool SetLocalCNAME(const char* cname) override;
   bool SetLocalMID(const std::string& mid) override;
 
+  bool GetRemoteSSRCLocked(unsigned int* ssrc);
+  bool SetRemoteSSRCLocked(unsigned int ssrc);
+
   bool GetSendPacketTypeStats(
       webrtc::RtcpPacketTypeCounter* aPacketCounts) override;
 
   bool GetRecvPacketTypeStats(
       webrtc::RtcpPacketTypeCounter* aPacketCounts) override;
 
+  void PollStats();
+  void UpdateVideoStatsTimer();
   bool GetVideoEncoderStats(double* framerateMean,
                             double* framerateStdDev,
                             double* bitrateMean,
                             double* bitrateStdDev,
                             uint32_t* droppedFrames,
                             uint32_t* framesEncoded) override;
   bool GetVideoDecoderStats(double* framerateMean,
                             double* framerateStdDev,
@@ -294,92 +295,145 @@ public:
                              uint32_t* cumulativeLost,
                              int32_t* rttMs) override;
   bool GetRTCPSenderReport(DOMHighResTimeStamp* timestamp,
                            unsigned int* packetsSent,
                            uint64_t* bytesSent) override;
   uint64_t MozVideoLatencyAvg();
 
   void DisableSsrcChanges() override {
+    ASSERT_ON_THREAD(mStsThread);
     mAllowSsrcChange = false;
   }
 
 private:
   // Don't allow copying/assigning.
   WebrtcVideoConduit(const WebrtcVideoConduit&) = delete;
   void operator=(const WebrtcVideoConduit&) = delete;
 
-  /** Shared statistics for receive and transmit video streams
+  /**
+   * Statistics for the Call associated with this VideoConduit.
+   * Single threaded.
+   */
+  class CallStatistics {
+  public:
+    explicit CallStatistics(nsCOMPtr<nsIEventTarget> aStatsThread)
+      : mStatsThread(aStatsThread)
+    {}
+    void Update(const webrtc::Call::Stats& aStats);
+    int32_t RttMs() const;
+  protected:
+    const nsCOMPtr<nsIEventTarget> mStatsThread;
+  private:
+    int32_t mRttMs = 0;
+  };
+
+  /**
+   * Shared statistics for receive and transmit video streams.
+   * Single threaded.
    */
   class StreamStatistics {
   public:
-    void Update(const double aFrameRate, const double aBitrate);
+    explicit StreamStatistics(nsCOMPtr<nsIEventTarget> aStatsThread)
+      : mStatsThread(aStatsThread)
+    {}
+    void Update(const double aFrameRate, const double aBitrate,
+                const webrtc::RtcpPacketTypeCounter& aPacketCounts);
     /**
      * Returns gathered stream statistics
      * @param aOutFrMean: mean framerate
      * @param aOutFrStdDev: standard deviation of framerate
      * @param aOutBrMean: mean bitrate
      * @param aOutBrStdDev: standard deviation of bitrate
      */
     bool GetVideoStreamStats(double& aOutFrMean,
         double& aOutFrStdDev,
         double& aOutBrMean,
         double& aOutBrStdDev) const;
+    const webrtc::RtcpPacketTypeCounter& PacketCounts() const;
+    bool Active() const;
+    void SetActive(bool aActive);
+  protected:
+    const nsCOMPtr<nsIEventTarget> mStatsThread;
   private:
+    bool mActive = false;
     RunningStat mFrameRate;
     RunningStat mBitrate;
+    webrtc::RtcpPacketTypeCounter mPacketCounts;
   };
 
   /**
-   * Statistics for sending streams
+   * Statistics for sending streams. Single threaded.
    */
   class SendStreamStatistics : public StreamStatistics {
   public:
+    explicit SendStreamStatistics(nsCOMPtr<nsIEventTarget> aStatsThread)
+      : StreamStatistics(std::forward<nsCOMPtr<nsIEventTarget>>(aStatsThread))
+    {}
     /**
      * Returns the calculate number of dropped frames
-     * @param aOutDroppedFrames: the number of dropped frames
      */
-    void DroppedFrames(uint32_t& aOutDroppedFrames) const;
+    uint32_t DroppedFrames() const;
     /**
      * Returns the number of frames that have been encoded so far
      */
-    uint32_t FramesEncoded() const {
-      return mFramesEncoded;
-    }
-    void Update(const webrtc::VideoSendStream::Stats& aStats);
+    uint32_t FramesEncoded() const;
+    void Update(const webrtc::VideoSendStream::Stats& aStats,
+                uint32_t aConfiguredSsrc);
     /**
      * Call once for every frame delivered for encoding
      */
-    void FrameDeliveredToEncoder() { ++mFramesDeliveredToEncoder; }
+    void FrameDeliveredToEncoder();
+
+    bool SsrcFound() const;
+    uint32_t JitterMs() const;
+    uint32_t CumulativeLost() const;
+    uint64_t BytesReceived() const;
+    uint32_t PacketsReceived() const;
   private:
     uint32_t mDroppedFrames = 0;
     uint32_t mFramesEncoded = 0;
-    mozilla::Atomic<int32_t> mFramesDeliveredToEncoder;
+    int32_t mFramesDeliveredToEncoder;
+
+    bool mSsrcFound = false;
+    uint32_t mJitterMs = 0;
+    uint32_t mCumulativeLost = 0;
+    uint64_t mBytesReceived = 0;
+    uint32_t mPacketsReceived = 0;
   };
 
-  /** Statistics for receiving streams
+  /**
+   * Statistics for receiving streams. Single threaded.
    */
   class ReceiveStreamStatistics : public StreamStatistics {
   public:
+    explicit ReceiveStreamStatistics(nsCOMPtr<nsIEventTarget> aStatsThread)
+      : StreamStatistics(std::forward<nsCOMPtr<nsIEventTarget>>(aStatsThread))
+    {}
     /**
      * Returns the number of discarded packets
-     * @param aOutDiscPackets: number of discarded packets
+     */
+    uint32_t DiscardedPackets() const;
+    /**
+     * Returns the number of frames decoded
      */
-    void DiscardedPackets(uint32_t& aOutDiscPackets) const;
-   /**
-    * Returns the number of frames decoded
-    * @param aOutDiscPackets: number of frames decoded
-    */
-    void FramesDecoded(uint32_t& aFramesDecoded) const;
+    uint32_t FramesDecoded() const;
+    uint32_t JitterMs() const;
+    uint32_t CumulativeLost() const;
+    uint32_t Ssrc() const;
     void Update(const webrtc::VideoReceiveStream::Stats& aStats);
   private:
     uint32_t mDiscardedPackets = 0;
     uint32_t mFramesDecoded = 0;
+    uint32_t mJitterMs = 0;
+    uint32_t mCumulativeLost = 0;
+    uint32_t mSsrc = 0;
   };
-  /*
+
+  /**
    * Stores encoder configuration information and produces
    * a VideoEncoderConfig from it.
    */
   class VideoStreamFactory;
 
   class VideoEncoderConfigBuilder {
   public:
     /**
@@ -405,29 +459,19 @@ private:
     webrtc::VideoEncoderConfig CopyConfig() const { return mConfig.Copy(); }
     size_t NumberOfStreams() const { return mConfig.number_of_streams; }
 
   private:
     webrtc::VideoEncoderConfig mConfig;
     std::vector<SimulcastStreamConfig> mSimulcastStreams;
   };
 
-  //Function to convert between WebRTC and Conduit codec structures
-  void CodecConfigToWebRTCCodec(const VideoCodecConfig* codecInfo,
-                                webrtc::VideoCodec& cinst);
-
-  //Checks the codec to be applied
-  MediaConduitErrorCode ValidateCodecConfig(const VideoCodecConfig* codecInfo);
-
-  //Utility function to dump recv codec database
+  // Utility function to dump recv codec database
   void DumpCodecDB() const;
 
-  bool CodecsDifferent(const nsTArray<UniquePtr<VideoCodecConfig>>& a,
-                       const nsTArray<UniquePtr<VideoCodecConfig>>& b);
-
   // Factory class for VideoStreams... vie_encoder.cc will call this to reconfigure.
   // We need to give it access to the conduit to make it's decisions
   class VideoStreamFactory : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface
   {
   public:
     VideoStreamFactory(const std::string& aCodecName,
                        WebrtcVideoConduit *aConduit)
       : mCodecName(aCodecName),
@@ -454,107 +498,180 @@ private:
   webrtc::VideoEncoder* CreateEncoder(webrtc::VideoCodecType aType,
                                       bool enable_simulcast);
 
   MediaConduitErrorCode DeliverPacket(const void *data, int len);
 
   bool RequiresNewSendStream(const VideoCodecConfig& newConfig) const;
 
   mozilla::ReentrantMonitor mTransportMonitor;
+
+  // Accessed on any thread under mTransportMonitor.
   RefPtr<TransportInterface> mTransmitterTransport;
+
+  // Accessed on any thread under mTransportMonitor.
   RefPtr<TransportInterface> mReceiverTransport;
+
+  // Accessed on any thread under mTransportMonitor.
   RefPtr<mozilla::VideoRenderer> mRenderer;
 
-  // Frame adapter - handle sinks that we feed data to, and handle resolution
-  // changes needed for them.
+  // Accessed on any thread under mTransportMonitor.
+  unsigned short mReceivingWidth = 0;
+
+  // Accessed on any thread under mTransportMonitor.
+  unsigned short mReceivingHeight = 0;
+
+  // Socket transport service thread that runs stats queries against us. Any thread.
+  const nsCOMPtr<nsIEventTarget> mStsThread;
+
+  Mutex mMutex;
+
+  // Adapter handling resolution constraints from signaling and sinks.
+  // Written only on main thread. Guarded by mMutex, except for reads on main.
   UniquePtr<cricket::VideoAdapter> mVideoAdapter;
+
+  // Our own record of the sinks added to mVideoBroadcaster so we can support
+  // dispatching updates to sinks from off-main-thread. Main thread only.
+  AutoTArray<rtc::VideoSinkInterface<webrtc::VideoFrame>*, 1> mRegisteredSinks;
+
+  // Broadcaster that distributes our frames to all registered sinks.
+  // Sinks can only be added, updated and removed on main thread.
+  // Frames can be passed in on any thread.
   rtc::VideoBroadcaster mVideoBroadcaster;
 
   // Buffer pool used for scaling frames.
   // Accessed on the frame-feeding thread only.
   webrtc::I420BufferPool mBufferPool;
 
-  // Engine state we are concerned with.
+  // Engine state we are concerned with. Written on main thread and read anywhere.
   mozilla::Atomic<bool> mEngineTransmitting; // If true ==> Transmit Subsystem is up and running
   mozilla::Atomic<bool> mEngineReceiving;    // if true ==> Receive Subsystem up and running
 
-  int mCapId = -1; // Capturer for this conduit
-  //Local database of currently applied receive codecs
+  //Local database of currently applied receive codecs. Main thread only.
   nsTArray<UniquePtr<VideoCodecConfig>> mRecvCodecList;
 
-  // protects mCurSendCodecConfig, mVideoSend/RecvStreamStats, mSend/RecvStreams, mSendPacketCounts, mRecvPacketCounts
-  Mutex mCodecMutex;
+  // Written only on main thread. Guarded by mMutex, except for reads on main.
   nsAutoPtr<VideoCodecConfig> mCurSendCodecConfig;
+
+  // Bookkeeping of send stream stats. Sts thread only.
   SendStreamStatistics mSendStreamStats;
+
+  // Bookkeeping of receive stream stats. Sts thread only.
   ReceiveStreamStatistics mRecvStreamStats;
-  webrtc::RtcpPacketTypeCounter mSendPacketCounts;
-  webrtc::RtcpPacketTypeCounter mRecvPacketCounts;
+
+  // Bookkeeping of call stats. Sts thread only.
+  CallStatistics mCallStats;
 
-  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete these:
+  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this.
+  // Written only on main thread. Guarded by mMutex, except for reads on main.
   webrtc::VideoReceiveStream* mRecvStream = nullptr;
+
+  // Must call webrtc::Call::DestroyVideoReceive/SendStream to delete this.
+  // Written only on main thread. Guarded by mMutex, except for reads on main.
   webrtc::VideoSendStream* mSendStream = nullptr;
 
+  // Written on the frame feeding thread.
+  // Guarded by mMutex, except for reads on the frame feeding thread.
   unsigned short mLastWidth = 0;
+
+  // Written on the frame feeding thread.
+  // Guarded by mMutex, except for reads on the frame feeding thread.
   unsigned short mLastHeight = 0;
-  unsigned short mReceivingWidth = 0;
-  unsigned short mReceivingHeight = 0;
-  unsigned int   mSendingFramerate;
+
+  // Accessed under mMutex.
+  unsigned int mSendingFramerate;
+
+  // Written on main thread at creation,
+  // then written or read on any thread under mTransportMonitor.
   bool mVideoLatencyTestEnable = false;
+
+  // Accessed from any thread under mTransportMonitor.
   uint64_t mVideoLatencyAvg = 0;
-  // all in bps!
+
+  // All in bps.
+  // All written on main thread and guarded by mMutex, except for reads on main.
   int mMinBitrate = 0;
   int mStartBitrate = 0;
   int mPrefMaxBitrate = 0;
   int mNegotiatedMaxBitrate = 0;
   int mMinBitrateEstimate = 0;
+
+  // Set to true to force denoising on.
+  // Written at creation, then read anywhere.
   bool mDenoising = false;
-  bool mLockScaling = false; // for tests that care about output resolution
+
+  // Set to true to ignore sink wants (scaling due to bwe and cpu usage).
+  // Written at creation, then read anywhere.
+  bool mLockScaling = false;
+
+  // Written at creation, then read anywhere.
   uint8_t mSpatialLayers = 1;
+
+  // Written at creation, then read anywhere.
   uint8_t mTemporalLayers = 1;
 
-  rtc::VideoSinkWants mLastSinkWanted;
-
   static const unsigned int sAlphaNum = 7;
   static const unsigned int sAlphaDen = 8;
   static const unsigned int sRoundingPadding = 1024;
 
+  // Main thread only.
   RefPtr<WebrtcAudioConduit> mSyncedTo;
 
-  webrtc::VideoCodecMode mCodecMode;
+  // Written on main thread, read anywhere.
+  Atomic<webrtc::VideoCodecMode> mCodecMode;
 
   // WEBRTC.ORG Call API
-  RefPtr<WebRtcCallWrapper> mCall;
+  // Const so can be accessed on any thread. Most methods are called on
+  // main thread, though Receiver() is called on STS. This seems fine.
+  const RefPtr<WebRtcCallWrapper> mCall;
 
+  // Written only on main thread. Guarded by mMutex, except for reads on main.
   webrtc::VideoSendStream::Config mSendStreamConfig;
+
+  // Main thread only.
   VideoEncoderConfigBuilder mEncoderConfig;
 
+  // Main thread only.
   webrtc::VideoReceiveStream::Config mRecvStreamConfig;
 
   // Are SSRC changes without signaling allowed or not
+  // Accessed only on mStsThread.
   bool mAllowSsrcChange = true;
+
+  // Accessed only on mStsThread.
   bool mWaitingForInitialSsrc = true;
 
-  // accessed on creation, and when receiving packets
-  uint32_t mRecvSSRC = 0; // this can change during a stream!
+  // The runnable to set the SSRC is in-flight; queue packets until it's done.
+  // Accessed only on mStsThread.
+  bool mRecvSsrcSetInProgress = false;
 
-  // The runnable to set the SSRC is in-flight; queue packets until it's done.
-  bool mRecvSSRCSetInProgress = false;
+  // Accessed during configuration/signaling (main),
+  // and when receiving packets (sts).
+  Atomic<uint32_t> mRecvSSRC; // this can change during a stream!
+
   struct QueuedPacket {
     int mLen;
     uint8_t mData[1];
   };
+  // Accessed only on mStsThread.
   nsTArray<UniquePtr<QueuedPacket>> mQueuedPackets;
 
   // The lifetime of these codecs are maintained by the VideoConduit instance.
   // They are passed to the webrtc::VideoSendStream or VideoReceiveStream,
   // on construction.
   nsAutoPtr<webrtc::VideoEncoder> mEncoder; // only one encoder for now
   std::vector<std::unique_ptr<webrtc::VideoDecoder>> mDecoders;
+  // Main thread only
   WebrtcVideoEncoder* mSendCodecPlugin = nullptr;
+  // Main thread only
   WebrtcVideoDecoder* mRecvCodecPlugin = nullptr;
 
+  // Timer that updates video stats periodically. Main thread only.
   nsCOMPtr<nsITimer> mVideoStatsTimer;
+  // True if mVideoStatsTimer is running. Main thread only.
+  bool mVideoStatsTimerActive = false;
 
+  // Main thread only
   std::string mPCHandle;
 };
 } // end namespace
 
 #endif
--- a/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
+++ b/media/webrtc/signaling/src/peerconnection/TransceiverImpl.cpp
@@ -97,17 +97,17 @@ TransceiverImpl::InitAudio()
       mStsThread.get(),
       static_cast<AudioSessionConduit*>(mConduit.get()),
       mReceiveTrack);
 }
 
 void
 TransceiverImpl::InitVideo()
 {
-  mConduit = VideoSessionConduit::Create(mCallWrapper);
+  mConduit = VideoSessionConduit::Create(mCallWrapper, mStsThread);
 
   if (!mConduit) {
     MOZ_MTLOG(ML_ERROR, mPCHandle << "[" << mMid << "]: " << __FUNCTION__ <<
                         ": Failed to create VideoSessionConduit");
     // TODO(bug 1422897): We need a way to record this when it happens in the
     // wild.
     return;
   }
@@ -872,17 +872,19 @@ TransceiverImpl::UpdateVideoConduit()
   }
 
   // TODO (bug 1423041) once we pay attention to receiving MID's in RTP packets
   // (see bug 1405495) we could make this depending on the presence of MID in
   // the RTP packets instead of relying on the signaling.
   if (mJsepTransceiver->HasBundleLevel() &&
       (!mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() ||
        !mJsepTransceiver->mRecvTrack.GetNegotiatedDetails()->GetExt(webrtc::RtpExtension::kMIdUri))) {
-    conduit->DisableSsrcChanges();
+    mStsThread->Dispatch(NewRunnableMethod(
+      "VideoSessionConduit::DisableSsrcChanges",
+      conduit, &VideoSessionConduit::DisableSsrcChanges));
   }
 
   if (mJsepTransceiver->mRecvTrack.GetNegotiatedDetails() &&
       mJsepTransceiver->mRecvTrack.GetActive()) {
     const auto& details(*mJsepTransceiver->mRecvTrack.GetNegotiatedDetails());
 
     UpdateConduitRtpExtmap(details, LocalDirection::kRecv);