Bug 864654: merge backend for send and receive VideoConduits to match AudioConduits & cleanup r=ekr
author Randell Jesup <rjesup@jesup.org>
Wed, 23 Oct 2013 06:20:54 -0400
changeset 166610 10e5f4a307a1e650565192e60e503f2750061dab
parent 166609 645e0afbd37b6009ef63bb354416f620832a7e8a
child 166611 fddf6d5ca3081e48137acae4403ef874608406c8
push id 428
push user bbajaj@mozilla.com
push date Tue, 28 Jan 2014 00:16:25 +0000
treeherder mozilla-release@cd72a7ff3a75
reviewers ekr
bugs 864654
milestone 27.0a1
Bug 864654: merge backend for send and receive VideoConduits to match AudioConduits & cleanup r=ekr
media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
media/webrtc/signaling/src/media-conduit/VideoConduit.h
media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp
media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
media/webrtc/signaling/test/mediaconduit_unittests.cpp
--- a/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
+++ b/media/webrtc/signaling/src/media-conduit/MediaConduitInterface.h
@@ -146,20 +146,20 @@ public:
  * Refer to the comments on MediaSessionConduit above for overall
  * information
  */
 class VideoSessionConduit : public MediaSessionConduit
 {
 public:
   /**
    * Factory function to create and initialize a Video Conduit Session
-   * return: Concrete VideoSessionConduitObject or NULL in the case
+   * return: Concrete VideoSessionConduitObject or nullptr in the case
    *         of failure
    */
-  static RefPtr<VideoSessionConduit> Create();
+  static RefPtr<VideoSessionConduit> Create(VideoSessionConduit *aOther);
 
   enum FrameRequestType
   {
     FrameRequestNone,
     FrameRequestFir,
     FrameRequestPli,
     FrameRequestUnknown
   };
@@ -255,18 +255,18 @@ public:
  * Refer to the comments on MediaSessionConduit above for overall
  * information
  */
 class AudioSessionConduit : public MediaSessionConduit
 {
 public:
 
    /**
-    * Factory function to create and initialize a Video Conduit Session
-    * return: Concrete VideoSessionConduitObject or NULL in the case
+    * Factory function to create and initialize an Audio Conduit Session
+    * return: Concrete AudioSessionConduitObject or nullptr in the case
     *         of failure
     */
   static mozilla::RefPtr<AudioSessionConduit> Create(AudioSessionConduit *aOther);
 
   virtual ~AudioSessionConduit() {}
 
   virtual Type type() const { return AUDIO; }
 
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.cpp
@@ -5,147 +5,193 @@
 #include "CSFLog.h"
 #include "nspr.h"
 
 // For rtcp-fb constants
 #include "ccsdp.h"
 
 #include "VideoConduit.h"
 #include "AudioConduit.h"
+#include "nsThreadUtils.h"
+
 #include "webrtc/video_engine/include/vie_errors.h"
 
 #ifdef MOZ_WIDGET_ANDROID
 #include "AndroidJNIWrapper.h"
 #endif
 
 #include <algorithm>
 #include <math.h>
 
 namespace mozilla {
 
 static const char* logTag ="WebrtcVideoSessionConduit";
 
+// 32 bytes is what WebRTC CodecInst expects
 const unsigned int WebrtcVideoConduit::CODEC_PLNAME_SIZE = 32;
 
-//Factory Implementation
-mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create()
+/**
+ * Factory Method for VideoConduit
+ */
+mozilla::RefPtr<VideoSessionConduit> VideoSessionConduit::Create(VideoSessionConduit *aOther)
 {
+#ifdef MOZILLA_INTERNAL_API
+  // unit tests create their own "main thread"
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+#endif
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
 
   WebrtcVideoConduit* obj = new WebrtcVideoConduit();
-  if(obj->Init() != kMediaConduitNoError)
+  if(obj->Init(static_cast<WebrtcVideoConduit*>(aOther)) != kMediaConduitNoError)
   {
     CSFLogError(logTag,  "%s VideoConduit Init Failed ", __FUNCTION__);
     delete obj;
     return nullptr;
   }
   CSFLogDebug(logTag,  "%s Successfully created VideoConduit ", __FUNCTION__);
   return obj;
 }
 
 WebrtcVideoConduit::~WebrtcVideoConduit()
 {
+#ifdef MOZILLA_INTERNAL_API
+  // unit tests create their own "main thread"
+  NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
+#endif
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
 
   for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
   {
     delete mRecvCodecList[i];
   }
 
   delete mCurSendCodecConfig;
 
+  // The first one of a pair to be deleted shuts down media for both
   //Deal with External Capturer
   if(mPtrViECapture)
   {
-    mPtrViECapture->DisconnectCaptureDevice(mCapId);
-    mPtrViECapture->ReleaseCaptureDevice(mCapId);
-    mPtrExtCapture = nullptr;
+    if (!mShutDown) {
+      mPtrViECapture->DisconnectCaptureDevice(mCapId);
+      mPtrViECapture->ReleaseCaptureDevice(mCapId);
+      mPtrExtCapture = nullptr;
+      if (mOtherDirection)
+        mOtherDirection->mPtrExtCapture = nullptr;
+    }
     mPtrViECapture->Release();
   }
 
   //Deal with External Renderer
   if(mPtrViERender)
   {
-    if(mRenderer) {
-      mPtrViERender->StopRender(mChannel);
+    if (!mShutDown) {
+      if(mRenderer) {
+        mPtrViERender->StopRender(mChannel);
+      }
+      mPtrViERender->RemoveRenderer(mChannel);
     }
-    mPtrViERender->RemoveRenderer(mChannel);
     mPtrViERender->Release();
   }
 
   //Deal with the transport
   if(mPtrViENetwork)
   {
-    mPtrViENetwork->DeregisterSendTransport(mChannel);
+    if (!mShutDown) {
+      mPtrViENetwork->DeregisterSendTransport(mChannel);
+    }
     mPtrViENetwork->Release();
   }
 
   if(mPtrViECodec)
   {
     mPtrViECodec->Release();
   }
 
   if(mPtrViEBase)
   {
-    mPtrViEBase->StopSend(mChannel);
-    mPtrViEBase->StopReceive(mChannel);
-    SyncTo(nullptr);
-    mPtrViEBase->DeleteChannel(mChannel);
+    if (!mShutDown) {
+      mPtrViEBase->StopSend(mChannel);
+      mPtrViEBase->StopReceive(mChannel);
+      SyncTo(nullptr);
+      mPtrViEBase->DeleteChannel(mChannel);
+    }
     mPtrViEBase->Release();
   }
 
   if (mPtrRTP)
   {
     mPtrRTP->Release();
   }
-  if(mVideoEngine)
+
+  if (mOtherDirection)
   {
-    webrtc::VideoEngine::Delete(mVideoEngine);
+    // mOtherDirection owns these now!
+    mOtherDirection->mOtherDirection = nullptr;
+    // let the other side know we terminated the channel
+    mOtherDirection->mShutDown = true;
+    mVideoEngine = nullptr;
+  } else {
+    // only one opener can call Delete.  Have it be the last to close.
+    if(mVideoEngine)
+    {
+      webrtc::VideoEngine::Delete(mVideoEngine);
+    }
   }
 }
 
 /**
  * Peforms intialization of the MANDATORY components of the Video Engine
  */
-MediaConduitErrorCode WebrtcVideoConduit::Init()
+MediaConduitErrorCode WebrtcVideoConduit::Init(WebrtcVideoConduit *other)
 {
+  CSFLogDebug(logTag,  "%s this=%p other=%p", __FUNCTION__, this, other);
 
-  CSFLogDebug(logTag,  "%s ", __FUNCTION__);
+  if (other) {
+    MOZ_ASSERT(!other->mOtherDirection);
+    other->mOtherDirection = this;
+    mOtherDirection = other;
+
+    // only one can call ::Create()/GetVideoEngine()
+    MOZ_ASSERT(other->mVideoEngine);
+    mVideoEngine = other->mVideoEngine;
+  } else {
 
 #ifdef MOZ_WIDGET_ANDROID
-  jobject context = jsjni_GetGlobalContextRef();
+    jobject context = jsjni_GetGlobalContextRef();
 
-  // get the JVM
-  JavaVM *jvm = jsjni_GetVM();
+    // get the JVM
+    JavaVM *jvm = jsjni_GetVM();
 
-  if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
-    CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
-    return kMediaConduitSessionNotInited;
-  }
+    if (webrtc::VideoEngine::SetAndroidObjects(jvm, (void*)context) != 0) {
+      CSFLogError(logTag,  "%s: could not set Android objects", __FUNCTION__);
+      return kMediaConduitSessionNotInited;
+    }
 #endif
 
-  if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
-  {
-    CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
-     return kMediaConduitSessionNotInited;
-  }
+    // Per the WebRTC API, the calls below return NULL on failure
+    if( !(mVideoEngine = webrtc::VideoEngine::Create()) )
+    {
+      CSFLogError(logTag, "%s Unable to create video engine ", __FUNCTION__);
+      return kMediaConduitSessionNotInited;
+    }
 
-  PRLogModuleInfo *logs = GetWebRTCLogInfo();
-  if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
-    // no need to a critical section or lock here
-    gWebrtcTraceLoggingOn = 1;
+    PRLogModuleInfo *logs = GetWebRTCLogInfo();
+    if (!gWebrtcTraceLoggingOn && logs && logs->level > 0) {
+      // no need for a critical section or lock here
+      gWebrtcTraceLoggingOn = 1;
 
-    const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
-    if (!file) {
-      file = "WebRTC.log";
+      const char *file = PR_GetEnv("WEBRTC_TRACE_FILE");
+      if (!file) {
+        file = "WebRTC.log";
+      }
+      CSFLogDebug(logTag,  "%s Logging webrtc to %s level %d", __FUNCTION__,
+                  file, logs->level);
+      mVideoEngine->SetTraceFilter(logs->level);
+      mVideoEngine->SetTraceFile(file);
     }
-    CSFLogDebug(logTag,  "%s Logging webrtc to %s level %d", __FUNCTION__,
-                file, logs->level);
-    mVideoEngine->SetTraceFilter(logs->level);
-    mVideoEngine->SetTraceFile(file);
   }
 
   if( !(mPtrViEBase = ViEBase::GetInterface(mVideoEngine)))
   {
     CSFLogError(logTag, "%s Unable to get video base interface ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
@@ -174,97 +220,108 @@ MediaConduitErrorCode WebrtcVideoConduit
   }
 
   if( !(mPtrRTP = webrtc::ViERTP_RTCP::GetInterface(mVideoEngine)))
   {
     CSFLogError(logTag, "%s Unable to get video RTCP interface ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
-  CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
+  if (other) {
+    mChannel = other->mChannel;
+    mPtrExtCapture = other->mPtrExtCapture;
+    mCapId = other->mCapId;
+  } else {
+    CSFLogDebug(logTag, "%s Engine Created: Init'ng the interfaces ",__FUNCTION__);
+
+    if(mPtrViEBase->Init() == -1)
+    {
+      CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
+                  mPtrViEBase->LastError());
+      return kMediaConduitSessionNotInited;
+    }
 
-  if(mPtrViEBase->Init() == -1)
-  {
-    CSFLogError(logTag, " %s Video Engine Init Failed %d ",__FUNCTION__,
-                                               mPtrViEBase->LastError());
-    return kMediaConduitSessionNotInited;
-  }
+    if(mPtrViEBase->CreateChannel(mChannel) == -1)
+    {
+      CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
+                  mPtrViEBase->LastError());
+      return kMediaConduitChannelError;
+    }
 
+    if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
+    {
+      CSFLogError(logTag,  "%s ViENetwork Failed %d ", __FUNCTION__,
+                  mPtrViEBase->LastError());
+      return kMediaConduitTransportRegistrationFail;
+    }
 
-  if(mPtrViEBase->CreateChannel(mChannel) == -1)
-  {
-    CSFLogError(logTag, " %s Channel creation Failed %d ",__FUNCTION__,
-                                               mPtrViEBase->LastError());
-    return kMediaConduitChannelError;
-  }
+    if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
+                                                     mPtrExtCapture) == -1)
+    {
+      CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
+                  __FUNCTION__, mPtrViEBase->LastError());
+      return kMediaConduitCaptureError;
+    }
+
+    if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
+    {
+      CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
+                  __FUNCTION__,mPtrViEBase->LastError());
+      return kMediaConduitCaptureError;
+    }
 
-  if(mPtrViENetwork->RegisterSendTransport(mChannel, *this) == -1)
-  {
-    CSFLogError(logTag,  "%s ViENetwork Failed %d ", __FUNCTION__,
-                                          mPtrViEBase->LastError());
-    return kMediaConduitTransportRegistrationFail;
+    if(mPtrViERender->AddRenderer(mChannel,
+                                  webrtc::kVideoI420,
+                                  (webrtc::ExternalRenderer*) this) == -1)
+    {
+      CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
+      return kMediaConduitInvalidRenderer;
+    }
+    // Set up some parameters, per juberti. Set MTU.
+    if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
+    {
+      CSFLogError(logTag,  "%s MTU Failed %d ", __FUNCTION__,
+                  mPtrViEBase->LastError());
+      return kMediaConduitMTUError;
+    }
+    // Turn on RTCP and loss feedback reporting.
+    if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
+    {
+      CSFLogError(logTag,  "%s RTCPStatus Failed %d ", __FUNCTION__,
+                  mPtrViEBase->LastError());
+      return kMediaConduitRTCPStatusError;
+    }
   }
 
-
-  mPtrExtCapture = 0;
-
-  if(mPtrViECapture->AllocateExternalCaptureDevice(mCapId,
-                                                   mPtrExtCapture) == -1)
-  {
-    CSFLogError(logTag, "%s Unable to Allocate capture module: %d ",
-                               __FUNCTION__, mPtrViEBase->LastError());
-    return kMediaConduitCaptureError;
-  }
-
-  if(mPtrViECapture->ConnectCaptureDevice(mCapId,mChannel) == -1)
-  {
-    CSFLogError(logTag, "%s Unable to Connect capture module: %d ",
-                               __FUNCTION__,mPtrViEBase->LastError());
-    return kMediaConduitCaptureError;
-  }
-
-  if(mPtrViERender->AddRenderer(mChannel,
-                                webrtc::kVideoI420,
-                                (webrtc::ExternalRenderer*) this) == -1)
-  {
-    CSFLogError(logTag, "%s Failed to added external renderer ", __FUNCTION__);
-    return kMediaConduitInvalidRenderer;
-  }
-  // Set up some parameters, per juberti. Set MTU.
-  if(mPtrViENetwork->SetMTU(mChannel, 1200) != 0)
-  {
-    CSFLogError(logTag,  "%s MTU Failed %d ", __FUNCTION__,
-                mPtrViEBase->LastError());
-    return kMediaConduitMTUError;
-  }
-  // Turn on RTCP and loss feedback reporting.
-  if(mPtrRTP->SetRTCPStatus(mChannel, webrtc::kRtcpCompound_RFC4585) != 0)
-  {
-    CSFLogError(logTag,  "%s RTCPStatus Failed %d ", __FUNCTION__,
-                mPtrViEBase->LastError());
-    return kMediaConduitRTCPStatusError;
-  }
   CSFLogError(logTag, "%s Initialization Done", __FUNCTION__);
   return kMediaConduitNoError;
 }
 
 void
 WebrtcVideoConduit::SyncTo(WebrtcAudioConduit *aConduit)
 {
   CSFLogDebug(logTag, "%s Synced to %p", __FUNCTION__, aConduit);
 
+  // SyncTo(value) syncs to the AudioConduit, and if already synced replaces
+  // the current sync target.  SyncTo(nullptr) cancels any existing sync and
+  // releases the strong ref to AudioConduit.
   if (aConduit) {
     mPtrViEBase->SetVoiceEngine(aConduit->GetVoiceEngine());
     mPtrViEBase->ConnectAudioChannel(mChannel, aConduit->GetChannel());
     // NOTE: this means the VideoConduit will keep the AudioConduit alive!
-    mSyncedTo = aConduit;
-  } else if (mSyncedTo) {
+  } else if ((mOtherDirection && mOtherDirection->mSyncedTo) || mSyncedTo) {
     mPtrViEBase->DisconnectAudioChannel(mChannel);
     mPtrViEBase->SetVoiceEngine(nullptr);
-    mSyncedTo = nullptr;
+  }
+
+  // Now manage the shared sync reference (ugly)
+  if (mSyncedTo || !mOtherDirection ) {
+    mSyncedTo = aConduit;
+  } else {
+    mOtherDirection->mSyncedTo = aConduit;
   }
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::AttachRenderer(mozilla::RefPtr<VideoRenderer> aVideoRenderer)
 {
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
 
@@ -307,21 +364,20 @@ WebrtcVideoConduit::DetachRenderer()
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::AttachTransport(mozilla::RefPtr<TransportInterface> aTransport)
 {
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   if(!aTransport)
   {
-    CSFLogError(logTag, "%s NULL Transport ", __FUNCTION__);
-    MOZ_ASSERT(PR_FALSE);
+    CSFLogError(logTag, "%s NULL Transport", __FUNCTION__);
     return kMediaConduitInvalidTransport;
   }
-  //Assign the transport
+  // set the transport
   mTransport = aTransport;
   return kMediaConduitNoError;
 }
 
 /**
  * Note: Setting the send-codec on the Video Engine will restart the encoder,
  * sets up new SSRC and reset RTP_RTCP module with the new codec setting.
  */
@@ -355,17 +411,16 @@ WebrtcVideoConduit::ConfigureSendMediaCo
     if(mPtrViEBase->StopSend(mChannel) == -1)
     {
       CSFLogError(logTag, "%s StopSend() Failed %d ",__FUNCTION__,
                   mPtrViEBase->LastError());
       return kMediaConduitUnknownError;
     }
   }
 
-  //reset the flag
   mEngineTransmitting = false;
 
   // we should be good here to set the new codec.
   for(int idx=0; idx < mPtrViECodec->NumberOfCodecs(); idx++)
   {
     if(0 == mPtrViECodec->GetCodec(idx, video_codec))
     {
       payloadName = video_codec.plName;
@@ -411,17 +466,17 @@ WebrtcVideoConduit::ConfigureSendMediaCo
 
   if(mPtrViEBase->StartSend(mChannel) == -1)
   {
     CSFLogError(logTag, "%s Start Send Error %d ", __FUNCTION__,
                 mPtrViEBase->LastError());
     return kMediaConduitUnknownError;
   }
 
-  //Copy the applied codec for future reference
+  //Copy the applied config for future reference.
   delete mCurSendCodecConfig;
 
   mCurSendCodecConfig = new VideoCodecConfig(*codecConfig);
 
   mPtrRTP->SetRembStatus(mChannel, true, false);
 
   // by now we should be successfully started the transmission
   mEngineTransmitting = true;
@@ -433,16 +488,18 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
     const std::vector<VideoCodecConfig* >& codecConfigList)
 {
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
   MediaConduitErrorCode condError = kMediaConduitNoError;
   int error = 0; //webrtc engine errors
   bool success = false;
   std::string  payloadName;
 
+  // are we receiving already? If so, stop receiving and playout
+  // since we can't apply new recv codec when the engine is playing.
   if(mEngineReceiving)
   {
     CSFLogDebug(logTag, "%s Engine Already Receiving . Attemping to Stop ", __FUNCTION__);
     if(mPtrViEBase->StopReceive(mChannel) == -1)
     {
       error = mPtrViEBase->LastError();
       if(error == kViEBaseUnknownError)
       {
@@ -451,16 +508,18 @@ WebrtcVideoConduit::ConfigureRecvMediaCo
       } else {
         CSFLogError(logTag, "%s StopReceive() Failed %d ", __FUNCTION__,
                     mPtrViEBase->LastError());
         return kMediaConduitUnknownError;
       }
     }
   }
 
+  mEngineReceiving = false;
+
   if(codecConfigList.empty())
   {
     CSFLogError(logTag, "%s Zero number of codecs to configure", __FUNCTION__);
     return kMediaConduitMalformedArgument;
   }
 
   webrtc::ViEKeyFrameRequestMethod kf_request = webrtc::kViEKeyFrameRequestNone;
   bool use_nack_basic = false;
@@ -716,26 +775,24 @@ WebrtcVideoConduit::SelectSendResolution
       }
       CSFLogDebug(logTag, "%s: Encoder resolution changed to %ux%u",
                   __FUNCTION__, width, height);
     } // else no change; mSendingWidth likely was 0
   }
   return true;
 }
 
-
 MediaConduitErrorCode
 WebrtcVideoConduit::SendVideoFrame(unsigned char* video_frame,
                                    unsigned int video_frame_length,
                                    unsigned short width,
                                    unsigned short height,
                                    VideoType video_type,
                                    uint64_t capture_time)
 {
-
   CSFLogDebug(logTag,  "%s ", __FUNCTION__);
 
   //check for  the parameters sanity
   if(!video_frame || video_frame_length == 0 ||
      width == 0 || height == 0)
   {
     CSFLogError(logTag,  "%s Invalid Parameters ",__FUNCTION__);
     MOZ_ASSERT(PR_FALSE);
@@ -767,114 +824,135 @@ WebrtcVideoConduit::SendVideoFrame(unsig
   MOZ_ASSERT(!(height & 1));
 
   if (!SelectSendResolution(width, height))
   {
     return kMediaConduitCaptureError;
   }
 
   //insert the frame to video engine in I420 format only
+  MOZ_ASSERT(mPtrExtCapture);
   if(mPtrExtCapture->IncomingFrame(video_frame,
                                    video_frame_length,
                                    width, height,
                                    type,
                                    (unsigned long long)capture_time) == -1)
   {
     CSFLogError(logTag,  "%s IncomingFrame Failed %d ", __FUNCTION__,
                                             mPtrViEBase->LastError());
     return kMediaConduitCaptureError;
   }
 
-  CSFLogError(logTag, "%s Inserted A Frame", __FUNCTION__);
+  CSFLogDebug(logTag, "%s Inserted a frame", __FUNCTION__);
   return kMediaConduitNoError;
 }
 
 // Transport Layer Callbacks
 MediaConduitErrorCode
 WebrtcVideoConduit::ReceivedRTPPacket(const void *data, int len)
 {
-  CSFLogError(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
+  CSFLogDebug(logTag, "%s: Channel %d, Len %d ", __FUNCTION__, mChannel, len);
 
   // Media Engine should be receiving already.
   if(mEngineReceiving)
   {
     // let the engine know of a RTP packet to decode
     if(mPtrViENetwork->ReceivedRTPPacket(mChannel,data,len) == -1)
     {
       int error = mPtrViEBase->LastError();
       CSFLogError(logTag, "%s RTP Processing Failed %d ", __FUNCTION__, error);
       if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
       {
         return kMediaConduitRTPProcessingFailed;
       }
       return kMediaConduitRTPRTCPModuleError;
     }
   } else {
-    CSFLogError(logTag, "%s Engine Error: Not Receiving !!! ", __FUNCTION__);
+    CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
   return kMediaConduitNoError;
 }
 
 MediaConduitErrorCode
 WebrtcVideoConduit::ReceivedRTCPPacket(const void *data, int len)
 {
-  CSFLogError(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
+  CSFLogDebug(logTag, " %s Channel %d, Len %d ", __FUNCTION__, mChannel, len);
 
   //Media Engine should be receiving already
   if(mEngineTransmitting)
   {
-    //let the engine know of RTCP packet to decode.
     if(mPtrViENetwork->ReceivedRTCPPacket(mChannel,data,len) == -1)
     {
       int error = mPtrViEBase->LastError();
       CSFLogError(logTag, "%s RTP Processing Failed %d", __FUNCTION__, error);
       if(error >= kViERtpRtcpInvalidChannelId && error <= kViERtpRtcpRtcpDisabled)
       {
         return kMediaConduitRTPProcessingFailed;
       }
       return kMediaConduitRTPRTCPModuleError;
     }
   } else {
-    CSFLogError(logTag, "%s: Engine Error: Not Receiving", __FUNCTION__);
+    CSFLogError(logTag, "Error: %s when not receiving", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
   return kMediaConduitNoError;
 }
 
 //WebRTC::RTP Callback Implementation
 int WebrtcVideoConduit::SendPacket(int channel, const void* data, int len)
 {
-  CSFLogError(logTag, "%s Channel %d, len %d ", __FUNCTION__, channel, len);
+  CSFLogDebug(logTag,  "%s : channel %d len %d %s", __FUNCTION__, channel, len,
+              (mEngineReceiving && mOtherDirection) ? "(using mOtherDirection)" : "");
 
-  if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
+  if (mEngineReceiving)
   {
-    CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
-    return len;
+    if (mOtherDirection)
+    {
+      return mOtherDirection->SendPacket(channel, data, len);
+    }
+    CSFLogDebug(logTag,  "%s : Asked to send RTP without an RTP sender on channel %d",
+                __FUNCTION__, channel);
+    return -1;
   } else {
-    CSFLogError(logTag, "%s  Failed", __FUNCTION__);
-    return -1;
+    if(mTransport && (mTransport->SendRtpPacket(data, len) == NS_OK))
+    {
+      CSFLogDebug(logTag, "%s Sent RTP Packet ", __FUNCTION__);
+      return len;
+    } else {
+      CSFLogError(logTag, "%s RTP Packet Send Failed ", __FUNCTION__);
+      return -1;
+    }
   }
 }
 
 int WebrtcVideoConduit::SendRTCPPacket(int channel, const void* data, int len)
 {
-  CSFLogError(logTag,  "%s : channel %d , len %d ", __FUNCTION__, channel,len);
+  CSFLogDebug(logTag,  "%s : channel %d , len %d ", __FUNCTION__, channel,len);
+
+  if (mEngineTransmitting)
+  {
+    if (mOtherDirection)
+    {
+      return mOtherDirection->SendRTCPPacket(channel, data, len);
+    }
+  }
 
-  // can't enable this assertion, because we do.  Suppress it
-  // NS_ASSERTION(mEngineReceiving,"We shouldn't send RTCP on the receiver side");
-  if(mEngineReceiving && mTransport && (mTransport->SendRtcpPacket(data, len) == NS_OK))
-   {
-      CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
-      return len;
-   } else {
-      CSFLogError(logTag, "%s Failed", __FUNCTION__);
-      return -1;
-   }
+  // We come here if we have only one pipeline/conduit setup,
+  // such as for unidirectional streams.
+  // We also end up here if we are receiving (e.g. for RTCP receiver reports).
+  if(mTransport && mTransport->SendRtcpPacket(data, len) == NS_OK)
+  {
+    CSFLogDebug(logTag, "%s Sent RTCP Packet ", __FUNCTION__);
+    return len;
+  } else {
+    CSFLogError(logTag, "%s RTCP Packet Send Failed ", __FUNCTION__);
+    return -1;
+  }
 }
 
 // WebRTC::ExternalMedia Implementation
 int
 WebrtcVideoConduit::FrameSizeChange(unsigned int width,
                                     unsigned int height,
                                     unsigned int numStreams)
 {
@@ -922,44 +1000,25 @@ WebrtcVideoConduit::CodecConfigToWebRTCC
   {
     cinst.maxFramerate = codecInfo->mMaxFrameRate;
   }
   cinst.minBitrate = 200;
   cinst.startBitrate = 300;
   cinst.maxBitrate = 2000;
 }
 
+//Copy the codec passed into Conduit's database
 bool
 WebrtcVideoConduit::CopyCodecToDB(const VideoCodecConfig* codecInfo)
 {
   VideoCodecConfig* cdcConfig = new VideoCodecConfig(*codecInfo);
   mRecvCodecList.push_back(cdcConfig);
   return true;
 }
 
-/**
- * Checks if the codec is already in Conduit's database
- */
-
-bool
-WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const
-{
-  //the db should have atleast one codec
-  for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
-  {
-    if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo))
-    {
-      //match
-      return true;
-    }
-  }
-  //no match
-  return false;
-}
-
 bool
 WebrtcVideoConduit::CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
                                         const VideoCodecConfig* codecInfo) const
 {
   if(!curCodecConfig)
   {
     return false;
   }
@@ -971,25 +1030,44 @@ WebrtcVideoConduit::CheckCodecsForMatch(
   {
     return true;
   }
 
   return false;
 }
 
 /**
+ * Checks if the codec is already in Conduit's database
+ */
+bool
+WebrtcVideoConduit::CheckCodecForMatch(const VideoCodecConfig* codecInfo) const
+{
+  //the db should have atleast one codec
+  for(std::vector<VideoCodecConfig*>::size_type i=0;i < mRecvCodecList.size();i++)
+  {
+    if(CheckCodecsForMatch(mRecvCodecList[i],codecInfo))
+    {
+      //match
+      return true;
+    }
+  }
+  //no match or empty local db
+  return false;
+}
+
+/**
  * Perform validation on the codecConfig to be applied
  * Verifies if the codec is already applied.
  */
-
 MediaConduitErrorCode
 WebrtcVideoConduit::ValidateCodecConfig(const VideoCodecConfig* codecInfo,
-                                         bool send) const
+                                        bool send) const
 {
   bool codecAppliedAlready = false;
+
   if(!codecInfo)
   {
     CSFLogError(logTag, "%s Null CodecConfig ", __FUNCTION__);
     return kMediaConduitMalformedArgument;
   }
 
   if((codecInfo->mName.empty()) ||
      (codecInfo->mName.length() >= CODEC_PLNAME_SIZE))
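
The destructor changes above implement a shutdown hand-off between the two directions. A small standalone model of that hand-off (illustrative names only, no webrtc types): the first conduit of a pair to be destroyed tears down the shared channel and marks the survivor as shut down, and only the last one left deletes the shared engine.

#include <cstdio>

struct Engine { /* stands in for the shared webrtc::VideoEngine backend */ };

class PairedConduit {
public:
  // aOther == nullptr: first direction, creates the engine.
  // aOther != nullptr: second direction, shares the first one's engine.
  explicit PairedConduit(PairedConduit* aOther)
      : mOtherDirection(aOther),
        mEngine(aOther ? aOther->mEngine : new Engine()) {
    if (aOther) {
      aOther->mOtherDirection = this;  // link the pair both ways
    }
  }

  ~PairedConduit() {
    if (!mShutDown) {
      std::puts("tearing down the shared channel (first of the pair to go)");
    }
    if (mOtherDirection) {
      // Hand the engine to the survivor; it must not touch the channel again.
      mOtherDirection->mOtherDirection = nullptr;
      mOtherDirection->mShutDown = true;
      mEngine = nullptr;
    } else {
      // Last conduit standing deletes the shared engine.
      delete mEngine;
    }
  }

private:
  PairedConduit* mOtherDirection = nullptr;
  bool mShutDown = false;  // set when the other direction already tore down
  Engine* mEngine = nullptr;
};

int main() {
  PairedConduit* tx = new PairedConduit(nullptr);  // creates the engine
  PairedConduit* rx = new PairedConduit(tx);       // shares it
  delete rx;  // tears down the channel, flags tx as shut down
  delete tx;  // last owner deletes the engine
  return 0;
}
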
--- a/media/webrtc/signaling/src/media-conduit/VideoConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/VideoConduit.h
@@ -38,19 +38,17 @@ class WebrtcAudioConduit;
 /**
  * Concrete class for Video session. Hooks up
  *  - media-source and target to external transport
  */
 class WebrtcVideoConduit:public VideoSessionConduit
                          ,public webrtc::Transport
                          ,public webrtc::ExternalRenderer
 {
-
 public:
-
   //VoiceEngine defined constant for Payload Name Size.
   static const unsigned int CODEC_PLNAME_SIZE;
 
   /**
    * Set up A/V sync between this (incoming) VideoConduit and an audio conduit.
    */
   void SyncTo(WebrtcAudioConduit *aConduit);
 
@@ -93,17 +91,17 @@ public:
    *          On failure, video engine transmit functionality is disabled.
    * NOTE: This API can be invoked multiple time. Invoking this API may involve restarting
    *        transmission sub-system on the engine.
    */
    virtual MediaConduitErrorCode ConfigureRecvMediaCodecs(
                                const std::vector<VideoCodecConfig* >& codecConfigList);
 
   /**
-   * Register External Transport to this Conduit. RTP and RTCP frames from the VoiceEnigne
+   * Register Transport for this Conduit. RTP and RTCP frames from the VideoEngine
    * shall be passed to the registered transport for transporting externally.
    */
   virtual MediaConduitErrorCode AttachTransport(mozilla::RefPtr<TransportInterface> aTransport);
 
   /**
    * Function to select and change the encoding resolution based on incoming frame size
    * and current available bandwidth.
    * @param width, height: dimensions of the frame
@@ -128,23 +126,23 @@ public:
                                                 unsigned short height,
                                                 VideoType video_type,
                                                 uint64_t capture_time);
 
 
 
   /**
    * Webrtc transport implementation to send and receive RTP packet.
-   * AudioConduit registers itself as ExternalTransport to the VideoEngine
+   * VideoConduit registers itself as ExternalTransport to the VideoEngine
    */
   virtual int SendPacket(int channel, const void *data, int len) ;
 
   /**
    * Webrtc transport implementation to send and receive RTCP packet.
-   * AudioConduit registers itself as ExternalTransport to the VideoEngine
+   * VideoConduit registers itself as ExternalTransport to the VideoEngine
    */
   virtual int SendRTCPPacket(int channel, const void *data, int len) ;
 
 
   /**
    * Webrtc External Renderer Implementation APIs.
    * Raw I420 Frames are delivred to the VideoConduit by the VideoEngine
    */
@@ -170,16 +168,18 @@ public:
   unsigned int SendingMaxFr() {
     if(mCurSendCodecConfig) {
       return mCurSendCodecConfig->mMaxFrameRate;
     }
     return 0;
   }
 
   WebrtcVideoConduit():
+                      mOtherDirection(nullptr),
+                      mShutDown(false),
                       mVideoEngine(nullptr),
                       mTransport(nullptr),
                       mRenderer(nullptr),
                       mPtrViEBase(nullptr),
                       mPtrViECapture(nullptr),
                       mPtrViECodec(nullptr),
                       mPtrViENetwork(nullptr),
                       mPtrViERender(nullptr),
@@ -190,22 +190,22 @@ public:
                       mChannel(-1),
                       mCapId(-1),
                       mCurSendCodecConfig(nullptr),
                       mSendingWidth(0),
                       mSendingHeight(0)
   {
   }
 
-
   virtual ~WebrtcVideoConduit() ;
 
-
+  MediaConduitErrorCode Init(WebrtcVideoConduit *other);
 
-  MediaConduitErrorCode Init();
+  int GetChannel() { return mChannel; }
+  webrtc::VideoEngine* GetVideoEngine() { return mVideoEngine; }
 
 private:
 
   WebrtcVideoConduit(const WebrtcVideoConduit& other) MOZ_DELETE;
   void operator=(const WebrtcVideoConduit& other) MOZ_DELETE;
 
   //Local database of currently applied receive codecs
   typedef std::vector<VideoCodecConfig* > RecvCodecList;
@@ -223,40 +223,47 @@ private:
   bool CheckCodecsForMatch(const VideoCodecConfig* curCodecConfig,
                            const VideoCodecConfig* codecInfo) const;
 
   //Checks the codec to be applied
   MediaConduitErrorCode ValidateCodecConfig(const VideoCodecConfig* codecInfo, bool send) const;
 
   //Utility function to dump recv codec database
   void DumpCodecDB() const;
-  webrtc::VideoEngine* mVideoEngine;
 
+  // The two sides of a send/receive pair of conduits each keep a pointer to the other.
+  // They also share a single VideoEngine and mChannel.  Shutdown must be coordinated
+  // carefully to avoid double-freeing or accessing after one frees.
+  WebrtcVideoConduit*  mOtherDirection;
+  // The other side has shut down our mChannel and related items already
+  bool mShutDown;
+
+  // A few of these are shared by both directions.  They're released by the last
+  // conduit to die.
+  webrtc::VideoEngine* mVideoEngine;          // shared
   mozilla::RefPtr<TransportInterface> mTransport;
   mozilla::RefPtr<VideoRenderer> mRenderer;
 
   webrtc::ViEBase* mPtrViEBase;
   webrtc::ViECapture* mPtrViECapture;
   webrtc::ViECodec* mPtrViECodec;
   webrtc::ViENetwork* mPtrViENetwork;
   webrtc::ViERender* mPtrViERender;
-  webrtc::ViEExternalCapture*  mPtrExtCapture;
+  webrtc::ViEExternalCapture*  mPtrExtCapture; // shared
   webrtc::ViERTP_RTCP* mPtrRTP;
 
   // Engine state we are concerned with.
   bool mEngineTransmitting; //If true ==> Transmit Sub-system is up and running
   bool mEngineReceiving;    // if true ==> Receive Sus-sysmtem up and running
 
   int mChannel; // Video Channel for this conduit
   int mCapId;   // Capturer for this conduit
   RecvCodecList    mRecvCodecList;
   VideoCodecConfig* mCurSendCodecConfig;
   unsigned short mSendingWidth;
   unsigned short mSendingHeight;
 
   mozilla::RefPtr<WebrtcAudioConduit> mSyncedTo;
 };
 
-
-
 } // end namespace
 
 #endif
--- a/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp
+++ b/media/webrtc/signaling/src/media/VcmSIPCCBinding.cpp
@@ -1568,21 +1568,24 @@ static int vcmRxStartICE_m(cc_mcapid_t m
       return VCM_ERROR;
     }
   }
 
   if (CC_IS_AUDIO(mcap_id)) {
     std::vector<mozilla::AudioCodecConfig *> configs;
 
     // Instantiate an appropriate conduit
-    mozilla::RefPtr<mozilla::AudioSessionConduit> tx_conduit =
+    mozilla::RefPtr<mozilla::MediaSessionConduit> tx_conduit =
       pc.impl()->media()->GetConduit(level, false);
-
+    MOZ_ASSERT_IF(tx_conduit, tx_conduit->type() == MediaSessionConduit::AUDIO);
+
+    // The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
+    // and are responsible for cleanly shutting down.
     mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
-                    mozilla::AudioSessionConduit::Create(tx_conduit);
+      mozilla::AudioSessionConduit::Create(static_cast<AudioSessionConduit *>(tx_conduit.get()));
     if(!conduit)
       return VCM_ERROR;
 
     pc.impl()->media()->AddConduit(level, true, conduit);
 
     mozilla::AudioCodecConfig *config_raw;
 
     for(int i=0; i <num_payloads ; i++)
@@ -1620,21 +1623,29 @@ static int vcmRxStartICE_m(cc_mcapid_t m
     CSFLogDebug(logTag, "Created audio pipeline %p, conduit=%p, pc_stream=%d pc_track=%d",
                 pipeline.get(), conduit.get(), pc_stream_id, pc_track_id);
 
     stream->StorePipeline(pc_track_id, false, pipeline);
   } else if (CC_IS_VIDEO(mcap_id)) {
 
     std::vector<mozilla::VideoCodecConfig *> configs;
     // Instantiate an appropriate conduit
+    mozilla::RefPtr<mozilla::MediaSessionConduit> tx_conduit =
+      pc.impl()->media()->GetConduit(level, false);
+    MOZ_ASSERT_IF(tx_conduit, tx_conduit->type() == MediaSessionConduit::VIDEO);
+
+    // The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
+    // and are responsible for cleanly shutting down.
     mozilla::RefPtr<mozilla::VideoSessionConduit> conduit =
-             mozilla::VideoSessionConduit::Create();
+      mozilla::VideoSessionConduit::Create(static_cast<VideoSessionConduit *>(tx_conduit.get()));
     if(!conduit)
       return VCM_ERROR;
 
+    pc.impl()->media()->AddConduit(level, true, conduit);
+
     mozilla::VideoCodecConfig *config_raw;
 
     for(int i=0; i <num_payloads; i++)
     {
       config_raw = new mozilla::VideoCodecConfig(
         payloads[i].remote_rtp_pt,
         ccsdpCodecName(payloads[i].codec_type),
         payloads[i].video.rtcp_fb_types);
@@ -2227,21 +2238,24 @@ static int vcmTxStartICE_m(cc_mcapid_t m
       payload->audio.packet_size,
       payload->audio.channels,
       payload->audio.bitrate);
 
     // Take possession of this pointer
     mozilla::ScopedDeletePtr<mozilla::AudioCodecConfig> config(config_raw);
 
     // Instantiate an appropriate conduit
-    mozilla::RefPtr<mozilla::AudioSessionConduit> rx_conduit =
+    mozilla::RefPtr<mozilla::MediaSessionConduit> rx_conduit =
       pc.impl()->media()->GetConduit(level, true);
-
+    MOZ_ASSERT_IF(rx_conduit, rx_conduit->type() == MediaSessionConduit::AUDIO);
+
+    // The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
+    // and are responsible for cleanly shutting down.
     mozilla::RefPtr<mozilla::AudioSessionConduit> conduit =
-      mozilla::AudioSessionConduit::Create(rx_conduit);
+      mozilla::AudioSessionConduit::Create(static_cast<AudioSessionConduit *>(rx_conduit.get()));
 
     if (!conduit || conduit->ConfigureSendMediaCodec(config))
       return VCM_ERROR;
 
     pc.impl()->media()->AddConduit(level, false, conduit);
 
     mozilla::RefPtr<mozilla::MediaPipeline> pipeline =
         new mozilla::MediaPipelineTransmit(
@@ -2272,23 +2286,31 @@ static int vcmTxStartICE_m(cc_mcapid_t m
       payload->video.rtcp_fb_types,
       payload->video.max_fs,
       payload->video.max_fr);
 
     // Take possession of this pointer
     mozilla::ScopedDeletePtr<mozilla::VideoCodecConfig> config(config_raw);
 
     // Instantiate an appropriate conduit
+    mozilla::RefPtr<mozilla::MediaSessionConduit> rx_conduit =
+      pc.impl()->media()->GetConduit(level, true);
+    MOZ_ASSERT_IF(rx_conduit, rx_conduit->type() == MediaSessionConduit::VIDEO);
+
+    // The two sides of a send/receive pair of conduits each keep a raw pointer to the other,
+    // and are responsible for cleanly shutting down.
     mozilla::RefPtr<mozilla::VideoSessionConduit> conduit =
-      mozilla::VideoSessionConduit::Create();
+      mozilla::VideoSessionConduit::Create(static_cast<VideoSessionConduit *>(rx_conduit.get()));
 
     // Find the appropriate media conduit config
     if (!conduit || conduit->ConfigureSendMediaCodec(config))
       return VCM_ERROR;
 
+    pc.impl()->media()->AddConduit(level, false, conduit);
+
     // Now we have all the pieces, create the pipeline
     mozilla::RefPtr<mozilla::MediaPipeline> pipeline =
         new mozilla::MediaPipelineTransmit(
             pc.impl()->GetHandle(),
             pc.impl()->GetMainThread().get(),
             pc.impl()->GetSTSThread(),
             stream->GetMediaStream(),
             pc_track_id,
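
A condensed, standalone model of the lookup-and-pair flow above (toy types; GetConduit/AddConduit/Create mirror the calls in vcmRxStartICE_m and vcmTxStartICE_m): whichever direction is set up second fetches the opposite-direction conduit that already exists and passes it to Create(), so the pairing works regardless of whether receive or transmit starts first.

#include <cassert>
#include <map>
#include <memory>
#include <utility>

// Toy conduit: only the pairing link matters here.
struct Conduit {
  Conduit* paired = nullptr;
  static std::shared_ptr<Conduit> Create(Conduit* aOther) {
    std::shared_ptr<Conduit> c = std::make_shared<Conduit>();
    if (aOther) {
      c->paired = aOther;
      aOther->paired = c.get();
    }
    return c;
  }
};

// Toy stand-in for PeerConnectionMedia's conduit registry.
class Media {
public:
  std::shared_ptr<Conduit> GetConduit(int level, bool receive) {
    auto it = mConduits.find({level, receive});
    return it == mConduits.end() ? nullptr : it->second;
  }
  void AddConduit(int level, bool receive, std::shared_ptr<Conduit> c) {
    mConduits[{level, receive}] = std::move(c);
  }
private:
  std::map<std::pair<int, bool>, std::shared_ptr<Conduit>> mConduits;
};

int main() {
  Media media;
  // Receive starts first: no transmit conduit exists yet, so it is created unpaired.
  std::shared_ptr<Conduit> rx = Conduit::Create(media.GetConduit(0, false).get());
  media.AddConduit(0, true, rx);
  // Transmit starts later and pairs with the existing receive conduit.
  std::shared_ptr<Conduit> tx = Conduit::Create(media.GetConduit(0, true).get());
  media.AddConduit(0, false, tx);
  assert(rx->paired == tx.get() && tx->paired == rx.get());
  return 0;
}
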
--- a/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
+++ b/media/webrtc/signaling/src/peerconnection/PeerConnectionMedia.h
@@ -303,39 +303,39 @@ class PeerConnectionMedia : public sigsl
     if (mTransportFlows.find(index_inner) == mTransportFlows.end())
       return NULL;
 
     return mTransportFlows[index_inner];
   }
 
   // Add a transport flow
   void AddTransportFlow(int aIndex, bool aRtcp,
-                        mozilla::RefPtr<mozilla::TransportFlow> aFlow) {
+                        const mozilla::RefPtr<mozilla::TransportFlow> &aFlow) {
     int index_inner = aIndex * 2 + (aRtcp ? 1 : 0);
 
     MOZ_ASSERT(!mTransportFlows[index_inner]);
     mTransportFlows[index_inner] = aFlow;
   }
 
-  mozilla::RefPtr<mozilla::AudioSessionConduit> GetConduit(int aStreamIndex, bool aReceive) {
+  mozilla::RefPtr<mozilla::MediaSessionConduit> GetConduit(int aStreamIndex, bool aReceive) {
     int index_inner = aStreamIndex * 2 + (aReceive ? 0 : 1);
 
-    if (mAudioConduits.find(index_inner) == mAudioConduits.end())
+    if (mConduits.find(index_inner) == mConduits.end())
       return NULL;
 
-    return mAudioConduits[index_inner];
+    return mConduits[index_inner];
   }
 
   // Add a conduit
   void AddConduit(int aIndex, bool aReceive,
-                  const mozilla::RefPtr<mozilla::AudioSessionConduit> &aConduit) {
+                  const mozilla::RefPtr<mozilla::MediaSessionConduit> &aConduit) {
     int index_inner = aIndex * 2 + (aReceive ? 0 : 1);
 
-    MOZ_ASSERT(!mAudioConduits[index_inner]);
-    mAudioConduits[index_inner] = aConduit;
+    MOZ_ASSERT(!mConduits[index_inner]);
+    mConduits[index_inner] = aConduit;
   }
 
   // ICE state signals
   sigslot::signal1<mozilla::NrIceCtx *> SignalIceGatheringCompleted;  // Done gathering
   sigslot::signal1<mozilla::NrIceCtx *> SignalIceCompleted;  // Done handshaking
   sigslot::signal1<mozilla::NrIceCtx *> SignalIceFailed;  // Self explanatory
 
  private:
@@ -369,17 +369,17 @@ class PeerConnectionMedia : public sigsl
   // DNS
   nsRefPtr<mozilla::NrIceResolver> mDNSResolver;
 
   // Transport flows: even is RTP, odd is RTCP
   std::map<int, mozilla::RefPtr<mozilla::TransportFlow> > mTransportFlows;
 
   // Conduits: even is receive, odd is transmit (for easier correlation with
   // flows)
-  std::map<int, mozilla::RefPtr<mozilla::AudioSessionConduit> > mAudioConduits;
+  std::map<int, mozilla::RefPtr<mozilla::MediaSessionConduit> > mConduits;
 
   // The main thread.
   nsCOMPtr<nsIThread> mMainThread;
 
   // The STS thread.
   nsCOMPtr<nsIEventTarget> mSTSThread;
 
   NS_INLINE_DECL_THREADSAFE_REFCOUNTING(PeerConnectionMedia)
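
The registry above folds stream index and direction into a single map key. A short standalone check of that arithmetic (the helper name is illustrative; the formula matches GetConduit/AddConduit, where even entries are receive and odd are transmit, analogous to the even-RTP/odd-RTCP scheme used for transport flows):

#include <cassert>

// index_inner layout used by GetConduit/AddConduit:
//   receive conduit for stream N  -> 2*N     (even)
//   transmit conduit for stream N -> 2*N + 1 (odd)
static int ConduitIndex(int aStreamIndex, bool aReceive) {
  return aStreamIndex * 2 + (aReceive ? 0 : 1);
}

int main() {
  assert(ConduitIndex(0, true)  == 0);  // stream 0, receive
  assert(ConduitIndex(0, false) == 1);  // stream 0, transmit
  assert(ConduitIndex(3, true)  == 6);  // stream 3, receive
  assert(ConduitIndex(3, false) == 7);  // stream 3, transmit
  return 0;
}
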
--- a/media/webrtc/signaling/test/mediaconduit_unittests.cpp
+++ b/media/webrtc/signaling/test/mediaconduit_unittests.cpp
@@ -543,22 +543,22 @@ class TransportConduitTest : public ::te
     cerr << "   ******************************************************** " << endl;
   }
 
   //2. Dump audio samples to dummy external transport
   void TestDummyVideoAndTransport()
   {
     int err = 0;
     //get pointer to VideoSessionConduit
-    mVideoSession = mozilla::VideoSessionConduit::Create();
+    mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
     if( !mVideoSession )
       ASSERT_NE(mVideoSession, (void*)NULL);
 
    // This session is for other one
-    mVideoSession2 = mozilla::VideoSessionConduit::Create();
+    mVideoSession2 = mozilla::VideoSessionConduit::Create(NULL);
     if( !mVideoSession2 )
       ASSERT_NE(mVideoSession2,(void*)NULL);
 
     mVideoRenderer = new DummyVideoTarget();
     ASSERT_NE(mVideoRenderer, (void*)NULL);
 
     FakeMediaTransport* xport = new FakeMediaTransport();
     ASSERT_NE(xport, (void*)NULL);
@@ -617,17 +617,17 @@ class TransportConduitTest : public ::te
 
   }
 
  void TestVideoConduitCodecAPI()
   {
     int err = 0;
     mozilla::RefPtr<mozilla::VideoSessionConduit> mVideoSession;
     //get pointer to VideoSessionConduit
-    mVideoSession = mozilla::VideoSessionConduit::Create();
+    mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
     if( !mVideoSession )
       ASSERT_NE(mVideoSession, (void*)NULL);
 
     //Test Configure Recv Codec APIS
     cerr << "   *************************************************" << endl;
     cerr << "    Test Receive Codec Configuration API Now " << endl;
     cerr << "   *************************************************" << endl;
 
@@ -721,17 +721,17 @@ class TransportConduitTest : public ::te
 
   // Calculate new resolution for sending video by applying max-fs constraint.
   void GetVideoResolutionWithMaxFs(int orig_width, int orig_height, int max_fs,
                                    int *new_width, int *new_height)
   {
     int err = 0;
 
     // Get pointer to VideoSessionConduit.
-    mVideoSession = mozilla::VideoSessionConduit::Create();
+    mVideoSession = mozilla::VideoSessionConduit::Create(NULL);
     if( !mVideoSession )
       ASSERT_NE(mVideoSession, (void*)NULL);
 
     // Configure send codecs on the conduit.
     mozilla::VideoCodecConfig cinst1(120, "VP8", 0, max_fs, 0);
 
     err = mVideoSession->ConfigureSendMediaCodec(&cinst1);
     ASSERT_EQ(mozilla::kMediaConduitNoError, err);