Bug 818670: Enable AEC in PeerConnection, AGC/NoiseSuppression in gUM r=derf
author Randell Jesup <rjesup@jesup.org>
Tue, 29 Jan 2013 11:55:09 -0500
changeset 120236 19e164f7d88d9df419f1632b7b8a82f2d83841c5
parent 120235 770b5184d68330b2f0acc2da32625462c1b2ad6f
child 120237 df75a87cce60fe1396b579941e4db1fe0495a610
push id 22104
push user rjesup@wgate.com
push date Tue, 29 Jan 2013 16:58:32 +0000
treeherder mozilla-inbound@df75a87cce60
reviewers derf
bugs 818670
milestone 21.0a1
Bug 818670: Enable AEC in PeerConnection, AGC/NoiseSuppression in gUM r=derf
content/media/webrtc/MediaEngine.h
content/media/webrtc/MediaEngineDefault.h
content/media/webrtc/MediaEngineWebRTC.h
content/media/webrtc/MediaEngineWebRTCAudio.cpp
dom/media/MediaManager.cpp
dom/media/MediaManager.h
media/mtransport/build/Makefile.in
media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
media/webrtc/signaling/src/media-conduit/AudioConduit.h
media/webrtc/webrtc_config.gypi
modules/libpref/src/init/all.js
--- a/content/media/webrtc/MediaEngine.h
+++ b/content/media/webrtc/MediaEngine.h
@@ -84,16 +84,21 @@ public:
                           SourceMediaStream *aSource,
                           TrackID aId,
                           StreamTime aDesiredTime,
                           TrackTicks &aLastEndTime) = 0;
 
   /* Stop the device and release the corresponding MediaStream */
   virtual nsresult Stop(SourceMediaStream *aSource, TrackID aID) = 0;
 
+  /* Change device configuration.  */
+  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
+                          bool aAgcOn, uint32_t aAGC,
+                          bool aNoiseOn, uint32_t aNoise) = 0;
+
   /* Return false if device is currently allocated or started */
   bool IsAvailable() {
     if (mState == kAllocated || mState == kStarted) {
       return false;
     } else {
       return true;
     }
   }
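
The new Config() entry point pairs an on/off flag with a webrtc mode value for each of echo cancellation, AGC and noise suppression. In the WebRTC source below the boolean enables are applied directly, while a mode of kEcUnchanged/kAgcUnchanged/kNsUnchanged (0) keeps whatever mode the engine currently uses. A minimal caller sketch, assuming a MediaEngineSource* named aSource (hypothetical, not part of this patch):

    // Turn AGC on with the engine-default mode; keep AEC and noise
    // suppression off and leave their modes untouched.
    nsresult rv = aSource->Config(false, webrtc::kEcUnchanged,
                                  true,  webrtc::kAgcDefault,
                                  false, webrtc::kNsUnchanged);
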
--- a/content/media/webrtc/MediaEngineDefault.h
+++ b/content/media/webrtc/MediaEngineDefault.h
@@ -39,16 +39,19 @@ public:
   virtual void GetUUID(nsAString&);
 
   virtual const MediaEngineVideoOptions *GetOptions();
   virtual nsresult Allocate();
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
+  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
+                          bool aAgcOn, uint32_t aAGC,
+                          bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
   virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
                           StreamTime aDesiredTime,
                           TrackTicks &aLastEndTime) {}
 
   NS_DECL_ISUPPORTS
@@ -81,16 +84,19 @@ public:
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
 
   virtual nsresult Allocate();
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
+  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
+                          bool aAgcOn, uint32_t aAGC,
+                          bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
   virtual void NotifyPull(MediaStreamGraph* aGraph, StreamTime aDesiredTime);
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
                           StreamTime aDesiredTime,
                           TrackTicks &aLastEndTime) {}
 
   NS_DECL_ISUPPORTS
--- a/content/media/webrtc/MediaEngineWebRTC.h
+++ b/content/media/webrtc/MediaEngineWebRTC.h
@@ -30,16 +30,17 @@
 // Audio Engine
 #include "voice_engine/include/voe_base.h"
 #include "voice_engine/include/voe_codec.h"
 #include "voice_engine/include/voe_hardware.h"
 #include "voice_engine/include/voe_network.h"
 #include "voice_engine/include/voe_audio_processing.h"
 #include "voice_engine/include/voe_volume_control.h"
 #include "voice_engine/include/voe_external_media.h"
+#include "voice_engine/include/voe_audio_processing.h"
 
 // Video Engine
 #include "video_engine/include/vie_base.h"
 #include "video_engine/include/vie_codec.h"
 #include "video_engine/include/vie_render.h"
 #include "video_engine/include/vie_capture.h"
 #include "video_engine/include/vie_file.h"
 
@@ -85,16 +86,19 @@ public:
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
   virtual const MediaEngineVideoOptions *GetOptions();
   virtual nsresult Allocate();
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
+  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
+                          bool aAgcOn, uint32_t aAGC,
+                          bool aNoiseOn, uint32_t aNoise) { return NS_OK; };
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
                           StreamTime aDesiredTime,
                           TrackTicks &aLastEndTime);
 
   NS_DECL_ISUPPORTS
 
@@ -172,16 +176,20 @@ class MediaEngineWebRTCAudioSource : pub
 public:
   MediaEngineWebRTCAudioSource(webrtc::VoiceEngine* aVoiceEnginePtr, int aIndex,
     const char* name, const char* uuid)
     : mVoiceEngine(aVoiceEnginePtr)
     , mMonitor("WebRTCMic.Monitor")
     , mCapIndex(aIndex)
     , mChannel(-1)
     , mInitDone(false)
+    , mEchoOn(false), mAgcOn(false), mNoiseOn(false)
+    , mEchoCancel(webrtc::kEcDefault)
+    , mAGC(webrtc::kAgcDefault)
+    , mNoiseSuppress(webrtc::kNsDefault)
     , mNullTransport(nullptr) {
     MOZ_ASSERT(aVoiceEnginePtr);
     mState = kReleased;
     mDeviceName.Assign(NS_ConvertUTF8toUTF16(name));
     mDeviceUUID.Assign(NS_ConvertUTF8toUTF16(uuid));
     Init();
   }
   ~MediaEngineWebRTCAudioSource() { Shutdown(); }
@@ -189,16 +197,20 @@ public:
   virtual void GetName(nsAString&);
   virtual void GetUUID(nsAString&);
 
   virtual nsresult Allocate();
   virtual nsresult Deallocate();
   virtual nsresult Start(SourceMediaStream*, TrackID);
   virtual nsresult Stop(SourceMediaStream*, TrackID);
   virtual nsresult Snapshot(uint32_t aDuration, nsIDOMFile** aFile);
+  virtual nsresult Config(bool aEchoOn, uint32_t aEcho,
+                          bool aAgcOn, uint32_t aAGC,
+                          bool aNoiseOn, uint32_t aNoise);
+
   virtual void NotifyPull(MediaStreamGraph* aGraph,
                           SourceMediaStream *aSource,
                           TrackID aId,
                           StreamTime aDesiredTime,
                           TrackTicks &aLastEndTime);
 
   // VoEMediaProcess.
   void Process(const int channel, const webrtc::ProcessingTypes type,
@@ -213,31 +225,37 @@ private:
 
   void Init();
   void Shutdown();
 
   webrtc::VoiceEngine* mVoiceEngine;
   webrtc::VoEBase* mVoEBase;
   webrtc::VoEExternalMedia* mVoERender;
   webrtc::VoENetwork*  mVoENetwork;
+  webrtc::VoEAudioProcessing *mVoEProcessing;
 
   // mMonitor protects mSources[] access/changes, and transitions of mState
   // from kStarted to kStopped (which are combined with EndTrack()).
   // mSources[] is accessed from webrtc threads.
   mozilla::ReentrantMonitor mMonitor;
   nsTArray<SourceMediaStream *> mSources; // When this goes empty, we shut down HW
 
   int mCapIndex;
   int mChannel;
   TrackID mTrackID;
   bool mInitDone;
 
   nsString mDeviceName;
   nsString mDeviceUUID;
 
+  bool mEchoOn, mAgcOn, mNoiseOn;
+  webrtc::EcModes  mEchoCancel;
+  webrtc::AgcModes mAGC;
+  webrtc::NsModes  mNoiseSuppress;
+
   NullTransport *mNullTransport;
 };
 
 class MediaEngineWebRTC : public MediaEngine
 {
 public:
   MediaEngineWebRTC()
   : mMutex("mozilla::MediaEngineWebRTC")
--- a/content/media/webrtc/MediaEngineWebRTCAudio.cpp
+++ b/content/media/webrtc/MediaEngineWebRTCAudio.cpp
@@ -42,16 +42,70 @@ MediaEngineWebRTCAudioSource::GetUUID(ns
   if (mInitDone) {
     aUUID.Assign(mDeviceUUID);
   }
 
   return;
 }
 
 nsresult
+MediaEngineWebRTCAudioSource::Config(bool aEchoOn, uint32_t aEcho,
+                                     bool aAgcOn, uint32_t aAGC,
+                                     bool aNoiseOn, uint32_t aNoise)
+{
+  LOG(("Audio config: aec: %d, agc: %d, noise: %d",
+       aEchoOn ? aEcho : -1,
+       aAgcOn ? aAGC : -1,
+       aNoiseOn ? aNoise : -1));
+
+  bool update_agc = (mAgcOn != aAgcOn);
+  bool update_noise = (mNoiseOn != aNoiseOn);
+  mAgcOn = aAgcOn;
+  mNoiseOn = aNoiseOn;
+
+  if ((webrtc::AgcModes) aAGC != webrtc::kAgcUnchanged) {
+    if (mAGC != (webrtc::AgcModes) aAGC) {
+      update_agc = true;
+      mAGC = (webrtc::AgcModes) aAGC;
+    }
+  }
+  if ((webrtc::NsModes) aNoise != webrtc::kNsUnchanged) {
+    if (mNoiseSuppress != (webrtc::NsModes) aNoise) {
+      update_noise = true;
+      mNoiseSuppress = (webrtc::NsModes) aNoise;
+    }
+  }
+
+  if (mInitDone) {
+    int error;
+#if 0
+    // Until we can support feeding our full output audio from the browser
+    // through the MediaStream, this won't work.  Or we need to move AEC to
+    // below audio input and output, perhaps invoked from here.
+    mEchoOn = aEchoOn;
+    if ((webrtc::EcModes) aEcho != webrtc::kEcUnchanged)
+      mEchoCancel = (webrtc::EcModes) aEcho;
+    mVoEProcessing->SetEcStatus(mEchoOn, aEcho);
+#else
+    (void) aEcho; (void) aEchoOn; // suppress warnings
+#endif
+
+    if (update_agc &&
+      0 != (error = mVoEProcessing->SetAgcStatus(mAgcOn, (webrtc::AgcModes) aAGC))) {
+      LOG(("%s Error setting AGC Status: %d ",__FUNCTION__, error));
+    }
+    if (update_noise &&
+      0 != (error = mVoEProcessing->SetNsStatus(mNoiseOn, (webrtc::NsModes) aNoise))) {
+      LOG(("%s Error setting NoiseSuppression Status: %d ",__FUNCTION__, error));
+    }
+  }
+  return NS_OK;
+}
+
+nsresult
 MediaEngineWebRTCAudioSource::Allocate()
 {
   if (mState == kReleased && mInitDone) {
     webrtc::VoEHardware* ptrVoEHw = webrtc::VoEHardware::GetInterface(mVoiceEngine);
     int res = ptrVoEHw->SetRecordingDevice(mCapIndex);
     ptrVoEHw->Release();
     if (res) {
       return NS_ERROR_FAILURE;
@@ -101,16 +155,21 @@ MediaEngineWebRTCAudioSource::Start(Sour
   LOG(("Initial audio"));
   mTrackID = aID;
 
   if (mState == kStarted) {
     return NS_OK;
   }
   mState = kStarted;
 
+  // Configure audio processing in webrtc code
+  Config(mEchoOn, webrtc::kEcUnchanged,
+         mAgcOn, webrtc::kAgcUnchanged,
+         mNoiseOn, webrtc::kNsUnchanged);
+
   if (mVoEBase->StartReceive(mChannel)) {
     return NS_ERROR_FAILURE;
   }
   if (mVoEBase->StartSend(mChannel)) {
     return NS_ERROR_FAILURE;
   }
 
   // Attach external media processor, so this::Process will be called.
@@ -187,16 +246,21 @@ MediaEngineWebRTCAudioSource::Init()
   if (!mVoERender) {
     return;
   }
   mVoENetwork = webrtc::VoENetwork::GetInterface(mVoiceEngine);
   if (!mVoENetwork) {
     return;
   }
 
+  mVoEProcessing = webrtc::VoEAudioProcessing::GetInterface(mVoiceEngine);
+  if (!mVoEProcessing) {
+    return;
+  }
+
   mChannel = mVoEBase->CreateChannel();
   if (mChannel < 0) {
     return;
   }
   mNullTransport = new NullTransport();
   if (mVoENetwork->RegisterExternalTransport(mChannel, *mNullTransport)) {
     return;
   }
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -6,16 +6,18 @@
 
 #include "MediaStreamGraph.h"
 #include "nsIDOMFile.h"
 #include "nsIEventTarget.h"
 #include "nsIUUIDGenerator.h"
 #include "nsIScriptGlobalObject.h"
 #include "nsIPopupWindowManager.h"
 #include "nsISupportsArray.h"
+#include "nsIPrefService.h"
+#include "nsIPrefBranch.h"
 
 // For PR_snprintf
 #include "prprf.h"
 
 #include "nsJSUtils.h"
 #include "nsDOMFile.h"
 #include "nsGlobalWindow.h"
 
@@ -377,16 +379,40 @@ public:
     // Dispatch to the media thread to ask it to start the sources,
     // because that can take a while
     nsIThread *mediaThread = MediaManager::GetThread();
     nsRefPtr<MediaOperationRunnable> runnable(
       new MediaOperationRunnable(MEDIA_START, mListener,
                                  mAudioSource, mVideoSource, false));
     mediaThread->Dispatch(runnable, NS_DISPATCH_NORMAL);
 
+    nsresult rv;
+    nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
+    if (NS_SUCCEEDED(rv)) {
+      nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
+
+      if (branch) {
+        int32_t aec = (int32_t) webrtc::kEcUnchanged;
+        int32_t agc = (int32_t) webrtc::kAgcUnchanged;
+        int32_t noise = (int32_t) webrtc::kNsUnchanged;
+        bool aec_on = false, agc_on = false, noise_on = false;
+
+        branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
+        branch->GetIntPref("media.peerconnection.aec", &aec);
+        branch->GetBoolPref("media.peerconnection.agc_enabled", &agc_on);
+        branch->GetIntPref("media.peerconnection.agc", &agc);
+        branch->GetBoolPref("media.peerconnection.noise_enabled", &noise_on);
+        branch->GetIntPref("media.peerconnection.noise", &noise);
+
+        mListener->AudioConfig(aec_on, (uint32_t) aec,
+                               agc_on, (uint32_t) agc,
+                               noise_on, (uint32_t) noise);
+      }
+    }
+
     // We're in the main thread, so no worries here either.
     nsCOMPtr<nsIDOMGetUserMediaSuccessCallback> success(mSuccess);
     nsCOMPtr<nsIDOMGetUserMediaErrorCallback> error(mError);
 
     if (!(mManager->IsWindowStillActive(mWindowID))) {
       return NS_OK;
     }
     // This is safe since we're on main-thread, and the windowlist can only
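
The locals above are pre-seeded with false and the kXxxUnchanged sentinels because the nsIPrefBranch getters return an error and leave the out-parameter alone when a pref has no value, so a missing pref simply means "don't change that setting". A condensed sketch of the pattern (error handling shown for illustration only):

    int32_t agc = (int32_t) webrtc::kAgcUnchanged;  // fallback if the pref is absent
    if (NS_FAILED(branch->GetIntPref("media.peerconnection.agc", &agc))) {
      // 'agc' still holds kAgcUnchanged, so Config() will not touch the AGC mode.
    }
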
--- a/dom/media/MediaManager.h
+++ b/dom/media/MediaManager.h
@@ -16,16 +16,18 @@
 
 #include "nsPIDOMWindow.h"
 #include "nsIDOMNavigatorUserMedia.h"
 #include "nsXULAppAPI.h"
 #include "mozilla/Attributes.h"
 #include "mozilla/StaticPtr.h"
 #include "prlog.h"
 
+#include "mtransport/runnable_utils.h"
+
 namespace mozilla {
 
 #ifdef PR_LOGGING
 extern PRLogModuleInfo* GetMediaManagerLog();
 #define MM_LOG(msg) PR_LOG(GetMediaManagerLog(), PR_LOG_DEBUG, msg)
 #else
 #define MM_LOG(msg)
 #endif
@@ -124,16 +126,30 @@ public:
     return mStream->AsSourceStream();
   }
 
   // implement in .cpp to avoid circular dependency with MediaOperationRunnable
   // Can be invoked from EITHER MainThread or MSG thread
   void Invalidate();
 
   void
+  AudioConfig(bool aEchoOn, uint32_t aEcho,
+              bool aAgcOn, uint32_t aAGC,
+              bool aNoiseOn, uint32_t aNoise)
+  {
+    if (mAudioSource) {
+      RUN_ON_THREAD(mMediaThread,
+                    WrapRunnable(nsRefPtr<MediaEngineSource>(mAudioSource), // threadsafe
+                                 &MediaEngineSource::Config,
+                                 aEchoOn, aEcho, aAgcOn, aAGC, aNoiseOn, aNoise),
+                    NS_DISPATCH_NORMAL);
+    }
+  }
+
+  void
   Remove()
   {
     NS_ASSERTION(NS_IsMainThread(), "Only call on main thread");
     // allow calling even if inactive (!mStream) for easier cleanup
     // Caller holds strong reference to us, so no death grip required
     MutexAutoLock lock(mLock); // protect access to mRemoved
     if (mStream && !mRemoved) {
       MM_LOG(("Listener removed on purpose, mFinished = %d", (int) mFinished));
--- a/media/mtransport/build/Makefile.in
+++ b/media/mtransport/build/Makefile.in
@@ -32,16 +32,17 @@ EXPORTS_mtransport = \
   ../transportlayer.h \
   ../transportlayerdtls.h \
   ../transportlayerice.h \
   ../transportlayerlog.h \
   ../transportlayerloopback.h \
   ../transportlayerprsock.h \
   ../m_cpp_utils.h \
   ../runnable_utils.h \
+  ../runnable_utils_generated.h \
   ../sigslot.h \
   $(NULL)
 
 CPPSRCS = \
 	$(MTRANSPORT_LCPPSRCS) \
 	$(NULL)
 
 include $(srcdir)/../objs.mk
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.cpp
@@ -1,13 +1,19 @@
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "AudioConduit.h"
+#include "nsCOMPtr.h"
+#include "mozilla/Services.h"
+#include "nsServiceManagerUtils.h"
+#include "nsIPrefService.h"
+#include "nsIPrefBranch.h"
+
 #include "CSFLog.h"
 #include "voice_engine/include/voe_errors.h"
 
 
 namespace mozilla {
 
 static const char* logTag ="WebrtcAudioSessionConduit";
 
@@ -46,16 +52,21 @@ WebrtcAudioConduit::~WebrtcAudioConduit(
 
   if(mPtrVoEXmedia)
   {
     mPtrVoEXmedia->SetExternalRecordingStatus(false);
     mPtrVoEXmedia->SetExternalPlayoutStatus(false);
     mPtrVoEXmedia->Release();
   }
 
+  if(mPtrVoEProcessing)
+  {
+    mPtrVoEProcessing->Release();
+  }
+
   //Deal with the transport
   if(mPtrVoENetwork)
   {
     mPtrVoENetwork->DeRegisterExternalTransport(mChannel);
     mPtrVoENetwork->Release();
   }
 
   if(mPtrVoECodec)
@@ -121,16 +132,22 @@ MediaConduitErrorCode WebrtcAudioConduit
   }
 
   if(!(mPtrVoECodec = VoECodec::GetInterface(mVoiceEngine)))
   {
     CSFLogError(logTag, "%s Unable to initialize VoEBCodec", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
+  if(!(mPtrVoEProcessing = VoEAudioProcessing::GetInterface(mVoiceEngine)))
+  {
+    CSFLogError(logTag, "%s Unable to initialize VoEProcessing", __FUNCTION__);
+    return kMediaConduitSessionNotInited;
+  }
+
   if(!(mPtrVoEXmedia = VoEExternalMedia::GetInterface(mVoiceEngine)))
   {
     CSFLogError(logTag, "%s Unable to initialize VoEExternalMedia", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
   // init the engine with our audio device layer
   if(mPtrVoEBase->Init() == -1)
@@ -230,16 +247,43 @@ WebrtcAudioConduit::ConfigureSendMediaCo
     if(error ==  VE_CANNOT_SET_SEND_CODEC || error == VE_CODEC_ERROR)
     {
       return kMediaConduitInvalidSendCodec;
     }
 
     return kMediaConduitUnknownError;
   }
 
+  // TEMPORARY - see bug 694814 comment 2
+  nsresult rv;
+  nsCOMPtr<nsIPrefService> prefs = do_GetService("@mozilla.org/preferences-service;1", &rv);
+  if (NS_SUCCEEDED(rv)) {
+    nsCOMPtr<nsIPrefBranch> branch = do_QueryInterface(prefs);
+
+    if (branch) {
+      int32_t aec = 0; // 0 == unchanged
+      bool aec_on = false;
+
+      branch->GetBoolPref("media.peerconnection.aec_enabled", &aec_on);
+      branch->GetIntPref("media.peerconnection.aec", &aec);
+
+      CSFLogDebug(logTag,"Audio config: aec: %d", aec_on ? aec : -1);
+      mEchoOn = aec_on;
+      if (static_cast<webrtc::EcModes>(aec) != webrtc::kEcUnchanged)
+        mEchoCancel = static_cast<webrtc::EcModes>(aec);
+
+      branch->GetIntPref("media.peerconnection.capture_delay", &mCaptureDelay);
+    }
+  }
+
+  if (0 != (error = mPtrVoEProcessing->SetEcStatus(mEchoOn, mEchoCancel))) {
+    CSFLogError(logTag,"%s Error setting EcStatus: %d ",__FUNCTION__, error);
+    return kMediaConduitUnknownError;
+  }
+
   //Let's Send Transport State-machine on the Engine
   if(mPtrVoEBase->StartSend(mChannel) == -1)
   {
     error = mPtrVoEBase->LastError();
     CSFLogError(logTag, "%s StartSend failed %d", __FUNCTION__, error);
     return kMediaConduitUnknownError;
   }
 
@@ -399,17 +443,17 @@ WebrtcAudioConduit::SendAudioFrame(const
 
   // if transmission is not started .. conduit cannot insert frames
   if(!mEngineTransmitting)
   {
     CSFLogError(logTag, "%s Engine not transmitting ", __FUNCTION__);
     return kMediaConduitSessionNotInited;
   }
 
-
+  capture_delay = mCaptureDelay;
   //Insert the samples
   if(mPtrVoEXmedia->ExternalRecordingInsertData(audio_data,
                                                 lengthSamples,
                                                 samplingFreqHz,
                                                 capture_delay) == -1)
   {
     int error = mPtrVoEBase->LastError();
     CSFLogError(logTag,  "%s Inserting audio data Failed %d", __FUNCTION__, error);
@@ -743,9 +787,8 @@ WebrtcAudioConduit::DumpCodecDB() const
       CSFLogDebug(logTag,"Payload Type: %d", mRecvCodecList[i]->mType);
       CSFLogDebug(logTag,"Payload Frequency: %d", mRecvCodecList[i]->mFreq);
       CSFLogDebug(logTag,"Payload PacketSize: %d", mRecvCodecList[i]->mPacSize);
       CSFLogDebug(logTag,"Payload Channels: %d", mRecvCodecList[i]->mChannels);
       CSFLogDebug(logTag,"Payload Sampling Rate: %d", mRecvCodecList[i]->mRate);
     }
  }
 }// end namespace
-
--- a/media/webrtc/signaling/src/media-conduit/AudioConduit.h
+++ b/media/webrtc/signaling/src/media-conduit/AudioConduit.h
@@ -13,22 +13,24 @@
 // Audio Engine Includes
 #include "common_types.h"
 #include "voice_engine/include/voe_base.h"
 #include "voice_engine/include/voe_volume_control.h"
 #include "voice_engine/include/voe_codec.h"
 #include "voice_engine/include/voe_file.h"
 #include "voice_engine/include/voe_network.h"
 #include "voice_engine/include/voe_external_media.h"
+#include "voice_engine/include/voe_audio_processing.h"
 
 //Some WebRTC types for short notations
  using webrtc::VoEBase;
  using webrtc::VoENetwork;
  using webrtc::VoECodec;
  using webrtc::VoEExternalMedia;
+ using webrtc::VoEAudioProcessing;
 
 /** This file hosts several structures identifying different aspects
  * of a RTP Session.
  */
 
 namespace mozilla {
 
 /**
@@ -141,17 +143,20 @@ public:
 
 
   WebrtcAudioConduit():
                       mVoiceEngine(NULL),
                       mTransport(NULL),
                       mEngineTransmitting(false),
                       mEngineReceiving(false),
                       mChannel(-1),
-                      mCurSendCodecConfig(NULL)
+                      mCurSendCodecConfig(NULL),
+                      mCaptureDelay(150),
+                      mEchoOn(true),
+                      mEchoCancel(webrtc::kEcAec)
   {
   }
 
   virtual ~WebrtcAudioConduit();
 
   MediaConduitErrorCode Init();
 
 private:
@@ -186,22 +191,29 @@ private:
   void DumpCodecDB() const;
 
   webrtc::VoiceEngine* mVoiceEngine;
   mozilla::RefPtr<TransportInterface> mTransport;
   webrtc::VoENetwork*  mPtrVoENetwork;
   webrtc::VoEBase*     mPtrVoEBase;
   webrtc::VoECodec*    mPtrVoECodec;
   webrtc::VoEExternalMedia* mPtrVoEXmedia;
+  webrtc::VoEAudioProcessing* mPtrVoEProcessing;
 
   //engine states of our interest
   bool mEngineTransmitting; // If true => VoiceEngine Send-subsystem is up
   bool mEngineReceiving;    // If true => VoiceEngine Receive-subsystem is up
                             // and playout is enabled
 
   int mChannel;
   RecvCodecList    mRecvCodecList;
   AudioCodecConfig* mCurSendCodecConfig;
+
+  // Current "capture" delay (really output plus input delay)
+  int32_t mCaptureDelay;
+
+  bool mEchoOn;
+  webrtc::EcModes  mEchoCancel;
 };
 
 } // end namespace
 
 #endif
--- a/media/webrtc/webrtc_config.gypi
+++ b/media/webrtc/webrtc_config.gypi
@@ -1,23 +1,28 @@
 # This Source Code Form is subject to the terms of the Mozilla Public
 # License, v. 2.0. If a copy of the MPL was not distributed with this
 # file, You can obtain one at http://mozilla.org/MPL/2.0/.
 
 # definitions to control what gets built in webrtc
+# NOTE: .gyp files are not reprocessed when a .gypi file changes, so if you
+# change something here, run this before building:
+# "find . -name '*.gyp' | xargs touch"
 {
   'variables': {
     # basic stuff for everything
     'include_internal_video_render': 0,
     'clang_use_chrome_plugins': 0,
     'enable_protobuf': 0,
     'include_pulse_audio': 0,
     'include_tests': 0,
     'use_system_libjpeg': 1,
     'use_system_libvpx': 1,
+# Creates AEC internal sample dump files in current directory
+#    'aec_debug_dump': 1,
 
     # codec enable/disables:
     # Note: if you change one here, you must modify shared_libs.mk!
     'codec_g711_enable': 1,
     'codec_opus_enable': 1,
     'codec_g722_enable': 0,
     'codec_ilbc_enable': 0,
     'codec_isac_enable': 0,
--- a/modules/libpref/src/init/all.js
+++ b/modules/libpref/src/init/all.js
@@ -173,16 +173,25 @@ pref("media.dash.enabled", false);
 #endif
 #ifdef MOZ_GSTREAMER
 pref("media.gstreamer.enabled", true);
 #endif
 #ifdef MOZ_WEBRTC
 pref("media.navigator.enabled", true);
 pref("media.peerconnection.enabled", false);
 pref("media.navigator.permission.disabled", false);
+// These values (aec, agc, and noise) are from media/webrtc/trunk/webrtc/common_types.h:
+// kXxxUnchanged = 0, kXxxDefault = 1, and higher values are specific to each
+// setting (for Xxx = Ec, Agc, or Ns).  Defaults are all set to kXxxDefault here.
+pref("media.peerconnection.aec_enabled", true);
+pref("media.peerconnection.aec", 1);
+pref("media.peerconnection.agc_enabled", false);
+pref("media.peerconnection.agc", 1);
+pref("media.peerconnection.noise_enabled", false);
+pref("media.peerconnection.noise", 1);
 #else
 #ifdef ANDROID
 pref("media.navigator.enabled", true);
 #endif
 #endif
 
 // Whether to enable Web Audio support
 pref("media.webaudio.enabled", false);