Bug 1059573 - Add an option to use hardware AEC for WebRTC. r=jesup,ted a=bajaj
author: Gian-Carlo Pascutto <gpascutto@mozilla.com>
date: Wed, 24 Sep 2014 19:11:58 +0200
changeset 225160 5d0ec7211d638b87bde88cd49049cbf76278a98d
parent 225159 055d46b81ed1e8a5988cf4f369cbdca098909f93
child 225161 ba4f8bb18ef9cb199014bd6ece347017d7c60d10
push id: 3979
push user: raliiev@mozilla.com
push date: Mon, 13 Oct 2014 16:35:44 +0000
treeherder: mozilla-beta@30f2cc610691
reviewers: jesup, ted, bajaj
bugs: 1059573
milestone: 34.0a2
build/gyp.mozbuild
config/system-headers
configure.in
media/libcubeb/src/android/sles_definitions.h
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
modules/libpref/init/all.js
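
Build wiring in this patch: configure.in adds --enable-hardware-aec-ns and exports MOZ_WEBRTC_HARDWARE_AEC_NS, build/gyp.mozbuild maps that onto the gyp variable hardware_aec_ns, and audio_device.gypi turns the variable into the WEBRTC_HARDWARE_AEC_NS define consumed by opensles_input.cc/.h. A minimal opt-in sketch, assuming a local mozconfig for a B2G/Gonk build (the mozconfig lines are illustrative; only the flag itself comes from this patch):

    # .mozconfig (hypothetical)
    ac_add_options --enable-hardware-aec-ns
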
--- a/build/gyp.mozbuild
+++ b/build/gyp.mozbuild
@@ -40,19 +40,23 @@ gyp_vars = {
     'arm_neon': 0,
     'arm_neon_optional': 1,
 
     'moz_widget_toolkit_gonk': 0,
     'moz_webrtc_omx': 0,
 
     # (for vp8) chromium sets to 0 also
     'use_temporal_layers': 0,
+
     # Creates AEC internal sample dump files in current directory
     'aec_debug_dump': 1,
 
+    # Enable and force use of hardware AEC
+    'hardware_aec_ns': 1 if CONFIG['MOZ_WEBRTC_HARDWARE_AEC_NS'] else 0,
+
     # codec enable/disables:
     'include_g711': 1,
     'include_opus': 1,
     'include_g722': 1,
     'include_ilbc': 0,
     'include_isac': 0,
     'include_pcm16b': 1,
 }
--- a/config/system-headers
+++ b/config/system-headers
@@ -195,16 +195,18 @@ atlctl.cpp
 atlctl.h
 ATLCTL.H
 atlhost.h
 atlimpl.cpp
 atlwin.cpp
 ATSTypes.h
 ATSUnicode.h
 #ifdef ANDROID
+audio_effects/effect_aec.h
+audio_effects/effect_ns.h
 AudioParameter.h
 AudioSystem.h
 AudioTrack.h
 avc_utils.h
 #endif
 Balloons.h
 base/pblock.h
 base/PCR_Base.h
@@ -726,16 +728,18 @@ mapiguid.h
 mapi.h
 mapitags.h
 mapiutil.h
 mapix.h
 Math64.h
 math.h
 mbstring.h
 #ifdef ANDROID
+media/AudioEffect.h
+media/AudioSystem.h
 media/ICrypto.h
 media/IOMX.h
 media/MediaProfiles.h
 media/MediaRecorderBase.h
 media/openmax/OMX_Audio.h
 media/stagefright/AACWriter.h
 media/stagefright/AMRWriter.h
 media/stagefright/AudioSource.h
--- a/configure.in
+++ b/configure.in
@@ -3775,16 +3775,17 @@ else
 fi
 MOZ_EME=1
 MOZ_FFMPEG=
 MOZ_WEBRTC=1
 MOZ_PEERCONNECTION=
 MOZ_SRTP=
 MOZ_WEBRTC_SIGNALING=
 MOZ_WEBRTC_ASSERT_ALWAYS=1
+MOZ_WEBRTC_HARDWARE_AEC_NS=
 MOZ_SCTP=
 MOZ_ANDROID_OMX=
 MOZ_MEDIA_NAVIGATOR=
 MOZ_OMX_PLUGIN=
 MOZ_VPX=
 MOZ_VPX_ERROR_CONCEALMENT=
 MOZ_WEBSPEECH=1
 VPX_AS=
@@ -4978,21 +4979,34 @@ dnl enable once PeerConnection lands
     AC_DEFINE(MOZ_SRTP)
     if test -n "$MOZ_X11"; then
       MOZ_WEBRTC_X11_LIBS="-lXext -lXdamage -lXfixes -lXcomposite"
     fi
 else
     MOZ_SYNTH_PICO=
 fi
 
+dnl ========================================================
+dnl = Force hardware AEC, disable webrtc.org AEC
+dnl ========================================================
+MOZ_ARG_ENABLE_BOOL(hardware-aec-ns,
+[  --enable-hardware-aec-ns   Enable support for hardware AEC and noise suppression],
+    MOZ_WEBRTC_HARDWARE_AEC_NS=1,
+    MOZ_WEBRTC_HARDWARE_AEC_NS=)
+
+if test -n "$MOZ_WEBRTC_HARDWARE_AEC_NS"; then
+    AC_DEFINE(MOZ_WEBRTC_HARDWARE_AEC_NS)
+fi
+
 AC_SUBST(MOZ_WEBRTC)
 AC_SUBST(MOZ_WEBRTC_LEAKING_TESTS)
 AC_SUBST(MOZ_WEBRTC_SIGNALING)
 AC_SUBST(MOZ_PEERCONNECTION)
 AC_SUBST(MOZ_WEBRTC_ASSERT_ALWAYS)
+AC_SUBST(MOZ_WEBRTC_HARDWARE_AEC_NS)
 AC_SUBST(MOZ_SCTP)
 AC_SUBST(MOZ_SRTP)
 AC_SUBST_LIST(MOZ_WEBRTC_X11_LIBS)
 
 dnl Use integers over floats for audio on B2G and Android
 dnl (regardless of the CPU architecture, because audio
 dnl backends for those platforms don't support floats. We also
 dnl use integers on ARM with other OS, because it's more efficient.
--- a/media/libcubeb/src/android/sles_definitions.h
+++ b/media/libcubeb/src/android/sles_definitions.h
@@ -28,24 +28,30 @@
 /*---------------------------------------------------------------------------*/
 
 /** Audio recording preset */
 /** Audio recording preset key */
 #define SL_ANDROID_KEY_RECORDING_PRESET ((const SLchar*) "androidRecordingPreset")
 /** Audio recording preset values */
 /**   preset "none" cannot be set, it is used to indicate the current settings
  *     do not match any of the presets. */
-#define SL_ANDROID_RECORDING_PRESET_NONE              ((SLuint32) 0x00000000)
+#define SL_ANDROID_RECORDING_PRESET_NONE                ((SLuint32) 0x00000000)
 /**   generic recording configuration on the platform */
-#define SL_ANDROID_RECORDING_PRESET_GENERIC           ((SLuint32) 0x00000001)
+#define SL_ANDROID_RECORDING_PRESET_GENERIC             ((SLuint32) 0x00000001)
 /**   uses the microphone audio source with the same orientation as the camera
  *     if available, the main device microphone otherwise */
-#define SL_ANDROID_RECORDING_PRESET_CAMCORDER         ((SLuint32) 0x00000002)
+#define SL_ANDROID_RECORDING_PRESET_CAMCORDER           ((SLuint32) 0x00000002)
 /**   uses the main microphone tuned for voice recognition */
-#define SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION ((SLuint32) 0x00000003)
+#define SL_ANDROID_RECORDING_PRESET_VOICE_RECOGNITION   ((SLuint32) 0x00000003)
+/**   uses the main microphone tuned for audio communications */
+#define SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION ((SLuint32) 0x00000004)
+
+/** Audio recording get session ID (read only) */
+/** Audio recording get session ID key */
+#define SL_ANDROID_KEY_RECORDING_SESSION_ID ((const SLchar*) "androidRecordingSessionId")
 
 /*---------------------------------------------------------------------------*/
 /* Android AudioPlayer configuration                                         */
 /*---------------------------------------------------------------------------*/
 
 /** Audio playback stream type */
 /** Audio playback stream type key */
 #define SL_ANDROID_KEY_STREAM_TYPE ((const SLchar*) "androidPlaybackStreamType")
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
@@ -17,16 +17,23 @@
 #include "webrtc/modules/audio_device/android/audio_common.h"
 #include "webrtc/modules/audio_device/android/opensles_common.h"
 #include "webrtc/modules/audio_device/android/single_rw_fifo.h"
 #include "webrtc/modules/audio_device/audio_device_buffer.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/thread_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+#include <media/AudioSystem.h>
+#include <audio_effects/effect_aec.h>
+#include <audio_effects/effect_ns.h>
+#include <utils/Errors.h>
+#endif
+
 #define VOID_RETURN
 #define OPENSL_RETURN_ON_FAILURE(op, ret_val)                    \
   do {                                                           \
     SLresult err = (op);                                         \
     if (err != SL_RESULT_SUCCESS) {                              \
       WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_,          \
                    "OpenSL error: %d", err);                     \
       assert(false);                                             \
@@ -60,16 +67,20 @@ OpenSlesInput::OpenSlesInput(
       sles_engine_itf_(NULL),
       sles_recorder_(NULL),
       sles_recorder_itf_(NULL),
       sles_recorder_sbq_itf_(NULL),
       audio_buffer_(NULL),
       active_queue_(0),
       rec_sampling_rate_(0),
       agc_enabled_(false),
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+      aec_(NULL),
+      ns_(NULL),
+#endif
       recording_delay_(0),
       opensles_lib_(NULL) {
 }
 
 OpenSlesInput::~OpenSlesInput() {
 }
 
 int32_t OpenSlesInput::SetAndroidAudioDeviceObjects(void* javaVM,
@@ -385,16 +396,114 @@ bool OpenSlesInput::EnqueueAllBuffers() 
   // |fifo_|.
   while (fifo_->size() != 0) {
     // Clear the fifo.
     fifo_->Pop();
   }
   return true;
 }
 
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+bool OpenSlesInput::CheckPlatformAEC() {
+  effect_descriptor_t fxDesc;
+  uint32_t numFx;
+
+  if (android::AudioEffect::queryNumberEffects(&numFx) != android::NO_ERROR) {
+    return false;
+  }
+
+  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "Platform has %d effects", numFx);
+
+  for (uint32_t i = 0; i < numFx; i++) {
+    if (android::AudioEffect::queryEffect(i, &fxDesc) != android::NO_ERROR) {
+      continue;
+    }
+    if (memcmp(&fxDesc.type, FX_IID_AEC, sizeof(fxDesc.type)) == 0) {
+      return true;
+    }
+  }
+
+  return false;
+}
+
+void OpenSlesInput::SetupVoiceMode() {
+  SLAndroidConfigurationItf configItf;
+  SLresult res = (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_ANDROIDCONFIGURATION_,
+                                                 (void*)&configItf);
+  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL GetInterface: %d", res);
+
+  if (res == SL_RESULT_SUCCESS) {
+    SLuint32 voiceMode = SL_ANDROID_RECORDING_PRESET_VOICE_COMMUNICATION;
+    SLuint32 voiceSize = sizeof(voiceMode);
+
+    res = (*configItf)->SetConfiguration(configItf,
+                                         SL_ANDROID_KEY_RECORDING_PRESET,
+                                         &voiceMode, voiceSize);
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL Set Voice mode res: %d", res);
+  }
+}
+
+void OpenSlesInput::SetupAECAndNS() {
+  bool hasAec = CheckPlatformAEC();
+  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "Platform has AEC: %d", hasAec);
+  // This code should not have been enabled if this fails, because it means the
+  // software AEC will have been disabled as well. If you hit this, you need
+  // to fix your B2G config or fix the hardware AEC on your device.
+  assert(hasAec);
+
+  SLAndroidConfigurationItf configItf;
+  SLresult res = (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_ANDROIDCONFIGURATION_,
+                                                 (void*)&configItf);
+  WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL GetInterface: %d", res);
+
+  if (res == SL_RESULT_SUCCESS) {
+    SLuint32 sessionId = 0;
+    SLuint32 idSize = sizeof(sessionId);
+    res = (*configItf)->GetConfiguration(configItf,
+                                         SL_ANDROID_KEY_RECORDING_SESSION_ID,
+                                         &idSize, &sessionId);
+    WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL Get sessionId res: %d", res);
+
+    if (res == SL_RESULT_SUCCESS && idSize == sizeof(sessionId)) {
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL sessionId: %d", sessionId);
+
+      aec_ = new android::AudioEffect(FX_IID_AEC, NULL, 0, 0, 0, sessionId, 0);
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL aec: %p", aec_);
+
+      if (aec_) {
+        android::status_t status = aec_->initCheck();
+        if (status == android::NO_ERROR || status == android::ALREADY_EXISTS) {
+          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL aec enabled");
+          aec_->setEnabled(true);
+        } else {
+          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL aec disabled: %d", status);
+          delete aec_;
+          aec_ = NULL;
+        }
+      }
+
+      ns_ = new android::AudioEffect(FX_IID_NS, NULL, 0, 0, 0, sessionId, 0);
+      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL ns: %p", ns_);
+
+      if (ns_) {
+        android::status_t status = ns_->initCheck();
+        if (status == android::NO_ERROR || status == android::ALREADY_EXISTS) {
+          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL ns enabled");
+          ns_->setEnabled(true);
+        } else {
+          WEBRTC_TRACE(kTraceError, kTraceAudioDevice, id_, "OpenSL ns disabled: %d", status);
+          delete ns_;
+          ns_ = NULL;
+        }
+      }
+    }
+  }
+}
+#endif
+
 bool OpenSlesInput::CreateAudioRecorder() {
   if (!event_.Start()) {
     assert(false);
     return false;
   }
   SLDataLocator_IODevice micLocator = {
     SL_DATALOCATOR_IODEVICE, SL_IODEVICE_AUDIOINPUT,
     SL_DEFAULTDEVICEID_AUDIOINPUT, NULL };
@@ -420,35 +529,52 @@ bool OpenSlesInput::CreateAudioRecorder(
                                                &sles_recorder_,
                                                &audio_source,
                                                &audio_sink,
                                                kNumInterfaces,
                                                id,
                                                req),
       false);
 
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+  SetupVoiceMode();
+#endif
+
   // Realize the recorder in synchronous mode.
   OPENSL_RETURN_ON_FAILURE((*sles_recorder_)->Realize(sles_recorder_,
                                                       SL_BOOLEAN_FALSE),
                            false);
+
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+  SetupAECAndNS();
+#endif
+
   OPENSL_RETURN_ON_FAILURE(
       (*sles_recorder_)->GetInterface(sles_recorder_, SL_IID_RECORD_,
                                       static_cast<void*>(&sles_recorder_itf_)),
       false);
   OPENSL_RETURN_ON_FAILURE(
       (*sles_recorder_)->GetInterface(
           sles_recorder_,
           SL_IID_ANDROIDSIMPLEBUFFERQUEUE_,
           static_cast<void*>(&sles_recorder_sbq_itf_)),
       false);
   return true;
 }
 
 void OpenSlesInput::DestroyAudioRecorder() {
   event_.Stop();
+
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+  delete aec_;
+  delete ns_;
+  aec_ = NULL;
+  ns_ = NULL;
+#endif
+
   if (sles_recorder_sbq_itf_) {
     // Release all buffers currently queued up.
     OPENSL_RETURN_ON_FAILURE(
         (*sles_recorder_sbq_itf_)->Clear(sles_recorder_sbq_itf_),
         VOID_RETURN);
     sles_recorder_sbq_itf_ = NULL;
   }
   sles_recorder_itf_ = NULL;
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
@@ -12,16 +12,18 @@
 #define WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_OPENSLES_INPUT_H_
 
 #include <SLES/OpenSLES.h>
 #include <SLES/OpenSLES_Android.h>
 #include <SLES/OpenSLES_AndroidConfiguration.h>
 
 #if !defined(WEBRTC_GONK)
 #include "webrtc/modules/audio_device/android/audio_manager_jni.h"
+#else
+#include "media/AudioEffect.h"
 #endif
 #include "webrtc/modules/audio_device/android/low_latency_event.h"
 #include "webrtc/modules/audio_device/include/audio_device.h"
 #include "webrtc/modules/audio_device/include/audio_device_defines.h"
 #include "webrtc/system_wrappers/interface/scoped_ptr.h"
 
 namespace webrtc {
 
@@ -138,16 +140,21 @@ class OpenSlesInput {
   void CalculateNumFifoBuffersNeeded();
   void AllocateBuffers();
   int TotalBuffersUsed() const;
   bool EnqueueAllBuffers();
   // This function also configures the audio recorder, e.g. sample rate to use
   // etc, so it should be called when starting recording.
   bool CreateAudioRecorder();
   void DestroyAudioRecorder();
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+  void SetupAECAndNS();
+  void SetupVoiceMode();
+  bool CheckPlatformAEC();
+#endif
 
   // When overrun happens there will be more frames received from OpenSL than
   // the desired number of buffers. It is possible to expand the number of
   // buffers as you go but that would greatly increase the complexity of this
   // code. HandleOverrun gracefully handles the scenario by restarting playout,
   // throwing away all pending audio data. This will sound like a click. This
   // is also logged to identify these types of clicks.
   // This function returns true if there has been overrun. Further processing
@@ -215,16 +222,20 @@ class OpenSlesInput {
   // next time RecorderSimpleBufferQueueCallbackHandler is invoked.
   // Ready means buffer contains audio data from the device.
   int active_queue_;
 
   // Audio settings
   uint32_t rec_sampling_rate_;
   bool agc_enabled_;
 
+#if defined(WEBRTC_GONK) && defined(WEBRTC_HARDWARE_AEC_NS)
+  android::AudioEffect* aec_;
+  android::AudioEffect* ns_;
+#endif
   // Audio status
   uint16_t recording_delay_;
 
   // dlopen for OpenSLES
   void *opensles_lib_;
   typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
                                        SLuint32,
                                        const SLEngineOption *,
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
@@ -47,16 +47,21 @@
         'dummy/audio_device_utility_dummy.h',
       ],
       'conditions': [
         ['build_with_mozilla==1', {
           'cflags_mozilla': [
             '$(NSPR_CFLAGS)',
           ],
         }],
+        ['hardware_aec_ns==1', {
+          'defines': [
+            'WEBRTC_HARDWARE_AEC_NS',
+          ],
+        }],
         ['OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1', {
           'include_dirs': [
             'linux',
           ],
         }], # OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1
         ['OS=="ios"', {
           'include_dirs': [
             'ios',
@@ -76,17 +81,20 @@
           'include_dirs': [
             '/widget/android',
             'android',
           ],
         }], # OS==android
         ['moz_widget_toolkit_gonk==1', {
           'cflags_mozilla': [
             '-I$(ANDROID_SOURCE)/frameworks/wilhelm/include',
+            '-I$(ANDROID_SOURCE)/frameworks/av/include',
             '-I$(ANDROID_SOURCE)/system/media/wilhelm/include',
+            '-I$(ANDROID_SOURCE)/system/media/audio_effects/include',
+            '-I$(ANDROID_SOURCE)/frameworks/native/include',
           ],
           'include_dirs': [
             'android',
           ],
         }], # moz_widget_toolkit_gonk==1
         ['enable_android_opensl==1', {
           'include_dirs': [
             'opensl',
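
The extra -I paths added above are what let the WEBRTC_HARDWARE_AEC_NS code find media/AudioEffect.h and the audio_effects headers it includes; they assume an AOSP-style B2G checkout. A hypothetical sanity check, assuming ANDROID_SOURCE points at the platform source tree referenced by those cflags:

    ls "$ANDROID_SOURCE/frameworks/av/include/media/AudioEffect.h" \
       "$ANDROID_SOURCE/system/media/audio_effects/include/audio_effects/effect_aec.h" \
       "$ANDROID_SOURCE/system/media/audio_effects/include/audio_effects/effect_ns.h"
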
--- a/modules/libpref/init/all.js
+++ b/modules/libpref/init/all.js
@@ -341,21 +341,26 @@ pref("media.peerconnection.use_document_
 // Do not enable identity before fixing domain comparison: see Bug 958741
 // Do not enable identity before fixing origin spoofing: see Bug 968335
 pref("media.peerconnection.identity.enabled", false);
 pref("media.peerconnection.identity.timeout", 10000);
 // These values (aec, agc, and noise) are from media/webrtc/trunk/webrtc/common_types.h
 // kXxxUnchanged = 0, kXxxDefault = 1, and higher values are specific to each
 // setting (for Xxx = Ec, Agc, or Ns).  Defaults are all set to kXxxDefault here.
 pref("media.peerconnection.turn.disable", false);
+#if defined(MOZ_WEBRTC_HARDWARE_AEC_NS)
+pref("media.getusermedia.aec_enabled", false);
+pref("media.getusermedia.noise_enabled", false);
+#else
 pref("media.getusermedia.aec_enabled", true);
+pref("media.getusermedia.noise_enabled", true);
+#endif
+pref("media.getusermedia.noise", 1);
 pref("media.getusermedia.agc_enabled", false);
 pref("media.getusermedia.agc", 1);
-pref("media.getusermedia.noise_enabled", true);
-pref("media.getusermedia.noise", 1);
 // Adjustments for OS-specific input delay (lower bound)
 // Adjustments for OS-specific AudioStream+cubeb+output delay (lower bound)
 #if defined(XP_MACOSX)
 pref("media.peerconnection.capture_delay", 50);
 pref("media.getusermedia.playout_delay", 10);
 #elif defined(XP_WIN)
 pref("media.peerconnection.capture_delay", 50);
 pref("media.getusermedia.playout_delay", 40);