Bug 974378 - Make webrtc.org OpenSL ES output code optional. Increase input buffers. r=jesup
authorGian-Carlo Pascutto <gpascutto@mozilla.com>
Wed, 26 Feb 2014 19:55:07 +0100
changeset 187988 953f462e19acb0d48c0d8f5b99ec05fddf219ecd
parent 187987 9d58d9a2c8b168dc17c45c79cbd5053260cf1d4b
child 187989 36b02f8ee7738ad84a968cafd7fd4e17aa7693f7
push id: 3503
push user: raliiev@mozilla.com
push date: Mon, 28 Apr 2014 18:51:11 +0000
treeherder: mozilla-beta@c95ac01e332e [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jesup
bugs: 974378
milestone: 30.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 974378 - Make webrtc.org OpenSL ES output code optional. Increase input buffers. r=jesup
build/gyp.mozbuild
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
--- a/build/gyp.mozbuild
+++ b/build/gyp.mozbuild
@@ -12,16 +12,17 @@ gyp_vars = {
     'include_alsa_audio': 1 if CONFIG['MOZ_ALSA'] else 0,
     'include_pulse_audio': 1 if CONFIG['MOZ_PULSEAUDIO'] else 0,
     # basic stuff for everything
     'include_internal_video_render': 0,
     'clang_use_chrome_plugins': 0,
     'enable_protobuf': 0,
     'include_tests': 0,
     'enable_android_opensl': 1,
+    'enable_android_opensl_output': 0,
     # use_system_lib* still seems to be in use in trunk/build
     'use_system_libjpeg': 0,
     'use_system_libvpx': 0,
     'build_libjpeg': 0,
     'build_libvpx': 0,
     'build_libyuv': 0,
     'libyuv_dir': '/media/libyuv',
     'yuv_disable_avx2': 0 if CONFIG['HAVE_X86_AVX2'] else 1,
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
@@ -11,116 +11,173 @@
 #include "webrtc/modules/audio_device/android/audio_device_opensles_android.h"
 
 #include "webrtc/modules/audio_device/android/opensles_input.h"
 #include "webrtc/modules/audio_device/android/opensles_output.h"
 
 namespace webrtc {
 
 AudioDeviceAndroidOpenSLES::AudioDeviceAndroidOpenSLES(const int32_t id)
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
     : output_(id),
-      input_(id, &output_) {
+      input_(id, &output_)
+#else
+    : input_(id, 0)
+#endif
+{
 }
 
 AudioDeviceAndroidOpenSLES::~AudioDeviceAndroidOpenSLES() {
 }
 
 int32_t AudioDeviceAndroidOpenSLES::ActiveAudioLayer(
     AudioDeviceModule::AudioLayer& audioLayer) const { // NOLINT
   return 0;
 }
 
 int32_t AudioDeviceAndroidOpenSLES::Init() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.Init() | input_.Init();
+#else
+  return input_.Init();
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::Terminate()  {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.Terminate() | input_.Terminate();
+#else
+  return input_.Terminate();
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::Initialized() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.Initialized() && input_.Initialized();
+#else
+  return input_.Initialized();
+#endif
 }
 
 int16_t AudioDeviceAndroidOpenSLES::PlayoutDevices() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutDevices();
+#else
+  return 0;
+#endif
 }
 
 int16_t AudioDeviceAndroidOpenSLES::RecordingDevices() {
   return input_.RecordingDevices();
 }
 
 int32_t AudioDeviceAndroidOpenSLES::PlayoutDeviceName(
     uint16_t index,
     char name[kAdmMaxDeviceNameSize],
     char guid[kAdmMaxGuidSize]) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutDeviceName(index, name, guid);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::RecordingDeviceName(
     uint16_t index,
     char name[kAdmMaxDeviceNameSize],
     char guid[kAdmMaxGuidSize]) {
   return input_.RecordingDeviceName(index, name, guid);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetPlayoutDevice(uint16_t index) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetPlayoutDevice(index);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetPlayoutDevice(
     AudioDeviceModule::WindowsDeviceType device) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetPlayoutDevice(device);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetRecordingDevice(uint16_t index) {
   return input_.SetRecordingDevice(index);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetRecordingDevice(
     AudioDeviceModule::WindowsDeviceType device) {
   return input_.SetRecordingDevice(device);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::PlayoutIsAvailable(
     bool& available) {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutIsAvailable(available);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::InitPlayout() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.InitPlayout();
+#else
+  return -1;
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::PlayoutIsInitialized() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutIsInitialized();
+#else
+  return false;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::RecordingIsAvailable(
     bool& available) {  // NOLINT
   return input_.RecordingIsAvailable(available);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::InitRecording() {
   return input_.InitRecording();
 }
 
 bool AudioDeviceAndroidOpenSLES::RecordingIsInitialized() const {
   return input_.RecordingIsInitialized();
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StartPlayout() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.StartPlayout();
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StopPlayout() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.StopPlayout();
+#else
+  return -1;
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::Playing() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.Playing();
+#else
+  return false;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StartRecording() {
   return input_.StartRecording();
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StopRecording() {
   return input_.StopRecording();
@@ -146,25 +203,37 @@ int32_t AudioDeviceAndroidOpenSLES::SetW
 int32_t AudioDeviceAndroidOpenSLES::WaveOutVolume(
     uint16_t& volumeLeft,           // NOLINT
     uint16_t& volumeRight) const {  // NOLINT
   return -1;
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerIsAvailable(
     bool& available) {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerIsAvailable(available);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::InitSpeaker() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.InitSpeaker();
+#else
+  return -1;
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::SpeakerIsInitialized() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerIsInitialized();
+#else
+  return false;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::MicrophoneIsAvailable(
     bool& available) {  // NOLINT
   return input_.MicrophoneIsAvailable(available);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::InitMicrophone() {
@@ -172,41 +241,65 @@ int32_t AudioDeviceAndroidOpenSLES::Init
 }
 
 bool AudioDeviceAndroidOpenSLES::MicrophoneIsInitialized() const {
   return input_.MicrophoneIsInitialized();
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerVolumeIsAvailable(
     bool& available) {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerVolumeIsAvailable(available);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetSpeakerVolume(uint32_t volume) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetSpeakerVolume(volume);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerVolume(
     uint32_t& volume) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerVolume(volume);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::MaxSpeakerVolume(
     uint32_t& maxVolume) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.MaxSpeakerVolume(maxVolume);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::MinSpeakerVolume(
     uint32_t& minVolume) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.MinSpeakerVolume(minVolume);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerVolumeStepSize(
     uint16_t& stepSize) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerVolumeStepSize(stepSize);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::MicrophoneVolumeIsAvailable(
     bool& available) {  // NOLINT
   return input_.MicrophoneVolumeIsAvailable(available);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetMicrophoneVolume(uint32_t volume) {
@@ -230,26 +323,38 @@ int32_t AudioDeviceAndroidOpenSLES::MinM
 
 int32_t AudioDeviceAndroidOpenSLES::MicrophoneVolumeStepSize(
     uint16_t& stepSize) const {  // NOLINT
   return input_.MicrophoneVolumeStepSize(stepSize);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerMuteIsAvailable(
     bool& available) {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerMuteIsAvailable(available);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetSpeakerMute(bool enable) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetSpeakerMute(enable);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SpeakerMute(
     bool& enabled) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SpeakerMute(enabled);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::MicrophoneMuteIsAvailable(
     bool& available) {  // NOLINT
   return input_.MicrophoneMuteIsAvailable(available);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetMicrophoneMute(bool enable) {
@@ -272,26 +377,38 @@ int32_t AudioDeviceAndroidOpenSLES::SetM
 
 int32_t AudioDeviceAndroidOpenSLES::MicrophoneBoost(
     bool& enabled) const {  // NOLINT
   return input_.MicrophoneBoost(enabled);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StereoPlayoutIsAvailable(
     bool& available) {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.StereoPlayoutIsAvailable(available);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetStereoPlayout(bool enable) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetStereoPlayout(enable);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StereoPlayout(
     bool& enabled) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.StereoPlayout(enabled);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::StereoRecordingIsAvailable(
     bool& available) {  // NOLINT
   return input_.StereoRecordingIsAvailable(available);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetStereoRecording(bool enable) {
@@ -301,80 +418,118 @@ int32_t AudioDeviceAndroidOpenSLES::SetS
 int32_t AudioDeviceAndroidOpenSLES::StereoRecording(
     bool& enabled) const {  // NOLINT
   return input_.StereoRecording(enabled);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetPlayoutBuffer(
     const AudioDeviceModule::BufferType type,
     uint16_t sizeMS) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetPlayoutBuffer(type, sizeMS);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::PlayoutBuffer(
     AudioDeviceModule::BufferType& type,
     uint16_t& sizeMS) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutBuffer(type, sizeMS);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::PlayoutDelay(
     uint16_t& delayMS) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutDelay(delayMS);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::RecordingDelay(
     uint16_t& delayMS) const {  // NOLINT
   return input_.RecordingDelay(delayMS);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::CPULoad(
     uint16_t& load) const {  // NOLINT
   return -1;
 }
 
 bool AudioDeviceAndroidOpenSLES::PlayoutWarning() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutWarning();
+#else
+  return false;
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::PlayoutError() const {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.PlayoutError();
+#else
+  return false;
+#endif
 }
 
 bool AudioDeviceAndroidOpenSLES::RecordingWarning() const {
   return input_.RecordingWarning();
 }
 
 bool AudioDeviceAndroidOpenSLES::RecordingError() const {
   return input_.RecordingError();
 }
 
 void AudioDeviceAndroidOpenSLES::ClearPlayoutWarning() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.ClearPlayoutWarning();
+#else
+  return;
+#endif
 }
 
 void AudioDeviceAndroidOpenSLES::ClearPlayoutError() {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.ClearPlayoutError();
+#else
+  return;
+#endif
 }
 
 void AudioDeviceAndroidOpenSLES::ClearRecordingWarning() {
   return input_.ClearRecordingWarning();
 }
 
 void AudioDeviceAndroidOpenSLES::ClearRecordingError() {
   return input_.ClearRecordingError();
 }
 
 void AudioDeviceAndroidOpenSLES::AttachAudioBuffer(
     AudioDeviceBuffer* audioBuffer) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   output_.AttachAudioBuffer(audioBuffer);
+#endif
   input_.AttachAudioBuffer(audioBuffer);
 }
 
 int32_t AudioDeviceAndroidOpenSLES::SetLoudspeakerStatus(bool enable) {
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.SetLoudspeakerStatus(enable);
+#else
+  return -1;
+#endif
 }
 
 int32_t AudioDeviceAndroidOpenSLES::GetLoudspeakerStatus(
     bool& enable) const {  // NOLINT
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   return output_.GetLoudspeakerStatus(enable);
+#else
+  return -1;
+#endif
 }
 
 }  // namespace webrtc
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
@@ -150,15 +150,17 @@ class AudioDeviceAndroidOpenSLES : publi
   // Attach audio buffer
   virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
   // Speaker audio routing
   virtual int32_t SetLoudspeakerStatus(bool enable);
   virtual int32_t GetLoudspeakerStatus(bool& enable) const;
 
  private:
+#ifdef WEBRTC_ANDROID_OPENSLES_OUTPUT
   OpenSlesOutput output_;
+#endif
   OpenSlesInput input_;
 };
 
 }  // namespace webrtc
 
 #endif  // WEBRTC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_OPENSLES_ANDROID_H_
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.cc
@@ -549,16 +549,17 @@ bool OpenSlesInput::CbThreadImpl() {
   CriticalSectionScoped lock(crit_sect_.get());
   if (HandleOverrun(event_id, event_msg)) {
     return recording_;
   }
   // If the fifo_ has audio data process it.
   while (fifo_->size() > 0 && recording_) {
     int8_t* audio = fifo_->Pop();
     audio_buffer_->SetRecordedBuffer(audio, buffer_size_samples());
-    audio_buffer_->SetVQEData(delay_provider_->PlayoutDelayMs(),
+    audio_buffer_->SetVQEData(delay_provider_ ?
+                              delay_provider_->PlayoutDelayMs() : 0,
                               recording_delay_, 0);
     audio_buffer_->DeliverRecordedData();
   }
   return recording_;
 }
 
 }  // namespace webrtc
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/opensles_input.h
@@ -116,17 +116,17 @@ class OpenSlesInput {
   void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
  private:
   enum {
     kNumInterfaces = 2,
     // Keep as few OpenSL buffers as possible to avoid wasting memory. 2 is
     // minimum for playout. Keep 2 for recording as well.
     kNumOpenSlBuffers = 2,
-    kNum10MsToBuffer = 4,
+    kNum10MsToBuffer = 8,
   };
 
   int InitSampleRate();
   int buffer_size_samples() const;
   int buffer_size_bytes() const;
   void UpdateRecordingDelay();
   void UpdateSampleRate();
   void CalculateNumFifoBuffersNeeded();
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
@@ -148,31 +148,38 @@
                     'opensl/fine_audio_buffer.cc',
                     'opensl/fine_audio_buffer.h',
                     'opensl/low_latency_event_posix.cc',
                     'opensl/low_latency_event.h',
                     'opensl/opensles_common.cc',
                     'opensl/opensles_common.h',
                     'opensl/opensles_input.cc',
                     'opensl/opensles_input.h',
-                    'opensl/opensles_output.cc',
                     'opensl/opensles_output.h',
                     'opensl/single_rw_fifo.cc',
                     'opensl/single_rw_fifo.h',
 		    'shared/audio_device_utility_shared.cc',
 		    'shared/audio_device_utility_shared.h',
                   ],
                 }, {
                   'sources': [
 		    'shared/audio_device_utility_shared.cc',
 		    'shared/audio_device_utility_shared.h',
 		    'android/audio_device_jni_android.cc',
 		    'android/audio_device_jni_android.h',
                   ],
                 }],
+                ['enable_android_opensl_output==1', {
+                  'sources': [
+                    'opensl/opensles_output.cc'
+                  ],
+                  'defines': [
+                    'WEBRTC_ANDROID_OPENSLES_OUTPUT',
+                  ]},
+                ],
               ],
             }],
             ['OS=="linux"', {
               'link_settings': {
                 'libraries': [
                   '-ldl','-lX11',
                 ],
               },