Bug 815905 - Provide JNI as a fallback if OpenSLES opening fails. r=jesup
author Gian-Carlo Pascutto <gpascutto@mozilla.com>
Tue, 16 Apr 2013 15:47:41 +0200
changeset 139884 bc114c0cf8a7fad4f5b9e2c7624b38cb22b28fb2
parent 139883 a3fe83ef26e321b6e51a7afd0ddfd3123edb87da
child 139885 15553a58337e18435a03876549b924391e504cc2
push id 2579
push user akeybl@mozilla.com
push date Mon, 24 Jun 2013 18:52:47 +0000
treeherder mozilla-beta@b69b7de8a05a
reviewers jesup
bugs 815905
milestone 23.0a1
Bug 815905 - Provide JNI as a fallback if OpenSLES opening fails. r=jesup
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
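
The core of the change in audio_device_impl.cc is a runtime availability probe: libOpenSLES.so is dlopen()'d before the OpenSL ES backend is instantiated, and the JNI-backed device is used instead when the library cannot be loaded (e.g. on Android 2.2). The following is a minimal standalone sketch of that selection pattern, not the WebRTC code itself; OpenSlesDevice, JniDevice and CreateAudioDevice are hypothetical names used only for illustration.

// Sketch of the dlopen-based fallback pattern introduced by this patch.
// OpenSlesDevice, JniDevice and CreateAudioDevice are hypothetical names,
// not part of the WebRTC tree. Build with -ldl on platforms that need it.
#include <dlfcn.h>
#include <cstdio>
#include <memory>

struct AudioDevice {
  virtual ~AudioDevice() {}
  virtual const char* Name() const = 0;
};

struct OpenSlesDevice : AudioDevice {
  const char* Name() const override { return "OpenSL ES"; }
};

struct JniDevice : AudioDevice {
  const char* Name() const override { return "JNI (AudioTrack/AudioRecord)"; }
};

// Prefer OpenSL ES, but only if libOpenSLES.so can actually be loaded;
// otherwise fall back to the JNI-backed implementation.
std::unique_ptr<AudioDevice> CreateAudioDevice() {
  void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
  if (opensles_lib != NULL) {
    // The library is present; close the probe handle and use OpenSL ES.
    dlclose(opensles_lib);
    return std::unique_ptr<AudioDevice>(new OpenSlesDevice());
  }
  // libOpenSLES.so is missing: use the JNI path.
  return std::unique_ptr<AudioDevice>(new JniDevice());
}

int main() {
  std::unique_ptr<AudioDevice> device = CreateAudioDevice();
  std::printf("Selected audio backend: %s\n", device->Name());
  return 0;
}
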
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
@@ -19,25 +19,16 @@
 #include "critical_section_wrapper.h"
 
 #include <jni.h> // For accessing AudioDeviceAndroid java class
 
 namespace webrtc
 {
 class EventWrapper;
 
-const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-
-const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
-const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
-
-const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
-
-
 class ThreadWrapper;
 
 class AudioDeviceAndroidJni : public AudioDeviceGeneric {
  public:
   AudioDeviceAndroidJni(const WebRtc_Word32 id);
   ~AudioDeviceAndroidJni();
 
   static WebRtc_Word32 SetAndroidAudioDeviceObjects(void* javaVM,
@@ -154,16 +145,24 @@ class AudioDeviceAndroidJni : public Aud
   virtual WebRtc_Word32 SetRecordingSampleRate(
       const WebRtc_UWord32 samplesPerSec);
   virtual WebRtc_Word32 SetPlayoutSampleRate(
       const WebRtc_UWord32 samplesPerSec);
 
   virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
   virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;
 
+  static const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+  static const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+
+  static const WebRtc_UWord32 N_REC_CHANNELS = 1; // default is mono recording
+  static const WebRtc_UWord32 N_PLAY_CHANNELS = 1; // default is mono playout
+
+  static const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+
  private:
   // Lock
   void Lock() {
     _critSect.Enter();
   };
   void UnLock() {
     _critSect.Leave();
   };
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
@@ -23,46 +23,16 @@
 
 #include "modules/audio_device/audio_device_generic.h"
 #include "system_wrappers/interface/critical_section_wrapper.h"
 
 namespace webrtc {
 
 class EventWrapper;
 
-const WebRtc_UWord32 N_MAX_INTERFACES = 3;
-const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
-const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
-
-const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000;  // Default fs
-const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000;  // Default fs
-
-const WebRtc_UWord32 N_REC_CHANNELS = 1;
-const WebRtc_UWord32 N_PLAY_CHANNELS = 1;
-
-const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480;
-const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
-
-const WebRtc_UWord32 REC_MAX_TEMP_BUF_SIZE_PER_10ms =
-    N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
-
-const WebRtc_UWord32 PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
-    N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
-
-// Number of the buffers in playout queue
-const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 8;
-// Number of buffers in recording queue
-// TODO(xian): Reduce the numbers of buffers to improve the latency.
-const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 16;
-// Some values returned from getMinBufferSize
-// (Nexus S playout  72ms, recording 64ms)
-// (Galaxy,         167ms,           44ms)
-// (Nexus 7,         72ms,           48ms)
-// (Xoom             92ms,           40ms)
-
 class ThreadWrapper;
 
 class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric {
  public:
   explicit AudioDeviceAndroidOpenSLES(const WebRtc_Word32 id);
   ~AudioDeviceAndroidOpenSLES();
 
   // Retrieve the currently utilized audio layer
@@ -206,16 +176,46 @@ class AudioDeviceAndroidOpenSLES: public
 
   // Attach audio buffer
   virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
   // Speaker audio routing
   virtual WebRtc_Word32 SetLoudspeakerStatus(bool enable);
   virtual WebRtc_Word32 GetLoudspeakerStatus(bool& enable) const;  // NOLINT
 
+  static const WebRtc_UWord32 N_MAX_INTERFACES = 3;
+  static const WebRtc_UWord32 N_MAX_OUTPUT_DEVICES = 6;
+  static const WebRtc_UWord32 N_MAX_INPUT_DEVICES = 3;
+
+  static const WebRtc_UWord32 N_REC_SAMPLES_PER_SEC = 16000;  // Default fs
+  static const WebRtc_UWord32 N_PLAY_SAMPLES_PER_SEC = 16000;  // Default fs
+
+  static const WebRtc_UWord32 N_REC_CHANNELS = 1;
+  static const WebRtc_UWord32 N_PLAY_CHANNELS = 1;
+
+  static const WebRtc_UWord32 REC_BUF_SIZE_IN_SAMPLES = 480;
+  static const WebRtc_UWord32 PLAY_BUF_SIZE_IN_SAMPLES = 480;
+
+  static const WebRtc_UWord32 REC_MAX_TEMP_BUF_SIZE_PER_10ms =
+      N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
+
+  static const WebRtc_UWord32 PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
+      N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
+
+  // Number of the buffers in playout queue
+  static const WebRtc_UWord16 N_PLAY_QUEUE_BUFFERS = 8;
+  // Number of buffers in recording queue
+  // TODO(xian): Reduce the numbers of buffers to improve the latency.
+  static const WebRtc_UWord16 N_REC_QUEUE_BUFFERS = 16;
+  // Some values returned from getMinBufferSize
+  // (Nexus S playout  72ms, recording 64ms)
+  // (Galaxy,         167ms,           44ms)
+  // (Nexus 7,         72ms,           48ms)
+  // (Xoom             92ms,           40ms)
+
  private:
   // Lock
   void Lock() {
     crit_sect_.Enter();
   };
   void UnLock() {
     crit_sect_.Leave();
   };
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
@@ -105,38 +105,29 @@
             'win/audio_device_wave_win.cc',
             'win/audio_device_wave_win.h',
             'win/audio_device_utility_win.cc',
             'win/audio_device_utility_win.h',
             'win/audio_mixer_manager_win.cc',
             'win/audio_mixer_manager_win.h',
             'android/audio_device_utility_android.cc',
             'android/audio_device_utility_android.h',
+            'android/audio_device_opensles_android.cc',
+            'android/audio_device_opensles_android.h',
+            'android/audio_device_jni_android.cc',
+            'android/audio_device_jni_android.h',
           ],
           'conditions': [
             ['OS=="android"', {
               'link_settings': {
                 'libraries': [
                   '-llog',
                   '-lOpenSLES',
                 ],
               },
-              'conditions': [
-                ['enable_android_opensl==1', {
-                  'sources': [
-                    'android/audio_device_opensles_android.cc',
-                    'android/audio_device_opensles_android.h',
-                  ],
-                }, {
-                  'sources': [
-                    'android/audio_device_jni_android.cc',
-                    'android/audio_device_jni_android.h',
-                  ],
-                }],
-              ],
             }],
             ['OS=="linux"', {
               'defines': [
                 'LINUX_ALSA',
               ],
               'link_settings': {
                 'libraries': [
                   '-ldl',
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
@@ -19,18 +19,20 @@
 #if defined(_WIN32)
     #include "audio_device_utility_win.h"
     #include "audio_device_wave_win.h"
  #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
     #include "audio_device_core_win.h"
  #endif
 #elif defined(WEBRTC_ANDROID_OPENSLES)
     #include <stdlib.h>
+    #include <dlfcn.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_opensles_android.h"
+    #include "audio_device_jni_android.h"
 #elif defined(WEBRTC_ANDROID)
     #include <stdlib.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_jni_android.h"
 #elif defined(WEBRTC_LINUX)
     #include "audio_device_utility_linux.h"
  #if defined(LINUX_ALSA)
     #include "audio_device_alsa_linux.h"
@@ -254,48 +256,53 @@ WebRtc_Word32 AudioDeviceModuleImpl::Cre
         //
         ptrAudioDeviceUtility = new AudioDeviceUtilityWindows(Id());
     }
 #endif  // #if defined(_WIN32)
 
     // Create the *Android OpenSLES* implementation of the Audio Device
     //
 #if defined(WEBRTC_ANDROID_OPENSLES)
-    if (audioLayer == kPlatformDefaultAudio)
-    {
-        // Create *Android OpenELSE Audio* implementation
-        ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
-                     "Android OpenSLES Audio APIs will be utilized");
+    // Check if the OpenSLES library is available before going further.
+    void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
+    if (opensles_lib) {
+        // That worked, close for now and proceed normally.
+        dlclose(opensles_lib);
+        if (audioLayer == kPlatformDefaultAudio)
+        {
+            // Create *Android OpenSLES Audio* implementation
+            ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "Android OpenSLES Audio APIs will be utilized");
+        }
     }
 
     if (ptrAudioDevice != NULL)
     {
         // Create the Android implementation of the Device Utility.
         ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
     }
-    // END #if defined(WEBRTC_ANDROID_OPENSLES)
+#elif defined(WEBRTC_ANDROID_OPENSLES) or defined(WEBRTC_ANDROID)
+    // Fall back to this case if on Android 2.2/OpenSLES not available.
+    if (ptrAudioDevice == NULL) {
+        // Create the *Android Java* implementation of the Audio Device
+        if (audioLayer == kPlatformDefaultAudio)
+        {
+            // Create *Android JNI Audio* implementation
+            ptrAudioDevice = new AudioDeviceAndroidJni(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+        }
 
-    // Create the *Android Java* implementation of the Audio Device
-    //
-#elif defined(WEBRTC_ANDROID)
-    if (audioLayer == kPlatformDefaultAudio)
-    {
-        // Create *Android JNI Audio* implementation
-        ptrAudioDevice = new AudioDeviceAndroidJni(Id());
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+        if (ptrAudioDevice != NULL)
+        {
+            // Create the Android implementation of the Device Utility.
+            ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+        }
     }
 
-    if (ptrAudioDevice != NULL)
-    {
-        // Create the Android implementation of the Device Utility.
-        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
-    }
-    // END #if defined(WEBRTC_ANDROID)
-
     // Create the *Linux* implementation of the Audio Device
     //
 #elif defined(WEBRTC_LINUX)
     if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
     {
 #if defined(LINUX_PULSE)
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Linux PulseAudio APIs...");