bug 880879: Rollup of changes previously applied to media/webrtc/trunk/webrtc rs=derf f=gcp r=jesup
author: Randell Jesup <rjesup@jesup.org>
date: Wed, 10 Jul 2013 03:12:59 -0400
changeset 150240 edd376cd77bfe3d0670939c5fbdac6e18bb123a6
parent 150239 6063eaf3633fec74f5289571c7dd2ef36ab07074
child 150241 1adc72e64db00ee68a106b878251e11f4433c05a
push id: 2859
push user: akeybl@mozilla.com
push date: Mon, 16 Sep 2013 19:14:59 +0000
treeherder: mozilla-beta@87d3c51cd2bf
reviewers: derf, jesup
bugs: 880879
milestone: 25.0a1
files:
media/webrtc/shared_libs.mk
media/webrtc/signaling/signaling.gyp
media/webrtc/trunk/webrtc/build/arm_neon.gypi
media/webrtc/trunk/webrtc/build/common.gypi
media/webrtc/trunk/webrtc/build/merge_libs.gyp
media/webrtc/trunk/webrtc/common_types.h
media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h
media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc
media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h
media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc
media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc
media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc
media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h
media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm
media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm
media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi
media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi
media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h
media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc
media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc
media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc
media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc
media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc
media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc
media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc
media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc
media/webrtc/trunk/webrtc/typedefs.h
media/webrtc/trunk/webrtc/video_engine/vie_defines.h
media/webrtc/trunk/webrtc/video_engine/vie_impl.cc
media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
media/webrtc/trunk/webrtc/voice_engine/voice_engine_core.gypi
media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc
--- a/media/webrtc/shared_libs.mk
+++ b/media/webrtc/shared_libs.mk
@@ -12,24 +12,24 @@ WEBRTC_LIBS = \
   $(call EXPAND_LIBNAME_PATH,signal_processing,$(DEPTH)/media/webrtc/trunk/webrtc/common_audio/common_audio_signal_processing) \
   $(call EXPAND_LIBNAME_PATH,G711,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_G711) \
   $(call EXPAND_LIBNAME_PATH,PCM16B,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_PCM16B) \
   $(call EXPAND_LIBNAME_PATH,NetEq,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_NetEq) \
   $(call EXPAND_LIBNAME_PATH,resampler,$(DEPTH)/media/webrtc/trunk/webrtc/common_audio/common_audio_resampler) \
   $(call EXPAND_LIBNAME_PATH,vad,$(DEPTH)/media/webrtc/trunk/webrtc/common_audio/common_audio_vad) \
   $(call EXPAND_LIBNAME_PATH,system_wrappers,$(DEPTH)/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers_system_wrappers) \
   $(call EXPAND_LIBNAME_PATH,webrtc_video_coding,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_video_coding) \
+  $(call EXPAND_LIBNAME_PATH,video_coding_utility,$(DEPTH)/media/webrtc/trunk/webrtc/modules/video_coding/utility/video_coding_utility_video_coding_utility) \
   $(call EXPAND_LIBNAME_PATH,webrtc_i420,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_i420) \
   $(call EXPAND_LIBNAME_PATH,webrtc_vp8,$(DEPTH)/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8_webrtc_vp8) \
   $(call EXPAND_LIBNAME_PATH,webrtc_opus,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_webrtc_opus) \
   $(call EXPAND_LIBNAME_PATH,video_render_module,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_render_module) \
   $(call EXPAND_LIBNAME_PATH,video_engine_core,$(DEPTH)/media/webrtc/trunk/webrtc/video_engine/video_engine_video_engine_core) \
   $(call EXPAND_LIBNAME_PATH,media_file,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_media_file) \
   $(call EXPAND_LIBNAME_PATH,rtp_rtcp,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_rtp_rtcp) \
-  $(call EXPAND_LIBNAME_PATH,udp_transport,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_udp_transport) \
   $(call EXPAND_LIBNAME_PATH,bitrate_controller,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_bitrate_controller) \
   $(call EXPAND_LIBNAME_PATH,remote_bitrate_estimator,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_remote_bitrate_estimator) \
   $(call EXPAND_LIBNAME_PATH,paced_sender,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_paced_sender) \
   $(call EXPAND_LIBNAME_PATH,video_processing,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_video_processing) \
   $(call EXPAND_LIBNAME_PATH,voice_engine_core,$(DEPTH)/media/webrtc/trunk/webrtc/voice_engine/voice_engine_voice_engine_core) \
   $(call EXPAND_LIBNAME_PATH,audio_conference_mixer,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_conference_mixer) \
   $(call EXPAND_LIBNAME_PATH,audio_device,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_device) \
   $(call EXPAND_LIBNAME_PATH,audio_processing,$(DEPTH)/media/webrtc/trunk/webrtc/modules/modules_audio_processing) \
--- a/media/webrtc/signaling/signaling.gyp
+++ b/media/webrtc/signaling/signaling.gyp
@@ -51,16 +51,17 @@
         './src/sipcc/cpr/include',
         '../../../ipc/chromium/src',
         '../../../ipc/chromium/src/base/third_party/nspr',
         '../../../xpcom/base',
         '$(DEPTH)/dist/include',
         '../../../dom/base',
         '../../../content/media',
         '../../../media/mtransport',
+        '../trunk',
         '../trunk/webrtc',
         '../trunk/webrtc/video_engine/include',
         '../trunk/webrtc/voice_engine/include',
         '../trunk/webrtc/modules/interface',
         '../trunk/webrtc/peerconnection',
         '../../../netwerk/srtp/src/include',
         '../../../netwerk/srtp/src/crypto/include',
         '../../../ipc/chromium/src',
--- a/media/webrtc/trunk/webrtc/build/arm_neon.gypi
+++ b/media/webrtc/trunk/webrtc/build/arm_neon.gypi
@@ -18,13 +18,35 @@
 #   ],
 #   'includes': ['path/to/this/gypi/file'],
 # }
 
 {
   'cflags!': [
     '-mfpu=vfpv3-d16',
   ],
+  'cflags_mozilla!': [
+    '-mfpu=vfpv3-d16',
+  ],
   'cflags': [
     '-mfpu=neon',
     '-flax-vector-conversions',
   ],
+  'cflags_mozilla': [
+    '-mfpu=neon',
+    '-flax-vector-conversions',
+  ],
+  'asflags!': [
+    '-mfpu=vfpv3-d16',
+  ],
+  'asflags_mozilla!': [
+    '-mfpu=vfpv3-d16',
+  ],
+  'asflags': [
+    '-mfpu=neon',
+    '-flax-vector-conversions',
+  ],
+  'asflags_mozilla': [
+    '-mfpu=neon',
+    '-flax-vector-conversions',
+  ],
+
 }
--- a/media/webrtc/trunk/webrtc/build/common.gypi
+++ b/media/webrtc/trunk/webrtc/build/common.gypi
@@ -30,23 +30,34 @@
           }],
         ],
       },
       'build_with_chromium%': '<(build_with_chromium)',
       'build_with_libjingle%': '<(build_with_libjingle)',
       'webrtc_root%': '<(webrtc_root)',
 
       'webrtc_vp8_dir%': '<(webrtc_root)/modules/video_coding/codecs/vp8',
+      'include_g711%': 1,
+      'include_g722%': 1,
+      'include_ilbc%': 1,
       'include_opus%': 1,
+      'include_isac%': 1,
+      'include_pcm16b%': 1,
     },
     'build_with_chromium%': '<(build_with_chromium)',
     'build_with_libjingle%': '<(build_with_libjingle)',
     'webrtc_root%': '<(webrtc_root)',
     'webrtc_vp8_dir%': '<(webrtc_vp8_dir)',
+
+    'include_g711%': '<(include_g711)',
+    'include_g722%': '<(include_g722)',
+    'include_ilbc%': '<(include_ilbc)',
     'include_opus%': '<(include_opus)',
+    'include_isac%': '<(include_isac)',
+    'include_pcm16b%': '<(include_pcm16b)',
 
     # The Chromium common.gypi we use treats all gyp files without
     # chromium_code==1 as third party code. This disables many of the
     # preferred warning settings.
     #
     # We can set this here to have WebRTC code treated as Chromium code. Our
     # third party code will still have the reduced warning settings.
     'chromium_code': 1,
@@ -115,16 +126,31 @@
         # flood of chromium-style warnings. Investigate enabling them:
         # http://code.google.com/p/webrtc/issues/detail?id=163
         'clang_use_chrome_plugins%': 0,
 
         # Switch between Android audio device OpenSL ES implementation
         # and Java Implementation
         'enable_android_opensl%': 0,
       }],
+      ['OS=="linux"', {
+        'include_alsa_audio%': 1,
+      }, {
+        'include_alsa_audio%': 0,
+      }],
+      ['OS=="solaris" or os_bsd==1', {
+        'include_pulse_audio%': 1,
+      }, {
+        'include_pulse_audio%': 0,
+      }],
+      ['OS=="linux" or OS=="solaris" or os_bsd==1', {
+        'include_v4l2_video_capture%': 1,
+      }, {
+        'include_v4l2_video_capture%': 0,
+      }],
       ['OS=="ios"', {
         'enable_video%': 0,
         'enable_protobuf%': 0,
         'build_libjpeg%': 0,
         'build_libyuv%': 0,
         'build_libvpx%': 0,
         'include_tests%': 0,
       }],
@@ -145,29 +171,28 @@
       '../..',
       # To include the top-level directory when building in Chrome, so we can
       # use full paths (e.g. headers inside testing/ or third_party/).
       '<(DEPTH)',
     ],
     'defines': [
       # TODO(leozwang): Run this as a gclient hook rather than at build-time:
       # http://code.google.com/p/webrtc/issues/detail?id=687
-      'WEBRTC_SVNREVISION="Unavailable(issue687)"',
+      'WEBRTC_SVNREVISION="\\\"Unavailable_issue687\\\""',
       #'WEBRTC_SVNREVISION="<!(python <(webrtc_root)/build/version.py)"',
     ],
     'conditions': [
+      ['moz_widget_toolkit_gonk==1', {
+        'defines' : [
+          'WEBRTC_GONK',
+        ],
+      }],
       ['enable_tracing==1', {
         'defines': ['WEBRTC_LOGGING',],
       }],
-      ['build_with_mozilla==1', {
-        'defines': [
-          # Changes settings for Mozilla build.
-          'WEBRTC_MOZILLA_BUILD',
-         ],
-      }],
       ['build_with_chromium==1', {
         'defines': [
           # Changes settings for Chromium build.
           'WEBRTC_CHROMIUM_BUILD',
         ],
       }, {
         'conditions': [
           ['os_posix==1', {
@@ -180,33 +205,52 @@
             ],
             'cflags_cc': [
               # This is enabled for clang; enable for gcc as well.
               '-Woverloaded-virtual',
             ],
           }],
         ],
       }],
+      ['build_with_mozilla==1', {
+        'defines': [
+          # Changes settings for Mozilla build.
+          'WEBRTC_MOZILLA_BUILD',
+        ],
+      }],
       ['target_arch=="arm"', {
         'defines': [
           'WEBRTC_ARCH_ARM',
         ],
         'conditions': [
           ['armv7==1', {
             'defines': ['WEBRTC_ARCH_ARM_V7',],
             'conditions': [
               ['arm_neon==1', {
-                'defines': ['WEBRTC_ARCH_ARM_NEON',],
-              }, {
-                'defines': ['WEBRTC_DETECT_ARM_NEON',],
+                'defines': ['WEBRTC_ARCH_ARM_NEON',
+                            'WEBRTC_BUILD_NEON_LIBS',
+                            'WEBRTC_DETECT_ARM_NEON'],
               }],
             ],
           }],
         ],
       }],
+      ['os_bsd==1', {
+        'defines': [
+          'WEBRTC_BSD',
+          'WEBRTC_THREAD_RR',
+        ],
+      }],
+      ['OS=="dragonfly" or OS=="netbsd"', {
+        'defines': [
+          # doesn't support pthread_condattr_setclock
+          'WEBRTC_CLOCK_TYPE_REALTIME',
+        ],
+      }],
+      # Mozilla: if we support Mozilla on MIPS, we'll need to mod the cflags entries here
       ['target_arch=="mipsel"', {
         'defines': [
           'MIPS32_LE',
         ],
         'conditions': [
           ['mips_fpu==1', {
             'defines': [
               'MIPS_FPU_LE',
@@ -259,16 +309,23 @@
         'defines': [
           'WEBRTC_MAC',
           'WEBRTC_IOS',
           'WEBRTC_THREAD_RR',
           'WEBRTC_CLOCK_TYPE_REALTIME',
         ],
       }],
       ['OS=="linux"', {
+        'conditions': [
+          ['have_clock_monotonic==1', {
+            'defines': [
+              'WEBRTC_CLOCK_TYPE_REALTIME',
+            ],
+          }],
+        ],
         'defines': [
           'WEBRTC_LINUX',
           'WEBRTC_THREAD_RR',
           # TODO(andrew): can we select this automatically?
           # Define this if the Linux system does not support CLOCK_MONOTONIC.
           #'WEBRTC_CLOCK_TYPE_REALTIME',
         ],
       }],
@@ -277,16 +334,17 @@
           'WEBRTC_MAC',
           'WEBRTC_THREAD_RR',
           'WEBRTC_CLOCK_TYPE_REALTIME',
         ],
       }],
       ['OS=="win"', {
         'defines': [
           'WEBRTC_WIN',
+          'WEBRTC_EXPORT',
         ],
         # TODO(andrew): enable all warnings when possible.
         # TODO(phoglund): get rid of 4373 supression when
         # http://code.google.com/p/webrtc/issues/detail?id=261 is solved.
         'msvs_disabled_warnings': [
           4373,  # legacy warning for ignoring const / volatile in signatures.
           4389,  # Signed/unsigned mismatch.
         ],
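
The armv7/arm_neon hunk above switches from an either/or (WEBRTC_ARCH_ARM_NEON vs. WEBRTC_DETECT_ARM_NEON) to defining WEBRTC_ARCH_ARM_NEON, WEBRTC_BUILD_NEON_LIBS, and WEBRTC_DETECT_ARM_NEON together: the NEON code paths are always compiled, and whether they run is decided at run time. A minimal sketch of that build-plus-detect pattern; the getauxval() probe is an assumption (Linux/Android, glibc >= 2.16), not the detector WebRTC itself uses:

#include <cstdio>

#if defined(__linux__) && defined(__arm__)
#include <sys/auxv.h>
#ifndef HWCAP_NEON
#define HWCAP_NEON (1 << 12)  // ARM hwcap bit for NEON
#endif
static bool CpuHasNeon() { return (getauxval(AT_HWCAP) & HWCAP_NEON) != 0; }
#else
static bool CpuHasNeon() { return false; }  // non-ARM hosts: scalar path only
#endif

// Stand-ins for the scalar and NEON variants a real build would link from
// the always-compiled NEON libraries.
static void ProcessScalar(const short* in, short* out, int n) {
  for (int i = 0; i < n; ++i) out[i] = in[i];
}
static void ProcessNeon(const short* in, short* out, int n) {
  ProcessScalar(in, out, n);  // placeholder body
}

int main() {
  short in[4] = {1, 2, 3, 4}, out[4];
  if (CpuHasNeon())
    ProcessNeon(in, out, 4);   // WEBRTC_DETECT_ARM_NEON-style runtime choice
  else
    ProcessScalar(in, out, 4);
  std::printf("neon=%d out[0]=%d\n", CpuHasNeon() ? 1 : 0, out[0]);
  return 0;
}
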
--- a/media/webrtc/trunk/webrtc/build/merge_libs.gyp
+++ b/media/webrtc/trunk/webrtc/build/merge_libs.gyp
@@ -39,10 +39,12 @@
           'outputs': ['<(output_lib)'],
           'action': ['python',
                      'merge_libs.py',
                      '<(PRODUCT_DIR)',
                      '<(output_lib)',],
         },
       ],
     },
+#      }],
+#    ],
   ],
 }
--- a/media/webrtc/trunk/webrtc/common_types.h
+++ b/media/webrtc/trunk/webrtc/common_types.h
@@ -324,17 +324,17 @@ typedef struct        // All levels are 
 enum NsModes    // type of Noise Suppression
 {
     kNsUnchanged = 0,   // previously set mode
     kNsDefault,         // platform default
     kNsConference,      // conferencing default
     kNsLowSuppression,  // lowest suppression
     kNsModerateSuppression,
     kNsHighSuppression,
-    kNsVeryHighSuppression,     // highest suppression
+    kNsVeryHighSuppression     // highest suppression
 };
 
 enum AgcModes                  // type of Automatic Gain Control
 {
     kAgcUnchanged = 0,        // previously set mode
     kAgcDefault,              // platform default
     // adaptive mode for use when analog volume control exists (e.g. for
     // PC softphone)
@@ -349,17 +349,17 @@ enum AgcModes                  // type o
 
 // EC modes
 enum EcModes                   // type of Echo Control
 {
     kEcUnchanged = 0,          // previously set mode
     kEcDefault,                // platform default
     kEcConference,             // conferencing default (aggressive AEC)
     kEcAec,                    // Acoustic Echo Cancellation
-    kEcAecm,                   // AEC mobile
+    kEcAecm                    // AEC mobile
 };
 
 // AECM modes
 enum AecmModes                 // mode of AECM
 {
     kAecmQuietEarpieceOrHeadset = 0,
                                // Quiet earpiece or headset use
     kAecmEarpiece,             // most earpiece use
@@ -401,31 +401,31 @@ enum NetEqModes             // NetEQ pla
     // Improved jitter robustness at the cost of increased delay. Can be
     // used in one-way communication.
     kNetEqStreaming = 1,
     // Optimzed for decodability of fax signals rather than for perceived audio
     // quality.
     kNetEqFax = 2,
     // Minimal buffer management. Inserts zeros for lost packets and during
     // buffer increases.
-    kNetEqOff = 3,
+    kNetEqOff = 3
 };
 
 enum OnHoldModes            // On Hold direction
 {
     kHoldSendAndPlay = 0,    // Put both sending and playing in on-hold state.
     kHoldSendOnly,           // Put only sending in on-hold state.
     kHoldPlayOnly            // Put only playing in on-hold state.
 };
 
 enum AmrMode
 {
     kRfc3267BwEfficient = 0,
     kRfc3267OctetAligned = 1,
-    kRfc3267FileStorage = 2,
+    kRfc3267FileStorage = 2
 };
 
 // ==================================================================
 // Video specific types
 // ==================================================================
 
 // Raw video types
 enum RawVideoType
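
The common_types.h hunks do one thing: they drop the comma after each enum's final enumerator. A trailing enumerator comma is only standard as of C99/C++11, so strict C++03 builds (e.g. -pedantic) warn or error on every consumer of the header. A small illustration, not from the patch:

// Compiled as C++03 with -pedantic, the first enum draws
// "comma at end of enumerator list"; the second is silent.
enum WithTrailingComma {
  kAlpha,
  kBeta,    // extension before C++11
};

enum WithoutTrailingComma {
  kGamma,
  kDelta    // portable
};

int main() { return static_cast<int>(kBeta) + static_cast<int>(kDelta); }
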
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/codecs/pcm16b/pcm16b.c
@@ -10,32 +10,28 @@
 
 
 #include "pcm16b.h"
 
 #include <stdlib.h>
 
 #include "typedefs.h"
 
-#ifdef WEBRTC_BIG_ENDIAN
-#include "signal_processing_library.h"
-#endif
-
 #define HIGHEND 0xFF00
 #define LOWEND    0xFF
 
 
 
 /* Encoder with int16_t Output */
 int16_t WebRtcPcm16b_EncodeW16(int16_t *speechIn16b,
                                int16_t len,
                                int16_t *speechOut16b)
 {
 #ifdef WEBRTC_BIG_ENDIAN
-    WEBRTC_SPL_MEMCPY_W16(speechOut16b, speechIn16b, len);
+    memcpy(speechOut16b, speechIn16b, len * sizeof(WebRtc_Word16));
 #else
     int i;
     for (i=0;i<len;i++) {
         speechOut16b[i]=(((uint16_t)speechIn16b[i])>>8)|((((uint16_t)speechIn16b[i])<<8)&0xFF00);
     }
 #endif
     return(len<<1);
 }
@@ -64,17 +60,17 @@ int16_t WebRtcPcm16b_Encode(int16_t *spe
 /* Decoder with int16_t Input instead of char when the int16_t Encoder is used */
 int16_t WebRtcPcm16b_DecodeW16(void *inst,
                                int16_t *speechIn16b,
                                int16_t len,
                                int16_t *speechOut16b,
                                int16_t* speechType)
 {
 #ifdef WEBRTC_BIG_ENDIAN
-    WEBRTC_SPL_MEMCPY_W8(speechOut16b, speechIn16b, ((len*sizeof(int16_t)+1)>>1));
+    memcpy(speechOut16b, speechIn16b, ((len*sizeof(WebRtc_Word16)+1)>>1));
 #else
     int i;
     int samples=len>>1;
 
     for (i=0;i<samples;i++) {
         speechOut16b[i]=(((uint16_t)speechIn16b[i])>>8)|(((uint16_t)(speechIn16b[i]&0xFF))<<8);
     }
 #endif
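
The pcm16b.c change replaces the signal-processing-library copy macros with plain memcpy on big-endian targets, dropping the header dependency; the little-endian branch is unchanged and still byte-swaps each sample so the encoded stream stays big-endian on the wire. A standalone sketch of that swap (SwapBytes16 is a hypothetical helper, not part of the patch):

#include <cstdint>
#include <cstdio>

// Same operation as the encoder loop above: swap the two bytes of a sample.
static uint16_t SwapBytes16(uint16_t v) {
  return static_cast<uint16_t>((v >> 8) | ((v << 8) & 0xFF00));
}

int main() {
  const int16_t sample = 0x1234;
  const uint16_t wire = SwapBytes16(static_cast<uint16_t>(sample));
  std::printf("host 0x%04x -> wire 0x%04x\n",
              static_cast<unsigned>(static_cast<uint16_t>(sample)),
              static_cast<unsigned>(wire));  // 0x1234 -> 0x3412
  return 0;
}
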
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/main/source/audio_coding_module.gypi
@@ -5,33 +5,78 @@
 # tree. An additional intellectual property rights grant can be found
 # in the file PATENTS.  All contributing project authors may
 # be found in the AUTHORS file in the root of the source tree.
 
 {
   'variables': {
     'audio_coding_dependencies': [
       'CNG',
-      'G711',
-      'G722',
-      'iLBC',
-      'iSAC',
-      'iSACFix',
-      'PCM16B',
       'NetEq',
       '<(webrtc_root)/common_audio/common_audio.gyp:resampler',
       '<(webrtc_root)/common_audio/common_audio.gyp:signal_processing',
       '<(webrtc_root)/common_audio/common_audio.gyp:vad',
       '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
     ],
     'audio_coding_defines': [],
     'conditions': [
       ['include_opus==1', {
         'audio_coding_dependencies': ['webrtc_opus',],
         'audio_coding_defines': ['WEBRTC_CODEC_OPUS',],
+        'audio_coding_sources': [
+          'acm_opus.cc',
+          'acm_opus.h',
+        ],
+      }],
+      ['include_g711==1', {
+        'audio_coding_dependencies': ['G711',],
+        'audio_coding_defines': ['WEBRTC_CODEC_G711',],
+        'audio_coding_sources': [
+          'acm_pcma.cc',
+          'acm_pcma.h',
+          'acm_pcmu.cc',
+          'acm_pcmu.h',
+        ],
+      }],
+      ['include_g722==1', {
+        'audio_coding_dependencies': ['G722',],
+        'audio_coding_defines': ['WEBRTC_CODEC_G722',],
+        'audio_coding_sources': [
+          'acm_g722.cc',
+          'acm_g722.h',
+          'acm_g7221.cc',
+          'acm_g7221.h',
+          'acm_g7221c.cc',
+          'acm_g7221c.h',
+        ],
+      }],
+      ['include_ilbc==1', {
+        'audio_coding_dependencies': ['iLBC',],
+        'audio_coding_defines': ['WEBRTC_CODEC_ILBC',],
+        'audio_coding_sources': [
+          'acm_ilbc.cc',
+          'acm_ilbc.h',
+        ],
+      }],
+      ['include_isac==1', {
+        'audio_coding_dependencies': ['iSAC', 'iSACFix',],
+        'audio_coding_defines': ['WEBRTC_CODEC_ISAC', 'WEBRTC_CODEC_ISACFX',],
+        'audio_coding_sources': [
+          'acm_isac.cc',
+          'acm_isac.h',
+          'acm_isac_macros.h',
+        ],
+      }],
+      ['include_pcm16b==1', {
+        'audio_coding_dependencies': ['PCM16B',],
+        'audio_coding_defines': ['WEBRTC_CODEC_PCM16',],
+        'audio_coding_sources': [
+          'acm_pcm16b.cc',
+          'acm_pcm16b.h',
+        ],
       }],
     ],
   },
   'targets': [
     {
       'target_name': 'audio_coding_module',
       'type': 'static_library',
       'defines': [
@@ -46,57 +91,34 @@
       ],
       'direct_dependent_settings': {
         'include_dirs': [
           '../interface',
           '../../../interface',
         ],
       },
       'sources': [
+#        '<@(audio_coding_sources)',
         '../interface/audio_coding_module.h',
         '../interface/audio_coding_module_typedefs.h',
-        'acm_amr.cc',
-        'acm_amr.h',
-        'acm_amrwb.cc',
-        'acm_amrwb.h',
-        'acm_celt.cc',
-        'acm_celt.h',
         'acm_cng.cc',
         'acm_cng.h',
         'acm_codec_database.cc',
         'acm_codec_database.h',
         'acm_dtmf_detection.cc',
         'acm_dtmf_detection.h',
         'acm_dtmf_playout.cc',
         'acm_dtmf_playout.h',
-        'acm_g722.cc',
-        'acm_g722.h',
-        'acm_g7221.cc',
-        'acm_g7221.h',
-        'acm_g7221c.cc',
-        'acm_g7221c.h',
-        'acm_g729.cc',
-        'acm_g729.h',
-        'acm_g7291.cc',
-        'acm_g7291.h',
         'acm_generic_codec.cc',
         'acm_generic_codec.h',
-        'acm_gsmfr.cc',
-        'acm_gsmfr.h',
-        'acm_ilbc.cc',
-        'acm_ilbc.h',
-        'acm_isac.cc',
-        'acm_isac.h',
-        'acm_isac_macros.h',
         'acm_neteq.cc',
         'acm_neteq.h',
+# cheat until I get audio_coding_sources to work
         'acm_opus.cc',
         'acm_opus.h',
-        'acm_speex.cc',
-        'acm_speex.h',
         'acm_pcm16b.cc',
         'acm_pcm16b.h',
         'acm_pcma.cc',
         'acm_pcma.h',
         'acm_pcmu.cc',
         'acm_pcmu.h',
         'acm_red.cc',
         'acm_red.h',
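
Each new include_<codec> flag above expands to a codec dependency, a WEBRTC_CODEC_* define, and the matching acm_*.cc/.h sources, so switching a codec off removes both its implementation and its registration. A schematic of how such defines typically gate code at compile time; the registration function here is hypothetical, not the ACM's real API:

#include <cstdio>

// Hypothetical per-codec hook, gated the way the ACM gates codec support.
static void RegisterCodec(const char* name) { std::printf("codec: %s\n", name); }

int main() {
#ifdef WEBRTC_CODEC_OPUS
  RegisterCodec("opus");   // only present when include_opus==1
#endif
#ifdef WEBRTC_CODEC_G711
  RegisterCodec("g711");   // only present when include_g711==1
#endif
  RegisterCodec("cng");    // always built, like CNG/NetEq above
  return 0;
}
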
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/neteq_defines.h
@@ -64,16 +64,18 @@
  * NETEQ_ISAC_CODEC               Enable iSAC
  *
  * NETEQ_ISAC_SWB_CODEC           Enable iSAC-SWB
  *
  * Note that the decoder of iSAC full-band operates at 32 kHz, that is the
  * decoded signal is at 32 kHz.
  * NETEQ_ISAC_FB_CODEC            Enable iSAC-FB
  *
+ * NETEQ_OPUS_CODEC               Enable Opus
+ *
  * NETEQ_G722_CODEC               Enable G.722
  *
  * NETEQ_G729_CODEC               Enable G.729
  *
  * NETEQ_G729_1_CODEC             Enable G.729.1
  *
  * NETEQ_G726_CODEC               Enable G.726
  *
@@ -316,39 +318,46 @@
     #define NETEQ_RED_CODEC
     #define NETEQ_VAD
     #define NETEQ_ARBITRARY_CODEC
 
     /* Narrowband codecs */
     #define NETEQ_PCM16B_CODEC
     #define NETEQ_G711_CODEC
     #define NETEQ_ILBC_CODEC
+    #define NETEQ_OPUS_CODEC
     #define NETEQ_G729_CODEC
     #define NETEQ_G726_CODEC
     #define NETEQ_GSMFR_CODEC
     #define NETEQ_AMR_CODEC
 
     /* Wideband codecs */
     #define NETEQ_WIDEBAND
     #define NETEQ_ISAC_CODEC
+    /*#define NETEQ_OPUS_CODEC define only once */
     #define NETEQ_G722_CODEC
     #define NETEQ_G722_1_CODEC
     #define NETEQ_G729_1_CODEC
     #define NETEQ_SPEEX_CODEC
     #define NETEQ_AMRWB_CODEC
 
     /* Super wideband 32kHz codecs */
     #define NETEQ_ISAC_SWB_CODEC
+    /*#define NETEQ_OPUS_CODEC*/
     #define NETEQ_32KHZ_WIDEBAND
     #define NETEQ_G722_1C_CODEC
     #define NETEQ_CELT_CODEC
+    /*#define NETEQ_OPUS_CODEC*/
+
+    /* hack in 48 kHz support */
+    #define NETEQ_48KHZ_WIDEBAND
 
     /* Super wideband 48kHz codecs */
     #define NETEQ_48KHZ_WIDEBAND
-    #define NETEQ_OPUS_CODEC
+    /*#define NETEQ_OPUS_CODEC*/
     #define NETEQ_ISAC_FB
 #endif
 
 /* Max output size from decoding one frame */
 #if defined(NETEQ_48KHZ_WIDEBAND)
     #define NETEQ_MAX_FRAME_SIZE 5760  /* 120 ms super wideband */
     #define NETEQ_MAX_OUTPUT_SIZE 6480  /* 120+15 ms super wideband (120 ms
                                          * decoded + 15 ms for merge overlap) */
--- a/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c
+++ b/media/webrtc/trunk/webrtc/modules/audio_coding/neteq/packet_buffer.c
@@ -673,16 +673,21 @@ int WebRtcNetEQ_GetDefaultCodecSettings(
             codecBytes = 1560; /* 240ms @ 52kbps (30ms frames) */
             codecBuffers = 8;
         }
         else if (codecID[i] == kDecoderOpus)
         {
             codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
             codecBuffers = 30;  /* Replicating the value for PCMu/a */
         }
+        else if (codecID[i] == kDecoderOpus)
+        {
+            codecBytes = 15300; /* 240ms @ 510kbps (60ms frames) */
+            codecBuffers = 30;  /* ?? Codec supports down to 2.5-60 ms frames */
+        }
         else if ((codecID[i] == kDecoderPCM16B) ||
             (codecID[i] == kDecoderPCM16B_2ch))
         {
             codecBytes = 3360; /* 210ms */
             codecBuffers = 15;
         }
         else if ((codecID[i] == kDecoderPCM16Bwb) ||
             (codecID[i] == kDecoderPCM16Bwb_2ch))
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.cc
@@ -21,91 +21,85 @@
 #include "audio_device_utility.h"
 #include "audio_device_jni_android.h"
 #include "audio_device_config.h"
 
 #include "trace.h"
 #include "thread_wrapper.h"
 #include "event_wrapper.h"
 
+#include "AndroidJNIWrapper.h"
+
 namespace webrtc
 {
 // TODO(leozwang): Refactor jni and the following global variables, a
 // good example is jni_helper in Chromium.
 JavaVM* AudioDeviceAndroidJni::globalJvm = NULL;
-JNIEnv* AudioDeviceAndroidJni::globalJNIEnv = NULL;
 jobject AudioDeviceAndroidJni::globalContext = NULL;
 jclass AudioDeviceAndroidJni::globalScClass = NULL;
 
 // ----------------------------------------------------------------------------
 //  SetAndroidAudioDeviceObjects
 //
 //  Global function for setting Java pointers and creating Java
 //  objects that are global to all instances of VoiceEngine used
 //  by the same Java application.
 // ----------------------------------------------------------------------------
 
 int32_t AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
     void* javaVM,
-    void* env,
     void* context) {
-  __android_log_print(ANDROID_LOG_DEBUG, "WEBRTC", "JNI:%s", __FUNCTION__);
+  WEBRTC_TRACE(kTraceMemory, kTraceAudioDevice, -1,
+               "%s called", __FUNCTION__);
 
   // TODO(leozwang): Make this function thread-safe.
   globalJvm = reinterpret_cast<JavaVM*>(javaVM);
 
-  if (env) {
-    globalJNIEnv = reinterpret_cast<JNIEnv*>(env);
+  JNIEnv* env = NULL;
+
+  // Check if we already got a reference
+  if (globalJvm && !globalScClass) {
+    if (globalJvm->GetEnv((void**)&env, JNI_VERSION_1_4) != JNI_OK) {
+      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, -1,
+                   "%s: could not get Java environment", __FUNCTION__);
+      return -1;
+    }
+    globalJvm->AttachCurrentThread(&env, NULL);
+
     // Get java class type (note path to class packet).
-    jclass javaScClassLocal = globalJNIEnv->FindClass(
-        "org/webrtc/voiceengine/WebRTCAudioDevice");
-    if (!javaScClassLocal) {
+    globalScClass = jsjni_GetGlobalClassRef(AudioCaptureClass);
+    if (!globalScClass) {
       WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                    "%s: could not find java class", __FUNCTION__);
       return -1; // exception thrown
     }
 
-    // Create a global reference to the class (to tell JNI that we are
-    // referencing it after this function has returned).
-    globalScClass = reinterpret_cast<jclass> (
-        globalJNIEnv->NewGlobalRef(javaScClassLocal));
-    if (!globalScClass) {
-      WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
-                   "%s: could not create reference", __FUNCTION__);
-      return -1;
-    }
-
-    globalContext = globalJNIEnv->NewGlobalRef(
+    globalContext = env->NewGlobalRef(
         reinterpret_cast<jobject>(context));
     if (!globalContext) {
       WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                    "%s: could not create context reference", __FUNCTION__);
       return -1;
     }
-
-    // Delete local class ref, we only use the global ref
-    globalJNIEnv->DeleteLocalRef(javaScClassLocal);
   }
   else { // User is resetting the env variable
     WEBRTC_TRACE(kTraceStateInfo, kTraceAudioDevice, -1,
                  "%s: env is NULL, assuming deinit", __FUNCTION__);
 
-    if (!globalJNIEnv) {
+    if (!env) {
       WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
                    "%s: saved env already NULL", __FUNCTION__);
       return 0;
     }
 
-    globalJNIEnv->DeleteGlobalRef(globalScClass);
+    env->DeleteGlobalRef(globalScClass);
     globalScClass = reinterpret_cast<jclass>(NULL);
 
-    globalJNIEnv->DeleteGlobalRef(globalContext);
+    env->DeleteGlobalRef(globalContext);
     globalContext = reinterpret_cast<jobject>(NULL);
-
-    globalJNIEnv = reinterpret_cast<JNIEnv*>(NULL);
   }
 
   return 0;
 }
 
 // ============================================================================
 //                            Construction & Destruction
 // ============================================================================
@@ -2206,17 +2200,17 @@ int32_t AudioDeviceAndroidJni::InitJavaR
     if (cid == NULL)
     {
         WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                      "%s: could not get constructor ID", __FUNCTION__);
         return -1; /* exception thrown */
     }
 
     WEBRTC_TRACE(kTraceDebug, kTraceAudioDevice, _id,
-                 "construct object", __FUNCTION__);
+                 "%s: construct object", __FUNCTION__);
 
     // construct the object
     jobject javaScObjLocal = env->NewObject(_javaScClass, cid);
     if (!javaScObjLocal)
     {
         WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, _id,
                      "%s: could not create Java sc object", __FUNCTION__);
         return -1;
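
The rewritten SetAndroidAudioDeviceObjects drops the cached JNIEnv entirely: a JNIEnv is only valid on the thread that obtained it, so the function now derives one from the JavaVM via GetEnv/AttachCurrentThread and keeps nothing but global references. jsjni_GetGlobalClassRef comes from Mozilla's AndroidJNIWrapper.h; the sketch below substitutes plain FindClass plus NewGlobalRef and assumes a VM is already running:

#include <jni.h>

static JavaVM* g_vm = NULL;
static jclass g_class = NULL;

// Cache a global class reference; callable from any thread once the VM exists.
int CacheClassRef(JavaVM* vm, const char* class_name) {
  g_vm = vm;
  JNIEnv* env = NULL;
  if (vm->GetEnv(reinterpret_cast<void**>(&env), JNI_VERSION_1_4) != JNI_OK)
    return -1;                       // mirrors the patch: bail if GetEnv fails
  vm->AttachCurrentThread(&env, NULL);  // Android JNI signature (JNIEnv**)
  jclass local = env->FindClass(class_name);
  if (!local)
    return -1;                       // class not found; Java exception pending
  g_class = reinterpret_cast<jclass>(env->NewGlobalRef(local));
  env->DeleteLocalRef(local);        // only the global ref may outlive this call
  return g_class ? 0 : -1;
}
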
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_jni_android.h
@@ -15,38 +15,30 @@
 #ifndef WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_JNI_ANDROID_H
 #define WEBRTC_AUDIO_DEVICE_AUDIO_DEVICE_JNI_ANDROID_H
 
 #include "audio_device_generic.h"
 #include "critical_section_wrapper.h"
 
 #include <jni.h> // For accessing AudioDeviceAndroid java class
 
+#define AudioCaptureClass "org/webrtc/voiceengine/WebRTCAudioDevice"
+
 namespace webrtc
 {
 class EventWrapper;
 
-const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
-
-const uint32_t N_REC_CHANNELS = 1; // default is mono recording
-const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout
-
-const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
-
-
 class ThreadWrapper;
 
 class AudioDeviceAndroidJni : public AudioDeviceGeneric {
  public:
   AudioDeviceAndroidJni(const int32_t id);
   ~AudioDeviceAndroidJni();
 
   static int32_t SetAndroidAudioDeviceObjects(void* javaVM,
-                                              void* env,
                                               void* context);
 
   virtual int32_t ActiveAudioLayer(
       AudioDeviceModule::AudioLayer& audioLayer) const;
 
   virtual int32_t Init();
   virtual int32_t Terminate();
   virtual bool Initialized() const;
@@ -153,16 +145,24 @@ class AudioDeviceAndroidJni : public Aud
   virtual int32_t SetRecordingSampleRate(
       const uint32_t samplesPerSec);
   virtual int32_t SetPlayoutSampleRate(
       const uint32_t samplesPerSec);
 
   virtual int32_t SetLoudspeakerStatus(bool enable);
   virtual int32_t GetLoudspeakerStatus(bool& enable) const;
 
+  static const uint32_t N_REC_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+  static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000; // Default is 16 kHz
+
+  static const uint32_t N_REC_CHANNELS = 1; // default is mono recording
+  static const uint32_t N_PLAY_CHANNELS = 1; // default is mono playout
+
+  static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480; // Handle max 10 ms @ 48 kHz
+
  private:
   // Lock
   void Lock() {
     _critSect.Enter();
   };
   void UnLock() {
     _critSect.Leave();
   };
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.cc
@@ -12,16 +12,17 @@
 
 #ifdef WEBRTC_ANDROID_DEBUG
 #include <android/log.h>
 #endif
 #include <sys/resource.h>
 #include <sys/syscall.h>
 #include <sys/time.h>
 #include <time.h>
+#include <dlfcn.h>
 
 #include "modules/audio_device/audio_device_utility.h"
 #include "system_wrappers/interface/event_wrapper.h"
 #include "system_wrappers/interface/thread_wrapper.h"
 #include "system_wrappers/interface/trace.h"
 
 #ifdef WEBRTC_ANDROID_DEBUG
 #define WEBRTC_OPENSL_TRACE(a, b, c, ...)                               \
@@ -59,22 +60,24 @@ AudioDeviceAndroidOpenSLES::AudioDeviceA
       is_playing_(false),
       is_rec_initialized_(false),
       is_play_initialized_(false),
       is_mic_initialized_(false),
       is_speaker_initialized_(false),
       playout_delay_(0),
       recording_delay_(0),
       agc_enabled_(false),
+      rec_thread_(NULL),
       rec_timer_(*EventWrapper::Create()),
       mic_sampling_rate_(N_REC_SAMPLES_PER_SEC * 1000),
       speaker_sampling_rate_(N_PLAY_SAMPLES_PER_SEC * 1000),
       max_speaker_vol_(0),
       min_speaker_vol_(0),
-      loundspeaker_on_(false) {
+      loundspeaker_on_(false),
+      opensles_lib_(0) {
   WEBRTC_OPENSL_TRACE(kTraceMemory, kTraceAudioDevice, id, "%s created",
                       __FUNCTION__);
   memset(rec_buf_, 0, sizeof(rec_buf_));
   memset(play_buf_, 0, sizeof(play_buf_));
 }
 
 AudioDeviceAndroidOpenSLES::~AudioDeviceAndroidOpenSLES() {
   WEBRTC_OPENSL_TRACE(kTraceMemory, kTraceAudioDevice, id_, "%s destroyed",
@@ -109,20 +112,55 @@ int32_t AudioDeviceAndroidOpenSLES::Acti
 }
 
 int32_t AudioDeviceAndroidOpenSLES::Init() {
   CriticalSectionScoped lock(&crit_sect_);
 
   if (is_initialized_)
     return 0;
 
+  /* Try to dynamically open the OpenSLES library */
+  opensles_lib_ = dlopen("libOpenSLES.so", RTLD_LAZY);
+  if (!opensles_lib_) {
+      WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
+                          "  failed to dlopen OpenSLES library");
+      return -1;
+  }
+
+  typedef SLresult (*slCreateEngine_t)(SLObjectItf *,
+                                       SLuint32,
+                                       const SLEngineOption *,
+                                       SLuint32,
+                                       const SLInterfaceID *,
+                                       const SLboolean *);
+  slCreateEngine_t f_slCreateEngine =
+    (slCreateEngine_t)dlsym(opensles_lib_, "slCreateEngine");
+  SL_IID_ENGINE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ENGINE");
+  SL_IID_BUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_BUFFERQUEUE");
+  SL_IID_ANDROIDCONFIGURATION_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDCONFIGURATION");
+  SL_IID_PLAY_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_PLAY");
+  SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_ANDROIDSIMPLEBUFFERQUEUE");
+  SL_IID_RECORD_ = *(SLInterfaceID *)dlsym(opensles_lib_, "SL_IID_RECORD");
+
+  if (!f_slCreateEngine ||
+      !SL_IID_ENGINE_ ||
+      !SL_IID_BUFFERQUEUE_ ||
+      !SL_IID_ANDROIDCONFIGURATION_ ||
+      !SL_IID_PLAY_ ||
+      !SL_IID_ANDROIDSIMPLEBUFFERQUEUE_ ||
+      !SL_IID_RECORD_) {
+      WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
+                          "  failed to find OpenSLES function");
+      return -1;
+  }
+
   SLEngineOption EngineOption[] = {
     { SL_ENGINEOPTION_THREADSAFE, static_cast<SLuint32>(SL_BOOLEAN_TRUE) },
   };
-  int32_t res = slCreateEngine(&sles_engine_, 1, EngineOption, 0, NULL, NULL);
+  int32_t res = f_slCreateEngine(&sles_engine_, 1, EngineOption, 0, NULL, NULL);
 
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to create SL Engine Object");
     return -1;
   }
 
   // Realizing the SL Engine in synchronous mode.
@@ -130,17 +168,17 @@ int32_t AudioDeviceAndroidOpenSLES::Init
       != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to Realize SL Engine");
     return -1;
   }
 
   if ((*sles_engine_)->GetInterface(
           sles_engine_,
-          SL_IID_ENGINE,
+          SL_IID_ENGINE_,
           &sles_engine_itf_) != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to get SL Engine interface");
     return -1;
   }
 
   // Check the sample rate to be used for playback and recording
   if (InitSampleRate() != 0) {
@@ -183,16 +221,18 @@ int32_t AudioDeviceAndroidOpenSLES::Term
   StopPlayout();
 
   if (sles_engine_ != NULL) {
     (*sles_engine_)->Destroy(sles_engine_);
     sles_engine_ = NULL;
     sles_engine_itf_ = NULL;
   }
 
+  dlclose(opensles_lib_);
+
   is_initialized_ = false;
   return 0;
 }
 
 bool AudioDeviceAndroidOpenSLES::Initialized() const {
   return (is_initialized_);
 }
 
@@ -278,17 +318,17 @@ int32_t AudioDeviceAndroidOpenSLES::SetS
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "SetSpeakerVolume, SL Engine object doesnt exist");
     return -1;
   }
 
   if (sles_engine_itf_ == NULL) {
     if ((*sles_engine_)->GetInterface(
             sles_engine_,
-            SL_IID_ENGINE,
+            SL_IID_ENGINE_,
             &sles_engine_itf_) != SL_RESULT_SUCCESS) {
       WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                           "  failed to GetInterface SL Engine Interface");
       return -1;
     }
   }
   return 0;
 }
@@ -719,17 +759,17 @@ int32_t AudioDeviceAndroidOpenSLES::Init
   }
 
   player_pcm_.endianness = SL_BYTEORDER_LITTLEENDIAN;
   // Setup the data sink structure.
   locator_outputmix.locatorType = SL_DATALOCATOR_OUTPUTMIX;
   locator_outputmix.outputMix = sles_output_mixer_;
 
   SLInterfaceID ids[N_MAX_INTERFACES] = {
-    SL_IID_BUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
+    SL_IID_BUFFERQUEUE_, SL_IID_ANDROIDCONFIGURATION_ };
   SLboolean req[N_MAX_INTERFACES] = {
     SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
   res = (*sles_engine_itf_)->CreateAudioPlayer(sles_engine_itf_,
                                                &sles_player_, &audio_source,
                                                &audio_sink, 2, ids, req);
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to create AudioPlayer");
@@ -739,25 +779,25 @@ int32_t AudioDeviceAndroidOpenSLES::Init
   // Realizing the player in synchronous mode.
   res = (*sles_player_)->Realize(sles_player_, SL_BOOLEAN_FALSE);
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to realize the player");
     return -1;
   }
   res = (*sles_player_)->GetInterface(
-      sles_player_, SL_IID_PLAY,
+      sles_player_, SL_IID_PLAY_,
       static_cast<void*>(&sles_player_itf_));
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to get Player interface");
     return -1;
   }
   res = (*sles_player_)->GetInterface(
-      sles_player_, SL_IID_BUFFERQUEUE,
+      sles_player_, SL_IID_BUFFERQUEUE_,
       static_cast<void*>(&sles_player_sbq_itf_));
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to get Player SimpleBufferQueue interface");
     return -1;
   }
 
   // Setup to receive buffer queue event callbacks
@@ -839,17 +879,17 @@ int32_t AudioDeviceAndroidOpenSLES::Init
     record_pcm_.channelMask = SL_SPEAKER_FRONT_LEFT | SL_SPEAKER_FRONT_RIGHT;
   } else {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  %d rec channels not supported", N_REC_CHANNELS);
   }
   record_pcm_.endianness = SL_BYTEORDER_LITTLEENDIAN;
 
   const SLInterfaceID id[2] = {
-    SL_IID_ANDROIDSIMPLEBUFFERQUEUE, SL_IID_ANDROIDCONFIGURATION };
+    SL_IID_ANDROIDSIMPLEBUFFERQUEUE_, SL_IID_ANDROIDCONFIGURATION_ };
   const SLboolean req[2] = {
     SL_BOOLEAN_TRUE, SL_BOOLEAN_TRUE };
   int32_t res = -1;
   res = (*sles_engine_itf_)->CreateAudioRecorder(sles_engine_itf_,
                                                  &sles_recorder_,
                                                  &audio_source,
                                                  &audio_sink,
                                                  2,
@@ -866,28 +906,28 @@ int32_t AudioDeviceAndroidOpenSLES::Init
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to realize Recorder");
     return -1;
   }
 
   // Get the RECORD interface - it is an implicit interface
   res = (*sles_recorder_)->GetInterface(
-      sles_recorder_, SL_IID_RECORD,
+      sles_recorder_, SL_IID_RECORD_,
       static_cast<void*>(&sles_recorder_itf_));
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to get Recorder interface");
     return -1;
   }
 
   // Get the simpleBufferQueue interface
   res = (*sles_recorder_)->GetInterface(
       sles_recorder_,
-      SL_IID_ANDROIDSIMPLEBUFFERQUEUE,
+      SL_IID_ANDROIDSIMPLEBUFFERQUEUE_,
       static_cast<void*>(&sles_recorder_sbq_itf_));
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
                         "  failed to get Recorder Simple Buffer Queue");
     return -1;
   }
 
   // Setup to receive buffer queue event callbacks
@@ -1390,17 +1430,17 @@ void AudioDeviceAndroidOpenSLES::Recorde
     // TODO(leozwang): OpenSL ES doesn't support AudioRecorder
     // volume control now, add it when it's ready.
   }
 }
 
 void AudioDeviceAndroidOpenSLES::CheckErr(SLresult res) {
   if (res != SL_RESULT_SUCCESS) {
     WEBRTC_OPENSL_TRACE(kTraceError, kTraceAudioDevice, id_,
-                        "  AudioDeviceAndroidOpenSLES::CheckErr(%d)", res);
+                        "  AudioDeviceAndroidOpenSLES::CheckErr(%lu)", res);
     exit(-1);
   }
 }
 
 void AudioDeviceAndroidOpenSLES::UpdatePlayoutDelay(
     uint32_t nSamplePlayed) {
   // TODO(leozwang): Add accurate delay estimat.
   playout_delay_ = (N_PLAY_QUEUE_BUFFERS - 0.5) * 10 +
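
Init() above no longer links slCreateEngine directly; it dlopen()s libOpenSLES.so and pulls the entry point and the SL_IID_* interface IDs out with dlsym(), so the module still loads on Android builds where OpenSL ES is absent and the caller can fall back to the JNI audio device (see the audio_device_impl.cc hunk below). A reduced sketch of the pattern, assuming the NDK OpenSL ES headers at build time:

#include <dlfcn.h>
#include <SLES/OpenSLES.h>

typedef SLresult (*slCreateEngine_t)(SLObjectItf*, SLuint32,
                                     const SLEngineOption*, SLuint32,
                                     const SLInterfaceID*, const SLboolean*);

// Returns the library handle on success, NULL if OpenSL ES is unavailable.
void* LoadOpenSLES(slCreateEngine_t* create_engine, SLInterfaceID* iid_engine) {
  void* lib = dlopen("libOpenSLES.so", RTLD_LAZY);
  if (!lib)
    return NULL;  // no OpenSL ES on this device: caller uses the JNI backend
  *create_engine =
      reinterpret_cast<slCreateEngine_t>(dlsym(lib, "slCreateEngine"));
  void* iid = dlsym(lib, "SL_IID_ENGINE");
  if (!*create_engine || !iid) {
    dlclose(lib);
    return NULL;  // symbol lookup failed; fall back as above
  }
  *iid_engine = *reinterpret_cast<SLInterfaceID*>(iid);
  return lib;
}
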
--- a/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/android/audio_device_opensles_android.h
@@ -23,46 +23,16 @@
 
 #include "modules/audio_device/audio_device_generic.h"
 #include "system_wrappers/interface/critical_section_wrapper.h"
 
 namespace webrtc {
 
 class EventWrapper;
 
-const uint32_t N_MAX_INTERFACES = 3;
-const uint32_t N_MAX_OUTPUT_DEVICES = 6;
-const uint32_t N_MAX_INPUT_DEVICES = 3;
-
-const uint32_t N_REC_SAMPLES_PER_SEC = 16000;  // Default fs
-const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000;  // Default fs
-
-const uint32_t N_REC_CHANNELS = 1;
-const uint32_t N_PLAY_CHANNELS = 1;
-
-const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480;
-const uint32_t PLAY_BUF_SIZE_IN_SAMPLES = 480;
-
-const uint32_t REC_MAX_TEMP_BUF_SIZE_PER_10ms =
-    N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
-
-const uint32_t PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
-    N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
-
-// Number of the buffers in playout queue
-const uint16_t N_PLAY_QUEUE_BUFFERS = 8;
-// Number of buffers in recording queue
-// TODO(xian): Reduce the numbers of buffers to improve the latency.
-const uint16_t N_REC_QUEUE_BUFFERS = 8;
-// Some values returned from getMinBufferSize
-// (Nexus S playout  72ms, recording 64ms)
-// (Galaxy,         167ms,           44ms)
-// (Nexus 7,         72ms,           48ms)
-// (Xoom             92ms,           40ms)
-
 class ThreadWrapper;
 
 class AudioDeviceAndroidOpenSLES: public AudioDeviceGeneric {
  public:
   explicit AudioDeviceAndroidOpenSLES(const int32_t id);
   ~AudioDeviceAndroidOpenSLES();
 
   // Retrieve the currently utilized audio layer
@@ -205,16 +175,46 @@ class AudioDeviceAndroidOpenSLES: public
 
   // Attach audio buffer
   virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
   // Speaker audio routing
   virtual int32_t SetLoudspeakerStatus(bool enable);
   virtual int32_t GetLoudspeakerStatus(bool& enable) const;  // NOLINT
 
+  static const uint32_t N_MAX_INTERFACES = 3;
+  static const uint32_t N_MAX_OUTPUT_DEVICES = 6;
+  static const uint32_t N_MAX_INPUT_DEVICES = 3;
+
+  static const uint32_t N_REC_SAMPLES_PER_SEC = 16000;  // Default fs
+  static const uint32_t N_PLAY_SAMPLES_PER_SEC = 16000;  // Default fs
+
+  static const uint32_t N_REC_CHANNELS = 1;
+  static const uint32_t N_PLAY_CHANNELS = 1;
+
+  static const uint32_t REC_BUF_SIZE_IN_SAMPLES = 480;
+  static const uint32_t PLAY_BUF_SIZE_IN_SAMPLES = 480;
+
+  static const uint32_t REC_MAX_TEMP_BUF_SIZE_PER_10ms =
+      N_REC_CHANNELS * REC_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
+
+  static const uint32_t PLAY_MAX_TEMP_BUF_SIZE_PER_10ms =
+      N_PLAY_CHANNELS * PLAY_BUF_SIZE_IN_SAMPLES * sizeof(int16_t);
+
+  // Number of the buffers in playout queue
+  static const uint16_t N_PLAY_QUEUE_BUFFERS = 8;
+  // Number of buffers in recording queue
+  // TODO(xian): Reduce the numbers of buffers to improve the latency.
+  static const uint16_t N_REC_QUEUE_BUFFERS = 8;
+  // Some values returned from getMinBufferSize
+  // (Nexus S playout  72ms, recording 64ms)
+  // (Galaxy,         167ms,           44ms)
+  // (Nexus 7,         72ms,           48ms)
+  // (Xoom             92ms,           40ms)
+
  private:
   // Lock
   void Lock() {
     crit_sect_.Enter();
   };
   void UnLock() {
     crit_sect_.Leave();
   };
@@ -304,13 +304,22 @@ class AudioDeviceAndroidOpenSLES: public
   int8_t rec_buf_[N_REC_QUEUE_BUFFERS][
       N_REC_CHANNELS * sizeof(int16_t) * REC_BUF_SIZE_IN_SAMPLES];
   int8_t rec_voe_buf_[N_REC_QUEUE_BUFFERS][
       N_REC_CHANNELS * sizeof(int16_t) * REC_BUF_SIZE_IN_SAMPLES];
 
   std::queue<int8_t*> play_queue_;
   int8_t play_buf_[N_PLAY_QUEUE_BUFFERS][
       N_PLAY_CHANNELS * sizeof(int16_t) * PLAY_BUF_SIZE_IN_SAMPLES];
+
+  // dlopen for OpenSLES
+  void *opensles_lib_;
+  SLInterfaceID SL_IID_ENGINE_;
+  SLInterfaceID SL_IID_BUFFERQUEUE_;
+  SLInterfaceID SL_IID_ANDROIDCONFIGURATION_;
+  SLInterfaceID SL_IID_PLAY_;
+  SLInterfaceID SL_IID_ANDROIDSIMPLEBUFFERQUEUE_;
+  SLInterfaceID SL_IID_RECORD_;
 };
 
 }  // namespace webrtc
 
 #endif  // SRC_MODULES_AUDIO_DEVICE_ANDROID_AUDIO_DEVICE_OPENSLES_ANDROID_H_
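
Moving the N_* constants from namespace scope into the class (here and in audio_device_jni_android.h above) is what allows both Android backends to be compiled into one build, as the audio_device.gypi hunk below now does: two headers each defining webrtc::N_REC_CHANNELS at namespace scope would collide, while static members stay scoped to their class. A toy illustration:

#include <cstdio>

// At namespace scope these two constants would clash in any file that
// included both backends' headers; as static members they coexist.
class OpenSlesBackend { public: static const unsigned N_REC_CHANNELS = 1; };
class JniBackend      { public: static const unsigned N_REC_CHANNELS = 1; };

int main() {
  std::printf("%u %u\n",
              OpenSlesBackend::N_REC_CHANNELS, JniBackend::N_REC_CHANNELS);
  return 0;
}
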
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device.gypi
@@ -40,21 +40,21 @@
         'audio_device_utility.h',
         'audio_device_impl.cc',
         'audio_device_impl.h',
         'audio_device_config.h',
         'dummy/audio_device_dummy.h',
         'dummy/audio_device_utility_dummy.h',
       ],
       'conditions': [
-        ['OS=="linux"', {
+        ['OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1', {
           'include_dirs': [
             'linux',
           ],
-        }], # OS==linux
+        }], # OS=="linux" or include_alsa_audio==1 or include_pulse_audio==1
         ['OS=="ios"', {
           'include_dirs': [
             'ios',
           ],
         }], # OS==ios
         ['OS=="mac"', {
           'include_dirs': [
             'mac',
@@ -62,34 +62,29 @@
         }], # OS==mac
         ['OS=="win"', {
           'include_dirs': [
             'win',
           ],
         }],
         ['OS=="android"', {
           'include_dirs': [
+            '$(topsrcdir)/widget/android',
             'android',
           ],
         }], # OS==android
         ['include_internal_audio_device==0', {
           'defines': [
             'WEBRTC_DUMMY_AUDIO_BUILD',
           ],
         }],
         ['include_internal_audio_device==1', {
           'sources': [
-            'linux/alsasymboltable_linux.cc',
-            'linux/alsasymboltable_linux.h',
-            'linux/audio_device_alsa_linux.cc',
-            'linux/audio_device_alsa_linux.h',
             'linux/audio_device_utility_linux.cc',
             'linux/audio_device_utility_linux.h',
-            'linux/audio_mixer_manager_alsa_linux.cc',
-            'linux/audio_mixer_manager_alsa_linux.h',
             'linux/latebindingsymboltable_linux.cc',
             'linux/latebindingsymboltable_linux.h',
             'ios/audio_device_ios.cc',
             'ios/audio_device_ios.h',
             'ios/audio_device_utility_ios.cc',
             'ios/audio_device_utility_ios.h',
             'mac/audio_device_mac.cc',
             'mac/audio_device_mac.h',
@@ -105,62 +100,67 @@
             'win/audio_device_wave_win.cc',
             'win/audio_device_wave_win.h',
             'win/audio_device_utility_win.cc',
             'win/audio_device_utility_win.h',
             'win/audio_mixer_manager_win.cc',
             'win/audio_mixer_manager_win.h',
             'android/audio_device_utility_android.cc',
             'android/audio_device_utility_android.h',
+            'android/audio_device_opensles_android.cc',
+            'android/audio_device_opensles_android.h',
+            'android/audio_device_jni_android.cc',
+            'android/audio_device_jni_android.h',
           ],
           'conditions': [
             ['OS=="android"', {
               'link_settings': {
                 'libraries': [
                   '-llog',
                   '-lOpenSLES',
                 ],
               },
-              'conditions': [
-                ['enable_android_opensl==1', {
-                  'sources': [
-                    'android/audio_device_opensles_android.cc',
-                    'android/audio_device_opensles_android.h',
-                  ],
-                }, {
-                  'sources': [
-                    'android/audio_device_jni_android.cc',
-                    'android/audio_device_jni_android.h',
-                  ],
-                }],
-              ],
             }],
             ['OS=="linux"', {
-              'defines': [
-                'LINUX_ALSA',
-              ],
               'link_settings': {
                 'libraries': [
                   '-ldl',
                 ],
               },
-              'conditions': [
-                ['include_pulse_audio==1', {
-                  'defines': [
-                    'LINUX_PULSE',
-                  ],
-                  'sources': [
-                    'linux/audio_device_pulse_linux.cc',
-                    'linux/audio_device_pulse_linux.h',
-                    'linux/audio_mixer_manager_pulse_linux.cc',
-                    'linux/audio_mixer_manager_pulse_linux.h',
-                    'linux/pulseaudiosymboltable_linux.cc',
-                    'linux/pulseaudiosymboltable_linux.h',
-                  ],
-                }],
+            }],
+            ['include_alsa_audio==1', {
+              'cflags_mozilla': [
+                '$(MOZ_ALSA_CFLAGS)',
+              ],
+              'defines': [
+                'LINUX_ALSA',
+              ],
+              'sources': [
+                'linux/alsasymboltable_linux.cc',
+                'linux/alsasymboltable_linux.h',
+                'linux/audio_device_alsa_linux.cc',
+                'linux/audio_device_alsa_linux.h',
+                'linux/audio_mixer_manager_alsa_linux.cc',
+                'linux/audio_mixer_manager_alsa_linux.h',
+              ],
+            }],
+            ['include_pulse_audio==1', {
+              'cflags_mozilla': [
+                '$(MOZ_PULSEAUDIO_CFLAGS)',
+              ],
+              'defines': [
+                'LINUX_PULSE',
+              ],
+              'sources': [
+                'linux/audio_device_pulse_linux.cc',
+                'linux/audio_device_pulse_linux.h',
+                'linux/audio_mixer_manager_pulse_linux.cc',
+                'linux/audio_mixer_manager_pulse_linux.h',
+                'linux/pulseaudiosymboltable_linux.cc',
+                'linux/pulseaudiosymboltable_linux.h',
               ],
             }],
             ['OS=="mac" or OS=="ios"', {
               'link_settings': {
                 'libraries': [
                   '$(SDKROOT)/System/Library/Frameworks/AudioToolbox.framework',
                   '$(SDKROOT)/System/Library/Frameworks/CoreAudio.framework',
                 ],
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_impl.cc
@@ -19,18 +19,20 @@
 #if defined(_WIN32)
     #include "audio_device_utility_win.h"
     #include "audio_device_wave_win.h"
  #if defined(WEBRTC_WINDOWS_CORE_AUDIO_BUILD)
     #include "audio_device_core_win.h"
  #endif
 #elif defined(WEBRTC_ANDROID_OPENSLES)
     #include <stdlib.h>
+    #include <dlfcn.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_opensles_android.h"
+    #include "audio_device_jni_android.h"
 #elif defined(WEBRTC_ANDROID)
     #include <stdlib.h>
     #include "audio_device_utility_android.h"
     #include "audio_device_jni_android.h"
 #elif defined(WEBRTC_LINUX)
     #include "audio_device_utility_linux.h"
  #if defined(LINUX_ALSA)
     #include "audio_device_alsa_linux.h"
@@ -254,48 +256,54 @@ int32_t AudioDeviceModuleImpl::CreatePla
         //
         ptrAudioDeviceUtility = new AudioDeviceUtilityWindows(Id());
     }
 #endif  // #if defined(_WIN32)
 
     // Create the *Android OpenSLES* implementation of the Audio Device
     //
 #if defined(WEBRTC_ANDROID_OPENSLES)
-    if (audioLayer == kPlatformDefaultAudio)
-    {
-        // Create *Android OpenELSE Audio* implementation
-        ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
-                     "Android OpenSLES Audio APIs will be utilized");
+    // Check if the OpenSLES library is available before going further.
+    void* opensles_lib = dlopen("libOpenSLES.so", RTLD_LAZY);
+    if (opensles_lib) {
+        // The library is present; close it for now and proceed normally.
+        dlclose(opensles_lib);
+        if (audioLayer == kPlatformDefaultAudio)
+        {
+            // Create *Android OpenSLES Audio* implementation
+            ptrAudioDevice = new AudioDeviceAndroidOpenSLES(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id,
+                         "Android OpenSLES Audio APIs will be utilized");
+        }
     }
 
     if (ptrAudioDevice != NULL)
     {
         // Create the Android implementation of the Device Utility.
         ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
     }
-    // END #if defined(WEBRTC_ANDROID_OPENSLES)
+#endif
+#if defined(WEBRTC_ANDROID_OPENSLES) || defined(WEBRTC_ANDROID)
+    // Fall back to this case on Android 2.2, or if OpenSLES is not available.
+    if (ptrAudioDevice == NULL) {
+        // Create the *Android Java* implementation of the Audio Device
+        if (audioLayer == kPlatformDefaultAudio)
+        {
+            // Create *Android JNI Audio* implementation
+            ptrAudioDevice = new AudioDeviceAndroidJni(Id());
+            WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+        }
 
-    // Create the *Android Java* implementation of the Audio Device
-    //
-#elif defined(WEBRTC_ANDROID)
-    if (audioLayer == kPlatformDefaultAudio)
-    {
-        // Create *Android JNI Audio* implementation
-        ptrAudioDevice = new AudioDeviceAndroidJni(Id());
-        WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "Android JNI Audio APIs will be utilized");
+        if (ptrAudioDevice != NULL)
+        {
+            // Create the Android implementation of the Device Utility.
+            ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
+        }
     }
 
-    if (ptrAudioDevice != NULL)
-    {
-        // Create the Android implementation of the Device Utility.
-        ptrAudioDeviceUtility = new AudioDeviceUtilityAndroid(Id());
-    }
-    // END #if defined(WEBRTC_ANDROID)
-
     // Create the *Linux* implementation of the Audio Device
     //
 #elif defined(WEBRTC_LINUX)
     if ((audioLayer == kLinuxPulseAudio) || (audioLayer == kPlatformDefaultAudio))
     {
 #if defined(LINUX_PULSE)
         WEBRTC_TRACE(kTraceInfo, kTraceAudioDevice, _id, "attempting to use the Linux PulseAudio APIs...");
 
--- a/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/audio_device_utility.cc
@@ -41,17 +41,17 @@ bool AudioDeviceUtility::StringCompare(
     const char* str1 , const char* str2,
     const uint32_t length)
 {
 	return ((_strnicmp(str1, str2, length) == 0) ? true : false);
 }
 
 }  // namespace webrtc
 
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 // ============================================================================
 //                                 Linux & Mac
 // ============================================================================
 
 #include <sys/time.h>   // gettimeofday
 #include <time.h>       // gettimeofday
 #include <string.h>     // strncasecmp
@@ -104,11 +104,11 @@ uint32_t AudioDeviceUtility::GetTimeInMS
 bool AudioDeviceUtility::StringCompare(
     const char* str1 , const char* str2, const uint32_t length)
 {
     return (strncasecmp(str1, str2, length) == 0)?true: false;
 }
 
 }  // namespace webrtc
 
-#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#endif  // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 
--- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.cc
@@ -944,17 +944,18 @@ int32_t AudioDeviceLinuxALSA::PlayoutDev
 
     memset(name, 0, kAdmMaxDeviceNameSize);
 
     if (guid != NULL)
     {
         memset(guid, 0, kAdmMaxGuidSize);
     }
 
-    return GetDevicesInfo(1, true, index, name, kAdmMaxDeviceNameSize);
+    return GetDevicesInfo(1, false, index, name, kAdmMaxDeviceNameSize,
+                          guid, kAdmMaxGuidSize);
 }
 
 int32_t AudioDeviceLinuxALSA::RecordingDeviceName(
     uint16_t index,
     char name[kAdmMaxDeviceNameSize],
     char guid[kAdmMaxGuidSize])
 {
 
@@ -1615,40 +1616,41 @@ int32_t AudioDeviceLinuxALSA::StartPlayo
         WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                      "    failed to create the play audio thread");
         _playing = false;
         delete [] _playoutBuffer;
         _playoutBuffer = NULL;
         return -1;
     }
 
+    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
+    if (errVal < 0)
+    {
+        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
+                     "     playout snd_pcm_prepare failed (%s)\n",
+                     LATE(snd_strerror)(errVal));
+        // Just log the error; a failing snd_pcm_open() would already
+        // have returned -1.
+    }
+
     unsigned int threadID(0);
     if (!_ptrThreadPlay->Start(threadID))
     {
         WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
                      "  failed to start the play audio thread");
         _playing = false;
         delete _ptrThreadPlay;
         _ptrThreadPlay = NULL;
         delete [] _playoutBuffer;
         _playoutBuffer = NULL;
         return -1;
     }
     _playThreadID = threadID;
 
-    int errVal = LATE(snd_pcm_prepare)(_handlePlayout);
-    if (errVal < 0)
-    {
-        WEBRTC_TRACE(kTraceCritical, kTraceAudioDevice, _id,
-                     "     playout snd_pcm_prepare failed (%s)\n",
-                     LATE(snd_strerror)(errVal));
-        // just log error
-        // if snd_pcm_open fails will return -1
-    }
-
     return 0;
 }
 
 int32_t AudioDeviceLinuxALSA::StopPlayout()
 {
 
     {
         CriticalSectionScoped lock(&_critSect);
@@ -1810,17 +1812,19 @@ void AudioDeviceLinuxALSA::ClearRecordin
 //                                 Private Methods
 // ============================================================================
 
 int32_t AudioDeviceLinuxALSA::GetDevicesInfo(
     const int32_t function,
     const bool playback,
     const int32_t enumDeviceNo,
     char* enumDeviceName,
-    const int32_t ednLen) const
+    const WebRtc_Word32 ednLen,
+    char* enumDeviceId,
+    const WebRtc_Word32 ediLen) const
 {
     
     // Device enumeration based on libjingle implementation
     // by Tristan Schmelcher at Google Inc.
 
     const char *type = playback ? "Output" : "Input";
     // dmix and dsnoop are only for playback and capture, respectively, but ALSA
     // stupidly includes them in both lists.
@@ -1849,16 +1853,18 @@ int32_t AudioDeviceLinuxALSA::GetDevices
             return -1;
         }
 
         enumCount++; // default is 0
         if ((function == FUNC_GET_DEVICE_NAME ||
             function == FUNC_GET_DEVICE_NAME_FOR_AN_ENUM) && enumDeviceNo == 0)
         {
             strcpy(enumDeviceName, "default");
+            if (enumDeviceId)
+                memset(enumDeviceId, 0, ediLen);
 
             err = LATE(snd_device_name_free_hint)(hints);
             if (err != 0)
             {
                 WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                              "GetDevicesInfo - device name free hint error: %s",
                              LATE(snd_strerror)(err));
             }
@@ -1911,28 +1917,38 @@ int32_t AudioDeviceLinuxALSA::GetDevices
 
                 }
                 if ((FUNC_GET_DEVICE_NAME == function) &&
                     (enumDeviceNo == enumCount))
                 {
                     // We have found the enum device, copy the name to buffer.
                     strncpy(enumDeviceName, desc, ednLen);
                     enumDeviceName[ednLen-1] = '\0';
+                    if (enumDeviceId)
+                    {
+                        strncpy(enumDeviceId, name, ediLen);
+                        enumDeviceId[ediLen-1] = '\0';
+                    }
                     keepSearching = false;
                     // Replace '\n' with '-'.
                     char * pret = strchr(enumDeviceName, '\n'/*0xa*/); //LF
                     if (pret)
                         *pret = '-';
                 }
                 if ((FUNC_GET_DEVICE_NAME_FOR_AN_ENUM == function) &&
                     (enumDeviceNo == enumCount))
                 {
                     // We have found the enum device, copy the name to buffer.
                     strncpy(enumDeviceName, name, ednLen);
                     enumDeviceName[ednLen-1] = '\0';
+                    if (enumDeviceId)
+                    {
+                        strncpy(enumDeviceId, name, ediLen);
+                        enumDeviceId[ediLen-1] = '\0';
+                    }
                     keepSearching = false;
                 }
 
                 if (keepSearching)
                     ++enumCount;
 
                 if (desc != name)
                     free(desc);
@@ -1947,17 +1963,17 @@ int32_t AudioDeviceLinuxALSA::GetDevices
         err = LATE(snd_device_name_free_hint)(hints);
         if (err != 0)
         {
             WEBRTC_TRACE(kTraceError, kTraceAudioDevice, _id,
                          "GetDevicesInfo - device name free hint error: %s",
                          LATE(snd_strerror)(err));
             // Continue and return true anyway, since we did get the whole list.
         }
     }
 
     if (FUNC_GET_NUM_OF_DEVICE == function)
     {
         if (enumCount == 1) // only default?
             enumCount = 0;
         return enumCount; // Normal return point for function 0
     }
 
--- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/audio_device_alsa_linux.h
@@ -159,17 +159,19 @@ public:
 public:
     virtual void AttachAudioBuffer(AudioDeviceBuffer* audioBuffer);
 
 private:
     int32_t GetDevicesInfo(const int32_t function,
                            const bool playback,
                            const int32_t enumDeviceNo = 0,
                            char* enumDeviceName = NULL,
-                           const int32_t ednLen = 0) const;
+                           const WebRtc_Word32 ednLen = 0,
+                           char* enumDeviceID = NULL,
+                           const WebRtc_Word32 ediLen = 0) const;
     int32_t ErrorRecovery(int32_t error, snd_pcm_t* deviceHandle);
 
 private:
     void Lock() { _critSect.Enter(); };
     void UnLock() { _critSect.Leave(); };
 private:
     inline int32_t InputSanityCheckAfterUnlockedPeriod() const;
     inline int32_t OutputSanityCheckAfterUnlockedPeriod() const;
--- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.cc
@@ -22,68 +22,68 @@
  * OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
  * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #include "latebindingsymboltable_linux.h"
 
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
 #include <dlfcn.h>
 #endif
 
 // TODO(grunell): Either put inside webrtc namespace or use webrtc:: instead.
 using namespace webrtc;
 
 namespace webrtc_adm_linux {
 
 inline static const char *GetDllError() {
-#ifdef WEBRTC_LINUX
-  char *err = dlerror();
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
+  const char *err = dlerror();
   if (err) {
     return err;
   } else {
     return "No error";
   }
 #else
 #error Not implemented
 #endif
 }
 
 DllHandle InternalLoadDll(const char dll_name[]) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   DllHandle handle = dlopen(dll_name, RTLD_NOW);
 #else
 #error Not implemented
 #endif
   if (handle == kInvalidDllHandle) {
     WEBRTC_TRACE(kTraceWarning, kTraceAudioDevice, -1,
                "Can't load %s : %s", dll_name, GetDllError());
   }
   return handle;
 }
 
 void InternalUnloadDll(DllHandle handle) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   if (dlclose(handle) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                "%s", GetDllError());
   }
 #else
 #error Not implemented
 #endif
 }
 
 static bool LoadSymbol(DllHandle handle,
                        const char *symbol_name,
                        void **symbol) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   *symbol = dlsym(handle, symbol_name);
-  char *err = dlerror();
+  const char *err = dlerror();
   if (err) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                "Error loading symbol %s : %d", symbol_name, err);
     return false;
   } else if (!*symbol) {
     WEBRTC_TRACE(kTraceError, kTraceAudioDevice, -1,
                "Symbol %s is NULL", symbol_name);
     return false;
@@ -96,17 +96,17 @@ static bool LoadSymbol(DllHandle handle,
 
 // This routine MUST assign SOME value for every symbol, even if that value is
 // NULL, or else some symbols may be left with uninitialized data that the
 // caller may later interpret as a valid address.
 bool InternalLoadSymbols(DllHandle handle,
                          int num_symbols,
                          const char *const symbol_names[],
                          void *symbols[]) {
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   // Clear any old errors.
   dlerror();
 #endif
   for (int i = 0; i < num_symbols; ++i) {
     if (!LoadSymbol(handle, symbol_names[i], &symbols[i])) {
       return false;
     }
   }
--- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/latebindingsymboltable_linux.h
@@ -37,17 +37,17 @@
 
 // This file provides macros for creating "symbol table" classes to simplify the
 // dynamic loading of symbols from DLLs. Currently the implementation only
 // supports Linux and pure C symbols.
 // See talk/sound/pulseaudiosymboltable.(h|cc) for an example.
 
 namespace webrtc_adm_linux {
 
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
 typedef void *DllHandle;
 
 const DllHandle kInvalidDllHandle = NULL;
 #else
 #error Not implemented
 #endif
 
 // These are helpers for use only by the class below.
--- a/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc
+++ b/media/webrtc/trunk/webrtc/modules/audio_device/linux/pulseaudiosymboltable_linux.cc
@@ -24,16 +24,20 @@
  * OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
  * ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
  */
 
 #include "pulseaudiosymboltable_linux.h"
 
 namespace webrtc_adm_linux_pulse {
 
+#ifdef __OpenBSD__
+LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so")
+#else
 LATE_BINDING_SYMBOL_TABLE_DEFINE_BEGIN(PulseAudioSymbolTable, "libpulse.so.0")
+#endif
 #define X(sym) \
     LATE_BINDING_SYMBOL_TABLE_DEFINE_ENTRY(PulseAudioSymbolTable, sym)
 PULSE_AUDIO_SYMBOLS_LIST
 #undef X
 LATE_BINDING_SYMBOL_TABLE_DEFINE_END(PulseAudioSymbolTable)
 
 }  // namespace webrtc_adm_linux_pulse
--- a/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi
+++ b/media/webrtc/trunk/webrtc/modules/audio_processing/audio_processing.gypi
@@ -150,16 +150,17 @@
         {
           'target_name': 'audio_processing_sse2',
           'type': 'static_library',
           'sources': [
             'aec/aec_core_sse2.c',
             'aec/aec_rdft_sse2.c',
           ],
           'cflags': ['-msse2',],
+          'cflags_mozilla': [ '-msse2', ],
           'xcode_settings': {
             'OTHER_CFLAGS': ['-msse2',],
           },
         },
       ],
     }],
     ['target_arch=="arm" and armv7==1', {
       'targets': [{
@@ -173,21 +174,24 @@
           'aecm/aecm_core_neon.c',
           'ns/nsx_core_neon.c',
         ],
         'conditions': [
           ['OS=="android" or OS=="ios"', {
             'dependencies': [
               'audio_processing_offsets',
             ],
-            'sources': [
+            # We disable the ASM sources because our gyp->Makefile
+            # translator does not support the build steps needed to
+            # generate the asm offsets.
+            'sources!': [
               'aecm/aecm_core_neon.S',
               'ns/nsx_core_neon.S',
             ],
-            'sources!': [
+            'sources': [
               'aecm/aecm_core_neon.c',
               'ns/nsx_core_neon.c',
             ],
             'includes!': ['../../build/arm_neon.gypi',],
           }],
         ],
       }],
       'conditions': [
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/forward_error_correction.cc
@@ -7,16 +7,17 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #include "modules/rtp_rtcp/source/forward_error_correction.h"
 
 #include <algorithm>
 #include <cassert>
+#include <cstdlib> // for abs()
 #include <cstring>
 #include <iterator>
 
 #include "modules/rtp_rtcp/source/forward_error_correction_internal.h"
 #include "modules/rtp_rtcp/source/rtp_utility.h"
 #include "system_wrappers/interface/trace.h"
 
 namespace webrtc {
--- a/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
+++ b/media/webrtc/trunk/webrtc/modules/rtp_rtcp/source/rtp_utility.cc
@@ -13,17 +13,17 @@
 #include <cassert>
 #include <cmath>  // ceil
 #include <cstring>  // memcpy
 
 #if defined(_WIN32)
 #include <Windows.h>  // FILETIME
 #include <WinSock.h>  // timeval
 #include <MMSystem.h>  // timeGetTime
-#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_MAC))
+#elif ((defined WEBRTC_LINUX) || (defined WEBRTC_BSD) || (defined WEBRTC_MAC))
 #include <sys/time.h>  // gettimeofday
 #include <time.h>
 #endif
 #if (defined(_DEBUG) && defined(_WIN32) && (_MSC_VER >= 1400))
 #include <stdio.h>
 #endif
 
 #include "system_wrappers/interface/tick_util.h"
@@ -64,19 +64,19 @@ uint32_t GetCurrentRTP(Clock* clock, uin
   local_clock->CurrentNtp(secs, frac);
   if (use_global_clock) {
     delete local_clock;
   }
   return ConvertNTPTimeToRTP(secs, frac, freq);
 }
 
 uint32_t ConvertNTPTimeToRTP(uint32_t NTPsec, uint32_t NTPfrac, uint32_t freq) {
   float ftemp = (float)NTPfrac / (float)NTP_FRAC;
   uint32_t tmp = (uint32_t)(ftemp * freq);
   return NTPsec * freq + tmp;
 }
 
 uint32_t ConvertNTPTimeToMS(uint32_t NTPsec, uint32_t NTPfrac) {
   int freq = 1000;
   float ftemp = (float)NTPfrac / (float)NTP_FRAC;
   uint32_t tmp = (uint32_t)(ftemp * freq);
   uint32_t MStime = NTPsec * freq + tmp;
   return MStime;
@@ -98,17 +98,17 @@ uint16_t GetPayloadDataLength(const WebR
   return static_cast<uint16_t>(length);
 }
 
 #if defined(_WIN32)
 bool StringCompare(const char* str1, const char* str2,
                    const uint32_t length) {
   return (_strnicmp(str1, str2, length) == 0) ? true : false;
 }
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 bool StringCompare(const char* str1, const char* str2,
                    const uint32_t length) {
   return (strncasecmp(str1, str2, length) == 0) ? true : false;
 }
 #endif
 
 #if !defined(WEBRTC_LITTLE_ENDIAN) && !defined(WEBRTC_BIG_ENDIAN)
 #error Either WEBRTC_LITTLE_ENDIAN or WEBRTC_BIG_ENDIAN must be defined
@@ -138,17 +138,17 @@ void AssignUWord24ToBuffer(uint8_t* data
 #else
   dataBuffer[0] = static_cast<uint8_t>(value);
   dataBuffer[1] = static_cast<uint8_t>(value >> 8);
   dataBuffer[2] = static_cast<uint8_t>(value >> 16);
 #endif
 }
 
 void AssignUWord16ToBuffer(uint8_t* dataBuffer, uint16_t value) {
 #if defined(WEBRTC_LITTLE_ENDIAN)
   dataBuffer[0] = static_cast<uint8_t>(value >> 8);
   dataBuffer[1] = static_cast<uint8_t>(value);
 #else
   uint16_t* ptr = reinterpret_cast<uint16_t*>(dataBuffer);
   ptr[0] = value;
 #endif
 }
 
--- a/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/utility/source/rtp_dump_impl.cc
@@ -14,17 +14,17 @@
 #include <stdio.h>
 
 #include "critical_section_wrapper.h"
 #include "trace.h"
 
 #if defined(_WIN32)
 #include <Windows.h>
 #include <mmsystem.h>
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 #include <string.h>
 #include <sys/time.h>
 #include <time.h>
 #endif
 
 #if (defined(_DEBUG) && defined(_WIN32))
 #define DEBUG_PRINT(expr)   OutputDebugString(##expr)
 #define DEBUG_PRINTP(expr, p)   \
@@ -228,21 +228,21 @@ bool RtpDumpImpl::RTCP(const uint8_t* pa
     case 204: case 205: case 206: case 207:
         is_rtcp = true;
         break;
     }
     return is_rtcp;
 }
 
 // TODO (hellner): why is TickUtil not used here?
-inline uint32_t RtpDumpImpl::GetTimeInMS() const
+inline WebRtc_UWord32 RtpDumpImpl::GetTimeInMS() const
 {
 #if defined(_WIN32)
     return timeGetTime();
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
     struct timeval tv;
     struct timezone tz;
     unsigned long val;
 
     gettimeofday(&tv, &tz);
     val = tv.tv_sec * 1000 + tv.tv_usec / 1000;
     return val;
 #else
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
@@ -11,16 +11,18 @@
 #include "device_info_android.h"
 
 #include <stdio.h>
 
 #include "ref_count.h"
 #include "trace.h"
 #include "video_capture_android.h"
 
+#include "AndroidJNIWrapper.h"
+
 namespace webrtc
 {
 
 namespace videocapturemodule
 {
 
 static jclass g_capabilityClass = NULL;
 
@@ -174,17 +176,17 @@ int32_t DeviceInfoAndroid::CreateCapabil
   if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
           env,
           javaCmDevInfoClass,
           javaCmDevInfoObject,
           attached) != 0)
     return -1;
 
   // Find the capability class
-  jclass javaCapClass = g_capabilityClass;
+  jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass);
   if (javaCapClass == NULL) {
     VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: SetAndroidCaptureClasses must be called first!",
                  __FUNCTION__);
     return -1;
   }
 
@@ -252,16 +254,18 @@ int32_t DeviceInfoAndroid::CreateCapabil
 
   _lastUsedDeviceNameLength = strlen((char*) deviceUniqueIdUTF8);
   _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
                                         _lastUsedDeviceNameLength + 1);
   memcpy(_lastUsedDeviceName,
          deviceUniqueIdUTF8,
          _lastUsedDeviceNameLength + 1);
 
+  env->DeleteGlobalRef(javaCapClass);
+
   VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
   WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                "CreateCapabilityMap %d", _captureCapabilities.Size());
 
   return _captureCapabilities.Size();
 }
 
 int32_t DeviceInfoAndroid::GetOrientation(
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.h
@@ -10,16 +10,19 @@
 
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_DEVICE_INFO_ANDROID_H_
 
 #include <jni.h>
 #include "../video_capture_impl.h"
 #include "../device_info_impl.h"
 
+#define AndroidJavaCaptureDeviceInfoClass "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid"
+#define AndroidJavaCaptureCapabilityClass "org/webrtc/videoengine/CaptureCapabilityAndroid"
+
 namespace webrtc
 {
 namespace videocapturemodule
 {
 
 // Android logging, uncomment to print trace to
 // logcat instead of trace file/callback
 // #include <android/log.h>
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -20,243 +20,373 @@ import org.webrtc.videoengine.VideoCaptu
 import android.graphics.ImageFormat;
 import android.graphics.PixelFormat;
 import android.graphics.Rect;
 import android.graphics.SurfaceTexture;
 import android.graphics.YuvImage;
 import android.hardware.Camera;
 import android.hardware.Camera.PreviewCallback;
 import android.util.Log;
+import android.view.Surface;
 import android.view.SurfaceHolder;
 import android.view.SurfaceHolder.Callback;
+import android.view.SurfaceView;
+import android.view.TextureView;
+import android.view.TextureView.SurfaceTextureListener;
+import android.view.View;
+
+import org.mozilla.gecko.GeckoApp;
+import org.mozilla.gecko.GeckoAppShell;
+import org.mozilla.gecko.GeckoAppShell.AppStateListener;
+import org.mozilla.gecko.util.ThreadUtils;
 
 public class VideoCaptureAndroid implements PreviewCallback, Callback {
 
     private final static String TAG = "WEBRTC-JC";
 
     private Camera camera;
+    private int cameraId;
     private AndroidVideoCaptureDevice currentDevice = null;
     public ReentrantLock previewBufferLock = new ReentrantLock();
     // This lock takes sync with StartCapture and SurfaceChanged
     private ReentrantLock captureLock = new ReentrantLock();
     private int PIXEL_FORMAT = ImageFormat.NV21;
     PixelFormat pixelFormat = new PixelFormat();
     // True when the C++ layer has ordered the camera to be started.
     private boolean isCaptureStarted = false;
     private boolean isCaptureRunning = false;
     private boolean isSurfaceReady = false;
+    private SurfaceHolder surfaceHolder = null;
+    private SurfaceTexture surfaceTexture = null;
+    private SurfaceTexture dummySurfaceTexture = null;
 
     private final int numCaptureBuffers = 3;
     private int expectedFrameSize = 0;
     private int orientation = 0;
     private int id = 0;
     // C++ callback context variable.
     private long context = 0;
     private SurfaceHolder localPreview = null;
-    private SurfaceTexture dummySurfaceTexture = null;
     // True if this class owns the preview video buffers.
     private boolean ownsBuffers = false;
 
     private int mCaptureWidth = -1;
     private int mCaptureHeight = -1;
     private int mCaptureFPS = -1;
+    private int mCaptureRotation = 0;
+
+    private AppStateListener mAppStateListener = null;
+
+    public class MySurfaceTextureListener implements TextureView.SurfaceTextureListener {
+        public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
+            Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureAvailable");
+
+            captureLock.lock();
+            isSurfaceReady = true;
+            surfaceTexture = surface;
+
+            tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
+            captureLock.unlock();
+        }
+
+        public void onSurfaceTextureSizeChanged(SurfaceTexture surface,
+                                                int width, int height) {
+            // Ignored; Camera does all the work for us.
+            // Note that for a TextureView we start in onSurfaceTextureAvailable,
+            // while for a SurfaceView we start in surfaceChanged. TextureView
+            // does not deliver onSurfaceTextureSizeChanged during creation.
+        }
+
+        public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
+            Log.d(TAG, "VideoCaptureAndroid::onSurfaceTextureDestroyed");
+            isSurfaceReady = false;
+            DetachCamera();
+            return true;
+        }
+
+        public void onSurfaceTextureUpdated(SurfaceTexture surface) {
+            // Invoked every time there's a new Camera preview frame
+        }
+    }
 
     public static
     void DeleteVideoCaptureAndroid(VideoCaptureAndroid captureAndroid) {
         Log.d(TAG, "DeleteVideoCaptureAndroid");
-        if (captureAndroid.camera == null) {
-            return;
-        }
+
+        GeckoAppShell.getGeckoInterface().removeAppStateListener(captureAndroid.mAppStateListener);
 
         captureAndroid.StopCapture();
-        captureAndroid.camera.release();
-        captureAndroid.camera = null;
+        if (captureAndroid.camera != null) {
+            captureAndroid.camera.release();
+            captureAndroid.camera = null;
+        }
         captureAndroid.context = 0;
+
+        View cameraView = GeckoAppShell.getGeckoInterface().getCameraView();
+        if (cameraView instanceof SurfaceView) {
+            ((SurfaceView)cameraView).getHolder().removeCallback(captureAndroid);
+        } else if (cameraView instanceof TextureView) {
+            // No need to explicitly remove the Listener:
+            // i.e. ((TextureView)cameraView).setSurfaceTextureListener(null);
+        }
+        ThreadUtils.getUiHandler().post(new Runnable() {
+            @Override
+            public void run() {
+                try {
+                    GeckoAppShell.getGeckoInterface().disableCameraView();
+                } catch (Exception e) {
+                    Log.e(TAG,
+                          "VideoCaptureAndroid disableCameraView exception: " +
+                          e.getLocalizedMessage());
+                }
+            }
+        });
     }
 
     public VideoCaptureAndroid(int in_id, long in_context, Camera in_camera,
-            AndroidVideoCaptureDevice in_device) {
+                               AndroidVideoCaptureDevice in_device,
+                               int in_cameraId) {
         id = in_id;
         context = in_context;
         camera = in_camera;
+        cameraId = in_cameraId;
         currentDevice = in_device;
+        mCaptureRotation = GetRotateAmount();
+
+        try {
+            View cameraView = GeckoAppShell.getGeckoInterface().getCameraView();
+            if (cameraView instanceof SurfaceView) {
+                ((SurfaceView)cameraView).getHolder().addCallback(this);
+            } else if (cameraView instanceof TextureView) {
+                MySurfaceTextureListener listener = new MySurfaceTextureListener();
+                ((TextureView)cameraView).setSurfaceTextureListener(listener);
+            }
+            ThreadUtils.getUiHandler().post(new Runnable() {
+                @Override
+                public void run() {
+                    try {
+                        GeckoAppShell.getGeckoInterface().enableCameraView();
+                    } catch (Exception e) {
+                        Log.e(TAG,
+                              "VideoCaptureAndroid enableCameraView exception: "
+                               + e.getLocalizedMessage());
+                    }
+                }
+            });
+        } catch (Exception ex) {
+            Log.e(TAG, "VideoCaptureAndroid constructor exception: " +
+                  ex.getLocalizedMessage());
+        }
+
+        mAppStateListener = new AppStateListener() {
+            @Override
+            public void onPause() {
+                StopCapture();
+                if (camera != null) {
+                    camera.release();
+                    camera = null;
+                }
+            }
+            @Override
+            public void onResume() {
+                try {
+                    if(android.os.Build.VERSION.SDK_INT>8) {
+                        camera = Camera.open(cameraId);
+                    } else {
+                        camera = Camera.open();
+                    }
+                } catch (Exception ex) {
+                    Log.e(TAG, "Error reopening to the camera: " + ex.getMessage());
+                }
+                captureLock.lock();
+                isCaptureStarted = true;
+                tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
+                captureLock.unlock();
+            }
+            @Override
+            public void onConfigurationChanged() {
+                mCaptureRotation = GetRotateAmount();
+            }
+        };
+
+        GeckoAppShell.getGeckoInterface().addAppStateListener(mAppStateListener);
+    }
+
+    public int GetRotateAmount() {
+        android.hardware.Camera.CameraInfo info =
+            new android.hardware.Camera.CameraInfo();
+        android.hardware.Camera.getCameraInfo(cameraId, info);
+        int rotation = GeckoAppShell.getGeckoInterface().getActivity().getWindowManager().getDefaultDisplay().getRotation();
+        int degrees = 0;
+        switch (rotation) {
+            case Surface.ROTATION_0: degrees = 0; break;
+            case Surface.ROTATION_90: degrees = 90; break;
+            case Surface.ROTATION_180: degrees = 180; break;
+            case Surface.ROTATION_270: degrees = 270; break;
+        }
+
+        int result;
+        if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
+            result = (info.orientation + degrees) % 360;
+        } else {  // back-facing
+            result = (info.orientation - degrees + 360) % 360;
+        }
+
+        return result;
     }
 
     private int tryStartCapture(int width, int height, int frameRate) {
         if (camera == null) {
             Log.e(TAG, "Camera not initialized %d" + id);
             return -1;
         }
 
-        Log.d(TAG, "tryStartCapture: " + width +
-            "x" + height +", frameRate: " + frameRate +
-            ", isCaptureRunning: " + isCaptureRunning +
-            ", isSurfaceReady: " + isSurfaceReady +
-            ", isCaptureStarted: " + isCaptureStarted);
+        Log.d(TAG, "tryStartCapture " + width +
+                " height " + height +" frame rate " + frameRate +
+                " isCaptureRunning " + isCaptureRunning +
+                " isSurfaceReady " + isSurfaceReady +
+                " isCaptureStarted " + isCaptureStarted);
 
-        if (isCaptureRunning || !isCaptureStarted) {
+        if (isCaptureRunning || !isSurfaceReady || !isCaptureStarted) {
             return 0;
         }
 
-        CaptureCapabilityAndroid currentCapability =
-                new CaptureCapabilityAndroid();
-        currentCapability.width = width;
-        currentCapability.height = height;
-        currentCapability.maxFPS = frameRate;
-        PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
+        try {
+            if (surfaceHolder != null)
+                camera.setPreviewDisplay(surfaceHolder);
+            if (surfaceTexture != null)
+                camera.setPreviewTexture(surfaceTexture);
+            if (surfaceHolder == null && surfaceTexture == null) {
+                // No local renderer.  Camera won't capture without
+                // setPreview{Texture,Display}, so we create a dummy SurfaceTexture
+                // and hand it over to Camera, but never listen for frame-ready
+                // callbacks, and never call updateTexImage on it.
+                try {
+                    dummySurfaceTexture = new SurfaceTexture(42);
+                    camera.setPreviewTexture(dummySurfaceTexture);
+                } catch (IOException e) {
+                    throw new RuntimeException(e);
+                }
+            }
 
-        Camera.Parameters parameters = camera.getParameters();
-        parameters.setPreviewSize(currentCapability.width,
-                currentCapability.height);
-        parameters.setPreviewFormat(PIXEL_FORMAT);
-        parameters.setPreviewFrameRate(currentCapability.maxFPS);
-        camera.setParameters(parameters);
+            CaptureCapabilityAndroid currentCapability =
+                    new CaptureCapabilityAndroid();
+            currentCapability.width = width;
+            currentCapability.height = height;
+            currentCapability.maxFPS = frameRate;
+            PixelFormat.getPixelFormatInfo(PIXEL_FORMAT, pixelFormat);
 
-        int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
-        byte[] buffer = null;
-        for (int i = 0; i < numCaptureBuffers; i++) {
-            buffer = new byte[bufSize];
-            camera.addCallbackBuffer(buffer);
+            Camera.Parameters parameters = camera.getParameters();
+            parameters.setPreviewSize(currentCapability.width,
+                    currentCapability.height);
+            parameters.setPreviewFormat(PIXEL_FORMAT);
+            parameters.setPreviewFrameRate(currentCapability.maxFPS);
+            camera.setParameters(parameters);
+
+            int bufSize = width * height * pixelFormat.bitsPerPixel / 8;
+            byte[] buffer = null;
+            for (int i = 0; i < numCaptureBuffers; i++) {
+                buffer = new byte[bufSize];
+                camera.addCallbackBuffer(buffer);
+            }
+            camera.setPreviewCallbackWithBuffer(this);
+            ownsBuffers = true;
+
+            camera.startPreview();
+            previewBufferLock.lock();
+            expectedFrameSize = bufSize;
+            isCaptureRunning = true;
+            previewBufferLock.unlock();
+
         }
-        camera.setPreviewCallbackWithBuffer(this);
-        ownsBuffers = true;
+        catch (Exception ex) {
+            Log.e(TAG, "Failed to start camera: " + ex.getMessage());
+            return -1;
+        }
 
-        camera.startPreview();
-        previewBufferLock.lock();
-        expectedFrameSize = bufSize;
         isCaptureRunning = true;
-        previewBufferLock.unlock();
-
         return 0;
     }
 
     public int StartCapture(int width, int height, int frameRate) {
         Log.d(TAG, "StartCapture width " + width +
                 " height " + height +" frame rate " + frameRate);
-        // Get the local preview SurfaceHolder from the static render class
-        localPreview = ViERenderer.GetLocalRenderer();
-        if (localPreview != null) {
-            if (localPreview.getSurface() != null &&
-                localPreview.getSurface().isValid()) {
-                surfaceCreated(localPreview);
-            }
-            localPreview.addCallback(this);
-        } else {
-          // No local renderer.  Camera won't capture without
-          // setPreview{Texture,Display}, so we create a dummy SurfaceTexture
-          // and hand it over to Camera, but never listen for frame-ready
-          // callbacks, and never call updateTexImage on it.
-          captureLock.lock();
-          try {
-            dummySurfaceTexture = new SurfaceTexture(42);
-            camera.setPreviewTexture(dummySurfaceTexture);
-          } catch (IOException e) {
-            throw new RuntimeException(e);
-          }
-          captureLock.unlock();
-        }
-
         captureLock.lock();
         isCaptureStarted = true;
         mCaptureWidth = width;
         mCaptureHeight = height;
         mCaptureFPS = frameRate;
 
         int res = tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
 
         captureLock.unlock();
         return res;
     }
 
-    public int StopCapture() {
-        Log.d(TAG, "StopCapture");
+    public int DetachCamera() {
         try {
             previewBufferLock.lock();
             isCaptureRunning = false;
             previewBufferLock.unlock();
-            camera.stopPreview();
-            camera.setPreviewCallbackWithBuffer(null);
-        } catch (RuntimeException e) {
-            Log.e(TAG, "Failed to stop camera", e);
+            if (camera != null) {
+                camera.setPreviewCallbackWithBuffer(null);
+                camera.stopPreview();
+            }
+        } catch (Exception ex) {
+            Log.e(TAG, "Failed to stop camera: " + ex.getMessage());
             return -1;
         }
-
-        isCaptureStarted = false;
         return 0;
     }
 
-    native void ProvideCameraFrame(byte[] data, int length, long captureObject);
+    public int StopCapture() {
+        Log.d(TAG, "StopCapture");
+        isCaptureStarted = false;
+        return DetachCamera();
+    }
+
+    native void ProvideCameraFrame(byte[] data, int length, int rotation,
+                                   long captureObject);
 
     public void onPreviewFrame(byte[] data, Camera camera) {
         previewBufferLock.lock();
 
         // The following line is for debug only
-        // Log.v(TAG, "preview frame length " + data.length +
-        //            " context" + context);
+        Log.v(TAG, "preview frame length " + data.length +
+              " context" + context);
         if (isCaptureRunning) {
             // If StartCapture has been called but not StopCapture
             // Call the C++ layer with the captured frame
             if (data.length == expectedFrameSize) {
-                ProvideCameraFrame(data, expectedFrameSize, context);
+                ProvideCameraFrame(data, expectedFrameSize, mCaptureRotation,
+                                   context);
                 if (ownsBuffers) {
                     // Give the video buffer to the camera service again.
                     camera.addCallbackBuffer(data);
                 }
             }
         }
         previewBufferLock.unlock();
     }
 
-    // Sets the rotation of the preview render window.
-    // Does not affect the captured video image.
-    public void SetPreviewRotation(int rotation) {
-        Log.v(TAG, "SetPreviewRotation:" + rotation);
-
-        if (camera == null) {
-            return;
-        }
-
-        int resultRotation = 0;
-        if (currentDevice.frontCameraType ==
-            VideoCaptureDeviceInfoAndroid.FrontFacingCameraType.Android23) {
-            // this is a 2.3 or later front facing camera.
-            // SetDisplayOrientation will flip the image horizontally
-            // before doing the rotation.
-            resultRotation = ( 360 - rotation ) % 360; // compensate the mirror
-        }
-        else {
-            // Back facing or 2.2 or previous front camera
-            resultRotation = rotation;
-        }
-        camera.setDisplayOrientation(resultRotation);
-    }
-
     public void surfaceChanged(SurfaceHolder holder,
                                int format, int width, int height) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceChanged");
+
+        captureLock.lock();
+        isSurfaceReady = true;
+        surfaceHolder = holder;
+
+        tryStartCapture(mCaptureWidth, mCaptureHeight, mCaptureFPS);
+        captureLock.unlock();
+        return;
     }
 
     public void surfaceCreated(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceCreated");
-        captureLock.lock();
-        try {
-          if (camera != null) {
-              camera.setPreviewDisplay(holder);
-          }
-        } catch (IOException e) {
-            Log.e(TAG, "Failed to set preview surface!", e);
-        }
-        captureLock.unlock();
     }
 
     public void surfaceDestroyed(SurfaceHolder holder) {
         Log.d(TAG, "VideoCaptureAndroid::surfaceDestroyed");
-        captureLock.lock();
-        try {
-            if (camera != null) {
-                camera.setPreviewDisplay(null);
-            }
-        } catch (IOException e) {
-            Log.e(TAG, "Failed to clear preview surface!", e);
-        }
-        captureLock.unlock();
+        isSurfaceReady = false;
+        DetachCamera();
     }
 }
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid.java
@@ -112,31 +112,42 @@ public class VideoCaptureDeviceInfoAndro
 
                 camera = Camera.open(i);
                 Camera.Parameters parameters = camera.getParameters();
                 AddDeviceInfo(newDevice, parameters);
                 camera.release();
                 camera = null;
                 deviceList.add(newDevice);
             }
+        } else {
+            camera = Camera.open();
+            Camera.Parameters parameters = camera.getParameters();
+            AndroidVideoCaptureDevice newDevice = new AndroidVideoCaptureDevice();
+            AddDeviceInfo(newDevice, parameters);
+            newDevice.deviceUniqueName = "Camera";
+            camera.release();
+            camera = null;
+            deviceList.add(newDevice);
         }
         VerifyCapabilities();
         return 0;
     }
 
     // Adds the capture capabilities of the currently opened device
     private void AddDeviceInfo(AndroidVideoCaptureDevice newDevice,
             Camera.Parameters parameters) {
 
         List<Size> sizes = parameters.getSupportedPreviewSizes();
         List<Integer> frameRates = parameters.getSupportedPreviewFrameRates();
         int maxFPS = 0;
-        for(Integer frameRate:frameRates) {
-            if(frameRate > maxFPS) {
-                maxFPS = frameRate;
+        if (frameRates != null) {
+            for(Integer frameRate:frameRates) {
+                if(frameRate > maxFPS) {
+                    maxFPS = frameRate;
+                }
             }
         }
 
         newDevice.captureCapabilies = new CaptureCapabilityAndroid[sizes.size()];
         for(int i = 0; i < sizes.size(); ++i) {
             Size s = sizes.get(i);
             newDevice.captureCapabilies[i] = new CaptureCapabilityAndroid();
             newDevice.captureCapabilies[i].height = s.height;
@@ -245,44 +256,47 @@ public class VideoCaptureDeviceInfoAndro
 
     // Returns an instance of VideoCaptureAndroid.
     public VideoCaptureAndroid AllocateCamera(int id, long context,
             String deviceUniqueId) {
         try {
             Log.d(TAG, "AllocateCamera " + deviceUniqueId);
 
             Camera camera = null;
+            int cameraId = 0;
             AndroidVideoCaptureDevice deviceToUse = null;
             for (AndroidVideoCaptureDevice device: deviceList) {
                 if(device.deviceUniqueName.equals(deviceUniqueId)) {
                     // Found the wanted camera
                     deviceToUse = device;
                     switch(device.frontCameraType) {
                         case GalaxyS:
                             camera = AllocateGalaxySFrontCamera();
                             break;
                         case HTCEvo:
                             camera = AllocateEVOFrontFacingCamera();
                             break;
                         default:
                             // From Android 2.3 and onwards)
-                            if(android.os.Build.VERSION.SDK_INT>8)
-                                camera=Camera.open(device.index);
-                            else
-                                camera=Camera.open(); // Default camera
+                            if(android.os.Build.VERSION.SDK_INT>8) {
+                                cameraId = device.index;
+                                camera = Camera.open(device.index);
+                            } else {
+                                camera = Camera.open(); // Default_ camera
+                            }
                     }
                 }
             }
 
             if(camera == null) {
                 return null;
             }
             Log.v(TAG, "AllocateCamera - creating VideoCaptureAndroid");
 
-            return new VideoCaptureAndroid(id, context, camera, deviceToUse);
+            return new VideoCaptureAndroid(id, context, camera, deviceToUse, cameraId);
         } catch (NoSuchMethodException e) {
             Log.e(TAG, "AllocateCamera Failed to open camera", e);
         } catch (ClassNotFoundException e) {
             Log.e(TAG, "AllocateCamera Failed to open camera", e);
         } catch (InvocationTargetException e) {
             Log.e(TAG, "AllocateCamera Failed to open camera", e);
         } catch (IllegalAccessException e) {
             Log.e(TAG, "AllocateCamera Failed to open camera", e);
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
@@ -11,16 +11,18 @@
 #include "video_capture_android.h"
 
 #include <stdio.h>
 
 #include "critical_section_wrapper.h"
 #include "ref_count.h"
 #include "trace.h"
 
+#include "AndroidJNIWrapper.h"
+
 namespace webrtc
 {
 #if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
 // TODO(leozwang) These SetAndroidVM apis will be refactored, thus we only
 // keep and reference java vm.
 int32_t SetCaptureAndroidVM(void* javaVM, void* javaContext) {
   return videocapturemodule::VideoCaptureAndroid::SetAndroidObjects(
       javaVM,
@@ -54,103 +56,68 @@ VideoCaptureModule* VideoCaptureImpl::Cr
 
 JavaVM* VideoCaptureAndroid::g_jvm = NULL;
 //VideoCaptureAndroid.java
 jclass VideoCaptureAndroid::g_javaCmClass = NULL;
 //VideoCaptureDeviceInfoAndroid.java
 jclass VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
 //static instance of VideoCaptureDeviceInfoAndroid.java
 jobject VideoCaptureAndroid::g_javaCmDevInfoObject = NULL;
-jobject VideoCaptureAndroid::g_javaContext = NULL;
 
 /*
  * Register references to Java Capture class.
  */
 int32_t VideoCaptureAndroid::SetAndroidObjects(void* javaVM,
                                                void* javaContext) {
 
   g_jvm = static_cast<JavaVM*> (javaVM);
-  g_javaContext = static_cast<jobject> (javaContext);
 
   if (javaVM) {
+    // Already done? Exit early.
+    if (g_javaCmClass != NULL &&
+        g_javaCmDevInfoClass != NULL &&
+        g_javaCmDevInfoObject != NULL) {
+      return 0;
+    }
+
     JNIEnv* env = NULL;
     if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                    "%s: could not get Java environment", __FUNCTION__);
       return -1;
     }
     // get java capture class type (note path to class packet)
-    jclass javaCmClassLocal = env->FindClass(AndroidJavaCaptureClass);
-    if (!javaCmClassLocal) {
+    g_javaCmClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureClass);
+    if (!g_javaCmClass) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                    "%s: could not find java class", __FUNCTION__);
       return -1;
     }
-    // create a global reference to the class
-    // (to tell JNI that we are referencing it
-    // after this function has returned)
-    g_javaCmClass = static_cast<jclass>
-        (env->NewGlobalRef(javaCmClassLocal));
-    if (!g_javaCmClass) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: InitVideoEngineJava(): could not create"
-                   " Java Camera class reference",
-                   __FUNCTION__);
-      return -1;
-    }
-    // Delete local class ref, we only use the global ref
-    env->DeleteLocalRef(javaCmClassLocal);
     JNINativeMethod nativeFunctions =
-        { "ProvideCameraFrame", "([BIJ)V",
+        { "ProvideCameraFrame", "([BIIJ)V",
           (void*) &VideoCaptureAndroid::ProvideCameraFrame };
     if (env->RegisterNatives(g_javaCmClass, &nativeFunctions, 1) == 0) {
       WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
                    "%s: Registered native functions", __FUNCTION__);
     }
     else {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                    "%s: Failed to register native functions",
                    __FUNCTION__);
       return -1;
     }
 
-    jclass capabilityClassLocal = env->FindClass(
-        "org/webrtc/videoengine/CaptureCapabilityAndroid");
-    if (!capabilityClassLocal) {
+    // get java capture device info class type (note path to class package)
+    g_javaCmDevInfoClass = jsjni_GetGlobalClassRef(
+                 AndroidJavaCaptureDeviceInfoClass);
+    if (!g_javaCmDevInfoClass) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                    "%s: could not find java class", __FUNCTION__);
       return -1;
     }
-    jclass capabilityClassGlobal = reinterpret_cast<jclass>(env->NewGlobalRef(
-        capabilityClassLocal));
-    DeviceInfoAndroid::SetAndroidCaptureClasses(capabilityClassGlobal);
-
-    // get java capture class type (note path to class packet)
-    jclass javaCmDevInfoClassLocal = env->FindClass(
-        "org/webrtc/videoengine/VideoCaptureDeviceInfoAndroid");
-    if (!javaCmDevInfoClassLocal) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: could not find java class", __FUNCTION__);
-      return -1;
-    }
-
-    // create a global reference to the class
-    // (to tell JNI that we are referencing it
-    // after this function has returned)
-    g_javaCmDevInfoClass = static_cast<jclass>
-        (env->NewGlobalRef(javaCmDevInfoClassLocal));
-    if (!g_javaCmDevInfoClass) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: InitVideoEngineJava(): could not create Java "
-                   "Camera Device info class reference",
-                   __FUNCTION__);
-      return -1;
-    }
-    // Delete local class ref, we only use the global ref
-    env->DeleteLocalRef(javaCmDevInfoClassLocal);
 
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
                  "VideoCaptureDeviceInfoAndroid get method id");
 
     // get the method ID for the Android Java CaptureClass static
     //CreateVideoCaptureAndroid factory method.
     jmethodID cid = env->GetStaticMethodID(
         g_javaCmDevInfoClass,
@@ -167,17 +134,17 @@ int32_t VideoCaptureAndroid::SetAndroidO
 
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
                  "%s: construct static java device object", __FUNCTION__);
 
     // construct the object by calling the static constructor object
     jobject javaCameraDeviceInfoObjLocal =
         env->CallStaticObjectMethod(g_javaCmDevInfoClass,
                                     cid, (int) -1,
-                                    g_javaContext);
+                                    javaContext);
     if (!javaCameraDeviceInfoObjLocal) {
       WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
                    "%s: could not create Java Capture Device info object",
                    __FUNCTION__);
       return -1;
     }
     // create a reference to the object (to tell JNI that
     // we are referencing it after this function has returned)
@@ -279,21 +246,39 @@ int32_t VideoCaptureAndroid::ReleaseAndr
  * Class:     org_webrtc_capturemodule_VideoCaptureAndroid
  * Method:    ProvideCameraFrame
  * Signature: ([BIIJ)V
  */
 void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
                                                      jobject,
                                                      jbyteArray javaCameraFrame,
                                                      jint length,
+                                                     jint rotation,
                                                      jlong context) {
   VideoCaptureAndroid* captureModule =
       reinterpret_cast<VideoCaptureAndroid*>(context);
   WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture,
                -1, "%s: IncomingFrame %d", __FUNCTION__,length);
+
+  switch (rotation) {
+    case 90:
+      captureModule->SetCaptureRotation(kCameraRotate90);
+      break;
+    case 180:
+      captureModule->SetCaptureRotation(kCameraRotate180);
+      break;
+    case 270:
+      captureModule->SetCaptureRotation(kCameraRotate270);
+      break;
+    case 0:
+    default:
+      captureModule->SetCaptureRotation(kCameraRotate0);
+      break;
+  }
+
   jbyte* cameraFrame= env->GetByteArrayElements(javaCameraFrame,NULL);
   captureModule->IncomingFrame((uint8_t*) cameraFrame,
                                length,captureModule->_frameInfo,0);
   env->ReleaseByteArrayElements(javaCameraFrame,cameraFrame,JNI_ABORT);
 }
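ProvideCameraFrame pins the Java byte array, copies the pixels out through IncomingFrame, and releases with JNI_ABORT: since the native side only reads the buffer, there is nothing to copy back. A self-contained sketch of that access pattern (hypothetical function, not this file's code):

    // Sketch: read-only access to a Java byte[] from native code.
    #include <jni.h>
    #include <stdint.h>
    #include <string.h>

    void CopyOutJavaBuffer(JNIEnv* env, jbyteArray array, jint length,
                           uint8_t* dest /* >= length bytes */) {
      jbyte* data = env->GetByteArrayElements(array, NULL);  // pin or copy
      if (!data)
        return;
      memcpy(dest, data, length);
      // JNI_ABORT: release without writing changes back - we only read.
      env->ReleaseByteArrayElements(array, data, JNI_ABORT);
    }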
 
 
 
@@ -306,17 +291,17 @@ VideoCaptureAndroid::VideoCaptureAndroid
 
 // ----------------------------------------------------------------------------
 //  Init
 //
 //  Initializes needed Java resources like the JNI interface to
 //  VideoCaptureAndroid.java
 // ----------------------------------------------------------------------------
 int32_t VideoCaptureAndroid::Init(const int32_t id,
-                                  const char* deviceUniqueIdUTF8) {
+                                        const char* deviceUniqueIdUTF8) {
   const int nameLength = strlen(deviceUniqueIdUTF8);
   if (nameLength >= kVideoCaptureUniqueNameLength) {
     return -1;
   }
 
   // Store the device name
   _deviceUniqueId = new char[nameLength + 1];
   memcpy(_deviceUniqueId, deviceUniqueIdUTF8, nameLength + 1);
@@ -336,16 +321,17 @@ int32_t VideoCaptureAndroid::Init(const 
   if (!g_jvm) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Not a valid Java VM pointer", __FUNCTION__);
     return -1;
   }
   // get the JNI env for this thread
   JNIEnv *env;
   bool isAttached = false;
+  int32_t rotation = 0;
 
   // get the JNI env for this thread
   if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
     // try to attach the thread and get the env
     // Attach this thread to JVM
     jint res = g_jvm->AttachCurrentThread(&env, NULL);
     if ((res < 0) || !env) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -429,37 +415,45 @@ VideoCaptureAndroid::~VideoCaptureAndroi
                      "%s: Could not attach thread to JVM (%d, %p)",
                      __FUNCTION__, res, env);
       }
       else {
         isAttached = true;
       }
     }
 
-    // get the method ID for the Android Java CaptureClass static
-    // DeleteVideoCaptureAndroid  method. Call this to release the camera so
-    // another application can use it.
-    jmethodID cid = env->GetStaticMethodID(
-        g_javaCmClass,
-        "DeleteVideoCaptureAndroid",
-        "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
-    if (cid != NULL) {
-      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-                   "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
-      // Close the camera by calling the static destruct function.
-      env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
+    if (env) {
+      // get the method ID for the Android Java CaptureClass static
+      // DeleteVideoCaptureAndroid method. Call this to release the camera so
+      // another application can use it.
+      jmethodID cid = env->GetStaticMethodID(
+          g_javaCmClass,
+          "DeleteVideoCaptureAndroid",
+          "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+      if (cid != NULL) {
+        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                     "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
+        // Close the camera by calling the static delete method.
+        env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
 
-      // Delete global object ref to the camera.
-      env->DeleteGlobalRef(_javaCaptureObj);
-      _javaCaptureObj = NULL;
-    }
-    else {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: Failed to find DeleteVideoCaptureAndroid id",
-                   __FUNCTION__);
+        // Delete global object ref to the camera.
+        env->DeleteGlobalRef(_javaCaptureObj);
+        // Clean up the global class references
+        env->DeleteGlobalRef(g_javaCmClass);
+        env->DeleteGlobalRef(g_javaCmDevInfoClass);
+
+        _javaCaptureObj = NULL;
+        VideoCaptureAndroid::g_javaCmClass = NULL;
+        VideoCaptureAndroid::g_javaCmDevInfoClass = NULL;
+      }
+      else {
+        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                     "%s: Failed to find DeleteVideoCaptureAndroid id",
+                     __FUNCTION__);
+      }
     }
 
     // Detach this thread if it was attached
     if (isAttached) {
       if (g_jvm->DetachCurrentThread() < 0) {
         WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
                      _id, "%s: Could not detach thread from JVM",
                      __FUNCTION__);
@@ -471,16 +465,17 @@ VideoCaptureAndroid::~VideoCaptureAndroi
 int32_t VideoCaptureAndroid::StartCapture(
     const VideoCaptureCapability& capability) {
   CriticalSectionScoped cs(&_apiCs);
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: ", __FUNCTION__);
 
   bool isAttached = false;
   int32_t result = 0;
+  int32_t rotation = 0;
   // get the JNI env for this thread
   JNIEnv *env;
   if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
     // try to attach the thread and get the env
     // Attach this thread to JVM
     jint res = g_jvm->AttachCurrentThread(&env, NULL);
     if ((res < 0) || !env) {
       WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
@@ -524,16 +519,17 @@ int32_t VideoCaptureAndroid::StartCaptur
 
   // Detach this thread if it was attached
   if (isAttached) {
     if (g_jvm->DetachCurrentThread() < 0) {
       WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
                    "%s: Could not detach thread from JVM", __FUNCTION__);
     }
   }
+
   if (result == 0) {
     _requestedCapability = capability;
     _captureStarted = true;
   }
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: result %d", __FUNCTION__, result);
   return result;
 }
@@ -605,71 +601,13 @@ int32_t VideoCaptureAndroid::CaptureSett
                "%s: ", __FUNCTION__);
   settings = _requestedCapability;
   return 0;
 }
 
 int32_t VideoCaptureAndroid::SetCaptureRotation(
     VideoCaptureRotation rotation) {
   CriticalSectionScoped cs(&_apiCs);
-  if (VideoCaptureImpl::SetCaptureRotation(rotation) == 0) {
-    if (!g_jvm)
-      return -1;
-
-    // get the JNI env for this thread
-    JNIEnv *env;
-    bool isAttached = false;
-
-    // get the JNI env for this thread
-    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-      // try to attach the thread and get the env
-      // Attach this thread to JVM
-      jint res = g_jvm->AttachCurrentThread(&env, NULL);
-      if ((res < 0) || !env) {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-                     _id,
-                     "%s: Could not attach thread to JVM (%d, %p)",
-                     __FUNCTION__, res, env);
-        return -1;
-      }
-      isAttached = true;
-    }
-
-    jmethodID cid = env->GetMethodID(g_javaCmClass, "SetPreviewRotation",
-                                     "(I)V");
-    if (cid == NULL) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: could not get java SetPreviewRotation ID",
-                   __FUNCTION__);
-      return -1;
-    }
-    jint rotateFrame = 0;
-    switch (rotation) {
-      case kCameraRotate0:
-        rotateFrame = 0;
-        break;
-      case kCameraRotate90:
-        rotateFrame = 90;
-        break;
-      case kCameraRotate180:
-        rotateFrame = 180;
-        break;
-      case kCameraRotate270:
-        rotateFrame = 270;
-        break;
-    }
-    env->CallVoidMethod(_javaCaptureObj, cid, rotateFrame);
-
-    // Detach this thread if it was attached
-    if (isAttached) {
-      if (g_jvm->DetachCurrentThread() < 0) {
-        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
-                     _id, "%s: Could not detach thread from JVM",
-                     __FUNCTION__);
-      }
-    }
-
-  }
-  return 0;
+  return VideoCaptureImpl::SetCaptureRotation(rotation);
 }
 
 }  // namespace videocapturemodule
 }  // namespace webrtc
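With SetCaptureRotation reduced to a pass-through, rotation now flows entirely from the per-frame value the Java side reports. Factored out, the degree-to-enum mapping inlined in ProvideCameraFrame is just (a sketch using this module's enum values):

    // Sketch: the degrees -> VideoCaptureRotation mapping applied per frame.
    static VideoCaptureRotation DegreesToRotation(int degrees) {
      switch (degrees) {
        case 90:  return kCameraRotate90;
        case 180: return kCameraRotate180;
        case 270: return kCameraRotate270;
        case 0:
        default:  return kCameraRotate0;  // anything else: treat as unrotated
      }
    }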
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
@@ -41,25 +41,26 @@ class VideoCaptureAndroid : public Video
   virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
   virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
 
  protected:
   virtual ~VideoCaptureAndroid();
   static void JNICALL ProvideCameraFrame (JNIEnv * env,
                                           jobject,
                                           jbyteArray javaCameraFrame,
-                                          jint length, jlong context);
+                                          jint length,
+                                          jint rotation,
+                                          jlong context);
   DeviceInfoAndroid _capInfo;
   jobject _javaCaptureObj; // Java Camera object.
   VideoCaptureCapability _frameInfo;
   bool _captureStarted;
 
   static JavaVM* g_jvm;
   static jclass g_javaCmClass;
   static jclass g_javaCmDevInfoClass;
   // Static java object implementing the needed device info functions.
   static jobject g_javaCmDevInfoObject;
-  static jobject g_javaContext; // Java Application context
 };
 
 }  // namespace videocapturemodule
 }  // namespace webrtc
 #endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
--- a/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/device_info_impl.cc
@@ -49,17 +49,17 @@ int32_t DeviceInfoImpl::NumberOfCapabili
     if (!deviceUniqueIdUTF8)
         return -1;
 
     _apiLock.AcquireLockShared();
 
     if (_lastUsedDeviceNameLength == strlen((char*) deviceUniqueIdUTF8))
     {
        // Is this the same device that was asked for last time?
-#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
         if(strncasecmp((char*)_lastUsedDeviceName,
                        (char*) deviceUniqueIdUTF8,
                        _lastUsedDeviceNameLength)==0)
 #else
         if (_strnicmp((char*) _lastUsedDeviceName,
                       (char*) deviceUniqueIdUTF8,
                       _lastUsedDeviceNameLength) == 0)
 #endif
@@ -86,17 +86,17 @@ int32_t DeviceInfoImpl::GetCapability(co
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                    "deviceUniqueIdUTF8 parameter not set in call to GetCapability");
         return -1;
     }
     ReadLockScoped cs(_apiLock);
 
     if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
-#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
         || (strncasecmp((char*)_lastUsedDeviceName,
                         (char*) deviceUniqueIdUTF8,
                         _lastUsedDeviceNameLength)!=0))
 #else
         || (_strnicmp((char*) _lastUsedDeviceName,
                       (char*) deviceUniqueIdUTF8,
                       _lastUsedDeviceNameLength) != 0))
 #endif
@@ -150,17 +150,17 @@ int32_t DeviceInfoImpl::GetBestMatchedCa
 {
 
 
     if (!deviceUniqueIdUTF8)
         return -1;
 
     ReadLockScoped cs(_apiLock);
     if ((_lastUsedDeviceNameLength != strlen((char*) deviceUniqueIdUTF8))
-#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX)
+#if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
         || (strncasecmp((char*)_lastUsedDeviceName,
                         (char*) deviceUniqueIdUTF8,
                         _lastUsedDeviceNameLength)!=0))
 #else
         || (_strnicmp((char*) _lastUsedDeviceName,
                       (char*) deviceUniqueIdUTF8,
                       _lastUsedDeviceNameLength) != 0))
 #endif
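The same three-way #if around strncasecmp/_strnicmp now appears in NumberOfCapabilities, GetCapability, and GetBestMatchedCapability; a small wrapper would keep the WEBRTC_BSD condition in one place. A sketch:

    // Sketch: centralize the platform case-insensitive compare.
    #if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
    #include <strings.h>  // strncasecmp
    #else
    #include <string.h>   // _strnicmp
    #endif

    static int CaseInsensitiveCompare(const char* a, const char* b, size_t n) {
    #if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
      return strncasecmp(a, b, n);
    #else
      return _strnicmp(a, b, n);
    #endif
    }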
--- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/device_info_linux.cc
@@ -14,17 +14,23 @@
 #include <unistd.h>
 #include <sys/ioctl.h>
 #include <sys/stat.h>
 #include <fcntl.h>
 #include <stdio.h>
 #include <stdlib.h>
 
 //v4l includes
+#if defined(__DragonFly__) || defined(__NetBSD__) || defined(__OpenBSD__)
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
 #include <linux/videodev2.h>
+#endif
 
 #include "ref_count.h"
 #include "trace.h"
 
 
 namespace webrtc
 {
 namespace videocapturemodule
@@ -89,19 +95,20 @@ int32_t DeviceInfoLinux::GetDeviceName(
 {
     WEBRTC_TRACE(webrtc::kTraceApiCall, webrtc::kTraceVideoCapture, _id, "%s", __FUNCTION__);
 
    // Iterate through /dev/video [0-63]
     uint32_t count = 0;
     char device[20];
     int fd = -1;
     bool found = false;
-    for (int n = 0; n < 64; n++)
+    int device_index;
+    for (device_index = 0; device_index < 64; device_index++)
     {
-        sprintf(device, "/dev/video%d", n);
+        sprintf(device, "/dev/video%d", device_index);
         if ((fd = open(device, O_RDONLY)) != -1)
         {
             if (count == deviceNumber) {
                 // Found the device
                 found = true;
                 break;
             } else {
                 close(fd);
@@ -150,73 +157,84 @@ int32_t DeviceInfoLinux::GetDeviceName(
                    strlen((const char*) cap.bus_info));
         }
         else
         {
             WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                        "buffer passed is too small");
             return -1;
         }
+    } else {
+        // If there's no bus info to use for the unique id, invent one - it must be repeatable.
+        if (snprintf(deviceUniqueIdUTF8, deviceUniqueIdUTF8Length, "fake_%u", device_index) >=
+            deviceUniqueIdUTF8Length)
+        {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
+                       "buffer passed is too small");
+            return -1;
+        }
     }
 
     return 0;
 }
 
 int32_t DeviceInfoLinux::CreateCapabilityMap(
                                         const char* deviceUniqueIdUTF8)
 {
     int fd;
     char device[32];
     bool found = false;
+    int device_index;
 
     const int32_t deviceUniqueIdUTF8Length =
                             (int32_t) strlen((char*) deviceUniqueIdUTF8);
     if (deviceUniqueIdUTF8Length > kVideoCaptureUniqueNameLength)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "Device name too long");
         return -1;
     }
     WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                "CreateCapabilityMap called for device %s", deviceUniqueIdUTF8);
 
    /* fast path: a generated fake_N unique id names the /dev/video index directly */
-    for (int n = 0; n < 64; ++n)
+    if (sscanf(deviceUniqueIdUTF8,"fake_%d",&device_index) == 1)
     {
-        sprintf(device, "/dev/video%d", n);
+        sprintf(device, "/dev/video%d", device_index);
         fd = open(device, O_RDONLY);
-        if (fd == -1)
-          continue;
+        if (fd != -1) {
+            found = true;
+        }
+    } else {
+        /* detect /dev/video [0-63] entries */
+        for (int n = 0; n < 64; ++n)
+        {
+            sprintf(device, "/dev/video%d", n);
+            fd = open(device, O_RDONLY);
+            if (fd == -1)
+                continue;
 
-        // query device capabilities
-        struct v4l2_capability cap;
-        if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
-        {
-            if (cap.bus_info[0] != 0)
+            // query device capabilities
+            struct v4l2_capability cap;
+            if (ioctl(fd, VIDIOC_QUERYCAP, &cap) == 0)
             {
-                if (strncmp((const char*) cap.bus_info,
-                            (const char*) deviceUniqueIdUTF8,
-                            strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                if (cap.bus_info[0] != 0)
                 {
-                    found = true;
-                    break; // fd matches with device unique id supplied
+                    if (strncmp((const char*) cap.bus_info,
+                                (const char*) deviceUniqueIdUTF8,
+                                strlen((const char*) deviceUniqueIdUTF8)) == 0) //match with device id
+                    {
+                        found = true;
+                        break; // fd matches with device unique id supplied
+                    }
                 }
+                // else can't be a match as the test for fake_* above would have matched it
             }
-            else //match for device name
-            {
-                if (IsDeviceNameMatches((const char*) cap.card,
-                                        (const char*) deviceUniqueIdUTF8))
-                {
-                    found = true;
-                    break;
-                }
-            }
+            close(fd); // close since this is not the matching device
         }
-        close(fd); // close since this is not the matching device
     }
-
     if (!found)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id, "no matching device found");
         return -1;
     }
 
     // now fd will point to the matching device
     // reset old capability map
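The invented unique id has to round-trip: GetDeviceName writes it with snprintf, and CreateCapabilityMap (plus the V4L2 Init fast path in the next file) parses it back with sscanf. The scheme in isolation, as a sketch:

    // Sketch: the fake_N unique-id round trip.
    #include <stdio.h>

    // Writer (GetDeviceName): snprintf(id, idLen, "fake_%u", device_index);
    // Readers (CreateCapabilityMap / Init):
    static bool ParseFakeId(const char* uniqueId, int* device_index) {
      return sscanf(uniqueId, "fake_%d", device_index) == 1;
    }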
--- a/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/linux/video_capture_linux.cc
@@ -7,23 +7,30 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #include <sys/ioctl.h>
 #include <unistd.h>
 #include <sys/stat.h>
 #include <fcntl.h>
-#include <linux/videodev2.h>
 #include <errno.h>
 #include <stdio.h>
 #include <sys/mman.h>
 #include <string.h>
 
-#include <iostream>
+//v4l includes
+#if defined(__DragonFly__) || defined(__NetBSD__) || defined(__OpenBSD__)
+#include <sys/videoio.h>
+#elif defined(__sun)
+#include <sys/videodev2.h>
+#else
+#include <linux/videodev2.h>
+#endif
+
 #include <new>
 
 #include "ref_count.h"
 #include "trace.h"
 #include "thread_wrapper.h"
 #include "critical_section_wrapper.h"
 #include "video_capture_linux.h"
 
@@ -66,16 +73,23 @@ int32_t VideoCaptureModuleV4L2::Init(con
 {
     int len = strlen((const char*) deviceUniqueIdUTF8);
     _deviceUniqueId = new (std::nothrow) char[len + 1];
     if (_deviceUniqueId)
     {
         memcpy(_deviceUniqueId, deviceUniqueIdUTF8, len + 1);
     }
 
+    int device_index;
+    if (sscanf(deviceUniqueIdUTF8,"fake_%d", &device_index) == 1)
+    {
+      _deviceId = device_index;
+      return 0;
+    }
+
     int fd;
     char device[32];
     bool found = false;
 
     /* detect /dev/video [0-63] entries */
     int n;
     for (n = 0; n < 64; n++)
     {
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit.mm
@@ -10,16 +10,30 @@
 
 #include "video_capture_qtkit.h"
 #import "video_capture_qtkit_objc.h"
 #import "video_capture_qtkit_info_objc.h"
 #include "trace.h"
 #include "critical_section_wrapper.h"
 #include "../../video_capture_config.h"
 
+class nsAutoreleasePool {
+public:
+    nsAutoreleasePool()
+    {
+        mLocalPool = [[NSAutoreleasePool alloc] init];
+    }
+    ~nsAutoreleasePool()
+    {
+        [mLocalPool release];
+    }
+private:
+    NSAutoreleasePool *mLocalPool;
+};
+
 namespace webrtc
 {
 
 namespace videocapturemodule
 {
 
 VideoCaptureMacQTKit::VideoCaptureMacQTKit(const int32_t id) :
     VideoCaptureImpl(id),
@@ -36,16 +50,17 @@ VideoCaptureMacQTKit::VideoCaptureMacQTK
     memset(_currentDeviceNameUTF8, 0, MAX_NAME_LENGTH);
     memset(_currentDeviceUniqueIdUTF8, 0, MAX_NAME_LENGTH);
     memset(_currentDeviceProductUniqueIDUTF8, 0, MAX_NAME_LENGTH);
 }
 
 VideoCaptureMacQTKit::~VideoCaptureMacQTKit()
 {
 
+    nsAutoreleasePool localPool;
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                  "~VideoCaptureMacQTKit() called");
     if(_captureDevice)
     {
         [_captureDevice stopCapture];
         [_captureDevice release];
     }
 
@@ -65,16 +80,18 @@ int32_t VideoCaptureMacQTKit::Init(
         (int32_t) strlen((char*)iDeviceUniqueIdUTF8);
     if(nameLength>kVideoCaptureUniqueNameLength)
         return -1;
 
     // Store the device name
     _deviceUniqueId = new char[nameLength+1];
     memcpy(_deviceUniqueId, iDeviceUniqueIdUTF8,nameLength+1);
 
+    nsAutoreleasePool localPool;
+
     _captureDevice = [[VideoCaptureMacQTKitObjC alloc] init];
     if(NULL == _captureDevice)
     {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, id,
                      "Failed to create an instance of "
                      "VideoCaptureMacQTKitObjC");
         return -1;
     }
@@ -164,16 +181,17 @@ int32_t VideoCaptureMacQTKit::Init(
                  "successfully Init VideoCaptureMacQTKit" );
     return 0;
 }
 
 int32_t VideoCaptureMacQTKit::StartCapture(
     const VideoCaptureCapability& capability)
 {
 
+    nsAutoreleasePool localPool;
     _captureWidth = capability.width;
     _captureHeight = capability.height;
     _captureFrameRate = capability.maxFPS;
     _captureDelay = 120;
 
     if(-1 == [[_captureDevice setCaptureHeight:_captureHeight
                AndWidth:_captureWidth AndFrameRate:_captureFrameRate]intValue])
     {
@@ -188,16 +206,17 @@ int32_t VideoCaptureMacQTKit::StartCaptu
         return -1;
     }
     _isCapturing = true;
     return 0;
 }
 
 int32_t VideoCaptureMacQTKit::StopCapture()
 {
+    nsAutoreleasePool localPool;
     [_captureDevice stopCapture];
 
     _isCapturing = false;
     return 0;
 }
 
 bool VideoCaptureMacQTKit::CaptureStarted()
 {
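The nsAutoreleasePool helper exists because these entry points can be reached from threads that have no Cocoa autorelease pool in place; without one, every autoreleased QTKit object would leak. The RAII scope ties the pool to the function body. Usage, sketched with a hypothetical method:

    // Sketch: a scoped pool drains autoreleased ObjC objects on return.
    int32_t VideoCaptureMacQTKit::SomeEntryPoint() {
      nsAutoreleasePool localPool;  // [[NSAutoreleasePool alloc] init]
      // ... QTKit calls that autorelease objects ...
      return 0;
    }  // ~nsAutoreleasePool -> [mLocalPool release]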
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info.mm
@@ -9,54 +9,71 @@
  */
 
 #include "trace.h"
 #include "../../video_capture_config.h"
 #import "video_capture_qtkit_info_objc.h"
 
 #include "video_capture.h"
 
+class nsAutoreleasePool {
+public:
+    nsAutoreleasePool()
+    {
+        mLocalPool = [[NSAutoreleasePool alloc] init];
+    }
+    ~nsAutoreleasePool()
+    {
+        [mLocalPool release];
+    }
+private:
+    NSAutoreleasePool *mLocalPool;
+};
+
 namespace webrtc
 {
 namespace videocapturemodule
 {
 
 VideoCaptureMacQTKitInfo::VideoCaptureMacQTKitInfo(const int32_t id) :
     DeviceInfoImpl(id)
 {
+    nsAutoreleasePool localPool;
     _captureInfo = [[VideoCaptureMacQTKitInfoObjC alloc] init];
 }
 
 VideoCaptureMacQTKitInfo::~VideoCaptureMacQTKitInfo()
 {
+    nsAutoreleasePool localPool;
     [_captureInfo release];
-
 }
 
 int32_t VideoCaptureMacQTKitInfo::Init()
 {
 
     return 0;
 }
 
 uint32_t VideoCaptureMacQTKitInfo::NumberOfDevices()
 {
 
+    nsAutoreleasePool localPool;
     uint32_t captureDeviceCount =
         [[_captureInfo getCaptureDeviceCount]intValue];
     return captureDeviceCount;
 
 }
 
 int32_t VideoCaptureMacQTKitInfo::GetDeviceName(
     uint32_t deviceNumber, char* deviceNameUTF8,
     uint32_t deviceNameLength, char* deviceUniqueIdUTF8,
     uint32_t deviceUniqueIdUTF8Length, char* productUniqueIdUTF8,
     uint32_t productUniqueIdUTF8Length)
 {
+    nsAutoreleasePool localPool;
     int errNum = [[_captureInfo getDeviceNamesFromIndex:deviceNumber
                    DefaultName:deviceNameUTF8 WithLength:deviceNameLength
                    AndUniqueID:deviceUniqueIdUTF8
                    WithLength:deviceUniqueIdUTF8Length
                    AndProductID:productUniqueIdUTF8
                    WithLength:productUniqueIdUTF8Length]intValue];
     return errNum;
 }
@@ -100,16 +117,17 @@ int32_t VideoCaptureMacQTKitInfo::GetBes
 }
 
 int32_t VideoCaptureMacQTKitInfo::DisplayCaptureSettingsDialogBox(
     const char* deviceUniqueIdUTF8,
     const char* dialogTitleUTF8, void* parentWindow,
     uint32_t positionX, uint32_t positionY)
 {
 
+    nsAutoreleasePool localPool;
     return [[_captureInfo
              displayCaptureSettingsDialogBoxWithDevice:deviceUniqueIdUTF8
              AndTitle:dialogTitleUTF8
              AndParentWindow:parentWindow AtX:positionX AndY:positionY]
              intValue];
 }
 
 int32_t VideoCaptureMacQTKitInfo::CreateCapabilityMap(
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.h
@@ -19,17 +19,16 @@
 #import <QTKit/QTKit.h>
 #import <Foundation/Foundation.h>
 #include "video_capture_qtkit_utility.h"
 #include "video_capture_qtkit_info.h"
 
 @interface VideoCaptureMacQTKitInfoObjC : NSObject{
     bool                                _OSSupportedInfo;
     NSArray*                            _captureDevicesInfo;
-    NSAutoreleasePool*                    _poolInfo;
     int                                    _captureDeviceCountInfo;
 
 }
 
 /**************************************************************************
  *
  *   The following functions are considered to be private
  *
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_info_objc.mm
@@ -89,21 +89,25 @@ using namespace webrtc;
         return [NSNumber numberWithInt:0];
     }
 
     if(index >= (uint32_t)_captureDeviceCountInfo)
     {
         return [NSNumber numberWithInt:-1];
     }
 
-    QTCaptureDevice* tempCaptureDevice =
-        (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
+    if ([_captureDevicesInfo count] <= index)
+    {
+      return [NSNumber numberWithInt:-1];
+    }
+
+    QTCaptureDevice* tempCaptureDevice = (QTCaptureDevice*)[_captureDevicesInfo objectAtIndex:index];
     if(!tempCaptureDevice)
     {
-        return [NSNumber numberWithInt:-1];
+      return [NSNumber numberWithInt:-1];
     }
 
     memset(deviceName, 0, deviceNameLength);
     memset(deviceUniqueID, 0, deviceUniqueIDLength);
 
     bool successful = NO;
 
     NSString* tempString = [tempCaptureDevice localizedDisplayName];
@@ -133,17 +137,16 @@ using namespace webrtc;
 
 - (NSNumber*)initializeVariables
 {
     if(NO == _OSSupportedInfo)
     {
         return [NSNumber numberWithInt:0];
     }
 
-    _poolInfo = [[NSAutoreleasePool alloc]init];
     _captureDeviceCountInfo = 0;
     [self getCaptureDevices];
 
     return [NSNumber numberWithInt:0];
 }
 
 // ***** Checks to see if the QTCaptureSession framework is available in the OS
 // ***** If it is not, isOSSupported = NO
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/qtkit/video_capture_qtkit_objc.mm
@@ -266,17 +266,27 @@ using namespace videocapturemodule;
 
     if(NO == _capturing)
     {
         return [NSNumber numberWithInt:0];
     }
 
     if(YES == _capturing)
     {
-        [_captureSession stopRunning];
+        // This method is often called on a secondary thread, which means
+        // the following can sometimes run "too early", causing crashes
+        // and/or weird errors concerning initialization.  On OS X 10.7 and
+        // 10.8, the CoreMediaIO method CMIOUninitializeGraph() is called from
+        // -[QTCaptureSession stopRunning].  If this is called too early,
+        // low-level session data gets uninitialized before low-level code
+        // is finished trying to use it.  The solution is to make stopRunning
+        // always run on the main thread.  See bug 837539.
+        [_captureSession performSelectorOnMainThread:@selector(stopRunning)
+                                          withObject:nil
+                                       waitUntilDone:NO];
     }
 
     _capturing = NO;
     return [NSNumber numberWithInt:0];
 }
 
 // ********** "private" functions below here **********
 #pragma mark **** "private" methods
--- a/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/mac/video_capture_mac.mm
@@ -25,18 +25,18 @@
 #include <QuickTime/QuickTime.h>
 
 // 10.4 support must be decided at runtime, but we choose at compile time which
 // framework the "work" classes use: one for QTKit, one for QuickTime.
 #if __MAC_OS_X_VERSION_MIN_REQUIRED == __MAC_10_4 // QuickTime version
 #include "QuickTime/video_capture_quick_time.h"
 #include "QuickTime/video_capture_quick_time_info.h"
 #else
-#include "QTKit/video_capture_qtkit.h"
-#include "QTKit/video_capture_qtkit_info.h"
+#include "qtkit/video_capture_qtkit.h"
+#include "qtkit/video_capture_qtkit_info.h"
 #endif
 
 namespace webrtc
 {
 namespace videocapturemodule
 {
 
 // static
--- a/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/video_capture.gypi
@@ -11,20 +11,27 @@
     {
       'target_name': 'video_capture_module',
       'type': 'static_library',
       'dependencies': [
         'webrtc_utility',
         '<(webrtc_root)/common_video/common_video.gyp:common_video',
         '<(webrtc_root)/system_wrappers/source/system_wrappers.gyp:system_wrappers',
       ],
+
+      'cflags_mozilla': [
+        '$(NSPR_CFLAGS)',
+      ],
+
       'include_dirs': [
         'include',
         '../interface',
         '<(webrtc_root)/common_video/libyuv/include',
+# added for Mozilla to support use_system_libjpeg
+        '$(DIST)/include',
       ],
       'sources': [
         'device_info_impl.cc',
         'device_info_impl.h',
         'include/video_capture.h',
         'include/video_capture_defines.h',
         'include/video_capture_factory.h',
         'video_capture_config.h',
@@ -36,17 +43,17 @@
       'conditions': [
         ['include_internal_video_capture==0', {
           'sources': [
             'external/device_info_external.cc',
             'external/video_capture_external.cc',
           ],
         }, {  # include_internal_video_capture == 1
           'conditions': [
-            ['OS=="linux"', {
+            ['include_v4l2_video_capture==1', {
               'include_dirs': [
                 'linux',
               ],
               'sources': [
                 'linux/device_info_linux.cc',
                 'linux/device_info_linux.h',
                 'linux/video_capture_linux.cc',
                 'linux/video_capture_linux.h',
@@ -74,18 +81,22 @@
                 'xcode_settings': {
                   'OTHER_LDFLAGS': [
                     '-framework QTKit',
                   ],
                 },
               },
             }],  # mac
             ['OS=="win"', {
-              'dependencies': [
-                '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
+              'conditions': [
+                ['build_with_mozilla==0', {
+                  'dependencies': [
+                    '<(DEPTH)/third_party/winsdk_samples/winsdk_samples.gyp:directshow_baseclasses',
+                  ],
+                }],
               ],
               'include_dirs': [
                 'windows',
               ],
               'sources': [
                 'windows/device_info_ds.cc',
                 'windows/device_info_ds.h',
                 'windows/device_info_mf.cc',
@@ -94,16 +105,20 @@
                 'windows/help_functions_ds.h',
                 'windows/sink_filter_ds.cc',
                 'windows/sink_filter_ds.h',
                 'windows/video_capture_ds.cc',
                 'windows/video_capture_ds.h',
                 'windows/video_capture_factory_windows.cc',
                 'windows/video_capture_mf.cc',
                 'windows/video_capture_mf.h',
+                'windows/BasePin.cpp',
+                'windows/BaseFilter.cpp',
+                'windows/BaseInputPin.cpp',
+                'windows/MediaType.cpp',
               ],
               'link_settings': {
                 'libraries': [
                   '-lStrmiids.lib',
                 ],
               },
             }],  # win
             ['OS=="android"', {
@@ -137,29 +152,33 @@
           'include_dirs': [
             'include',
           ],
           'sources': [
             'test/video_capture_unittest.cc',
             'test/video_capture_main_mac.mm',
           ],
           'conditions': [
-            ['OS=="mac" or OS=="linux"', {
+            ['OS!="win" and OS!="android"', {
               'cflags': [
                 '-Wno-write-strings',
               ],
               'ldflags': [
                 '-lpthread -lm',
               ],
             }],
+            ['include_v4l2_video_capture==1', {
+              'libraries': [
+                '-lXext',
+                '-lX11',
+              ],
+            }],
             ['OS=="linux"', {
               'libraries': [
                 '-lrt',
-                '-lXext',
-                '-lX11',
               ],
             }],
             ['OS=="mac"', {
               'dependencies': [
                 # Link with a special main for mac so we can use the webcam.
                 '<(webrtc_root)/test/test.gyp:test_support_main_threaded_mac',
               ],
               'xcode_settings': {
--- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/device_info_ds.cc
@@ -11,17 +11,16 @@
 #include "device_info_ds.h"
 
 #include "../video_capture_config.h"
 #include "../video_capture_delay.h"
 #include "help_functions_ds.h"
 #include "ref_count.h"
 #include "trace.h"
 
-#include <Streams.h>
 #include <Dvdmedia.h>
 
 namespace webrtc
 {
 namespace videocapturemodule
 {
 const int32_t NoWindowsCaptureDelays = 1;
 const DelayValues WindowsCaptureDelays[NoWindowsCaptureDelays] = {
@@ -37,16 +36,33 @@ const DelayValues WindowsCaptureDelays[N
     {160,120,109},
     {1280,720,166},
     {960,544,126},
     {800,448,120},
     {800,600,127}
   },
 };
 
+
+void _FreeMediaType(AM_MEDIA_TYPE& mt)
+{
+    if (mt.cbFormat != 0)
+    {
+        CoTaskMemFree((PVOID)mt.pbFormat);
+        mt.cbFormat = 0;
+        mt.pbFormat = NULL;
+    }
+    if (mt.pUnk != NULL)
+    {
+        // pUnk should not be used.
+        mt.pUnk->Release();
+        mt.pUnk = NULL;
+    }
+}
+
 // static
 DeviceInfoDS* DeviceInfoDS::Create(const int32_t id)
 {
     DeviceInfoDS* dsInfo = new DeviceInfoDS(id);
     if (!dsInfo || dsInfo->Init() != 0)
     {
         delete dsInfo;
         dsInfo = NULL;
@@ -679,17 +695,17 @@ int32_t DeviceInfoDS::CreateCapabilityMa
                                                       capability->width,
                                                       capability->height);
             _captureCapabilities.Insert(index++, capability);
             WEBRTC_TRACE( webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                          "Camera capability, width:%d height:%d type:%d fps:%d",
                          capability->width, capability->height,
                          capability->rawType, capability->maxFPS);
         }
-        DeleteMediaType(pmt);
+        _FreeMediaType(*pmt);
         pmt = NULL;
     }
     RELEASE_AND_CLEAR(streamConfig);
     RELEASE_AND_CLEAR(videoControlConfig);
     RELEASE_AND_CLEAR(outputCapturePin);
     RELEASE_AND_CLEAR(captureDevice); // Release the capture device
 
     // Store the new used device name
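With <Streams.h> gone, the DirectShow base-class helpers are no longer available, so _FreeMediaType above re-creates FreeMediaType. Its documented companion DeleteMediaType also frees the AM_MEDIA_TYPE struct itself; if that were ever needed, the equivalent would be (a sketch following the documented DirectShow semantics):

    // Sketch: DeleteMediaType equivalent - free the format block, then the
    // struct itself (GetStreamCaps allocates it with CoTaskMemAlloc).
    void _DeleteMediaType(AM_MEDIA_TYPE* pmt) {
      if (pmt != NULL) {
        _FreeMediaType(*pmt);  // defined above
        CoTaskMemFree(pmt);
      }
    }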
--- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.cc
@@ -16,49 +16,52 @@
 #include <Dvdmedia.h> // VIDEOINFOHEADER2
 #include <initguid.h>
 
 #define DELETE_RESET(p) { delete (p) ; (p) = NULL ;}
 
 DEFINE_GUID(CLSID_SINKFILTER, 0x88cdbbdc, 0xa73b, 0x4afa, 0xac, 0xbf, 0x15, 0xd5,
             0xe2, 0xce, 0x12, 0xc3);
 
+using namespace mozilla::media;
+using namespace mozilla;
+
 namespace webrtc
 {
 namespace videocapturemodule
 {
 
 typedef struct tagTHREADNAME_INFO
 {
    DWORD dwType;        // must be 0x1000
    LPCSTR szName;       // pointer to name (in user addr space)
    DWORD dwThreadID;    // thread ID (-1=caller thread)
    DWORD dwFlags;       // reserved for future use, must be zero
 } THREADNAME_INFO;
 
 CaptureInputPin::CaptureInputPin (int32_t moduleId,
                             IN TCHAR * szName,
                             IN CaptureSinkFilter* pFilter,
-                            IN CCritSec * pLock,
+                            IN CriticalSection * pLock,
                             OUT HRESULT * pHr,
                             IN LPCWSTR pszName)
-    : CBaseInputPin (szName, pFilter, pLock, pHr, pszName),
+    : BaseInputPin (szName, pFilter, pLock, pHr, pszName),
       _requestedCapability(),
       _resultingCapability()
 {
     _moduleId=moduleId;
     _threadHandle = NULL;
 }
 
 CaptureInputPin::~CaptureInputPin()
 {
 }
 
 HRESULT
-CaptureInputPin::GetMediaType (IN int iPosition, OUT CMediaType * pmt)
+CaptureInputPin::GetMediaType (IN int iPosition, OUT MediaType * pmt)
 {
     // reset the thread handle
     _threadHandle = NULL;
 
     if(iPosition < 0)
     return E_INVALIDARG;
 
     VIDEOINFOHEADER* pvi = (VIDEOINFOHEADER*) pmt->AllocFormatBuffer(
@@ -156,17 +159,17 @@ CaptureInputPin::GetMediaType (IN int iP
     WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _moduleId,
              "GetMediaType position %d, width %d, height %d, biCompression 0x%x",
              iPosition, _requestedCapability.width,
              _requestedCapability.height,pvi->bmiHeader.biCompression);
     return NOERROR;
 }
 
 HRESULT
-CaptureInputPin::CheckMediaType ( IN const CMediaType * pMediaType)
+CaptureInputPin::CheckMediaType ( IN const MediaType * pMediaType)
 {
     // reset the thread handle
     _threadHandle = NULL;
 
     const GUID *type = pMediaType->Type();
     if (*type != MEDIATYPE_Video)
     return E_INVALIDARG;
 
@@ -314,18 +317,18 @@ CaptureInputPin::CheckMediaType ( IN con
     return E_INVALIDARG;
 }
 
 HRESULT
 CaptureInputPin::Receive ( IN IMediaSample * pIMediaSample )
 {
     HRESULT hr = S_OK;
 
-    ASSERT (m_pFilter);
-    ASSERT (pIMediaSample);
+    assert (mFilter);
+    assert (pIMediaSample);
 
     // get the thread handle of the delivering thread inc its priority
     if( _threadHandle == NULL)
     {
         HANDLE handle= GetCurrentThread();
         SetThreadPriority(handle, THREAD_PRIORITY_HIGHEST);
         _threadHandle = handle;
         // See http://msdn.microsoft.com/en-us/library/xcb2z8hs(VS.71).aspx for details on the code
@@ -343,37 +346,37 @@ CaptureInputPin::Receive ( IN IMediaSamp
                             (DWORD_PTR*)&info );
         }
         __except (EXCEPTION_CONTINUE_EXECUTION)
         {
         }
 
     }
 
-    reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->LockReceive();
-    hr = CBaseInputPin::Receive (pIMediaSample);
+    reinterpret_cast <CaptureSinkFilter *>(mFilter)->LockReceive();
+    hr = BaseInputPin::Receive (pIMediaSample);
 
     if (SUCCEEDED (hr))
     {
         const int32_t length = pIMediaSample->GetActualDataLength();
 
         unsigned char* pBuffer = NULL;
         if(S_OK != pIMediaSample->GetPointer(&pBuffer))
         {
-            reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+            reinterpret_cast <CaptureSinkFilter *>(mFilter)->UnlockReceive();
             return S_FALSE;
         }
 
         // NOTE: filter unlocked within Send call
-        reinterpret_cast <CaptureSinkFilter *> (m_pFilter)->ProcessCapturedFrame(
+        reinterpret_cast <CaptureSinkFilter *> (mFilter)->ProcessCapturedFrame(
                                         pBuffer,length,_resultingCapability);
     }
     else
     {
-        reinterpret_cast <CaptureSinkFilter *>(m_pFilter)->UnlockReceive();
+        reinterpret_cast <CaptureSinkFilter *>(mFilter)->UnlockReceive();
     }
 
     return hr;
 }
 
 // called under LockReceive
 HRESULT CaptureInputPin::SetMatchingMediaType(
                                     const VideoCaptureCapability& capability)
@@ -384,23 +387,25 @@ HRESULT CaptureInputPin::SetMatchingMedi
     return S_OK;
 }
 //  ----------------------------------------------------------------------------
 CaptureSinkFilter::CaptureSinkFilter (IN TCHAR * tszName,
                               IN LPUNKNOWN punk,
                               OUT HRESULT * phr,
                               VideoCaptureExternal& captureObserver,
                               int32_t moduleId)
-    : CBaseFilter(tszName,punk,& m_crtFilter,CLSID_SINKFILTER),
+    : BaseFilter(tszName, CLSID_SINKFILTER),
+      m_crtFilter("CaptureSinkFilter::m_crtFilter"),
+      m_crtRecv("CaptureSinkFilter::m_crtRecv"),
       m_pInput(NULL),
       _captureObserver(captureObserver),
       _moduleId(moduleId)
 {
     (* phr) = S_OK;
-    m_pInput = new CaptureInputPin(moduleId,NAME ("VideoCaptureInputPin"),
+    m_pInput = new CaptureInputPin(moduleId, L"VideoCaptureInputPin",
                                    this,
                                    & m_crtFilter,
                                    phr, L"VideoCapture");
     if (m_pInput == NULL || FAILED (* phr))
     {
         (* phr) = FAILED (* phr) ? (* phr) : E_OUTOFMEMORY;
         goto cleanup;
     }
@@ -413,87 +418,87 @@ CaptureSinkFilter::~CaptureSinkFilter()
     delete m_pInput;
 }
 
 int CaptureSinkFilter::GetPinCount()
 {
     return 1;
 }
 
-CBasePin *
+BasePin *
 CaptureSinkFilter::GetPin(IN int Index)
 {
-    CBasePin * pPin;
+    BasePin * pPin;
     LockFilter ();
     if (Index == 0)
     {
         pPin = m_pInput;
     }
     else
     {
         pPin = NULL;
     }
     UnlockFilter ();
     return pPin;
 }
 
 STDMETHODIMP CaptureSinkFilter::Pause()
 {
     LockFilter();
-    if (m_State == State_Stopped)
+    if (mState == State_Stopped)
     {
         //  change the state, THEN activate the input pin
-        m_State = State_Paused;
+        mState = State_Paused;
         if (m_pInput && m_pInput->IsConnected())
         {
             m_pInput->Active();
         }
         if (m_pInput && !m_pInput->IsConnected())
         {
-            m_State = State_Running;
+            mState = State_Running;
         }
     }
-    else if (m_State == State_Running)
+    else if (mState == State_Running)
     {
-        m_State = State_Paused;
+        mState = State_Paused;
     }
     UnlockFilter();
     return S_OK;
 }
 
 STDMETHODIMP CaptureSinkFilter::Stop()
 {
     LockReceive();
     LockFilter();
 
     //  set the state
-    m_State = State_Stopped;
+    mState = State_Stopped;
 
     //  inactivate the pins
     if (m_pInput)
         m_pInput->Inactive();
 
     UnlockFilter();
     UnlockReceive();
     return S_OK;
 }
 
 void CaptureSinkFilter::SetFilterGraph(IGraphBuilder* graph)
 {
     LockFilter();
-    m_pGraph = graph;
+    mGraph = graph;
     UnlockFilter();
 }
 
 void CaptureSinkFilter::ProcessCapturedFrame(unsigned char* pBuffer,
                                          int32_t length,
                                          const VideoCaptureCapability& frameInfo)
 {
     //  we have the receiver lock
-    if (m_State == State_Running)
+    if (mState == State_Running)
     {
         _captureObserver.IncomingFrame(pBuffer, length, frameInfo);
 
         // trying to hold it since it's only a memcpy
        // IMPROVEMENT: if this works, move the critsect
         UnlockReceive();
         return;
     }
--- a/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/windows/sink_filter_ds.h
@@ -6,95 +6,117 @@
  *  tree. An additional intellectual property rights grant can be found
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
 
-#include <Streams.h> // Include base DS filter header files
-
 #include "video_capture_defines.h"
+#include "BaseInputPin.h"
+#include "BaseFilter.h"
+#include "MediaType.h"
 
 namespace webrtc
 {
 namespace videocapturemodule
 {
 //forward declaration
 
 class CaptureSinkFilter;
 /**
  * Input pin for captured camera frames
  *
  */
-class CaptureInputPin: public CBaseInputPin
+class CaptureInputPin: public mozilla::media::BaseInputPin
 {
 public:
     int32_t _moduleId;
 
     VideoCaptureCapability _requestedCapability;
     VideoCaptureCapability _resultingCapability;
     HANDLE _threadHandle;
 
     CaptureInputPin(int32_t moduleId,
                     IN TCHAR* szName,
                     IN CaptureSinkFilter* pFilter,
-                    IN CCritSec * pLock,
+                    IN mozilla::CriticalSection * pLock,
                     OUT HRESULT * pHr,
                     IN LPCWSTR pszName);
     virtual ~CaptureInputPin();
 
-    HRESULT GetMediaType (IN int iPos, OUT CMediaType * pmt);
-    HRESULT CheckMediaType (IN const CMediaType * pmt);
+    HRESULT GetMediaType (IN int iPos, OUT mozilla::media::MediaType * pmt);
+    HRESULT CheckMediaType (IN const mozilla::media::MediaType * pmt);
     STDMETHODIMP Receive (IN IMediaSample *);
     HRESULT SetMatchingMediaType(const VideoCaptureCapability& capability);
 };
 
-class CaptureSinkFilter: public CBaseFilter
+class CaptureSinkFilter: public mozilla::media::BaseFilter
 {
 
 public:
     CaptureSinkFilter(IN TCHAR * tszName,
                       IN LPUNKNOWN punk,
                       OUT HRESULT * phr,
                       VideoCaptureExternal& captureObserver,
                       int32_t moduleId);
     virtual ~CaptureSinkFilter();
 
     //  --------------------------------------------------------------------
     //  class methods
 
     void ProcessCapturedFrame(unsigned char* pBuffer, int32_t length,
                               const VideoCaptureCapability& frameInfo);
     //  explicit receiver lock acquisition and release
-    void LockReceive()  { m_crtRecv.Lock();}
-    void UnlockReceive() {m_crtRecv.Unlock();}
+    void LockReceive()  { m_crtRecv.Enter();}
+    void UnlockReceive() {m_crtRecv.Leave();}
+
     //  explicit filter lock acquisition and release
-    void LockFilter() {m_crtFilter.Lock();}
-    void UnlockFilter() { m_crtFilter.Unlock(); }
+    void LockFilter() {m_crtFilter.Enter();}
+    void UnlockFilter() { m_crtFilter.Leave(); }
     void SetFilterGraph(IGraphBuilder* graph); // Used if EVR
 
     //  --------------------------------------------------------------------
     //  COM interfaces
-DECLARE_IUNKNOWN    ;
+    STDMETHODIMP QueryInterface(REFIID aIId, void **aInterface)
+    {
+      return mozilla::media::BaseFilter::QueryInterface(aIId, aInterface);
+    }
+    STDMETHODIMP_(ULONG) AddRef()
+    {
+      return ::InterlockedIncrement(&mRefCnt);
+    }
+
+    STDMETHODIMP_(ULONG) Release()
+    {
+      unsigned long newRefCnt = ::InterlockedDecrement(&mRefCnt);
+
+      if (!newRefCnt) {
+        delete this;
+      }
+
+      return newRefCnt;
+    }
+
     STDMETHODIMP SetMatchingMediaType(const VideoCaptureCapability& capability);
 
     //  --------------------------------------------------------------------
     //  CBaseFilter methods
     int GetPinCount ();
-    CBasePin * GetPin ( IN int Index);
+    mozilla::media::BasePin * GetPin ( IN int Index);
     STDMETHODIMP Pause ();
     STDMETHODIMP Stop ();
     STDMETHODIMP GetClassID ( OUT CLSID * pCLSID);
     //  --------------------------------------------------------------------
     //  class factory calls this
-    static CUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
+    static IUnknown * CreateInstance (IN LPUNKNOWN punk, OUT HRESULT * phr);
 private:
-    CCritSec m_crtFilter; //  filter lock
-    CCritSec m_crtRecv;  //  receiver lock; always acquire before filter lock
+    mozilla::CriticalSection m_crtFilter; //  filter lock
+    mozilla::CriticalSection m_crtRecv;  //  receiver lock; always acquire before filter lock
     CaptureInputPin * m_pInput;
     VideoCaptureExternal& _captureObserver;
     int32_t _moduleId;
+    unsigned long mRefCnt;
 };
 } // namespace videocapturemodule
 } // namespace webrtc
 #endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_WINDOWS_SINK_FILTER_DS_H_
--- a/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
+++ b/media/webrtc/trunk/webrtc/modules/video_coding/codecs/vp8/vp8.gyp
@@ -26,16 +26,25 @@
         '<(webrtc_root)/modules/video_coding/codecs/interface',
         '<(webrtc_root)/modules/interface',
       ],
       'conditions': [
         ['build_libvpx==1', {
           'dependencies': [
             '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
           ],
+        },{
+          'include_dirs': [
+            '$(DIST)/include',
+          ],
+          'link_settings': {
+            'libraries': [
+              '$(LIBVPX_OBJ)/libvpx.a',
+            ],
+          },
         }],
       ],
       'direct_dependent_settings': {
         'include_dirs': [
           'include',
           '<(webrtc_root)/common_video/interface',
           '<(webrtc_root)/modules/video_coding/codecs/interface',
         ],
@@ -90,16 +99,25 @@
             'default_temporal_layers_unittest.cc',
             'reference_picture_selection_unittest.cc',
           ],
           'conditions': [
             ['build_libvpx==1', {
               'dependencies': [
                 '<(DEPTH)/third_party/libvpx/libvpx.gyp:libvpx',
               ],
+            },{
+              'include_dirs': [
+                '$(DIST)/include',
+              ],
+              'link_settings': {
+                'libraries': [
+                  '$(LIBVPX_OBJ)/libvpx.a',
+                ],
+              },
             }],
           ],
         },
         {
           'target_name': 'vp8_coder',
           'type': 'executable',
           'dependencies': [
             'webrtc_vp8',
--- a/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi
+++ b/media/webrtc/trunk/webrtc/modules/video_processing/main/source/video_processing.gypi
@@ -68,16 +68,17 @@
           ],
           'include_dirs': [
             '../interface',
             '../../../interface',
           ],
           'conditions': [
             ['os_posix==1 and OS!="mac"', {
               'cflags': [ '-msse2', ],
+              'cflags_mozilla': [ '-msse2', ],
             }],
             ['OS=="mac"', {
               'xcode_settings': {
                 'OTHER_CFLAGS': [ '-msse2', ],
               },
             }],
           ],
         },
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/asm_defines.h
@@ -6,17 +6,17 @@
  *  tree. An additional intellectual property rights grant can be found
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #ifndef WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
 #define WEBRTC_SYSTEM_WRAPPERS_INTERFACE_ASM_DEFINES_H_
 
-#if defined(__linux__) && defined(__ELF__)
+#if (defined(__linux__) || defined(__FreeBSD__)) && defined(__ELF__)
 .section .note.GNU-stack,"",%progbits
 #endif
 
 // Define the macros used in ARM assembly code, so that for Mac or iOS builds
 // we add leading underscores for the function names.
 #ifdef __APPLE__
 .macro GLOBAL_FUNCTION name
 .global _\name
--- a/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h
+++ b/media/webrtc/trunk/webrtc/system_wrappers/interface/tick_util.h
@@ -189,17 +189,17 @@ inline int64_t TickTime::QueryOsForTicks
     // 0x0fffffff ~3.1 days, the code will not take that long to execute
     // so it must have been a wrap around.
     if (old > 0xf0000000 && now < 0x0fffffff) {
       num_wrap_time_get_time++;
     }
   }
   result.ticks_ = now + (num_wrap_time_get_time << 32);
 #endif
-#elif defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   struct timespec ts;
   // TODO(wu): Remove CLOCK_REALTIME implementation.
 #ifdef WEBRTC_CLOCK_TYPE_REALTIME
   clock_gettime(CLOCK_REALTIME, &ts);
 #else
   clock_gettime(CLOCK_MONOTONIC, &ts);
 #endif
   result.ticks_ = 1000000000LL * static_cast<int64_t>(ts.tv_sec) +
@@ -236,34 +236,34 @@ inline int64_t TickTime::MillisecondTime
 #if _WIN32
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (ticks * 1000) / qpfreq.QuadPart;
 #else
   return ticks;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   return ticks / 1000000LL;
 #else
   return ticks / 1000LL;
 #endif
 }
 
 inline int64_t TickTime::MicrosecondTimestamp() {
   int64_t ticks = TickTime::Now().Ticks();
 #if _WIN32
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (ticks * 1000) / (qpfreq.QuadPart / 1000);
 #else
   return ticks * 1000LL;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   return ticks / 1000LL;
 #else
   return ticks;
 #endif
 }
 
 inline int64_t TickTime::Ticks() const {
   return ticks_;
@@ -273,33 +273,33 @@ inline int64_t TickTime::MillisecondsToT
 #if _WIN32
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (qpfreq.QuadPart * ms) / 1000;
 #else
   return ms;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   return ms * 1000000LL;
 #else
   return ms * 1000LL;
 #endif
 }
 
 inline int64_t TickTime::TicksToMilliseconds(const int64_t ticks) {
 #if _WIN32
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (ticks * 1000) / qpfreq.QuadPart;
 #else
   return ticks;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   return ticks / 1000000LL;
 #else
   return ticks / 1000LL;
 #endif
 }
 
 inline TickTime& TickTime::operator+=(const int64_t& ticks) {
   ticks_ += ticks;
@@ -318,17 +318,17 @@ inline int64_t TickInterval::Millisecond
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (interval_ * 1000) / qpfreq.QuadPart;
 #else
   // interval_ is in ms
   return interval_;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   // interval_ is in ns
   return interval_ / 1000000;
 #else
   // interval_ is in usecs
   return interval_ / 1000;
 #endif
 }
 
@@ -337,17 +337,17 @@ inline int64_t TickInterval::Microsecond
 #ifdef USE_QUERY_PERFORMANCE_COUNTER
   LARGE_INTEGER qpfreq;
   QueryPerformanceFrequency(&qpfreq);
   return (interval_ * 1000000) / qpfreq.QuadPart;
 #else
   // interval_ is in ms
   return interval_ * 1000LL;
 #endif
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   // interval_ is in ns
   return interval_ / 1000;
 #else
   // interval_ is in usecs
   return interval_;
 #endif
 }
 
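For reference: on the WEBRTC_LINUX / WEBRTC_BSD / WEBRTC_MAC branches above, ticks_ holds nanoseconds, which is why every conversion in this header is a plain integer divide by 1000000LL or 1000LL. A minimal standalone sketch of that arithmetic (illustration only, not part of the changeset; assumes a POSIX system that provides clock_gettime with CLOCK_MONOTONIC):

    #include <stdint.h>
    #include <stdio.h>
    #include <time.h>

    // Same tick source as the POSIX branch above: monotonic nanoseconds.
    static int64_t NowTicksNs() {
      struct timespec ts;
      clock_gettime(CLOCK_MONOTONIC, &ts);
      return 1000000000LL * static_cast<int64_t>(ts.tv_sec) +
             static_cast<int64_t>(ts.tv_nsec);
    }

    int main() {
      const int64_t t0 = NowTicksNs();
      const int64_t t1 = NowTicksNs();
      const int64_t interval = t1 - t0;        // what TickInterval stores
      printf("%lld ms / %lld us\n",
             (long long)(interval / 1000000),  // Milliseconds()
             (long long)(interval / 1000));    // Microseconds()
      return 0;
    }
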
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/atomic32_posix.cc
@@ -7,17 +7,16 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #include "atomic32.h"
 
 #include <assert.h>
 #include <inttypes.h>
-#include <malloc.h>
 
 #include "common_types.h"
 
 namespace webrtc {
 
 Atomic32::Atomic32(int32_t initial_value)
     : value_(initial_value) {
   assert(Is32bitAligned());
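
The <malloc.h> include dropped above is non-standard and missing or stubbed out on the BSDs, and nothing in this file needs it: the wrapper only asserts alignment and relies on compiler atomics. A sketch of that core pattern, with hypothetical names (the real class is declared in atomic32.h); illustration only, assuming the GCC/Clang __sync builtins:

    #include <assert.h>
    #include <stdint.h>

    // Sketch of the wrapper's core: __sync builtins on a volatile,
    // 4-byte-aligned 32-bit value.
    class Atomic32Sketch {
     public:
      explicit Atomic32Sketch(int32_t v) : value_(v) {
        // Same precondition the real constructor asserts.
        assert((reinterpret_cast<uintptr_t>(&value_) & 3) == 0);
      }
      int32_t Increment() { return __sync_add_and_fetch(&value_, 1); }
      int32_t Decrement() { return __sync_sub_and_fetch(&value_, 1); }

     private:
      volatile int32_t value_;
    };

    int main() {
      Atomic32Sketch counter(0);
      return counter.Increment() == 1 ? 0 : 1;  // exits 0 on success
    }
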
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable.cc
@@ -9,33 +9,33 @@
  */
 
 #include "webrtc/system_wrappers/interface/condition_variable_wrapper.h"
 
 #if defined(_WIN32)
 #include <windows.h>
 #include "webrtc/system_wrappers/source/condition_variable_event_win.h"
 #include "webrtc/system_wrappers/source/condition_variable_native_win.h"
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 #include <pthread.h>
 #include "webrtc/system_wrappers/source/condition_variable_posix.h"
 #endif
 
 namespace webrtc {
 
 ConditionVariableWrapper* ConditionVariableWrapper::CreateConditionVariable() {
 #if defined(_WIN32)
   // Try to create native condition variable implementation.
   ConditionVariableWrapper* ret_val = ConditionVariableNativeWin::Create();
   if (!ret_val) {
     // Native condition variable implementation does not exist. Create generic
     // condition variable based on events.
     ret_val = new ConditionVariableEventWin();
   }
   return ret_val;
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
   return ConditionVariablePosix::Create();
 #else
   return NULL;
 #endif
 }
 
 } // namespace webrtc
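
With WEBRTC_BSD added, the BSDs select the same ConditionVariablePosix implementation as Linux. It is built on ordinary pthread primitives; a minimal sketch of that usage pattern, illustration only, with hypothetical names:

    #include <pthread.h>

    pthread_mutex_t g_mu = PTHREAD_MUTEX_INITIALIZER;
    pthread_cond_t g_cv = PTHREAD_COND_INITIALIZER;
    bool g_ready = false;

    void WaitUntilReady() {
      pthread_mutex_lock(&g_mu);
      while (!g_ready)  // re-check the predicate: wakeups may be spurious
        pthread_cond_wait(&g_cv, &g_mu);
      pthread_mutex_unlock(&g_mu);
    }

    void SignalReady() {
      pthread_mutex_lock(&g_mu);
      g_ready = true;
      pthread_cond_signal(&g_cv);
      pthread_mutex_unlock(&g_mu);
    }
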
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/condition_variable_posix.cc
@@ -74,17 +74,17 @@ void ConditionVariablePosix::SleepCS(Cri
       &crit_sect);
   pthread_cond_wait(&cond_, &cs->mutex_);
 }
 
 bool ConditionVariablePosix::SleepCS(CriticalSectionWrapper& crit_sect,
                                      unsigned long max_time_inMS) {
   const unsigned long INFINITE = 0xFFFFFFFF;
   const int MILLISECONDS_PER_SECOND = 1000;
-#ifndef WEBRTC_LINUX
+#if !defined(WEBRTC_LINUX) && !defined(WEBRTC_BSD)
   const int MICROSECONDS_PER_MILLISECOND = 1000;
 #endif
   const int NANOSECONDS_PER_SECOND = 1000000000;
   const int NANOSECONDS_PER_MILLISECOND  = 1000000;
 
   CriticalSectionPosix* cs = reinterpret_cast<CriticalSectionPosix*>(
       &crit_sect);
 
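MICROSECONDS_PER_MILLISECOND is only needed on the gettimeofday() fallback; on Linux, and now the BSDs, SleepCS() derives its absolute deadline from clock_gettime() directly. A sketch of that deadline arithmetic, illustration only; it assumes the condition variable was created with pthread_condattr_setclock(CLOCK_MONOTONIC), which is what makes waiting against the monotonic clock valid:

    #include <pthread.h>
    #include <time.h>

    // mu must be held on entry, as with pthread_cond_timedwait() itself.
    bool TimedWaitMs(pthread_cond_t* cv, pthread_mutex_t* mu,
                     unsigned long max_time_ms) {
      const long kNsPerSecond = 1000000000L;
      const long kNsPerMillisecond = 1000000L;
      struct timespec ts;
      clock_gettime(CLOCK_MONOTONIC, &ts);
      ts.tv_sec += max_time_ms / 1000;
      ts.tv_nsec += (max_time_ms % 1000) * kNsPerMillisecond;
      if (ts.tv_nsec >= kNsPerSecond) {  // carry nanoseconds into seconds
        ++ts.tv_sec;
        ts.tv_nsec -= kNsPerSecond;
      }
      // Returns true if woken by a signal, false on ETIMEDOUT.
      return pthread_cond_timedwait(cv, mu, &ts) == 0;
    }
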
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/cpu_info.cc
@@ -7,24 +7,26 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #include "webrtc/system_wrappers/interface/cpu_info.h"
 
 #if defined(_WIN32)
 #include <Windows.h>
-#elif defined(WEBRTC_MAC)
+#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
+#include <sys/types.h>
 #include <sys/sysctl.h>
-#include <sys/types.h>
 #elif defined(WEBRTC_ANDROID)
 // Not implemented yet; it might be possible to reuse the Linux implementation
-#else // defined(WEBRTC_LINUX)
+#elif defined(WEBRTC_LINUX)
 #include <unistd.h>  // required for get_nprocs() with uClibc
 #include <sys/sysinfo.h>
+#else // defined(_SC_NPROCESSORS_ONLN)
+#include <unistd.h>
 #endif
 
 #include "system_wrappers/interface/trace.h"
 
 namespace webrtc {
 
 uint32_t CpuInfo::number_of_cores_ = 0;
 
@@ -32,34 +34,43 @@ uint32_t CpuInfo::DetectNumberOfCores() 
   if (!number_of_cores_) {
 #if defined(_WIN32)
     SYSTEM_INFO si;
     GetSystemInfo(&si);
     number_of_cores_ = static_cast<uint32_t>(si.dwNumberOfProcessors);
     WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                  "Available number of cores:%d", number_of_cores_);
 
-#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID)
+#elif defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK)
     number_of_cores_ = get_nprocs();
     WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                  "Available number of cores:%d", number_of_cores_);
 
-#elif defined(WEBRTC_MAC)
-    int name[] = {CTL_HW, HW_AVAILCPU};
+#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
+    int name[] = {
+      CTL_HW,
+#ifdef HW_AVAILCPU
+      HW_AVAILCPU,
+#else
+      HW_NCPU,
+#endif
+    };
     int ncpu;
     size_t size = sizeof(ncpu);
     if (0 == sysctl(name, 2, &ncpu, &size, NULL, 0)) {
       number_of_cores_ = static_cast<uint32_t>(ncpu);
       WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                    "Available number of cores:%d", number_of_cores_);
     } else {
       WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
                    "Failed to get number of cores");
       number_of_cores_ = 1;
     }
+#elif defined(_SC_NPROCESSORS_ONLN)
+    number_of_cores_ = sysconf(_SC_NPROCESSORS_ONLN);
 #else
     WEBRTC_TRACE(kTraceWarning, kTraceUtility, -1,
                  "No function to get number of cores");
     number_of_cores_ = 1;
 #endif
   }
   return number_of_cores_;
 }
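
The detection order after this hunk: Windows asks GetSystemInfo(), glibc Linux uses get_nprocs(), BSD/Mac query sysctl (preferring HW_AVAILCPU, the number of cores available to the process, over the total HW_NCPU), and any other POSIX system with _SC_NPROCESSORS_ONLN falls back to sysconf(). A standalone sketch of the non-Windows probes, illustration only, using raw compiler macros in place of the tree's WEBRTC_* defines:

    #include <unistd.h>
    #if defined(__APPLE__) || defined(__FreeBSD__)
    #include <sys/types.h>
    #include <sys/sysctl.h>
    #endif

    unsigned DetectCores() {
    #if defined(__APPLE__) || defined(__FreeBSD__)
      int name[] = {
        CTL_HW,
    #ifdef HW_AVAILCPU
        HW_AVAILCPU,  // cores available to this process
    #else
        HW_NCPU,      // older BSDs only report the total core count
    #endif
      };
      int ncpu = 0;
      size_t size = sizeof(ncpu);
      if (sysctl(name, 2, &ncpu, &size, NULL, 0) == 0 && ncpu > 0)
        return static_cast<unsigned>(ncpu);
    #elif defined(_SC_NPROCESSORS_ONLN)
      const long n = sysconf(_SC_NPROCESSORS_ONLN);
      if (n > 0)
        return static_cast<unsigned>(n);
    #endif
      return 1;  // conservative fallback, as in the wrapper
    }
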
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/rw_lock.cc
@@ -10,28 +10,33 @@
 
 #include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
 
 #include <assert.h>
 
 #if defined(_WIN32)
 #include "webrtc/system_wrappers/source/rw_lock_generic.h"
 #include "webrtc/system_wrappers/source/rw_lock_win.h"
+#elif defined(ANDROID)
+#include "webrtc/system_wrappers/source/rw_lock_generic.h"
 #else
 #include "webrtc/system_wrappers/source/rw_lock_posix.h"
 #endif
 
 namespace webrtc {
 
 RWLockWrapper* RWLockWrapper::CreateRWLock() {
 #ifdef _WIN32
   // Native implementation is faster, so use that if available.
   RWLockWrapper* lock = RWLockWin::Create();
   if (lock) {
     return lock;
   }
   return new RWLockGeneric();
+#elif defined(ANDROID)
+  // Android 2.2 and before do not have POSIX pthread rwlocks.
+  return new RWLockGeneric();
 #else
   return RWLockPosix::Create();
 #endif
 }
 
 }  // namespace webrtc
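
Android 2.2 and earlier lack pthread rwlocks, hence the generic fallback. A reader/writer lock of that flavor needs nothing beyond a mutex and a condition variable; a minimal sketch, illustration only (it favors readers and can starve writers, which is tolerable for a fallback):

    #include <pthread.h>

    class GenericRWLock {
     public:
      GenericRWLock() : readers_(0) {
        pthread_mutex_init(&mu_, NULL);
        pthread_cond_init(&cv_, NULL);
      }
      void AcquireLockShared() {
        pthread_mutex_lock(&mu_);
        ++readers_;
        pthread_mutex_unlock(&mu_);
      }
      void ReleaseLockShared() {
        pthread_mutex_lock(&mu_);
        if (--readers_ == 0)
          pthread_cond_broadcast(&cv_);  // wake any waiting writer
        pthread_mutex_unlock(&mu_);
      }
      void AcquireLockExclusive() {
        pthread_mutex_lock(&mu_);  // held across the whole write section
        while (readers_ > 0)
          pthread_cond_wait(&cv_, &mu_);
      }
      void ReleaseLockExclusive() { pthread_mutex_unlock(&mu_); }

     private:
      pthread_mutex_t mu_;
      pthread_cond_t cv_;
      int readers_;
    };
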
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/system_wrappers.gyp
@@ -127,18 +127,23 @@
             'trace_impl.cc',
             'trace_impl.h',
             'trace_posix.cc',
             'trace_posix.h',
             'trace_win.cc',
             'trace_win.h',
           ],
         }],
-        ['OS=="android"', {
+        ['OS=="android" or moz_widget_toolkit_gonk==1', {
           'dependencies': [ 'cpu_features_android', ],
+          'sources!': [
+            # Android <= 2.2 lacks pthread rwlocks, so exclude the POSIX impl.
+            'rw_lock_posix.cc',
+            'rw_lock_posix.h',
+          ],
         }],
         ['OS=="linux"', {
           'link_settings': {
             'libraries': [ '-lrt', ],
           },
         }],
         ['OS=="mac"', {
           'link_settings': {
@@ -171,17 +176,17 @@
       # Disable warnings to enable Win64 build, issue 1323.
       'msvs_disabled_warnings': [
         4267,  # size_t to int truncation.
         4334,  # Ignore warning on shift operator promotion.
       ],
     },
   ], # targets
   'conditions': [
-    ['OS=="android"', {
+    ['OS=="android" or moz_widget_toolkit_gonk==1', {
       'targets': [
         {
           'variables': {
             # Treat this as third-party code.
             'chromium_code': 0,
           },
           'target_name': 'cpu_features_android',
           'type': 'static_library',
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/thread_posix.cc
@@ -53,16 +53,27 @@
 #ifdef WEBRTC_LINUX
 #include <sys/types.h>
 #include <sched.h>
 #include <sys/syscall.h>
 #include <linux/unistd.h>
 #include <sys/prctl.h>
 #endif
 
+#if defined(__NetBSD__)
+#include <lwp.h>
+#elif defined(__FreeBSD__)
+#include <sys/param.h>
+#include <sys/thr.h>
+#endif
+
+#if defined(WEBRTC_BSD) && !defined(__NetBSD__)
+#include <pthread_np.h>
+#endif
+
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/event_wrapper.h"
 #include "webrtc/system_wrappers/interface/sleep.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 
 namespace webrtc {
 
 int ConvertToSystemPriority(ThreadPriority priority, int min_prio,
@@ -118,41 +129,55 @@ ThreadPosix::ThreadPosix(ThreadRunFuncti
       obj_(obj),
       crit_state_(CriticalSectionWrapper::CreateCriticalSection()),
       alive_(false),
       dead_(true),
       prio_(prio),
       event_(EventWrapper::Create()),
       name_(),
       set_thread_name_(false),
-#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK))
       pid_(-1),
 #endif
       attr_(),
       thread_(0) {
   if (thread_name != NULL) {
     set_thread_name_ = true;
     strncpy(name_, thread_name, kThreadMaxNameLength);
     name_[kThreadMaxNameLength - 1] = '\0';
   }
 }
 
 uint32_t ThreadWrapper::GetThreadId() {
-#if defined(WEBRTC_ANDROID) || defined(WEBRTC_LINUX)
+#if defined(WEBRTC_ANDROID) || defined(WEBRTC_LINUX) || defined(WEBRTC_GONK)
   return static_cast<uint32_t>(syscall(__NR_gettid));
 #elif defined(WEBRTC_MAC) || defined(WEBRTC_IOS)
   return pthread_mach_thread_np(pthread_self());
+#elif defined(__NetBSD__)
+  return _lwp_self();
+#elif defined(__DragonFly__)
+  return lwp_gettid();
+#elif defined(__OpenBSD__)
+  return reinterpret_cast<uintptr_t>(pthread_self());
+#elif defined(__FreeBSD__)
+#  if __FreeBSD_version > 900030
+    return pthread_getthreadid_np();
+#  else
+    long lwpid;
+    thr_self(&lwpid);
+    return lwpid;
+#  endif
 #else
   return reinterpret_cast<uint32_t>(pthread_self());
 #endif
 }
 
 int ThreadPosix::Construct() {
   int result = 0;
-#if !defined(WEBRTC_ANDROID)
+#if !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK)
   // Enable immediate cancellation if requested, see Shutdown().
   result = pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
   if (result != 0) {
     return -1;
   }
   result = pthread_setcanceltype(PTHREAD_CANCEL_ASYNCHRONOUS, NULL);
   if (result != 0) {
     return -1;
@@ -166,28 +191,38 @@ int ThreadPosix::Construct() {
 }
 
 ThreadPosix::~ThreadPosix() {
   pthread_attr_destroy(&attr_);
   delete event_;
   delete crit_state_;
 }
 
-#define HAS_THREAD_ID !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC)
+#define HAS_THREAD_ID !defined(WEBRTC_IOS) && !defined(WEBRTC_MAC) && !defined(WEBRTC_BSD)
 
 bool ThreadPosix::Start(unsigned int& thread_id)
 {
   int result = pthread_attr_setdetachstate(&attr_, PTHREAD_CREATE_DETACHED);
   // Set the stack size to 1M.
   result |= pthread_attr_setstacksize(&attr_, 1024 * 1024);
+#if 0
+// Temporarily remove the attempt to set this to real-time scheduling.
+//
+// See: https://code.google.com/p/webrtc/issues/detail?id=1956
+//
+// To be removed when upstream is fixed.
 #ifdef WEBRTC_THREAD_RR
   const int policy = SCHED_RR;
 #else
   const int policy = SCHED_FIFO;
 #endif
+#else
+  const int policy = SCHED_OTHER;
+#endif
+
   event_->Reset();
   // If pthread_create() below succeeds, a thread is created and running.
   // From then on, don't return false: any later failure only means the
   // thread started but was not configured as requested, whereas the caller
   // of this API assumes that a false return value means the thread never
   // started.
   result |= pthread_create(&thread_, &attr_, &StartThread, this);
   if (result != 0) {
@@ -230,31 +265,39 @@ bool ThreadPosix::Start(unsigned int& th
     WEBRTC_TRACE(kTraceError, kTraceUtility, -1,
                  "unable to set thread priority");
   }
   return true;
 }
 
 // CPU_ZERO and CPU_SET are not available in NDK r7, so disable
 // SetAffinity on Android for now.
-#if (defined(WEBRTC_LINUX) && (!defined(WEBRTC_ANDROID)))
+#if defined(__FreeBSD__) || (defined(WEBRTC_LINUX) && !defined(WEBRTC_ANDROID) && !defined(WEBRTC_GONK))
 bool ThreadPosix::SetAffinity(const int* processor_numbers,
                               const unsigned int amount_of_processors) {
   if (!processor_numbers || (amount_of_processors == 0)) {
     return false;
   }
+#if defined(__FreeBSD__)
+  cpuset_t mask;
+#else
   cpu_set_t mask;
+#endif
   CPU_ZERO(&mask);
 
   for (unsigned int processor = 0;
        processor < amount_of_processors;
        ++processor) {
     CPU_SET(processor_numbers[processor], &mask);
   }
-#if defined(WEBRTC_ANDROID)
+#if defined(__FreeBSD__)
+  const int result = pthread_setaffinity_np(thread_,
+                             sizeof(mask),
+                             &mask);
+#elif defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK)
   // Android.
   const int result = syscall(__NR_sched_setaffinity,
                              pid_,
                              sizeof(mask),
                              &mask);
 #else
   // "Normal" Linux.
   const int result = sched_setaffinity(pid_,
@@ -305,25 +348,29 @@ bool ThreadPosix::Stop() {
   }
 }
 
 void ThreadPosix::Run() {
   {
     CriticalSectionScoped cs(crit_state_);
     alive_ = true;
   }
-#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID))
+#if (defined(WEBRTC_LINUX) || defined(WEBRTC_ANDROID) || defined(WEBRTC_GONK))
   pid_ = GetThreadId();
 #endif
   // The event the Start() is waiting for.
   event_->Set();
 
   if (set_thread_name_) {
 #ifdef WEBRTC_LINUX
     prctl(PR_SET_NAME, (unsigned long)name_, 0, 0, 0);
+#elif defined(__NetBSD__)
+    pthread_setname_np(pthread_self(), "%s", (void *)name_);
+#elif defined(WEBRTC_BSD)
+    pthread_set_name_np(pthread_self(), name_);
 #endif
     WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                  "Thread with name:%s started ", name_);
   } else {
     WEBRTC_TRACE(kTraceStateInfo, kTraceUtility, -1,
                  "Thread without name started");
   }
   bool alive = true;
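
Two themes in this file: thread IDs now come from each BSD's native facility (_lwp_self, lwp_gettid, pthread_getthreadid_np or thr_self), and Start() pins the scheduling policy to SCHED_OTHER until the upstream real-time-priority issue is resolved. The attribute setup around pthread_create() reduces to the following sketch, illustration only:

    #include <pthread.h>

    static void* Run(void* arg) { return arg; }

    bool StartDetached(pthread_t* thread) {
      pthread_attr_t attr;
      pthread_attr_init(&attr);
      // Detached thread with a 1 MB stack, as in Start() above.
      int result = pthread_attr_setdetachstate(&attr, PTHREAD_CREATE_DETACHED);
      result |= pthread_attr_setstacksize(&attr, 1024 * 1024);
      // SCHED_OTHER is the default policy; the patch pins it explicitly
      // instead of requesting SCHED_RR / SCHED_FIFO.
      result |= pthread_create(thread, &attr, &Run, NULL);
      pthread_attr_destroy(&attr);
      return result == 0;
    }
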
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_impl.cc
@@ -443,23 +443,23 @@ void TraceImpl::AddMessageToList(
 
   uint16_t idx = next_free_idx_[active_queue_];
   next_free_idx_[active_queue_]++;
 
   level_[active_queue_][idx] = level;
   length_[active_queue_][idx] = length;
   memcpy(message_queue_[active_queue_][idx], trace_message, length);
 
-  if (next_free_idx_[active_queue_] == WEBRTC_TRACE_MAX_QUEUE - 1) {
+  if (next_free_idx_[active_queue_] >= WEBRTC_TRACE_MAX_QUEUE - 1) {
     // Logging more messages than can be worked off. Log a warning.
     const char warning_msg[] = "WARNING MISSING TRACE MESSAGES\n";
-    level_[active_queue_][next_free_idx_[active_queue_]] = kTraceWarning;
-    length_[active_queue_][next_free_idx_[active_queue_]] = strlen(warning_msg);
-    memcpy(message_queue_[active_queue_][next_free_idx_[active_queue_]],
-           warning_msg, strlen(warning_msg));
+    level_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1] = kTraceWarning;
+    length_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1] = strlen(warning_msg);
+    memcpy(message_queue_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1],
+           warning_msg, length_[active_queue_][WEBRTC_TRACE_MAX_QUEUE-1]);
     next_free_idx_[active_queue_]++;
   }
 }
 
 bool TraceImpl::Run(void* obj) {
   return static_cast<TraceImpl*>(obj)->Process();
 }
 
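The old == test fired exactly once: after the warning was written, next_free_idx_ advanced past WEBRTC_TRACE_MAX_QUEUE - 1 and later messages indexed beyond the queue. Clamping with >= and writing the warning into the fixed last slot keeps every index in range. A simplified model of the fixed behavior, illustration only:

    #include <string.h>

    enum { kMaxQueue = 8, kMaxMessageLen = 64 };
    char queue[kMaxQueue][kMaxMessageLen];
    int next_free = 0;

    void Add(const char* msg) {
      if (next_free >= kMaxQueue)
        return;  // queue overflowed earlier; the warning is already in place
      strncpy(queue[next_free], msg, kMaxMessageLen - 1);
      queue[next_free][kMaxMessageLen - 1] = '\0';
      ++next_free;
      if (next_free >= kMaxQueue - 1) {
        // Reserve the fixed last slot for the overflow warning, exactly once.
        const char warning[] = "WARNING MISSING TRACE MESSAGES\n";
        memcpy(queue[kMaxQueue - 1], warning, sizeof(warning));
        next_free = kMaxQueue;
      }
    }
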
--- a/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc
+++ b/media/webrtc/trunk/webrtc/system_wrappers/source/trace_posix.cc
@@ -13,18 +13,16 @@
 #include <cassert>
 #include <stdarg.h>
 #include <stdio.h>
 #include <string.h>
 #include <sys/time.h>
 #include <time.h>
 #ifdef WEBRTC_ANDROID
 #include <pthread.h>
-#else
-#include <iostream>
 #endif
 
 #if defined(_DEBUG)
 #define BUILDMODE "d"
 #elif defined(DEBUG)
 #define BUILDMODE "d"
 #elif defined(NDEBUG)
 #define BUILDMODE "r"
@@ -52,17 +50,17 @@ TracePosix::~TracePosix() {
 
 int32_t TracePosix::AddTime(char* trace_message, const TraceLevel level) const {
   struct timeval system_time_high_res;
   if (gettimeofday(&system_time_high_res, 0) == -1) {
     return -1;
   }
   struct tm buffer;
   const struct tm* system_time =
-    localtime_r(&system_time_high_res.tv_sec, &buffer);
+    localtime_r((const time_t *)(&system_time_high_res.tv_sec), &buffer);
 
   const uint32_t ms_time = system_time_high_res.tv_usec / 1000;
   uint32_t prev_tickCount = 0;
   {
     CriticalSectionScoped lock(&crit_sect_);
     if (level == kTraceApiCall) {
       prev_tickCount = prev_tick_count_;
       prev_tick_count_ = ms_time;
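
The cast is needed because some platforms do not declare timeval::tv_sec as time_t, so &tv_sec no longer converts implicitly to the const time_t* that localtime_r() expects. An alternative that avoids the C-style cast is to copy through a local time_t; sketch, illustration only:

    #include <stdio.h>
    #include <sys/time.h>
    #include <time.h>

    void PrintWallClock() {
      struct timeval tv;
      if (gettimeofday(&tv, NULL) == -1)
        return;
      struct tm buffer;
      const time_t secs = static_cast<time_t>(tv.tv_sec);  // explicit, portable
      const struct tm* t = localtime_r(&secs, &buffer);
      printf("%2u:%02u:%02u:%3u\n", t->tm_hour, t->tm_min, t->tm_sec,
             static_cast<unsigned>(tv.tv_usec / 1000));
    }
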
--- a/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc
+++ b/media/webrtc/trunk/webrtc/test/channel_transport/udp_transport_impl.cc
@@ -13,60 +13,62 @@
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
 #include <time.h>
 
 #if defined(_WIN32)
 #include <winsock2.h>
 #include <ws2tcpip.h>
-#elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 #include <arpa/inet.h>
 #include <ctype.h>
 #include <fcntl.h>
 #include <netdb.h>
+#include <sys/socket.h>
 #include <net/if.h>
 #include <netinet/in.h>
 #include <stdlib.h>
 #include <sys/ioctl.h>
-#include <sys/socket.h>
 #include <sys/time.h>
 #include <unistd.h>
 #ifndef WEBRTC_IOS
 #include <net/if_arp.h>
 #endif
-#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 #if defined(WEBRTC_MAC)
+#include <machine/types.h>
+#endif
+#if defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 #include <ifaddrs.h>
-#include <machine/types.h>
 #endif
 #if defined(WEBRTC_LINUX)
 #include <linux/netlink.h>
 #include <linux/rtnetlink.h>
 #endif
 
 #include "webrtc/common_types.h"
 #include "webrtc/typedefs.h"
 #include "webrtc/system_wrappers/interface/critical_section_wrapper.h"
 #include "webrtc/system_wrappers/interface/rw_lock_wrapper.h"
 #include "webrtc/system_wrappers/interface/trace.h"
 #include "webrtc/test/channel_transport/udp_socket_manager_wrapper.h"
 
-#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 #define GetLastError() errno
 
 #define IFRSIZE ((int)(size * sizeof (struct ifreq)))
 
 #define NLMSG_OK_NO_WARNING(nlh,len)                                    \
   ((len) >= (int)sizeof(struct nlmsghdr) &&                             \
    (int)(nlh)->nlmsg_len >= (int)sizeof(struct nlmsghdr) &&             \
    (int)(nlh)->nlmsg_len <= (len))
 
-#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#endif // defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 namespace webrtc {
 namespace test {
 
 class SocketFactory : public UdpTransportImpl::SocketFactoryInterface {
  public:
   UdpSocketWrapper* CreateSocket(const int32_t id,
                                  UdpSocketManager* mgr,
@@ -2325,17 +2327,17 @@ uint32_t UdpTransport::InetAddrIPV4(cons
 {
     return ::inet_addr(ip);
 }
 
 int32_t UdpTransport::InetPresentationToNumeric(int32_t af,
                                                 const char* src,
                                                 void* dst)
 {
-#if defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
     const int32_t result = inet_pton(af, src, dst);
     return result > 0 ? 0 : -1;
 
 #elif defined(_WIN32)
     SocketAddress temp;
     int length=sizeof(SocketAddress);
 
     if(af == AF_INET)
@@ -2447,17 +2449,17 @@ int32_t UdpTransport::LocalHostAddressIP
                 break;
         };
     }
     freeaddrinfo(result);
     WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
                  "getaddrinfo failed to find address");
     return -1;
 
-#elif defined(WEBRTC_MAC)
+#elif defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
     struct ifaddrs* ptrIfAddrs = NULL;
     struct ifaddrs* ptrIfAddrsStart = NULL;
 
     getifaddrs(&ptrIfAddrsStart);
     ptrIfAddrs = ptrIfAddrsStart;
     while(ptrIfAddrs)
     {
         if(ptrIfAddrs->ifa_addr->sa_family == AF_INET6)
@@ -2639,17 +2641,17 @@ int32_t UdpTransport::LocalHostAddress(u
     }
     else
     {
         int32_t error = WSAGetLastError();
         WEBRTC_TRACE(kTraceWarning, kTraceTransport, -1,
                      "gethostbyname failed, error:%d", error);
         return -1;
     }
-#elif (defined(WEBRTC_MAC))
+#elif (defined(WEBRTC_BSD) || defined(WEBRTC_MAC))
     char localname[255];
     if (gethostname(localname, 255) != -1)
     {
         hostent* localHost;
         localHost = gethostbyname(localname);
         if(localHost)
         {
             if(localHost->h_addrtype != AF_INET)
@@ -2778,17 +2780,17 @@ int32_t UdpTransport::IPAddress(const So
         }
 
         source_port = address._sockaddr_in6.sin6_port;
     }
     // Convert port number to network byte order.
     sourcePort = htons(source_port);
     return 0;
 
- #elif defined(WEBRTC_LINUX) || defined(WEBRTC_MAC)
+ #elif defined(WEBRTC_LINUX) || defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
     int32_t ipFamily = address._sockaddr_storage.sin_family;
     const void* ptrNumericIP = NULL;
 
     if(ipFamily == AF_INET)
     {
         ptrNumericIP = &(address._sockaddr_in.sin_addr);
     }
     else if(ipFamily == AF_INET6)
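
The BSDs join Mac on the getifaddrs() path for local-address discovery. A standalone sketch of that walk, illustration only; unlike the wrapper above, it also checks ifa_addr for NULL, which getifaddrs() permits for interfaces with no assigned address:

    #include <stdio.h>
    #include <arpa/inet.h>
    #include <ifaddrs.h>
    #include <netinet/in.h>

    int main() {
      struct ifaddrs* start = NULL;
      if (getifaddrs(&start) != 0)
        return 1;
      for (const struct ifaddrs* p = start; p != NULL; p = p->ifa_next) {
        // ifa_addr may be NULL, e.g. for an interface with no address.
        if (p->ifa_addr == NULL || p->ifa_addr->sa_family != AF_INET)
          continue;
        char buf[INET_ADDRSTRLEN];
        const struct sockaddr_in* sa =
            reinterpret_cast<const struct sockaddr_in*>(p->ifa_addr);
        if (inet_ntop(AF_INET, &sa->sin_addr, buf, sizeof(buf)) != NULL)
          printf("%s: %s\n", p->ifa_name, buf);
      }
      freeifaddrs(start);
      return 0;
    }
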
--- a/media/webrtc/trunk/webrtc/typedefs.h
+++ b/media/webrtc/trunk/webrtc/typedefs.h
@@ -12,17 +12,17 @@
 // Much of it is derived from Chromium's build/build_config.h.
 
 #ifndef WEBRTC_TYPEDEFS_H_
 #define WEBRTC_TYPEDEFS_H_
 
 // For access to standard POSIXish features, use WEBRTC_POSIX instead of a
 // more specific macro.
 #if defined(WEBRTC_MAC) || defined(WEBRTC_LINUX) || \
-    defined(WEBRTC_ANDROID)
+    defined(WEBRTC_ANDROID) || defined(WEBRTC_BSD)
 #define WEBRTC_POSIX
 #endif
 
 // Processor architecture detection.  For more info on what's defined, see:
 //   http://msdn.microsoft.com/en-us/library/b0084kay.aspx
 //   http://www.agner.org/optimize/calling_conventions.pdf
 //   or with gcc, run: "echo | gcc -E -dM -"
 // TODO(andrew): replace WEBRTC_LITTLE_ENDIAN with WEBRTC_ARCH_LITTLE_ENDIAN.
@@ -44,20 +44,80 @@
 // definition warnings.
 //#define WEBRTC_ARCH_ARM
 // TODO(andrew): Chromium uses the following two defines. Should we switch?
 //#define WEBRTC_ARCH_ARM_FAMILY
 //#define WEBRTC_ARCH_ARMEL
 #define WEBRTC_ARCH_32_BITS
 #define WEBRTC_ARCH_LITTLE_ENDIAN
 #define WEBRTC_LITTLE_ENDIAN
-#elif defined(__MIPSEL__)
-#define WEBRTC_ARCH_32_BITS
+#elif defined(__powerpc64__)
+#define WEBRTC_ARCH_PPC64 1
+#define WEBRTC_ARCH_64_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__ppc__) || defined(__powerpc__)
+#define WEBRTC_ARCH_PPC 1
+#define WEBRTC_ARCH_32_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__sparc64__)
+#define WEBRTC_ARCH_SPARC 1
+#define WEBRTC_ARCH_64_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__sparc__)
+#define WEBRTC_ARCH_SPARC 1
+#define WEBRTC_ARCH_32_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__mips__)
+#define WEBRTC_ARCH_MIPS 1
+#if defined(_ABI64) && _MIPS_SIM == _ABI64
+#define WEBRTC_ARCH_64_BITS 1
+#else
+#define WEBRTC_ARCH_32_BITS 1
+#endif
+#if defined(__MIPSEB__)
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#else
 #define WEBRTC_ARCH_LITTLE_ENDIAN
 #define WEBRTC_LITTLE_ENDIAN
+#endif
+#elif defined(__hppa__)
+#define WEBRTC_ARCH_HPPA 1
+#define WEBRTC_ARCH_32_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__ia64__)
+#define WEBRTC_ARCH_IA64 1
+#define WEBRTC_ARCH_64_BITS 1
+#define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
+#elif defined(__s390x__)
+#define WEBRTC_ARCH_S390X 1
+#define WEBRTC_ARCH_64_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__s390__)
+#define WEBRTC_ARCH_S390 1
+#define WEBRTC_ARCH_32_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
+#elif defined(__alpha__)
+#define WEBRTC_ARCH_ALPHA 1
+#define WEBRTC_ARCH_64_BITS 1
+#define WEBRTC_ARCH_LITTLE_ENDIAN
+#define WEBRTC_LITTLE_ENDIAN
+#elif defined(__avr32__)
+#define WEBRTC_ARCH_AVR32 1
+#define WEBRTC_ARCH_32_BITS 1
+#define WEBRTC_ARCH_BIG_ENDIAN
+#define WEBRTC_BIG_ENDIAN
 #else
 #error Please add support for your architecture in typedefs.h
 #endif
 
 #if defined(__SSE2__) || defined(_MSC_VER)
 #define WEBRTC_USE_SSE2
 #endif
 
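Each new architecture entry pairs a word-size define with an endianness define. When adding one, the compile-time choice is easy to sanity-check against the hardware at runtime; sketch, illustration only (assumes the header is reachable as webrtc/typedefs.h, as elsewhere in this tree):

    #include <stdint.h>
    #include <stdio.h>
    #include "webrtc/typedefs.h"  // supplies the WEBRTC_*_ENDIAN defines

    int main() {
      const uint32_t probe = 0x01020304;
      const uint8_t first_byte = *reinterpret_cast<const uint8_t*>(&probe);
    #if defined(WEBRTC_BIG_ENDIAN)
      printf("WEBRTC_BIG_ENDIAN %s the hardware\n",
             first_byte == 0x01 ? "matches" : "CONTRADICTS");
    #else
      printf("WEBRTC_LITTLE_ENDIAN %s the hardware\n",
             first_byte == 0x04 ? "matches" : "CONTRADICTS");
    #endif
      printf("pointer width: %u bits\n",
             static_cast<unsigned>(sizeof(void*) * 8));
      return 0;
    }
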
--- a/media/webrtc/trunk/webrtc/video_engine/vie_defines.h
+++ b/media/webrtc/trunk/webrtc/video_engine/vie_defines.h
@@ -172,17 +172,17 @@ inline int ChannelId(const int moduleId)
 
   // Example: "Oct 10 2002 12:05:30 r".
   #define BUILDINFO BUILDDATE TEXT(" ") BUILDTIME TEXT(" ") BUILDMODE
   #define RENDER_MODULE_TYPE kRenderWindows
 #endif
 
 // Linux specific.
 #ifndef WEBRTC_ANDROID
-#ifdef WEBRTC_LINUX
+#if defined(WEBRTC_LINUX) || defined(WEBRTC_BSD)
   //  Build information macros.
   #if defined(_DEBUG)
   #define BUILDMODE "d"
   #elif defined(DEBUG)
   #define BUILDMODE "d"
   #elif defined(NDEBUG)
   #define BUILDMODE "r"
   #else
--- a/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc
+++ b/media/webrtc/trunk/webrtc/video_engine/vie_impl.cc
@@ -209,21 +209,23 @@ int VideoEngine::SetAndroidObjects(void*
                "SetAndroidObjects()");
 
 #if defined(WEBRTC_ANDROID) && !defined(WEBRTC_CHROMIUM_BUILD)
   if (SetCaptureAndroidVM(javaVM, javaContext) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
                  "Could not set capture Android VM");
     return -1;
   }
+#ifdef WEBRTC_INCLUDE_INTERNAL_VIDEO_RENDER
   if (SetRenderAndroidVM(javaVM) != 0) {
     WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
                  "Could not set render Android VM");
     return -1;
   }
+#endif
   return 0;
 #else
   WEBRTC_TRACE(kTraceError, kTraceVideo, g_vie_active_instance_counter,
                "WEBRTC_ANDROID not defined for VideoEngine::SetAndroidObjects");
   return -1;
 #endif
 }
 
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_base.h
@@ -77,17 +77,17 @@ public:
     // Sets the name of the trace file and enables non-encrypted trace messages.
     static int SetTraceFile(const char* fileNameUTF8,
                             const bool addFileCounter = false);
 
     // Installs the TraceCallback implementation to ensure that the user
     // receives callbacks for generated trace messages.
     static int SetTraceCallback(TraceCallback* callback);
 
-    static int SetAndroidObjects(void* javaVM, void* env, void* context);
+    static int SetAndroidObjects(void* javaVM, void* context);
 
 protected:
     VoiceEngine() {}
     ~VoiceEngine() {}
 };
 
 // VoEBase
 class WEBRTC_DLLEXPORT VoEBase
--- a/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/include/voe_volume_control.h
@@ -113,15 +113,15 @@ public:
     // Scales volume of the |left| and |right| channels independently.
     // Valid scale range is [0.0, 1.0].
     virtual int SetOutputVolumePan(int channel, float left, float right) = 0;
 
     // Gets the current left and right scaling factors.
     virtual int GetOutputVolumePan(int channel, float& left, float& right) = 0;
 
 protected:
-    VoEVolumeControl() {};
-    virtual ~VoEVolumeControl() {};
+    VoEVolumeControl() {}
+    virtual ~VoEVolumeControl() {}
 };
 
 }  // namespace webrtc
 
 #endif  // #ifndef WEBRTC_VOICE_ENGINE_VOE_VOLUME_CONTROL_H
--- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_core.gypi
+++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_core.gypi
@@ -27,16 +27,19 @@
         'include',
         '<(webrtc_root)/modules/audio_device',
       ],
       'direct_dependent_settings': {
         'include_dirs': [
           'include',
         ],
       },
+      'defines': [
+        'WEBRTC_EXTERNAL_TRANSPORT',
+      ],
       'sources': [
         '../common_types.h',
         '../engine_configurations.h',
         '../typedefs.h',
         'include/voe_audio_processing.h',
         'include/voe_base.h',
         'include/voe_call_report.h',
         'include/voe_codec.h',
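
WEBRTC_EXTERNAL_TRANSPORT builds VoE without its internal UDP sockets, so the embedder supplies packet I/O by implementing webrtc::Transport from common_types.h. A sketch of such an implementation, illustration only; treat the exact virtual signatures as an assumption about this tree:

    #include "webrtc/common_types.h"  // declares webrtc::Transport

    // Hypothetical embedder transport that drops everything on the floor.
    class NullTransport : public webrtc::Transport {
     public:
      // VoE hands ready-to-send RTP here; a real embedder would forward
      // the buffer to its own socket layer.
      virtual int SendPacket(int channel, const void* data, int len) {
        return len;  // pretend the packet went out
      }
      virtual int SendRTCPPacket(int channel, const void* data, int len) {
        return len;
      }
    };
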
--- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
+++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_defines.h
@@ -309,21 +309,28 @@ inline int VoEChannelId(const int module
 #ifdef ANDROID
 
 // ----------------------------------------------------------------------------
 //  Defines
 // ----------------------------------------------------------------------------
 
   // Always excluded for Android builds
   #undef WEBRTC_CODEC_ISAC
-  #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
+  // We need WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT to make things work on Android.
+  // Motivation for the commented-out undef below is unclear.
+  //
+  // #undef WEBRTC_VOE_EXTERNAL_REC_AND_PLAYOUT
   #undef WEBRTC_CONFERENCING
   #undef WEBRTC_TYPING_DETECTION
 
-  #define ANDROID_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat)
+  // This macro used to make the calling function set an error code and return.
+  // However, the unit tests pass and behave reasonably without it, so it is
+  // disabled for now; see bug 819856.
+  #define ANDROID_NOT_SUPPORTED(stat)
+  //#define ANDROID_NOT_SUPPORTED(stat) NOT_SUPPORTED(stat)
 
 #else // LINUX PC
 
 // ----------------------------------------------------------------------------
 //  Defines
 // ----------------------------------------------------------------------------
 
   #define ANDROID_NOT_SUPPORTED(stat)
@@ -332,41 +339,43 @@ inline int VoEChannelId(const int module
 
 #else
 #define ANDROID_NOT_SUPPORTED(stat)
 #endif  // #ifdef WEBRTC_LINUX
 
 // *** WEBRTC_MAC ***
 // including iPhone
 
-#ifdef WEBRTC_MAC
+#if defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 #include <pthread.h>
 #include <sys/types.h>
 #include <sys/socket.h>
 #include <netinet/in.h>
 #include <arpa/inet.h>
 #include <stdio.h>
 #include <string.h>
 #include <stdlib.h>
 #include <errno.h>
 #include <sys/stat.h>
 #include <unistd.h>
 #include <fcntl.h>
 #include <sched.h>
 #include <sys/time.h>
 #include <time.h>
+#if !defined(WEBRTC_BSD)
 #include <AudioUnit/AudioUnit.h>
 #if !defined(WEBRTC_IOS)
   #include <CoreServices/CoreServices.h>
   #include <CoreAudio/CoreAudio.h>
   #include <AudioToolbox/DefaultAudioOutput.h>
   #include <AudioToolbox/AudioConverter.h>
   #include <CoreAudio/HostTime.h>
 #endif
+#endif
 
 #define DWORD unsigned long int
 #define WINAPI
 #define LPVOID void *
 #define FALSE 0
 #define TRUE 1
 #define SOCKADDR_IN struct sockaddr_in
 #define UINT unsigned int
@@ -412,11 +421,11 @@ inline int VoEChannelId(const int module
 //  Defines
 // ----------------------------------------------------------------------------
 
   #define IPHONE_NOT_SUPPORTED(stat)
 #endif
 
 #else
 #define IPHONE_NOT_SUPPORTED(stat)
-#endif  // #ifdef WEBRTC_MAC
+#endif  // #if defined(WEBRTC_BSD) || defined(WEBRTC_MAC)
 
 #endif // WEBRTC_VOICE_ENGINE_VOICE_ENGINE_DEFINES_H
--- a/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc
+++ b/media/webrtc/trunk/webrtc/voice_engine/voice_engine_impl.cc
@@ -3,17 +3,17 @@
  *
  *  Use of this source code is governed by a BSD-style license
  *  that can be found in the LICENSE file in the root of the source
  *  tree. An additional intellectual property rights grant can be found
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
-#if defined(WEBRTC_ANDROID) && !defined(WEBRTC_ANDROID_OPENSLES)
+#if defined(ANDROID) && !defined(MOZ_WIDGET_GONK)
 #include "modules/audio_device/android/audio_device_jni_android.h"
 #endif
 
 #include "voice_engine_impl.h"
 #include "trace.h"
 
 namespace webrtc
 {
@@ -136,23 +136,19 @@ bool VoiceEngine::Delete(VoiceEngine*& v
         WEBRTC_TRACE(kTraceWarning, kTraceVoice, -1,
             "VoiceEngine::Delete did not release the very last reference.  "
             "%d references remain.", ref);
     }
 
     return true;
 }
 
-int VoiceEngine::SetAndroidObjects(void* javaVM, void* env, void* context)
+int VoiceEngine::SetAndroidObjects(void* javaVM, void* context)
 {
-#ifdef WEBRTC_ANDROID
-#ifdef WEBRTC_ANDROID_OPENSLES
-  return 0;
-#else
-  return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
-      javaVM, env, context);
-#endif
+#if defined(ANDROID) && !defined(MOZ_WIDGET_GONK)
+  return AudioDeviceAndroidJni::SetAndroidAudioDeviceObjects(
+      javaVM, context);
 #else
   return -1;
 #endif
 }
 
 } //namespace webrtc
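
With the env parameter gone (a JNIEnv can be obtained from the JavaVM when needed), callers pass only the VM and the application context. A hypothetical caller, illustration only:

    #include "webrtc/voice_engine/include/voe_base.h"

    // Hypothetical embedder glue: returns 0 on success on Android/Java
    // builds and -1 elsewhere, matching the implementation above.
    int InitVoiceEngineForAndroid(void* java_vm, void* context) {
      return webrtc::VoiceEngine::SetAndroidObjects(java_vm, context);
    }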