Bug 918372 - Use RAII and JNI Frames for when we cannot attach+detach the JVM. r=blassey a=lsblakk
author Gian-Carlo Pascutto <gpascutto@mozilla.com>
Wed, 25 Sep 2013 08:08:37 +0200
changeset 155558 891eaf3a213e096f86bc2c49602f3d1aee8b6185
parent 155557 bc7b5e1c60a2543f79e415fb6a77eb108a69b8ce
child 155559 d9f5ddccb15ebfbe2ff6fb3225ea07c4c406db95
push id 4327
push user gpascutto@mozilla.com
push date Mon, 30 Sep 2013 12:10:44 +0000
treeherder mozilla-aurora@891eaf3a213e
reviewers blassey, lsblakk
bugs 918372
milestone 26.0a2
media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/device_info_android.cc
@@ -49,67 +49,60 @@ DeviceInfoAndroid::DeviceInfoAndroid(con
 int32_t DeviceInfoAndroid::Init() {
   return 0;
 }
 
 DeviceInfoAndroid::~DeviceInfoAndroid() {
 }
 
 uint32_t DeviceInfoAndroid::NumberOfDevices() {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return 0;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return 0;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                "%s GetMethodId", __FUNCTION__);
   // get the method ID for the Android Java GetDeviceUniqueName name.
   jmethodID cid = env->GetMethodID(javaCmDevInfoClass,
                                    "NumberOfDevices",
                                    "()I");
 
   jint numberOfDevices = 0;
   if (cid != NULL) {
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                  "%s Calling Number of devices", __FUNCTION__);
     numberOfDevices = env->CallIntMethod(javaCmDevInfoObject, cid);
   }
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
 
   if (numberOfDevices > 0)
     return numberOfDevices;
   return 0;
 }
 
 int32_t DeviceInfoAndroid::GetDeviceName(
     uint32_t deviceNumber,
     char* deviceNameUTF8,
     uint32_t deviceNameLength,
     char* deviceUniqueIdUTF8,
     uint32_t deviceUniqueIdUTF8Length,
     char* /*productUniqueIdUTF8*/,
     uint32_t /*productUniqueIdUTF8Length*/) {
 
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
   int32_t result = 0;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached)!= 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   // get the method ID for the Android Java GetDeviceUniqueName name.
   jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetDeviceUniqueName",
                                    "(I)Ljava/lang/String;");
   if (cid != NULL) {
     jobject javaDeviceNameObj = env->CallObjectMethod(javaCmDevInfoObject,
                                                       cid, deviceNumber);
     if (javaDeviceNameObj == NULL) {
@@ -148,90 +141,80 @@ int32_t DeviceInfoAndroid::GetDeviceName
   }
   else {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                  "%s: Failed to find GetDeviceUniqueName function id",
                  __FUNCTION__);
     result = -1;
   }
 
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
-
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: result %d", __FUNCTION__, (int) result);
   return result;
 
 }
 
 int32_t DeviceInfoAndroid::CreateCapabilityMap(
     const char* deviceUniqueIdUTF8) {
   MapItem* item = NULL;
   while ((item = _captureCapabilities.Last())) {
     delete (VideoCaptureCapability*) item->GetItem();
     _captureCapabilities.Erase(item);
   }
 
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   // Find the capability class
   jclass javaCapClass = jsjni_GetGlobalClassRef(AndroidJavaCaptureCapabilityClass);
   if (javaCapClass == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: SetAndroidCaptureClasses must be called first!",
                  __FUNCTION__);
     return -1;
   }
 
   // get the method ID for the Android Java GetCapabilityArray .
   jmethodID cid = env->GetMethodID(
       javaCmDevInfoClass,
       "GetCapabilityArray",
       "(Ljava/lang/String;)[Lorg/webrtc/videoengine/CaptureCapabilityAndroid;");
   if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't find method GetCapabilityArray.", __FUNCTION__);
     return -1;
   }
   // Create a jstring so we can pass the deviceUniquName to the java method.
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
 
   if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't create string for  method GetCapabilityArray.",
                  __FUNCTION__);
     return -1;
   }
   // Call the java class and get an array with capabilities back.
   jobject javaCapabilitiesObj = env->CallObjectMethod(javaCmDevInfoObject,
                                                       cid, capureIdString);
   if (!javaCapabilitiesObj) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Failed to call java GetCapabilityArray.",
                  __FUNCTION__);
     return -1;
   }
 
   jfieldID widthField = env->GetFieldID(javaCapClass, "width", "I");
   jfieldID heigtField = env->GetFieldID(javaCapClass, "height", "I");
   jfieldID maxFpsField = env->GetFieldID(javaCapClass, "maxFPS", "I");
   if (widthField == NULL || heigtField == NULL || maxFpsField == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Failed to get field Id.", __FUNCTION__);
     return -1;
   }
 
   const jsize numberOfCapabilities =
       env->GetArrayLength((jarray) javaCapabilitiesObj);
 
@@ -256,59 +239,52 @@ int32_t DeviceInfoAndroid::CreateCapabil
   _lastUsedDeviceName = (char*) realloc(_lastUsedDeviceName,
                                         _lastUsedDeviceNameLength + 1);
   memcpy(_lastUsedDeviceName,
          deviceUniqueIdUTF8,
          _lastUsedDeviceNameLength + 1);
 
   env->DeleteGlobalRef(javaCapClass);
 
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
   WEBRTC_TRACE(webrtc::kTraceInfo, webrtc::kTraceVideoCapture, _id,
                "CreateCapabilityMap %d", _captureCapabilities.Size());
 
   return _captureCapabilities.Size();
 }
 
 int32_t DeviceInfoAndroid::GetOrientation(
     const char* deviceUniqueIdUTF8,
     VideoCaptureRotation& orientation) {
-  JNIEnv *env;
-  jclass javaCmDevInfoClass;
-  jobject javaCmDevInfoObject;
-  bool attached = false;
-  if (VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-          env,
-          javaCmDevInfoClass,
-          javaCmDevInfoObject,
-          attached) != 0)
-    return -1;
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return -1;
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
 
   // get the method ID for the Android Java GetOrientation .
   jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "GetOrientation",
                                    "(Ljava/lang/String;)I");
   if (cid == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't find method GetOrientation.", __FUNCTION__);
     return -1;
   }
   // Create a jstring so we can pass the deviceUniquName to the java method.
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
   if (capureIdString == NULL) {
-    VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Can't create string for  method GetCapabilityArray.",
                  __FUNCTION__);
     return -1;
   }
   // Call the java class and get the orientation.
   jint jorientation = env->CallIntMethod(javaCmDevInfoObject, cid,
                                          capureIdString);
-  VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(attached);
 
   int32_t retValue = 0;
   switch (jorientation) {
     case -1: // Error
       orientation = kCameraRotate0;
       retValue = -1;
       break;
     case 0:
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.cc
@@ -211,72 +211,16 @@ int32_t VideoCaptureAndroid::SetAndroidO
       return -1;
     }
     return 0;
     env = (JNIEnv *) NULL;
   }
   return 0;
 }
 
-int32_t VideoCaptureAndroid::AttachAndUseAndroidDeviceInfoObjects(
-    JNIEnv*& env,
-    jclass& javaCmDevInfoClass,
-    jobject& javaCmDevInfoObject,
-    bool& attached) {
-
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                 "%s: AttachAndUseAndroidDeviceInfoObj.",
-                 __FUNCTION__);
-
-  // get the JNI env for this thread
-  if (!g_jvm) {
-    WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                 "%s: SetAndroidObjects not called with a valid JVM.",
-                 __FUNCTION__);
-    return -1;
-  }
-  attached = false;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-      return -1;
-    }
-    attached = true;
-    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                 "%s: attach success", __FUNCTION__);
-  } else {
-    WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                 "%s: did not attach because JVM Env present", __FUNCTION__);
-  }
-  MOZ_ASSERT(g_javaCmDevInfoClass != nullptr);
-  MOZ_ASSERT(g_javaCmDevInfoObject != nullptr);
-  javaCmDevInfoClass = g_javaCmDevInfoClass;
-  javaCmDevInfoObject = g_javaCmDevInfoObject;
-  return 0;
-
-}
-
-int32_t VideoCaptureAndroid::ReleaseAndroidDeviceInfoObjects(
-    bool attached) {
-  if (attached && g_jvm->DetachCurrentThread() < 0) {
-    WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, -1,
-                 "%s: Could not detach thread from JVM", __FUNCTION__);
-    return -1;
-  } else if (!attached) {
-      WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
-                   "%s: not attached, no detach", __FUNCTION__);
-  }
-  return 0;
-}
-
 /*
  * JNI callback from Java class. Called
  * when the camera has a new frame to deliver
  * Class:     org_webrtc_capturemodule_VideoCaptureAndroid
  * Method:    ProvideCameraFrame
  * Signature: ([BIJ)V
  */
 void JNICALL VideoCaptureAndroid::ProvideCameraFrame(JNIEnv * env,
@@ -350,54 +294,45 @@ int32_t VideoCaptureAndroid::Init(const 
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
                __FUNCTION__);
   // use the jvm that has been set
   if (!g_jvm) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: Not a valid Java VM pointer", __FUNCTION__);
     return -1;
   }
-  // get the JNI env for this thread
-  JNIEnv *env;
-  bool isAttached = false;
-  int32_t rotation = 0;
 
-  // get the JNI env for this thread
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
       return -1;
-    }
-    isAttached = true;
-  }
+
+  jclass javaCmDevInfoClass = jniFrame.GetCmDevInfoClass();
+  jobject javaCmDevInfoObject = jniFrame.GetCmDevInfoObject();
+
+  int32_t rotation = 0;
 
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, _id,
                "get method id");
-
   // get the method ID for the Android Java
   // CaptureDeviceInfoClass AllocateCamera factory method.
   char signature[256];
   sprintf(signature, "(IJLjava/lang/String;)L%s;", AndroidJavaCaptureClass);
 
-  jmethodID cid = env->GetMethodID(g_javaCmDevInfoClass, "AllocateCamera",
+  jmethodID cid = env->GetMethodID(javaCmDevInfoClass, "AllocateCamera",
                                    signature);
   if (cid == NULL) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
                  "%s: could not get constructor ID", __FUNCTION__);
     return -1; /* exception thrown */
   }
 
   jstring capureIdString = env->NewStringUTF((char*) deviceUniqueIdUTF8);
   // construct the object by calling the static constructor object
-  jobject javaCameraObjLocal = env->CallObjectMethod(g_javaCmDevInfoObject,
+  jobject javaCameraObjLocal = env->CallObjectMethod(javaCmDevInfoObject,
                                                      cid, (jint) id,
                                                      (jlong) this,
                                                      capureIdString);
   if (!javaCameraObjLocal) {
     WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceVideoCapture, _id,
                  "%s: could not create Java Capture object", __FUNCTION__);
     return -1;
   }
@@ -407,117 +342,68 @@ int32_t VideoCaptureAndroid::Init(const 
   _javaCaptureObj = env->NewGlobalRef(javaCameraObjLocal);
   if (!_javaCaptureObj) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceAudioDevice, _id,
                  "%s: could not create Java camera object reference",
                  __FUNCTION__);
     return -1;
   }
 
-  // Delete local object ref, we only use the global ref
-  env->DeleteLocalRef(javaCameraObjLocal);
-
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
-
   return 0;
 }
 
 VideoCaptureAndroid::~VideoCaptureAndroid() {
   WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1, "%s:",
                __FUNCTION__);
   if (_javaCaptureObj == NULL || g_jvm == NULL) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                  "%s: Nothing to clean", __FUNCTION__);
   }
   else {
-    bool isAttached = false;
-    // get the JNI env for this thread
-    JNIEnv *env;
-    if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-      // try to attach the thread and get the env
-      // Attach this thread to JVM
-      jint res = g_jvm->AttachCurrentThread(&env, NULL);
-      if ((res < 0) || !env) {
-        WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture,
-                     _id,
-                     "%s: Could not attach thread to JVM (%d, %p)",
-                     __FUNCTION__, res, env);
-      }
-      else {
-        isAttached = true;
-      }
-    }
+    AutoLocalJNIFrame jniFrame;
+    JNIEnv* env = jniFrame.GetEnv();
+    if (!env)
+        return;
 
-    if (env) {
-      // get the method ID for the Android Java CaptureClass static
-      // DeleteVideoCaptureAndroid  method. Call this to release the camera so
-      // another application can use it.
-      jmethodID cid = env->GetStaticMethodID(
-          g_javaCmClass,
-          "DeleteVideoCaptureAndroid",
-          "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
-      if (cid != NULL) {
-        WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
-                     "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
-        // Close the camera by calling the static destruct function.
-        env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
+    // Get the method ID for the Android Java CaptureClass static
+    // DeleteVideoCaptureAndroid method. Call this to release the camera so
+    // another application can use it.
+    jmethodID cid = env->GetStaticMethodID(g_javaCmClass,
+                                           "DeleteVideoCaptureAndroid",
+                                           "(Lorg/webrtc/videoengine/VideoCaptureAndroid;)V");
+    if (cid != NULL) {
+      WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
+                   "%s: Call DeleteVideoCaptureAndroid", __FUNCTION__);
+      // Close the camera by calling the static destruct function.
+      env->CallStaticVoidMethod(g_javaCmClass, cid, _javaCaptureObj);
 
-        // Delete global object ref to the camera.
-        env->DeleteGlobalRef(_javaCaptureObj);
-
-        _javaCaptureObj = NULL;
-      }
-      else {
+      // Delete global object ref to the camera.
+      env->DeleteGlobalRef(_javaCaptureObj);
+      _javaCaptureObj = NULL;
+    } else {
         WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                      "%s: Failed to find DeleteVideoCaptureAndroid id",
                      __FUNCTION__);
-      }
-    }
-
-    // Detach this thread if it was attached
-    if (isAttached) {
-      if (g_jvm->DetachCurrentThread() < 0) {
-        WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice,
-                     _id, "%s: Could not detach thread from JVM",
-                     __FUNCTION__);
-      }
     }
   }
 }
 
 int32_t VideoCaptureAndroid::StartCapture(
     const VideoCaptureCapability& capability) {
   CriticalSectionScoped cs(&_apiCs);
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: ", __FUNCTION__);
 
-  bool isAttached = false;
   int32_t result = 0;
   int32_t rotation = 0;
-  // get the JNI env for this thread
-  JNIEnv *env;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-    }
-    else {
-      isAttached = true;
-    }
-  }
+
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return -1;
 
   if (_capInfo.GetBestMatchedCapability(_deviceUniqueId, capability,
                                         _frameInfo) < 0) {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                  "%s: GetBestMatchedCapability failed. Req cap w%d h%d",
                  __FUNCTION__, capability.width, capability.height);
     return -1;
   }
@@ -539,55 +425,36 @@ int32_t VideoCaptureAndroid::StartCaptur
     result = env->CallIntMethod(_javaCaptureObj, cid, _frameInfo.width,
                                 _frameInfo.height, _frameInfo.maxFPS);
   }
   else {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                  "%s: Failed to find StartCapture id", __FUNCTION__);
   }
 
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
-
   if (result == 0) {
     _requestedCapability = capability;
     _captureStarted = true;
   }
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: result %d", __FUNCTION__, result);
   return result;
 }
 
 int32_t VideoCaptureAndroid::StopCapture() {
   CriticalSectionScoped cs(&_apiCs);
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: ", __FUNCTION__);
 
-  bool isAttached = false;
   int32_t result = 0;
-  // get the JNI env for this thread
-  JNIEnv *env = NULL;
-  if (g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4) != JNI_OK) {
-    // try to attach the thread and get the env
-    // Attach this thread to JVM
-    jint res = g_jvm->AttachCurrentThread(&env, NULL);
-    if ((res < 0) || !env) {
-      WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, _id,
-                   "%s: Could not attach thread to JVM (%d, %p)",
-                   __FUNCTION__, res, env);
-    }
-    else {
-      isAttached = true;
-    }
-  }
+
+  AutoLocalJNIFrame jniFrame;
+  JNIEnv* env = jniFrame.GetEnv();
+  if (!env)
+      return -1;
 
   memset(&_requestedCapability, 0, sizeof(_requestedCapability));
   memset(&_frameInfo, 0, sizeof(_frameInfo));
 
   // get the method ID for the Android Java CaptureClass StopCapture  method.
   jmethodID cid = env->GetMethodID(g_javaCmClass, "StopCapture", "()I");
   if (cid != NULL) {
     WEBRTC_TRACE(webrtc::kTraceDebug, webrtc::kTraceVideoCapture, -1,
@@ -595,23 +462,16 @@ int32_t VideoCaptureAndroid::StopCapture
     // Close the camera by calling the static destruct function.
     result = env->CallIntMethod(_javaCaptureObj, cid);
   }
   else {
     WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
                  "%s: Failed to find StopCapture id", __FUNCTION__);
   }
 
-  // Detach this thread if it was attached
-  if (isAttached) {
-    if (g_jvm->DetachCurrentThread() < 0) {
-      WEBRTC_TRACE(webrtc::kTraceWarning, webrtc::kTraceAudioDevice, _id,
-                   "%s: Could not detach thread from JVM", __FUNCTION__);
-    }
-  }
   _captureStarted = false;
 
   WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
                "%s: result %d", __FUNCTION__, result);
   return result;
 }
 
 bool VideoCaptureAndroid::CaptureStarted() {
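
A note on why Init() above no longer calls env->DeleteLocalRef(javaCameraObjLocal): the new guard brackets all JNI work between PushLocalFrame and PopLocalFrame, so every local reference created inside the scope is released in one call when the frame is popped. A minimal standalone sketch of that mechanism (the function and parameter names here are illustrative, not part of the patch):

  #include <jni.h>

  // Create many local refs inside a pushed frame and release them all at once,
  // instead of deleting each one by hand.
  static void EnumerateDeviceNames(JNIEnv* env, jobject devInfo,
                                   jmethodID getUniqueName, jint count) {
    if (env->PushLocalFrame(count + 1) < 0)
      return;  // OutOfMemoryError is pending; no frame was pushed.
    for (jint i = 0; i < count; ++i) {
      // Each call returns a new local reference.
      jobject name = env->CallObjectMethod(devInfo, getUniqueName, i);
      (void) name;  // ... use the name; no DeleteLocalRef needed ...
    }
    env->PopLocalFrame(NULL);  // Frees every local ref created since the push.
  }
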
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/video_capture_android.h
@@ -7,60 +7,169 @@
  *  in the file PATENTS.  All contributing project authors may
  *  be found in the AUTHORS file in the root of the source tree.
  */
 
 #ifndef WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
 #define WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
 
 #include <jni.h>
+#include <assert.h>
+#include "trace.h"
 #include "device_info_android.h"
 #include "../video_capture_impl.h"
 
 #define AndroidJavaCaptureClass "org/webrtc/videoengine/VideoCaptureAndroid"
 
 namespace webrtc {
 namespace videocapturemodule {
 
 class VideoCaptureAndroid : public VideoCaptureImpl {
  public:
   static int32_t SetAndroidObjects(void* javaVM, void* javaContext);
-  static int32_t AttachAndUseAndroidDeviceInfoObjects(
-      JNIEnv*& env,
-      jclass& javaCmDevInfoClass,
-      jobject& javaCmDevInfoObject,
-      bool& attached);
-  static int32_t ReleaseAndroidDeviceInfoObjects(bool attached);
-
   VideoCaptureAndroid(const int32_t id);
   virtual int32_t Init(const int32_t id, const char* deviceUniqueIdUTF8);
 
-
   virtual int32_t StartCapture(
       const VideoCaptureCapability& capability);
   virtual int32_t StopCapture();
   virtual bool CaptureStarted();
   virtual int32_t CaptureSettings(VideoCaptureCapability& settings);
   virtual int32_t SetCaptureRotation(VideoCaptureRotation rotation);
 
+  friend class AutoLocalJNIFrame;
+
  protected:
   virtual ~VideoCaptureAndroid();
   static void JNICALL ProvideCameraFrame (JNIEnv * env,
                                           jobject,
                                           jbyteArray javaCameraFrame,
                                           jint length,
                                           jint rotation,
                                           jlong context);
   DeviceInfoAndroid _capInfo;
   jobject _javaCaptureObj; // Java Camera object.
   VideoCaptureCapability _frameInfo;
   bool _captureStarted;
-
   static JavaVM* g_jvm;
   static jclass g_javaCmClass;
   static jclass g_javaCmDevInfoClass;
   //Static java object implementing the needed device info functions;
   static jobject g_javaCmDevInfoObject;
 };
 
+// Reworked version of what is available in AndroidBridge; in addition to
+// pushing/popping a local frame, it can attach/detach the calling thread.
+class AutoLocalJNIFrame {
+public:
+    AutoLocalJNIFrame(int nEntries = 128)
+        : mEntries(nEntries), mHasFrameBeenPushed(false), mAttached(false)
+    {
+        mJNIEnv = InitJNIEnv();
+        Push();
+    }
+
+    JNIEnv* GetEnv() {
+        return mJNIEnv;
+    }
+
+    jclass GetCmDevInfoClass() {
+        assert(VideoCaptureAndroid::g_javaCmDevInfoClass != nullptr);
+        return VideoCaptureAndroid::g_javaCmDevInfoClass;
+    }
+
+    jobject GetCmDevInfoObject() {
+        assert(VideoCaptureAndroid::g_javaCmDevInfoObject != nullptr);
+        return VideoCaptureAndroid::g_javaCmDevInfoObject;
+    }
+
+    bool CheckForException() {
+        if (mJNIEnv->ExceptionCheck()) {
+            mJNIEnv->ExceptionDescribe();
+            mJNIEnv->ExceptionClear();
+            return true;
+        }
+
+        return false;
+    }
+
+    ~AutoLocalJNIFrame() {
+        if (!mJNIEnv)
+            return;
+
+        CheckForException();
+
+        if (mHasFrameBeenPushed)
+            mJNIEnv->PopLocalFrame(NULL);
+
+        if (mAttached) {
+            int res = VideoCaptureAndroid::g_jvm->DetachCurrentThread();
+            if (res < 0) {
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                             "%s: JVM Detach failed.", __FUNCTION__);
+            }
+        }
+    }
+
+private:
+    void Push() {
+        if (!mJNIEnv)
+            return;
+
+        // Make sure there is enough space to store a local ref to the
+        // exception.  I am not completely sure this is needed, but does
+        // not hurt.
+        jint ret = mJNIEnv->PushLocalFrame(mEntries + 1);
+        assert(ret == 0);
+        if (ret < 0)
+            CheckForException();
+        else
+            mHasFrameBeenPushed = true;
+    }
+
+    JNIEnv* InitJNIEnv()
+    {
+        JNIEnv* env = nullptr;
+
+        // Get the JNI env for this thread.
+        if (!VideoCaptureAndroid::g_jvm) {
+            WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                         "%s: SetAndroidObjects not called with a valid JVM.",
+                         __FUNCTION__);
+            return nullptr;
+        }
+
+        jint res = VideoCaptureAndroid::g_jvm->GetEnv((void**) &env, JNI_VERSION_1_4);
+        if (res == JNI_EDETACHED) {
+            // Try to attach this thread to the JVM and get the env.
+            res = VideoCaptureAndroid::g_jvm->AttachCurrentThread(&env, NULL);
+            if ((res < 0) || !env) {
+                // Attaching failed, error out.
+                WEBRTC_TRACE(webrtc::kTraceError, webrtc::kTraceVideoCapture, -1,
+                             "%s: Could not attach thread to JVM (%d, %p)",
+                             __FUNCTION__, res, env);
+                return nullptr;
+            }
+            mAttached = true;
+            WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                         "%s: attach success", __FUNCTION__);
+        } else if (res == JNI_OK) {
+            // Already attached, GetEnv succeeded.
+            WEBRTC_TRACE(webrtc::kTraceStateInfo, webrtc::kTraceVideoCapture, -1,
+                         "%s: did not attach because JVM Env already present",
+                         __FUNCTION__);
+        } else {
+            // Non-recoverable error in GetEnv.
+            return nullptr;
+        }
+
+        return env;
+    }
+
+    int mEntries;
+    JNIEnv* mJNIEnv;
+    bool mHasFrameBeenPushed;
+    bool mAttached;
+};
+
 }  // namespace videocapturemodule
 }  // namespace webrtc
 #endif // WEBRTC_MODULES_VIDEO_CAPTURE_MAIN_SOURCE_ANDROID_VIDEO_CAPTURE_ANDROID_H_
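
For reference, a caller-side sketch of how AutoLocalJNIFrame is meant to be used, mirroring the pattern in device_info_android.cc above (QueryNumberOfDevices is a hypothetical free function, not part of the patch; error handling is abbreviated):

  #include <jni.h>
  #include "video_capture_android.h"

  using webrtc::videocapturemodule::AutoLocalJNIFrame;

  static jint QueryNumberOfDevices() {
    AutoLocalJNIFrame jniFrame;   // Attaches this thread if needed, pushes a local frame.
    JNIEnv* env = jniFrame.GetEnv();
    if (!env)
      return 0;                   // No JVM registered, or the attach failed.

    jclass devInfoClass = jniFrame.GetCmDevInfoClass();
    jobject devInfoObject = jniFrame.GetCmDevInfoObject();

    jmethodID mid = env->GetMethodID(devInfoClass, "NumberOfDevices", "()I");
    if (mid == NULL || jniFrame.CheckForException())
      return 0;

    jint n = env->CallIntMethod(devInfoObject, mid);
    if (jniFrame.CheckForException())
      return 0;

    return n;
    // ~AutoLocalJNIFrame pops the local frame and, if this object attached
    // the thread, detaches it again.
  }
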