Bug 1440255 - Make VideoCaptureAndroid reconfigurable through subsequent startCapture. r=dminor, a=RyanVM
author Andreas Pehrson <pehrsons@mozilla.com>
Wed, 04 Apr 2018 20:17:21 +0200
changeset 463118 866aefe6026da5de622d865ffc6f6ec21ddafeee
parent 463117 88d31bceba4719c1f7c3d3ca7d75bfa09ab900c1
child 463119 4e9b4893ee3e2b9d8a8f73ed5b50f49428ff5dbb
push id1683
push user sfraser@mozilla.com
push date Thu, 26 Apr 2018 16:43:40 +0000
treeherder mozilla-release@5af6cb21869d [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers dminor, RyanVM
bugs 1440255
milestone 60.0
Bug 1440255 - Make VideoCaptureAndroid reconfigurable through subsequent startCapture. r=dminor, a=RyanVM MozReview-Commit-ID: 9yslx7rMN2w
media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
--- a/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
+++ b/media/webrtc/trunk/webrtc/modules/video_capture/android/java/src/org/webrtc/videoengine/VideoCaptureAndroid.java
@@ -104,23 +104,22 @@ public class VideoCaptureAndroid impleme
   // thread that calls open(), so this is done on the CameraThread.  Since ViE
   // API needs a synchronous success return value we wait for the result.
   @WebRTCJNITarget
   private synchronized boolean startCapture(
       final int width, final int height,
       final int min_mfps, final int max_mfps) {
     Log.d(TAG, "startCapture: " + width + "x" + height + "@" +
         min_mfps + ":" + max_mfps);
-    if (cameraThread != null || cameraThreadHandler != null) {
-      throw new RuntimeException("Camera thread already started!");
+    if (cameraThread == null && cameraThreadHandler == null) {
+      Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
+      cameraThread = new CameraThread(handlerExchanger);
+      cameraThread.start();
+      cameraThreadHandler = exchange(handlerExchanger, null);
     }
-    Exchanger<Handler> handlerExchanger = new Exchanger<Handler>();
-    cameraThread = new CameraThread(handlerExchanger);
-    cameraThread.start();
-    cameraThreadHandler = exchange(handlerExchanger, null);
 
     final Exchanger<Boolean> result = new Exchanger<Boolean>();
     cameraThreadHandler.post(new Runnable() {
         @Override public void run() {
           boolean startResult =
             startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
           exchange(result, startResult);
         }
@@ -137,60 +136,82 @@ public class VideoCaptureAndroid impleme
     // synchronizing us either. ProvideCameraFrame has to do the null check.
     native_capturer = 0;
   }
 
   private boolean startCaptureOnCameraThread(
       int width, int height, int min_mfps, int max_mfps) {
     Throwable error = null;
     try {
-      camera = Camera.open(id);
+      boolean isRunning = camera != null;
+      if (!isRunning) {
+        camera = Camera.open(id);
 
-      if (localPreview != null) {
-        localPreview.addCallback(this);
-        if (localPreview.getSurface() != null &&
-            localPreview.getSurface().isValid()) {
-	  try {
-	    camera.setPreviewDisplay(localPreview);
-	  } catch (IOException e) {
-	    throw new RuntimeException(e);
-	  }
-        }
-      } else {
-        // No local renderer (we only care about onPreviewFrame() buffers, not a
-        // directly-displayed UI element).  Camera won't capture without
-        // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
-        // it over to Camera, but never listen for frame-ready callbacks,
-        // and never call updateTexImage on it.
-        try {
-          cameraGlTextures = new int[1];
-          // Generate one texture pointer and bind it as an external texture.
-          GLES20.glGenTextures(1, cameraGlTextures, 0);
-          GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              cameraGlTextures[0]);
-          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
-          GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
-          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
-          GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
-              GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
+        if (localPreview != null) {
+          localPreview.addCallback(this);
+          if (localPreview.getSurface() != null &&
+              localPreview.getSurface().isValid()) {
+	    try {
+	      camera.setPreviewDisplay(localPreview);
+	    } catch (IOException e) {
+	      throw new RuntimeException(e);
+	    }
+          }
+        } else {
+          // No local renderer (we only care about onPreviewFrame() buffers, not a
+          // directly-displayed UI element).  Camera won't capture without
+          // setPreview{Texture,Display}, so we create a SurfaceTexture and hand
+          // it over to Camera, but never listen for frame-ready callbacks,
+          // and never call updateTexImage on it.
+          try {
+            cameraGlTextures = new int[1];
+            // Generate one texture pointer and bind it as an external texture.
+            GLES20.glGenTextures(1, cameraGlTextures, 0);
+            GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                cameraGlTextures[0]);
+            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
+            GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
+            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
+            GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
+                GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
 
-          cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
-          cameraSurfaceTexture.setOnFrameAvailableListener(null);
-          camera.setPreviewTexture(cameraSurfaceTexture);
-        } catch (IOException e) {
-          throw new RuntimeException(e);
+            cameraSurfaceTexture = new SurfaceTexture(cameraGlTextures[0]);
+            cameraSurfaceTexture.setOnFrameAvailableListener(null);
+            camera.setPreviewTexture(cameraSurfaceTexture);
+          } catch (IOException e) {
+            throw new RuntimeException(e);
+          }
         }
       }
 
       Log.d(TAG, "Camera orientation: " + info.orientation +
           " .Device orientation: " + getDeviceOrientation());
       Camera.Parameters parameters = camera.getParameters();
+
+      if (isRunning) {
+        Camera.Size size = parameters.getPreviewSize();
+
+        int[] fpsRange = new int[2];
+        parameters.getPreviewFpsRange(fpsRange);
+        int minFps = fpsRange[Parameters.PREVIEW_FPS_MIN_INDEX] / frameDropRatio;
+        int maxFps = fpsRange[Parameters.PREVIEW_FPS_MAX_INDEX] / frameDropRatio;
+        if (size.width == width && size.height == height &&
+            minFps == min_mfps && maxFps == max_mfps) {
+          return true;
+        } else {
+          if (!stopCaptureOnCameraThread()) {
+            throw new RuntimeException("Stopping on reconfig failed");
+          }
+          return startCaptureOnCameraThread(width, height, min_mfps, max_mfps);
+        }
+      }
+
       Log.d(TAG, "isVideoStabilizationSupported: " +
           parameters.isVideoStabilizationSupported());
       if (parameters.isVideoStabilizationSupported()) {
         parameters.setVideoStabilization(true);
       }
 
       List<String> focusModes = parameters.getSupportedFocusModes();
       if (focusModes.contains(android.hardware.Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO)) {