Backed out changeset 1870acb46d67 (bug 1434538) for permafailing mda's dom/media/tests/mochitest/test_peerConnection_twoAudioVideoStreams.html, at least on Linux. a=backout on a CLOSED TREE DEVEDITION_59_0b8_BUILD2 DEVEDITION_59_0b8_RELEASE FIREFOX_59_0b8_BUILD2 FIREFOX_59_0b8_RELEASE
author: Sebastian Hengst <archaeopteryx@coole-files.de>
Thu, 08 Feb 2018 21:36:42 +0200
changeset 753376 73ef186ad51ac2c4fc27b1f149fcc94355fe7dc6
parent 753375 aa1153f2621f4e896b92796f572f6d9bd42413a6
child 753377 6d8199a6e629e07fd4cea1bda1d7447f1ff4358c
push id: 98553
push user: bmo:edilee@mozilla.com
push date: Fri, 09 Feb 2018 23:52:17 +0000
reviewers: backout
bugs: 1434538
milestone: 59.0
backs out: 1870acb46d67b36dcd407be651240b741875819c
Backed out changeset 1870acb46d67 (bug 1434538) for permafailing mda's dom/media/tests/mochitest/test_peerConnection_twoAudioVideoStreams.html, at least on Linux. a=backout on a CLOSED TREE
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -451,146 +451,123 @@ MediaEngineRemoteVideoSource::DeliverFra
 {
   MonitorAutoLock lock(mMonitor);
   // Check for proper state.
   if (mState != kStarted || !mImageContainer) {
     LOG(("DeliverFrame: video not started"));
     return 0;
   }
 
-  int width = aProps.width();
-  int height = aProps.height();
-
   // Update the dimensions
-  FrameSizeChange(width, height);
+  FrameSizeChange(aProps.width(), aProps.height());
 
   MOZ_ASSERT(mSources.Length() == mPrincipalHandles.Length());
   MOZ_ASSERT(mSources.Length() == mTargetCapabilities.Length());
   MOZ_ASSERT(mSources.Length() == mHandleIds.Length());
   MOZ_ASSERT(mSources.Length() == mImages.Length());
 
-  // Take copies while iterating so handles or capabilities don't change while
-  // we unlock for rescale and copy
-  AutoTArray<uint64_t, 5> handleIds(mHandleIds);
-  AutoTArray<webrtc::CaptureCapability, 5> targetCapabilities(mTargetCapabilities);
-  for (uint32_t i = 0; i < targetCapabilities.Length(); ++i) {
-    int32_t req_max_width = targetCapabilities[i].width & 0xffff;
-    int32_t req_max_height = targetCapabilities[i].height & 0xffff;
-    int32_t req_ideal_width = (targetCapabilities[i].width >> 16) & 0xffff;
-    int32_t req_ideal_height = (targetCapabilities[i].height >> 16) & 0xffff;
+  for (uint32_t i = 0; i < mTargetCapabilities.Length(); i++ ) {
+    int32_t req_max_width = mTargetCapabilities[i].width & 0xffff;
+    int32_t req_max_height = mTargetCapabilities[i].height & 0xffff;
+    int32_t req_ideal_width = (mTargetCapabilities[i].width >> 16) & 0xffff;
+    int32_t req_ideal_height = (mTargetCapabilities[i].height >> 16) & 0xffff;
 
     int32_t dest_max_width = std::min(req_max_width, mWidth);
     int32_t dest_max_height = std::min(req_max_height, mHeight);
     // This logic works for both camera and screen sharing case.
     // for camera case, req_ideal_width and req_ideal_height is 0.
     // The following snippet will set dst_width to dest_max_width and dst_height to dest_max_height
     int32_t dst_width = std::min(req_ideal_width > 0 ? req_ideal_width : mWidth, dest_max_width);
     int32_t dst_height = std::min(req_ideal_height > 0 ? req_ideal_height : mHeight, dest_max_height);
 
     int dst_stride_y = dst_width;
     int dst_stride_uv = (dst_width + 1) / 2;
 
-    RefPtr<layers::PlanarYCbCrImage> image;
-    {
-      // Unlock the monitor while rescaling and copying to avoid slowing
-      // the MediaStreamGraph (i.e., the audio callback!)
-      MonitorAutoUnlock open(mMonitor);
-      camera::VideoFrameProperties properties;
-      UniquePtr<uint8_t []> frameBuf;
-      uint8_t* frame;
-      bool needReScale = !((dst_width == width && dst_height == height) ||
-                           (dst_width > width || dst_height > height));
-
-      if (!needReScale) {
-        dst_width = width;
-        dst_height = height;
-        frame = aBuffer;
-      } else {
-        rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
-        i420Buffer = webrtc::I420Buffer::Create(width, height, width,
-                                                (width + 1) / 2, (width + 1) / 2);
-
-        const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
-                                                           aBuffer,
-                                                           0, 0,  // No cropping
-                                                           width, height,
-                                                           width * height * 3 / 2,
-                                                           webrtc::kVideoRotation_0,
-                                                           i420Buffer.get());
-
-        webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
-        if (conversionResult < 0) {
-          return 0;
-        }
-
-        rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
-        scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
-                                                  dst_stride_uv, dst_stride_uv);
+    camera::VideoFrameProperties properties;
+    UniquePtr<uint8_t []> frameBuf;
+    uint8_t* frame;
+    bool needReScale = !((dst_width == mWidth && dst_height == mHeight) ||
+                         (dst_width > mWidth || dst_height > mHeight));
 
-        scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
-        webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
-
-        VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
-        frameBuf.reset(new (fallible) uint8_t[properties.bufferSize()]);
-        frame = frameBuf.get();
-
-        if (!frame) {
-          return 0;
-        }
-
-        VideoFrameUtils::CopyVideoFrameBuffers(frame,
-                                               properties.bufferSize(), scaledFrame);
-      }
-
-      // Create a video frame and append it to the track.
-      image = mImageContainer->CreatePlanarYCbCrImage();
+    if (!needReScale) {
+      dst_width = mWidth;
+      dst_height = mHeight;
+      frame = aBuffer;
+    } else {
+      rtc::scoped_refptr<webrtc::I420Buffer> i420Buffer;
+      i420Buffer = webrtc::I420Buffer::Create(mWidth, mHeight, mWidth,
+                                              (mWidth + 1) / 2, (mWidth + 1) / 2);
 
-      const uint8_t lumaBpp = 8;
-      const uint8_t chromaBpp = 4;
-
-      layers::PlanarYCbCrData data;
+      const int conversionResult = webrtc::ConvertToI420(webrtc::kI420,
+                                                         aBuffer,
+                                                         0, 0,  // No cropping
+                                                         mWidth, mHeight,
+                                                         mWidth * mHeight * 3 / 2,
+                                                         webrtc::kVideoRotation_0,
+                                                         i420Buffer.get());
 
-      // Take lots of care to round up!
-      data.mYChannel = frame;
-      data.mYSize = IntSize(dst_width, dst_height);
-      data.mYStride = (dst_width * lumaBpp + 7) / 8;
-      data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
-      data.mCbChannel = frame + dst_height * data.mYStride;
-      data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
-      data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
-      data.mPicX = 0;
-      data.mPicY = 0;
-      data.mPicSize = IntSize(dst_width, dst_height);
-      data.mStereoMode = StereoMode::MONO;
-
-      if (!image->CopyData(data)) {
-        MOZ_ASSERT(false);
+      webrtc::VideoFrame captureFrame(i420Buffer, 0, 0, webrtc::kVideoRotation_0);
+      if (conversionResult < 0) {
         return 0;
       }
 
-#ifdef DEBUG
-      static uint32_t frame_num = 0;
-      LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
-                frame_num++, width, height,
-                aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
-#endif
+      rtc::scoped_refptr<webrtc::I420Buffer> scaledBuffer;
+      scaledBuffer = webrtc::I420Buffer::Create(dst_width, dst_height, dst_stride_y,
+                                                dst_stride_uv, dst_stride_uv);
+
+      scaledBuffer->CropAndScaleFrom(*captureFrame.video_frame_buffer().get());
+      webrtc::VideoFrame scaledFrame(scaledBuffer, 0, 0, webrtc::kVideoRotation_0);
+
+      VideoFrameUtils::InitFrameBufferProperties(scaledFrame, properties);
+      frameBuf.reset(new (fallible) uint8_t[properties.bufferSize()]);
+      frame = frameBuf.get();
+
+      if (!frame) {
+        return 0;
+      }
+
+      VideoFrameUtils::CopyVideoFrameBuffers(frame,
+                                             properties.bufferSize(), scaledFrame);
     }
 
-    // The allocated handles (and associated target capabilities) may have
-    // changed during rescale/copy. Try to locate the index of the current
-    // allocation handle and set the image if found.
-    // Note that mMonitor is now locked again.
+    // Create a video frame and append it to the track.
+    RefPtr<layers::PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
+
+    const uint8_t lumaBpp = 8;
+    const uint8_t chromaBpp = 4;
+
+    layers::PlanarYCbCrData data;
 
-    for (size_t j = 0; j < mHandleIds.Length(); ++j) {
-      if (handleIds[i] == mHandleIds[j]) {
-        // implicitly releases last image
-        mImages[j] = image.forget();
-        break;
-      }
+    // Take lots of care to round up!
+    data.mYChannel = frame;
+    data.mYSize = IntSize(dst_width, dst_height);
+    data.mYStride = (dst_width * lumaBpp + 7) / 8;
+    data.mCbCrStride = (dst_width * chromaBpp + 7) / 8;
+    data.mCbChannel = frame + dst_height * data.mYStride;
+    data.mCrChannel = data.mCbChannel + ((dst_height + 1) / 2) * data.mCbCrStride;
+    data.mCbCrSize = IntSize((dst_width + 1) / 2, (dst_height + 1) / 2);
+    data.mPicX = 0;
+    data.mPicY = 0;
+    data.mPicSize = IntSize(dst_width, dst_height);
+    data.mStereoMode = StereoMode::MONO;
+
+    if (!image->CopyData(data)) {
+      MOZ_ASSERT(false);
+      return 0;
     }
+
+#ifdef DEBUG
+    static uint32_t frame_num = 0;
+    LOGFRAME(("frame %d (%dx%d); timeStamp %u, ntpTimeMs %" PRIu64 ", renderTimeMs %" PRIu64,
+              frame_num++, mWidth, mHeight,
+              aProps.timeStamp(), aProps.ntpTimeMs(), aProps.renderTimeMs()));
+#endif
+
+    // implicitly releases last image
+    mImages[i] = image.forget();
   }
 
   // We'll push the frame into the MSG on the next NotifyPull. This will avoid
   // swamping the MSG with frames should it be taking longer than normal to run
   // an iteration.
 
   return 0;
 }