Backed out 3 changesets (bug 1509316) for MDA failures on dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
author Dorel Luca <dluca@mozilla.com>
Fri, 27 Sep 2019 23:31:51 +0300
changeset 495450 6121d0104cd004af92b82874103ed7feb0cc2c6a
parent 495449 a057810d24fb8a118f01d350e8bd29723678fe54
child 495451 bbd6585e5a80c45a03f2eb8046fb0a53d6a6569a
push id 96625
push user dluca@mozilla.com
push date Fri, 27 Sep 2019 20:33:21 +0000
treeherder autoland@6121d0104cd0
bugs 1509316
milestone 71.0a1
backs out d64c35b9c21149f01d57e1e2608900253fc57643
817f14af91f0927746c00309f013351783465389
b4bee18bb4997b5fd10bf567c303c0882adc491f
Backed out 3 changesets (bug 1509316) for MDA failures on dom/media/tests/mochitest/test_peerConnection_captureStream_canvas_2d.html
Backed out changeset d64c35b9c211 (bug 1509316)
Backed out changeset 817f14af91f0 (bug 1509316)
Backed out changeset b4bee18bb499 (bug 1509316)
dom/media/MediaData.cpp
dom/media/MediaData.h
dom/media/VideoFrameContainer.cpp
dom/media/platforms/android/RemoteDataDecoder.cpp
gfx/layers/GLImages.h
modules/libpref/init/StaticPrefList.yaml
--- a/dom/media/MediaData.cpp
+++ b/dom/media/MediaData.cpp
@@ -219,16 +219,35 @@ VideoData::VideoData(int64_t aOffset, co
       mNextKeyFrameTime(TimeUnit::Invalid()) {
   MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
   mKeyframe = aKeyframe;
   mTimecode = aTimecode;
 }
 
 VideoData::~VideoData() {}
 
+void VideoData::SetListener(UniquePtr<Listener> aListener) {
+  MOZ_ASSERT(!mSentToCompositor,
+             "Listener should be registered before sending data");
+
+  mListener = std::move(aListener);
+}
+
+void VideoData::MarkSentToCompositor() {
+  if (mSentToCompositor) {
+    return;
+  }
+
+  mSentToCompositor = true;
+  if (mListener != nullptr) {
+    mListener->OnSentToCompositor();
+    mListener = nullptr;
+  }
+}
+
 size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
   size_t size = aMallocSizeOf(this);
 
   // Currently only PLANAR_YCBCR has a well defined function for determining
   // it's size, so reporting is limited to that type.
   if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
     const mozilla::layers::PlanarYCbCrImage* img =
         static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
--- a/dom/media/MediaData.h
+++ b/dom/media/MediaData.h
@@ -446,16 +446,22 @@ class VideoData : public MediaData {
     };
 
     Plane mPlanes[3];
     YUVColorSpace mYUVColorSpace = YUVColorSpace::UNKNOWN;
     ColorDepth mColorDepth = ColorDepth::COLOR_8;
     ColorRange mColorRange = ColorRange::LIMITED;
   };
 
+  class Listener {
+   public:
+    virtual void OnSentToCompositor() = 0;
+    virtual ~Listener() {}
+  };
+
   // Constructs a VideoData object. If aImage is nullptr, creates a new Image
   // holding a copy of the YCbCr data passed in aBuffer. If aImage is not
   // nullptr, it's stored as the underlying video image and aBuffer is assumed
   // to point to memory within aImage so no copy is made. aTimecode is a codec
   // specific number representing the timestamp of the frame of video data.
   // Returns nsnull if an error occurs. This may indicate that memory couldn't
   // be allocated to create the VideoData object, or it may indicate some
   // problem with the input data (e.g. negative stride).
@@ -500,32 +506,34 @@ class VideoData : public MediaData {
 
   int32_t mFrameID;
 
   VideoData(int64_t aOffset, const media::TimeUnit& aTime,
             const media::TimeUnit& aDuration, bool aKeyframe,
             const media::TimeUnit& aTimecode, IntSize aDisplay,
             uint32_t aFrameID);
 
-  void MarkSentToCompositor() { mSentToCompositor = true; }
+  void SetListener(UniquePtr<Listener> aListener);
+  void MarkSentToCompositor();
   bool IsSentToCompositor() { return mSentToCompositor; }
 
   void UpdateDuration(const media::TimeUnit& aDuration);
   void UpdateTimestamp(const media::TimeUnit& aTimestamp);
 
   void SetNextKeyFrameTime(const media::TimeUnit& aTime) {
     mNextKeyFrameTime = aTime;
   }
 
   const media::TimeUnit& NextKeyFrameTime() const { return mNextKeyFrameTime; }
 
  protected:
   ~VideoData();
 
   bool mSentToCompositor;
+  UniquePtr<Listener> mListener;
   media::TimeUnit mNextKeyFrameTime;
 };
 
 enum class CryptoScheme : uint8_t {
   None,
   Cenc,
   Cbcs,
 };
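The VideoData::Listener mechanism restored by the two MediaData hunks above is small enough to restate as a standalone program. This is a sketch only: std::unique_ptr stands in for mozilla::UniquePtr, assert for MOZ_ASSERT, and LoggingListener plus the main driver are invented for illustration.

#include <cassert>
#include <iostream>
#include <memory>
#include <utility>

// Simplified stand-in mirroring the restored VideoData listener interface.
class VideoData {
 public:
  class Listener {
   public:
    virtual void OnSentToCompositor() = 0;
    virtual ~Listener() = default;
  };

  void SetListener(std::unique_ptr<Listener> aListener) {
    // Listeners must be registered before the frame is handed to the
    // compositor, otherwise the notification could be missed.
    assert(!mSentToCompositor);
    mListener = std::move(aListener);
  }

  void MarkSentToCompositor() {
    if (mSentToCompositor) {
      return;  // Only notify once per frame.
    }
    mSentToCompositor = true;
    if (mListener) {
      mListener->OnSentToCompositor();
      mListener = nullptr;  // One-shot: drop the listener after firing.
    }
  }

  bool IsSentToCompositor() const { return mSentToCompositor; }

 private:
  bool mSentToCompositor = false;
  std::unique_ptr<Listener> mListener;
};

// Hypothetical listener used only to make the example observable.
class LoggingListener : public VideoData::Listener {
 public:
  void OnSentToCompositor() override {
    std::cout << "frame reached the compositor\n";
  }
};

int main() {
  VideoData frame;
  frame.SetListener(std::make_unique<LoggingListener>());
  frame.MarkSentToCompositor();  // Fires the callback.
  frame.MarkSentToCompositor();  // No-op on repeat calls.
  return frame.IsSentToCompositor() ? 0 : 1;
}

The one-shot semantics (notify once, then drop the listener) are what RemoteVideoDecoder relies on below to release each Android output buffer at most once.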
--- a/dom/media/VideoFrameContainer.cpp
+++ b/dom/media/VideoFrameContainer.cpp
@@ -1,21 +1,17 @@
 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
 /* vim:set ts=2 sw=2 sts=2 et cindent: */
 /* This Source Code Form is subject to the terms of the Mozilla Public
  * License, v. 2.0. If a copy of the MPL was not distributed with this file,
  * You can obtain one at http://mozilla.org/MPL/2.0/. */
 
 #include "VideoFrameContainer.h"
-
-#ifdef MOZ_WIDGET_ANDROID
-#include "GLImages.h"  // for SurfaceTextureImage
-#endif
+#include "mozilla/Telemetry.h"
 #include "MediaDecoderOwner.h"
-#include "mozilla/Telemetry.h"
 
 using namespace mozilla::layers;
 
 namespace mozilla {
 static LazyLogModule gVideoFrameContainerLog("VideoFrameContainer");
 #define CONTAINER_LOG(type, msg) MOZ_LOG(gVideoFrameContainerLog, type, msg)
 
 #define NS_DispatchToMainThread(...) CompileError_UseAbstractMainThreadInstead
@@ -77,60 +73,33 @@ void VideoFrameContainer::UpdatePrincipa
     const ImageContainer::FrameID& aFrameID) {
   if (mPendingPrincipalHandle == aPrincipalHandle) {
     return;
   }
   mPendingPrincipalHandle = aPrincipalHandle;
   mFrameIDForPendingPrincipalHandle = aFrameID;
 }
 
-#ifdef MOZ_WIDGET_ANDROID
-static void NotifySetCurrent(Image* aImage) {
-  if (aImage == nullptr) {
-    return;
-  }
-
-  SurfaceTextureImage* image = aImage->AsSurfaceTextureImage();
-  if (image == nullptr) {
-    return;
-  }
-
-  image->OnSetCurrent();
-}
-#endif
-
 void VideoFrameContainer::SetCurrentFrame(const gfx::IntSize& aIntrinsicSize,
                                           Image* aImage,
                                           const TimeStamp& aTargetTime) {
-#ifdef MOZ_WIDGET_ANDROID
-    NotifySetCurrent(aImage);
-#endif
   if (aImage) {
     MutexAutoLock lock(mMutex);
     AutoTArray<ImageContainer::NonOwningImage, 1> imageList;
     imageList.AppendElement(
         ImageContainer::NonOwningImage(aImage, aTargetTime, ++mFrameID));
     SetCurrentFramesLocked(aIntrinsicSize, imageList);
   } else {
     ClearCurrentFrame(aIntrinsicSize);
   }
 }
 
 void VideoFrameContainer::SetCurrentFrames(
     const gfx::IntSize& aIntrinsicSize,
     const nsTArray<ImageContainer::NonOwningImage>& aImages) {
-#ifdef MOZ_WIDGET_ANDROID
-  // When there are multiple frames, only the last one is effective
-  // (see bug 1299068 comment 4). Here I just count on VideoSink and VideoOutput
-  // to send one frame at a time and warn if not.
-  Unused << NS_WARN_IF(aImages.Length() > 1);
-  for (auto& image : aImages) {
-    NotifySetCurrent(image.mImage);
-  }
-#endif
   MutexAutoLock lock(mMutex);
   SetCurrentFramesLocked(aIntrinsicSize, aImages);
 }
 
 void VideoFrameContainer::SetCurrentFramesLocked(
     const gfx::IntSize& aIntrinsicSize,
     const nsTArray<ImageContainer::NonOwningImage>& aImages) {
   mMutex.AssertCurrentThreadOwns();
--- a/dom/media/platforms/android/RemoteDataDecoder.cpp
+++ b/dom/media/platforms/android/RemoteDataDecoder.cpp
@@ -57,24 +57,23 @@ class RenderOrReleaseOutput {
   CodecProxy::GlobalRef mCodec;
   Sample::GlobalRef mSample;
 };
 
 class RemoteVideoDecoder : public RemoteDataDecoder {
  public:
   // Render the output to the surface when the frame is sent
   // to compositor, or release it if not presented.
-  class CompositeListener
-      : private RenderOrReleaseOutput,
-        public layers::SurfaceTextureImage::SetCurrentCallback {
+  class CompositeListener : private RenderOrReleaseOutput,
+                            public VideoData::Listener {
    public:
     CompositeListener(CodecProxy::Param aCodec, Sample::Param aSample)
         : RenderOrReleaseOutput(aCodec, aSample) {}
 
-    void operator()(void) override { ReleaseOutput(true); }
+    void OnSentToCompositor() override { ReleaseOutput(true); }
   };
 
   class InputInfo {
    public:
     InputInfo() {}
 
     InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
               const gfx::IntSize& aDisplaySize)
@@ -251,17 +250,17 @@ class RemoteVideoDecoder : public Remote
     }
 
     AssertOnTaskQueue();
     if (GetState() == State::SHUTDOWN) {
       aSample->Dispose();
       return;
     }
 
-    UniquePtr<layers::SurfaceTextureImage::SetCurrentCallback> releaseSample(
+    UniquePtr<VideoData::Listener> releaseSample(
         new CompositeListener(mJavaDecoder, aSample));
 
     BufferInfo::LocalRef info = aSample->Info();
     MOZ_ASSERT(info);
 
     int32_t flags;
     bool ok = NS_SUCCEEDED(info->Flags(&flags));
 
@@ -287,26 +286,25 @@ class RemoteVideoDecoder : public Remote
       // Ignore output with no corresponding input.
       return;
     }
 
     if (ok && (size > 0 || presentationTimeUs >= 0)) {
       RefPtr<layers::Image> img = new layers::SurfaceTextureImage(
           mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
           gl::OriginPos::BottomLeft, mConfig.HasAlpha());
-      img->AsSurfaceTextureImage()->RegisterSetCurrentCallback(
-          std::move(releaseSample));
 
       RefPtr<VideoData> v = VideoData::CreateFromImage(
           inputInfo.mDisplaySize, offset,
           TimeUnit::FromMicroseconds(presentationTimeUs),
-          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img.forget(),
+          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img,
           !!(flags & MediaCodec::BUFFER_FLAG_SYNC_FRAME),
           TimeUnit::FromMicroseconds(presentationTimeUs));
 
+      v->SetListener(std::move(releaseSample));
       RemoteDataDecoder::UpdateOutputStatus(std::move(v));
     }
 
     if (isEOS) {
       DrainComplete();
     }
   }
 
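On Android the decoded frame lives in an opaque SurfaceTexture buffer, so "sent to compositor" is the moment the buffer should actually be rendered, and a frame that never gets there must still be handed back to the codec unrendered. Below is a standalone sketch of how CompositeListener expresses that through VideoData::Listener. RenderOrReleaseOutput is reduced to a logging stand-in; the integer buffer index and console output are invented for illustration (the real class hands the sample back through the Java CodecProxy).

#include <iostream>
#include <memory>

// Minimal stand-in for the listener interface from MediaData.h.
class Listener {
 public:
  virtual void OnSentToCompositor() = 0;
  virtual ~Listener() = default;
};

// Stand-in for RenderOrReleaseOutput: owns a decoded output buffer and either
// renders it to the output surface or releases it unrendered.
class RenderOrReleaseOutput {
 public:
  explicit RenderOrReleaseOutput(int aBufferIndex) : mBufferIndex(aBufferIndex) {}
  ~RenderOrReleaseOutput() { ReleaseOutput(false); }  // Not presented: drop it.

 protected:
  void ReleaseOutput(bool aToSurface) {
    if (mBufferIndex < 0) {
      return;  // Already released.
    }
    // The real class asks the Java codec proxy to release the sample,
    // rendering it to the surface when requested; here we just log.
    std::cout << "release buffer " << mBufferIndex << ", render="
              << std::boolalpha << aToSurface << "\n";
    mBufferIndex = -1;
  }

 private:
  int mBufferIndex;
};

// Mirrors RemoteVideoDecoder::CompositeListener: render the output when the
// frame is sent to the compositor, or release it unrendered otherwise.
class CompositeListener : private RenderOrReleaseOutput, public Listener {
 public:
  explicit CompositeListener(int aBufferIndex)
      : RenderOrReleaseOutput(aBufferIndex) {}
  void OnSentToCompositor() override { ReleaseOutput(true); }
};

int main() {
  {  // Frame that reaches the compositor: rendered to the surface.
    std::unique_ptr<Listener> l = std::make_unique<CompositeListener>(7);
    l->OnSentToCompositor();
  }
  {  // Frame dropped before compositing: released without rendering.
    std::unique_ptr<Listener> l = std::make_unique<CompositeListener>(8);
  }
  return 0;
}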
--- a/gfx/layers/GLImages.h
+++ b/gfx/layers/GLImages.h
@@ -26,22 +26,16 @@ class GLImage : public Image {
 
   GLImage* AsGLImage() override { return this; }
 };
 
 #ifdef MOZ_WIDGET_ANDROID
 
 class SurfaceTextureImage : public GLImage {
  public:
-  class SetCurrentCallback {
-   public:
-    virtual void operator()(void) = 0;
-    virtual ~SetCurrentCallback() {}
-  };
-
   SurfaceTextureImage(AndroidSurfaceTextureHandle aHandle,
                       const gfx::IntSize& aSize, bool aContinuous,
                       gl::OriginPos aOriginPos, bool aHasAlpha = true);
 
   gfx::IntSize GetSize() const override { return mSize; }
   AndroidSurfaceTextureHandle GetHandle() const { return mHandle; }
   bool GetContinuous() const { return mContinuous; }
   gl::OriginPos GetOriginPos() const { return mOriginPos; }
@@ -51,34 +45,22 @@ class SurfaceTextureImage : public GLIma
     // We can implement this, but currently don't want to because it will cause
     // the SurfaceTexture to be permanently bound to the snapshot readback
     // context.
     return nullptr;
   }
 
   SurfaceTextureImage* AsSurfaceTextureImage() override { return this; }
 
-  void RegisterSetCurrentCallback(UniquePtr<SetCurrentCallback> aCallback) {
-    mSetCurrentCallback = std::move(aCallback);
-  }
-
-  void OnSetCurrent() {
-    if (mSetCurrentCallback) {
-      (*mSetCurrentCallback)();
-      mSetCurrentCallback.reset();
-    }
-  }
-
  private:
   AndroidSurfaceTextureHandle mHandle;
   gfx::IntSize mSize;
   bool mContinuous;
   gl::OriginPos mOriginPos;
   const bool mHasAlpha;
-  UniquePtr<SetCurrentCallback> mSetCurrentCallback;
 };
 
 #endif  // MOZ_WIDGET_ANDROID
 
 }  // namespace layers
 }  // namespace mozilla
 
 #endif  // GFX_GLIMAGES_H
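For contrast, the mechanism this backout removes from GLImages.h and VideoFrameContainer.cpp hung the callback off the image rather than the VideoData, which is why VideoFrameContainer needed Android-only code to fire it. A standalone restatement of that deleted path (std::unique_ptr replaces mozilla::UniquePtr; PrintCallback and the driver are illustrative):

#include <iostream>
#include <memory>

// Stand-in for the SurfaceTextureImage callback mechanism removed above: the
// image itself owned a one-shot functor that VideoFrameContainer invoked from
// SetCurrentFrame(s) on Android.
class SurfaceTextureImage {
 public:
  class SetCurrentCallback {
   public:
    virtual void operator()() = 0;
    virtual ~SetCurrentCallback() = default;
  };

  void RegisterSetCurrentCallback(std::unique_ptr<SetCurrentCallback> aCallback) {
    mSetCurrentCallback = std::move(aCallback);
  }

  // Called when the image becomes the container's current frame.
  void OnSetCurrent() {
    if (mSetCurrentCallback) {
      (*mSetCurrentCallback)();
      mSetCurrentCallback.reset();  // Fire at most once per image.
    }
  }

 private:
  std::unique_ptr<SetCurrentCallback> mSetCurrentCallback;
};

// Mirrors the deleted static helper in VideoFrameContainer.cpp.
static void NotifySetCurrent(SurfaceTextureImage* aImage) {
  if (aImage) {
    aImage->OnSetCurrent();
  }
}

// Hypothetical callback used only to make the example observable.
class PrintCallback : public SurfaceTextureImage::SetCurrentCallback {
 public:
  void operator()() override { std::cout << "image set as current frame\n"; }
};

int main() {
  SurfaceTextureImage image;
  image.RegisterSetCurrentCallback(std::make_unique<PrintCallback>());
  NotifySetCurrent(&image);  // Fires the callback.
  NotifySetCurrent(&image);  // Second call is a no-op.
  return 0;
}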
--- a/modules/libpref/init/StaticPrefList.yaml
+++ b/modules/libpref/init/StaticPrefList.yaml
@@ -6086,30 +6086,33 @@
 -     name: media.navigator.hardware.vp8_decode.acceleration_enabled
       type: bool
       value: false
       mirror: never
   #endif  # ANDROID
 
   # Use MediaDataDecoder API for VP8/VP9 in WebRTC. This includes hardware
   # acceleration for decoding.
+  # disable on android bug 1509316
 -   name: media.navigator.mediadatadecoder_vpx_enabled
     type: RelaxedAtomicBool
-  #if defined(NIGHTLY_BUILD)
+  #if defined(NIGHTLY_BUILD) && !defined(ANDROID)
     value: true
   #else
     value: false
   #endif
     mirror: always
 
   # Use MediaDataDecoder API for H264 in WebRTC. This includes hardware
-  # acceleration for decoding.
+  # acceleration for decoding. False on Android due to bug 1509316.
 -   name: media.navigator.mediadatadecoder_h264_enabled
     type: RelaxedAtomicBool
-  #if defined(_ARM64_) && defined(XP_WIN)
+  #if defined(ANDROID)
+    value: false
+  #elif defined(_ARM64_) && defined(XP_WIN)
     value: false
   #else
     value: true
   #endif
     mirror: always
 
 #endif  # MOZ_WEBRTC
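The net effect of the pref changes is that both MediaDataDecoder prefs now default to false on Android. A standalone restatement of the two #if chains above (the build macros are Gecko's and will not be defined in a plain compile; this only illustrates the branching):

#include <cstdio>

int main() {
#if defined(NIGHTLY_BUILD) && !defined(ANDROID)
  const bool vpxMediaDataDecoder = true;   // Nightly desktop builds only.
#else
  const bool vpxMediaDataDecoder = false;
#endif

#if defined(ANDROID)
  const bool h264MediaDataDecoder = false;  // Disabled by this backout (bug 1509316).
#elif defined(_ARM64_) && defined(XP_WIN)
  const bool h264MediaDataDecoder = false;
#else
  const bool h264MediaDataDecoder = true;
#endif

  std::printf("media.navigator.mediadatadecoder_vpx_enabled default: %d\n",
              vpxMediaDataDecoder);
  std::printf("media.navigator.mediadatadecoder_h264_enabled default: %d\n",
              h264MediaDataDecoder);
  return 0;
}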