Bug 1660336 Implement DMABufSurfaceWrapper and VAAPIDisplayHolder as templates, r=jya
☠☠ backed out by 0ecc51ff0cdd ☠☠
author: Martin Stransky <stransky@redhat.com>
Thu, 26 Nov 2020 21:25:38 +0000
changeset 558531 5ccda5ab6563123911565d5ddb8999ae4cbcc7a2
parent 558530 e6f396b25887b75f78b23f050c3c822e39f9cf3f
child 558532 373a658bb281eb9099a6ca6534cd33caf513c0ff
push id: 37984
push user: nbeleuzu@mozilla.com
push date: Fri, 27 Nov 2020 15:53:21 +0000
treeherder: mozilla-central@c42696dc97c6 [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: jya
bugs: 1660336
milestone: 85.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1660336 Implement DMABufSurfaceWrapper and VAAPIDisplayHolder as templates, r=jya Implemented DMABufSurfaceWrapper and VAAPIDisplayHolder as versioned class templates as they are going to be used by both system ffmpeg and bundled ffvpx decoders. Differential Revision: https://phabricator.services.mozilla.com/D90555 Depends on D90554
dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
@@ -121,51 +121,51 @@ static AVPixelFormat ChooseVAAPIPixelFor
         break;
     }
   }
 
   NS_WARNING("FFmpeg does not share any supported pixel formats.");
   return AV_PIX_FMT_NONE;
 }
 
-DMABufSurfaceWrapper::DMABufSurfaceWrapper(DMABufSurface* aSurface,
-                                           FFmpegLibWrapper* aLib)
+DMABufSurfaceWrapper<LIBAV_VER>::DMABufSurfaceWrapper(DMABufSurface* aSurface,
+                                                      FFmpegLibWrapper* aLib)
     : mSurface(aSurface),
       mLib(aLib),
       mAVHWFramesContext(nullptr),
       mHWAVBuffer(nullptr) {
   // Create global refcount object to track mSurface usage over
   // gecko rendering engine. We can't release it until it's used
   // by GL compositor / WebRender.
   mSurface->GlobalRefCountCreate();
   FFMPEG_LOG("DMABufSurfaceWrapper: creating surface UID = %d",
              mSurface->GetUID());
 }
 
-void DMABufSurfaceWrapper::LockVAAPIData(AVCodecContext* aAVCodecContext,
-                                         AVFrame* aAVFrame) {
+void DMABufSurfaceWrapper<LIBAV_VER>::LockVAAPIData(
+    AVCodecContext* aAVCodecContext, AVFrame* aAVFrame) {
   FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI locking dmabuf surface UID = %d",
              mSurface->GetUID());
   if (aAVCodecContext && aAVFrame) {
     mAVHWFramesContext = mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx);
     mHWAVBuffer = mLib->av_buffer_ref(aAVFrame->buf[0]);
   }
 }
 
-void DMABufSurfaceWrapper::ReleaseVAAPIData() {
+void DMABufSurfaceWrapper<LIBAV_VER>::ReleaseVAAPIData() {
   FFMPEG_LOG("DMABufSurfaceWrapper: VAAPI releasing dmabuf surface UID = %d",
              mSurface->GetUID());
   if (mHWAVBuffer && mAVHWFramesContext) {
     mLib->av_buffer_unref(&mHWAVBuffer);
     mLib->av_buffer_unref(&mAVHWFramesContext);
   }
   mSurface->ReleaseSurface();
 }
 
-DMABufSurfaceWrapper::~DMABufSurfaceWrapper() {
+DMABufSurfaceWrapper<LIBAV_VER>::~DMABufSurfaceWrapper() {
   FFMPEG_LOG("DMABufSurfaceWrapper: deleting dmabuf surface UID = %d",
              mSurface->GetUID());
   ReleaseVAAPIData();
 }
 
 AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
   AVCodec* decoder = mLib->avcodec_find_decoder(mCodecID);
   for (int i = 0;; i++) {
@@ -178,29 +178,37 @@ AVCodec* FFmpegVideoDecoder<LIBAV_VER>::
       return decoder;
     }
   }
 
   FFMPEG_LOG("Decoder does not support VAAPI device type");
   return nullptr;
 }
 
-class VAAPIDisplayHolder {
+template <int V>
+class VAAPIDisplayHolder {};
+
+template <>
+class VAAPIDisplayHolder<LIBAV_VER>;
+
+template <>
+class VAAPIDisplayHolder<LIBAV_VER> {
  public:
   VAAPIDisplayHolder(FFmpegLibWrapper* aLib, VADisplay aDisplay)
       : mLib(aLib), mDisplay(aDisplay){};
   ~VAAPIDisplayHolder() { mLib->vaTerminate(mDisplay); }
 
  private:
   FFmpegLibWrapper* mLib;
   VADisplay mDisplay;
 };
 
 static void VAAPIDisplayReleaseCallback(struct AVHWDeviceContext* hwctx) {
-  auto displayHolder = static_cast<VAAPIDisplayHolder*>(hwctx->user_opaque);
+  auto displayHolder =
+      static_cast<VAAPIDisplayHolder<LIBAV_VER>*>(hwctx->user_opaque);
   delete displayHolder;
 }
 
 bool FFmpegVideoDecoder<LIBAV_VER>::CreateVAAPIDeviceContext() {
   mVAAPIDeviceContext = mLib->av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
   if (!mVAAPIDeviceContext) {
     return false;
   }
@@ -226,17 +234,17 @@ bool FFmpegVideoDecoder<LIBAV_VER>::Crea
     }
     mDisplay = mLib->vaGetDisplayWl(display);
     if (!mDisplay) {
       FFMPEG_LOG("Can't get Wayland VA-API display.");
       return false;
     }
   }
 
-  hwctx->user_opaque = new VAAPIDisplayHolder(mLib, mDisplay);
+  hwctx->user_opaque = new VAAPIDisplayHolder<LIBAV_VER>(mLib, mDisplay);
   hwctx->free = VAAPIDisplayReleaseCallback;
 
   int major, minor;
   int status = mLib->vaInitialize(mDisplay, &major, &minor);
   if (status != VA_STATUS_SUCCESS) {
     return false;
   }
 
@@ -700,17 +708,17 @@ void FFmpegVideoDecoder<LIBAV_VER>::Rele
   int len = mDMABufSurfaces.Length();
   for (int i = 0; i < len; i++) {
     if (!mDMABufSurfaces[i].IsUsed()) {
       mDMABufSurfaces[i].ReleaseVAAPIData();
     }
   }
 }
 
-DMABufSurfaceWrapper*
+DMABufSurfaceWrapper<LIBAV_VER>*
 FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurfaceWrapper() {
   int len = mDMABufSurfaces.Length();
   for (int i = 0; i < len; i++) {
     if (!mDMABufSurfaces[i].IsUsed()) {
       return &(mDMABufSurfaces[i]);
     }
   }
   return nullptr;
@@ -766,17 +774,18 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER
   if (mVAAPIDeviceContext && !GetVAAPISurfaceDescriptor(vaDesc)) {
     return MediaResult(
         NS_ERROR_OUT_OF_MEMORY,
         RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
   }
 
   RefPtr<DMABufSurfaceYUV> surface;
 
-  DMABufSurfaceWrapper* surfaceWrapper = GetUnusedDMABufSurfaceWrapper();
+  DMABufSurfaceWrapper<LIBAV_VER>* surfaceWrapper =
+      GetUnusedDMABufSurfaceWrapper();
   if (!surfaceWrapper) {
     if (mVAAPIDeviceContext) {
       surface = DMABufSurfaceYUV::CreateYUVSurface(vaDesc);
     } else {
       surface = DMABufSurfaceYUV::CreateYUVSurface(
           mFrame->width, mFrame->height, (void**)mFrame->data,
           mFrame->linesize);
     }
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
@@ -50,17 +50,24 @@ namespace mozilla {
 //
 // FFmpeg SW decoding scenario
 //
 // When SW ffmpeg decoding is running, DMABufSurfaceWrapper contains only
 // a DMABufSurface reference and VA-API related members are null.
 // We own the DMABufSurface underlying GPU data and we use it for
 // repeated rendering of video frames.
 //
-class DMABufSurfaceWrapper final {
+template <int V>
+class DMABufSurfaceWrapper {};
+
+template <>
+class DMABufSurfaceWrapper<LIBAV_VER>;
+
+template <>
+class DMABufSurfaceWrapper<LIBAV_VER> final {
  public:
   DMABufSurfaceWrapper(DMABufSurface* aSurface, FFmpegLibWrapper* aLib);
   ~DMABufSurfaceWrapper();
 
   // Lock VAAPI related data
   void LockVAAPIData(AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
 
   // Release VAAPI related data, DMABufSurface can be reused
@@ -157,17 +164,17 @@ class FFmpegVideoDecoder<LIBAV_VER>
   bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
   bool GetVAAPISurfaceDescriptor(VADRMPRIMESurfaceDescriptor& aVaDesc);
 
   MediaResult CreateImageDMABuf(int64_t aOffset, int64_t aPts,
                                 int64_t aDuration,
                                 MediaDataDecoder::DecodedData& aResults);
 
   void ReleaseUnusedVAAPIFrames();
-  DMABufSurfaceWrapper* GetUnusedDMABufSurfaceWrapper();
+  DMABufSurfaceWrapper<LIBAV_VER>* GetUnusedDMABufSurfaceWrapper();
   void ReleaseDMABufSurfaces();
 #endif
 
   /**
    * This method allocates a buffer for FFmpeg's decoder, wrapped in an Image.
    * Currently it only supports Planar YUV420, which appears to be the only
    * non-hardware accelerated image format that FFmpeg's H264 decoder is
    * capable of outputting.
@@ -175,17 +182,17 @@ class FFmpegVideoDecoder<LIBAV_VER>
   int AllocateYUV420PVideoBuffer(AVCodecContext* aCodecContext,
                                  AVFrame* aFrame);
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
   AVBufferRef* mVAAPIDeviceContext;
   const bool mDisableHardwareDecoding;
   VADisplay mDisplay;
   bool mUseDMABufSurfaces;
-  nsTArray<DMABufSurfaceWrapper> mDMABufSurfaces;
+  nsTArray<DMABufSurfaceWrapper<LIBAV_VER>> mDMABufSurfaces;
 #endif
   RefPtr<KnowsCompositor> mImageAllocator;
   RefPtr<ImageContainer> mImageContainer;
   VideoInfo mInfo;
 
   class PtsCorrectionContext {
    public:
     PtsCorrectionContext();