Bug 1629788 [Wayland] Implement SW decoding to WaylandDMABufSurface, r=jya
author: Martin Stransky <stransky@redhat.com>
date: Sat, 13 Jun 2020 18:38:38 +0000
changeset: 535624 8a463700d1698fead0b1804d96fcd59309db6f6e
parent: 535623 0c7df6f9b0c1999f37b231705f85e44df689bab0
child: 535625 b4fee14fe36ae084fb392de2a40bcbd9a226bec3
push id: 118972
push user: abutkovits@mozilla.com
push date: Sat, 13 Jun 2020 19:12:28 +0000
treeherder: autoland@b4fee14fe36a
reviewers: jya
bugs: 1629788
milestone: 79.0a1
first release with: nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
last release without: nightly linux32, nightly linux64, nightly mac, nightly win32, nightly win64
Bug 1629788 [Wayland] Implement SW decoding to WaylandDMABufSurface, r=jya

- Use WaylandDMABufSurface for SW decoded frames when they can be converted to NV12 format.
- Rename VAAPIFrameHolder to DMABufSurface and use it as a general placeholder for a WaylandDMABufSurface. It's used for VA-API/SW video playback and holds decoded video images until they are used by the gecko rendering engine.
- Implement a linked list of DMABufSurface objects where recently used frames are stored. The frames are recycled by the ffmpeg decoder for VA-API and SW video playback.

Differential Revision: https://phabricator.services.mozilla.com/D78292
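The surface pool this patch introduces works on a recycle-or-create basis: allocating a WaylandDMABufSurface is expensive, so the decoder first scans the pool for an entry whose compositor-side global refcount has dropped, and only allocates a new surface on a miss. Below is a minimal, self-contained sketch of that pattern; Surface and std::vector are illustrative stand-ins for the patch's DMABufSurface and nsTArray, not the shipped code.

    #include <vector>

    // Stand-in for DMABufSurface; only the "still referenced?" check matters.
    struct Surface {
      bool used = false;              // stands in for IsUsed()/IsGlobalRefSet()
      void Upload() { used = true; }  // stands in for UpdateNV12Data()
    };

    // Mirrors GetUnusedDMABufSurface(): find an idle pool entry to recycle.
    Surface* GetUnusedSurface(std::vector<Surface>& pool) {
      for (Surface& s : pool) {
        if (!s.used) {
          return &s;
        }
      }
      return nullptr;  // none idle; caller allocates a fresh surface
    }

    // Mirrors the recycle-or-create flow in CreateImageDMABuf().
    Surface* RecycleOrCreate(std::vector<Surface>& pool) {
      if (Surface* s = GetUnusedSurface(pool)) {
        s->Upload();  // reuse: upload the new frame into the old surface
        return s;
      }
      pool.emplace_back();  // miss: grow the pool with a new surface
      pool.back().Upload();
      return &pool.back();
    }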
dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.cpp
@@ -117,38 +117,53 @@ static AVPixelFormat ChooseVAAPIPixelFor
         break;
     }
   }
 
   NS_WARNING("FFmpeg does not share any supported pixel formats.");
   return AV_PIX_FMT_NONE;
 }
 
-VAAPIFrameHolder::VAAPIFrameHolder(FFmpegLibWrapper* aLib,
-                                   WaylandDMABufSurface* aSurface,
-                                   AVCodecContext* aAVCodecContext,
-                                   AVFrame* aAVFrame)
-    : mLib(aLib),
-      mSurface(aSurface),
-      mAVHWFramesContext(mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx)),
-      mHWAVBuffer(mLib->av_buffer_ref(aAVFrame->buf[0])) {
-  FFMPEG_LOG("VAAPIFrameHolder is adding dmabuf surface UID = %d",
-             mSurface->GetUID());
-
+DMABufSurface::DMABufSurface(WaylandDMABufSurface* aSurface,
+                             FFmpegLibWrapper* aLib)
+    : mSurface(aSurface),
+      mLib(aLib),
+      mAVHWFramesContext(nullptr),
+      mHWAVBuffer(nullptr) {
   // Create a global refcount object to track mSurface usage over
   // the gecko rendering engine. We can't release it until it's used
   // by the GL compositor / WebRender.
   mSurface->GlobalRefCountCreate();
+  FFMPEG_LOG("DMABufSurface: creating surface UID = %d", mSurface->GetUID());
+}
+
+void DMABufSurface::LockVAAPIData(AVCodecContext* aAVCodecContext,
+                                  AVFrame* aAVFrame) {
+  FFMPEG_LOG("DMABufSurface: VAAPI locking dmabuf surface UID = %d",
+             mSurface->GetUID());
+  if (aAVCodecContext && aAVFrame) {
+    mAVHWFramesContext = mLib->av_buffer_ref(aAVCodecContext->hw_frames_ctx);
+    mHWAVBuffer = mLib->av_buffer_ref(aAVFrame->buf[0]);
+  }
 }
 
-VAAPIFrameHolder::~VAAPIFrameHolder() {
-  FFMPEG_LOG("VAAPIFrameHolder is releasing dmabuf surface UID = %d",
+void DMABufSurface::ReleaseVAAPIData() {
+  FFMPEG_LOG("DMABufSurface: VAAPI releasing dmabuf surface UID = %d",
              mSurface->GetUID());
-  mLib->av_buffer_unref(&mHWAVBuffer);
-  mLib->av_buffer_unref(&mAVHWFramesContext);
+  if (mHWAVBuffer && mAVHWFramesContext) {
+    mLib->av_buffer_unref(&mHWAVBuffer);
+    mLib->av_buffer_unref(&mAVHWFramesContext);
+  }
+  mSurface->ReleaseSurface();
+}
+
+DMABufSurface::~DMABufSurface() {
+  FFMPEG_LOG("DMABufSurface: deleting dmabuf surface UID = %d",
+             mSurface->GetUID());
+  ReleaseVAAPIData();
 }
 
 AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVAAPICodec() {
   AVCodec* decoder = mLib->avcodec_find_decoder(mCodecID);
   for (int i = 0;; i++) {
     const AVCodecHWConfig* config = mLib->avcodec_get_hw_config(decoder, i);
     if (!config) {
       break;
@@ -220,24 +235,16 @@ bool FFmpegVideoDecoder<LIBAV_VER>::Crea
 MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitVAAPIDecoder() {
   FFMPEG_LOG("Initialising VA-API FFmpeg decoder");
 
   if (!mLib->IsVAAPIAvailable()) {
     FFMPEG_LOG("libva library or symbols are missing.");
     return NS_ERROR_NOT_AVAILABLE;
   }
 
-  auto layersBackend = mImageAllocator
-                           ? mImageAllocator->GetCompositorBackendType()
-                           : layers::LayersBackend::LAYERS_BASIC;
-  if (layersBackend != layers::LayersBackend::LAYERS_WR) {
-    FFMPEG_LOG("VA-API works with WebRender only!");
-    return NS_ERROR_NOT_AVAILABLE;
-  }
-
   AVCodec* codec = FindVAAPICodec();
   if (!codec) {
     FFMPEG_LOG("Couldn't find ffmpeg VA-API decoder");
     return NS_ERROR_DOM_MEDIA_FATAL_ERR;
   }
 
   StaticMutexAutoLock mon(sMonitor);
 
@@ -313,32 +320,45 @@ FFmpegVideoDecoder<LIBAV_VER>::FFmpegVid
     FFmpegLibWrapper* aLib, TaskQueue* aTaskQueue, const VideoInfo& aConfig,
     KnowsCompositor* aAllocator, ImageContainer* aImageContainer,
     bool aLowLatency, bool aDisableHardwareDecoding)
     : FFmpegDataDecoder(aLib, aTaskQueue, GetCodecId(aConfig.mMimeType)),
 #ifdef MOZ_WAYLAND_USE_VAAPI
       mVAAPIDeviceContext(nullptr),
       mDisableHardwareDecoding(aDisableHardwareDecoding),
       mDisplay(nullptr),
+      mUseDMABufSurfaces(false),
 #endif
       mImageAllocator(aAllocator),
       mImageContainer(aImageContainer),
       mInfo(aConfig),
       mLowLatency(aLowLatency) {
   // Use a new MediaByteBuffer as the object will be modified during
   // initialization.
   mExtraData = new MediaByteBuffer;
   mExtraData->AppendElements(*aConfig.mExtraData);
+
+#ifdef MOZ_WAYLAND_USE_VAAPI
+  mUseDMABufSurfaces =
+      gfxPlatformGtk::GetPlatform()->UseWaylandDMABufVideoTextures() &&
+      mImageAllocator &&
+      (mImageAllocator->GetCompositorBackendType() ==
+       layers::LayersBackend::LAYERS_WR);
+
+  if (!mUseDMABufSurfaces) {
+    FFMPEG_LOG("DMA-BUF/VA-API can't be used, WebRender/Wayland is disabled");
+  }
+#endif
 }
 
 RefPtr<MediaDataDecoder::InitPromise> FFmpegVideoDecoder<LIBAV_VER>::Init() {
   MediaResult rv;
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
-  if (!mDisableHardwareDecoding) {
+  if (mUseDMABufSurfaces && !mDisableHardwareDecoding) {
     rv = InitVAAPIDecoder();
     if (NS_SUCCEEDED(rv)) {
       return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
     }
   }
 #endif
 
   rv = InitDecoder();
@@ -428,16 +448,18 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER
   }
   do {
     if (!PrepareFrame()) {
       NS_WARNING("FFmpeg h264 decoder failed to allocate frame.");
       return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
     }
 
 #  ifdef MOZ_WAYLAND_USE_VAAPI
+    // Release unused VA-API surfaces before avcodec_receive_frame() as
+    // ffmpeg recycles VASurfaces for HW decoding.
     if (mVAAPIDeviceContext) {
       ReleaseUnusedVAAPIFrames();
     }
 #  endif
 
     res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
     if (res == int(AVERROR_EOF)) {
       return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
@@ -448,20 +470,29 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER
     if (res < 0) {
       FFMPEG_LOG("avcodec_receive_frame error: %d", res);
       return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                          RESULT_DETAIL("avcodec_receive_frame error: %d", res));
     }
 
     MediaResult rv;
 #  ifdef MOZ_WAYLAND_USE_VAAPI
-    if (mVAAPIDeviceContext) {
-      MOZ_ASSERT(mFrame->format == AV_PIX_FMT_VAAPI_VLD);
-      rv = CreateImageVAAPI(mFrame->pkt_pos, mFrame->pkt_pts,
-                            mFrame->pkt_duration, aResults);
+    if (mVAAPIDeviceContext || mUseDMABufSurfaces) {
+      rv = CreateImageDMABuf(mFrame->pkt_pos, mFrame->pkt_pts,
+                             mFrame->pkt_duration, aResults);
+
+      // If VA-API playback failed, just quit. The decoder is going to be
+      // restarted without VA-API.
+      // If VA-API is already off, disable DMABufSurfaces and fall back to
+      // the default image path.
+      if (NS_FAILED(rv) && !mVAAPIDeviceContext) {
+        mUseDMABufSurfaces = false;
+        rv = CreateImage(mFrame->pkt_pos, mFrame->pkt_pts, mFrame->pkt_duration,
+                         aResults);
+      }
     } else
 #  endif
     {
       rv = CreateImage(mFrame->pkt_pos, mFrame->pkt_pts, mFrame->pkt_duration,
                        aResults);
     }
     if (NS_FAILED(rv)) {
       return rv;
@@ -640,79 +671,132 @@ MediaResult FFmpegVideoDecoder<LIBAV_VER
                        RESULT_DETAIL("image allocation error"));
   }
   aResults.AppendElement(std::move(v));
   return NS_OK;
 }
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
 void FFmpegVideoDecoder<LIBAV_VER>::ReleaseUnusedVAAPIFrames() {
-  std::list<UniquePtr<VAAPIFrameHolder>>::iterator holder =
-      mFrameHolders.begin();
-  while (holder != mFrameHolders.end()) {
-    if (!(*holder)->IsUsed()) {
-      holder = mFrameHolders.erase(holder);
-    } else {
-      holder++;
+  int len = mDMABufSurfaces.Length();
+  for (int i = 0; i < len; i++) {
+    if (!mDMABufSurfaces[i].IsUsed()) {
+      mDMABufSurfaces[i].ReleaseVAAPIData();
     }
   }
 }
 
-void FFmpegVideoDecoder<LIBAV_VER>::ReleaseAllVAAPIFrames() {
-  mFrameHolders.clear();
+DMABufSurface* FFmpegVideoDecoder<LIBAV_VER>::GetUnusedDMABufSurface() {
+  int len = mDMABufSurfaces.Length();
+  for (int i = 0; i < len; i++) {
+    if (!mDMABufSurfaces[i].IsUsed()) {
+      return &(mDMABufSurfaces[i]);
+    }
+  }
+  return nullptr;
 }
 
-MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
-    int64_t aOffset, int64_t aPts, int64_t aDuration,
-    MediaDataDecoder::DecodedData& aResults) {
-  FFMPEG_LOG("Got one VAAPI frame output with pts=%" PRId64 " dts=%" PRId64
-             " duration=%" PRId64 " opaque=%" PRId64,
-             aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
+void FFmpegVideoDecoder<LIBAV_VER>::ReleaseDMABufSurfaces() {
+  mDMABufSurfaces.Clear();
+}
 
-  VADRMPRIMESurfaceDescriptor va_desc;
+bool FFmpegVideoDecoder<LIBAV_VER>::GetVAAPISurfaceDescriptor(
+    VADRMPRIMESurfaceDescriptor& aVaDesc) {
   VASurfaceID surface_id = (VASurfaceID)(uintptr_t)mFrame->data[3];
   VAStatus vas = mLib->vaExportSurfaceHandle(
       mDisplay, surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
       VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS,
-      &va_desc);
+      &aVaDesc);
   if (vas != VA_STATUS_SUCCESS) {
-    return MediaResult(
-        NS_ERROR_OUT_OF_MEMORY,
-        RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
+    return false;
   }
   vas = mLib->vaSyncSurface(mDisplay, surface_id);
   if (vas != VA_STATUS_SUCCESS) {
     NS_WARNING("vaSyncSurface() failed.");
   }
 
-  va_desc.width = mFrame->width;
-  va_desc.height = mFrame->height;
+  aVaDesc.width = mFrame->width;
+  aVaDesc.height = mFrame->height;
+
+  return true;
+}
 
-  RefPtr<WaylandDMABufSurfaceNV12> surface =
-      WaylandDMABufSurfaceNV12::CreateNV12Surface(va_desc);
-  if (!surface) {
+MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageDMABuf(
+    int64_t aOffset, int64_t aPts, int64_t aDuration,
+    MediaDataDecoder::DecodedData& aResults) {
+  FFMPEG_LOG("DMABUF/VA-API Got one frame output with pts=%" PRId64
+             "dts=%" PRId64 " duration=%" PRId64 " opaque=%" PRId64,
+             aPts, mFrame->pkt_dts, aDuration, mCodecContext->reordered_opaque);
+
+  // With SW decode we only support the YUV420P format with DMABuf surfaces.
+  if (!mVAAPIDeviceContext && mCodecContext->pix_fmt != AV_PIX_FMT_YUV420P) {
+    return MediaResult(
+        NS_ERROR_NOT_IMPLEMENTED,
+        RESULT_DETAIL("DMA-BUF textures support the YUV420P format only"));
+  }
+
+  VADRMPRIMESurfaceDescriptor vaDesc;
+  if (mVAAPIDeviceContext && !GetVAAPISurfaceDescriptor(vaDesc)) {
     return MediaResult(
         NS_ERROR_OUT_OF_MEMORY,
-        RESULT_DETAIL("Unable to allocate WaylandDMABufSurfaceNV12."));
+        RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
   }
 
+  RefPtr<WaylandDMABufSurfaceNV12> waylandSurface;
+
+  DMABufSurface* surface = GetUnusedDMABufSurface();
+  if (!surface) {
+    if (mVAAPIDeviceContext) {
+      waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(vaDesc);
+    } else {
+      waylandSurface = WaylandDMABufSurfaceNV12::CreateNV12Surface(
+          mFrame->width, mFrame->height, (void**)mFrame->data,
+          mFrame->linesize);
+    }
+    if (!waylandSurface) {
+      return MediaResult(
+          NS_ERROR_OUT_OF_MEMORY,
+          RESULT_DETAIL("Unable to get WaylandDMABufSurfaceNV12"));
+    }
+
 #  ifdef MOZ_LOGGING
-  static int uid = 0;
-  surface->SetUID(++uid);
-  FFMPEG_LOG("Created dmabuf UID = %d HW surface %x", uid, surface_id);
+    static int uid = 0;
+    waylandSurface->SetUID(++uid);
+    FFMPEG_LOG("Created new WaylandDMABufSurface UID = %d", uid);
 #  endif
+    mDMABufSurfaces.AppendElement(DMABufSurface(waylandSurface, mLib));
+    surface = &(mDMABufSurfaces[mDMABufSurfaces.Length() - 1]);
+  } else {
+    waylandSurface = surface->GetWaylandDMABufSurface();
+    bool ret;
 
-  surface->SetYUVColorSpace(GetFrameColorSpace());
+    if (mVAAPIDeviceContext) {
+      ret = waylandSurface->UpdateNV12Data(vaDesc);
+    } else {
+      ret = waylandSurface->UpdateNV12Data((void**)mFrame->data,
+                                           mFrame->linesize);
+    }
 
-  // Store reference to the decoded HW buffer, see VAAPIFrameHolder struct.
-  auto holder =
-      MakeUnique<VAAPIFrameHolder>(mLib, surface, mCodecContext, mFrame);
-  mFrameHolders.push_back(std::move(holder));
+    if (!ret) {
+      return MediaResult(
+          NS_ERROR_OUT_OF_MEMORY,
+          RESULT_DETAIL("Unable to upload data to WaylandDMABufSurfaceNV12"));
+    }
+    FFMPEG_LOG("Reusing WaylandDMABufSurface UID = %d",
+               waylandSurface->GetUID());
+  }
 
-  RefPtr<layers::Image> im = new layers::WaylandDMABUFSurfaceImage(surface);
+  if (mVAAPIDeviceContext) {
+    surface->LockVAAPIData(mCodecContext, mFrame);
+  }
+
+  waylandSurface->SetYUVColorSpace(GetFrameColorSpace());
+
+  RefPtr<layers::Image> im =
+      new layers::WaylandDMABUFSurfaceImage(waylandSurface);
 
   RefPtr<VideoData> vp = VideoData::CreateFromImage(
       mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
       TimeUnit::FromMicroseconds(aDuration), im, !!mFrame->key_frame,
       TimeUnit::FromMicroseconds(-1));
 
   if (!vp) {
     return MediaResult(NS_ERROR_OUT_OF_MEMORY,
@@ -753,18 +837,18 @@ AVCodecID FFmpegVideoDecoder<LIBAV_VER>:
   }
 #endif
 
   return AV_CODEC_ID_NONE;
 }
 
 void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
 #ifdef MOZ_WAYLAND_USE_VAAPI
+  ReleaseDMABufSurfaces();
   if (mVAAPIDeviceContext) {
-    ReleaseAllVAAPIFrames();
     mLib->av_buffer_unref(&mVAAPIDeviceContext);
   }
 #endif
   FFmpegDataDecoder<LIBAV_VER>::ProcessShutdown();
 }
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
 bool FFmpegVideoDecoder<LIBAV_VER>::IsHardwareAccelerated(
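The .cpp changes above add a two-stage fallback around CreateImageDMABuf(): a failure while VA-API is active is propagated so the whole decoder restarts without VA-API, while a failure in the SW path disables DMA-BUF surfaces for good and retries the frame through the generic CreateImage() path. A hedged, self-contained sketch of that decision; the Outcome enum and HandleDMABufResult are illustrative stand-ins, not patch code.

    // Condenses the fallback logic in DoDecode() after this patch.
    enum class Outcome {
      Done,          // DMA-BUF image created successfully
      Propagate,     // VA-API failed: report error, decoder restarts w/o VA-API
      RetryGeneric,  // SW DMA-BUF failed: disable it, retry via CreateImage()
    };

    Outcome HandleDMABufResult(bool failed, bool usingVAAPI, bool& useDMABuf) {
      if (!failed) {
        return Outcome::Done;
      }
      if (usingVAAPI) {
        return Outcome::Propagate;
      }
      useDMABuf = false;  // permanent fallback for this decoder instance
      return Outcome::RetryGeneric;
    }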
--- a/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
+++ b/dom/media/platforms/ffmpeg/FFmpegVideoDecoder.h
@@ -7,51 +7,82 @@
 #ifndef __FFmpegVideoDecoder_h__
 #define __FFmpegVideoDecoder_h__
 
 #include "FFmpegLibWrapper.h"
 #include "FFmpegDataDecoder.h"
 #include "SimpleMap.h"
 #ifdef MOZ_WAYLAND_USE_VAAPI
 #  include "mozilla/widget/WaylandDMABufSurface.h"
-#  include <list>
+#  include "mozilla/LinkedList.h"
 #endif
 
 namespace mozilla {
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
+// DMABufSurface holds a reference to GPU data with a video frame.
+//
+// Actual GPU pixel data are stored in WaylandDMABufSurface and the
+// WaylandDMABufSurface is passed to the gecko GL rendering pipeline via
+// WaylandDMABUFSurfaceImage.
+//
+// DMABufSurface can optionally hold VA-API ffmpeg related data to keep
+// the GPU data locked until we need it.
+//
+// DMABufSurface is used for both HW accelerated video decoding (VA-API)
+// and ffmpeg SW decoding.
+//
+// VA-API scenario
+//
 // When VA-API decoding is running, ffmpeg allocates AVHWFramesContext - a pool
 // of "hardware" frames. Every "hardware" frame (VASurface) is backed
 // by an actual piece of GPU memory which holds the decoded image data.
 //
 // The VASurface is wrapped by WaylandDMABufSurface and transferred to
 // rendering queue by WaylandDMABUFSurfaceImage, where TextureClient is
 // created and VASurface is used as a texture there.
 //
 // As there's a limited number of VASurfaces, ffmpeg reuses them to decode
 // next frames ASAP even if they are still attached to WaylandDMABufSurface
 // and used as a texture in our rendering engine.
 //
 // Unfortunately there isn't any obvious way to mark a particular VASurface
 // as used. The best we can do is to hold a reference to the particular
 // AVBuffer from the decoded AVFrame and to the AVHWFramesContext which
 // owns the AVBuffer.
-
-class VAAPIFrameHolder final {
+//
+// FFmpeg SW decoding scenario
+//
+// When SW ffmpeg decoding is running, DMABufSurface contains only
+// a WaylandDMABufSurface reference and the VA-API related members are null.
+// We own the GPU data underlying the WaylandDMABufSurface and we use it
+// for repeated rendering of video frames.
+//
+class DMABufSurface final {
  public:
-  VAAPIFrameHolder(FFmpegLibWrapper* aLib, WaylandDMABufSurface* aSurface,
-                   AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
-  ~VAAPIFrameHolder();
+  DMABufSurface(WaylandDMABufSurface* aSurface, FFmpegLibWrapper* aLib);
+  ~DMABufSurface();
+
+  // Lock VAAPI related data
+  void LockVAAPIData(AVCodecContext* aAVCodecContext, AVFrame* aAVFrame);
+
+  // Release VAAPI related data, DMABufSurface can be reused
+  // for another frame.
+  void ReleaseVAAPIData();
 
   // Check if WaylandDMABufSurface is used by any gecko rendering process
   // (WebRender or GL compositor) or by WaylandDMABUFSurfaceImage/VideoData.
   bool IsUsed() const { return mSurface->IsGlobalRefSet(); }
 
+  const RefPtr<WaylandDMABufSurfaceNV12> GetWaylandDMABufSurface() {
+    return mSurface->GetAsWaylandDMABufSurfaceNV12();
+  }
+
  private:
+  const RefPtr<WaylandDMABufSurface> mSurface;
   const FFmpegLibWrapper* mLib;
-  const RefPtr<WaylandDMABufSurface> mSurface;
   AVBufferRef* mAVHWFramesContext;
   AVBufferRef* mHWAVBuffer;
 };
 #endif
 
 template <int V>
 class FFmpegVideoDecoder : public FFmpegDataDecoder<V> {};
 
@@ -113,37 +144,42 @@ class FFmpegVideoDecoder<LIBAV_VER>
                           MediaDataDecoder::DecodedData& aResults);
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
   MediaResult InitVAAPIDecoder();
   bool CreateVAAPIDeviceContext();
   void InitVAAPICodecContext();
   AVCodec* FindVAAPICodec();
   bool IsHardwareAccelerated(nsACString& aFailureReason) const override;
+  bool GetVAAPISurfaceDescriptor(VADRMPRIMESurfaceDescriptor& aVaDesc);
 
-  MediaResult CreateImageVAAPI(int64_t aOffset, int64_t aPts, int64_t aDuration,
-                               MediaDataDecoder::DecodedData& aResults);
+  MediaResult CreateImageDMABuf(int64_t aOffset, int64_t aPts,
+                                int64_t aDuration,
+                                MediaDataDecoder::DecodedData& aResults);
+
   void ReleaseUnusedVAAPIFrames();
-  void ReleaseAllVAAPIFrames();
+  DMABufSurface* GetUnusedDMABufSurface();
+  void ReleaseDMABufSurfaces();
 #endif
 
   /**
    * This method allocates a buffer for FFmpeg's decoder, wrapped in an Image.
    * Currently it only supports Planar YUV420, which appears to be the only
    * non-hardware accelerated image format that FFmpeg's H264 decoder is
    * capable of outputting.
    */
   int AllocateYUV420PVideoBuffer(AVCodecContext* aCodecContext,
                                  AVFrame* aFrame);
 
 #ifdef MOZ_WAYLAND_USE_VAAPI
   AVBufferRef* mVAAPIDeviceContext;
   const bool mDisableHardwareDecoding;
   VADisplay mDisplay;
-  std::list<UniquePtr<VAAPIFrameHolder>> mFrameHolders;
+  bool mUseDMABufSurfaces;
+  nsTArray<DMABufSurface> mDMABufSurfaces;
 #endif
   RefPtr<KnowsCompositor> mImageAllocator;
   RefPtr<ImageContainer> mImageContainer;
   VideoInfo mInfo;
 
   class PtsCorrectionContext {
    public:
     PtsCorrectionContext();
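As the header comment in FFmpegVideoDecoder.h explains, keeping a VASurface alive while the compositor still samples from it comes down to holding extra AVBufferRef references on the frame's data buffer and on the AVHWFramesContext that owns it. A minimal sketch of that locking idea against the real libavutil buffer API; FrameLock is a hypothetical holder illustrating LockVAAPIData()/ReleaseVAAPIData(), not the patch's DMABufSurface.

    extern "C" {
    #include <libavutil/buffer.h>
    }

    // Extra AVBufferRef references stop ffmpeg from recycling the backing
    // VASurface while it's still displayed.
    struct FrameLock {
      AVBufferRef* mHWFramesCtx = nullptr;
      AVBufferRef* mHWBuffer = nullptr;

      // Take references on the frame pool and the frame's data buffer,
      // guarded the same way the patch guards LockVAAPIData().
      void Lock(AVBufferRef* aFramesCtx, AVBufferRef* aFrameBuf) {
        if (aFramesCtx && aFrameBuf) {
          mHWFramesCtx = av_buffer_ref(aFramesCtx);
          mHWBuffer = av_buffer_ref(aFrameBuf);
        }
      }

      // Drop the references; ffmpeg may now reuse the VASurface.
      // av_buffer_unref() is a no-op on null, so calling twice is safe.
      void Release() {
        av_buffer_unref(&mHWBuffer);
        av_buffer_unref(&mHWFramesCtx);
      }

      ~FrameLock() { Release(); }
    };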