Bug 1787561: Make HDR telemetry work for GPU-managed video, add tests. r=alwu,aosmond,emilio a=RyanVM
authorBrad Werth <bwerth@mozilla.com>
Tue, 13 Sep 2022 20:22:22 +0000
changeset 703967 387a1a22253e1ad93a38b1cad3eeb23d4154363c
parent 703966 0fcbdd57a1d51c881466faaf38bc0503b5e39b15
child 703968 505d23b500f6c1ac96ad3d34f761610375973dab
push id2954
push userryanvm@gmail.com
push dateThu, 29 Sep 2022 21:23:02 +0000
treeherdermozilla-release@387a1a22253e [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersalwu, aosmond, emilio, RyanVM
bugs1787561, 1790281, 1790436
milestone105.0.2
Bug 1787561: Make HDR telemetry work for GPU-managed video, add tests. r=alwu,aosmond,emilio a=RyanVM This patch includes rebased patches from Bug 1787561, Bug 1790281, and Bug 1790436. It makes GPU-managed video report HDR telemetry correctly. Differential Revision: https://phabricator.services.mozilla.com/D157247
dom/html/HTMLMediaElement.cpp
dom/html/HTMLMediaElement.h
dom/media/MediaDecoder.cpp
dom/media/MediaDecoder.h
dom/media/ipc/RemoteImageHolder.cpp
dom/media/ipc/RemoteImageHolder.h
dom/media/ipc/RemoteVideoDecoder.cpp
dom/media/test/browser/browser.ini
dom/media/test/browser/browser_tab_visibility_and_play_time.js
dom/media/test/browser/file_media.html
dom/webidl/HTMLMediaElement.webidl
gfx/layers/GPUVideoImage.h
--- a/dom/html/HTMLMediaElement.cpp
+++ b/dom/html/HTMLMediaElement.cpp
@@ -2074,16 +2074,20 @@ void HTMLMediaElement::SetVisible(bool a
 bool HTMLMediaElement::IsVideoDecodingSuspended() const {
   return mDecoder && mDecoder->IsVideoDecodingSuspended();
 }
 
 double HTMLMediaElement::TotalVideoPlayTime() const {
   return mDecoder ? mDecoder->GetTotalVideoPlayTimeInSeconds() : -1.0;
 }
 
+double HTMLMediaElement::TotalVideoHDRPlayTime() const {
+  return mDecoder ? mDecoder->GetTotalVideoHDRPlayTimeInSeconds() : -1.0;
+}
+
 double HTMLMediaElement::VisiblePlayTime() const {
   return mDecoder ? mDecoder->GetVisibleVideoPlayTimeInSeconds() : -1.0;
 }
 
 double HTMLMediaElement::InvisiblePlayTime() const {
   return mDecoder ? mDecoder->GetInvisibleVideoPlayTimeInSeconds() : -1.0;
 }
 
--- a/dom/html/HTMLMediaElement.h
+++ b/dom/html/HTMLMediaElement.h
@@ -650,16 +650,17 @@ class HTMLMediaElement : public nsGeneri
   bool HasSuspendTaint() const;
 
   // For use by mochitests.
   bool IsVideoDecodingSuspended() const;
 
   // These functions return accumulated time, which are used for the telemetry
   // usage. Return -1 for error.
   double TotalVideoPlayTime() const;
+  double TotalVideoHDRPlayTime() const;
   double VisiblePlayTime() const;
   double InvisiblePlayTime() const;
   double VideoDecodeSuspendedTime() const;
   double TotalAudioPlayTime() const;
   double AudiblePlayTime() const;
   double InaudiblePlayTime() const;
   double MutedPlayTime() const;
 
--- a/dom/media/MediaDecoder.cpp
+++ b/dom/media/MediaDecoder.cpp
@@ -1493,16 +1493,20 @@ void MediaDecoder::NotifyVolumeChanged()
   MOZ_DIAGNOSTIC_ASSERT(!IsShutdown());
   mTelemetryProbesReporter->OnMutedChanged(mVolume == 0.f);
 }
 
 double MediaDecoder::GetTotalVideoPlayTimeInSeconds() const {
   return mTelemetryProbesReporter->GetTotalVideoPlayTimeInSeconds();
 }
 
+double MediaDecoder::GetTotalVideoHDRPlayTimeInSeconds() const {
+  return mTelemetryProbesReporter->GetTotalVideoHDRPlayTimeInSeconds();
+}
+
 double MediaDecoder::GetVisibleVideoPlayTimeInSeconds() const {
   return mTelemetryProbesReporter->GetVisibleVideoPlayTimeInSeconds();
 }
 
 double MediaDecoder::GetInvisibleVideoPlayTimeInSeconds() const {
   return mTelemetryProbesReporter->GetInvisibleVideoPlayTimeInSeconds();
 }
 
--- a/dom/media/MediaDecoder.h
+++ b/dom/media/MediaDecoder.h
@@ -725,16 +725,17 @@ class MediaDecoder : public DecoderDocto
   AbstractCanonical<PlayState>* CanonicalPlayState() { return &mPlayState; }
 
   void UpdateTelemetryHelperBasedOnPlayState(PlayState aState) const;
 
   TelemetryProbesReporter::Visibility OwnerVisibility() const;
 
   // Those methods exist to report telemetry related metrics.
   double GetTotalVideoPlayTimeInSeconds() const;
+  double GetTotalVideoHDRPlayTimeInSeconds() const;
   double GetVisibleVideoPlayTimeInSeconds() const;
   double GetInvisibleVideoPlayTimeInSeconds() const;
   double GetVideoDecodeSuspendedTimeInSeconds() const;
   double GetTotalAudioPlayTimeInSeconds() const;
   double GetAudiblePlayTimeInSeconds() const;
   double GetInaudiblePlayTimeInSeconds() const;
   double GetMutedPlayTimeInSeconds() const;
 
--- a/dom/media/ipc/RemoteImageHolder.cpp
+++ b/dom/media/ipc/RemoteImageHolder.cpp
@@ -16,21 +16,27 @@ namespace mozilla {
 
 using namespace gfx;
 using namespace layers;
 
 RemoteImageHolder::RemoteImageHolder() = default;
 RemoteImageHolder::RemoteImageHolder(layers::IGPUVideoSurfaceManager* aManager,
                                      layers::VideoBridgeSource aSource,
                                      const gfx::IntSize& aSize,
+                                     const gfx::ColorDepth& aColorDepth,
                                      const layers::SurfaceDescriptor& aSD)
-    : mSource(aSource), mSize(aSize), mSD(Some(aSD)), mManager(aManager) {}
+    : mSource(aSource),
+      mSize(aSize),
+      mColorDepth(aColorDepth),
+      mSD(Some(aSD)),
+      mManager(aManager) {}
 RemoteImageHolder::RemoteImageHolder(RemoteImageHolder&& aOther)
     : mSource(aOther.mSource),
       mSize(aOther.mSize),
+      mColorDepth(aOther.mColorDepth),
       mSD(std::move(aOther.mSD)),
       mManager(aOther.mManager) {
   aOther.mSD = Nothing();
 }
 
 already_AddRefed<Image> RemoteImageHolder::DeserializeImage(
     layers::BufferRecycleBin* aBufferRecycleBin) {
   MOZ_ASSERT(mSD && mSD->type() == SurfaceDescriptor::TSurfaceDescriptorBuffer);
@@ -107,17 +113,17 @@ already_AddRefed<layers::Image> RemoteIm
     image = DeserializeImage(aBufferRecycleBin);
   } else {
     // The Image here creates a TextureData object that takes ownership
     // of the SurfaceDescriptor, and is responsible for making sure that
     // it gets deallocated.
     SurfaceDescriptorRemoteDecoder remoteSD =
         static_cast<const SurfaceDescriptorGPUVideo&>(*mSD);
     remoteSD.source() = Some(mSource);
-    image = new GPUVideoImage(mManager, remoteSD, mSize);
+    image = new GPUVideoImage(mManager, remoteSD, mSize, mColorDepth);
   }
   mSD = Nothing();
   mManager = nullptr;
 
   return image.forget();
 }
 
 RemoteImageHolder::~RemoteImageHolder() {
@@ -133,27 +139,29 @@ RemoteImageHolder::~RemoteImageHolder() 
   }
 }
 
 /* static */ void ipc::IPDLParamTraits<RemoteImageHolder>::Write(
     IPC::MessageWriter* aWriter, ipc::IProtocol* aActor,
     RemoteImageHolder&& aParam) {
   WriteIPDLParam(aWriter, aActor, aParam.mSource);
   WriteIPDLParam(aWriter, aActor, aParam.mSize);
+  WriteIPDLParam(aWriter, aActor, aParam.mColorDepth);
   WriteIPDLParam(aWriter, aActor, aParam.mSD);
   // Empty this holder.
   aParam.mSD = Nothing();
   aParam.mManager = nullptr;
 }
 
 /* static */ bool ipc::IPDLParamTraits<RemoteImageHolder>::Read(
     IPC::MessageReader* aReader, ipc::IProtocol* aActor,
     RemoteImageHolder* aResult) {
   if (!ReadIPDLParam(aReader, aActor, &aResult->mSource) ||
       !ReadIPDLParam(aReader, aActor, &aResult->mSize) ||
+      !ReadIPDLParam(aReader, aActor, &aResult->mColorDepth) ||
       !ReadIPDLParam(aReader, aActor, &aResult->mSD)) {
     return false;
   }
   if (!aResult->IsEmpty()) {
     aResult->mManager = RemoteDecoderManagerChild::GetSingleton(
         aResult->mSource == VideoBridgeSource::GpuProcess
             ? RemoteDecodeIn::GpuProcess
             : RemoteDecodeIn::RddProcess);
--- a/dom/media/ipc/RemoteImageHolder.h
+++ b/dom/media/ipc/RemoteImageHolder.h
@@ -23,16 +23,17 @@ class SurfaceDescriptor;
 class RemoteImageHolder final {
   friend struct ipc::IPDLParamTraits<RemoteImageHolder>;
 
  public:
   RemoteImageHolder();
   RemoteImageHolder(layers::IGPUVideoSurfaceManager* aManager,
                     layers::VideoBridgeSource aSource,
                     const gfx::IntSize& aSize,
+                    const gfx::ColorDepth& aColorDepth,
                     const layers::SurfaceDescriptor& aSD);
   RemoteImageHolder(RemoteImageHolder&& aOther);
   // Ensure we never copy this object.
   RemoteImageHolder(const RemoteImageHolder& aOther) = delete;
   RemoteImageHolder& operator=(const RemoteImageHolder& aOther) = delete;
   ~RemoteImageHolder();
 
   bool IsEmpty() const { return mSD.isNothing(); }
@@ -42,16 +43,17 @@ class RemoteImageHolder final {
       layers::BufferRecycleBin* aBufferRecycleBin = nullptr);
 
  private:
   already_AddRefed<layers::Image> DeserializeImage(
       layers::BufferRecycleBin* aBufferRecycleBin);
   // We need a default for the default constructor, never used in practice.
   layers::VideoBridgeSource mSource = layers::VideoBridgeSource::GpuProcess;
   gfx::IntSize mSize;
+  gfx::ColorDepth mColorDepth = gfx::ColorDepth::COLOR_8;
   Maybe<layers::SurfaceDescriptor> mSD;
   RefPtr<layers::IGPUVideoSurfaceManager> mManager;
 };
 
 template <>
 struct ipc::IPDLParamTraits<RemoteImageHolder> {
   static void Write(IPC::MessageWriter* aWriter, IProtocol* aActor,
                     RemoteImageHolder&& aParam);
--- a/dom/media/ipc/RemoteVideoDecoder.cpp
+++ b/dom/media/ipc/RemoteVideoDecoder.cpp
@@ -272,17 +272,17 @@ MediaResult RemoteVideoDecoderParent::Pr
 
     RemoteVideoData output(
         MediaDataIPDL(data->mOffset, data->mTime, data->mTimecode,
                       data->mDuration, data->mKeyframe),
         video->mDisplay,
         RemoteImageHolder(mParent,
                           XRE_IsGPUProcess() ? VideoBridgeSource::GpuProcess
                                              : VideoBridgeSource::RddProcess,
-                          size, sd),
+                          size, video->mImage->GetColorDepth(), sd),
         video->mFrameID);
 
     array.AppendElement(std::move(output));
   }
 
   aDecodedData = MakeRefPtr<ArrayOfRemoteVideoData>(std::move(array));
 
   return NS_OK;
--- a/dom/media/test/browser/browser.ini
+++ b/dom/media/test/browser/browser.ini
@@ -11,14 +11,15 @@ support-files =
   ../eme_standalone.js
   ../gizmo.mp4
   ../gizmo.webm
   ../sintel-short-clearkey-subsample-encrypted-video.webm
   ../small-shot.flac
   ../small-shot.m4a
   ../small-shot.mp3
   ../small-shot.ogg
+  ../TestPatternHDR.mp4
 
 [browser_encrypted_play_time_telemetry.js]
 skip-if =
   apple_silicon # Disabled due to bleedover with other tests when run in regular suites; passes in "failures" jobs
 [browser_tab_visibility_and_play_time.js]
 [browser_telemetry_video_hardware_decoding_support.js]
--- a/dom/media/test/browser/browser_tab_visibility_and_play_time.js
+++ b/dom/media/test/browser/browser_tab_visibility_and_play_time.js
@@ -1,38 +1,48 @@
 /**
  * This test is used to ensure that invisible play time would be accumulated
- * when tab is in background. However, this test won't directly check the
- * reported telemetry result, because we can't check the snapshot histogram in
- * the content process.
+ * when tab is in background. It also checks the HDR video accumulation time.
+ * However, this test won't directly check the reported telemetry result,
+ * because we can't check the snapshot histogram in the content process.
  * The actual probe checking happens in `test_accumulated_play_time.html`.
  */
 "use strict";
 
 const PAGE_URL =
   "https://example.com/browser/dom/media/test/browser/file_media.html";
 
+// This HDR test will only pass on platforms that accurately report color
+// depth in their VideoInfo structures. Presently, that is only true for
+// macOS.
+const { AppConstants } = ChromeUtils.import(
+  "resource://gre/modules/AppConstants.jsm"
+);
+const reportsColorDepthFromVideoData = AppConstants.platform == "macosx";
+
 add_task(async function testChangingTabVisibilityAffectsInvisiblePlayTime() {
   const originalTab = gBrowser.selectedTab;
   const mediaTab = await openMediaTab(PAGE_URL);
 
   info(`measuring play time when tab is in foreground`);
   await startMedia({
     mediaTab,
     shouldAccumulateTime: true,
     shouldAccumulateInvisibleTime: false,
+    shouldAccumulateHDRTime: reportsColorDepthFromVideoData,
   });
   await pauseMedia(mediaTab);
 
   info(`measuring play time when tab is in background`);
   await BrowserTestUtils.switchTab(window.gBrowser, originalTab);
   await startMedia({
     mediaTab,
     shouldAccumulateTime: true,
     shouldAccumulateInvisibleTime: true,
+    shouldAccumulateHDRTime: reportsColorDepthFromVideoData,
   });
   await pauseMedia(mediaTab);
 
   BrowserTestUtils.removeTab(mediaTab);
 });
 
 /**
  * Following are helper functions.
@@ -90,21 +100,26 @@ async function openMediaTab(url) {
   });
   return tab;
 }
 
 function startMedia({
   mediaTab,
   shouldAccumulateTime,
   shouldAccumulateInvisibleTime,
+  shouldAccumulateHDRTime,
 }) {
   return SpecialPowers.spawn(
     mediaTab.linkedBrowser,
-    [shouldAccumulateTime, shouldAccumulateInvisibleTime],
-    async (accumulateTime, accumulateInvisibleTime) => {
+    [
+      shouldAccumulateTime,
+      shouldAccumulateInvisibleTime,
+      shouldAccumulateHDRTime,
+    ],
+    async (accumulateTime, accumulateInvisibleTime, accumulateHDRTime) => {
       const video = content.document.getElementById("video");
       ok(
         await video.play().then(
           () => true,
           () => false
         ),
         "video started playing"
       );
@@ -126,22 +141,78 @@ function startMedia({
           "invisiblePlayTime"
         );
       } else {
         await content.assertValueKeptUnchanged(
           videoChrome,
           "invisiblePlayTime"
         );
       }
+
+      const videoHDR = content.document.getElementById("videoHDR");
+
+      // HDR test video might not decode on all platforms, so catch
+      // the play() command and exit early in such a case. Failure to
+      // decode might manifest as a timeout, so add a rejection race
+      // to catch that.
+      let didDecode = true;
+      const playPromise = videoHDR.play().then(
+        () => true,
+        () => false
+      );
+      /* eslint-disable mozilla/no-arbitrary-setTimeout */
+      const tooSlowPromise = new Promise(resolve =>
+        setTimeout(() => {
+          info("videoHDR timed out.");
+          didDecode = false;
+          resolve(false);
+        }, 1000)
+      );
+      /* eslint-enable mozilla/no-arbitrary-setTimeout */
+
+      let didPlay = await Promise.race([playPromise, tooSlowPromise]).catch(
+        err => {
+          info("videoHDR failed to decode with error: " + err.message);
+          didDecode = false;
+          return false;
+        }
+      );
+
+      if (!didDecode) {
+        return;
+      }
+
+      ok(didPlay, "videoHDR started playing");
+      const videoHDRChrome = SpecialPowers.wrap(videoHDR);
+      if (accumulateHDRTime) {
+        await content.assertValueConstantlyIncreases(
+          videoHDRChrome,
+          "totalVideoHDRPlayTime"
+        );
+      } else {
+        await content.assertValueKeptUnchanged(
+          videoHDRChrome,
+          "totalVideoHDRPlayTime"
+        );
+      }
     }
   );
 }
 
 function pauseMedia(tab) {
   return SpecialPowers.spawn(tab.linkedBrowser, [], async _ => {
     const video = content.document.getElementById("video");
     video.pause();
     ok(true, "video paused");
     const videoChrome = SpecialPowers.wrap(video);
     await content.assertValueKeptUnchanged(videoChrome, "totalVideoPlayTime");
     await content.assertValueKeptUnchanged(videoChrome, "invisiblePlayTime");
+
+    const videoHDR = content.document.getElementById("videoHDR");
+    videoHDR.pause();
+    ok(true, "videoHDR paused");
+    const videoHDRChrome = SpecialPowers.wrap(videoHDR);
+    await content.assertValueKeptUnchanged(
+      videoHDRChrome,
+      "totalVideoHDRPlayTime"
+    );
   });
 }
--- a/dom/media/test/browser/file_media.html
+++ b/dom/media/test/browser/file_media.html
@@ -1,9 +1,10 @@
 <!DOCTYPE html>
 <html>
 <head>
 <title>Non-Autoplay page</title>
 </head>
 <body>
 <video id="video" src="gizmo.mp4" loop></video>
+<video id="videoHDR" src="TestPatternHDR.mp4" loop></video>
 </body>
 </html>
--- a/dom/webidl/HTMLMediaElement.webidl
+++ b/dom/webidl/HTMLMediaElement.webidl
@@ -231,16 +231,19 @@ partial interface HTMLMediaElement {
 
   [ChromeOnly]
   readonly attribute boolean isVideoDecodingSuspended;
 
   [ChromeOnly]
   readonly attribute double totalVideoPlayTime;
 
   [ChromeOnly]
+  readonly attribute double totalVideoHDRPlayTime;
+
+  [ChromeOnly]
   readonly attribute double visiblePlayTime;
 
   [ChromeOnly]
   readonly attribute double invisiblePlayTime;
 
   [ChromeOnly]
   readonly attribute double videoDecodeSuspendedTime;
 
--- a/gfx/layers/GPUVideoImage.h
+++ b/gfx/layers/GPUVideoImage.h
@@ -33,34 +33,39 @@ class IGPUVideoSurfaceManager {
 };
 
 // Represents an animated Image that is known to the GPU process.
 class GPUVideoImage final : public Image {
   friend class gl::GLBlitHelper;
 
  public:
   GPUVideoImage(IGPUVideoSurfaceManager* aManager,
-                const SurfaceDescriptorGPUVideo& aSD, const gfx::IntSize& aSize)
-      : Image(nullptr, ImageFormat::GPU_VIDEO), mSize(aSize) {
+                const SurfaceDescriptorGPUVideo& aSD, const gfx::IntSize& aSize,
+                const gfx::ColorDepth& aColorDepth)
+      : Image(nullptr, ImageFormat::GPU_VIDEO),
+        mSize(aSize),
+        mColorDepth(aColorDepth) {
     // Create the TextureClient immediately since the GPUVideoTextureData
     // is responsible for deallocating the SurfaceDescriptor.
     //
     // Use the RECYCLE texture flag, since it's likely that our 'real'
     // TextureData (in the decoder thread of the GPU process) is using
     // it too, and we want to make sure we don't send the delete message
     // until we've stopped being used on the compositor.
     mTextureClient = TextureClient::CreateWithData(
         new GPUVideoTextureData(aManager, aSD, aSize), TextureFlags::RECYCLE,
         ImageBridgeChild::GetSingleton().get());
   }
 
   virtual ~GPUVideoImage() = default;
 
   gfx::IntSize GetSize() const override { return mSize; }
 
+  gfx::ColorDepth GetColorDepth() const override { return mColorDepth; }
+
   Maybe<SurfaceDescriptor> GetDesc() override {
     return GetDescFromTexClient(mTextureClient);
   }
 
  private:
   GPUVideoTextureData* GetData() const {
     if (!mTextureClient) {
       return nullptr;
@@ -84,15 +89,16 @@ class GPUVideoImage final : public Image
   TextureClient* GetTextureClient(KnowsCompositor* aKnowsCompositor) override {
     MOZ_ASSERT(aKnowsCompositor == ImageBridgeChild::GetSingleton(),
                "Must only use GPUVideo on ImageBridge");
     return mTextureClient;
   }
 
  private:
   gfx::IntSize mSize;
+  gfx::ColorDepth mColorDepth;
   RefPtr<TextureClient> mTextureClient;
 };
 
 }  // namespace layers
 }  // namespace mozilla
 
 #endif  // GFX_GPU_VIDEO_IMAGE_H