Bug 1321221 - Delay getDisplayMedia() promise until first frame arrives, to pass wpt. r=ng
authorJan-Ivar Bruaroey <jib@mozilla.com>
Fri, 28 Dec 2018 03:13:11 +0000
changeset 452036 654a36f62f5a1a3ccf8cb3e8d600a7e48413bdc9
parent 452035 680b0fa725809c5d20b7d63f28aa4ba9d3f4186a
child 452037 cb614edd086310a3317e7cb2d08c9e166a6fce14
push id35281
push userbtara@mozilla.com
push dateFri, 28 Dec 2018 21:47:52 +0000
treeherdermozilla-central@d57dde190f67 [default view] [failures only]
perfherder[talos] [build metrics] [platform microbench] (compared to previous push)
reviewersng
bugs1321221
milestone66.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1321221 - Delay getDisplayMedia() promise until first frame arrives, to pass wpt. r=ng Differential Revision: https://phabricator.services.mozilla.com/D15283
dom/media/MediaManager.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
dom/media/webrtc/MediaEngineRemoteVideoSource.h
dom/media/webrtc/MediaEngineSource.h
testing/web-platform/meta/screen-capture/getdisplaymedia.https.html.ini
--- a/dom/media/MediaManager.cpp
+++ b/dom/media/MediaManager.cpp
@@ -1088,26 +1088,28 @@ class GetUserMediaStreamRunnable : publi
   }
 
   class TracksCreatedListener : public MediaStreamTrackListener {
    public:
     TracksCreatedListener(
         MediaManager* aManager,
         MozPromiseHolder<MediaManager::StreamPromise>&& aHolder,
         GetUserMediaWindowListener* aWindowListener, uint64_t aWindowID,
-        DOMMediaStream* aStream, MediaStreamTrack* aTrack)
+        DOMMediaStream* aStream, MediaStreamTrack* aTrack,
+        RefPtr<GenericNonExclusivePromise>&& aFirstFramePromise)
         : mWindowListener(aWindowListener),
           mHolder(std::move(aHolder)),
           mManager(aManager),
           mWindowID(aWindowID),
           mGraph(aTrack->GraphImpl()),
           mStream(new nsMainThreadPtrHolder<DOMMediaStream>(
               "TracksCreatedListener::mStream", aStream)),
           mTrack(new nsMainThreadPtrHolder<MediaStreamTrack>(
-              "TracksCreatedListener::mTrack", aTrack)) {}
+              "TracksCreatedListener::mTrack", aTrack)),
+          mFirstFramePromise(aFirstFramePromise) {}
 
     ~TracksCreatedListener() {
       RejectIfExists(MakeRefPtr<MediaMgrError>(MediaMgrError::Name::AbortError),
                      __func__);
     }
 
     // TODO: The need for this should be removed by an upcoming refactor.
     void RejectIfExists(RefPtr<MediaMgrError>&& aError,
@@ -1130,18 +1132,36 @@ class GetUserMediaStreamRunnable : publi
             mTrack->RemoveListener(this);
 
             if (!mManager->IsWindowListenerStillActive(mWindowListener)) {
               return;
             }
 
             // This is safe since we're on main-thread, and the windowlist can
             // only be invalidated from the main-thread (see OnNavigation)
-            LOG("Returning success for getUserMedia()");
-            mHolder.Resolve(RefPtr<DOMMediaStream>(mStream), __func__);
+            if (!mFirstFramePromise) {
+              LOG("Returning success for getUserMedia()");
+              mHolder.Resolve(RefPtr<DOMMediaStream>(mStream), __func__);
+              return;
+            }
+            LOG("Deferring getUserMedia success to arrival of 1st frame");
+            mFirstFramePromise->Then(
+                GetMainThreadSerialEventTarget(), __func__,
+                [holder = std::move(mHolder), stream = mStream](
+                    const GenericNonExclusivePromise::ResolveOrRejectValue&
+                        aValue) mutable {
+                  if (aValue.IsReject()) {
+                    holder.Reject(MakeRefPtr<MediaMgrError>(
+                                      MediaMgrError::Name::AbortError),
+                                  __func__);
+                  } else {
+                    LOG("Returning success for getUserMedia()!");
+                    holder.Resolve(RefPtr<DOMMediaStream>(stream), __func__);
+                  }
+                });
           });
       // DispatchToMainThreadStableState will make the runnable run
       // in stable state. But since the runnable runs JS we need to make a
       // double dispatch.
       mGraph->DispatchToMainThreadStableState(NS_NewRunnableFunction(
           "TracksCreatedListener::NotifyOutput Stable State Notifier",
           [graph = mGraph, r = std::move(r)]() mutable {
             graph->Dispatch(r.forget());
@@ -1162,16 +1182,17 @@ class GetUserMediaStreamRunnable : publi
     // Keep the DOMMediaStream alive until the success callback has been called,
     // otherwise we might immediately destroy the DOMMediaStream and
     // shut down the underlying MediaStream prematurely.
     // This creates a cycle which is broken when we're destroyed, i.e., either
     // when we've called the success callback and thus removed the listener from
     // the graph, or on graph shutdown.
     nsMainThreadPtrHandle<DOMMediaStream> mStream;
     nsMainThreadPtrHandle<MediaStreamTrack> mTrack;
+    RefPtr<GenericNonExclusivePromise> mFirstFramePromise;
     // Graph thread only.
     bool mDispatchedTracksCreated = false;
   };
 
   NS_IMETHOD
   Run() override {
     MOZ_ASSERT(NS_IsMainThread());
     LOG("GetUserMediaStreamRunnable::Run()");
@@ -1190,16 +1211,17 @@ class GetUserMediaStreamRunnable : publi
     MediaStreamGraph::GraphDriverType graphDriverType =
         mAudioDevice ? MediaStreamGraph::AUDIO_THREAD_DRIVER
                      : MediaStreamGraph::SYSTEM_THREAD_DRIVER;
     MediaStreamGraph* msg = MediaStreamGraph::GetInstance(
         graphDriverType, window, MediaStreamGraph::REQUEST_DEFAULT_SAMPLE_RATE);
 
     RefPtr<DOMMediaStream> domStream;
     RefPtr<SourceMediaStream> stream;
+    RefPtr<GenericNonExclusivePromise> firstFramePromise;
     // AudioCapture is a special case, here, in the sense that we're not really
     // using the audio source and the SourceMediaStream, which acts as
     // placeholders. We re-route a number of stream internaly in the MSG and mix
     // them down instead.
     if (mAudioDevice &&
         mAudioDevice->GetMediaSource() == MediaSourceEnum::AudioCapture) {
       NS_WARNING(
           "MediaCaptureWindowState doesn't handle "
@@ -1322,16 +1344,28 @@ class GetUserMediaStreamRunnable : publi
         RefPtr<MediaStreamTrackSource> videoSource =
             new LocalTrackSource(principal, videoDeviceName, mSourceListener,
                                  source, kVideoTrack, mPeerIdentity);
         MOZ_ASSERT(IsOn(mConstraints.mVideo));
         RefPtr<MediaStreamTrack> track = domStream->CreateDOMTrack(
             kVideoTrack, MediaSegment::VIDEO, videoSource,
             GetInvariant(mConstraints.mVideo));
         domStream->AddTrackInternal(track);
+        switch (source) {
+          case MediaSourceEnum::Browser:
+          case MediaSourceEnum::Screen:
+          case MediaSourceEnum::Application:
+          case MediaSourceEnum::Window:
+            // Wait for first frame for screen-sharing devices, to ensure
+            // width and height settings are available immediately, to pass wpt.
+            firstFramePromise = mVideoDevice->mSource->GetFirstFramePromise();
+            break;
+          default:
+            break;
+        }
       }
     }
 
     if (!domStream || !stream || sHasShutdown) {
       LOG("Returning error for getUserMedia() - no stream");
 
       mHolder.Reject(MakeRefPtr<MediaMgrError>(
                          MediaMgrError::Name::AbortError,
@@ -1347,17 +1381,17 @@ class GetUserMediaStreamRunnable : publi
     mWindowListener->Activate(mSourceListener, stream, mAudioDevice,
                               mVideoDevice);
 
     nsTArray<RefPtr<MediaStreamTrack>> tracks(2);
     domStream->GetTracks(tracks);
     RefPtr<MediaStreamTrack> track = tracks[0];
     auto tracksCreatedListener = MakeRefPtr<TracksCreatedListener>(
         mManager, std::move(mHolder), mWindowListener, mWindowID, domStream,
-        track);
+        track, std::move(firstFramePromise));
 
     // Dispatch to the media thread to ask it to start the sources,
     // because that can take a while.
     // Pass ownership of domStream through the lambda to the nested chrome
     // notification lambda to ensure it's kept alive until that lambda runs or
     // is discarded.
     mSourceListener->InitializeAsync()->Then(
         GetMainThreadSerialEventTarget(), __func__,
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.cpp
@@ -36,23 +36,28 @@ MediaEngineRemoteVideoSource::MediaEngin
     int aIndex, camera::CaptureEngine aCapEngine, bool aScary)
     : mCaptureIndex(aIndex),
       mCapEngine(aCapEngine),
       mScary(aScary),
       mMutex("MediaEngineRemoteVideoSource::mMutex"),
       mRescalingBufferPool(/* zero_initialize */ false,
                            /* max_number_of_buffers */ 1),
       mSettingsUpdatedByFrame(MakeAndAddRef<media::Refcountable<AtomicBool>>()),
-      mSettings(MakeAndAddRef<media::Refcountable<MediaTrackSettings>>()) {
+      mSettings(MakeAndAddRef<media::Refcountable<MediaTrackSettings>>()),
+      mFirstFramePromise(mFirstFramePromiseHolder.Ensure(__func__)) {
   mSettings->mWidth.Construct(0);
   mSettings->mHeight.Construct(0);
   mSettings->mFrameRate.Construct(0);
   Init();
 }
 
+MediaEngineRemoteVideoSource::~MediaEngineRemoteVideoSource() {
+  mFirstFramePromiseHolder.RejectIfExists(NS_ERROR_ABORT, __func__);
+}
+
 dom::MediaSourceEnum MediaEngineRemoteVideoSource::GetMediaSource() const {
   switch (mCapEngine) {
     case camera::BrowserEngine:
       return MediaSourceEnum::Browser;
     case camera::CameraEngine:
       return MediaSourceEnum::Camera;
     case camera::ScreenEngine:
       return MediaSourceEnum::Screen;
@@ -632,21 +637,27 @@ int MediaEngineRemoteVideoSource::Delive
       frame_num++, aProps.width(), aProps.height(), dst_width, dst_height,
       aProps.rotation(), aProps.timeStamp(), aProps.ntpTimeMs(),
       aProps.renderTimeMs());
 #endif
 
   if (mImageSize.width != dst_width || mImageSize.height != dst_height) {
     NS_DispatchToMainThread(NS_NewRunnableFunction(
         "MediaEngineRemoteVideoSource::FrameSizeChange",
-        [settings = mSettings, updated = mSettingsUpdatedByFrame, dst_width,
+        [settings = mSettings, updated = mSettingsUpdatedByFrame,
+         holder = std::move(mFirstFramePromiseHolder), dst_width,
          dst_height]() mutable {
           settings->mWidth.Value() = dst_width;
           settings->mHeight.Value() = dst_height;
           updated->mValue = true;
+          // Since mImageSize was initialized to (0,0), we end up here on the
+          // arrival of the first frame. We resolve the promise representing
+          // arrival of first frame, after correct settings values have been
+          // made available (Resolve() is idempotent if already resolved).
+          holder.ResolveIfExists(true, __func__);
         }));
   }
 
   {
     MutexAutoLock lock(mMutex);
     // implicitly releases last image
     mImage = image.forget();
     mImageSize = mImage->GetSize();
--- a/dom/media/webrtc/MediaEngineRemoteVideoSource.h
+++ b/dom/media/webrtc/MediaEngineRemoteVideoSource.h
@@ -58,17 +58,17 @@ namespace mozilla {
 // constraints algorithm.
 enum DistanceCalculation { kFitness, kFeasibility };
 
 /**
  * The WebRTC implementation of the MediaEngine interface.
  */
 class MediaEngineRemoteVideoSource : public MediaEngineSource,
                                      public camera::FrameRelay {
-  ~MediaEngineRemoteVideoSource() = default;
+  ~MediaEngineRemoteVideoSource();
 
   struct CapabilityCandidate {
     explicit CapabilityCandidate(webrtc::CaptureCapability&& aCapability,
                                  uint32_t aDistance = 0)
         : mCapability(std::forward<webrtc::CaptureCapability>(aCapability)),
           mDistance(aDistance) {}
 
     const webrtc::CaptureCapability mCapability;
@@ -150,16 +150,20 @@ class MediaEngineRemoteVideoSource : pub
   nsString GetName() const override;
   void SetName(nsString aName);
 
   nsCString GetUUID() const override;
   void SetUUID(const char* aUUID);
 
   bool GetScary() const override { return mScary; }
 
+  RefPtr<GenericNonExclusivePromise> GetFirstFramePromise() const override {
+    return mFirstFramePromise;
+  }
+
  private:
   // Initialize the needed Video engine interfaces.
   void Init();
 
   /**
    * Returns the number of capabilities for the underlying device.
    *
    * Guaranteed to return at least one capability.
@@ -224,16 +228,18 @@ class MediaEngineRemoteVideoSource : pub
   // TODO: This can be removed in bug 1453269.
   const RefPtr<media::Refcountable<AtomicBool>> mSettingsUpdatedByFrame;
 
   // The current settings of this source.
   // Note that these may be different from the settings of the underlying device
   // since we scale frames to avoid fingerprinting.
   // Members are main thread only.
   const RefPtr<media::Refcountable<dom::MediaTrackSettings>> mSettings;
+  MozPromiseHolder<GenericNonExclusivePromise> mFirstFramePromiseHolder;
+  RefPtr<GenericNonExclusivePromise> mFirstFramePromise;
 
   // The capability currently chosen by constraints of the user of this source.
   // Set under mMutex on the owning thread. Accessed under one of the two.
   webrtc::CaptureCapability mCapability;
 
   /**
    * Capabilities that we choose between when applying constraints.
    *
--- a/dom/media/webrtc/MediaEngineSource.h
+++ b/dom/media/webrtc/MediaEngineSource.h
@@ -110,16 +110,24 @@ class MediaEngineSourceInterface {
   virtual dom::MediaSourceEnum GetMediaSource() const = 0;
 
   /**
    * Override w/true if source does end-run around cross origin restrictions.
    */
   virtual bool GetScary() const = 0;
 
   /**
+   * Override w/a promise if source has frames, in order to potentially allow
+   * deferring success of source acquisition until first frame has arrived.
+   */
+  virtual RefPtr<GenericNonExclusivePromise> GetFirstFramePromise() const {
+    return nullptr;
+  }
+
+  /**
    * Called by MediaEngine to allocate a handle to this source.
    *
    * If this is the first registered AllocationHandle, the underlying device
    * will be allocated.
    *
    * Note that the AllocationHandle may be nullptr at the discretion of the
    * MediaEngineSource implementation. Any user is to treat it as an opaque
    * object.
--- a/testing/web-platform/meta/screen-capture/getdisplaymedia.https.html.ini
+++ b/testing/web-platform/meta/screen-capture/getdisplaymedia.https.html.ini
@@ -32,44 +32,44 @@
   [getDisplayMedia({"audio":true}) must succeed with video maybe audio]
     expected:
       if os == "android": FAIL
       PASS
 
   [getDisplayMedia({"video":{"width":{"max":360}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"height":{"max":240}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"width":{"max":360},"height":{"max":240}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"frameRate":{"max":4}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"frameRate":{"max":4},"width":{"max":360}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"frameRate":{"max":4},"height":{"max":240}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia({"video":{"frameRate":{"max":4},"width":{"max":360},"height":{"max":240}}}) must be constrained]
     expected:
       if os == "android": FAIL
-      FAIL
+      PASS
 
   [getDisplayMedia() with getSettings]
     expected:
       if os == "android": FAIL
       FAIL