Bug 1628246 [wpt PR 22779] - Add a test for ReplaceTrack that verifies video track content., a=testonly
authorHarald Alvestrand <hta@chromium.org>
Wed, 13 May 2020 03:55:59 +0000
changeset 529951 7bf043c2f796af5e6e523d27ed55450bf9ac41e5
parent 529950 c54d3d641172607595b7425d7525f154729de353
child 529952 a2740075b6ea08c94f1bf9aea75ef89b328b5721
push id 116003
push user wptsync@mozilla.com
push date Thu, 14 May 2020 19:04:08 +0000
treeherder autoland@7baa72e0d62c [default view] [failures only]
perfherder [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers testonly
bugs 1628246, 22779, 2141913, 758062
milestone 78.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1628246 [wpt PR 22779] - Add a test for ReplaceTrack that verifies video track content., a=testonly Automatic update from web-platform-tests Add a test for ReplaceTrack that verifies video track content. This verifies that replaceTrack() actually replaces the track. Adds a new helper function to add a "signal" square into a noise track. Bug: none Change-Id: Ia90535c984d65adcdf2c63a5700b08d7c1e384c0 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2141913 Commit-Queue: Harald Alvestrand <hta@chromium.org> Reviewed-by: Guido Urdaneta <guidou@chromium.org> Cr-Commit-Position: refs/heads/master@{#758062} -- wpt-commits: 0de19dc97fb8a46fc83c95153da7bbffb7018c54 wpt-pr: 22779
testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
--- a/testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
+++ b/testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
@@ -438,62 +438,94 @@ const trackFactories = {
     const ctx = trackFactories.audioContext = trackFactories.audioContext ||
       new AudioContext();
     const oscillator = ctx.createOscillator();
     const dst = oscillator.connect(ctx.createMediaStreamDestination());
     oscillator.start();
     return dst.stream.getAudioTracks()[0];
   },
 
-  video({width = 640, height = 480} = {}) {
+  video({width = 640, height = 480, signal = null} = {}) {
     const canvas = Object.assign(
       document.createElement("canvas"), {width, height}
     );
     const ctx = canvas.getContext('2d');
     const stream = canvas.captureStream();
 
     let count = 0;
     setInterval(() => {
       ctx.fillStyle = `rgb(${count%255}, ${count*count%255}, ${count%255})`;
       count += 1;
-
       ctx.fillRect(0, 0, width, height);
+      // If signal is set, add a constant-color box to the video frame.
+      if (signal !== null) {
+        ctx.fillStyle = `rgb(${signal}, ${signal}, ${signal})`;
+        ctx.fillRect(10, 10, 20, 20);
+        let pixel = ctx.getImageData(15, 15, 1, 1);
+      }
     }, 100);
 
     if (document.body) {
       document.body.appendChild(canvas);
     } else {
       document.addEventListener('DOMContentLoaded', () => {
         document.body.appendChild(canvas);
       });
     }
 
     return stream.getVideoTracks()[0];
   }
 };
 
+// Get the signal from a video element inserted by createNoiseStream
// Get the signal from a video element inserted by createNoiseStream.
// Samples the constant-color box painted by trackFactories.video() and
// returns the average of its R/G/B components at pixel (20, 20), or null
// if the video is not yet large enough to contain that sample point.
function getVideoSignal(v) {
  if (v.videoWidth < 21 || v.videoHeight < 21) {
    return null;
  }
  const canvas = new OffscreenCanvas(v.videoWidth, v.videoHeight);
  const context = canvas.getContext('2d');
  context.drawImage(v, 0, 0, v.videoWidth, v.videoHeight);
  // Extract pixel value at position 20, 20
  const pixel = context.getImageData(20, 20, 1, 1);
  return (pixel.data[0] + pixel.data[1] + pixel.data[2]) / 3;
}
+
// Resolve once video element |v| renders a signal box whose sampled value
// is within +/-1 of |value| (lossy video encoding may shift it slightly).
// Polls every 100 ms via |t|.step_timeout so pending timeouts are cleaned
// up when the testharness test |t| completes. Never rejects; callers rely
// on the harness timeout if the signal never appears.
function detectSignal(t, v, value) {
  return new Promise((resolve) => {
    const check = () => {
      const signal = getVideoSignal(v);
      if (signal !== null && signal < value + 1 && signal > value - 1) {
        resolve();
      } else {
        t.step_timeout(check, 100);
      }
    };
    check();
  });
}
+
 // Generate a MediaStream bearing the specified tracks.
 //
 // @param {object} [caps]
 // @param {boolean} [caps.audio] - flag indicating whether the generated stream
 //                                 should include an audio track
 // @param {boolean} [caps.video] - flag indicating whether the generated stream
-//                                 should include a video track
+//                                 should include a video track, or parameters for video
 async function getNoiseStream(caps = {}) {
   if (!trackFactories.canCreate(caps)) {
     return navigator.mediaDevices.getUserMedia(caps);
   }
   const tracks = [];
 
   if (caps.audio) {
     tracks.push(trackFactories.audio());
   }
 
   if (caps.video) {
-    tracks.push(trackFactories.video());
+    tracks.push(trackFactories.video(caps.video));
   }
 
   return new MediaStream(tracks);
 }
 
 // Obtain a MediaStreamTrack of kind using procedurally-generated streams (and
 // falling back to `getUserMedia` when the user agent cannot generate the
 // requested streams).
--- a/testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
+++ b/testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
@@ -267,10 +267,40 @@
                 InvalidModificationError and abort these steps.
 
             2.  If withTrack is null, have the sender stop sending, without
                 negotiating. Otherwise, have the sender switch seamlessly to
                 transmitting withTrack instead of the sender's existing track,
                 without negotiating.
             3.  Queue a task that runs the following steps:
               1.  If connection's [[isClosed]] slot is true, abort these steps.
-   */
+  */
+
promise_test(async t => {
  const v = document.createElement('video');
  v.autoplay = true;
  const pc1 = new RTCPeerConnection();
  t.add_cleanup(() => pc1.close());
  const pc2 = new RTCPeerConnection();
  t.add_cleanup(() => pc2.close());
  // Two noise streams with distinguishable "signal" boxes (values 20 and
  // 250); the signal observed on the receive side tells us which track is
  // actually being transmitted.
  const stream1 = await getNoiseStream({video: {signal: 20}});
  t.add_cleanup(() => stream1.getTracks().forEach(track => track.stop()));
  const [track1] = stream1.getTracks();
  const stream2 = await getNoiseStream({video: {signal: 250}});
  t.add_cleanup(() => stream2.getTracks().forEach(track => track.stop()));
  const [track2] = stream2.getTracks();
  const sender = pc1.addTrack(track1);
  pc2.ontrack = (e) => {
    v.srcObject = new MediaStream([e.track]);
  };
  // Register before signaling starts so the event cannot be missed; `once`
  // drops the listener after it fires.
  const metadataToBeLoaded = new Promise((resolve) => {
    v.addEventListener('loadedmetadata', resolve, {once: true});
  });
  exchangeIceCandidates(pc1, pc2);
  // Await the handshake so a signaling failure rejects this test instead
  // of surfacing as an unhandled promise rejection.
  await doSignalingHandshake(pc1, pc2);
  await metadataToBeLoaded;
  await detectSignal(t, v, 20);
  await sender.replaceTrack(track2);
  await detectSignal(t, v, 250);
}, 'ReplaceTrack transmits the new track not the old track');
 </script>