Bug 1628246 [wpt PR 22779] - Add a test for ReplaceTrack that verifies video track content., a=testonly
author: Harald Alvestrand <hta@chromium.org>
Wed, 13 May 2020 03:55:59 +0000
changeset 531000 454ddc192d2fa2849ecedb7e5f5bfc85fdf079dc
parent 530999 c05f86d05198e4daec9e586b6fa2d7f43db6d4a1
child 531001 cb45dc3521454bf42d8f1bc149bcbf4279bb9a16
push id: 37435
push user: apavel@mozilla.com
push date: Wed, 20 May 2020 15:28:23 +0000
treeherder: mozilla-central@5415da14ec9a [default view] [failures only]
perfherder: [talos] [build metrics] [platform microbench] (compared to previous push)
reviewers: testonly
bugs: 1628246, 22779, 2141913, 758062
milestone: 78.0a1
first release with
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
last release without
nightly linux32
nightly linux64
nightly mac
nightly win32
nightly win64
Bug 1628246 [wpt PR 22779] - Add a test for ReplaceTrack that verifies video track content., a=testonly Automatic update from web-platform-tests Add a test for ReplaceTrack that verifies video track content. This verifies that replaceTrack() actually replaces the track. Adds a new helper function to add a "signal" square into a noise track. Bug: none Change-Id: Ia90535c984d65adcdf2c63a5700b08d7c1e384c0 Reviewed-on: https://chromium-review.googlesource.com/c/chromium/src/+/2141913 Commit-Queue: Harald Alvestrand <hta@chromium.org> Reviewed-by: Guido Urdaneta <guidou@chromium.org> Cr-Commit-Position: refs/heads/master@{#758062} -- wpt-commits: 0de19dc97fb8a46fc83c95153da7bbffb7018c54 wpt-pr: 22779
testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
--- a/testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
+++ b/testing/web-platform/tests/webrtc/RTCPeerConnection-helper.js
@@ -438,62 +438,94 @@ const trackFactories = {
     const ctx = trackFactories.audioContext = trackFactories.audioContext ||
       new AudioContext();
     const oscillator = ctx.createOscillator();
     const dst = oscillator.connect(ctx.createMediaStreamDestination());
     oscillator.start();
     return dst.stream.getAudioTracks()[0];
   },
 
-  video({width = 640, height = 480} = {}) {
+  video({width = 640, height = 480, signal = null} = {}) {
     const canvas = Object.assign(
       document.createElement("canvas"), {width, height}
     );
     const ctx = canvas.getContext('2d');
     const stream = canvas.captureStream();
 
     let count = 0;
     setInterval(() => {
       ctx.fillStyle = `rgb(${count%255}, ${count*count%255}, ${count%255})`;
       count += 1;
-
       ctx.fillRect(0, 0, width, height);
+      // If signal is set, add a constant-color box to the video frame.
+      if (signal !== null) {
+        ctx.fillStyle = `rgb(${signal}, ${signal}, ${signal})`;
+        ctx.fillRect(10, 10, 20, 20);
+        let pixel = ctx.getImageData(15, 15, 1, 1);
+      }
     }, 100);
 
     if (document.body) {
       document.body.appendChild(canvas);
     } else {
       document.addEventListener('DOMContentLoaded', () => {
         document.body.appendChild(canvas);
       });
     }
 
     return stream.getVideoTracks()[0];
   }
 };
 
+// Get the signal from a video element inserted by createNoiseStream
+function getVideoSignal(v) {
+  if (v.videoWidth < 21 || v.videoHeight < 21) {
+    return null;
+  }
+  const canvas = new OffscreenCanvas(v.videoWidth, v.videoHeight);
+  let context = canvas.getContext('2d');
+  context.drawImage(v, 0, 0, v.videoWidth, v.videoHeight);
+  // Extract pixel value at position 20, 20
+  let pixel = context.getImageData(20, 20, 1, 1);
+  return (pixel.data[0] + pixel.data[1] + pixel.data[2]) / 3;
+}
+
+function detectSignal(t, v, value) {
+  return new Promise((resolve) => {
+    let check = () => {
+      const signal = getVideoSignal(v);
+      if (signal !== null && signal < value + 1 && signal > value - 1) {
+        resolve();
+      } else {
+        t.step_timeout(check, 100);
+      }
+    }
+    check();
+  });
+}
+
 // Generate a MediaStream bearing the specified tracks.
 //
 // @param {object} [caps]
 // @param {boolean} [caps.audio] - flag indicating whether the generated stream
 //                                 should include an audio track
 // @param {boolean} [caps.video] - flag indicating whether the generated stream
-//                                 should include a video track
+//                                 should include a video track, or parameters for video
 async function getNoiseStream(caps = {}) {
   if (!trackFactories.canCreate(caps)) {
     return navigator.mediaDevices.getUserMedia(caps);
   }
   const tracks = [];
 
   if (caps.audio) {
     tracks.push(trackFactories.audio());
   }
 
   if (caps.video) {
-    tracks.push(trackFactories.video());
+    tracks.push(trackFactories.video(caps.video));
   }
 
   return new MediaStream(tracks);
 }
 
 // Obtain a MediaStreamTrack of kind using procedurally-generated streams (and
 // falling back to `getUserMedia` when the user agent cannot generate the
 // requested streams).
--- a/testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
+++ b/testing/web-platform/tests/webrtc/RTCRtpSender-replaceTrack.https.html
@@ -267,10 +267,40 @@
                 InvalidModificationError and abort these steps.
 
             2.  If withTrack is null, have the sender stop sending, without
                 negotiating. Otherwise, have the sender switch seamlessly to
                 transmitting withTrack instead of the sender's existing track,
                 without negotiating.
             3.  Queue a task that runs the following steps:
               1.  If connection's [[isClosed]] slot is true, abort these steps.
-   */
+  */
+
+promise_test(async t => {
+  const v = document.createElement('video');
+  v.autoplay = true;
+  const pc1 = new RTCPeerConnection();
+  t.add_cleanup(() => pc1.close());
+  const pc2 = new RTCPeerConnection();
+  t.add_cleanup(() => pc2.close());
+  const stream1 = await getNoiseStream({video: {signal: 20}});
+  t.add_cleanup(() => stream1.getTracks().forEach(track => track.stop()));
+  const [track1] = stream1.getTracks();
+  const stream2 = await getNoiseStream({video: {signal: 250}});
+  t.add_cleanup(() => stream2.getTracks().forEach(track => track.stop()));
+  const [track2] = stream2.getTracks();
+  const sender = pc1.addTrack(track1);
+  pc2.ontrack = (e) => {
+    v.srcObject = new MediaStream([e.track]);
+  };
+  const metadataToBeLoaded = new Promise((resolve) => {
+    v.addEventListener('loadedmetadata', () => {
+      resolve();
+    });
+  });
+  exchangeIceCandidates(pc1, pc2);
+  doSignalingHandshake(pc1, pc2);
+  await metadataToBeLoaded;
+  await detectSignal(t, v, 20);
+  await sender.replaceTrack(track2);
+  await detectSignal(t, v, 250);
+}, 'ReplaceTrack transmits the new track not the old track');
 </script>