MediaStreamTrackGenerator-video.https.html (11416B)
<!DOCTYPE html>
<html>
<head>
<title>MediaStream Insertable Streams - Video</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/webrtc/RTCPeerConnection-helper.js"></script>
</head>
<body>
<p class="instructions">When prompted, use the accept button to give permission to use your audio and video devices.</p>
<h1 class="instructions">Description</h1>
<p class="instructions">This test checks that generating video MediaStreamTracks works as expected.</p>
<script>

// Solid colour (RGBA) and dimensions used for every locally generated frame.
const pixelColour = [50, 100, 150, 255];
const height = 240;
const width = 320;

// Returns a width x height VideoFrame filled with |pixelColour| and the
// given |timestamp|. Alpha is discarded since the colour is fully opaque.
function makeVideoFrame(timestamp) {
  const canvas = new OffscreenCanvas(width, height);

  const ctx = canvas.getContext('2d', {alpha: false});
  ctx.fillStyle = `rgba(${pixelColour.join()})`;
  ctx.fillRect(0, 0, width, height);

  return new VideoFrame(canvas, {timestamp, alpha: 'discard'});
}

// Captures a single VideoFrame from a noise-generating camera track
// (getNoiseStream comes from RTCPeerConnection-helper.js), then stops the
// track so the device is released.
async function getVideoFrame() {
  const stream = await getNoiseStream({video: true});
  const input_track = stream.getTracks()[0];
  const processor = new MediaStreamTrackProcessor(input_track);
  const reader = processor.readable.getReader();
  const result = await reader.read();
  input_track.stop();
  return result.value;
}

// Asserts that every channel of the RGBA sample |bytes| is within |epsilon|
// of |expected|. Each channel check runs in its own t.step so all mismatches
// are reported.
function assertPixel(t, bytes, expected, epsilon = 5) {
  for (let i = 0; i < bytes.length; i++) {
    t.step(() => {
      assert_less_than(Math.abs(bytes[i] - expected[i]), epsilon, "Mismatched pixel");
    });
  }
}

// Sends |track| over a loopback RTCPeerConnection pair and attaches the
// remote end's track to the media element |output|. Both connections and the
// track are cleaned up when the test finishes.
async function initiateSingleTrackCall(t, track, output) {
  const caller = new RTCPeerConnection();
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());
  caller.addTrack(track);
  t.add_cleanup(() => track.stop());

  exchangeIceCandidates(caller, callee);
  // Wait for the first track.
  const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
  output.srcObject = new MediaStream([e.track]);
  // Exchange answer.
  await exchangeAnswer(caller, callee);
  await waitForConnectionStateChange(callee, ['connected']);
}

promise_test(async t => {
  const videoFrame = await getVideoFrame();
  const originalWidth = videoFrame.displayWidth;
  const originalHeight = videoFrame.displayHeight;
  const originalTimestamp = videoFrame.timestamp;
  const generator = new MediaStreamTrackGenerator({kind: 'video'});
  t.add_cleanup(() => generator.stop());

  // Use a MediaStreamTrackProcessor as a sink for |generator| to verify
  // that |processor| actually forwards the frames written to its writable
  // field.
  const processor = new MediaStreamTrackProcessor(generator);
  const reader = processor.readable.getReader();

  // Start the read before writing so the sink is already waiting for the
  // frame, then await both so assertion failures reject this test's promise.
  const readPromise = reader.read();
  await generator.writable.getWriter().write(videoFrame);
  const result = await readPromise;

  assert_equals(result.value.displayWidth, originalWidth);
  assert_equals(result.value.displayHeight, originalHeight);
  assert_equals(result.value.timestamp, originalTimestamp);
}, 'Tests that MediaStreamTrackGenerator forwards frames to sink');

promise_test(async t => {
  const videoFrame = makeVideoFrame(1);
  const originalWidth = videoFrame.displayWidth;
  const originalHeight = videoFrame.displayHeight;
  const generator = new MediaStreamTrackGenerator({ kind: 'video' });
  t.add_cleanup(() => generator.stop());

  const video = document.createElement("video");
  video.autoplay = true;
  video.width = 320;
  video.height = 240;
  video.srcObject = new MediaStream([generator]);
  video.play();

  // Wait for the video element to be connected to the generator and
  // generate the frame.
  video.onloadstart = () => generator.writable.getWriter().write(videoFrame);

  return new Promise((resolve) => {
    video.ontimeupdate = t.step_func(() => {
      const canvas = document.createElement("canvas");
      canvas.width = originalWidth;
      canvas.height = originalHeight;
      const context = canvas.getContext('2d');
      context.drawImage(video, 0, 0);
      // Pick a pixel in the centre of the video and check that it has the
      // colour of the frame provided.
      const pixel = context.getImageData(videoFrame.displayWidth / 2, videoFrame.displayHeight / 2, 1, 1);
      assertPixel(t, pixel.data, pixelColour);
      resolve();
    });
  });
}, 'Tests that frames are actually rendered correctly in a stream used for a video element.');

promise_test(async t => {
  const generator = new MediaStreamTrackGenerator({kind: 'video'});
  t.add_cleanup(() => generator.stop());

  // Write frames for the duration of the test.
  const writer = generator.writable.getWriter();
  let timestamp = 0;
  const intervalId = setInterval(
      t.step_func(async () => {
        if (generator.readyState === 'live') {
          timestamp++;
          await writer.write(makeVideoFrame(timestamp));
        }
      }),
      40);
  t.add_cleanup(() => clearInterval(intervalId));

  const video = document.createElement('video');
  video.autoplay = true;
  video.width = width;
  video.height = height;
  video.muted = true;

  await initiateSingleTrackCall(t, generator, video);

  return new Promise(resolve => {
    video.ontimeupdate = t.step_func(() => {
      const canvas = document.createElement('canvas');
      canvas.width = width;
      canvas.height = height;
      const context = canvas.getContext('2d');
      context.drawImage(video, 0, 0);
      // Pick a pixel in the centre of the video and check that it has the
      // colour of the frame provided.
      const pixel = context.getImageData(width / 2, height / 2, 1, 1);
      // Encoding/decoding can add noise, so increase the threshhold to 8.
      assertPixel(t, pixel.data, pixelColour, 8);
      resolve();
    });
  });
}, 'Tests that frames are actually rendered correctly in a stream sent over a peer connection.');

promise_test(async t => {
  const generator = new MediaStreamTrackGenerator({kind: 'video'});
  t.add_cleanup(() => generator.stop());

  const inputCanvas = new OffscreenCanvas(width, height);

  const inputContext = inputCanvas.getContext('2d', {alpha: false});
  // Draw four quadrants in distinct colours so downscaling artifacts at the
  // quadrant borders can be detected.
  const colorUL = [255, 0, 0, 255];
  inputContext.fillStyle = `rgba(${colorUL.join()})`;
  inputContext.fillRect(0, 0, width / 2, height / 2);
  const colorUR = [255, 255, 0, 255];
  inputContext.fillStyle = `rgba(${colorUR.join()})`;
  inputContext.fillRect(width / 2, 0, width / 2, height / 2);
  const colorLL = [0, 255, 0, 255];
  inputContext.fillStyle = `rgba(${colorLL.join()})`;
  inputContext.fillRect(0, height / 2, width / 2, height / 2);
  const colorLR = [0, 255, 255, 255];
  inputContext.fillStyle = `rgba(${colorLR.join()})`;
  inputContext.fillRect(width / 2, height / 2, width / 2, height / 2);

  // Write frames for the duration of the test.
  const writer = generator.writable.getWriter();
  let timestamp = 0;
  const intervalId = setInterval(
      t.step_func(async () => {
        if (generator.readyState === 'live') {
          timestamp++;
          await writer.write(new VideoFrame(
              inputCanvas, {timestamp: timestamp, alpha: 'discard'}));
        }
      }),
      40);
  t.add_cleanup(() => clearInterval(intervalId));

  const caller = new RTCPeerConnection();
  t.add_cleanup(() => caller.close());
  const callee = new RTCPeerConnection();
  t.add_cleanup(() => callee.close());
  const sender = caller.addTrack(generator);

  exchangeIceCandidates(caller, callee);
  // Wait for the first track.
  const e = await exchangeOfferAndListenToOntrack(t, caller, callee);

  // Exchange answer.
  await exchangeAnswer(caller, callee);
  await waitForConnectionStateChange(callee, ['connected']);
  const params = sender.getParameters();
  params.encodings.forEach(e => e.scaleResolutionDownBy = 2);
  sender.setParameters(params);

  const processor = new MediaStreamTrackProcessor(e.track);
  const reader = processor.readable.getReader();

  // The first frame may not have had scaleResolutionDownBy applied.
  const numTries = 5;
  for (let i = 1; i <= numTries; i++) {
    const {value: outputFrame} = await reader.read();
    if (outputFrame.displayWidth !== width / 2) {
      assert_less_than(i, numTries, `First ${numTries} frames were the wrong size.`);
      outputFrame.close();
      continue;
    }

    assert_equals(outputFrame.displayWidth, width / 2);
    assert_equals(outputFrame.displayHeight, height / 2);

    const outputCanvas = new OffscreenCanvas(width / 2, height / 2);
    const outputContext = outputCanvas.getContext('2d', {alpha: false});
    outputContext.drawImage(outputFrame, 0, 0);
    outputFrame.close();
    // Check the centre of each of the four quadrants.
    const pixelUL = outputContext.getImageData(width / 8, height / 8, 1, 1);
    assertPixel(t, pixelUL.data, colorUL);
    const pixelUR =
        outputContext.getImageData(width * 3 / 8, height / 8, 1, 1);
    assertPixel(t, pixelUR.data, colorUR);
    const pixelLL =
        outputContext.getImageData(width / 8, height * 3 / 8, 1, 1);
    assertPixel(t, pixelLL.data, colorLL);
    const pixelLR =
        outputContext.getImageData(width * 3 / 8, height * 3 / 8, 1, 1);
    assertPixel(t, pixelLR.data, colorLR);
    break;
  }
}, 'Tests that frames are sent correctly with RTCRtpEncodingParameters.scaleResolutionDownBy.');

promise_test(async t => {
  const generator = new MediaStreamTrackGenerator({kind: "video"});
  t.add_cleanup(() => generator.stop());

  const writer = generator.writable.getWriter();
  const frame = makeVideoFrame(1);
  await writer.write(frame);

  assert_equals(generator.kind, "video");
  assert_equals(generator.readyState, "live");
}, "Tests that creating a Video MediaStreamTrackGenerator works as expected");

promise_test(async t => {
  const generator = new MediaStreamTrackGenerator({kind: "video"});
  t.add_cleanup(() => generator.stop());

  const writer = generator.writable.getWriter();
  const frame = makeVideoFrame(1);
  await writer.write(frame);

  // A successful write() transfers ownership of the frame and closes it, so
  // any further use of the frame must throw.
  assert_throws_dom("InvalidStateError", () => frame.clone(), "VideoFrame wasn't destroyed on write.");
}, "Tests that VideoFrames are destroyed on write.");

promise_test(async t => {
  const generator = new MediaStreamTrackGenerator({kind: "audio"});
  t.add_cleanup(() => generator.stop());

  const writer = generator.writable.getWriter();
  const frame = makeVideoFrame(1);
  // write() reports the kind mismatch as an asynchronous rejection, so it
  // must be checked with promise_rejects_js, not assert_throws_js.
  await promise_rejects_js(t, TypeError, writer.write(frame));
  // The rejected write does not take ownership of the frame; release it.
  frame.close();
}, "Mismatched frame and generator kind throws on write.");
</script>
</body>
</html>