tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

VideoTrackGenerator-with-window-tracks.https.html (11569B)


      1 <!DOCTYPE html>
      2 <html>
      3 <head>
      4 <title>MediaStream Insertable Streams - VideoTrackGenerator</title>
      5 <script src="/resources/testharness.js"></script>
      6 <script src="/resources/testharnessreport.js"></script>
      7 <script src="/webrtc/RTCPeerConnection-helper.js"></script>
      8 </head>
      9 <body>
     10  <h1 class="instructions">Description</h1>
     11  <p class="instructions">This test checks that generating video MediaStreamTracks from VideoTrackGenerator works as expected.</p>
     12  <script id="scriptRoutines">
  // Solid RGBA fill colour and frame dimensions shared by every generated
  // test frame and by the pixel checks below.
  const pixelColour = [50, 100, 150, 255];
  const height = 240;
  const width = 320;
     16    function makeVideoFrame(timestamp) {
     17      const canvas = new OffscreenCanvas(width, height);
     18 
     19      const ctx = canvas.getContext('2d', {alpha: false});
     20      ctx.fillStyle = `rgba(${pixelColour.join()})`;
     21      ctx.fillRect(0, 0, width, height);
     22 
     23      return new VideoFrame(canvas, {timestamp, alpha: 'discard'});
     24    }
     25 
     26    async function getVideoFrame() {
     27      const stream = await getNoiseStream({video: true});
     28      const input_track = stream.getTracks()[0];
     29      const processor = new MediaStreamTrackProcessor(input_track);
     30      const reader = processor.readable.getReader();
     31      const result = await reader.read();
     32      input_track.stop();
     33      return result.value;
     34    }
     35 
     36    function assertPixel(t, bytes, expected, epsilon = 5) {
     37      for (let i = 0; i < bytes.length; i++) {
     38        t.step(() => {
     39          assert_less_than(Math.abs(bytes[i] - expected[i]),  epsilon, "Mismatched pixel");
     40        });
     41      }
     42    }
     43 
     44    async function initiateSingleTrackCall(t, track, output) {
     45      const caller = new RTCPeerConnection();
     46      t.add_cleanup(() => caller.close());
     47      const callee = new RTCPeerConnection();
     48      t.add_cleanup(() => callee.close());
     49      caller.addTrack(track);
     50      t.add_cleanup(() => track.stop());
     51 
     52      exchangeIceCandidates(caller, callee);
     53      // Wait for the first track.
     54      const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
     55      output.srcObject = new MediaStream([e.track]);
     56      // Exchange answer.
     57      await exchangeAnswer(caller, callee);
     58      await waitForConnectionStateChange(callee, ['connected']);
     59    }
     60  </script>
     61  <script>
     62    async function createWorker(script) {
     63      script = scriptRoutines.text + script + "self.postMessage('ready');";
     64      const blob = new Blob([script], { type: 'text/javascript' });
     65      const url = URL.createObjectURL(blob);
     66      const worker = new Worker(url);
     67      try {
     68        await new Promise((resolve, reject) => {
     69          worker.onmessage = resolve;
     70          worker.onerror = (err) => reject(err.message);
     71        });
     72        return worker;
     73      } finally {
     74        URL.revokeObjectURL(url);
     75      }
     76    }
     77 
  // Creates a VideoTrackGenerator in a worker, transfers its track to the
  // main thread, renders it in a <video> element, and verifies that a pixel
  // sampled from the rendered output matches the generated frame's colour.
  promise_test(async t => {
    const worker = await createWorker(`
        const generator = new VideoTrackGenerator();
        const videoFrame = makeVideoFrame(1);
        const originalWidth = videoFrame.displayWidth;
        const originalHeight = videoFrame.displayHeight;
        self.onmessage = async (event) => {
          if (event.data == "transfer") {
            self.postMessage({ track: generator.track, originalWidth, originalHeight }, [generator.track]);
            return;
          }
          if (event.data == "write frame") {
            generator.writable.getWriter().write(videoFrame)
            return;
          }
          if (event.data == "cleanup") {
            videoFrame.close();
            return;
          }
        }
    `);

    // Release the worker-held VideoFrame when the test ends.
    t.add_cleanup(() => worker.postMessage("cleanup"));

    // Ask the worker to transfer the generator's track to this thread.
    worker.postMessage("transfer");
    const { track, originalWidth, originalHeight } = await new Promise(resolve => worker.onmessage = e => resolve(e.data));
    t.add_cleanup(() => track.stop());

    const video = document.createElement("video");
    video.autoplay = true;
    video.width = 320;
    video.height = 240;
    video.srcObject = new MediaStream([track]);
    video.play();

    // Wait for the video element to be connected to the generator and
    // generate the frame.
    // NOTE(review): the worker's "write frame" path acquires a writer and
    // does not await/release it — harmless here since only one frame is
    // ever written.
    video.onloadstart = () => worker.postMessage("write frame");

    return new Promise((resolve)=> {
      // ontimeupdate only fires once playback has produced output, so the
      // generated frame must be on screen by the time we sample it.
      video.ontimeupdate = t.step_func(() => {
        const canvas = document.createElement("canvas");
        canvas.width = originalWidth;
        canvas.height = originalHeight;
        const context = canvas.getContext('2d');
        context.drawImage(video, 0, 0);
        // Pick a pixel in the centre of the video and check that it has the colour of the frame provided.
        const pixel = context.getImageData(originalWidth/2, originalHeight/2, 1, 1);
        assertPixel(t, pixel.data, pixelColour);
        resolve();
      });
    });
  }, 'Tests that frames are actually rendered correctly in a stream used for a video element.');
    131 
  // Generates frames continuously in a worker, sends the generator's track
  // over a real peer connection, and verifies a pixel sampled from the
  // received/rendered video matches the generated colour.
  promise_test(async t => {
    // NOTE(review): the worker's setInterval is never cleared; it keeps
    // writing until the page (and worker) are torn down at test end.
    const worker = await createWorker(`
        const generator = new VideoTrackGenerator();
        const videoFrame = makeVideoFrame(1);
        const originalWidth = videoFrame.displayWidth;
        const originalHeight = videoFrame.displayHeight;
        let intervalId;
        self.onmessage = async (event) => {
          if (event.data == "transfer") {
            self.postMessage({ track: generator.track}, [generator.track]);
            // Write frames for the duration of the test.
            const writer = generator.writable.getWriter();
            let timestamp = 0;
            intervalId = setInterval(async () => {
              timestamp++;
              await writer.write(makeVideoFrame(timestamp));
            }, 40);
            return;
          }
        }
    `);

    // Receive the transferred generator track from the worker.
    worker.postMessage("transfer");
    const { track } = await new Promise(resolve => worker.onmessage = e => resolve(e.data));
    t.add_cleanup(() => track.stop());

    const video = document.createElement('video');
    video.autoplay = true;
    video.width = width;
    video.height = height;
    video.muted = true;

    // Route the track through an RTCPeerConnection pair into `video`.
    await initiateSingleTrackCall(t, track, video);
    return new Promise(resolve => {
      // Sample only after a frame has actually been composited.
      video.requestVideoFrameCallback(t.step_func(() => {
        const canvas = document.createElement('canvas');
        canvas.width = width;
        canvas.height = height;
        const context = canvas.getContext('2d');
        context.drawImage(video, 0, 0);
        // Pick a pixel in the centre of the video and check that it has the
        // colour of the frame provided.
        const pixel = context.getImageData(width / 2, height / 2, 1, 1);
        // Encoding/decoding can add noise, so increase the threshold to 8.
        assertPixel(t, pixel.data, pixelColour, 8);
        resolve();
      }));
    });
  }, 'Tests that frames are actually rendered correctly in a stream sent over a peer connection.');
    181 
    182    promise_test(async t => {
    183      const colorUL = [255, 0, 0, 255];
    184      const colorUR = [255, 255, 0, 255];
    185      const colorLL = [0, 255, 0, 255];
    186      const colorLR = [0, 255, 255, 255];
    187      const worker = await createWorker(`
    188          const generator = new VideoTrackGenerator();
    189          const videoFrame = makeVideoFrame(1);
    190          const originalWidth = videoFrame.displayWidth;
    191          const originalHeight = videoFrame.displayHeight;
    192          let intervalId;
    193          self.onmessage = async (event) => {
    194            if (event.data == "transfer") {
    195              self.postMessage({ track: generator.track}, [generator.track]);
    196              const inputCanvas = new OffscreenCanvas(width, height);
    197              const inputContext = inputCanvas.getContext('2d', {alpha: false});
    198              // draw four quadrants
    199              inputContext.fillStyle = \`rgba(${colorUL.join()})\`;
    200              inputContext.fillRect(0, 0, width / 2, height / 2);
    201              inputContext.fillStyle = \`rgba(${colorUR.join()})\`;
    202              inputContext.fillRect(width / 2, 0, width / 2, height / 2);
    203              inputContext.fillStyle = \`rgba(${colorLL.join()})\`;
    204              inputContext.fillRect(0, height / 2, width / 2, height / 2);
    205              inputContext.fillStyle = \`rgba(${colorLR.join()})\`;
    206              inputContext.fillRect(width / 2, height / 2, width / 2, height / 2);
    207 
    208              // Write frames for the duration of the test.
    209              const writer = generator.writable.getWriter();
    210              let timestamp = 0;
    211              const intervalId = setInterval(async () => {
    212                timestamp++;
    213                await writer.write(new VideoFrame(inputCanvas, {timestamp: timestamp, alpha: 'discard'}));
    214              }, 40);
    215              return;
    216            }
    217            if (event.data.type === "getVideoFrame") {
    218              const processor = new MediaStreamTrackProcessor({ track: event.data.track });
    219              const reader = processor.readable.getReader();
    220              const frame = (await reader.read()).value;
    221              self.postMessage({frame}, [frame])
    222              event.data.track.stop();
    223            }
    224          }
    225      `);
    226 
    227      worker.postMessage("transfer");
    228      const { track } = await new Promise(resolve => worker.onmessage = e => resolve(e.data));
    229      t.add_cleanup(() => track.stop());
    230 
    231      const caller = new RTCPeerConnection();
    232      t.add_cleanup(() => caller.close());
    233      const callee = new RTCPeerConnection();
    234      t.add_cleanup(() => callee.close());
    235      const sender = caller.addTrack(track);
    236 
    237      exchangeIceCandidates(caller, callee);
    238      // Wait for the first track.
    239      const e = await exchangeOfferAndListenToOntrack(t, caller, callee);
    240 
    241      // Exchange answer.
    242      await exchangeAnswer(caller, callee);
    243      await waitForConnectionStateChange(callee, ['connected']);
    244      const params = sender.getParameters();
    245      params.encodings.forEach(e => e.scaleResolutionDownBy = 2);
    246      sender.setParameters(params);
    247 
    248      // The first frame may not have had scaleResolutionDownBy applied
    249      const numTries = 5;
    250      for (let i = 1; i <= numTries; i++) {
    251        const clone = e.track.clone();
    252        worker.postMessage({type:"getVideoFrame", track: clone}, [clone]);
    253        const {frame: outputFrame} = await new Promise(resolve => worker.onmessage = e => resolve(e.data));
    254        if (outputFrame.displayWidth !== width / 2) {
    255          assert_less_than(i, numTries, `First ${numTries} frames were the wrong size.`);
    256          outputFrame.close();
    257          continue;
    258        }
    259 
    260        assert_equals(outputFrame.displayWidth, width / 2);
    261        assert_equals(outputFrame.displayHeight, height / 2);
    262 
    263        const outputCanvas = new OffscreenCanvas(width / 2, height / 2);
    264        const outputContext = outputCanvas.getContext('2d', {alpha: false});
    265        outputContext.drawImage(outputFrame, 0, 0);
    266        outputFrame.close();
    267        // Check the four quadrants
    268        const pixelUL = outputContext.getImageData(width / 8, height / 8, 1, 1);
    269        assertPixel(t, pixelUL.data, colorUL);
    270        const pixelUR =
    271            outputContext.getImageData(width * 3 / 8, height / 8, 1, 1);
    272        assertPixel(t, pixelUR.data, colorUR);
    273        const pixelLL =
    274            outputContext.getImageData(width / 8, height * 3 / 8, 1, 1);
    275        assertPixel(t, pixelLL.data, colorLL);
    276        const pixelLR =
    277            outputContext.getImageData(width * 3 / 8, height * 3 / 8, 1, 1);
    278        assertPixel(t, pixelLR.data, colorLR);
    279        break;
    280      }
    281    }, 'Tests that frames are sent correctly with RTCRtpEncodingParameters.scaleResolutionDownBy.');
    282 
    283  </script>
    284 </body>
    285 </html>