tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

bug1994562-shim-mstp-mstg-on-window.js (11699B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
"use strict";

/**
* Bug 1994562 - Sites that depend on legacy (main-thread)
*               MediaStreamTrackProcessor or MediaStreamTrackGenerator
*
* Several websites that offer real-time media processing in Chrome fail
* to work in Firefox, either ghosting the button that offers this
* feature or erroring with a message like "Voice/Video processing is
* not supported in this browser".
*
* These webpages rely on the older Chrome-only MSTP or MSTG APIs on
* window instead of the standard MSTP and VTG (VideoTrackGenerator)
* implemented in Safari (and soon Firefox). The following shims the
* former APIs using existing technology on window (canvas for video
* and AudioWorklets for audio).
*
* Note: this shim has inherent performance limitations being on
* main thread. Websites are encouraged to upgrade to the standard
* worker-based APIs directly for optimal performance in Firefox.
*/

/* globals exportFunction, cloneInto */

// Tell web developers (via the console) that the shim is active and
// point them at the standard, worker-based replacement APIs.
console.info(
 "Nonstandard MediaStreamTrackProcessor and MediaStreamTrackGenerator are being shimmed for compatibility reasons. Please consider updating to the standard equivalents available in workers for optimal performance! See https://bugzil.la/1994562 for details."
);
     32 
     33 if (!window.MediaStreamTrackProcessor) {
     34  const win = window.wrappedJSObject;
     35  const f = func => exportFunction(func, window);
     36  const o = obj => Object.assign(new win.Object(), obj);
     37 
     38  function MediaStreamTrackProcessor(options) {
     39    if (!(options?.track instanceof win.MediaStreamTrack)) {
     40      throw new TypeError("Missing track");
     41    }
     42    const { track } = options;
     43    if (track.kind == "video") {
     44      const src = o({
     45        start: f(function start(controller) {
     46          return win.Promise.resolve()
     47            .then(
     48              f(() => {
     49                track.addEventListener(
     50                  "ended",
     51                  f(() => controller.close()),
     52                  o({ once: true })
     53                );
     54                src.video = win.document.createElement("video");
     55                const tracks = new win.Array();
     56                tracks.push(track);
     57                src.video.srcObject = new win.MediaStream(tracks);
     58                src.video.play();
     59                return new win.Promise(
     60                  f(r => (src.video.onloadedmetadata = r))
     61                );
     62              })
     63            )
     64            .then(
     65              f(() => {
     66                src.track = track;
     67                src.canvas = new win.OffscreenCanvas(
     68                  src.video.videoWidth,
     69                  src.video.videoHeight
     70                );
     71                src.ctx = src.canvas.getContext(
     72                  "2d",
     73                  o({ desynchronized: true })
     74                );
     75                src.t1 = performance.now();
     76              })
     77            );
     78        }),
     79        pull: f(function pull(controller) {
     80          if (track.readyState == "ended") {
     81            controller.close();
     82            return Promise.resolve();
     83          }
     84          const fps = track.getSettings().frameRate || 30;
     85          return new win.Promise(
     86            f(r => {
     87              const waitUntil = () => {
     88                if (
     89                  track.readyState == "ended" ||
     90                  performance.now() - src.t1 >= 1000 / fps
     91                ) {
     92                  r();
     93                  return;
     94                }
     95                requestAnimationFrame(waitUntil);
     96              };
     97              requestAnimationFrame(waitUntil);
     98            })
     99          ).then(
    100            f(() => {
    101              if (track.readyState == "ended") {
    102                controller.close();
    103                return;
    104              }
    105              src.t1 = performance.now();
    106              src.ctx.drawImage(src.video, 0, 0);
    107              const frame = new win.VideoFrame(
    108                src.canvas,
    109                o({ timestamp: src.t1 })
    110              );
    111              controller.enqueue(frame);
    112            })
    113          );
    114        }),
    115      });
    116      return o({ readable: new win.ReadableStream(src) });
    117    } else if (track.kind == "audio") {
    118      const src = o({
    119        start: f(function start(controller) {
    120          return win.Promise.resolve()
    121            .then(
    122              f(() => {
    123                track.addEventListener(
    124                  "ended",
    125                  f(() => controller.close()),
    126                  o({ once: true })
    127                );
    128                src.ac = new win.AudioContext();
    129                src.arrays = new win.Array();
    130                function worklet() {
    131                  registerProcessor(
    132                    "mstp-shim",
    133                    class Processor extends AudioWorkletProcessor {
    134                      process(input) {
    135                        this.port.postMessage(input);
    136                        return true;
    137                      }
    138                    }
    139                  );
    140                }
    141                return src.ac.audioWorklet.addModule(
    142                  `data:text/javascript,(${worklet.toString()})()`
    143                );
    144              })
    145            )
    146            .then(
    147              f(() => {
    148                src.node = new win.AudioWorkletNode(src.ac, "mstp-shim");
    149                const tracks = new win.Array();
    150                tracks.push(track);
    151                src.ac
    152                  .createMediaStreamSource(new win.MediaStream(tracks))
    153                  .connect(src.node);
    154                src.node.port.addEventListener(
    155                  "message",
    156                  f(({ data }) => data[0][0] && src.arrays.push(data))
    157                );
    158              })
    159            );
    160        }),
    161        pull: f(function pull(controller) {
    162          return win.Promise.resolve()
    163            .then(
    164              f(() => {
    165                if (track.readyState == "ended") {
    166                  controller.close();
    167                  return Promise.resolve();
    168                }
    169                return src.arrays.length
    170                  ? win.Promise.resolve()
    171                  : new win.Promise(f(r => (src.node.port.onmessage = r))).then(
    172                      f(function loop() {
    173                        if (track.readyState == "ended") {
    174                          return Promise.resolve();
    175                        }
    176                        if (!src.arrays.length) {
    177                          return new win.Promise(
    178                            f(r => (src.node.port.onmessage = r))
    179                          ).then(f(loop));
    180                        }
    181                        return win.Promise.resolve();
    182                      })
    183                    );
    184              })
    185            )
    186            .then(
    187              f(() => {
    188                if (track.readyState == "ended") {
    189                  return;
    190                }
    191                const [channels] = src.arrays.shift();
    192                const joined = new win.Float32Array(
    193                  channels.reduce(f((a, b) => a + b.length, 0))
    194                );
    195                channels.reduce(
    196                  f((offset, a) => {
    197                    joined.set(a, offset);
    198                    return offset + a.length;
    199                  }, 0)
    200                );
    201                const transfer = new win.Array();
    202                transfer.push(joined.buffer);
    203                const data = new win.AudioData(
    204                  o({
    205                    format: "f32-planar",
    206                    sampleRate: src.ac.sampleRate,
    207                    numberOfFrames: channels[0].length,
    208                    numberOfChannels: channels.length,
    209                    timestamp: (src.ac.currentTime * 1e6) | 0,
    210                    data: joined,
    211                    transfer,
    212                  })
    213                );
    214                controller.enqueue(data);
    215              })
    216            );
    217        }),
    218      });
    219      return o({ readable: new win.ReadableStream(src) });
    220    }
    221  }
    222  win.MediaStreamTrackProcessor = exportFunction(
    223    MediaStreamTrackProcessor,
    224    window,
    225    { allowCrossOriginArguments: true }
    226  );
    227 }
    228 
if (!window.MediaStreamTrackGenerator) {
  const win = window.wrappedJSObject;
  // Export a privileged function so page script may call it.
  const f = func => exportFunction(func, window);
  // Copy own properties onto a page-compartment object; a plain
  // chrome-side object literal would not be usable by page script.
  const o = obj => Object.assign(new win.Object(), obj);

  /**
   * Shim of Chrome's nonstandard window.MediaStreamTrackGenerator.
   *
   * Video: each VideoFrame written to `track.writable` is drawn onto
   * a canvas whose captureStream() provides the output track.
   * Audio: each AudioData written is posted to an AudioWorklet that
   * replays the samples into a MediaStreamAudioDestinationNode.
   *
   * @param {Object} options - must carry kind: "video" or "audio".
   * @returns {MediaStreamTrack} track augmented with a `writable`
   *          WritableStream property, mirroring Chrome's API shape.
   * @throws {TypeError} for any other kind.
   */
  function MediaStreamTrackGenerator(options) {
    if (options?.kind != "video" && options?.kind != "audio") {
      throw new TypeError("Invalid kind");
    }
    if (options.kind == "video") {
      const canvas = win.document.createElement("canvas");
      const ctx = canvas.getContext("2d", o({ desynchronized: true }));
      const [track] = canvas.captureStream().getVideoTracks();
      // Underlying sink for the page-side WritableStream.
      const sink = o({
        write: f(function write(frame) {
          // Resize per frame so the captured track follows the input.
          canvas.width = frame.displayWidth;
          canvas.height = frame.displayHeight;
          ctx.drawImage(frame, 0, 0, canvas.width, canvas.height);
          frame.close();
        }),
      });
      track.writable = new win.WritableStream(sink);
      return track;
    } else if (options.kind == "audio") {
      const ac = new win.AudioContext();
      const dest = ac.createMediaStreamDestination();
      const [track] = dest.stream.getAudioTracks();
      const sink = o({
        start: f(function start() {
          return win.Promise.resolve()
            .then(
              f(() => {
                sink.arrays = new win.Array();
                // Stringified below and loaded as a data: URL module;
                // it runs in the AudioWorklet global scope. It queues
                // arrays received via postMessage and plays them out
                // sample-by-sample, emitting 0 when the queue is dry.
                function worklet() {
                  registerProcessor(
                    "mstg-shim",
                    class Processor extends AudioWorkletProcessor {
                      constructor() {
                        super();
                        this.arrays = [];
                        this.arrayOffset = 0;
                        this.port.onmessage = ({ data }) =>
                          this.arrays.push(data);
                        this.emptyArray = new Float32Array(0);
                      }
                      process(inputs, [[output]]) {
                        for (let i = 0; i < output.length; i++) {
                          if (
                            !this.array ||
                            this.arrayOffset >= this.array.length
                          ) {
                            this.array = this.arrays.shift() || this.emptyArray;
                            this.arrayOffset = 0;
                          }
                          output[i] = this.array[this.arrayOffset++] || 0;
                        }
                        return true;
                      }
                    }
                  );
                }
                return ac.audioWorklet.addModule(
                  `data:text/javascript,(${worklet.toString()})()`
                );
              })
            )
            .then(
              f(() => {
                sink.node = new win.AudioWorkletNode(ac, "mstg-shim");
                sink.node.connect(dest);
                return track;
              })
            );
        }),
        write: f(function write(audioData) {
          const array = new win.Float32Array(
            audioData.numberOfFrames * audioData.numberOfChannels
          );
          // NOTE(review): only plane 0 is copied, and the worklet
          // writes a single output channel — presumably a mono-only
          // simplification; confirm against the sites being shimmed.
          audioData.copyTo(array, o({ planeIndex: 0 }));
          const transfer = new win.Array();
          transfer.push(array.buffer);
          sink.node.port.postMessage(array, o({ transfer }));
          audioData.close();
        }),
      });
      track.writable = new win.WritableStream(sink);
      return track;
    }
  }
  win.MediaStreamTrackGenerator = exportFunction(
    MediaStreamTrackGenerator,
    window,
    { allowCrossOriginArguments: true }
  );
}