tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

webaudio.js (10884B)


      1 // Helpers for Web Audio tests
      2 
      3 // It is expected that the test defines this.
      4 /* global gTest */
      5 
      6 function expectException(func, exceptionCode) {
      7  var threw = false;
      8  try {
      9    func();
     10  } catch (ex) {
     11    threw = true;
     12    is(ex.constructor.name, "DOMException", "Expect a DOM exception");
     13    is(ex.code, exceptionCode, "Expect the correct exception code");
     14  }
     15  ok(threw, "The exception was thrown");
     16 }
     17 
     18 function expectNoException(func) {
     19  var threw = false;
     20  try {
     21    func();
     22  } catch (ex) {
     23    threw = true;
     24  }
     25  ok(!threw, "An exception was not thrown");
     26 }
     27 
     28 function expectTypeError(func) {
     29  var threw = false;
     30  try {
     31    func();
     32  } catch (ex) {
     33    threw = true;
     34    ok(ex instanceof TypeError, "Expect a TypeError");
     35  }
     36  ok(threw, "The exception was thrown");
     37 }
     38 
     39 function expectRejectedPromise(that, func, exceptionName) {
     40  var promise = that[func]();
     41 
     42  ok(promise instanceof Promise, "Expect a Promise");
     43 
     44  promise
     45    .then(function () {
     46      ok(false, "Promise resolved when it should have been rejected.");
     47    })
     48    .catch(function (err) {
     49      is(
     50        err.name,
     51        exceptionName,
     52        "Promise correctly reject with " + exceptionName
     53      );
     54    });
     55 }
     56 
     57 function fuzzyCompare(a, b) {
     58  return Math.abs(a - b) < 9e-3;
     59 }
     60 
     61 function compareChannels(
     62  buf1,
     63  buf2,
     64  /*optional*/ length,
     65  /*optional*/ sourceOffset,
     66  /*optional*/ destOffset,
     67  /*optional*/ skipLengthCheck
     68 ) {
     69  if (!skipLengthCheck) {
     70    is(buf1.length, buf2.length, "Channels must have the same length");
     71  }
     72  sourceOffset = sourceOffset || 0;
     73  destOffset = destOffset || 0;
     74  if (length == undefined) {
     75    length = buf1.length - sourceOffset;
     76  }
     77  var difference = 0;
     78  var maxDifference = 0;
     79  var firstBadIndex = -1;
     80  for (var i = 0; i < length; ++i) {
     81    if (!fuzzyCompare(buf1[i + sourceOffset], buf2[i + destOffset])) {
     82      difference++;
     83      maxDifference = Math.max(
     84        maxDifference,
     85        Math.abs(buf1[i + sourceOffset] - buf2[i + destOffset])
     86      );
     87      if (firstBadIndex == -1) {
     88        firstBadIndex = i;
     89      }
     90    }
     91  }
     92 
     93  is(
     94    difference,
     95    0,
     96    "maxDifference: " +
     97      maxDifference +
     98      ", first bad index: " +
     99      firstBadIndex +
    100      " with test-data offset " +
    101      sourceOffset +
    102      " and expected-data offset " +
    103      destOffset +
    104      "; corresponding values " +
    105      buf1[firstBadIndex + sourceOffset] +
    106      " and " +
    107      buf2[firstBadIndex + destOffset] +
    108      " --- differences"
    109  );
    110 }
    111 
    112 function compareBuffers(got, expected) {
    113  if (got.numberOfChannels != expected.numberOfChannels) {
    114    is(
    115      got.numberOfChannels,
    116      expected.numberOfChannels,
    117      "Correct number of buffer channels"
    118    );
    119    return;
    120  }
    121  if (got.length != expected.length) {
    122    is(got.length, expected.length, "Correct buffer length");
    123    return;
    124  }
    125  if (got.sampleRate != expected.sampleRate) {
    126    is(got.sampleRate, expected.sampleRate, "Correct sample rate");
    127    return;
    128  }
    129 
    130  for (var i = 0; i < got.numberOfChannels; ++i) {
    131    compareChannels(
    132      got.getChannelData(i),
    133      expected.getChannelData(i),
    134      got.length,
    135      0,
    136      0,
    137      true
    138    );
    139  }
    140 }
    141 
    142 /**
    143 * Compute the root mean square (RMS,
    144 * <http://en.wikipedia.org/wiki/Root_mean_square>) of a channel of a slice
    145 * (defined by `start` and `end`) of an AudioBuffer.
    146 *
    147 * This is useful to detect that a buffer is noisy or silent.
    148 */
    149 function rms(audiobuffer, channel = 0, start = 0, end = audiobuffer.length) {
    150  var buffer = audiobuffer.getChannelData(channel);
    151  var rms = 0;
    152  for (var i = start; i < end; i++) {
    153    rms += buffer[i] * buffer[i];
    154  }
    155 
    156  rms /= buffer.length;
    157  rms = Math.sqrt(rms);
    158  return rms;
    159 }
    160 
    161 function getEmptyBuffer(context, length) {
    162  return context.createBuffer(
    163    gTest.numberOfChannels,
    164    length,
    165    context.sampleRate
    166  );
    167 }
    168 
    169 function isChannelSilent(channel) {
    170  for (var i = 0; i < channel.length; ++i) {
    171    if (channel[i] != 0.0) {
    172      return false;
    173    }
    174  }
    175  return true;
    176 }
    177 
    178 const HRTFPannersByRate = new Map();
    179 /**
    180 * Return a promise that resolves when PannerNodes with HRTF panningModel in
    181 * an AudioContext of the specified sample rate will be ready to produce
    182 * non-zero output.  Before the HRIR database is loaded, such PannerNodes
    183 * produce zero output.
    184 */
    185 async function promiseHRTFReady(sampleRate) {
    186  if (HRTFPannersByRate.has(sampleRate)) {
    187    return;
    188  }
    189 
    190  const ctx = new AudioContext({ sampleRate });
    191  const processor = ctx.createScriptProcessor(4096, 2, 0);
    192  const panner = new PannerNode(ctx, { panningModel: "HRTF" });
    193  panner.connect(processor);
    194  const oscillator = ctx.createOscillator();
    195  oscillator.connect(panner);
    196  oscillator.start(0);
    197 
    198  await new Promise(r => {
    199    processor.onaudioprocess = e => {
    200      if (!isChannelSilent(e.inputBuffer.getChannelData(0))) {
    201        r();
    202      }
    203    };
    204  });
    205 
    206  ctx.suspend();
    207  oscillator.disconnect();
    208  panner.disconnect();
    209  processor.onaudioprocess = null;
    210  // Keep a reference to the panner so that the database is not unloaded.
    211  HRTFPannersByRate.set(sampleRate, panner);
    212 }
    213 
    214 /**
    215 * This function assumes that the test file defines a single gTest variable with
    216 * the following properties and methods:
    217 *
    218 * + numberOfChannels: optional property which specifies the number of channels
    219 *                     in the output.  The default value is 2.
    220 * + createGraph: mandatory method which takes a context object and does
    221 *                everything needed in order to set up the Web Audio graph.
    222 *                This function returns the node to be inspected.
    223 * + createGraphAsync: async version of createGraph.  This function takes
    224 *                     a callback which should be called with an argument
    225 *                     set to the node to be inspected when the callee is
    226 *                     ready to proceed with the test.  Either this function
    227 *                     or createGraph must be provided.
    228 * + createExpectedBuffers: optional method which takes a context object and
    229 *                          returns either one expected buffer or an array of
    230 *                          them, designating what is expected to be observed
    231 *                          in the output.  If omitted, the output is expected
    232 *                          to be silence.  All buffers must have the same
    233 *                          length, which must be a bufferSize supported by
    234 *                          ScriptProcessorNode.  This function is guaranteed
    235 *                          to be called before createGraph.
    236 * + length: property equal to the total number of frames which we are waiting
    237 *           to see in the output, mandatory if createExpectedBuffers is not
    238 *           provided, in which case it must be a bufferSize supported by
    239 *           ScriptProcessorNode (256, 512, 1024, 2048, 4096, 8192, or 16384).
    240 *           If createExpectedBuffers is provided then this must be equal to
    241 *           the number of expected buffers * the expected buffer length.
    242 *
    243 * + skipOfflineContextTests: optional. when true, skips running tests on an offline
    244 *                            context by circumventing testOnOfflineContext.
    245 */
function runTest() {
  function done() {
    // Tell the SimpleTest harness that this asynchronous test is finished.
    SimpleTest.finish();
  }

  SimpleTest.waitForExplicitFinish();
  function runTestFunction() {
    if (!gTest.numberOfChannels) {
      gTest.numberOfChannels = 2; // default
    }

    // Total frames observed during the realtime-context pass.  It is reused
    // as the OfflineAudioContext length below, so the realtime pass must
    // always run first.
    var testLength;

    // Build the expected buffers and the graph under test on `context`, then
    // delegate output verification to the supplied `testOutput` function.
    function runTestOnContext(context, callback, testOutput) {
      if (!gTest.createExpectedBuffers) {
        // Assume that the output is silence
        var expectedBuffers = getEmptyBuffer(context, gTest.length);
      } else {
        var expectedBuffers = gTest.createExpectedBuffers(context);
      }
      // A test may return a single buffer; normalize to an array.
      if (!(expectedBuffers instanceof Array)) {
        expectedBuffers = [expectedBuffers];
      }
      var expectedFrames = 0;
      for (var i = 0; i < expectedBuffers.length; ++i) {
        is(
          expectedBuffers[i].numberOfChannels,
          gTest.numberOfChannels,
          "Correct number of channels for expected buffer " + i
        );
        expectedFrames += expectedBuffers[i].length;
      }
      // Sanity-check the contract documented above: gTest.length must equal
      // the sum of the expected buffer lengths.
      if (gTest.length && gTest.createExpectedBuffers) {
        is(expectedFrames, gTest.length, "Correct number of expected frames");
      }

      if (gTest.createGraphAsync) {
        gTest.createGraphAsync(context, function (nodeToInspect) {
          testOutput(nodeToInspect, expectedBuffers, callback);
        });
      } else {
        testOutput(gTest.createGraph(context), expectedBuffers, callback);
      }
    }

    // Verify output on a realtime AudioContext by tapping the inspected node
    // with a ScriptProcessorNode and comparing each rendered quantum against
    // the next expected buffer.
    function testOnNormalContext(callback) {
      function testOutput(nodeToInspect, expectedBuffers, callback) {
        testLength = 0;
        var sp = context.createScriptProcessor(
          expectedBuffers[0].length,
          gTest.numberOfChannels,
          0
        );
        nodeToInspect.connect(sp);
        sp.onaudioprocess = function (e) {
          var expectedBuffer = expectedBuffers.shift();
          testLength += expectedBuffer.length;
          compareBuffers(e.inputBuffer, expectedBuffer);
          if (!expectedBuffers.length) {
            // All expected buffers have been seen; stop listening and
            // continue with the next phase of the test.
            sp.onaudioprocess = null;
            callback();
          }
        };
      }
      var context = new AudioContext();
      runTestOnContext(context, callback, testOutput);
    }

    // Verify output on an OfflineAudioContext: render the whole graph, then
    // walk the rendered buffer slice-by-slice against the expected buffers.
    function testOnOfflineContext(callback, sampleRate) {
      function testOutput(nodeToInspect, expectedBuffers, callback) {
        nodeToInspect.connect(context.destination);
        context.oncomplete = function (e) {
          // Offset into the single rendered buffer of the slice that
          // corresponds to the current expected buffer.
          var samplesSeen = 0;
          while (expectedBuffers.length) {
            var expectedBuffer = expectedBuffers.shift();
            is(
              e.renderedBuffer.numberOfChannels,
              expectedBuffer.numberOfChannels,
              "Correct number of input buffer channels"
            );
            for (var i = 0; i < e.renderedBuffer.numberOfChannels; ++i) {
              compareChannels(
                e.renderedBuffer.getChannelData(i),
                expectedBuffer.getChannelData(i),
                expectedBuffer.length,
                samplesSeen,
                undefined,
                true
              );
            }
            samplesSeen += expectedBuffer.length;
          }
          callback();
        };
        context.startRendering();
      }

      // testLength was computed by the preceding realtime pass.
      var context = new OfflineAudioContext(
        gTest.numberOfChannels,
        testLength,
        sampleRate
      );
      runTestOnContext(context, callback, testOutput);
    }

    // Run on a realtime context first (this also computes testLength), then,
    // unless the test opts out, repeat on offline contexts at 48kHz and
    // 44.1kHz.
    testOnNormalContext(function () {
      if (!gTest.skipOfflineContextTests) {
        testOnOfflineContext(function () {
          testOnOfflineContext(done, 44100);
        }, 48000);
      } else {
        done();
      }
    });
  }

  // Defer until the document has finished loading so the harness and the
  // test's gTest definition are in place.
  if (document.readyState !== "complete") {
    addLoadEvent(runTestFunction);
  } else {
    runTestFunction();
  }
}