tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

audiocontext-suspend-resume-close.html (11369B)


      1 <!DOCTYPE html>
      2 <html>
      3  <head>
      4    <meta charset="utf-8" />
      5    <script src="/resources/testharness.js"></script>
      6    <script src="/resources/testharnessreport.js"></script>
      7    <script type="module">
      8 "use strict";
      9 
     10 function tryToCreateNodeOnClosedContext(ctx) {
     11  assert_equals(ctx.state, "closed", "The context is in closed state");
     12 
     13  [
     14    { name: "createBufferSource" },
     15    {
     16      name: "createMediaStreamDestination",
     17      onOfflineAudioContext: false,
     18    },
     19    { name: "createScriptProcessor" },
     20    { name: "createStereoPanner" },
     21    { name: "createAnalyser" },
     22    { name: "createGain" },
     23    { name: "createDelay" },
     24    { name: "createBiquadFilter" },
     25    { name: "createWaveShaper" },
     26    { name: "createPanner" },
     27    { name: "createConvolver" },
     28    { name: "createChannelSplitter" },
     29    { name: "createChannelMerger" },
     30    { name: "createDynamicsCompressor" },
     31    { name: "createOscillator" },
     32    {
     33      name: "createMediaElementSource",
     34      args: [new Audio()],
     35      onOfflineAudioContext: false,
     36    },
     37    {
     38      name: "createMediaStreamSource",
     39      args: [new AudioContext().createMediaStreamDestination().stream],
     40      onOfflineAudioContext: false,
     41    },
     42  ].forEach(function (e) {
     43    if (
     44      e.onOfflineAudioContext == false &&
     45      ctx instanceof OfflineAudioContext
     46    ) {
     47      return;
     48    }
     49 
     50    try {
     51      ctx[e.name].apply(ctx, e.args);
     52    } catch (err) {
     53      assert_true(false, "unexpected exception thrown for " + e.name);
     54    }
     55  });
     56 }
     57 
     58 function loadFile(url, callback) {
     59  return new Promise((resolve) => {
     60    var xhr = new XMLHttpRequest();
     61    xhr.open("GET", url, true);
     62    xhr.responseType = "arraybuffer";
     63    xhr.onload = function () {
     64      resolve(xhr.response);
     65    };
     66    xhr.send();
     67  });
     68 }
     69 
     70 // createBuffer, createPeriodicWave and decodeAudioData should work on a context
     71 // that has `state` == "closed"
     72 async function tryLegalOperationsOnClosedContext(ctx) {
     73  assert_equals(ctx.state, "closed", "The context is in closed state");
     74 
     75  [
     76    { name: "createBuffer", args: [1, 44100, 44100] },
     77    {
     78      name: "createPeriodicWave",
     79      args: [new Float32Array(10), new Float32Array(10)],
     80    },
     81  ].forEach(function (e) {
     82    try {
     83      ctx[e.name].apply(ctx, e.args);
     84    } catch (err) {
     85      assert_true(false, "unexpected exception thrown");
     86    }
     87  });
     88  var buf = await loadFile("/webaudio/resources/sin_440Hz_-6dBFS_1s.wav");
     89  return ctx
     90    .decodeAudioData(buf)
     91    .then(function (decodedBuf) {
     92      assert_true(
     93        true,
     94        "decodeAudioData on a closed context should work, it did."
     95      );
     96    })
     97    .catch(function (e) {
     98      assert_true(
     99        false,
    100        "decodeAudioData on a closed context should work, it did not"
    101      );
    102    });
    103 }
    104 
    105 // Test that MediaStreams that are the output of a suspended AudioContext are
    106 // producing silence
    107 // ac1 produce a sine fed to a MediaStreamAudioDestinationNode
    108 // ac2 is connected to ac1 with a MediaStreamAudioSourceNode, and check that
    109 // there is silence when ac1 is suspended
    110 async function testMultiContextOutput() {
    111  var ac1 = new AudioContext(),
    112    ac2 = new AudioContext();
    113 
    114  await new Promise((resolve) => (ac1.onstatechange = resolve));
    115 
    116  ac1.onstatechange = null;
    117  await ac1.suspend();
    118  assert_equals(ac1.state, "suspended", "ac1 is suspended");
    119  var osc1 = ac1.createOscillator(),
    120    mediaStreamDestination1 = ac1.createMediaStreamDestination();
    121 
    122  var mediaStreamAudioSourceNode2 = ac2.createMediaStreamSource(
    123      mediaStreamDestination1.stream
    124    ),
    125    sp2 = ac2.createScriptProcessor(),
    126    silentBuffersInARow = 0;
    127 
    128  osc1.connect(mediaStreamDestination1);
    129  mediaStreamAudioSourceNode2.connect(sp2);
    130  osc1.start();
    131 
    132  let e = await new Promise((resolve) => (sp2.onaudioprocess = resolve));
    133 
    134  while (true) {
    135    let e = await new Promise(
    136      (resolve) => (sp2.onaudioprocess = resolve)
    137    );
    138    var input = e.inputBuffer.getChannelData(0);
    139    var silent = true;
    140    for (var i = 0; i < input.length; i++) {
    141      if (input[i] != 0.0) {
    142        silent = false;
    143      }
    144    }
    145 
    146    if (silent) {
    147      silentBuffersInARow++;
    148      if (silentBuffersInARow == 10) {
    149        assert_true(
    150          true,
    151          "MediaStreams produce silence when their input is blocked."
    152        );
    153        break;
    154      }
    155    } else {
    156      assert_equals(
    157        silentBuffersInARow,
    158        0,
    159        "No non silent buffer inbetween silent buffers."
    160      );
    161    }
    162  }
    163 
    164  sp2.onaudioprocess = null;
    165  ac1.close();
    166  ac2.close();
    167 }
    168 
// Test that there is no buffering between contexts when connecting a running
// AudioContext to a suspended AudioContext. Gecko's ScriptProcessorNode does some
// buffering internally, so we ensure this by using a very very low frequency
// on a sine, and observe that the phase has changed by a big enough margin.
async function testMultiContextInput() {
  var ac1 = new AudioContext(),
    ac2 = new AudioContext();

  // Wait for ac1's first statechange before building the graph.
  await new Promise((resolve) => (ac1.onstatechange = resolve));
  ac1.onstatechange = null;

  // ac1 side: oscillator feeding both a MediaStream destination (towards
  // ac2) and a ScriptProcessorNode.
  var osc1 = ac1.createOscillator(),
    mediaStreamDestination1 = ac1.createMediaStreamDestination(),
    sp1 = ac1.createScriptProcessor();

  // ac2 side: receives ac1's stream; sp2 exposes the samples for inspection.
  var mediaStreamAudioSourceNode2 = ac2.createMediaStreamSource(
      mediaStreamDestination1.stream
    ),
    sp2 = ac2.createScriptProcessor(),
    eventReceived = 0;

  // Very low frequency so the sine's phase advances slowly and the phase
  // delta check below is meaningful.
  osc1.frequency.value = 0.0001;
  osc1.connect(mediaStreamDestination1);
  osc1.connect(sp1);
  mediaStreamAudioSourceNode2.connect(sp2);
  osc1.start();

  // Remember the last sample seen before the suspend/resume cycle.
  // NOTE(review): the value is stashed as an expando property on sp2
  // itself, not a spec-defined field.
  var e = await new Promise((resolve) => (sp2.onaudioprocess = resolve));
  var inputBuffer1 = e.inputBuffer.getChannelData(0);
  sp2.value = inputBuffer1[inputBuffer1.length - 1];
  await ac2.suspend();
  await ac2.resume();

  while (true) {
    var e = await new Promise(
      (resolve) => (sp2.onaudioprocess = resolve)
    );
    var inputBuffer = e.inputBuffer.getChannelData(0);
    // After the fourth post-resume buffer, the phase must have advanced by
    // at least the theoretical increment: stale buffered samples would show
    // a smaller delta. The 2048*3 factor assumes a 2048-frame
    // ScriptProcessor buffer and 3 elapsed buffers -- TODO confirm.
    if (eventReceived++ == 3) {
      var delta = Math.abs(inputBuffer[1] - sp2.value),
        theoreticalIncrement =
          (2048 * 3 * Math.PI * 2 * osc1.frequency.value) /
          ac1.sampleRate;
      assert_true(
        delta >= theoreticalIncrement,
        "Buffering did not occur when the context was suspended (delta:" +
          delta +
          " increment: " +
          theoreticalIncrement +
          ")"
      );
      break;
    }
  }
  ac1.close();
  ac2.close();
  sp1.onaudioprocess = null;
  sp2.onaudioprocess = null;
}
    228 
// Take an AudioContext, make sure it switches to running when the audio starts
// flowing, and then, call suspend, resume and close on it, tracking its state.
async function testAudioContext() {
  var ac = new AudioContext();
  assert_equals(
    ac.state,
    "suspended",
    "AudioContext should start in suspended state."
  );
  // Tracks, per transition, whether the onstatechange handler has run.
  // The `promise` fields are declared but never set in this test; only the
  // `handler` flags are used below.
  var stateTracker = {
    previous: ac.state,
    // no promise for the initial suspended -> running
    initial: { handler: false },
    suspend: { promise: false, handler: false },
    resume: { promise: false, handler: false },
    close: { promise: false, handler: false },
  };

  // Wait for the initial suspended -> running transition.
  await new Promise((resolve) => (ac.onstatechange = resolve));

  assert_true(
    stateTracker.previous == "suspended" && ac.state == "running",
    'AudioContext should switch to "running" when the audio hardware is' +
      " ready."
  );

  stateTracker.previous = ac.state;
  stateTracker.initial.handler = true;

  // Each transition below checks ordering: the promise returned by
  // suspend()/resume()/close() must settle BEFORE the onstatechange
  // handler runs, so the handler flag must still be false right after the
  // await.
  let promise_statechange_suspend = new Promise((resolve) => {
    ac.onstatechange = resolve;
  }).then(() => {
    stateTracker.suspend.handler = true;
  });
  await ac.suspend();
  assert_true(
    !stateTracker.suspend.handler,
    "Promise should be resolved before the callback."
  );
  assert_equals(
    ac.state,
    "suspended",
    'AudioContext should switch to "suspended" when the audio stream is ' +
      "suspended."
  );
  await promise_statechange_suspend;
  stateTracker.previous = ac.state;

  let promise_statechange_resume = new Promise((resolve) => {
    ac.onstatechange = resolve;
  }).then(() => {
    stateTracker.resume.handler = true;
  });
  await ac.resume();
  assert_true(
    !stateTracker.resume.handler,
    "Promise should be resolved before the callback."
  );
  assert_equals(
    ac.state,
    "running",
    'AudioContext should switch to "running" when the audio stream is ' +
      "resumed."
  );
  await promise_statechange_resume;
  stateTracker.previous = ac.state;

  let promise_statechange_close = new Promise((resolve) => {
    ac.onstatechange = resolve;
  }).then(() => {
    stateTracker.close.handler = true;
  });
  await ac.close();
  assert_true(
    !stateTracker.close.handler,
    "Promise should be resolved before the callback."
  );
  assert_equals(
    ac.state,
    "closed",
    'AudioContext should switch to "closed" when the audio stream is ' +
      "closed."
  );
  await promise_statechange_close;
  stateTracker.previous = ac.state;

  // Once closed, factory methods must not throw, and createBuffer /
  // createPeriodicWave / decodeAudioData must still work.
  tryToCreateNodeOnClosedContext(ac);
  await tryLegalOperationsOnClosedContext(ac);
}
    318 
    319 async function testOfflineAudioContext() {
    320  var o = new OfflineAudioContext(1, 44100, 44100);
    321  assert_equals(
    322    o.state,
    323    "suspended",
    324    "OfflineAudioContext should start in suspended state."
    325  );
    326 
    327  var previousState = o.state,
    328    finishedRendering = false;
    329 
    330  o.startRendering().then(function (buffer) {
    331    finishedRendering = true;
    332  });
    333 
    334  await new Promise((resolve) => (o.onstatechange = resolve));
    335 
    336  assert_true(
    337    previousState == "suspended" && o.state == "running",
    338    "onstatechanged" +
    339      "handler is called on state changed, and the new state is running"
    340  );
    341  previousState = o.state;
    342  await new Promise((resolve) => (o.onstatechange = resolve));
    343  assert_true(
    344    previousState == "running" && o.state == "closed",
    345    "onstatechanged handler is called when rendering finishes, " +
    346      "and the new state is closed"
    347  );
    348  assert_true(
    349    finishedRendering,
    350    "The Promise that is resolved when the rendering is " +
    351      "done should be resolved earlier than the state change."
    352  );
    353  previousState = o.state;
    354  function afterRenderingFinished() {
    355    assert_true(
    356      false,
    357      "There should be no transition out of the closed state."
    358    );
    359  }
    360  o.onstatechange = afterRenderingFinished;
    361 
    362  tryToCreateNodeOnClosedContext(o);
    363  await tryLegalOperationsOnClosedContext(o);
    364 }
    365 
    366 async function testSuspendResumeEventLoop() {
    367  var ac = new AudioContext();
    368  var source = ac.createBufferSource();
    369  source.buffer = ac.createBuffer(1, 44100, 44100);
    370  await new Promise((resolve) => (ac.onstatechange = resolve));
    371  ac.onstatechange = null;
    372  assert_true(ac.state == "running", "initial state is running");
    373  await ac.suspend();
    374  source.start();
    375  ac.resume();
    376  await new Promise((resolve) => (source.onended = resolve));
    377  assert_true(true, "The AudioContext did resume");
    378 }
    379 
    380 function testResumeInStateChangeForResumeCallback() {
    381  return new Promise((resolve) => {
    382    var ac = new AudioContext();
    383    ac.onstatechange = function () {
    384      ac.resume().then(() => {
    385        assert_true(true, "resume promise resolved as expected.");
    386        resolve();
    387      });
    388    };
    389  });
    390 }
    391 
    392 var tests = [
    393  testOfflineAudioContext,
    394  testSuspendResumeEventLoop,
    395  testResumeInStateChangeForResumeCallback,
    396  testAudioContext,
    397  testMultiContextOutput,
    398  testMultiContextInput,
    399 ];
    400 
    401 tests.forEach(function (f) {
    402  promise_test(f, f.name);
    403 });
    404    </script>
    405  </head>
    406 </html>