tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

test_audioContextSuspendResumeClose.html (13049B)


      1 <!DOCTYPE HTML>
      2 <html>
      3 <head>
      4  <title>Test suspend, resume and close method of the AudioContext</title>
      5  <script src="/tests/SimpleTest/SimpleTest.js"></script>
      6  <script type="text/javascript" src="webaudio.js"></script>
      7  <link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css" />
      8 </head>
      9 <body>
     10 <pre id="test">
     11 <script class="testbody" type="text/javascript">
     12 
     13 function tryToCreateNodeOnClosedContext(ctx) {
     14  is(ctx.state, "closed", "The context is in closed state");
     15 
     16  [ { name: "createBufferSource" },
     17    { name: "createMediaStreamDestination",
     18      onOfflineAudioContext: false},
     19    { name: "createScriptProcessor" },
     20    { name: "createStereoPanner" },
     21    { name: "createAnalyser" },
     22    { name: "createGain" },
     23    { name: "createDelay" },
     24    { name: "createBiquadFilter" },
     25    { name: "createWaveShaper" },
     26    { name: "createPanner" },
     27    { name: "createConvolver" },
     28    { name: "createChannelSplitter" },
     29    { name: "createChannelMerger" },
     30    { name: "createDynamicsCompressor" },
     31    { name: "createOscillator" },
     32    { name: "createMediaElementSource",
     33      args: [new Audio()],
     34      onOfflineAudioContext: false },
     35    { name: "createMediaStreamSource",
     36      args: [(new AudioContext()).createMediaStreamDestination().stream],
     37      onOfflineAudioContext: false } ].forEach(function(e) {
     38 
     39      if (e.onOfflineAudioContext == false &&
     40          ctx instanceof OfflineAudioContext) {
     41        return;
     42      }
     43 
     44      expectNoException(function() {
     45        ctx[e.name].apply(ctx, e.args);
     46      }, DOMException.INVALID_STATE_ERR);
     47    });
     48 }
     49 
     50 function loadFile(url, callback) {
     51  var xhr = new XMLHttpRequest();
     52  xhr.open("GET", url, true);
     53  xhr.responseType = "arraybuffer";
     54  xhr.onload = function() {
     55    callback(xhr.response);
     56  };
     57  xhr.send();
     58 }
     59 
     60 // createBuffer, createPeriodicWave and decodeAudioData should work on a context
     61 // that has `state` == "closed"
     62 function tryLegalOpeerationsOnClosedContext(ctx) {
     63  is(ctx.state, "closed", "The context is in closed state");
     64 
     65  [ { name: "createBuffer",
     66      args: [1, 44100, 44100] },
     67    { name: "createPeriodicWave",
     68      args: [new Float32Array(10), new Float32Array(10)] }
     69  ].forEach(function(e) {
     70    expectNoException(function() {
     71      ctx[e.name].apply(ctx, e.args);
     72    });
     73  });
     74  loadFile("ting-44.1k-1ch.ogg", function(buf) {
     75    ctx.decodeAudioData(buf).then(function() {
     76      ok(true, "decodeAudioData on a closed context should work, it did.")
     77      finish();
     78    }).catch(function(){
     79      ok(false, "decodeAudioData on a closed context should work, it did not");
     80      finish();
     81    });
     82  });
     83 }
     84 
// Test that MediaStreams that are the output of a suspended AudioContext are
// producing silence
// ac1 produce a sine fed to a MediaStreamAudioDestinationNode
// ac2 is connected to ac1 with a MediaStreamAudioSourceNode, and check that
// there is silence when ac1 is suspended
function testMultiContextOutput() {
  var ac1 = new AudioContext(),
      ac2 = new AudioContext();

  // Wait for ac1 to actually start running before wiring up the graph.
  ac1.onstatechange = function() {
    ac1.onstatechange = null;

    var osc1 = ac1.createOscillator(),
        mediaStreamDestination1 = ac1.createMediaStreamDestination();

    var mediaStreamAudioSourceNode2 =
      ac2.createMediaStreamSource(mediaStreamDestination1.stream),
      sp2 = ac2.createScriptProcessor(),
      silentBuffersInARow = 0;


    // On the first audioprocess event, suspend ac1; once the suspend promise
    // resolves, start checking ac2's input for silence. The handler is
    // cleared synchronously below so no events fire in between.
    sp2.onaudioprocess = function() {
      ac1.suspend().then(function() {
        is(ac1.state, "suspended", "ac1 is suspended");
        sp2.onaudioprocess = checkSilence;
      });
      sp2.onaudioprocess = null;
    }

    // Count consecutive all-zero buffers; ten in a row is taken as proof
    // that the suspended context's MediaStream is outputting silence.
    function checkSilence(e) {
      var input = e.inputBuffer.getChannelData(0);
      var silent = true;
      for (var i = 0; i < input.length; i++) {
        if (input[i] != 0.0) {
          silent = false;
        }
      }

      if (silent) {
        silentBuffersInARow++;
        if (silentBuffersInARow == 10) {
          ok(true,
              "MediaStreams produce silence when their input is blocked.");
          sp2.onaudioprocess = null;
          ac1.close();
          ac2.close();
          finish();
        }
      } else {
        // Any non-silent buffer must occur before the first silent one.
        is(silentBuffersInARow, 0,
            "No non silent buffer inbetween silent buffers.");
      }
    }

    osc1.connect(mediaStreamDestination1);

    mediaStreamAudioSourceNode2.connect(sp2);
    osc1.start();
  }
}
    145 
    146 
// Test that there is no buffering between contexts when connecting a running
// AudioContext to a suspended AudioContext. Our ScriptProcessorNode does some
// buffering internally, so we ensure this by using a very very low frequency
// on a sine, and observe that the phase has changed by a big enough margin.
function testMultiContextInput() {
  var ac1 = new AudioContext(),
      ac2 = new AudioContext();

  // Only build the graph once ac1 has transitioned to "running".
  ac1.onstatechange = function() {
    ac1.onstatechange = null;

    var osc1 = ac1.createOscillator(),
        mediaStreamDestination1 = ac1.createMediaStreamDestination(),
        sp1 = ac1.createScriptProcessor();

    var mediaStreamAudioSourceNode2 =
      ac2.createMediaStreamSource(mediaStreamDestination1.stream),
      sp2 = ac2.createScriptProcessor(),
      eventReceived = 0;


    // Extremely low frequency so the sine's phase advances almost linearly,
    // which lets us bound the expected per-buffer sample increment below.
    osc1.frequency.value = 0.0001;

    // After a few events, compare the current sample against the one saved
    // before the suspend/resume cycle (sp2.value): a jump of at least the
    // theoretical phase increment means no stale buffered audio was played.
    function checkDiscontinuity(e) {
      var inputBuffer = e.inputBuffer.getChannelData(0);
      if (eventReceived++ == 3) {
        var delta = Math.abs(inputBuffer[1] - sp2.value),
            theoreticalIncrement = 2048 * 3 * Math.PI * 2 * osc1.frequency.value / ac1.sampleRate;
        ok(delta >= theoreticalIncrement,
            "Buffering did not occur when the context was suspended (delta:" + delta + " increment: " + theoreticalIncrement+")");
        ac1.close();
        ac2.close();
        sp1.onaudioprocess = null;
        sp2.onaudioprocess = null;
        finish();
      }
    }

    // First event: remember the last sample seen, then suspend and resume
    // ac2; once resumed, switch to the discontinuity check above.
    sp2.onaudioprocess = function(e) {
      var inputBuffer = e.inputBuffer.getChannelData(0);
      sp2.value = inputBuffer[inputBuffer.length - 1];
      ac2.suspend().then(function() {
          ac2.resume().then(function() {
            sp2.onaudioprocess = checkDiscontinuity;
            });
          });
    }

    osc1.connect(mediaStreamDestination1);
    osc1.connect(sp1);

    mediaStreamAudioSourceNode2.connect(sp2);
    osc1.start();
  }
}
    202 
// Test that ScriptProcessorNode's onaudioprocess don't get called while the
// context is suspended/closed. It is possible that we get the handler called
// exactly once after suspend, because the event has already been sent to the
// event loop.
function testScriptProcessNodeSuspended() {
  var ac = new AudioContext();
  var sp = ac.createScriptProcessor();
  var remainingIterations = 30;
  var afterResume = false;
  ac.onstatechange = function() {
    ac.onstatechange = null;
    sp.onaudioprocess = function() {
      // The handler must only ever run while the context is running.
      ok(ac.state == "running", "If onaudioprocess is called, the context" +
          " must be running (was " + ac.state + ", remainingIterations:" + remainingIterations +")");
      remainingIterations--;
      if (!afterResume) {
        // Let 30 buffers through, then suspend and immediately resume.
        if (remainingIterations == 0) {
          ac.suspend().then(function() {
            ac.resume().then(function() {
              remainingIterations = 30;
              afterResume = true;
            });
          });
        }
      } else {
        // First event after the resume: the state assertion above already
        // ran for it, so the test is done.
        sp.onaudioprocess = null;
        finish();
      }
    }
  }
  sp.connect(ac.destination);
}
    235 
// Take an AudioContext, make sure it switches to running when the audio starts
// flowing, and then, call suspend, resume and close on it, tracking its state.
function testAudioContext() {
  var ac = new AudioContext();
  is(ac.state, "suspended", "AudioContext should start in suspended state.");
  // For each transition, track whether the method's promise and the
  // statechange handler have fired, so their relative order can be asserted:
  // promise first, handler second, each exactly once.
  var stateTracker = {
    previous: ac.state,
     // no promise for the initial suspended -> running
    initial: {  handler: false },
    suspend: { promise: false, handler: false },
    resume: { promise: false, handler: false },
    close: { promise: false, handler: false }
  };

  // Handler for the automatic suspended -> running transition that happens
  // when the audio hardware becomes ready; kicks off the suspend() step.
  function initialSuspendToRunning() {
    ok(stateTracker.previous == "suspended" &&
       ac.state == "running",
       "AudioContext should switch to \"running\" when the audio hardware is" +
       " ready.");

    stateTracker.previous = ac.state;
    ac.onstatechange = afterSuspend;
    stateTracker.initial.handler = true;

    ac.suspend().then(function() {
      // The promise must settle before the statechange handler runs.
      ok(!stateTracker.suspend.promise && !stateTracker.suspend.handler,
        "Promise should be resolved before the callback, and only once.")
      stateTracker.suspend.promise = true;
    });
  }

  // statechange handler for the suspend() call; chains into resume().
  function afterSuspend() {
    ok(stateTracker.previous == "running" &&
       ac.state == "suspended",
       "AudioContext should switch to \"suspend\" when the audio stream is" +
       "suspended.");
    ok(stateTracker.suspend.promise && !stateTracker.suspend.handler,
        "Handler should be called after the callback, and only once");

    stateTracker.suspend.handler = true;
    stateTracker.previous = ac.state;
    ac.onstatechange = afterResume;

    ac.resume().then(function() {
      ok(!stateTracker.resume.promise && !stateTracker.resume.handler,
        "Promise should be called before the callback, and only once");
      stateTracker.resume.promise = true;
    });
  }

  // statechange handler for resume(); chains into close(), and then runs
  // the closed-context checks defined at the top of this file.
  function afterResume() {
    ok(stateTracker.previous == "suspended" &&
       ac.state == "running",
   "AudioContext should switch to \"running\" when the audio stream resumes.");

    ok(stateTracker.resume.promise && !stateTracker.resume.handler,
       "Handler should be called after the callback, and only once");

    stateTracker.resume.handler = true;
    stateTracker.previous = ac.state;
    ac.onstatechange = afterClose;

    ac.close().then(function() {
      ok(!stateTracker.close.promise && !stateTracker.close.handler,
        "Promise should be called before the callback, and only once");
      stateTracker.close.promise = true;
      tryToCreateNodeOnClosedContext(ac);
      tryLegalOpeerationsOnClosedContext(ac);
    });
  }

  // Final statechange handler: verifies the running -> closed transition.
  function afterClose() {
    ok(stateTracker.previous == "running" &&
       ac.state == "closed",
       "AudioContext should switch to \"closed\" when the audio stream is" +
       " closed.");
    ok(stateTracker.close.promise && !stateTracker.close.handler,
       "Handler should be called after the callback, and only once");
  }

  ac.onstatechange = initialSuspendToRunning;
}
    318 
// An OfflineAudioContext starts suspended, rejects resume(), switches to
// running when rendering starts and to closed when rendering completes, and
// never leaves the closed state afterwards.
function testOfflineAudioContext() {
  var o = new OfflineAudioContext(1, 44100, 44100);
  is(o.state, "suspended", "OfflineAudioContext should start in suspended state.");

  // resume() is not supported on offline contexts.
  expectRejectedPromise(o, "resume", "NotSupportedError");

  var previousState = o.state,
      finishedRendering = false;
  function beforeStartRendering() {
    ok(previousState == "suspended" && o.state == "running", "onstatechanged" +
        "handler is called on state changed, and the new state is running");
    previousState = o.state;
    o.onstatechange = onRenderingFinished;
  }

  function onRenderingFinished() {
    ok(previousState == "running" && o.state == "closed",
        "onstatechanged handler is called when rendering finishes, " +
        "and the new state is closed");
    // The startRendering() promise must resolve before this state change.
    ok(finishedRendering, "The Promise that is resolved when the rendering is" +
                    "done should be resolved earlier than the state change.");
    previousState = o.state;
    o.onstatechange = afterRenderingFinished;

    // Once closed, run the shared closed-context checks.
    tryToCreateNodeOnClosedContext(o);
    tryLegalOpeerationsOnClosedContext(o);
  }

  function afterRenderingFinished() {
    ok(false, "There should be no transition out of the closed state.");
  }

  o.onstatechange = beforeStartRendering;

  o.startRendering().then(function() {
    finishedRendering = true;
  });
}
    357 
    358 function testSuspendResumeEventLoop() {
    359  var ac = new AudioContext();
    360  var source = ac.createBufferSource();
    361  source.buffer = ac.createBuffer(1, 44100, 44100);
    362  source.onended = function() {
    363    ok(true, "The AudioContext did resume.");
    364    finish();
    365  }
    366  ac.onstatechange = function() {
    367    ac.onstatechange = null;
    368 
    369    ok(ac.state == "running", "initial state is running");
    370    ac.suspend();
    371    source.start();
    372    ac.resume();
    373  }
    374 }
    375 
    376 function testResumeInStateChangeForResumeCallback() {
    377  // Regression test for bug 1468085.
    378  var ac = new AudioContext;
    379  ac.onstatechange = function() {
    380    ac.resume().then(() => {
    381        ok(true, "resume promise resolved as expected.");
    382        finish();
    383    });
    384  }
    385 }
    386 
    387 var remaining = 0;
    388 function finish() {
    389  remaining--;
    390  if (remaining == 0) {
    391    SimpleTest.finish();
    392  }
    393 }
    394 
    395 
    396 SimpleTest.waitForExplicitFinish();
    397 addLoadEvent(function() {
    398  var tests = [
    399    testOfflineAudioContext,
    400    testScriptProcessNodeSuspended,
    401    testMultiContextOutput,
    402    testMultiContextInput,
    403    testSuspendResumeEventLoop,
    404    testResumeInStateChangeForResumeCallback
    405  ];
    406 
    407  // See Bug 1305136, many intermittent failures on Linux
    408  if (!navigator.platform.startsWith("Linux")) {
    409    tests.push(testAudioContext);
    410  }
    411 
    412  remaining = tests.length;
    413  tests.forEach(function(f) { f() });
    414 });
    415 
    416 </script>
    417 </pre>
    418 </body>
    419 </html>