suspend-with-navigation.html (2773B)
<!doctype html>
<meta name="timeout" content="long">
<title>AudioContext.suspend() with navigation</title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script src="/common/utils.js"></script>
<script src="/common/dispatcher/dispatcher.js"></script>
<script src="/html/browsers/browsing-the-web/back-forward-cache/resources/helper.sub.js"></script>
<script>
'use strict';
// Back/forward-cache test: an AudioContext that was suspend()ed before the
// page is navigated away (and restored from BFCache via back()) must still
// report state 'suspended' afterwards, and a source node's pending "ended"
// event must not be dispatched until the context is resume()d.
runBfcacheTest({
  funcBeforeNavigation: async () => {
    // Helpers are attached to `window` so that funcAfterAssertion's
    // execute_script() calls (which run as separate scripts in this page's
    // global scope) can reach them by name.
    //
    // promise_event: resolves when `name` fires on `target`, by installing
    // an on<name> handler.
    window.promise_event = (target, name) => {
      return new Promise(resolve => target[`on${name}`] = resolve);
    };
    // promise_source_ended: starts a ConstantSourceNode on `audioCtx` that
    // is stopped after a single sample frame (currentTime + 1/sampleRate),
    // and resolves when its "ended" event is dispatched. While the context
    // is suspended, currentTime does not advance, so "ended" stays pending.
    window.promise_source_ended = (audioCtx) => {
      const source = new ConstantSourceNode(audioCtx);
      source.start(0);
      source.stop(audioCtx.currentTime + 1/audioCtx.sampleRate);
      return promise_event(source, "ended");
    };

    window.suspended_ctx = new AudioContext();
    // Perform the equivalent of test_driver.bless() to request a user gesture
    // for when the test is run from a browser. test_driver would need to be
    // able to postMessage() to the test context, which is not available due
    // to window.open() being called with noopener (for back/forward cache).
    // Audio autoplay is expected to be allowed when run through webdriver
    // from `wpt run`.
    let button = document.createElement('button');
    button.innerHTML = 'This test requires user interaction.<br />' +
        'Please click here to allow AudioContext.';
    document.body.appendChild(button);
    button.addEventListener('click', () => {
      // The click provides the user activation needed by autoplay policy;
      // this resume() call lets the earlier (awaited) resume() settle.
      document.body.removeChild(button);
      suspended_ctx.resume();
    }, {once: true});
    // Wait for user gesture, if required.
    await suspended_ctx.resume();
    // Suspend before navigating away; the test checks this state survives
    // the round trip through the back/forward cache.
    await suspended_ctx.suspend();
    // Pending "ended" promise from the now-suspended context; it should not
    // resolve until funcAfterAssertion resumes the context.
    window.ended_promise = promise_source_ended(suspended_ctx);
  },
  funcAfterAssertion: async (pageA) => {
    // After history back() restores the page from BFCache, the context must
    // still be suspended.
    const state = await pageA.execute_script(() => suspended_ctx.state);
    assert_equals(state, 'suspended', 'state after back()');
    const first_ended = await pageA.execute_script(async () => {
      // Wait for an ended event from a running AudioContext to provide enough
      // time to check that the ended event has not yet been dispatched from
      // the suspended ctx.
      const running_ctx = new AudioContext();
      await running_ctx.resume();
      // If the suspended context's "ended" wins this race, suspend() was not
      // honored across the navigation.
      return Promise.race([
        ended_promise.then(() => 'suspended_ctx'),
        promise_source_ended(running_ctx).then(() => 'running_ctx'),
      ]);
    });
    assert_equals(first_ended, 'running_ctx',
                  'AudioContext of first ended event');
    // Resuming the suspended context lets time advance again, so the pending
    // "ended" event must now be dispatched; awaiting it proves delivery.
    await pageA.execute_script(() => {
      window.suspended_ctx.resume();
      return ended_promise;
    });
  },
}, 'suspend() with navigation');
</script>