test_capture_stream_av_sync.html (10153B)
<!DOCTYPE HTML>
<html>
<head>
<title>A/V sync test for stream capturing</title>
<script src="/tests/SimpleTest/SimpleTest.js"></script>
<link rel="stylesheet" type="text/css" href="/tests/SimpleTest/test.css"/>
<p>Following canvas will capture and show the video frame when the video becomes audible</p><br>
<canvas id="canvas" width="640" height="480"></canvas>
<script type="application/javascript">

/**
 * This test will capture stream before the video starts playing, and check if
 * A/V sync will keep sync during playing.
 */
add_task(async function testAVSyncForStreamCapturing() {
  createVideo();
  captureStreamFromVideo();
  await playMedia();
  await testAVSync();
  destroyVideo();
});

/**
 * This test will check if A/V is still on sync after we switch the media sink
 * from playback-based sink to mediatrack-based sink.
 */
add_task(async function testAVSyncWhenSwitchingMediaSink() {
  createVideo();
  await playMedia({resolveAfterReceivingTimeupdate : 5});
  captureStreamFromVideo();
  await testAVSync();
  destroyVideo();
});

/**
 * This test will check if A/V is still on sync after we change the playback
 * rate on the captured stream.
 */
add_task(async function testAVSyncWhenChangingPlaybackRate() {
  createVideo();
  captureStreamFromVideo();
  await playMedia();
  const playbackRates = [0.25, 0.5, 1.0, 1.5, 2.0];
  for (let rate of playbackRates) {
    setPlaybackRate(rate);
    // TODO : when playback rate set to 1.5+x, the A/V will become less stable
    // in testing so we raise the fuzzy frames for that, but also increase the
    // test times. As at that speed, precise A/V becomes trivial because we
    // can't really tell the difference. But it would be good for us to
    // investigate if we could make A/V sync work better at that extreme high
    // rate.
    if (rate >= 1.5) {
      await testAVSync({ expectedAVSyncTestTimes : 4, fuzzyFrames : 10});
    } else {
      await testAVSync({ expectedAVSyncTestTimes : 2 });
    }
  }
  destroyVideo();
});

/**
 * Following are helper functions
 */
const DEBUG = false;

// Verbose logging helper; only forwards `msg` to info() when DEBUG is true.
function info_debug(msg) {
  if (DEBUG) {
    info(msg);
  }
}

// Create the 640x480 test <video> element (id="video") and attach it to the
// document body so other helpers can look it up by id.
function createVideo() {
  const video = document.createElement("video");
  // This video is special for testing A/V sync, it only produce audible sound
  // once per second, and when the sound comes out, you can check the position
  // of the square to know if the A/V keeps sync.
  video.src = "sync.webm";
  video.loop = true;
  video.controls = true;
  video.width = 640;
  video.height = 480;
  video.id = "video";
  document.body.appendChild(video);
}

// Unload the media resource and remove the test <video> from the document.
function destroyVideo() {
  const video = document.getElementById("video");
  // Remove the attribute instead of assigning null: `video.src = null` would
  // coerce null to the string "null" and kick off a bogus load of a relative
  // URL named "null".
  video.removeAttribute("src");
  video.remove();
}

/**
 * Start playback of the test video and assert that play() succeeded.
 * @param [optional] resolveAfterReceivingTimeupdate
 *        When a positive number is given, wait for that many `timeupdate`
 *        events before resolving, to ensure the media clock has advanced on
 *        the normal audio sink.
 */
async function playMedia({ resolveAfterReceivingTimeupdate } = {}) {
  const video = document.getElementById("video");
  ok(await video.play().then(_=>true,_=>false), "video started playing");
  if (resolveAfterReceivingTimeupdate > 0) {
    // Play it for a while to ensure the clock growing on the normal audio sink.
    for (let idx = 0; idx < resolveAfterReceivingTimeupdate; idx++) {
      await new Promise(r => video.ontimeupdate = r);
    }
  }
}

// Route the video's audio through an AnalyserNode (video -> analyser ->
// destination) so testAVSync() can inspect the frequency data. The analyser
// and its reusable frequency buffer are stashed on the video element.
// NOTE: despite the name, this uses createMediaElementSource(), not
// HTMLMediaElement.captureStream(); from the element's point of view the
// effect is the same — its output is redirected into a MediaTrack-based sink.
function captureStreamFromVideo() {
  const video = document.getElementById("video");
  const ac = new AudioContext();
  const analyser = ac.createAnalyser();
  // Set fftSize before sizing the buffer so frequencyBinCount (fftSize / 2)
  // is guaranteed to match the allocation.
  analyser.fftSize = 2048; // 1024 bins
  analyser.smoothingTimeConstant = 0;
  analyser.frequencyBuf = new Float32Array(analyser.frequencyBinCount);
  const sourceNode = ac.createMediaElementSource(video);
  sourceNode.connect(analyser);
  analyser.connect(ac.destination);
  video.analyser = analyser;
}

// Repeatedly (once per animation frame) analyse the captured audio. Each time
// the sound transitions from silent to audible, paint the current video frame
// onto the canvas and check that the moving square sits where it should be if
// audio and video are in sync. Resolves after the expected number of checks.
// @param [optional] expectedAVSyncTestTimes
//        The amount of times that A/V sync test performs.
// @param [optional] fuzzyFrames
//        This will fuzz the result from +0 (perfect sync) to -X to +X frames.
async function testAVSync({ expectedAVSyncTestTimes = 5, fuzzyFrames = 5} = {}) {
  return new Promise(r => {
    const analyser = document.getElementById("video").analyser;
    let testIdx = 0;
    let hasDetectedAudibleFrame = false;
    // As we only want to detect the audible frame at the first moment when
    // sound becomes audible, so we always skip the first audible frame because
    // it might not be the start, but the tail part (where audio is being
    // decaying to silence) when we start detecting.
    let hasSkippedFirstFrame = false;
    analyser.notifyAnalysis = () => {
      let {frequencyBuf} = analyser;
      analyser.getFloatFrequencyData(frequencyBuf);
      if (checkIfBufferIsSilent(frequencyBuf)) {
        info_debug("no need to paint the silent frame");
        // Silence resets the edge detector so the next audible frame counts
        // as a fresh onset.
        hasDetectedAudibleFrame = false;
        requestAnimationFrame(analyser.notifyAnalysis);
        return;
      }
      if (hasDetectedAudibleFrame) {
        info_debug("detected audible frame already");
        requestAnimationFrame(analyser.notifyAnalysis);
        return;
      }
      hasDetectedAudibleFrame = true;
      if (!hasSkippedFirstFrame) {
        info("skip the first audible frame");
        hasSkippedFirstFrame = true;
        requestAnimationFrame(analyser.notifyAnalysis);
        return;
      }
      const video = document.getElementById("video");
      info(`paint audible frame`);
      const cvs = document.getElementById("canvas");
      let context = cvs.getContext('2d');
      context.drawImage(video, 0, 0, 640, 480);
      if (checkIfAVIsOnSyncFuzzy(context, fuzzyFrames)) {
        ok(true, `test ${testIdx++} times, a/v is in sync!`);
      } else {
        ok(false, `test ${testIdx++} times, a/v is out of sync!`);
      }
      if (testIdx == expectedAVSyncTestTimes) {
        r();
        return;
      }
      requestAnimationFrame(analyser.notifyAnalysis);
    };
    analyser.notifyAnalysis();
  });
}

// Return true when every bin of the frequency buffer is at or below the
// -200 dB threshold, i.e. the captured audio is effectively silent.
function checkIfBufferIsSilent(buffer) {
  for (let data of buffer) {
    // when sound is audible, its values are around -200 and the silence values
    // are around -800.
    if (data > -200) {
      return false;
    }
  }
  return true;
}

// This function will check the pixel data from the `context` to see if the
// square appears in the right place. As we can't control the exact timing
// of rendering video frames in the compositor, so the result would be fuzzy.
// Scan the horizontal strip of the canvas where the square's top edge should
// be, find the square's left-top corner by its red color, and return true when
// its horizontal distance from the perfect-sync position is within
// `fuzzyFrames` video frames (adjusting by media time when it looks too far).
// @param context
//        The 2D rendering context of the test canvas (640x480).
// @param fuzzyFrames
//        The tolerated deviation, measured in 60fps video frames.
function checkIfAVIsOnSyncFuzzy(context, fuzzyFrames) {
  const squareLength = 48;
  // Canvas is 640*480, so perfect sync is the left-top corner when the square
  // shows up in the middle.
  const perfectSync =
      { x: 320 - squareLength/2.0 ,
        y: 240 - squareLength/2.0 };
  let isAVSyncFuzzy = false;
  // Get the whole partial section of image and detect where the square is.
  let imageData = context.getImageData(0, perfectSync.y, 640, squareLength);
  for (let i = 0; i < imageData.data.length; i += 4) {
    // If the pixel's color is red, then this position will be the left-top
    // corner of the square.
    if (isPixelColorRed(imageData.data[i], imageData.data[i+1],
                        imageData.data[i+2])) {
      const pos = ImageIdxToRelativeCoordinate(imageData, i);
      let diff = calculateFrameDiffenceInXAxis(pos.x, perfectSync.x);
      info(`find the square in diff=${diff}`);
      // Maybe we check A/V sync too early or too late, try to adjust the diff
      // to guess the previous correct position where the square should be.
      if (diff > fuzzyFrames) {
        diff = adjustFrameDiffBasedOnMediaTime(diff);
        const video = document.getElementById("video");
        info(`adjusted diff to ${diff} (time=${video.currentTime})`);
      }
      if (diff <= fuzzyFrames) {
        isAVSyncFuzzy = true;
      }
      // Paint the analysed strip back for visual inspection.
      // NOTE(review): this writes the strip at the top of the canvas (y=0)
      // rather than back at perfectSync.y where it was read from — presumably
      // intentional as a debugging aid; confirm before changing.
      context.putImageData(imageData, 0, 0);
      break;
    }
  }
  if (!isAVSyncFuzzy) {
    // Dump the whole canvas as a data URL to help diagnose the failure.
    const canvasElem = document.getElementById('canvas');
    info(canvasElem.toDataURL());
  }
  return isAVSyncFuzzy;
}

// Input an imageData and its idx, then return a relative coordinate on the
// range of given imageData.
// Convert a flat RGBA pixel-array index into an {x, y} coordinate relative to
// the given imageData's top-left corner.
function ImageIdxToRelativeCoordinate(imageData, idx) {
  const offset = idx / 4; // RGBA
  // Math.floor keeps `y` an integral row number; plain division would yield a
  // fractional value for any pixel not in the first column.
  return { x: offset % imageData.width,
           y: Math.floor(offset / imageData.width) };
}

// Return how many 60fps video frames apart `squareX` is from `targetX`, given
// the square moves 640px per second horizontally.
function calculateFrameDiffenceInXAxis(squareX, targetX) {
  const offsetX = Math.abs(targetX - squareX);
  const xSpeedPerFrame = 640 / 60; // video is 60fps
  return offsetX / xSpeedPerFrame;
}

// Loose red-pixel detector used to locate the square in the captured frame.
function isPixelColorRed(r, g, b) {
  // As the rendering color would vary in the screen on different platforms, so
  // we won't strict the R should be 255, just check if it's larger than a
  // certain threshold.
  return r > 200 && g < 10 && b < 10;
}

// Set the playback rate on the test video, logging the old and new values.
function setPlaybackRate(rate) {
  const video = document.getElementById("video");
  info(`change playback rate from ${video.playbackRate} to ${rate}`);
  // Reuse the element we already looked up instead of querying the DOM again.
  video.playbackRate = rate;
}

// Compensate `currentDiff` for how far the current media time is from the
// nearest whole second, returning the adjusted frame difference.
function adjustFrameDiffBasedOnMediaTime(currentDiff) {
  // The audio wave can be simply regarded as being composed by "start", "peak"
  // and "tail". The "start" part is the sound gradually becoming louder and the
  // "tail" is gradually becoming silent. We want to check the "peak" part which
  // should happen on every second regularly (1s, 2s, 3s ...) However, this
  // check is triggered by `requestAnimationFrame()` and we can't guarantee that
  // we're checking the peak part while the function is being called. Therefore,
  // we have to do an adjustment by the video time, to know if we're checking
  // the audio wave too early or too late in order to get a consistent result.
  const video = document.getElementById("video");
  const videoCurrentTimeFloatPortion = video.currentTime % 1;
  const timeOffset =
      videoCurrentTimeFloatPortion > 0.5 ?
        1 - videoCurrentTimeFloatPortion : // too early
        videoCurrentTimeFloatPortion; // too late
  // 0.016 is an approximation of one 60fps frame duration (1/60 s).
  const frameOffset = timeOffset / 0.016; // 60fps, 1 frame=0.016s
  info(`timeOffset=${timeOffset}, frameOffset=${frameOffset}`);
  return Math.abs(currentDiff - frameOffset);
}

</script>
</head>
<body>
</body>
</html>