head.js (44027B)
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

"use strict";

var Cc = SpecialPowers.Cc;
var Ci = SpecialPowers.Ci;

// Specifies if we want fake audio streams for this run
let WANT_FAKE_AUDIO = true;
// Specifies if we want fake video streams for this run
let WANT_FAKE_VIDEO = true;
// Frequency (in Hz) of the sine tone used for audio analysis in tests.
let TEST_AUDIO_FREQ = 1000;

/**
 * Reads the current values of preferences affecting fake and loopback devices
 * and sets the WANT_FAKE_AUDIO and WANT_FAKE_VIDEO globals appropriately.
 * A non-empty loopback pref means a real loopback device is configured, so
 * fake streams are not wanted for that media kind.
 */
function updateConfigFromFakeAndLoopbackPrefs() {
  let audioDevice = SpecialPowers.getCharPref("media.audio_loopback_dev", "");
  if (audioDevice) {
    WANT_FAKE_AUDIO = false;
    dump("TEST DEVICES: Got loopback audio: " + audioDevice + "\n");
  } else {
    WANT_FAKE_AUDIO = true;
    dump(
      "TEST DEVICES: No test device found in media.audio_loopback_dev, using fake audio streams.\n"
    );
  }
  let videoDevice = SpecialPowers.getCharPref("media.video_loopback_dev", "");
  if (videoDevice) {
    WANT_FAKE_VIDEO = false;
    dump("TEST DEVICES: Got loopback video: " + videoDevice + "\n");
  } else {
    WANT_FAKE_VIDEO = true;
    dump(
      "TEST DEVICES: No test device found in media.video_loopback_dev, using fake video streams.\n"
    );
  }
}

updateConfigFromFakeAndLoopbackPrefs();

/**
 * Helper class to setup a sine tone of a given frequency.
 */
class LoopbackTone {
  constructor(audioContext, frequency) {
    if (!audioContext) {
      throw new Error("You must provide a valid AudioContext");
    }
    this.oscNode = audioContext.createOscillator();
    var gainNode = audioContext.createGain();
    // Halve the amplitude to leave headroom on the loopback device.
    gainNode.gain.value = 0.5;
    this.oscNode.connect(gainNode);
    gainNode.connect(audioContext.destination);
    this.changeFrequency(frequency);
  }

  // Method should be used when WANT_FAKE_AUDIO is false.
  start() {
    if (!this.oscNode) {
      throw new Error("Attempt to start a stopped LoopbackTone");
    }
    info(`Start loopback tone at ${this.oscNode.frequency.value}`);
    this.oscNode.start();
  }

  // Change the frequency of the tone. It can be used after start.
  // Frequency will change on the fly. No need to stop and create a new instance.
  changeFrequency(frequency) {
    if (!this.oscNode) {
      throw new Error("Attempt to change frequency on a stopped LoopbackTone");
    }
    this.oscNode.frequency.value = frequency;
  }

  // Stops the tone permanently; the instance cannot be restarted afterwards.
  stop() {
    if (!this.oscNode) {
      throw new Error("Attempt to stop a stopped LoopbackTone");
    }
    this.oscNode.stop();
    this.oscNode = null;
  }
}
// Object that holds the default loopback tone.
var DefaultLoopbackTone = null;
/**
 * This class provides helpers around analysing the audio content in a stream
 * using WebAudio AnalyserNodes.
 *
 * @class
 * @param {AudioContext} ac
 *        The AudioContext in which the analysis nodes are created.
 * @param {object} stream
 *        A MediaStream object whose audio track we shall analyse.
 */
function AudioStreamAnalyser(ac, stream) {
  this.audioContext = ac;
  this.stream = stream;
  this.sourceNodes = [];
  this.analyser = this.audioContext.createAnalyser();
  // Setting values lower than default for speedier testing on emulators
  this.analyser.smoothingTimeConstant = 0.2;
  this.analyser.fftSize = 1024;
  // Each audio track is wrapped in its own MediaStream and fed into the
  // shared analyser node.
  this.connectTrack = t => {
    let source = this.audioContext.createMediaStreamSource(
      new MediaStream([t])
    );
    this.sourceNodes.push(source);
    source.connect(this.analyser);
  };
  this.stream.getAudioTracks().forEach(t => this.connectTrack(t));
  // Tracks added to the stream later are connected as well; the listener is
  // removed in disconnect().
  this.onaddtrack = ev => this.connectTrack(ev.track);
  this.stream.addEventListener("addtrack", this.onaddtrack);
  this.data = new Uint8Array(this.analyser.frequencyBinCount);
}

AudioStreamAnalyser.prototype = {
  /**
   * Get an array of frequency domain data for our stream's audio track.
   *
   * @returns {Array} A Uint8Array containing the frequency domain data.
   */
  getByteFrequencyData() {
    this.analyser.getByteFrequencyData(this.data);
    return this.data;
  },

  /**
   * Append a canvas to the DOM where the frequency data are drawn.
   * Useful to debug tests.
   */
  enableDebugCanvas() {
    var cvs = (this.debugCanvas = document.createElement("canvas"));
    const content = document.getElementById("content");
    content.insertBefore(cvs, content.children[0]);

    // Easy: 1px per bin
    cvs.width = this.analyser.frequencyBinCount;
    cvs.height = 128;
    cvs.style.border = "1px solid red";

    var c = cvs.getContext("2d");
    c.fillStyle = "black";

    var self = this;
    function render() {
      c.clearRect(0, 0, cvs.width, cvs.height);
      var array = self.getByteFrequencyData();
      for (var i = 0; i < array.length; i++) {
        c.fillRect(i, cvs.height - array[i] / 2, 1, cvs.height);
      }
      // disableDebugCanvas() sets stopDrawing to end this rAF loop.
      if (!cvs.stopDrawing) {
        requestAnimationFrame(render);
      }
    }
    requestAnimationFrame(render);
  },

  /**
   * Stop drawing of and remove the debug canvas from the DOM if it was
   * previously added.
   */
  disableDebugCanvas() {
    if (!this.debugCanvas || !this.debugCanvas.parentElement) {
      return;
    }

    this.debugCanvas.stopDrawing = true;
    this.debugCanvas.parentElement.removeChild(this.debugCanvas);
  },

  /**
   * Disconnects the input stream from our internal analyser node.
   * Call this to reduce main thread processing, mostly necessary on slow
   * devices.
   */
  disconnect() {
    this.disableDebugCanvas();
    this.sourceNodes.forEach(n => n.disconnect());
    this.sourceNodes = [];
    this.stream.removeEventListener("addtrack", this.onaddtrack);
  },

  /**
   * Return a Promise, that will be resolved when the function passed as
   * argument, when called, returns true (meaning the analysis was a
   * success). The promise is rejected if the cancel promise resolves first.
   *
   * @param {function} analysisFunction
   *        A function that performs an analysis, and returns true if the
   *        analysis was a success (i.e. it found what it was looking for)
   * @param {promise} cancel
   *        A promise that on resolving will reject the promise we returned.
   */
  async waitForAnalysisSuccess(
    analysisFunction,
    cancel = wait(60000, new Error("Audio analysis timed out"))
  ) {
    let aborted = false;
    cancel.then(() => (aborted = true));

    // We need to give the Analyser some time to start gathering data.
    await wait(200);

    do {
      // Re-run the analysis at most once per rendered frame.
      await new Promise(resolve => requestAnimationFrame(resolve));
      if (aborted) {
        throw await cancel;
      }
    } while (!analysisFunction(this.getByteFrequencyData()));
  },

  /**
   * Return the FFT bin index for a given frequency.
   *
   * @param {double} frequency
   *        The frequency for which to return the bin number.
   * @returns {integer} the index of the bin in the FFT array.
   */
  binIndexForFrequency(frequency) {
    return Math.round(
      (frequency * this.analyser.fftSize) / this.audioContext.sampleRate
    );
  },

  /**
   * Reverse operation, get the frequency for a bin index.
   *
   * @param {integer} index an index in an FFT array
   * @returns {double} the frequency for this bin
   */
  frequencyForBinIndex(index) {
    return (index * this.audioContext.sampleRate) / this.analyser.fftSize;
  },
};
/**
 * Creates a MediaStream with an audio track containing a sine tone at the
 * given frequency.
 *
 * @param {AudioContext} ac
 *        AudioContext in which to create the OscillatorNode backing the stream
 * @param {double} frequency
 *        The frequency in Hz of the generated sine tone
 * @returns {MediaStream} the MediaStream containing sine tone audio track
 */
function createOscillatorStream(ac, frequency) {
  const oscillator = ac.createOscillator();
  oscillator.frequency.value = frequency;

  const destination = ac.createMediaStreamDestination();
  oscillator.connect(destination);
  oscillator.start();
  return destination.stream;
}

/**
 * Create the necessary HTML elements for head and body as used by Mochitests
 *
 * @param {object} meta
 *        Meta information of the test
 * @param {string} meta.title
 *        Description of the test
 * @param {string} [meta.bug]
 *        Bug the test was created for
 * @param {boolean} [meta.visible=false]
 *        Visibility of the media elements
 */
function realCreateHTML(meta) {
  const test = document.getElementById("test");

  // Head content: charset declaration followed by the test title.
  const charsetMeta = document.createElement("meta");
  charsetMeta.setAttribute("charset", "utf-8");
  document.head.appendChild(charsetMeta);

  const titleElem = document.createElement("title");
  titleElem.textContent = meta.title;
  document.head.appendChild(titleElem);

  // Body content: a link to the bug (when one is given), a display area, and
  // a container for media elements whose visibility follows meta.visible.
  const anchor = document.createElement("a");
  anchor.textContent = meta.title;
  if (meta.bug) {
    anchor.setAttribute(
      "href",
      "https://bugzilla.mozilla.org/show_bug.cgi?id=" + meta.bug
    );
  } else {
    anchor.setAttribute("target", "_blank");
  }

  document.body.insertBefore(anchor, test);

  const display = document.createElement("p");
  display.setAttribute("id", "display");
  document.body.insertBefore(display, test);

  const content = document.createElement("div");
  content.setAttribute("id", "content");
  content.style.display = meta.visible ? "block" : "none";
  document.body.appendChild(content);
}

/**
 * Creates an element of the given type, assigns the given id, sets the controls
 * and autoplay attributes and adds it to the content node.
 *
 * @param {string} type
 *        Defining if we should create an "audio" or "video" element
 * @param {string} id
 *        A string to use as the element id.
 * @returns {HTMLMediaElement} the newly created, muted, autoplaying element
 */
function createMediaElement(type, id) {
  const el = document.createElement(type);
  const attributes = [
    ["id", id],
    ["height", 100],
    ["width", 150],
    ["controls", "controls"],
    ["autoplay", "autoplay"],
    ["muted", "muted"],
  ];
  for (const [name, value] of attributes) {
    el.setAttribute(name, value);
  }
  el.muted = true;
  document.getElementById("content").appendChild(el);

  return el;
}

/**
 * Returns an existing element for the given track with the given idPrefix,
 * as it was added by createMediaElementForTrack().
 *
 * @param {MediaStreamTrack} track
 *        Track used as the element's source.
 * @param {string} idPrefix
 *        A string to use as the element id. The track id will also be appended.
 */
function getMediaElementForTrack(track, idPrefix) {
  return document.getElementById(`${idPrefix}_${track.id}`);
}

/**
 * Create a media element with a track as source and attach it to the content
 * node.
 *
 * @param {MediaStreamTrack} track
 *        Track for use as source.
 * @param {string} idPrefix
 *        A string to use as the element id. The track id will also be appended.
 * @return {HTMLMediaElement} The created HTML media element
 */
function createMediaElementForTrack(track, idPrefix) {
  const element = createMediaElement(track.kind, `${idPrefix}_${track.id}`);
  element.srcObject = new MediaStream([track]);

  return element;
}

/**
 * Wrapper function for mediaDevices.getUserMedia used by some tests. Whether
 * to use fake devices or not is now determined in pref further below instead.
 *
 * @param {Dictionary} constraints
 *        The constraints for this getUserMedia callback
 */
async function getUserMedia(constraints) {
  if (!constraints.fake && constraints.audio) {
    // Disable input processing mode when it's not explicitly enabled.
    // This is to avoid distortion of the loopback tone
    constraints.audio = {
      autoGainControl: false,
      echoCancellation: false,
      noiseSuppression: false,
      ...constraints.audio,
    };
  }
  info("Call getUserMedia for " + JSON.stringify(constraints));
  const stream = await navigator.mediaDevices.getUserMedia(constraints);
  checkMediaStreamTracks(constraints, stream);
  return stream;
}

// These are the promises we use to track that the prerequisites for the test
// are in place before running it.
// Resolver for testConfigured; invoked (via pushPrefEnv's callback in
// setupEnvironment) once the default prefs have been applied.
var setTestOptions;
var testConfigured = new Promise(r => (setTestOptions = r));

// Push key/value pref pairs onto the pref environment for the current test.
function pushPrefs(...p) {
  return SpecialPowers.pushPrefEnv({ set: p });
}

// Run |func| with |prefs| pushed, popping the pref environment afterwards
// whether |func| succeeds or throws.
async function withPrefs(prefs, func) {
  await SpecialPowers.pushPrefEnv({ set: prefs });
  try {
    return await func();
  } finally {
    await SpecialPowers.popPrefEnv();
  }
}

/**
 * Applies the default WebRTC mochitest pref environment and resolves
 * testConfigured once done. Also requests flaky-timeout permission and kicks
 * off a GC to keep the backlog small.
 */
function setupEnvironment() {
  var defaultMochitestPrefs = {
    set: [
      ["media.peerconnection.enabled", true],
      ["media.peerconnection.identity.timeout", 120000],
      ["media.peerconnection.ice.stun_client_maximum_transmits", 14],
      ["media.peerconnection.ice.trickle_grace_period", 30000],
      ["media.navigator.permission.disabled", true],
      // If either fake audio or video is desired we enable fake streams.
      // If loopback devices are set they will be chosen instead of fakes in gecko.
      ["media.navigator.streams.fake", WANT_FAKE_AUDIO || WANT_FAKE_VIDEO],
      ["media.getusermedia.audio.capture.enabled", true],
      ["media.getusermedia.screensharing.enabled", true],
      ["media.getusermedia.window.focus_source.enabled", false],
      ["media.recorder.audio_node.enabled", true],
      ["media.peerconnection.ice.obfuscate_host_addresses", false],
      ["media.peerconnection.nat_simulator.filtering_type", ""],
      ["media.peerconnection.nat_simulator.mapping_type", ""],
      ["media.peerconnection.nat_simulator.block_tcp", false],
      ["media.peerconnection.nat_simulator.block_udp", false],
      ["media.peerconnection.nat_simulator.redirect_address", ""],
      ["media.peerconnection.nat_simulator.redirect_targets", ""],
      ["media.peerconnection.treat_warnings_as_errors", true],
    ],
  };

  if (navigator.userAgent.includes("Android")) {
    // Smaller/slower defaults for Android emulators.
    defaultMochitestPrefs.set.push(
      ["media.navigator.video.default_width", 320],
      ["media.navigator.video.default_height", 240],
      ["media.navigator.video.max_fr", 10],
      ["media.autoplay.default", Ci.nsIAutoplay.ALLOWED]
    );
  }

  // Platform codec prefs should be matched because fake H.264 GMP codec doesn't
  // produce/consume real bitstreams. [TODO] remove after bug 1509012 is fixed.
  const platformEncoderEnabled =
    SpecialPowers.getIntPref("media.webrtc.encoder_creation_strategy") == 1;
  defaultMochitestPrefs.set.push([
    "media.navigator.mediadatadecoder_h264_enabled",
    platformEncoderEnabled,
  ]);

  // Running as a Mochitest.
  SimpleTest.requestFlakyTimeout("WebRTC inherently depends on timeouts");
  window.finish = () => SimpleTest.finish();
  SpecialPowers.pushPrefEnv(defaultMochitestPrefs, setTestOptions);

  // We don't care about waiting for this to complete, we just want to ensure
  // that we don't build up a huge backlog of GC work.
  SpecialPowers.exactGC();
}

// [TODO] remove after bug 1509012 is fixed.
function checkPlatformH264CodecPrefs() {
  // Has platform (MediaDataEncoder) H.264 support
  const platform =
    SpecialPowers.getIntPref("media.webrtc.encoder_creation_strategy") == 1 &&
    (navigator.userAgent.includes("Android") ||
      navigator.userAgent.includes("Mac OS X"));
  const webrtc = !navigator.userAgent.includes("Android");
  return {
    platform,
    webrtc,
    any: platform || webrtc,
  };
}

/**
 * Sets up the pref environment, waits for the test options to be configured,
 * then runs |testFunc|. Failures are reported via ok(false, ...) and the
 * Mochitest is always finished, whether the test threw or not.
 */
async function runTestWhenReady(testFunc) {
  setupEnvironment();
  const options = await testConfigured;
  try {
    await testFunc(options);
  } catch (e) {
    ok(
      false,
      `Error executing test: ${e}
${e.stack ? e.stack : ""}`
    );
  } finally {
    SimpleTest.finish();
  }
}
/**
 * Checks that the media stream tracks have the expected amount of tracks
 * with the correct attributes based on the type and constraints given.
 *
 * @param {object} constraints specifies whether the stream should have
 *                             audio, video, or both
 * @param {string} type the type of media stream tracks being checked
 * @param {sequence<MediaStreamTrack>} mediaStreamTracks the media stream
 *                                     tracks being checked
 */
function checkMediaStreamTracksByType(constraints, type, mediaStreamTracks) {
  if (constraints[type]) {
    is(mediaStreamTracks.length, 1, "One " + type + " track shall be present");

    if (mediaStreamTracks.length) {
      is(mediaStreamTracks[0].kind, type, "Track kind should be " + type);
      ok(mediaStreamTracks[0].id, "Track id should be defined");
      ok(!mediaStreamTracks[0].muted, "Track should not be muted");
    }
  } else {
    is(mediaStreamTracks.length, 0, "No " + type + " tracks shall be present");
  }
}

/**
 * Check that the given media stream contains the expected media stream
 * tracks given the associated audio & video constraints provided.
 *
 * @param {object} constraints specifies whether the stream should have
 *                             audio, video, or both
 * @param {MediaStream} mediaStream the media stream being checked
 */
function checkMediaStreamTracks(constraints, mediaStream) {
  checkMediaStreamTracksByType(
    constraints,
    "audio",
    mediaStream.getAudioTracks()
  );
  checkMediaStreamTracksByType(
    constraints,
    "video",
    mediaStream.getVideoTracks()
  );
}

/**
 * Check that a media stream contains exactly a set of media stream tracks.
 *
 * @param {MediaStream} mediaStream the media stream being checked
 * @param {Array} tracks the tracks that should exist in mediaStream
 * @param {string} [message] an optional message to pass to asserts
 */
function checkMediaStreamContains(mediaStream, tracks, message) {
  message = message ? message + ": " : "";
  tracks.forEach(t =>
    ok(
      mediaStream.getTrackById(t.id),
      message + "MediaStream " + mediaStream.id + " contains track " + t.id
    )
  );
  is(
    mediaStream.getTracks().length,
    tracks.length,
    message + "MediaStream " + mediaStream.id + " contains no extra tracks"
  );
}

/**
 * Check that a cloned MediaStream is a proper clone: a distinct object with a
 * distinct id, the same number of tracks per kind, the same active state, and
 * no tracks shared with the original.
 */
function checkMediaStreamCloneAgainstOriginal(clone, original) {
  isnot(clone.id.length, 0, "Stream clone should have an id string");
  isnot(clone, original, "Stream clone should be different from the original");
  isnot(
    clone.id,
    original.id,
    "Stream clone's id should be different from the original's"
  );
  is(
    clone.getAudioTracks().length,
    original.getAudioTracks().length,
    "All audio tracks should get cloned"
  );
  is(
    clone.getVideoTracks().length,
    original.getVideoTracks().length,
    "All video tracks should get cloned"
  );
  is(clone.active, original.active, "Active state should be preserved");
  original
    .getTracks()
    .forEach(t =>
      // NOTE(review): this asserts the clone does NOT contain the original
      // track ids; the message wording reads oddly but is kept as-is.
      ok(!clone.getTrackById(t.id), "The clone's tracks should be originals")
    );
}

/**
 * Check that a cloned MediaStreamTrack matches the original in kind, enabled,
 * readyState and muted state while having a distinct id.
 */
function checkMediaStreamTrackCloneAgainstOriginal(clone, original) {
  isnot(clone.id.length, 0, "Track clone should have an id string");
  isnot(clone, original, "Track clone should be different from the original");
  isnot(
    clone.id,
    original.id,
    "Track clone's id should be different from the original's"
  );
  is(
    clone.kind,
    original.kind,
    "Track clone's kind should be same as the original's"
  );
  // Fixed copy-pasted assertion message: this compares the enabled state,
  // not the kind.
  is(
    clone.enabled,
    original.enabled,
    "Track clone's enabled state should be same as the original's"
  );
  is(
    clone.readyState,
    original.readyState,
    "Track clone's readyState should be same as the original's"
  );
  is(
    clone.muted,
    original.muted,
    "Track clone's muted state should be same as the original's"
  );
}

/*** Utility methods */
/** The dreadful setTimeout, use sparingly */
function wait(time, message) {
  return new Promise(resolve => {
    setTimeout(() => resolve(message), time);
  });
}

/** The even more dreadful setInterval, use even more sparingly */
function waitUntil(func, time) {
  return new Promise(resolve => {
    const interval = setInterval(() => {
      if (!func()) {
        return;
      }
      clearInterval(interval);
      resolve();
    }, time || 200);
  });
}

/** Time out while waiting for a promise to get resolved or rejected. */
var timeout = (promise, time, msg) => {
  const timer = wait(time).then(() => Promise.reject(new Error(msg)));
  return Promise.race([promise, timer]);
};

/**
 * Adds a |finally| function to a promise whose argument is invoked whether the
 * promise is resolved or rejected, and that does not interfere with chaining.
 */
var addFinallyToPromise = promise => {
  promise.finally = func => {
    const onResolve = result => {
      func();
      return Promise.resolve(result);
    };
    const onReject = error => {
      func();
      return Promise.reject(error);
    };
    return promise.then(onResolve, onReject);
  };
  return promise;
};

/** Use event listener to call passed-in function on fire until it returns true */
var listenUntil = (target, eventName, onFire) => {
  return new Promise(resolve => {
    target.addEventListener(eventName, function callback(event) {
      const result = onFire(event);
      if (!result) {
        return;
      }
      target.removeEventListener(eventName, callback);
      resolve(result);
    });
  });
};

/* Test that a function throws the right error */
function mustThrowWith(msg, reason, f) {
  let thrown = null;
  try {
    f();
  } catch (e) {
    thrown = e;
  }
  if (thrown) {
    is(thrown.name, reason, msg + " must throw: " + thrown.message);
  } else {
    ok(false, msg + " must throw");
  }
}

/* Get a dummy audio track */
function getSilentTrack() {
  const ctx = new AudioContext();
  const oscillator = ctx.createOscillator();
  const dst = oscillator.connect(ctx.createMediaStreamDestination());
  oscillator.start();
  const [track] = dst.stream.getAudioTracks();
  track.enabled = false;
  return track;
}

/* Get a dummy (black, disabled) video track from a canvas capture */
function getBlackTrack({ width = 640, height = 480 } = {}) {
  const canvas = document.createElement("canvas");
  canvas.width = width;
  canvas.height = height;
  canvas.getContext("2d").fillRect(0, 0, width, height);
  const [track] = canvas.captureStream().getVideoTracks();
  track.enabled = false;
  return track;
}

/*** Test control flow methods */

/**
 * Generates a callback function fired only under unexpected circumstances
 * while running the tests. The generated function kills off the test as well
 * gracefully.
 *
 * @param {string} [message]
 *        An optional message to show if no object gets passed into the
 *        generated callback method.
 */
function generateErrorCallback(message) {
  const stack = new Error().stack.split("\n");
  stack.shift(); // Don't include this instantiation frame

  /**
   * @param {object} aObj
   *        The object fired back from the callback
   */
  return aObj => {
    let details;
    if (!aObj) {
      details = `Unexpected callback with message = '${message}' at: ${JSON.stringify(
        stack
      )}`;
    } else if (aObj.name && aObj.message) {
      details = `Unexpected callback for '${aObj.name}' with message = '${
        aObj.message
      }' at ${JSON.stringify(stack)}`;
    } else {
      details = `Unexpected callback with = '${aObj}' at: ${JSON.stringify(
        stack
      )}`;
    }
    ok(false, details);
    throw new Error("Unexpected callback");
  };
}

// Rejecting this promise (via unexpectedEventArrived) aborts in-flight
// command chains; see CommandChain.execute.
var unexpectedEventArrived;
var rejectOnUnexpectedEvent = new Promise((_resolve, reject) => {
  unexpectedEventArrived = reject;
});
/**
 * Generates a callback function fired only for unexpected events happening.
 *
 * @param {string} message
 *        Description of the object for which the event has been fired
 * @param {string} eventName
 *        Name of the unexpected event
 */
function unexpectedEvent(message, eventName) {
  const stack = new Error().stack.split("\n");
  stack.shift(); // Don't include this instantiation frame

  return _e => {
    const details = `Unexpected event '${eventName}' fired with message = '${message}' at: ${JSON.stringify(
      stack
    )}`;
    ok(false, details);
    unexpectedEventArrived(new Error(details));
  };
}

/**
 * Implements the one-shot event pattern used throughout. Each of the 'onxxx'
 * attributes on the wrappers can be set with a custom handler. Prior to the
 * handler being set, if the event fires, it causes the test execution to halt.
 * That handler is used exactly once, after which the original, error-generating
 * handler is re-installed. Thus, each event handler is used at most once.
 *
 * @param {object} wrapper
 *        The wrapper on which the pseudo-handler is installed
 * @param {object} obj
 *        The real source of events
 * @param {string} event
 *        The name of the event
 */
function createOneShotEventWrapper(wrapper, obj, event) {
  const onx = `on${event}`;
  const unexpected = unexpectedEvent(wrapper, event);
  wrapper[onx] = unexpected;
  obj[onx] = e => {
    info(`${wrapper}: "on${event}" event fired`);
    e.wrapper = wrapper;
    wrapper[onx](e);
    // Re-arm the error-generating handler for any further events.
    wrapper[onx] = unexpected;
  };
}

/**
 * Returns a promise that resolves when `target` has raised an event with the
 * given name the given number of times. Cancel the returned promise by passing
 * in a `cancel` promise and resolving it.
 *
 * @param {object} target
 *        The target on which the event should occur.
 * @param {string} name
 *        The name of the event that should occur.
 * @param {integer} count
 *        Optional number of times the event should be raised before resolving.
 * @param {promise} cancel
 *        Optional promise that on resolving rejects the returned promise,
 *        so we can avoid logging results after a test has finished.
 * @returns {promise} A promise that resolves to the last of the seen events.
 */
function haveEvents(target, name, count, cancel) {
  let remaining = count || 1;
  let handler;
  const sawEnoughEvents = new Promise(resolve => {
    handler = e => {
      remaining -= 1;
      if (remaining < 1) {
        resolve(e);
      }
    };
    target.addEventListener(name, handler);
  });
  // A resolving cancel promise is converted into a rejection of our result.
  const cancellation = (cancel || new Promise(() => {})).then(e =>
    Promise.reject(e)
  );
  return Promise.race([cancellation, sawEnoughEvents]).then(e => {
    target.removeEventListener(name, handler);
    return e;
  });
}

/**
 * Returns a promise that resolves when `target` has raised an event with the
 * given name. Cancel the returned promise by passing in a `cancel` promise and
 * resolving it.
 *
 * @param {object} target
 *        The target on which the event should occur.
 * @param {string} name
 *        The name of the event that should occur.
 * @param {promise} cancel
 *        Optional promise that on resolving rejects the returned promise,
 *        so we can avoid logging results after a test has finished.
 * @returns {promise} A promise that resolves to the seen event.
 */
function haveEvent(target, name, cancel) {
  return haveEvents(target, name, 1, cancel);
}

/**
 * Returns a promise that resolves if the target has not seen the given event
 * after one crank (or until the given timeoutPromise resolves) of the event
 * loop.
 *
 * @param {object} target
 *        The target on which the event should not occur.
 * @param {string} name
 *        The name of the event that should not occur.
 * @param {promise} timeoutPromise
 *        Optional promise defining how long we should wait before resolving.
 * @returns {promise} A promise that is rejected if we see the given event, or
 *                    resolves after a timeout otherwise.
 */
function haveNoEvent(target, name, timeoutPromise) {
  const sawEvent = () =>
    Promise.reject(new Error(`Too many ${name} events`));
  const timedOut = () => {};
  return haveEvent(target, name, timeoutPromise || wait(0)).then(
    sawEvent,
    timedOut
  );
}

/**
 * Returns a promise that resolves after the target has seen the given number
 * of events but no such event in a following crank of the event loop.
 *
 * @param {object} target
 *        The target on which the events should occur.
 * @param {string} name
 *        The name of the event that should occur.
 * @param {integer} count
 *        Optional number of times the event should be raised before resolving.
 * @param {promise} cancel
 *        Optional promise that on resolving rejects the returned promise,
 *        so we can avoid logging results after a test has finished.
 * @returns {promise} A promise that resolves to the last of the seen events.
 */
async function haveEventsButNoMore(target, name, count, cancel) {
  const e = await haveEvents(target, name, count, cancel);
  await haveNoEvent(target, name);
  return e;
}
907 */ 908 const collectMemoryUsage = async path => { 909 const MemoryReporterManager = Cc[ 910 "@mozilla.org/memory-reporter-manager;1" 911 ].getService(Ci.nsIMemoryReporterManager); 912 913 let usage = 0; 914 let reportCount = 0; 915 await new Promise(resolve => 916 MemoryReporterManager.getReports( 917 (aProcess, aPath, aKind, aUnits, aAmount, aDesc) => { 918 if (aPath != path) { 919 return; 920 } 921 ++reportCount; 922 usage += aAmount; 923 }, 924 null, 925 resolve, 926 null, 927 /* anonymized = */ false 928 ) 929 ); 930 return { usage, reportCount }; 931 }; 932 933 // Some DNS helper functions 934 const dnsLookup = async hostname => { 935 // Convenience API for various networking related stuff. _Almost_ convenient 936 // enough. 937 const neckoDashboard = SpecialPowers.Cc[ 938 "@mozilla.org/network/dashboard;1" 939 ].getService(Ci.nsIDashboard); 940 941 const results = await new Promise(r => { 942 neckoDashboard.requestDNSLookup(hostname, results => { 943 r(SpecialPowers.wrap(results)); 944 }); 945 }); 946 947 // |address| is an array-like dictionary (ie; keys are all integers). 948 // We convert to an array to make it less unwieldy. 
949 const addresses = [...results.address]; 950 info(`DNS results for ${hostname}: ${JSON.stringify(addresses)}`); 951 return addresses; 952 }; 953 954 const dnsLookupV4 = async hostname => { 955 const addresses = await dnsLookup(hostname); 956 return addresses.filter(address => !address.includes(":")); 957 }; 958 959 const dnsLookupV6 = async hostname => { 960 const addresses = await dnsLookup(hostname); 961 return addresses.filter(address => address.includes(":")); 962 }; 963 964 const getTurnHostname = turnUrl => { 965 const urlNoParams = turnUrl.split("?")[0]; 966 // Strip off scheme 967 const hostAndMaybePort = urlNoParams.split(":", 2)[1]; 968 if (hostAndMaybePort[0] == "[") { 969 // IPV6 literal, strip out '[', and split at closing ']' 970 return hostAndMaybePort.substring(1).split("]")[0]; 971 } 972 return hostAndMaybePort.split(":")[0]; 973 }; 974 975 // Yo dawg I heard you like yo dawg I heard you like Proxies 976 // Example: let value = await GleanTest.category.metric.testGetValue(); 977 // For labeled metrics: 978 // let value = await GleanTest.category.metric["label"].testGetValue(); 979 // Please don't try to use the string "testGetValue" as a label. 980 const GleanTest = new Proxy( 981 {}, 982 { 983 get(target, categoryName, receiver) { 984 return new Proxy( 985 {}, 986 { 987 get(target, metricName, receiver) { 988 return new Proxy( 989 { 990 async testGetValue() { 991 return SpecialPowers.spawnChrome( 992 [categoryName, metricName], 993 async (categoryName, metricName) => { 994 await Services.fog.testFlushAllChildren(); 995 const window = this.browsingContext.topChromeWindow; 996 return window.Glean[categoryName][ 997 metricName 998 ].testGetValue(); 999 } 1000 ); 1001 }, 1002 }, 1003 { 1004 get(target, prop, receiver) { 1005 // The only prop that will be there is testGetValue, but we 1006 // might add more later. 1007 if (prop in target) { 1008 return target[prop]; 1009 } 1010 1011 // |prop| must be a label? 
1012 const label = prop; 1013 return { 1014 async testGetValue() { 1015 return SpecialPowers.spawnChrome( 1016 [categoryName, metricName, label], 1017 async (categoryName, metricName, label) => { 1018 await Services.fog.testFlushAllChildren(); 1019 const window = this.browsingContext.topChromeWindow; 1020 return window.Glean[categoryName][metricName][ 1021 label 1022 ].testGetValue(); 1023 } 1024 ); 1025 }, 1026 }; 1027 }, 1028 } 1029 ); 1030 }, 1031 } 1032 ); 1033 }, 1034 } 1035 ); 1036 1037 /** 1038 * This class executes a series of functions in a continuous sequence. 1039 * Promise-bearing functions are executed after the previous promise completes. 1040 * 1041 * @class 1042 * @param {object} framework 1043 * A back reference to the framework which makes use of the class. It is 1044 * passed to each command callback. 1045 * @param {Function[]} commandList 1046 * Commands to set during initialization 1047 */ 1048 function CommandChain(framework, commandList) { 1049 this._framework = framework; 1050 this.commands = commandList || []; 1051 } 1052 1053 CommandChain.prototype = { 1054 /** 1055 * Start the command chain. This returns a promise that always resolves 1056 * cleanly (this catches errors and fails the test case). 1057 */ 1058 execute() { 1059 return this.commands 1060 .reduce((prev, next, i) => { 1061 if (typeof next !== "function" || !next.name) { 1062 throw new Error("registered non-function" + next); 1063 } 1064 1065 return prev.then(() => { 1066 info("Run step " + (i + 1) + ": " + next.name); 1067 return Promise.race([next(this._framework), rejectOnUnexpectedEvent]); 1068 }); 1069 }, Promise.resolve()) 1070 .catch(e => 1071 ok( 1072 false, 1073 "Error in test execution: " + 1074 e + 1075 (typeof e.stack === "string" 1076 ? " " + e.stack.split("\n").join(" ... 
") 1077 : "") 1078 ) 1079 ); 1080 }, 1081 1082 /** 1083 * Add new commands to the end of the chain 1084 */ 1085 append(commands) { 1086 this.commands = this.commands.concat(commands); 1087 }, 1088 1089 /** 1090 * Returns the index of the specified command in the chain. 1091 * 1092 * @param {occurrence} Optional param specifying which occurrence to match, 1093 * with 0 representing the first occurrence. 1094 */ 1095 indexOf(functionOrName, occurrence) { 1096 occurrence = occurrence || 0; 1097 return this.commands.findIndex(func => { 1098 if (typeof functionOrName === "string") { 1099 if (func.name !== functionOrName) { 1100 return false; 1101 } 1102 } else if (func !== functionOrName) { 1103 return false; 1104 } 1105 if (occurrence) { 1106 --occurrence; 1107 return false; 1108 } 1109 return true; 1110 }); 1111 }, 1112 1113 mustHaveIndexOf(functionOrName, occurrence) { 1114 var index = this.indexOf(functionOrName, occurrence); 1115 if (index == -1) { 1116 throw new Error("Unknown test: " + functionOrName); 1117 } 1118 return index; 1119 }, 1120 1121 /** 1122 * Inserts the new commands after the specified command. 1123 */ 1124 insertAfter(functionOrName, commands, all, occurrence) { 1125 this._insertHelper(functionOrName, commands, 1, all, occurrence); 1126 }, 1127 1128 /** 1129 * Inserts the new commands after every occurrence of the specified command 1130 */ 1131 insertAfterEach(functionOrName, commands) { 1132 this._insertHelper(functionOrName, commands, 1, true); 1133 }, 1134 1135 /** 1136 * Inserts the new commands before the specified command. 
1137 */ 1138 insertBefore(functionOrName, commands, all, occurrence) { 1139 this._insertHelper(functionOrName, commands, 0, all, occurrence); 1140 }, 1141 1142 _insertHelper(functionOrName, commands, delta, all, occurrence) { 1143 occurrence = occurrence || 0; 1144 for ( 1145 var index = this.mustHaveIndexOf(functionOrName, occurrence); 1146 index !== -1; 1147 index = this.indexOf(functionOrName, ++occurrence) 1148 ) { 1149 this.commands = [].concat( 1150 this.commands.slice(0, index + delta), 1151 commands, 1152 this.commands.slice(index + delta) 1153 ); 1154 if (!all) { 1155 break; 1156 } 1157 } 1158 }, 1159 1160 /** 1161 * Removes the specified command, returns what was removed. 1162 */ 1163 remove(functionOrName, occurrence) { 1164 return this.commands.splice( 1165 this.mustHaveIndexOf(functionOrName, occurrence), 1166 1 1167 ); 1168 }, 1169 1170 /** 1171 * Removes all commands after the specified one, returns what was removed. 1172 */ 1173 removeAfter(functionOrName, occurrence) { 1174 return this.commands.splice( 1175 this.mustHaveIndexOf(functionOrName, occurrence) + 1 1176 ); 1177 }, 1178 1179 /** 1180 * Removes all commands before the specified one, returns what was removed. 1181 */ 1182 removeBefore(functionOrName, occurrence) { 1183 return this.commands.splice( 1184 0, 1185 this.mustHaveIndexOf(functionOrName, occurrence) 1186 ); 1187 }, 1188 1189 /** 1190 * Replaces a single command, returns what was removed. 1191 */ 1192 replace(functionOrName, commands) { 1193 this.insertBefore(functionOrName, commands); 1194 return this.remove(functionOrName); 1195 }, 1196 1197 /** 1198 * Replaces all commands after the specified one, returns what was removed. 1199 */ 1200 replaceAfter(functionOrName, commands, occurrence) { 1201 var oldCommands = this.removeAfter(functionOrName, occurrence); 1202 this.append(commands); 1203 return oldCommands; 1204 }, 1205 1206 /** 1207 * Replaces all commands before the specified one, returns what was removed. 
1208 */ 1209 replaceBefore(functionOrName, commands) { 1210 var oldCommands = this.removeBefore(functionOrName); 1211 this.insertBefore(functionOrName, commands); 1212 return oldCommands; 1213 }, 1214 1215 /** 1216 * Remove all commands whose name match the specified regex. 1217 */ 1218 filterOut(id_match) { 1219 this.commands = this.commands.filter(c => !id_match.test(c.name)); 1220 }, 1221 }; 1222 1223 function AudioStreamFlowingHelper() { 1224 this._context = new AudioContext(); 1225 // Tests may have changed the values of prefs, so recheck 1226 updateConfigFromFakeAndLoopbackPrefs(); 1227 if (!WANT_FAKE_AUDIO) { 1228 // Loopback device is configured, start the default loopback tone 1229 if (!DefaultLoopbackTone) { 1230 DefaultLoopbackTone = new LoopbackTone(this._context, TEST_AUDIO_FREQ); 1231 DefaultLoopbackTone.start(); 1232 } 1233 } 1234 } 1235 1236 AudioStreamFlowingHelper.prototype = { 1237 checkAudio(stream, analyser, fun) { 1238 /* 1239 analyser.enableDebugCanvas(); 1240 return analyser.waitForAnalysisSuccess(fun) 1241 .then(() => analyser.disableDebugCanvas()); 1242 */ 1243 return analyser.waitForAnalysisSuccess(fun); 1244 }, 1245 1246 checkAudioFlowing(stream) { 1247 var analyser = new AudioStreamAnalyser(this._context, stream); 1248 var freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ); 1249 return this.checkAudio(stream, analyser, array => array[freq] > 200); 1250 }, 1251 1252 // Use checkAudioNotFlowing() only after checkAudioFlowing() or similar to 1253 // know that audio had previously been flowing on the same stream, as 1254 // checkAudioNotFlowing() does not wait for the loopback device to return 1255 // any audio that it receives. 
1256 checkAudioNotFlowing(stream) { 1257 var analyser = new AudioStreamAnalyser(this._context, stream); 1258 var freq = analyser.binIndexForFrequency(TEST_AUDIO_FREQ); 1259 return this.checkAudio(stream, analyser, array => array[freq] < 50); 1260 }, 1261 }; 1262 1263 class VideoFrameEmitter { 1264 constructor(color1, color2, width, height, { fillEntireFrame = false } = {}) { 1265 if (!width) { 1266 width = 64; 1267 } 1268 if (!height) { 1269 height = width; 1270 } 1271 this._helper = new CaptureStreamTestHelper2D(width, height); 1272 this._fillEntireFrame = fillEntireFrame; 1273 this._canvas = this._helper.createAndAppendElement( 1274 "canvas", 1275 "source_canvas" 1276 ); 1277 this._canvas.width = width; 1278 this._canvas.height = height; 1279 this._drawColorOptions = { 1280 offsetX: 0, 1281 offsetY: 0, 1282 width: fillEntireFrame ? width : width / 2, 1283 height: fillEntireFrame ? height : height / 2, 1284 }; 1285 this._color1 = color1 ? color1 : this._helper.green; 1286 this._color2 = color2 ? color2 : this._helper.red; 1287 // Make sure this is initted 1288 this._helper.drawColor(this._canvas, this._color1, this._drawColorOptions); 1289 this._stream = this._canvas.captureStream(); 1290 this._started = false; 1291 } 1292 1293 stream() { 1294 return this._stream; 1295 } 1296 1297 helper() { 1298 return this._helper; 1299 } 1300 1301 colors(color1, color2) { 1302 this._color1 = color1 ? color1 : this._helper.green; 1303 this._color2 = color2 ? color2 : this._helper.red; 1304 try { 1305 this._helper.drawColor( 1306 this._canvas, 1307 this._color1, 1308 this._drawColorOptions 1309 ); 1310 } catch (e) { 1311 // ignore; stream might have shut down 1312 } 1313 } 1314 1315 size(width, height) { 1316 this._canvas.width = width; 1317 this._canvas.height = height; 1318 this._drawColorOptions.width = this._fillEntireFrame ? width : width / 2; 1319 this._drawColorOptions.height = this._fillEntireFrame ? 
height : height / 2; 1320 } 1321 1322 start() { 1323 if (this._started) { 1324 info("*** emitter already started"); 1325 return; 1326 } 1327 1328 let i = 0; 1329 this._started = true; 1330 this._intervalId = setInterval(() => { 1331 try { 1332 this._helper.drawColor( 1333 this._canvas, 1334 i ? this._color1 : this._color2, 1335 this._drawColorOptions 1336 ); 1337 i = 1 - i; 1338 } catch (e) { 1339 // ignore; stream might have shut down, and we don't bother clearing 1340 // the setInterval. 1341 } 1342 }, 500); 1343 } 1344 1345 stop() { 1346 if (this._started) { 1347 clearInterval(this._intervalId); 1348 this._started = false; 1349 } 1350 } 1351 } 1352 1353 class VideoStreamHelper { 1354 constructor() { 1355 this._helper = new CaptureStreamTestHelper2D(50, 50); 1356 } 1357 1358 async checkHasFrame(video, { offsetX, offsetY, threshold } = {}) { 1359 const h = this._helper; 1360 await h.waitForPixel( 1361 video, 1362 px => { 1363 let result = h.isOpaquePixelNot(px, h.black, threshold); 1364 info( 1365 "Checking that we have a frame, got [" + 1366 Array.from(px) + 1367 "]. Ref=[" + 1368 Array.from(h.black.data) + 1369 "]. Threshold=" + 1370 threshold + 1371 ". Pass=" + 1372 result 1373 ); 1374 return result; 1375 }, 1376 { offsetX, offsetY } 1377 ); 1378 } 1379 1380 async checkVideoPlaying( 1381 video, 1382 { offsetX = 10, offsetY = 10, threshold = 16 } = {} 1383 ) { 1384 const h = this._helper; 1385 await this.checkHasFrame(video, { offsetX, offsetY, threshold }); 1386 let startPixel = { 1387 data: h.getPixel(video, offsetX, offsetY), 1388 name: "startcolor", 1389 }; 1390 await h.waitForPixel( 1391 video, 1392 px => { 1393 let result = h.isPixelNot(px, startPixel, threshold); 1394 info( 1395 "Checking playing, [" + 1396 Array.from(px) + 1397 "] vs [" + 1398 Array.from(startPixel.data) + 1399 "]. 
Threshold=" + 1400 threshold + 1401 " Pass=" + 1402 result 1403 ); 1404 return result; 1405 }, 1406 { offsetX, offsetY } 1407 ); 1408 } 1409 1410 async checkVideoPaused( 1411 video, 1412 { offsetX = 10, offsetY = 10, threshold = 16, time = 5000 } = {} 1413 ) { 1414 const h = this._helper; 1415 await this.checkHasFrame(video, { offsetX, offsetY, threshold }); 1416 let startPixel = { 1417 data: h.getPixel(video, offsetX, offsetY), 1418 name: "startcolor", 1419 }; 1420 try { 1421 await h.waitForPixel( 1422 video, 1423 px => { 1424 let result = h.isOpaquePixelNot(px, startPixel, threshold); 1425 info( 1426 "Checking paused, [" + 1427 Array.from(px) + 1428 "] vs [" + 1429 Array.from(startPixel.data) + 1430 "]. Threshold=" + 1431 threshold + 1432 " Pass=" + 1433 result 1434 ); 1435 return result; 1436 }, 1437 { offsetX, offsetY, cancel: wait(time, "timeout") } 1438 ); 1439 ok(false, "Frame changed within " + time / 1000 + " seconds"); 1440 } catch (e) { 1441 is( 1442 e, 1443 "timeout", 1444 "Frame shouldn't change for " + time / 1000 + " seconds" 1445 ); 1446 } 1447 } 1448 } 1449 1450 (function () { 1451 var el = document.createElement("link"); 1452 el.rel = "stylesheet"; 1453 el.type = "text/css"; 1454 el.href = "/tests/SimpleTest/test.css"; 1455 document.head.appendChild(el); 1456 })();