browser_devices_get_user_media_paused.js (32535B)
/* Any copyright is dedicated to the Public Domain.
 * http://creativecommons.org/publicdomain/zero/1.0/ */

// Browser-chrome test: verifies that disabling (track.enabled = false) or
// muting (platform-level mute notifications) camera/microphone/screen tracks
// updates the sharing indicators and the permission-panel UI accordingly.
// NOTE(review): gStreams / gVideoEvents / gAudioEvents are assumed to be set
// up by the content page loaded by the shared webrtc test head — confirm
// against head.js.

/**
 * Mutes or unmutes the camera by sending the corresponding platform
 * observer notification into the content process.
 * @param {boolean} mute - true to mute, false to unmute.
 * @returns {Promise} resolves when the notification has been dispatched.
 */
function setCameraMuted(mute) {
  return sendObserverNotification(
    mute ? "getUserMedia:muteVideo" : "getUserMedia:unmuteVideo"
  );
}

/**
 * Mutes or unmutes the microphone by sending the corresponding platform
 * observer notification into the content process.
 * @param {boolean} mute - true to mute, false to unmute.
 * @returns {Promise} resolves when the notification has been dispatched.
 */
function setMicrophoneMuted(mute) {
  return sendObserverNotification(
    mute ? "getUserMedia:muteAudio" : "getUserMedia:unmuteAudio"
  );
}

/**
 * Dispatches an observer notification in the content process of the selected
 * browser, targeted at its current inner window.
 * @param {string} topic - observer topic to notify.
 * @returns {Promise} resolves when the spawned content task completes.
 */
function sendObserverNotification(topic) {
  const windowId = gBrowser.selectedBrowser.innerWindowID;
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [{ topic, windowId }],
    function (args) {
      Services.obs.notifyObservers(
        content.window,
        args.topic,
        // The observer data payload is a string; serialize the window id.
        JSON.stringify(args.windowId)
      );
    }
  );
}

/**
 * Sets the enabled state of the first audio and/or video track of the
 * content page's first gUM stream. Pass null (or omit) to leave a kind
 * untouched.
 * @param {?boolean} audio - new enabled state for the audio track, or null.
 * @param {?boolean} video - new enabled state for the video track, or null.
 * @returns {Promise} resolves when the spawned content task completes.
 */
function setTrackEnabled(audio, video) {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [{ audio, video }],
    function (args) {
      let stream = content.wrappedJSObject.gStreams[0];
      if (args.audio != null) {
        stream.getAudioTracks()[0].enabled = args.audio;
      }
      if (args.video != null) {
        stream.getVideoTracks()[0].enabled = args.video;
      }
    }
  );
}

/** @returns {Promise<boolean>} the muted state of the first video track. */
async function getVideoTrackMuted() {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [],
    () => content.wrappedJSObject.gStreams[0].getVideoTracks()[0].muted
  );
}

/** @returns {Promise<string[]>} mute/unmute events recorded on the video track. */
async function getVideoTrackEvents() {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [],
    () => content.wrappedJSObject.gVideoEvents
  );
}

/** @returns {Promise<boolean>} the muted state of the first audio track. */
async function getAudioTrackMuted() {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [],
    () => content.wrappedJSObject.gStreams[0].getAudioTracks()[0].muted
  );
}

/** @returns {Promise<string[]>} mute/unmute events recorded on the audio track. */
async function getAudioTrackEvents() {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [],
    () => content.wrappedJSObject.gAudioEvents
  );
}

/**
 * Clones the first audio and/or video track of the content page's first gUM
 * stream, stashing the clones on content.wrappedJSObject.gClones. Pass a
 * non-null value to clone that kind.
 * @param {?boolean} audio - clone the audio track when non-null.
 * @param {?boolean} video - clone the video track when non-null.
 * @returns {Promise} resolves when the spawned content task completes.
 */
function cloneTracks(audio, video) {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [{ audio, video }],
    function (args) {
      if (!content.wrappedJSObject.gClones) {
        content.wrappedJSObject.gClones = [];
      }
      let clones = content.wrappedJSObject.gClones;
      let stream = content.wrappedJSObject.gStreams[0];
      if (args.audio != null) {
        clones.push(stream.getAudioTracks()[0].clone());
      }
      if (args.video != null) {
        clones.push(stream.getVideoTracks()[0].clone());
      }
    }
  );
}

/**
 * Stops previously created track clones of the given kind(s) and prunes
 * ended clones from content.wrappedJSObject.gClones (deleting the array
 * entirely when no live clones remain).
 * @param {?boolean} audio - stop audio clones when non-null.
 * @param {?boolean} video - stop video clones when non-null.
 * @returns {Promise} resolves when the spawned content task completes.
 */
function stopClonedTracks(audio, video) {
  return SpecialPowers.spawn(
    gBrowser.selectedBrowser,
    [{ audio, video }],
    function (args) {
      let clones = content.wrappedJSObject.gClones || [];
      if (args.audio != null) {
        clones.filter(t => t.kind == "audio").forEach(t => t.stop());
      }
      if (args.video != null) {
        clones.filter(t => t.kind == "video").forEach(t => t.stop());
      }
      let liveClones = clones.filter(t => t.readyState == "live");
      if (!liveClones.length) {
        delete content.wrappedJSObject.gClones;
      } else {
        content.wrappedJSObject.gClones = liveClones;
      }
    }
  );
}

var gTests = [
  {
    desc: "getUserMedia audio+video: disabling the stream shows the paused indicator",
    run: async function checkDisabled() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // Disable both audio and video.
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setTrackEnabled(false, false);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_DISABLED,
        "video should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });

      // Enable only audio again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // The identity UI should show only video as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // Enable video again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      await closeStream();
    },
  },

  {
    desc: "getUserMedia audio+video: disabling the original tracks and stopping enabled clones shows the paused indicator",
    run: async function checkDisabledAfterCloneStop() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // Clone audio and video, their state will be enabled
      await cloneTracks(true, true);

      observerPromise = expectObserverCalled("recording-device-events", 2);

      // Disable both audio and video.
      await setTrackEnabled(false, false);

      await observerPromise;

      observerPromise = expectObserverCalled("recording-device-events");

      // Stop the clones. This should disable the sharing indicators.
      await stopClonedTracks(true, true);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
            STATE_CAPTURE_DISABLED &&
          window.gPermissionPanel._sharingState.webRTC.microphone ==
            STATE_CAPTURE_DISABLED,
        "video and audio should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });

      // Enable only audio again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // The identity UI should show only video as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // Enable video again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      await closeStream();
    },
  },

  {
    desc: "getUserMedia screen: disabling the stream shows the paused indicator",
    run: async function checkScreenDisabled() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(false, true, null, "screen");
      await promise;
      await observerPromise;

      is(
        PopupNotifications.getNotification("webRTC-shareDevices").anchorID,
        "webRTC-shareScreen-notification-icon",
        "anchored to device icon"
      );
      checkDeviceSelectors(["screen"]);

      let menulist = document.getElementById("webRTC-selectWindow-menulist");
      menulist.getItemAtIndex(menulist.itemCount - 1).doCommand();

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { screen: "Screen" },
        "expected screen to be shared"
      );

      await indicator;
      await checkSharingUI({ screen: "Screen" });

      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, false);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.screen == "ScreenPaused",
        "screen should be disabled"
      );
      await observerPromise;
      await checkSharingUI({ screen: "ScreenPaused" }, window, {
        screen: "Screen",
      });

      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, true);

      await BrowserTestUtils.waitForCondition(
        () => window.gPermissionPanel._sharingState.webRTC.screen == "Screen",
        "screen should be enabled"
      );
      await observerPromise;
      await checkSharingUI({ screen: "Screen" });
      await closeStream();
    },
  },

  {
    desc: "getUserMedia audio+video: muting the camera shows the muted indicator",
    run: async function checkCameraMuted() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track starts unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        [],
        "no video track events fired yet"
      );

      // Mute camera.
      observerPromise = expectObserverCalled("recording-device-events");
      await setCameraMuted(true);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_DISABLED,
        "video should be muted"
      );

      await observerPromise;

      // The identity UI should show only camera as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), true, "video track is muted");
      Assert.deepEqual(await getVideoTrackEvents(), ["mute"], "mute fired");

      // Unmute video again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setCameraMuted(false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track is unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute", "unmute"],
        "unmute fired"
      );
      await closeStream();
    },
  },

  {
    desc: "getUserMedia audio+video: muting the microphone shows the muted indicator",
    run: async function checkMicrophoneMuted() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track starts unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        [],
        "no audio track events fired yet"
      );

      // Mute microphone.
      observerPromise = expectObserverCalled("recording-device-events");
      await setMicrophoneMuted(true);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_DISABLED,
        "audio should be muted"
      );

      await observerPromise;

      // The identity UI should show only microphone as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), true, "audio track is muted");
      Assert.deepEqual(await getAudioTrackEvents(), ["mute"], "mute fired");

      // Unmute audio again.
      observerPromise = expectObserverCalled("recording-device-events");
      await setMicrophoneMuted(false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track is unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute", "unmute"],
        "unmute fired"
      );
      await closeStream();
    },
  },

  {
    desc: "getUserMedia audio+video: disabling & muting camera in combination",
    // Test the following combinations of disabling and muting camera:
    // 1. Disable video track only.
    // 2. Mute camera & disable audio (to have a condition to wait for)
    // 3. Enable both audio and video tracks (only audio should flow).
    // 4. Unmute camera again (video should flow).
    // 5. Mute camera & disable both tracks.
    // 6. Unmute camera & enable audio (only audio should flow)
    // 7. Enable video track again (video should flow).
    run: async function checkDisabledMutedCombination() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // 1. Disable video track only.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, false);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_DISABLED,
        "video should be disabled"
      );

      await observerPromise;

      // The identity UI should show only video as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track still unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        [],
        "no video track events fired yet"
      );

      // 2. Mute camera & disable audio (to have a condition to wait for)
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setCameraMuted(true);
      await setTrackEnabled(false, null);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_DISABLED,
        "audio should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getVideoTrackMuted(), true, "video track is muted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute"],
        "mute is still fired even though track was disabled"
      );

      // 3. Enable both audio and video tracks (only audio should flow).
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setTrackEnabled(true, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // The identity UI should show only audio as enabled, as video is muted.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), true, "video track is still muted");
      Assert.deepEqual(await getVideoTrackEvents(), ["mute"], "no new events");

      // 4. Unmute camera again (video should flow).
      observerPromise = expectObserverCalled("recording-device-events");
      await setCameraMuted(false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track is unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute", "unmute"],
        "unmute fired"
      );

      // 5. Mute camera & disable both tracks.
      observerPromise = expectObserverCalled("recording-device-events", 3);
      await setCameraMuted(true);
      await setTrackEnabled(false, false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_DISABLED,
        "video should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getVideoTrackMuted(), true, "video track is muted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute", "unmute", "mute"],
        // Fixed typo: was "mute fired afain".
        "mute fired again"
      );

      // 6. Unmute camera & enable audio (only audio should flow)
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setCameraMuted(false);
      await setTrackEnabled(true, null);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // Only audio should show as running, as video track is still disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track is unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute", "unmute", "mute", "unmute"],
        "unmute fired even though track is disabled"
      );

      // 7. Enable video track again (video should flow).
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(null, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // The identity UI should show both as running again.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getVideoTrackMuted(), false, "video track remains unmuted");
      Assert.deepEqual(
        await getVideoTrackEvents(),
        ["mute", "unmute", "mute", "unmute"],
        "no new events fired"
      );
      await closeStream();
    },
  },

  {
    desc: "getUserMedia audio+video: disabling & muting microphone in combination",
    // Test the following combinations of disabling and muting microphone:
    // 1. Disable audio track only.
    // 2. Mute microphone & disable video (to have a condition to wait for)
    // 3. Enable both audio and video tracks (only video should flow).
    // 4. Unmute microphone again (audio should flow).
    // 5. Mute microphone & disable both tracks.
    // 6. Unmute microphone & enable video (only video should flow)
    // 7. Enable audio track again (audio should flow).
    run: async function checkDisabledMutedCombination() {
      let observerPromise = expectObserverCalled("getUserMedia:request");
      let promise = promisePopupNotificationShown("webRTC-shareDevices");
      await promiseRequestDevice(true, true);
      await promise;
      await observerPromise;
      checkDeviceSelectors(["microphone", "camera"]);

      let indicator = promiseIndicatorWindow();
      let observerPromise1 = expectObserverCalled(
        "getUserMedia:response:allow"
      );
      let observerPromise2 = expectObserverCalled("recording-device-events");
      await promiseMessage("ok", () => {
        PopupNotifications.panel.firstElementChild.button.click();
      });
      await observerPromise1;
      await observerPromise2;
      Assert.deepEqual(
        await getMediaCaptureState(),
        { audio: true, video: true },
        "expected camera and microphone to be shared"
      );
      await indicator;
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });

      // 1. Disable audio track only.
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(false, null);

      // Wait for capture state to propagate to the UI asynchronously.
      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_DISABLED,
        "audio should be disabled"
      );

      await observerPromise;

      // The identity UI should show only audio as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track still unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        [],
        "no audio track events fired yet"
      );

      // 2. Mute microphone & disable video (to have a condition to wait for)
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setMicrophoneMuted(true);
      await setTrackEnabled(null, false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_DISABLED,
        "camera should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), true, "audio track is muted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute"],
        "mute is still fired even though track was disabled"
      );

      // 3. Enable both audio and video tracks (only video should flow).
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setTrackEnabled(true, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // The identity UI should show only video as enabled, as audio is muted.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), true, "audio track is still muted");
      Assert.deepEqual(await getAudioTrackEvents(), ["mute"], "no new events");

      // 4. Unmute microphone again (audio should flow).
      observerPromise = expectObserverCalled("recording-device-events");
      await setMicrophoneMuted(false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // Both streams should show as running.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track is unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute", "unmute"],
        "unmute fired"
      );

      // 5. Mute microphone & disable both tracks.
      observerPromise = expectObserverCalled("recording-device-events", 3);
      await setMicrophoneMuted(true);
      await setTrackEnabled(false, false);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_DISABLED,
        "audio should be disabled"
      );

      await observerPromise;

      // The identity UI should show both as disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_DISABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), true, "audio track is muted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute", "unmute", "mute"],
        "mute fired again"
      );

      // 6. Unmute microphone & enable video (only video should flow)
      observerPromise = expectObserverCalled("recording-device-events", 2);
      await setMicrophoneMuted(false);
      await setTrackEnabled(null, true);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.camera ==
          STATE_CAPTURE_ENABLED,
        "video should be enabled"
      );

      await observerPromise;

      // Only video should show as running, as audio track is still disabled.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_DISABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track is unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute", "unmute", "mute", "unmute"],
        "unmute fired even though track is disabled"
      );

      // 7. Enable audio track again (audio should flow).
      observerPromise = expectObserverCalled("recording-device-events");
      await setTrackEnabled(true, null);

      await BrowserTestUtils.waitForCondition(
        () =>
          window.gPermissionPanel._sharingState.webRTC.microphone ==
          STATE_CAPTURE_ENABLED,
        "audio should be enabled"
      );

      await observerPromise;

      // The identity UI should show both as running again.
      await checkSharingUI({
        video: STATE_CAPTURE_ENABLED,
        audio: STATE_CAPTURE_ENABLED,
      });
      is(await getAudioTrackMuted(), false, "audio track remains unmuted");
      Assert.deepEqual(
        await getAudioTrackEvents(),
        ["mute", "unmute", "mute", "unmute"],
        "no new events fired"
      );
      await closeStream();
    },
  },
];

add_task(async function test() {
  // Remove the off-while-disabled delay so disabling a track takes effect
  // immediately, keeping the waitForCondition polls deterministic.
  await SpecialPowers.pushPrefEnv({
    set: [
      ["media.getusermedia.camera.off_while_disabled.delay_ms", 0],
      ["media.getusermedia.microphone.off_while_disabled.delay_ms", 0],
    ],
  });

  SimpleTest.requestCompleteLog();
  await runTests(gTests);
});