tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

RTCPeerConnection-helper.js (28537B)


'use strict'

/*
*  Helper Methods for testing the following methods in RTCPeerConnection:
*    createOffer
*    createAnswer
*    setLocalDescription
*    setRemoteDescription
*
*  This file offers the following features:
*    SDP similarity comparison
*    Generating offer/answer using anonymous peer connection
*    Test signalingstatechange event
*    Test promise that never resolve
*/

// Regexes matching one full m=<kind> media description line, including the
// CRLF delimiters on both sides; used to count media sections in an SDP blob.
const audioLineRegex = /\r\nm=audio.+\r\n/g;
const videoLineRegex = /\r\nm=video.+\r\n/g;
const applicationLineRegex = /\r\nm=application.+\r\n/g;
     21 function countLine(sdp, regex) {
     22  const matches = sdp.match(regex);
     23  if(matches === null) {
     24    return 0;
     25  } else {
     26    return matches.length;
     27  }
     28 }
     29 
// Number of m=audio sections in |sdp|.
function countAudioLine(sdp) {
 return countLine(sdp, audioLineRegex);
}

// Number of m=video sections in |sdp|.
function countVideoLine(sdp) {
 return countLine(sdp, videoLineRegex);
}

// Number of m=application (data channel) sections in |sdp|.
function countApplicationLine(sdp) {
 return countLine(sdp, applicationLineRegex);
}
     41 
     42 function similarMediaDescriptions(sdp1, sdp2) {
     43  if(sdp1 === sdp2) {
     44    return true;
     45  } else if(
     46    countAudioLine(sdp1) !== countAudioLine(sdp2) ||
     47    countVideoLine(sdp1) !== countVideoLine(sdp2) ||
     48    countApplicationLine(sdp1) !== countApplicationLine(sdp2))
     49  {
     50    return false;
     51  } else {
     52    return true;
     53  }
     54 }
     55 
     56 // Assert that given object is either an
     57 // RTCSessionDescription or RTCSessionDescriptionInit
     58 function assert_is_session_description(sessionDesc) {
     59  if(sessionDesc instanceof RTCSessionDescription) {
     60    return;
     61  }
     62 
     63  assert_not_equals(sessionDesc, undefined,
     64    'Expect session description to be defined');
     65 
     66  assert_true(typeof(sessionDesc) === 'object',
     67    'Expect sessionDescription to be either a RTCSessionDescription or an object');
     68 
     69  assert_true(typeof(sessionDesc.type) === 'string',
     70    'Expect sessionDescription.type to be a string');
     71 
     72  assert_true(typeof(sessionDesc.sdp) === 'string',
     73    'Expect sessionDescription.sdp to be a string');
     74 }
     75 
     76 
     77 // We can't do string comparison to the SDP content,
     78 // because RTCPeerConnection may return SDP that is
     79 // slightly modified or reordered from what is given
     80 // to it due to ICE candidate events or serialization.
     81 // Instead, we create SDP with different number of media
     82 // lines, and if the SDP strings are not the same, we
     83 // simply count the media description lines and if they
     84 // are the same, we assume it is the same.
     85 function isSimilarSessionDescription(sessionDesc1, sessionDesc2) {
     86  assert_is_session_description(sessionDesc1);
     87  assert_is_session_description(sessionDesc2);
     88 
     89  if(sessionDesc1.type !== sessionDesc2.type) {
     90    return false;
     91  } else {
     92    return similarMediaDescriptions(sessionDesc1.sdp, sessionDesc2.sdp);
     93  }
     94 }
     95 
// Assert that |sessionDesc1| and |sessionDesc2| have the same type and the
// same number of audio/video/application media sections.
function assert_session_desc_similar(sessionDesc1, sessionDesc2) {
 assert_true(isSimilarSessionDescription(sessionDesc1, sessionDesc2),
   'Expect both session descriptions to have the same count of media lines');
}

// Negated counterpart of assert_session_desc_similar.
function assert_session_desc_not_similar(sessionDesc1, sessionDesc2) {
 assert_false(isSimilarSessionDescription(sessionDesc1, sessionDesc2),
   'Expect both session descriptions to have different count of media lines');
}
    105 
    106 async function generateDataChannelOffer(pc) {
    107  pc.createDataChannel('test');
    108  const offer = await pc.createOffer();
    109  assert_equals(countApplicationLine(offer.sdp), 1, 'Expect m=application line to be present in generated SDP');
    110  return offer;
    111 }
    112 
    113 async function generateAudioReceiveOnlyOffer(pc)
    114 {
    115    try {
    116        pc.addTransceiver('audio', { direction: 'recvonly' });
    117        return pc.createOffer();
    118    } catch(e) {
    119        return pc.createOffer({ offerToReceiveAudio: true });
    120    }
    121 }
    122 
    123 async function generateVideoReceiveOnlyOffer(pc)
    124 {
    125    try {
    126        pc.addTransceiver('video', { direction: 'recvonly' });
    127        return pc.createOffer();
    128    } catch(e) {
    129        return pc.createOffer({ offerToReceiveVideo: true });
    130    }
    131 }
    132 
    133 // Helper function to generate answer based on given offer using a freshly
    134 // created RTCPeerConnection object
    135 async function generateAnswer(offer) {
    136  const pc = new RTCPeerConnection();
    137  await pc.setRemoteDescription(offer);
    138  const answer = await pc.createAnswer();
    139  pc.close();
    140  return answer;
    141 }
    142 
    143 // Helper function to generate offer using a freshly
    144 // created RTCPeerConnection object
    145 async function generateOffer() {
    146  const pc = new RTCPeerConnection();
    147  const offer = await pc.createOffer();
    148  pc.close();
    149  return offer;
    150 }
    151 
// Run a test function that return a promise that should
// never be resolved. For lack of better options,
// we wait for a time out and pass the test if the
// promise doesn't resolve within that time.
function test_never_resolve(testFunc, testName) {
 async_test(t => {
   // Any settlement (fulfillment or rejection) of the returned promise
   // fails the test.
   testFunc(t)
   .then(
     t.step_func(result => {
       assert_unreached(`Pending promise should never be resolved. Instead it is fulfilled with: ${result}`);
     }),
     t.step_func(err => {
       assert_unreached(`Pending promise should never be resolved. Instead it is rejected with: ${err}`);
     }));

   // Pass the test if the promise has not settled within 100ms.
   t.step_timeout(t.step_func_done(), 100)
 }, testName);
}
    170 
    171 // Helper function to exchange ice candidates between
    172 // two local peer connections
    173 function exchangeIceCandidates(pc1, pc2) {
    174  // private function
    175  function doExchange(localPc, remotePc) {
    176    localPc.addEventListener('icecandidate', event => {
    177      const { candidate } = event;
    178 
    179      // Guard against already closed peerconnection to
    180      // avoid unrelated exceptions.
    181      if (remotePc.signalingState !== 'closed') {
    182        remotePc.addIceCandidate(candidate);
    183      }
    184    });
    185  }
    186 
    187  doExchange(pc1, pc2);
    188  doExchange(pc2, pc1);
    189 }
    190 
    191 // Returns a promise that resolves when a |name| event is fired.
    192 function waitUntilEvent(obj, name) {
    193  return new Promise(r => obj.addEventListener(name, r, {once: true}));
    194 }
    195 
// Returns a promise that resolves when the |transport.state| is |state|
// This should work for RTCSctpTransport, RTCDtlsTransport and RTCIceTransport.
async function waitForState(transport, state) {
 // Returns immediately if the transport is already in the wanted state;
 // otherwise re-checks after every 'statechange' event.
 while (transport.state != state) {
   await waitUntilEvent(transport, 'statechange');
 }
}
    203 
// Returns a promise that resolves when |pc.iceConnectionState| is 'connected'
// or 'completed'.
async function listenToIceConnected(pc) {
 await waitForIceStateChange(pc, ['connected', 'completed']);
}
    209 
// Returns a promise that resolves when |pc.iceConnectionState| is in one of the
// wanted states.
async function waitForIceStateChange(pc, wantedStates) {
 // Resolves immediately if the connection is already in a wanted state.
 while (!wantedStates.includes(pc.iceConnectionState)) {
   await waitUntilEvent(pc, 'iceconnectionstatechange');
 }
}
    217 
// Returns a promise that resolves when |pc.connectionState| is 'connected'.
async function listenToConnected(pc) {
 // Resolves immediately if already connected.
 while (pc.connectionState != 'connected') {
   await waitUntilEvent(pc, 'connectionstatechange');
 }
}
    224 
// Returns a promise that resolves when |pc.connectionState| is in one of the
// wanted states.
async function waitForConnectionStateChange(pc, wantedStates) {
 // Resolves immediately if the connection is already in a wanted state.
 while (!wantedStates.includes(pc.connectionState)) {
   await waitUntilEvent(pc, 'connectionstatechange');
 }
}
    232 
    233 function waitForConnectionStateChangeWithTimeout(t, pc, wantedStates, timeout) {
    234  return new Promise((resolve, reject) => {
    235    if (wantedStates.includes(pc.connectionState)) {
    236      resolve();
    237      return;
    238    }
    239    pc.addEventListener('connectionstatechange', () => {
    240      if (wantedStates.includes(pc.connectionState))
    241        resolve();
    242    });
    243    t.step_timeout(reject, timeout);
    244  });
    245 }
    246 
// Returns a promise that resolves when |pc.iceGatheringState| is in one of
// the wanted states; resolves immediately if it already is.
async function waitForIceGatheringState(pc, wantedStates) {
 while (!wantedStates.includes(pc.iceGatheringState)) {
   await waitUntilEvent(pc, 'icegatheringstatechange');
 }
}
    252 
    253 async function waitForTrackUnmuted(track) {
    254  if (track.muted === false) return true;
    255  return waitUntilEvent(track, 'unmute');
    256 }
    257 
    258 // Resolves when RTP packets have been received.
    259 async function listenForSSRCs(t, receiver) {
    260  while (true) {
    261    const ssrcs = receiver.getSynchronizationSources();
    262    if (Array.isArray(ssrcs) && ssrcs.length > 0) {
    263      return ssrcs;
    264    }
    265    await new Promise(r => t.step_timeout(r, 0));
    266  }
    267 }
    268 
// Helper function to create a pair of connected data channels.
// On success the promise resolves to an array with two data channels.
// It does the heavy lifting of performing signaling handshake,
// ICE candidate exchange, and waiting for data channel at two
// end points to open. Can do both negotiated and non-negotiated setup.
async function createDataChannelPairWithLabel(t, label, options,
                                    pc1 = createPeerConnectionWithCleanup(t),
                                    pc2 = createPeerConnectionWithCleanup(t)) {
 let pair = [], bothOpen;
 try {
   if (options.negotiated) {
     // Negotiated setup: create the channel explicitly on both ends; no
     // 'datachannel' event will fire.
     pair = [pc1, pc2].map(pc => pc.createDataChannel(label, options));
     bothOpen = Promise.all(pair.map(dc => new Promise((r, e) => {
       dc.onopen = r;
       dc.onerror = ({error}) => e(error);
     })));
   } else {
     // In-band setup: create on pc1 only; pc2 receives its end through the
     // 'datachannel' event, at which point it is appended to |pair|.
     pair = [pc1.createDataChannel(label, options)];
     bothOpen = Promise.all([
       new Promise((r, e) => {
         pair[0].onopen = r;
         pair[0].onerror = ({error}) => e(error);
       }),
       new Promise((r, e) => pc2.ondatachannel = ({channel}) => {
         pair[1] = channel;
         channel.onopen = r;
         channel.onerror = ({error}) => e(error);
       })
     ]);
   }
   // Candidate forwarding must be wired up before the offer/answer exchange
   // so no candidates are lost.
   exchangeIceCandidates(pc1, pc2);
   await exchangeOfferAnswer(pc1, pc2);
   await bothOpen;
   return pair;
 } finally {
   // Detach the temporary handlers so later events don't invoke stale
   // callbacks (runs on both success and failure).
   for (const dc of pair) {
      dc.onopen = dc.onerror = null;
   }
 }
}
    309 
// Convenience wrapper around createDataChannelPairWithLabel with an empty
// channel label.
async function createDataChannelPair(t, options, pc1, pc2) {
 return createDataChannelPairWithLabel(t, '', options, pc1, pc2);
}
    313 
    314 // Wait for RTP and RTCP stats to arrive
    315 async function waitForRtpAndRtcpStats(pc) {
    316  // If remote stats are never reported, return after 5 seconds.
    317  const startTime = performance.now();
    318  while (true) {
    319    const report = await pc.getStats();
    320    const stats = [...report.values()].filter(({type}) => type.endsWith("bound-rtp"));
    321    // Each RTP and RTCP stat has a reference
    322    // to the matching stat in the other direction
    323    if (stats.length && stats.every(({localId, remoteId}) => localId || remoteId)) {
    324      break;
    325    }
    326    if (performance.now() > startTime + 5000) {
    327      break;
    328    }
    329  }
    330 }
    331 
    332 // Wait for a single message event and return
    333 // a promise that resolve when the event fires
    334 function awaitMessage(channel) {
    335  const once = true;
    336  return new Promise((resolve, reject) => {
    337    channel.addEventListener('message', ({data}) => resolve(data), {once});
    338    channel.addEventListener('error', reject, {once});
    339  });
    340 }
    341 
    342 // Helper to convert a blob to array buffer so that
    343 // we can read the content
    344 async function blobToArrayBuffer(blob) {
    345  const reader = new FileReader();
    346  reader.readAsArrayBuffer(blob);
    347  return new Promise((resolve, reject) => {
    348    reader.addEventListener('load', () => resolve(reader.result), {once: true});
    349    reader.addEventListener('error', () => reject(reader.error), {once: true});
    350  });
    351 }
    352 
    353 // Assert that two TypedArray or ArrayBuffer objects have the same byte values
    354 function assert_equals_typed_array(array1, array2) {
    355  const [view1, view2] = [array1, array2].map((array) => {
    356    if (array instanceof ArrayBuffer) {
    357      return new DataView(array);
    358    } else {
    359      assert_true(array.buffer instanceof ArrayBuffer,
    360        'Expect buffer to be instance of ArrayBuffer');
    361      return new DataView(array.buffer, array.byteOffset, array.byteLength);
    362    }
    363  });
    364 
    365  assert_equals(view1.byteLength, view2.byteLength,
    366    'Expect both arrays to be of the same byte length');
    367 
    368  const byteLength = view1.byteLength;
    369 
    370  for (let i = 0; i < byteLength; ++i) {
    371    assert_equals(view1.getUint8(i), view2.getUint8(i),
    372      `Expect byte at buffer position ${i} to be equal`);
    373  }
    374 }
    375 
// These media tracks will be continually updated with deterministic "noise" in
// order to ensure UAs do not cease transmission in response to apparent
// silence.
//
// > Many codecs and systems are capable of detecting "silence" and changing
// > their behavior in this case by doing things such as not transmitting any
// > media.
//
// Source: https://w3c.github.io/webrtc-pc/#offer-answer-options
const trackFactories = {
 // Share a single context between tests to avoid exceeding resource limits
 // without requiring explicit destruction.
 audioContext: null,

 /**
  * Given a set of requested media types, determine if the user agent is
  * capable of procedurally generating a suitable media stream.
  *
  * @param {object} requested
  * @param {boolean} [requested.audio] - flag indicating whether the desired
  *                                      stream should include an audio track
  * @param {boolean} [requested.video] - flag indicating whether the desired
  *                                      stream should include a video track
  *
  * @returns {boolean}
  */
 canCreate(requested) {
   const supported = {
     audio: !!window.AudioContext && !!window.MediaStreamAudioDestinationNode,
     video: !!HTMLCanvasElement.prototype.captureStream
   };

   return (!requested.audio || supported.audio) &&
     (!requested.video || supported.video);
 },

 // Returns an audio MediaStreamTrack driven by a started oscillator, so the
 // track carries a continuous tone rather than silence.
 audio() {
   const ctx = trackFactories.audioContext = trackFactories.audioContext ||
     new AudioContext();
   const oscillator = ctx.createOscillator();
   const dst = oscillator.connect(ctx.createMediaStreamDestination());
   oscillator.start();
   return dst.stream.getAudioTracks()[0];
 },

 // Returns a video MediaStreamTrack captured from a canvas that is redrawn
 // every 100ms with changing colors and moving boxes.
 video({width = 640, height = 480, signal} = {}) {
   const canvas = Object.assign(
     document.createElement("canvas"), {width, height}
   );
   const ctx = canvas.getContext('2d');
   const stream = canvas.captureStream();

   let count = 0;
   const interval = setInterval(() => {
     ctx.fillStyle = `rgb(${count%255}, ${count*count%255}, ${count%255})`;
     count += 1;
     ctx.fillRect(0, 0, width, height);
     // Add some bouncing boxes in contrast color to add a little more noise.
     const contrast = count + 128;
     ctx.fillStyle = `rgb(${contrast%255}, ${contrast*contrast%255}, ${contrast%255})`;
     const xpos = count % (width - 20);
     const ypos = count % (height - 20);
     // NOTE(review): fillRect takes (x, y, w, h); passing xpos + 20 and
     // ypos + 20 as the last two arguments makes the box grow with its
     // position. Possibly intentional extra noise -- confirm upstream.
     ctx.fillRect(xpos, ypos, xpos + 20, ypos + 20);
     const xpos2 = (count + width / 2) % (width - 20);
     const ypos2 = (count + height / 2) % (height - 20);
     ctx.fillRect(xpos2, ypos2, xpos2 + 20, ypos2 + 20);
     // If signal is set (0-255), add a constant-color box of that luminance to
     // the video frame at coordinates 20 to 60 in both X and Y direction.
     // (big enough to avoid color bleed from surrounding video in some codecs,
     // for more stable tests).
     if (signal != undefined) {
       ctx.fillStyle = `rgb(${signal}, ${signal}, ${signal})`;
       ctx.fillRect(20, 20, 40, 40);
     }
   }, 100);

   // Attach the canvas to the document (now, or once the DOM is ready).
   if (document.body) {
     document.body.appendChild(canvas);
   } else {
     document.addEventListener('DOMContentLoaded', () => {
       document.body.appendChild(canvas);
     }, {once: true});
   }

   // Implement track.stop() for performance in some tests on some platforms
   const track = stream.getVideoTracks()[0];
   const nativeStop = track.stop;
   track.stop = function stop() {
     // Stop redrawing and remove the canvas before stopping the real track.
     clearInterval(interval);
     nativeStop.apply(this);
     if (document.body && canvas.parentElement == document.body) {
       document.body.removeChild(canvas);
     }
   };
   return track;
 }
};
    473 
    474 // Get the signal from a video element inserted by createNoiseStream
    475 function getVideoSignal(v) {
    476  if (v.videoWidth < 60 || v.videoHeight < 60) {
    477    throw new Error('getVideoSignal: video too small for test');
    478  }
    479  const canvas = document.createElement("canvas");
    480  canvas.width = canvas.height = 60;
    481  const context = canvas.getContext('2d');
    482  context.drawImage(v, 0, 0);
    483  // Extract pixel value at position 40, 40
    484  const pixel = context.getImageData(40, 40, 1, 1);
    485  // Use luma reconstruction to get back original value according to
    486  // ITU-R rec BT.709
    487  return (pixel.data[0] * 0.21 + pixel.data[1] * 0.72 + pixel.data[2] * 0.07);
    488 }
    489 
    490 async function detectSignal(t, v, value) {
    491  while (true) {
    492    const signal = getVideoSignal(v).toFixed();
    493    // allow off-by-two pixel error (observed in some implementations)
    494    if (value - 2 <= signal && signal <= value + 2) {
    495      return;
    496    }
    497    // We would like to wait for each new frame instead here,
    498    // but there seems to be no such callback.
    499    await new Promise(r => t.step_timeout(r, 100));
    500  }
    501 }
    502 
    503 // Generate a MediaStream bearing the specified tracks.
    504 //
    505 // @param {object} [caps]
    506 // @param {boolean} [caps.audio] - flag indicating whether the generated stream
    507 //                                 should include an audio track
    508 // @param {boolean} [caps.video] - flag indicating whether the generated stream
    509 //                                 should include a video track, or parameters for video
    510 async function getNoiseStream(caps = {}) {
    511  if (!trackFactories.canCreate(caps)) {
    512    return navigator.mediaDevices.getUserMedia(caps);
    513  }
    514  const tracks = [];
    515 
    516  if (caps.audio) {
    517    tracks.push(trackFactories.audio());
    518  }
    519 
    520  if (caps.video) {
    521    tracks.push(trackFactories.video(caps.video));
    522  }
    523 
    524  return new MediaStream(tracks);
    525 }
    526 
    527 // Obtain a MediaStreamTrack of kind using procedurally-generated streams (and
    528 // falling back to `getUserMedia` when the user agent cannot generate the
    529 // requested streams).
    530 // Return Promise of pair of track and associated mediaStream.
    531 // Assumes that there is at least one available device
    532 // to generate the track.
    533 function getTrackFromUserMedia(kind) {
    534  return getNoiseStream({ [kind]: true })
    535  .then(mediaStream => {
    536    const [track] = mediaStream.getTracks();
    537    return [track, mediaStream];
    538  });
    539 }
    540 
    541 // Obtain |count| MediaStreamTracks of type |kind| and MediaStreams. The tracks
    542 // do not belong to any stream and the streams are empty. Returns a Promise
    543 // resolved with a pair of arrays [tracks, streams].
    544 // Assumes there is at least one available device to generate the tracks and
    545 // streams and that the getUserMedia() calls resolve.
    546 function getUserMediaTracksAndStreams(count, type = 'audio') {
    547  let otherTracksPromise;
    548  if (count > 1)
    549    otherTracksPromise = getUserMediaTracksAndStreams(count - 1, type);
    550  else
    551    otherTracksPromise = Promise.resolve([[], []]);
    552  return otherTracksPromise.then(([tracks, streams]) => {
    553    return getTrackFromUserMedia(type)
    554    .then(([track, stream]) => {
    555      // Remove the default stream-track relationship.
    556      stream.removeTrack(track);
    557      tracks.push(track);
    558      streams.push(stream);
    559      return [tracks, streams];
    560    });
    561  });
    562 }
    563 
    564 // Performs an offer exchange caller -> callee.
    565 async function exchangeOffer(caller, callee) {
    566  await caller.setLocalDescription(await caller.createOffer());
    567  await callee.setRemoteDescription(caller.localDescription);
    568 }
    569 // Performs an answer exchange caller -> callee.
    570 async function exchangeAnswer(caller, callee) {
    571  // Note that caller's remote description must be set first; if not,
    572  // there's a chance that candidates from callee arrive at caller before
    573  // it has a remote description to apply them to.
    574  const answer = await callee.createAnswer();
    575  await caller.setRemoteDescription(answer);
    576  await callee.setLocalDescription(answer);
    577 }
    578 async function exchangeOfferAnswer(caller, callee) {
    579  await exchangeOffer(caller, callee);
    580  await exchangeAnswer(caller, callee);
    581 }
    582 
    583 // The returned promise is resolved with caller's ontrack event.
    584 async function exchangeAnswerAndListenToOntrack(t, caller, callee) {
    585  const ontrackPromise = addEventListenerPromise(t, caller, 'track');
    586  await exchangeAnswer(caller, callee);
    587  return ontrackPromise;
    588 }
    589 // The returned promise is resolved with callee's ontrack event.
    590 async function exchangeOfferAndListenToOntrack(t, caller, callee) {
    591  const ontrackPromise = addEventListenerPromise(t, callee, 'track');
    592  await exchangeOffer(caller, callee);
    593  return ontrackPromise;
    594 }
    595 
    596 // The resolver extends a |promise| that can be resolved or rejected using |resolve|
    597 // or |reject|.
    598 class Resolver extends Promise {
    599  constructor(executor) {
    600    let resolve, reject;
    601    super((resolve_, reject_) => {
    602      resolve = resolve_;
    603      reject = reject_;
    604      if (executor) {
    605        return executor(resolve_, reject_);
    606      }
    607    });
    608 
    609    this._done = false;
    610    this._resolve = resolve;
    611    this._reject = reject;
    612  }
    613 
    614  /**
    615   * Return whether the promise is done (resolved or rejected).
    616   */
    617  get done() {
    618    return this._done;
    619  }
    620 
    621  /**
    622   * Resolve the promise.
    623   */
    624  resolve(...args) {
    625    this._done = true;
    626    return this._resolve(...args);
    627  }
    628 
    629  /**
    630   * Reject the promise.
    631   */
    632  reject(...args) {
    633    this._done = true;
    634    return this._reject(...args);
    635  }
    636 }
    637 
    638 function addEventListenerPromise(t, obj, type, listener) {
    639  if (!listener) {
    640    return waitUntilEvent(obj, type);
    641  }
    642  return new Promise(r => obj.addEventListener(type,
    643                                               t.step_func(e => r(listener(e))),
    644                                               {once: true}));
    645 }
    646 
    647 function createPeerConnectionWithCleanup(t) {
    648  const pc = new RTCPeerConnection();
    649  t.add_cleanup(() => pc.close());
    650  return pc;
    651 }
    652 
    653 async function createTrackAndStreamWithCleanup(t, kind = 'audio') {
    654  let constraints = {};
    655  constraints[kind] = true;
    656  const stream = await getNoiseStream(constraints);
    657  const [track] = stream.getTracks();
    658  t.add_cleanup(() => track.stop());
    659  return [track, stream];
    660 }
    661 
    662 function findTransceiverForSender(pc, sender) {
    663  const transceivers = pc.getTransceivers();
    664  for (let i = 0; i < transceivers.length; ++i) {
    665    if (transceivers[i].sender == sender)
    666      return transceivers[i];
    667  }
    668  return null;
    669 }
    670 
    671 function preferCodec(transceiver, mimeType, sdpFmtpLine) {
    672  const {codecs} = RTCRtpReceiver.getCapabilities(transceiver.receiver.track.kind);
    673  // sdpFmtpLine is optional, pick the first partial match if not given.
    674  const selectedCodecIndex = codecs.findIndex(c => {
    675    return c.mimeType === mimeType && (c.sdpFmtpLine === sdpFmtpLine || !sdpFmtpLine);
    676  });
    677  const selectedCodec = codecs[selectedCodecIndex];
    678  codecs.slice(selectedCodecIndex, 1);
    679  codecs.unshift(selectedCodec);
    680  return transceiver.setCodecPreferences(codecs);
    681 }
    682 
    683 function findSendCodecCapability(mimeType, sdpFmtpLine) {
    684  return RTCRtpSender.getCapabilities(mimeType.split('/')[0])
    685    .codecs
    686    .filter(c => c.mimeType.localeCompare(name, undefined, { sensitivity: 'base' }) === 0
    687      && (c.sdpFmtpLine === sdpFmtpLine || !sdpFmtpLine))[0];
    688 }
    689 
    690 // Contains a set of values and will yell at you if you try to add a value twice.
    691 class UniqueSet extends Set {
    692  constructor(items) {
    693    super();
    694    if (items !== undefined) {
    695      for (const item of items) {
    696        this.add(item);
    697      }
    698    }
    699  }
    700 
    701  add(value, message) {
    702    if (message === undefined) {
    703      message = `Value '${value}' needs to be unique but it is already in the set`;
    704    }
    705    assert_true(!this.has(value), message);
    706    super.add(value);
    707  }
    708 }
    709 
    710 const iceGatheringStateTransitions = async (pc, ...states) => {
    711  for (const state of states) {
    712    await new Promise((resolve, reject) => {
    713      pc.addEventListener('icegatheringstatechange', () => {
    714        if (pc.iceGatheringState == state) {
    715          resolve();
    716        } else {
    717          reject(`Unexpected gathering state: ${pc.iceGatheringState}, was expecting ${state}`);
    718        }
    719      }, {once: true});
    720    });
    721  }
    722  return states;
    723 };
    724 
// Performs the initial offer/answer handshake between |pc1| and |pc2|,
// additionally asserting (via iceGatheringStateTransitions) that each side
// goes through the 'gathering' -> 'complete' ICE gathering sequence.
const initialOfferAnswerWithIceGatheringStateTransitions =
   async (pc1, pc2, offerOptions) => {
     await pc1.setLocalDescription(
       await pc1.createOffer(offerOptions));
     // Gathering begins once the local description is set, so start
     // observing pc1's transitions right away.
     const pc1Transitions =
         iceGatheringStateTransitions(pc1, 'gathering', 'complete');
     await pc2.setRemoteDescription(pc1.localDescription);
     await pc2.setLocalDescription(await pc2.createAnswer());
     const pc2Transitions =
         iceGatheringStateTransitions(pc2, 'gathering', 'complete');
     await pc1.setRemoteDescription(pc2.localDescription);
     // Fails (rejects) if either side saw an unexpected transition.
     await pc1Transitions;
     await pc2Transitions;
   };
    739 
    740 const expectNoMoreIceConnectionStateChanges = async (t, pc) => {
    741  pc.oniceconnectionstatechange =
    742      t.step_func(() => {
    743        assert_unreached(
    744            'Should not get an iceconnectionstatechange right now!');
    745      });
    746 };
    747 
    748 const expectNoMoreGatheringStateChanges = async (t, pc) => {
    749  pc.onicegatheringstatechange =
    750      t.step_func(() => {
    751        assert_unreached(
    752            'Should not get an icegatheringstatechange right now!');
    753      });
    754 };
    755 
    756 function gatheringStateReached(object, state) {
    757  if (object instanceof RTCIceTransport) {
    758    return new Promise(r =>
    759      object.addEventListener("gatheringstatechange", function listener() {
    760        if (object.gatheringState == state) {
    761          object.removeEventListener("gatheringstatechange", listener);
    762          r(state);
    763        }
    764      })
    765    );
    766  } else if (object instanceof RTCPeerConnection) {
    767    return new Promise(r =>
    768      object.addEventListener("icegatheringstatechange", function listener() {
    769        if (object.iceGatheringState == state) {
    770          object.removeEventListener("icegatheringstatechange", listener);
    771          r(state);
    772        }
    773      })
    774    );
    775  } else {
    776    throw "First parameter is neither an RTCIceTransport nor an RTCPeerConnection";
    777  }
    778 }
    779 
    780 function nextGatheringState(object) {
    781  if (object instanceof RTCIceTransport) {
    782    return new Promise(resolve =>
    783      object.addEventListener(
    784        "gatheringstatechange",
    785        () => resolve(object.gatheringState),
    786        { once: true }
    787      )
    788    );
    789  } else if (object instanceof RTCPeerConnection) {
    790    return new Promise(resolve =>
    791      object.addEventListener(
    792        "icegatheringstatechange",
    793        () => resolve(object.iceGatheringState),
    794        { once: true }
    795      )
    796    );
    797  } else {
    798    throw "First parameter is neither an RTCIceTransport nor an RTCPeerConnection";
    799  }
    800 }
    801 
    802 function emptyCandidate(pc) {
    803  return new Promise(r =>
    804    pc.addEventListener("icecandidate", function listener(e) {
    805      if (e.candidate && e.candidate.candidate == "") {
    806        pc.removeEventListener("icecandidate", listener);
    807        r(e);
    808      }
    809    })
    810  );
    811 }
    812 
    813 function nullCandidate(pc) {
    814  return new Promise(r =>
    815    pc.addEventListener("icecandidate", function listener(e) {
    816      if (!e.candidate) {
    817        pc.removeEventListener("icecandidate", listener);
    818        r(e);
    819      }
    820    })
    821  );
    822 }
    823 
    824 function connectionStateReached(object, state) {
    825  if (object instanceof RTCIceTransport || object instanceof RTCDtlsTransport) {
    826    return new Promise(resolve =>
    827      object.addEventListener("statechange", function listener() {
    828        if (object.state == state) {
    829          object.removeEventListener("statechange", listener);
    830          resolve(state);
    831        }
    832      })
    833    );
    834  } else if (object instanceof RTCPeerConnection) {
    835    return new Promise(resolve =>
    836      object.addEventListener("connectionstatechange", function listener() {
    837        if (object.connectionState == state) {
    838          object.removeEventListener("connectionstatechange", listener);
    839          resolve(state);
    840        }
    841      })
    842    );
    843  } else {
    844    throw "First parameter is neither an RTCIceTransport, an RTCDtlsTransport, nor an RTCPeerConnection";
    845  }
    846 }
    847 
    848 function nextConnectionState(object) {
    849  if (object instanceof RTCIceTransport || object instanceof RTCDtlsTransport) {
    850    return new Promise(resolve =>
    851      object.addEventListener("statechange", () => resolve(object.state), {
    852        once: true,
    853      })
    854    );
    855  } else if (object instanceof RTCPeerConnection) {
    856    return new Promise(resolve =>
    857      object.addEventListener(
    858        "connectionstatechange",
    859        () => resolve(object.connectionState),
    860        { once: true }
    861      )
    862    );
    863  } else {
    864    throw "First parameter is neither an RTCIceTransport, an RTCDtlsTransport, nor an RTCPeerConnection";
    865  }
    866 }
    867 
    868 function nextIceConnectionState(pc) {
    869  if (pc instanceof RTCPeerConnection) {
    870    return new Promise(resolve =>
    871      pc.addEventListener(
    872        "iceconnectionstatechange",
    873        () => resolve(pc.iceConnectionState),
    874        { once: true }
    875      )
    876    );
    877  } else {
    878    throw "First parameter is not an RTCPeerConnection";
    879  }
    880 }
    881 
    882 async function queueAWebrtcTask() {
    883  const pc = new RTCPeerConnection();
    884  pc.addTransceiver('audio');
    885  await new Promise(r => pc.onnegotiationneeded = r);
    886 }