PeerConnectionClient.java (51629B)
1 /* 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. 3 * 4 * Use of this source code is governed by a BSD-style license 5 * that can be found in the LICENSE file in the root of the source 6 * tree. An additional intellectual property rights grant can be found 7 * in the file PATENTS. All contributing project authors may 8 * be found in the AUTHORS file in the root of the source tree. 9 */ 10 11 package org.appspot.apprtc; 12 13 import android.content.Context; 14 import android.os.Environment; 15 import android.os.ParcelFileDescriptor; 16 import android.util.Log; 17 import androidx.annotation.Nullable; 18 import java.io.File; 19 import java.io.IOException; 20 import java.nio.ByteBuffer; 21 import java.nio.charset.Charset; 22 import java.text.DateFormat; 23 import java.text.SimpleDateFormat; 24 import java.util.ArrayList; 25 import java.util.Arrays; 26 import java.util.Collections; 27 import java.util.Date; 28 import java.util.Iterator; 29 import java.util.List; 30 import java.util.Locale; 31 import java.util.Timer; 32 import java.util.TimerTask; 33 import java.util.concurrent.ExecutorService; 34 import java.util.concurrent.Executors; 35 import java.util.regex.Matcher; 36 import java.util.regex.Pattern; 37 import org.appspot.apprtc.AppRTCClient.SignalingParameters; 38 import org.appspot.apprtc.RecordedAudioToFileController; 39 import org.webrtc.AddIceObserver; 40 import org.webrtc.AudioSource; 41 import org.webrtc.AudioTrack; 42 import org.webrtc.CameraVideoCapturer; 43 import org.webrtc.CandidatePairChangeEvent; 44 import org.webrtc.DataChannel; 45 import org.webrtc.DefaultVideoDecoderFactory; 46 import org.webrtc.DefaultVideoEncoderFactory; 47 import org.webrtc.EglBase; 48 import org.webrtc.IceCandidate; 49 import org.webrtc.IceCandidateErrorEvent; 50 import org.webrtc.Logging; 51 import org.webrtc.MediaConstraints; 52 import org.webrtc.MediaStream; 53 import org.webrtc.MediaStreamTrack; 54 import org.webrtc.PeerConnection; 55 import 
org.webrtc.PeerConnection.IceConnectionState; 56 import org.webrtc.PeerConnection.PeerConnectionState; 57 import org.webrtc.PeerConnectionFactory; 58 import org.webrtc.RTCStatsCollectorCallback; 59 import org.webrtc.RTCStatsReport; 60 import org.webrtc.RtpParameters; 61 import org.webrtc.RtpReceiver; 62 import org.webrtc.RtpSender; 63 import org.webrtc.RtpTransceiver; 64 import org.webrtc.SdpObserver; 65 import org.webrtc.SessionDescription; 66 import org.webrtc.SoftwareVideoDecoderFactory; 67 import org.webrtc.SoftwareVideoEncoderFactory; 68 import org.webrtc.SurfaceTextureHelper; 69 import org.webrtc.VideoCapturer; 70 import org.webrtc.VideoDecoderFactory; 71 import org.webrtc.VideoEncoderFactory; 72 import org.webrtc.VideoSink; 73 import org.webrtc.VideoSource; 74 import org.webrtc.VideoTrack; 75 import org.webrtc.audio.AudioDeviceModule; 76 import org.webrtc.audio.JavaAudioDeviceModule; 77 import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; 78 import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; 79 import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; 80 import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; 81 82 /** 83 * Peer connection client implementation. 84 * 85 * <p>All public methods are routed to local looper thread. 86 * All PeerConnectionEvents callbacks are invoked from the same looper thread. 87 * This class is a singleton. 
88 */ 89 public class PeerConnectionClient { 90 public static final String VIDEO_TRACK_ID = "ARDAMSv0"; 91 public static final String AUDIO_TRACK_ID = "ARDAMSa0"; 92 public static final String VIDEO_TRACK_TYPE = "video"; 93 private static final String TAG = "PCRTCClient"; 94 private static final String VIDEO_CODEC_VP8 = "VP8"; 95 private static final String VIDEO_CODEC_VP9 = "VP9"; 96 private static final String VIDEO_CODEC_H264 = "H264"; 97 private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline"; 98 private static final String VIDEO_CODEC_H264_HIGH = "H264 High"; 99 private static final String VIDEO_CODEC_AV1 = "AV1"; 100 private static final String AUDIO_CODEC_OPUS = "opus"; 101 private static final String AUDIO_CODEC_ISAC = "ISAC"; 102 private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate"; 103 private static final String VIDEO_FLEXFEC_FIELDTRIAL = 104 "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; 105 private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL = 106 "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"; 107 private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate"; 108 private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"; 109 private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"; 110 private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter"; 111 private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression"; 112 private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement"; 113 private static final int HD_VIDEO_WIDTH = 1280; 114 private static final int HD_VIDEO_HEIGHT = 720; 115 private static final int BPS_IN_KBPS = 1000; 116 private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log"; 117 118 // Executor thread is started once in private ctor and is used for all 119 // peer connection API calls to ensure 
new peer connection factory is 120 // created on the same thread as previously destroyed factory. 121 private static final ExecutorService executor = Executors.newSingleThreadExecutor(); 122 123 private final PCObserver pcObserver = new PCObserver(); 124 private final SDPObserver sdpObserver = new SDPObserver(); 125 private final Timer statsTimer = new Timer(); 126 private final EglBase rootEglBase; 127 private final Context appContext; 128 private final PeerConnectionParameters peerConnectionParameters; 129 private final PeerConnectionEvents events; 130 131 @Nullable 132 private PeerConnectionFactory factory; 133 @Nullable 134 private PeerConnection peerConnection; 135 @Nullable 136 private AudioSource audioSource; 137 @Nullable private SurfaceTextureHelper surfaceTextureHelper; 138 @Nullable private VideoSource videoSource; 139 private boolean preferIsac; 140 private boolean videoCapturerStopped; 141 private boolean isError; 142 @Nullable 143 private VideoSink localRender; 144 @Nullable private List<VideoSink> remoteSinks; 145 private SignalingParameters signalingParameters; 146 private int videoWidth; 147 private int videoHeight; 148 private int videoFps; 149 private MediaConstraints audioConstraints; 150 private MediaConstraints sdpMediaConstraints; 151 // Queued remote ICE candidates are consumed only after both local and 152 // remote descriptions are set. Similarly local ICE candidates are sent to 153 // remote peer after both local and remote description are set. 154 @Nullable 155 private List<IceCandidate> queuedRemoteCandidates; 156 private boolean isInitiator; 157 @Nullable private SessionDescription localDescription; // either offer or answer description 158 @Nullable 159 private VideoCapturer videoCapturer; 160 // enableVideo is set to true if video should be rendered and sent. 
161 private boolean renderVideo = true; 162 @Nullable 163 private VideoTrack localVideoTrack; 164 @Nullable 165 private VideoTrack remoteVideoTrack; 166 @Nullable 167 private RtpSender localVideoSender; 168 // enableAudio is set to true if audio should be sent. 169 private boolean enableAudio = true; 170 @Nullable 171 private AudioTrack localAudioTrack; 172 @Nullable 173 private DataChannel dataChannel; 174 private final boolean dataChannelEnabled; 175 // Enable RtcEventLog. 176 @Nullable 177 private RtcEventLog rtcEventLog; 178 // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes 179 // recorded audio samples to an output file. 180 @Nullable private RecordedAudioToFileController saveRecordedAudioToFile; 181 182 /** 183 * Peer connection parameters. 184 */ 185 public static class DataChannelParameters { 186 public final boolean ordered; 187 public final int maxRetransmitTimeMs; 188 public final int maxRetransmits; 189 public final String protocol; 190 public final boolean negotiated; 191 public final int id; 192 193 public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits, 194 String protocol, boolean negotiated, int id) { 195 this.ordered = ordered; 196 this.maxRetransmitTimeMs = maxRetransmitTimeMs; 197 this.maxRetransmits = maxRetransmits; 198 this.protocol = protocol; 199 this.negotiated = negotiated; 200 this.id = id; 201 } 202 } 203 204 /** 205 * Peer connection parameters. 
206 */ 207 public static class PeerConnectionParameters { 208 public final boolean videoCallEnabled; 209 public final boolean loopback; 210 public final boolean tracing; 211 public final int videoWidth; 212 public final int videoHeight; 213 public final int videoFps; 214 public final int videoMaxBitrate; 215 public final String videoCodec; 216 public final boolean videoCodecHwAcceleration; 217 public final boolean videoFlexfecEnabled; 218 public final int audioStartBitrate; 219 public final String audioCodec; 220 public final boolean noAudioProcessing; 221 public final boolean aecDump; 222 public final boolean saveInputAudioToFile; 223 public final boolean useOpenSLES; 224 public final boolean disableBuiltInAEC; 225 public final boolean disableBuiltInAGC; 226 public final boolean disableBuiltInNS; 227 public final boolean disableWebRtcAGCAndHPF; 228 public final boolean enableRtcEventLog; 229 private final DataChannelParameters dataChannelParameters; 230 231 public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing, 232 int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec, 233 boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate, 234 String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile, 235 boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC, 236 boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog, 237 DataChannelParameters dataChannelParameters) { 238 this.videoCallEnabled = videoCallEnabled; 239 this.loopback = loopback; 240 this.tracing = tracing; 241 this.videoWidth = videoWidth; 242 this.videoHeight = videoHeight; 243 this.videoFps = videoFps; 244 this.videoMaxBitrate = videoMaxBitrate; 245 this.videoCodec = videoCodec; 246 this.videoFlexfecEnabled = videoFlexfecEnabled; 247 this.videoCodecHwAcceleration = videoCodecHwAcceleration; 248 this.audioStartBitrate = 
audioStartBitrate; 249 this.audioCodec = audioCodec; 250 this.noAudioProcessing = noAudioProcessing; 251 this.aecDump = aecDump; 252 this.saveInputAudioToFile = saveInputAudioToFile; 253 this.useOpenSLES = useOpenSLES; 254 this.disableBuiltInAEC = disableBuiltInAEC; 255 this.disableBuiltInAGC = disableBuiltInAGC; 256 this.disableBuiltInNS = disableBuiltInNS; 257 this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF; 258 this.enableRtcEventLog = enableRtcEventLog; 259 this.dataChannelParameters = dataChannelParameters; 260 } 261 } 262 263 /** 264 * Peer connection events. 265 */ 266 public interface PeerConnectionEvents { 267 /** 268 * Callback fired once local SDP is created and set. 269 */ 270 void onLocalDescription(final SessionDescription sdp); 271 272 /** 273 * Callback fired once local Ice candidate is generated. 274 */ 275 void onIceCandidate(final IceCandidate candidate); 276 277 /** 278 * Callback fired once local ICE candidates are removed. 279 */ 280 void onIceCandidatesRemoved(final IceCandidate[] candidates); 281 282 /** 283 * Callback fired once connection is established (IceConnectionState is 284 * CONNECTED). 285 */ 286 void onIceConnected(); 287 288 /** 289 * Callback fired once connection is disconnected (IceConnectionState is 290 * DISCONNECTED). 291 */ 292 void onIceDisconnected(); 293 294 /** 295 * Callback fired once DTLS connection is established (PeerConnectionState 296 * is CONNECTED). 297 */ 298 void onConnected(); 299 300 /** 301 * Callback fired once DTLS connection is disconnected (PeerConnectionState 302 * is DISCONNECTED). 303 */ 304 void onDisconnected(); 305 306 /** 307 * Callback fired once peer connection is closed. 308 */ 309 void onPeerConnectionClosed(); 310 311 /** 312 * Callback fired once peer connection statistics is ready. 313 */ 314 void onPeerConnectionStatsReady(final RTCStatsReport report); 315 316 /** 317 * Callback fired once peer connection error happened. 
318 */ 319 void onPeerConnectionError(final String description); 320 } 321 322 /** 323 * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes 324 * ownership of `eglBase`. 325 */ 326 public PeerConnectionClient(Context appContext, EglBase eglBase, 327 PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) { 328 this.rootEglBase = eglBase; 329 this.appContext = appContext; 330 this.events = events; 331 this.peerConnectionParameters = peerConnectionParameters; 332 this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null; 333 334 Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters)); 335 336 executor.execute(() -> { 337 Log.d(TAG, "Initialize WebRTC."); 338 PeerConnectionFactory.initialize( 339 PeerConnectionFactory.InitializationOptions.builder(appContext) 340 .setEnableInternalTracer(true) 341 .createInitializationOptions()); 342 }); 343 } 344 345 /** 346 * This function should only be called once. 
347 */ 348 public void createPeerConnectionFactory(PeerConnectionFactory.Options options) { 349 if (factory != null) { 350 throw new IllegalStateException("PeerConnectionFactory has already been constructed"); 351 } 352 executor.execute(() -> createPeerConnectionFactoryInternal(options)); 353 } 354 355 public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink, 356 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { 357 if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) { 358 Log.w(TAG, "Video call enabled but no video capturer provided."); 359 } 360 createPeerConnection( 361 localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters); 362 } 363 364 public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks, 365 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { 366 if (peerConnectionParameters == null) { 367 Log.e(TAG, "Creating peer connection without initializing factory."); 368 return; 369 } 370 this.localRender = localRender; 371 this.remoteSinks = remoteSinks; 372 this.videoCapturer = videoCapturer; 373 this.signalingParameters = signalingParameters; 374 executor.execute(() -> { 375 try { 376 createMediaConstraintsInternal(); 377 createPeerConnectionInternal(); 378 maybeCreateAndStartRtcEventLog(); 379 } catch (Exception e) { 380 reportError("Failed to create peer connection: " + e.getMessage()); 381 throw e; 382 } 383 }); 384 } 385 386 public void close() { 387 executor.execute(this ::closeInternal); 388 } 389 390 private boolean isVideoCallEnabled() { 391 return peerConnectionParameters.videoCallEnabled && videoCapturer != null; 392 } 393 394 private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) { 395 isError = false; 396 397 if (peerConnectionParameters.tracing) { 398 PeerConnectionFactory.startInternalTracingCapture( 399 
Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator 400 + "webrtc-trace.txt"); 401 } 402 403 // Check if ISAC is used by default. 404 preferIsac = peerConnectionParameters.audioCodec != null 405 && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC); 406 407 // It is possible to save a copy in raw PCM format on a file by checking 408 // the "Save input audio to file" checkbox in the Settings UI. A callback 409 // interface is set when this flag is enabled. As a result, a copy of recorded 410 // audio samples are provided to this client directly from the native audio 411 // layer in Java. 412 if (peerConnectionParameters.saveInputAudioToFile) { 413 if (!peerConnectionParameters.useOpenSLES) { 414 Log.d(TAG, "Enable recording of microphone input audio to file"); 415 saveRecordedAudioToFile = new RecordedAudioToFileController(executor); 416 } else { 417 // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected, 418 // then the "Save inut audio to file" option shall be grayed out. 419 Log.e(TAG, "Recording of input audio is not supported for OpenSL ES"); 420 } 421 } 422 423 final AudioDeviceModule adm = createJavaAudioDevice(); 424 425 // Create peer connection factory. 
426 if (options != null) { 427 Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask); 428 } 429 final boolean enableH264HighProfile = 430 VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec); 431 final VideoEncoderFactory encoderFactory; 432 final VideoDecoderFactory decoderFactory; 433 434 if (peerConnectionParameters.videoCodecHwAcceleration) { 435 encoderFactory = new DefaultVideoEncoderFactory( 436 rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile); 437 decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext()); 438 } else { 439 encoderFactory = new SoftwareVideoEncoderFactory(); 440 decoderFactory = new SoftwareVideoDecoderFactory(); 441 } 442 443 // Disable encryption for loopback calls. 444 if (peerConnectionParameters.loopback) { 445 options.disableEncryption = true; 446 } 447 final String fieldTrials = getFieldTrials(peerConnectionParameters); 448 factory = PeerConnectionFactory.builder() 449 .setFieldTrials(fieldTrials) 450 .setOptions(options) 451 .setAudioDeviceModule(adm) 452 .setVideoEncoderFactory(encoderFactory) 453 .setVideoDecoderFactory(decoderFactory) 454 .createPeerConnectionFactory(); 455 Log.d(TAG, "Peer connection factory created. Field trials: " + fieldTrials); 456 adm.release(); 457 } 458 459 AudioDeviceModule createJavaAudioDevice() { 460 // Enable/disable OpenSL ES playback. 461 if (!peerConnectionParameters.useOpenSLES) { 462 Log.w(TAG, "External OpenSLES ADM not implemented yet."); 463 // TODO(magjed): Add support for external OpenSLES ADM. 464 } 465 466 // Set audio record error callbacks. 
467 AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() { 468 @Override 469 public void onWebRtcAudioRecordInitError(String errorMessage) { 470 Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage); 471 reportError(errorMessage); 472 } 473 474 @Override 475 public void onWebRtcAudioRecordStartError( 476 JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) { 477 Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage); 478 reportError(errorMessage); 479 } 480 481 @Override 482 public void onWebRtcAudioRecordError(String errorMessage) { 483 Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage); 484 reportError(errorMessage); 485 } 486 }; 487 488 AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() { 489 @Override 490 public void onWebRtcAudioTrackInitError(String errorMessage) { 491 Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage); 492 reportError(errorMessage); 493 } 494 495 @Override 496 public void onWebRtcAudioTrackStartError( 497 JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) { 498 Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage); 499 reportError(errorMessage); 500 } 501 502 @Override 503 public void onWebRtcAudioTrackError(String errorMessage) { 504 Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage); 505 reportError(errorMessage); 506 } 507 }; 508 509 // Set audio record state callbacks. 510 AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() { 511 @Override 512 public void onWebRtcAudioRecordStart() { 513 Log.i(TAG, "Audio recording starts"); 514 } 515 516 @Override 517 public void onWebRtcAudioRecordStop() { 518 Log.i(TAG, "Audio recording stops"); 519 } 520 }; 521 522 // Set audio track state callbacks. 
523 AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() { 524 @Override 525 public void onWebRtcAudioTrackStart() { 526 Log.i(TAG, "Audio playout starts"); 527 } 528 529 @Override 530 public void onWebRtcAudioTrackStop() { 531 Log.i(TAG, "Audio playout stops"); 532 } 533 }; 534 535 return JavaAudioDeviceModule.builder(appContext) 536 .setSamplesReadyCallback(saveRecordedAudioToFile) 537 .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC) 538 .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS) 539 .setAudioRecordErrorCallback(audioRecordErrorCallback) 540 .setAudioTrackErrorCallback(audioTrackErrorCallback) 541 .setAudioRecordStateCallback(audioRecordStateCallback) 542 .setAudioTrackStateCallback(audioTrackStateCallback) 543 .createAudioDeviceModule(); 544 } 545 546 private void createMediaConstraintsInternal() { 547 // Create video constraints if video call is enabled. 548 if (isVideoCallEnabled()) { 549 videoWidth = peerConnectionParameters.videoWidth; 550 videoHeight = peerConnectionParameters.videoHeight; 551 videoFps = peerConnectionParameters.videoFps; 552 553 // If video resolution is not specified, default to HD. 554 if (videoWidth == 0 || videoHeight == 0) { 555 videoWidth = HD_VIDEO_WIDTH; 556 videoHeight = HD_VIDEO_HEIGHT; 557 } 558 559 // If fps is not specified, default to 30. 560 if (videoFps == 0) { 561 videoFps = 30; 562 } 563 Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps); 564 } 565 566 // Create audio constraints. 
567 audioConstraints = new MediaConstraints(); 568 // added for audio performance measurements 569 if (peerConnectionParameters.noAudioProcessing) { 570 Log.d(TAG, "Disabling audio processing"); 571 audioConstraints.mandatory.add( 572 new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false")); 573 audioConstraints.mandatory.add( 574 new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false")); 575 audioConstraints.mandatory.add( 576 new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false")); 577 audioConstraints.mandatory.add( 578 new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false")); 579 } 580 // Create SDP constraints. 581 sdpMediaConstraints = new MediaConstraints(); 582 sdpMediaConstraints.mandatory.add( 583 new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); 584 sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair( 585 "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled()))); 586 } 587 588 private void createPeerConnectionInternal() { 589 if (factory == null || isError) { 590 Log.e(TAG, "Peerconnection factory is not created"); 591 return; 592 } 593 Log.d(TAG, "Create peer connection."); 594 595 queuedRemoteCandidates = new ArrayList<>(); 596 597 PeerConnection.RTCConfiguration rtcConfig = 598 new PeerConnection.RTCConfiguration(signalingParameters.iceServers); 599 // TCP candidates are only useful when connecting to a server that supports 600 // ICE-TCP. 601 rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; 602 rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; 603 rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; 604 rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; 605 // Use ECDSA encryption. 
606 rtcConfig.keyType = PeerConnection.KeyType.ECDSA; 607 rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; 608 609 peerConnection = factory.createPeerConnection(rtcConfig, pcObserver); 610 611 if (dataChannelEnabled) { 612 DataChannel.Init init = new DataChannel.Init(); 613 init.ordered = peerConnectionParameters.dataChannelParameters.ordered; 614 init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated; 615 init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits; 616 init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs; 617 init.id = peerConnectionParameters.dataChannelParameters.id; 618 init.protocol = peerConnectionParameters.dataChannelParameters.protocol; 619 dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init); 620 } 621 isInitiator = false; 622 623 // Set INFO libjingle logging. 624 // NOTE: this _must_ happen while `factory` is alive! 625 Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO); 626 627 List<String> mediaStreamLabels = Collections.singletonList("ARDAMS"); 628 if (isVideoCallEnabled()) { 629 peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels); 630 // We can add the renderers right away because we don't need to wait for an 631 // answer to get the remote track. 
632 remoteVideoTrack = getRemoteVideoTrack(); 633 remoteVideoTrack.setEnabled(renderVideo); 634 for (VideoSink remoteSink : remoteSinks) { 635 remoteVideoTrack.addSink(remoteSink); 636 } 637 } 638 peerConnection.addTrack(createAudioTrack(), mediaStreamLabels); 639 if (isVideoCallEnabled()) { 640 findVideoSender(); 641 } 642 643 if (peerConnectionParameters.aecDump) { 644 try { 645 ParcelFileDescriptor aecDumpFileDescriptor = 646 ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath() 647 + File.separator + "Download/audio.aecdump"), 648 ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE 649 | ParcelFileDescriptor.MODE_TRUNCATE); 650 factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1); 651 } catch (IOException e) { 652 Log.e(TAG, "Can not open aecdump file", e); 653 } 654 } 655 656 if (saveRecordedAudioToFile != null) { 657 if (saveRecordedAudioToFile.start()) { 658 Log.d(TAG, "Recording input audio to file is activated"); 659 } 660 } 661 Log.d(TAG, "Peer connection created."); 662 } 663 664 private File createRtcEventLogOutputFile() { 665 DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault()); 666 Date date = new Date(); 667 final String outputFileName = "event_log_" + dateFormat.format(date) + ".log"; 668 return new File( 669 appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName); 670 } 671 672 private void maybeCreateAndStartRtcEventLog() { 673 if (appContext == null || peerConnection == null) { 674 return; 675 } 676 if (!peerConnectionParameters.enableRtcEventLog) { 677 Log.d(TAG, "RtcEventLog is disabled."); 678 return; 679 } 680 rtcEventLog = new RtcEventLog(peerConnection); 681 rtcEventLog.start(createRtcEventLogOutputFile()); 682 } 683 684 private void closeInternal() { 685 if (factory != null && peerConnectionParameters.aecDump) { 686 factory.stopAecDump(); 687 } 688 Log.d(TAG, "Closing peer connection."); 689 statsTimer.cancel(); 690 if 
(dataChannel != null) { 691 dataChannel.dispose(); 692 dataChannel = null; 693 } 694 if (rtcEventLog != null) { 695 // RtcEventLog should stop before the peer connection is disposed. 696 rtcEventLog.stop(); 697 rtcEventLog = null; 698 } 699 if (peerConnection != null) { 700 peerConnection.dispose(); 701 peerConnection = null; 702 } 703 Log.d(TAG, "Closing audio source."); 704 if (audioSource != null) { 705 audioSource.dispose(); 706 audioSource = null; 707 } 708 Log.d(TAG, "Stopping capture."); 709 if (videoCapturer != null) { 710 try { 711 videoCapturer.stopCapture(); 712 } catch (InterruptedException e) { 713 throw new RuntimeException(e); 714 } 715 videoCapturerStopped = true; 716 videoCapturer.dispose(); 717 videoCapturer = null; 718 } 719 Log.d(TAG, "Closing video source."); 720 if (videoSource != null) { 721 videoSource.dispose(); 722 videoSource = null; 723 } 724 if (surfaceTextureHelper != null) { 725 surfaceTextureHelper.dispose(); 726 surfaceTextureHelper = null; 727 } 728 if (saveRecordedAudioToFile != null) { 729 Log.d(TAG, "Closing audio file for recorded input audio."); 730 saveRecordedAudioToFile.stop(); 731 saveRecordedAudioToFile = null; 732 } 733 localRender = null; 734 remoteSinks = null; 735 Log.d(TAG, "Closing peer connection factory."); 736 if (factory != null) { 737 factory.dispose(); 738 factory = null; 739 } 740 rootEglBase.release(); 741 Log.d(TAG, "Closing peer connection done."); 742 events.onPeerConnectionClosed(); 743 PeerConnectionFactory.stopInternalTracingCapture(); 744 PeerConnectionFactory.shutdownInternalTracer(); 745 } 746 747 public boolean isHDVideo() { 748 return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720; 749 } 750 751 private void getStats() { 752 if (peerConnection == null || isError) { 753 return; 754 } 755 peerConnection.getStats(new RTCStatsCollectorCallback() { 756 @Override 757 public void onStatsDelivered(RTCStatsReport report) { 758 events.onPeerConnectionStatsReady(report); 759 } 760 }); 761 } 
762 763 public void enableStatsEvents(boolean enable, int periodMs) { 764 if (enable) { 765 try { 766 statsTimer.schedule(new TimerTask() { 767 @Override 768 public void run() { 769 executor.execute(() -> getStats()); 770 } 771 }, 0, periodMs); 772 } catch (Exception e) { 773 Log.e(TAG, "Can not schedule statistics timer", e); 774 } 775 } else { 776 statsTimer.cancel(); 777 } 778 } 779 780 public void setAudioEnabled(final boolean enable) { 781 executor.execute(() -> { 782 enableAudio = enable; 783 if (localAudioTrack != null) { 784 localAudioTrack.setEnabled(enableAudio); 785 } 786 }); 787 } 788 789 public void setVideoEnabled(final boolean enable) { 790 executor.execute(() -> { 791 renderVideo = enable; 792 if (localVideoTrack != null) { 793 localVideoTrack.setEnabled(renderVideo); 794 } 795 if (remoteVideoTrack != null) { 796 remoteVideoTrack.setEnabled(renderVideo); 797 } 798 }); 799 } 800 801 public void createOffer() { 802 executor.execute(() -> { 803 if (peerConnection != null && !isError) { 804 Log.d(TAG, "PC Create OFFER"); 805 isInitiator = true; 806 peerConnection.createOffer(sdpObserver, sdpMediaConstraints); 807 } 808 }); 809 } 810 811 public void createAnswer() { 812 executor.execute(() -> { 813 if (peerConnection != null && !isError) { 814 Log.d(TAG, "PC create ANSWER"); 815 isInitiator = false; 816 peerConnection.createAnswer(sdpObserver, sdpMediaConstraints); 817 } 818 }); 819 } 820 821 public void addRemoteIceCandidate(final IceCandidate candidate) { 822 executor.execute(() -> { 823 if (peerConnection != null && !isError) { 824 if (queuedRemoteCandidates != null) { 825 queuedRemoteCandidates.add(candidate); 826 } else { 827 peerConnection.addIceCandidate(candidate, new AddIceObserver() { 828 @Override 829 public void onAddSuccess() { 830 Log.d(TAG, "Candidate " + candidate + " successfully added."); 831 } 832 @Override 833 public void onAddFailure(String error) { 834 Log.d(TAG, "Candidate " + candidate + " addition failed: " + error); 835 } 836 
}); 837 } 838 } 839 }); 840 } 841 842 public void removeRemoteIceCandidates(final IceCandidate[] candidates) { 843 executor.execute(() -> { 844 if (peerConnection == null || isError) { 845 return; 846 } 847 // Drain the queued remote candidates if there is any so that 848 // they are processed in the proper order. 849 drainCandidates(); 850 peerConnection.removeIceCandidates(candidates); 851 }); 852 } 853 854 public void setRemoteDescription(final SessionDescription desc) { 855 executor.execute(() -> { 856 if (peerConnection == null || isError) { 857 return; 858 } 859 String sdp = desc.description; 860 if (preferIsac) { 861 sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true); 862 } 863 if (isVideoCallEnabled()) { 864 sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false); 865 } 866 if (peerConnectionParameters.audioStartBitrate > 0) { 867 sdp = setStartBitrate( 868 AUDIO_CODEC_OPUS, false, sdp, peerConnectionParameters.audioStartBitrate); 869 } 870 Log.d(TAG, "Set remote SDP."); 871 SessionDescription sdpRemote = new SessionDescription(desc.type, sdp); 872 peerConnection.setRemoteDescription(sdpObserver, sdpRemote); 873 }); 874 } 875 876 public void stopVideoSource() { 877 executor.execute(() -> { 878 if (videoCapturer != null && !videoCapturerStopped) { 879 Log.d(TAG, "Stop video source."); 880 try { 881 videoCapturer.stopCapture(); 882 } catch (InterruptedException e) { 883 } 884 videoCapturerStopped = true; 885 } 886 }); 887 } 888 889 public void startVideoSource() { 890 executor.execute(() -> { 891 if (videoCapturer != null && videoCapturerStopped) { 892 Log.d(TAG, "Restart video source."); 893 videoCapturer.startCapture(videoWidth, videoHeight, videoFps); 894 videoCapturerStopped = false; 895 } 896 }); 897 } 898 899 public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) { 900 executor.execute(() -> { 901 if (peerConnection == null || localVideoSender == null || isError) { 902 return; 903 } 904 Log.d(TAG, "Requested max video 
bitrate: " + maxBitrateKbps); 905 if (localVideoSender == null) { 906 Log.w(TAG, "Sender is not ready."); 907 return; 908 } 909 910 RtpParameters parameters = localVideoSender.getParameters(); 911 if (parameters.encodings.size() == 0) { 912 Log.w(TAG, "RtpParameters are not ready."); 913 return; 914 } 915 916 for (RtpParameters.Encoding encoding : parameters.encodings) { 917 // Null value means no limit. 918 encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS; 919 } 920 if (!localVideoSender.setParameters(parameters)) { 921 Log.e(TAG, "RtpSender.setParameters failed."); 922 } 923 Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps); 924 }); 925 } 926 927 private void reportError(final String errorMessage) { 928 Log.e(TAG, "Peerconnection error: " + errorMessage); 929 executor.execute(() -> { 930 if (!isError) { 931 events.onPeerConnectionError(errorMessage); 932 isError = true; 933 } 934 }); 935 } 936 937 @Nullable 938 private AudioTrack createAudioTrack() { 939 audioSource = factory.createAudioSource(audioConstraints); 940 localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource); 941 localAudioTrack.setEnabled(enableAudio); 942 return localAudioTrack; 943 } 944 945 @Nullable 946 private VideoTrack createVideoTrack(VideoCapturer capturer) { 947 surfaceTextureHelper = 948 SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext()); 949 videoSource = factory.createVideoSource(capturer.isScreencast()); 950 capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver()); 951 capturer.startCapture(videoWidth, videoHeight, videoFps); 952 953 localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource); 954 localVideoTrack.setEnabled(renderVideo); 955 localVideoTrack.addSink(localRender); 956 return localVideoTrack; 957 } 958 959 private void findVideoSender() { 960 for (RtpSender sender : peerConnection.getSenders()) { 961 if (sender.track() != null) { 962 
String trackType = sender.track().kind(); 963 if (trackType.equals(VIDEO_TRACK_TYPE)) { 964 Log.d(TAG, "Found video sender."); 965 localVideoSender = sender; 966 } 967 } 968 } 969 } 970 971 // Returns the remote VideoTrack, assuming there is only one. 972 private @Nullable VideoTrack getRemoteVideoTrack() { 973 for (RtpTransceiver transceiver : peerConnection.getTransceivers()) { 974 MediaStreamTrack track = transceiver.getReceiver().track(); 975 if (track instanceof VideoTrack) { 976 return (VideoTrack) track; 977 } 978 } 979 return null; 980 } 981 982 private static String getSdpVideoCodecName(PeerConnectionParameters parameters) { 983 switch (parameters.videoCodec) { 984 case VIDEO_CODEC_VP8: 985 return VIDEO_CODEC_VP8; 986 case VIDEO_CODEC_VP9: 987 return VIDEO_CODEC_VP9; 988 case VIDEO_CODEC_AV1: 989 return VIDEO_CODEC_AV1; 990 case VIDEO_CODEC_H264_HIGH: 991 case VIDEO_CODEC_H264_BASELINE: 992 return VIDEO_CODEC_H264; 993 default: 994 return VIDEO_CODEC_VP8; 995 } 996 } 997 998 private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) { 999 String fieldTrials = ""; 1000 if (peerConnectionParameters.videoFlexfecEnabled) { 1001 fieldTrials += VIDEO_FLEXFEC_FIELDTRIAL; 1002 Log.d(TAG, "Enable FlexFEC field trial."); 1003 } 1004 if (peerConnectionParameters.disableWebRtcAGCAndHPF) { 1005 fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL; 1006 Log.d(TAG, "Disable WebRTC AGC field trial."); 1007 } 1008 return fieldTrials; 1009 } 1010 1011 @SuppressWarnings("StringSplitter") 1012 private static String setStartBitrate( 1013 String codec, boolean isVideoCodec, String sdp, int bitrateKbps) { 1014 String[] lines = sdp.split("\r\n"); 1015 int rtpmapLineIndex = -1; 1016 boolean sdpFormatUpdated = false; 1017 String codecRtpMap = null; 1018 // Search for codec rtpmap in format 1019 // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>] 1020 String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"; 1021 Pattern 
codecPattern = Pattern.compile(regex); 1022 for (int i = 0; i < lines.length; i++) { 1023 Matcher codecMatcher = codecPattern.matcher(lines[i]); 1024 if (codecMatcher.matches()) { 1025 codecRtpMap = codecMatcher.group(1); 1026 rtpmapLineIndex = i; 1027 break; 1028 } 1029 } 1030 if (codecRtpMap == null) { 1031 Log.w(TAG, "No rtpmap for " + codec + " codec"); 1032 return sdp; 1033 } 1034 Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]); 1035 1036 // Check if a=fmtp string already exist in remote SDP for this codec and 1037 // update it with new bitrate parameter. 1038 regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$"; 1039 codecPattern = Pattern.compile(regex); 1040 for (int i = 0; i < lines.length; i++) { 1041 Matcher codecMatcher = codecPattern.matcher(lines[i]); 1042 if (codecMatcher.matches()) { 1043 Log.d(TAG, "Found " + codec + " " + lines[i]); 1044 if (isVideoCodec) { 1045 lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; 1046 } else { 1047 lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000); 1048 } 1049 Log.d(TAG, "Update remote SDP line: " + lines[i]); 1050 sdpFormatUpdated = true; 1051 break; 1052 } 1053 } 1054 1055 StringBuilder newSdpDescription = new StringBuilder(); 1056 for (int i = 0; i < lines.length; i++) { 1057 newSdpDescription.append(lines[i]).append("\r\n"); 1058 // Append new a=fmtp line if no such line exist for a codec. 
1059 if (!sdpFormatUpdated && i == rtpmapLineIndex) { 1060 String bitrateSet; 1061 if (isVideoCodec) { 1062 bitrateSet = 1063 "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; 1064 } else { 1065 bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "=" 1066 + (bitrateKbps * 1000); 1067 } 1068 Log.d(TAG, "Add remote SDP line: " + bitrateSet); 1069 newSdpDescription.append(bitrateSet).append("\r\n"); 1070 } 1071 } 1072 return newSdpDescription.toString(); 1073 } 1074 1075 /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */ 1076 private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) { 1077 final String mediaDescription = isAudio ? "m=audio " : "m=video "; 1078 for (int i = 0; i < sdpLines.length; ++i) { 1079 if (sdpLines[i].startsWith(mediaDescription)) { 1080 return i; 1081 } 1082 } 1083 return -1; 1084 } 1085 1086 private static String joinString( 1087 Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd) { 1088 Iterator<? extends CharSequence> iter = s.iterator(); 1089 if (!iter.hasNext()) { 1090 return ""; 1091 } 1092 StringBuilder buffer = new StringBuilder(iter.next()); 1093 while (iter.hasNext()) { 1094 buffer.append(delimiter).append(iter.next()); 1095 } 1096 if (delimiterAtEnd) { 1097 buffer.append(delimiter); 1098 } 1099 return buffer.toString(); 1100 } 1101 1102 private static @Nullable String movePayloadTypesToFront( 1103 List<String> preferredPayloadTypes, String mLine) { 1104 // The format of the media description line should be: m=<media> <port> <proto> <fmt> ... 
1105 final List<String> origLineParts = Arrays.asList(mLine.split(" ")); 1106 if (origLineParts.size() <= 3) { 1107 Log.e(TAG, "Wrong SDP media description format: " + mLine); 1108 return null; 1109 } 1110 final List<String> header = origLineParts.subList(0, 3); 1111 final List<String> unpreferredPayloadTypes = 1112 new ArrayList<>(origLineParts.subList(3, origLineParts.size())); 1113 unpreferredPayloadTypes.removeAll(preferredPayloadTypes); 1114 // Reconstruct the line with `preferredPayloadTypes` moved to the beginning of the payload 1115 // types. 1116 final List<String> newLineParts = new ArrayList<>(); 1117 newLineParts.addAll(header); 1118 newLineParts.addAll(preferredPayloadTypes); 1119 newLineParts.addAll(unpreferredPayloadTypes); 1120 return joinString(newLineParts, " ", false /* delimiterAtEnd */); 1121 } 1122 1123 private static String preferCodec(String sdp, String codec, boolean isAudio) { 1124 final String[] lines = sdp.split("\r\n"); 1125 final int mLineIndex = findMediaDescriptionLine(isAudio, lines); 1126 if (mLineIndex == -1) { 1127 Log.w(TAG, "No mediaDescription line, so can't prefer " + codec); 1128 return sdp; 1129 } 1130 // A list with all the payload types with name `codec`. The payload types are integers in the 1131 // range 96-127, but they are stored as strings here. 
1132 final List<String> codecPayloadTypes = new ArrayList<>(); 1133 // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>] 1134 final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"); 1135 for (String line : lines) { 1136 Matcher codecMatcher = codecPattern.matcher(line); 1137 if (codecMatcher.matches()) { 1138 codecPayloadTypes.add(codecMatcher.group(1)); 1139 } 1140 } 1141 if (codecPayloadTypes.isEmpty()) { 1142 Log.w(TAG, "No payload types with name " + codec); 1143 return sdp; 1144 } 1145 1146 final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]); 1147 if (newMLine == null) { 1148 return sdp; 1149 } 1150 Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine); 1151 lines[mLineIndex] = newMLine; 1152 return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */); 1153 } 1154 1155 private void drainCandidates() { 1156 if (queuedRemoteCandidates != null) { 1157 Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates"); 1158 for (IceCandidate candidate : queuedRemoteCandidates) { 1159 peerConnection.addIceCandidate(candidate, new AddIceObserver() { 1160 @Override 1161 public void onAddSuccess() { 1162 Log.d(TAG, "Candidate " + candidate + " successfully added."); 1163 } 1164 @Override 1165 public void onAddFailure(String error) { 1166 Log.d(TAG, "Candidate " + candidate + " addition failed: " + error); 1167 } 1168 }); 1169 } 1170 queuedRemoteCandidates = null; 1171 } 1172 } 1173 1174 private void switchCameraInternal() { 1175 if (videoCapturer instanceof CameraVideoCapturer) { 1176 if (!isVideoCallEnabled() || isError) { 1177 Log.e(TAG, 1178 "Failed to switch camera. Video: " + isVideoCallEnabled() + ". Error : " + isError); 1179 return; // No video is sent or only one camera is available or error happened. 
1180 } 1181 Log.d(TAG, "Switch camera"); 1182 CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; 1183 cameraVideoCapturer.switchCamera(null); 1184 } else { 1185 Log.d(TAG, "Will not switch camera, video caputurer is not a camera"); 1186 } 1187 } 1188 1189 public void switchCamera() { 1190 executor.execute(this ::switchCameraInternal); 1191 } 1192 1193 public void changeCaptureFormat(final int width, final int height, final int framerate) { 1194 executor.execute(() -> changeCaptureFormatInternal(width, height, framerate)); 1195 } 1196 1197 private void changeCaptureFormatInternal(int width, int height, int framerate) { 1198 if (!isVideoCallEnabled() || isError || videoCapturer == null) { 1199 Log.e(TAG, 1200 "Failed to change capture format. Video: " + isVideoCallEnabled() 1201 + ". Error : " + isError); 1202 return; 1203 } 1204 Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate); 1205 videoSource.adaptOutputFormat(width, height, framerate); 1206 } 1207 1208 // Implementation detail: observe ICE & stream changes and react accordingly. 
  /**
   * Observes PeerConnection callbacks (ICE, connection state, data channels, tracks).
   * Handlers that touch client state re-dispatch onto `executor`; log-only handlers run
   * inline. NOTE(review): callbacks presumably arrive on a WebRTC-internal thread — the
   * executor hop suggests so, but that cannot be confirmed from this file alone.
   */
  private class PCObserver implements PeerConnection.Observer {
    @Override
    public void onIceCandidate(final IceCandidate candidate) {
      // Forward locally gathered candidates to the signaling events listener.
      executor.execute(() -> events.onIceCandidate(candidate));
    }

    @Override
    public void onIceCandidateError(final IceCandidateErrorEvent event) {
      // Log-only: candidate errors are not treated as fatal here.
      Log.d(TAG,
          "IceCandidateError address: " + event.address + ", port: " + event.port + ", url: "
              + event.url + ", errorCode: " + event.errorCode + ", errorText: " + event.errorText);
    }

    @Override
    public void onIceCandidatesRemoved(final IceCandidate[] candidates) {
      executor.execute(() -> events.onIceCandidatesRemoved(candidates));
    }

    @Override
    public void onSignalingChange(PeerConnection.SignalingState newState) {
      Log.d(TAG, "SignalingState: " + newState);
    }

    @Override
    public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) {
      executor.execute(() -> {
        Log.d(TAG, "IceConnectionState: " + newState);
        if (newState == IceConnectionState.CONNECTED) {
          events.onIceConnected();
        } else if (newState == IceConnectionState.DISCONNECTED) {
          events.onIceDisconnected();
        } else if (newState == IceConnectionState.FAILED) {
          // ICE failure is fatal: reportError() latches isError and notifies the listener.
          reportError("ICE connection failed.");
        }
      });
    }

    @Override
    public void onConnectionChange(final PeerConnection.PeerConnectionState newState) {
      executor.execute(() -> {
        Log.d(TAG, "PeerConnectionState: " + newState);
        if (newState == PeerConnectionState.CONNECTED) {
          events.onConnected();
        } else if (newState == PeerConnectionState.DISCONNECTED) {
          events.onDisconnected();
        } else if (newState == PeerConnectionState.FAILED) {
          reportError("DTLS connection failed.");
        }
      });
    }

    @Override
    public void onIceGatheringChange(PeerConnection.IceGatheringState newState) {
      Log.d(TAG, "IceGatheringState: " + newState);
    }

    @Override
    public void onIceConnectionReceivingChange(boolean receiving) {
      Log.d(TAG, "IceConnectionReceiving changed to " + receiving);
    }

    @Override
    public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) {
      Log.d(TAG, "Selected candidate pair changed because: " + event.reason);
    }

    // Stream-based callbacks are intentionally ignored; this client works with tracks.
    @Override
    public void onAddStream(final MediaStream stream) {}

    @Override
    public void onRemoveStream(final MediaStream stream) {}

    @Override
    public void onDataChannel(final DataChannel dc) {
      Log.d(TAG, "New Data channel " + dc.label());

      // Only observe remote data channels when the feature was enabled for this call.
      if (!dataChannelEnabled)
        return;

      dc.registerObserver(new DataChannel.Observer() {
        @Override
        public void onBufferedAmountChange(long previousAmount) {
          // NOTE(review): message says "buffered amount changed" but logs dc.state() and
          // ignores previousAmount — looks like a copy/paste of onStateChange's log.
          Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state());
        }

        @Override
        public void onStateChange() {
          Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state());
        }

        @Override
        public void onMessage(final DataChannel.Buffer buffer) {
          if (buffer.binary) {
            Log.d(TAG, "Received binary msg over " + dc);
            return;
          }
          ByteBuffer data = buffer.data;
          // NOTE(review): reads capacity() bytes, which assumes the buffer is exactly
          // full (position 0, limit == capacity); remaining() would be the safer size.
          final byte[] bytes = new byte[data.capacity()];
          data.get(bytes);
          String strData = new String(bytes, Charset.forName("UTF-8"));
          Log.d(TAG, "Got msg: " + strData + " over " + dc);
        }
      });
    }

    @Override
    public void onRenegotiationNeeded() {
      // No need to do anything; AppRTC follows a pre-agreed-upon
      // signaling/negotiation protocol.
    }

    // Track add/remove is handled elsewhere; these callbacks are intentionally empty.
    @Override
    public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {}

    @Override
    public void onRemoveTrack(final RtpReceiver receiver) {}
  }

  // Implementation detail: handle offer creation/signaling and answer setting,
  // as well as adding remote ICE candidates once the answer SDP is set.
  private class SDPObserver implements SdpObserver {
    @Override
    public void onCreateSuccess(final SessionDescription desc) {
      // Guard: localDescription doubles as a "created already" flag, so a second
      // createOffer/createAnswer result on the same connection is reported as an error.
      if (localDescription != null) {
        reportError("Multiple SDP create.");
        return;
      }
      // Munge the generated SDP before applying it: prefer ISAC audio and the
      // configured video codec, mirroring setRemoteDescription().
      String sdp = desc.description;
      if (preferIsac) {
        sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true);
      }
      if (isVideoCallEnabled()) {
        sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false);
      }
      final SessionDescription newDesc = new SessionDescription(desc.type, sdp);
      localDescription = newDesc;
      executor.execute(() -> {
        if (peerConnection != null && !isError) {
          Log.d(TAG, "Set local SDP from " + desc.type);
          peerConnection.setLocalDescription(sdpObserver, newDesc);
        }
      });
    }

    @Override
    public void onSetSuccess() {
      // Shared callback for both setLocalDescription and setRemoteDescription; which
      // one just completed is inferred from isInitiator and which descriptions are set.
      executor.execute(() -> {
        if (peerConnection == null || isError) {
          return;
        }
        if (isInitiator) {
          // For offering peer connection we first create offer and set
          // local SDP, then after receiving answer set remote SDP.
          if (peerConnection.getRemoteDescription() == null) {
            // We've just set our local SDP so time to send it.
            Log.d(TAG, "Local SDP set succesfully");
            events.onLocalDescription(localDescription);
          } else {
            // We've just set remote description, so drain remote
            // and send local ICE candidates.
            Log.d(TAG, "Remote SDP set succesfully");
            drainCandidates();
          }
        } else {
          // For answering peer connection we set remote SDP and then
          // create answer and set local SDP.
          if (peerConnection.getLocalDescription() != null) {
            // We've just set our local SDP so time to send it, drain
            // remote and send local ICE candidates.
            Log.d(TAG, "Local SDP set succesfully");
            events.onLocalDescription(localDescription);
            drainCandidates();
          } else {
            // We've just set remote SDP - do nothing for now -
            // answer will be created soon.
            Log.d(TAG, "Remote SDP set succesfully");
          }
        }
      });
    }

    @Override
    public void onCreateFailure(final String error) {
      reportError("createSDP error: " + error);
    }

    @Override
    public void onSetFailure(final String error) {
      reportError("setSDP error: " + error);
    }
  }
}