1 /* 2 * Copyright 2014 The WebRTC Project Authors. All rights reserved. 3 * 4 * Use of this source code is governed by a BSD-style license 5 * that can be found in the LICENSE file in the root of the source 6 * tree. An additional intellectual property rights grant can be found 7 * in the file PATENTS. All contributing project authors may 8 * be found in the AUTHORS file in the root of the source tree. 9 */ 10 11 package org.appspot.apprtc; 12 13 import android.content.Context; 14 import android.os.Environment; 15 import android.os.ParcelFileDescriptor; 16 import android.util.Log; 17 import androidx.annotation.Nullable; 18 import java.io.File; 19 import java.io.IOException; 20 import java.nio.ByteBuffer; 21 import java.nio.charset.Charset; 22 import java.text.DateFormat; 23 import java.text.SimpleDateFormat; 24 import java.util.ArrayList; 25 import java.util.Arrays; 26 import java.util.Collections; 27 import java.util.Date; 28 import java.util.Iterator; 29 import java.util.List; 30 import java.util.Locale; 31 import java.util.Timer; 32 import java.util.TimerTask; 33 import java.util.concurrent.ExecutorService; 34 import java.util.concurrent.Executors; 35 import java.util.regex.Matcher; 36 import java.util.regex.Pattern; 37 import org.appspot.apprtc.AppRTCClient.SignalingParameters; 38 import org.appspot.apprtc.RecordedAudioToFileController; 39 import org.webrtc.AddIceObserver; 40 import org.webrtc.AudioSource; 41 import org.webrtc.AudioTrack; 42 import org.webrtc.CameraVideoCapturer; 43 import org.webrtc.CandidatePairChangeEvent; 44 import org.webrtc.DataChannel; 45 import org.webrtc.DefaultVideoDecoderFactory; 46 import org.webrtc.DefaultVideoEncoderFactory; 47 import org.webrtc.EglBase; 48 import org.webrtc.IceCandidate; 49 import org.webrtc.IceCandidateErrorEvent; 50 import org.webrtc.Logging; 51 import org.webrtc.MediaConstraints; 52 import org.webrtc.MediaStream; 53 import org.webrtc.MediaStreamTrack; 54 import org.webrtc.PeerConnection; 55 import 
org.webrtc.PeerConnection.IceConnectionState; 56 import org.webrtc.PeerConnection.PeerConnectionState; 57 import org.webrtc.PeerConnectionFactory; 58 import org.webrtc.RTCStatsCollectorCallback; 59 import org.webrtc.RTCStatsReport; 60 import org.webrtc.RtpParameters; 61 import org.webrtc.RtpReceiver; 62 import org.webrtc.RtpSender; 63 import org.webrtc.RtpTransceiver; 64 import org.webrtc.SdpObserver; 65 import org.webrtc.SessionDescription; 66 import org.webrtc.SoftwareVideoDecoderFactory; 67 import org.webrtc.SoftwareVideoEncoderFactory; 68 import org.webrtc.SurfaceTextureHelper; 69 import org.webrtc.VideoCapturer; 70 import org.webrtc.VideoDecoderFactory; 71 import org.webrtc.VideoEncoderFactory; 72 import org.webrtc.VideoSink; 73 import org.webrtc.VideoSource; 74 import org.webrtc.VideoTrack; 75 import org.webrtc.audio.AudioDeviceModule; 76 import org.webrtc.audio.JavaAudioDeviceModule; 77 import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordErrorCallback; 78 import org.webrtc.audio.JavaAudioDeviceModule.AudioRecordStateCallback; 79 import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackErrorCallback; 80 import org.webrtc.audio.JavaAudioDeviceModule.AudioTrackStateCallback; 81 82 /** 83 * Peer connection client implementation. 84 * 85 * <p>All public methods are routed to local looper thread. 86 * All PeerConnectionEvents callbacks are invoked from the same looper thread. 87 * This class is a singleton. 
88 */ 89 public class PeerConnectionClient { 90 public static final String VIDEO_TRACK_ID = "ARDAMSv0"; 91 public static final String AUDIO_TRACK_ID = "ARDAMSa0"; 92 public static final String VIDEO_TRACK_TYPE = "video"; 93 private static final String TAG = "PCRTCClient"; 94 private static final String VIDEO_CODEC_VP8 = "VP8"; 95 private static final String VIDEO_CODEC_VP9 = "VP9"; 96 private static final String VIDEO_CODEC_H264 = "H264"; 97 private static final String VIDEO_CODEC_H264_BASELINE = "H264 Baseline"; 98 private static final String VIDEO_CODEC_H264_HIGH = "H264 High"; 99 private static final String VIDEO_CODEC_AV1 = "AV1"; 100 private static final String AUDIO_CODEC_OPUS = "opus"; 101 private static final String AUDIO_CODEC_ISAC = "ISAC"; 102 private static final String VIDEO_CODEC_PARAM_START_BITRATE = "x-google-start-bitrate"; 103 private static final String VIDEO_FLEXFEC_FIELDTRIAL = 104 "WebRTC-FlexFEC-03-Advertised/Enabled/WebRTC-FlexFEC-03/Enabled/"; 105 private static final String VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL = "WebRTC-IntelVP8/Enabled/"; 106 private static final String DISABLE_WEBRTC_AGC_FIELDTRIAL = 107 "WebRTC-Audio-MinimizeResamplingOnMobile/Enabled/"; 108 private static final String AUDIO_CODEC_PARAM_BITRATE = "maxaveragebitrate"; 109 private static final String AUDIO_ECHO_CANCELLATION_CONSTRAINT = "googEchoCancellation"; 110 private static final String AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT = "googAutoGainControl"; 111 private static final String AUDIO_HIGH_PASS_FILTER_CONSTRAINT = "googHighpassFilter"; 112 private static final String AUDIO_NOISE_SUPPRESSION_CONSTRAINT = "googNoiseSuppression"; 113 private static final String DTLS_SRTP_KEY_AGREEMENT_CONSTRAINT = "DtlsSrtpKeyAgreement"; 114 private static final int HD_VIDEO_WIDTH = 1280; 115 private static final int HD_VIDEO_HEIGHT = 720; 116 private static final int BPS_IN_KBPS = 1000; 117 private static final String RTCEVENTLOG_OUTPUT_DIR_NAME = "rtc_event_log"; 118 119 // Executor 
thread is started once in private ctor and is used for all 120 // peer connection API calls to ensure new peer connection factory is 121 // created on the same thread as previously destroyed factory. 122 private static final ExecutorService executor = Executors.newSingleThreadExecutor(); 123 124 private final PCObserver pcObserver = new PCObserver(); 125 private final SDPObserver sdpObserver = new SDPObserver(); 126 private final Timer statsTimer = new Timer(); 127 private final EglBase rootEglBase; 128 private final Context appContext; 129 private final PeerConnectionParameters peerConnectionParameters; 130 private final PeerConnectionEvents events; 131 132 @Nullable 133 private PeerConnectionFactory factory; 134 @Nullable 135 private PeerConnection peerConnection; 136 @Nullable 137 private AudioSource audioSource; 138 @Nullable private SurfaceTextureHelper surfaceTextureHelper; 139 @Nullable private VideoSource videoSource; 140 private boolean preferIsac; 141 private boolean videoCapturerStopped; 142 private boolean isError; 143 @Nullable 144 private VideoSink localRender; 145 @Nullable private List<VideoSink> remoteSinks; 146 private SignalingParameters signalingParameters; 147 private int videoWidth; 148 private int videoHeight; 149 private int videoFps; 150 private MediaConstraints audioConstraints; 151 private MediaConstraints sdpMediaConstraints; 152 // Queued remote ICE candidates are consumed only after both local and 153 // remote descriptions are set. Similarly local ICE candidates are sent to 154 // remote peer after both local and remote description are set. 155 @Nullable 156 private List<IceCandidate> queuedRemoteCandidates; 157 private boolean isInitiator; 158 @Nullable private SessionDescription localDescription; // either offer or answer description 159 @Nullable 160 private VideoCapturer videoCapturer; 161 // enableVideo is set to true if video should be rendered and sent. 
private boolean renderVideo = true;
  @Nullable
  private VideoTrack localVideoTrack;
  @Nullable
  private VideoTrack remoteVideoTrack;
  // RtpSender for the local video track; used to apply encoding parameters
  // (e.g. max video bitrate) after the track has been added.
  @Nullable
  private RtpSender localVideoSender;
  // enableAudio is set to true if audio should be sent.
  private boolean enableAudio = true;
  @Nullable
  private AudioTrack localAudioTrack;
  @Nullable
  private DataChannel dataChannel;
  // True when DataChannelParameters were supplied in PeerConnectionParameters;
  // set once in the constructor.
  private final boolean dataChannelEnabled;
  // Enable RtcEventLog.
  @Nullable
  private RtcEventLog rtcEventLog;
  // Implements the WebRtcAudioRecordSamplesReadyCallback interface and writes
  // recorded audio samples to an output file.
  @Nullable private RecordedAudioToFileController saveRecordedAudioToFile;

  /**
   * Data channel parameters. Mirrors the fields of {@code DataChannel.Init}
   * that are filled in when the channel is created in
   * createPeerConnectionInternal().
   */
  public static class DataChannelParameters {
    public final boolean ordered;
    public final int maxRetransmitTimeMs;
    public final int maxRetransmits;
    public final String protocol;
    public final boolean negotiated;
    public final int id;

    public DataChannelParameters(boolean ordered, int maxRetransmitTimeMs, int maxRetransmits,
        String protocol, boolean negotiated, int id) {
      this.ordered = ordered;
      this.maxRetransmitTimeMs = maxRetransmitTimeMs;
      this.maxRetransmits = maxRetransmits;
      this.protocol = protocol;
      this.negotiated = negotiated;
      this.id = id;
    }
  }

  /**
   * Peer connection parameters.
207 */ 208 public static class PeerConnectionParameters { 209 public final boolean videoCallEnabled; 210 public final boolean loopback; 211 public final boolean tracing; 212 public final int videoWidth; 213 public final int videoHeight; 214 public final int videoFps; 215 public final int videoMaxBitrate; 216 public final String videoCodec; 217 public final boolean videoCodecHwAcceleration; 218 public final boolean videoFlexfecEnabled; 219 public final int audioStartBitrate; 220 public final String audioCodec; 221 public final boolean noAudioProcessing; 222 public final boolean aecDump; 223 public final boolean saveInputAudioToFile; 224 public final boolean useOpenSLES; 225 public final boolean disableBuiltInAEC; 226 public final boolean disableBuiltInAGC; 227 public final boolean disableBuiltInNS; 228 public final boolean disableWebRtcAGCAndHPF; 229 public final boolean enableRtcEventLog; 230 private final DataChannelParameters dataChannelParameters; 231 PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing, int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec, boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate, String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile, boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC, boolean disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog, DataChannelParameters dataChannelParameters)232 public PeerConnectionParameters(boolean videoCallEnabled, boolean loopback, boolean tracing, 233 int videoWidth, int videoHeight, int videoFps, int videoMaxBitrate, String videoCodec, 234 boolean videoCodecHwAcceleration, boolean videoFlexfecEnabled, int audioStartBitrate, 235 String audioCodec, boolean noAudioProcessing, boolean aecDump, boolean saveInputAudioToFile, 236 boolean useOpenSLES, boolean disableBuiltInAEC, boolean disableBuiltInAGC, 237 boolean 
disableBuiltInNS, boolean disableWebRtcAGCAndHPF, boolean enableRtcEventLog, 238 DataChannelParameters dataChannelParameters) { 239 this.videoCallEnabled = videoCallEnabled; 240 this.loopback = loopback; 241 this.tracing = tracing; 242 this.videoWidth = videoWidth; 243 this.videoHeight = videoHeight; 244 this.videoFps = videoFps; 245 this.videoMaxBitrate = videoMaxBitrate; 246 this.videoCodec = videoCodec; 247 this.videoFlexfecEnabled = videoFlexfecEnabled; 248 this.videoCodecHwAcceleration = videoCodecHwAcceleration; 249 this.audioStartBitrate = audioStartBitrate; 250 this.audioCodec = audioCodec; 251 this.noAudioProcessing = noAudioProcessing; 252 this.aecDump = aecDump; 253 this.saveInputAudioToFile = saveInputAudioToFile; 254 this.useOpenSLES = useOpenSLES; 255 this.disableBuiltInAEC = disableBuiltInAEC; 256 this.disableBuiltInAGC = disableBuiltInAGC; 257 this.disableBuiltInNS = disableBuiltInNS; 258 this.disableWebRtcAGCAndHPF = disableWebRtcAGCAndHPF; 259 this.enableRtcEventLog = enableRtcEventLog; 260 this.dataChannelParameters = dataChannelParameters; 261 } 262 } 263 264 /** 265 * Peer connection events. 266 */ 267 public interface PeerConnectionEvents { 268 /** 269 * Callback fired once local SDP is created and set. 270 */ onLocalDescription(final SessionDescription sdp)271 void onLocalDescription(final SessionDescription sdp); 272 273 /** 274 * Callback fired once local Ice candidate is generated. 275 */ onIceCandidate(final IceCandidate candidate)276 void onIceCandidate(final IceCandidate candidate); 277 278 /** 279 * Callback fired once local ICE candidates are removed. 280 */ onIceCandidatesRemoved(final IceCandidate[] candidates)281 void onIceCandidatesRemoved(final IceCandidate[] candidates); 282 283 /** 284 * Callback fired once connection is established (IceConnectionState is 285 * CONNECTED). 
286 */ onIceConnected()287 void onIceConnected(); 288 289 /** 290 * Callback fired once connection is disconnected (IceConnectionState is 291 * DISCONNECTED). 292 */ onIceDisconnected()293 void onIceDisconnected(); 294 295 /** 296 * Callback fired once DTLS connection is established (PeerConnectionState 297 * is CONNECTED). 298 */ onConnected()299 void onConnected(); 300 301 /** 302 * Callback fired once DTLS connection is disconnected (PeerConnectionState 303 * is DISCONNECTED). 304 */ onDisconnected()305 void onDisconnected(); 306 307 /** 308 * Callback fired once peer connection is closed. 309 */ onPeerConnectionClosed()310 void onPeerConnectionClosed(); 311 312 /** 313 * Callback fired once peer connection statistics is ready. 314 */ onPeerConnectionStatsReady(final RTCStatsReport report)315 void onPeerConnectionStatsReady(final RTCStatsReport report); 316 317 /** 318 * Callback fired once peer connection error happened. 319 */ onPeerConnectionError(final String description)320 void onPeerConnectionError(final String description); 321 } 322 323 /** 324 * Create a PeerConnectionClient with the specified parameters. PeerConnectionClient takes 325 * ownership of `eglBase`. 326 */ PeerConnectionClient(Context appContext, EglBase eglBase, PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events)327 public PeerConnectionClient(Context appContext, EglBase eglBase, 328 PeerConnectionParameters peerConnectionParameters, PeerConnectionEvents events) { 329 this.rootEglBase = eglBase; 330 this.appContext = appContext; 331 this.events = events; 332 this.peerConnectionParameters = peerConnectionParameters; 333 this.dataChannelEnabled = peerConnectionParameters.dataChannelParameters != null; 334 335 Log.d(TAG, "Preferred video codec: " + getSdpVideoCodecName(peerConnectionParameters)); 336 337 final String fieldTrials = getFieldTrials(peerConnectionParameters); 338 executor.execute(() -> { 339 Log.d(TAG, "Initialize WebRTC. 
Field trials: " + fieldTrials); 340 PeerConnectionFactory.initialize( 341 PeerConnectionFactory.InitializationOptions.builder(appContext) 342 .setFieldTrials(fieldTrials) 343 .setEnableInternalTracer(true) 344 .createInitializationOptions()); 345 }); 346 } 347 348 /** 349 * This function should only be called once. 350 */ createPeerConnectionFactory(PeerConnectionFactory.Options options)351 public void createPeerConnectionFactory(PeerConnectionFactory.Options options) { 352 if (factory != null) { 353 throw new IllegalStateException("PeerConnectionFactory has already been constructed"); 354 } 355 executor.execute(() -> createPeerConnectionFactoryInternal(options)); 356 } 357 createPeerConnection(final VideoSink localRender, final VideoSink remoteSink, final VideoCapturer videoCapturer, final SignalingParameters signalingParameters)358 public void createPeerConnection(final VideoSink localRender, final VideoSink remoteSink, 359 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { 360 if (peerConnectionParameters.videoCallEnabled && videoCapturer == null) { 361 Log.w(TAG, "Video call enabled but no video capturer provided."); 362 } 363 createPeerConnection( 364 localRender, Collections.singletonList(remoteSink), videoCapturer, signalingParameters); 365 } 366 createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks, final VideoCapturer videoCapturer, final SignalingParameters signalingParameters)367 public void createPeerConnection(final VideoSink localRender, final List<VideoSink> remoteSinks, 368 final VideoCapturer videoCapturer, final SignalingParameters signalingParameters) { 369 if (peerConnectionParameters == null) { 370 Log.e(TAG, "Creating peer connection without initializing factory."); 371 return; 372 } 373 this.localRender = localRender; 374 this.remoteSinks = remoteSinks; 375 this.videoCapturer = videoCapturer; 376 this.signalingParameters = signalingParameters; 377 executor.execute(() -> { 378 try 
{ 379 createMediaConstraintsInternal(); 380 createPeerConnectionInternal(); 381 maybeCreateAndStartRtcEventLog(); 382 } catch (Exception e) { 383 reportError("Failed to create peer connection: " + e.getMessage()); 384 throw e; 385 } 386 }); 387 } 388 close()389 public void close() { 390 executor.execute(this ::closeInternal); 391 } 392 isVideoCallEnabled()393 private boolean isVideoCallEnabled() { 394 return peerConnectionParameters.videoCallEnabled && videoCapturer != null; 395 } 396 createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options)397 private void createPeerConnectionFactoryInternal(PeerConnectionFactory.Options options) { 398 isError = false; 399 400 if (peerConnectionParameters.tracing) { 401 PeerConnectionFactory.startInternalTracingCapture( 402 Environment.getExternalStorageDirectory().getAbsolutePath() + File.separator 403 + "webrtc-trace.txt"); 404 } 405 406 // Check if ISAC is used by default. 407 preferIsac = peerConnectionParameters.audioCodec != null 408 && peerConnectionParameters.audioCodec.equals(AUDIO_CODEC_ISAC); 409 410 // It is possible to save a copy in raw PCM format on a file by checking 411 // the "Save input audio to file" checkbox in the Settings UI. A callback 412 // interface is set when this flag is enabled. As a result, a copy of recorded 413 // audio samples are provided to this client directly from the native audio 414 // layer in Java. 415 if (peerConnectionParameters.saveInputAudioToFile) { 416 if (!peerConnectionParameters.useOpenSLES) { 417 Log.d(TAG, "Enable recording of microphone input audio to file"); 418 saveRecordedAudioToFile = new RecordedAudioToFileController(executor); 419 } else { 420 // TODO(henrika): ensure that the UI reflects that if OpenSL ES is selected, 421 // then the "Save inut audio to file" option shall be grayed out. 
422 Log.e(TAG, "Recording of input audio is not supported for OpenSL ES"); 423 } 424 } 425 426 final AudioDeviceModule adm = createJavaAudioDevice(); 427 428 // Create peer connection factory. 429 if (options != null) { 430 Log.d(TAG, "Factory networkIgnoreMask option: " + options.networkIgnoreMask); 431 } 432 final boolean enableH264HighProfile = 433 VIDEO_CODEC_H264_HIGH.equals(peerConnectionParameters.videoCodec); 434 final VideoEncoderFactory encoderFactory; 435 final VideoDecoderFactory decoderFactory; 436 437 if (peerConnectionParameters.videoCodecHwAcceleration) { 438 encoderFactory = new DefaultVideoEncoderFactory( 439 rootEglBase.getEglBaseContext(), true /* enableIntelVp8Encoder */, enableH264HighProfile); 440 decoderFactory = new DefaultVideoDecoderFactory(rootEglBase.getEglBaseContext()); 441 } else { 442 encoderFactory = new SoftwareVideoEncoderFactory(); 443 decoderFactory = new SoftwareVideoDecoderFactory(); 444 } 445 446 // Disable encryption for loopback calls. 447 if (peerConnectionParameters.loopback) { 448 options.disableEncryption = true; 449 } 450 factory = PeerConnectionFactory.builder() 451 .setOptions(options) 452 .setAudioDeviceModule(adm) 453 .setVideoEncoderFactory(encoderFactory) 454 .setVideoDecoderFactory(decoderFactory) 455 .createPeerConnectionFactory(); 456 Log.d(TAG, "Peer connection factory created."); 457 adm.release(); 458 } 459 createJavaAudioDevice()460 AudioDeviceModule createJavaAudioDevice() { 461 // Enable/disable OpenSL ES playback. 462 if (!peerConnectionParameters.useOpenSLES) { 463 Log.w(TAG, "External OpenSLES ADM not implemented yet."); 464 // TODO(magjed): Add support for external OpenSLES ADM. 465 } 466 467 // Set audio record error callbacks. 
468 AudioRecordErrorCallback audioRecordErrorCallback = new AudioRecordErrorCallback() { 469 @Override 470 public void onWebRtcAudioRecordInitError(String errorMessage) { 471 Log.e(TAG, "onWebRtcAudioRecordInitError: " + errorMessage); 472 reportError(errorMessage); 473 } 474 475 @Override 476 public void onWebRtcAudioRecordStartError( 477 JavaAudioDeviceModule.AudioRecordStartErrorCode errorCode, String errorMessage) { 478 Log.e(TAG, "onWebRtcAudioRecordStartError: " + errorCode + ". " + errorMessage); 479 reportError(errorMessage); 480 } 481 482 @Override 483 public void onWebRtcAudioRecordError(String errorMessage) { 484 Log.e(TAG, "onWebRtcAudioRecordError: " + errorMessage); 485 reportError(errorMessage); 486 } 487 }; 488 489 AudioTrackErrorCallback audioTrackErrorCallback = new AudioTrackErrorCallback() { 490 @Override 491 public void onWebRtcAudioTrackInitError(String errorMessage) { 492 Log.e(TAG, "onWebRtcAudioTrackInitError: " + errorMessage); 493 reportError(errorMessage); 494 } 495 496 @Override 497 public void onWebRtcAudioTrackStartError( 498 JavaAudioDeviceModule.AudioTrackStartErrorCode errorCode, String errorMessage) { 499 Log.e(TAG, "onWebRtcAudioTrackStartError: " + errorCode + ". " + errorMessage); 500 reportError(errorMessage); 501 } 502 503 @Override 504 public void onWebRtcAudioTrackError(String errorMessage) { 505 Log.e(TAG, "onWebRtcAudioTrackError: " + errorMessage); 506 reportError(errorMessage); 507 } 508 }; 509 510 // Set audio record state callbacks. 511 AudioRecordStateCallback audioRecordStateCallback = new AudioRecordStateCallback() { 512 @Override 513 public void onWebRtcAudioRecordStart() { 514 Log.i(TAG, "Audio recording starts"); 515 } 516 517 @Override 518 public void onWebRtcAudioRecordStop() { 519 Log.i(TAG, "Audio recording stops"); 520 } 521 }; 522 523 // Set audio track state callbacks. 
524 AudioTrackStateCallback audioTrackStateCallback = new AudioTrackStateCallback() { 525 @Override 526 public void onWebRtcAudioTrackStart() { 527 Log.i(TAG, "Audio playout starts"); 528 } 529 530 @Override 531 public void onWebRtcAudioTrackStop() { 532 Log.i(TAG, "Audio playout stops"); 533 } 534 }; 535 536 return JavaAudioDeviceModule.builder(appContext) 537 .setSamplesReadyCallback(saveRecordedAudioToFile) 538 .setUseHardwareAcousticEchoCanceler(!peerConnectionParameters.disableBuiltInAEC) 539 .setUseHardwareNoiseSuppressor(!peerConnectionParameters.disableBuiltInNS) 540 .setAudioRecordErrorCallback(audioRecordErrorCallback) 541 .setAudioTrackErrorCallback(audioTrackErrorCallback) 542 .setAudioRecordStateCallback(audioRecordStateCallback) 543 .setAudioTrackStateCallback(audioTrackStateCallback) 544 .createAudioDeviceModule(); 545 } 546 createMediaConstraintsInternal()547 private void createMediaConstraintsInternal() { 548 // Create video constraints if video call is enabled. 549 if (isVideoCallEnabled()) { 550 videoWidth = peerConnectionParameters.videoWidth; 551 videoHeight = peerConnectionParameters.videoHeight; 552 videoFps = peerConnectionParameters.videoFps; 553 554 // If video resolution is not specified, default to HD. 555 if (videoWidth == 0 || videoHeight == 0) { 556 videoWidth = HD_VIDEO_WIDTH; 557 videoHeight = HD_VIDEO_HEIGHT; 558 } 559 560 // If fps is not specified, default to 30. 561 if (videoFps == 0) { 562 videoFps = 30; 563 } 564 Logging.d(TAG, "Capturing format: " + videoWidth + "x" + videoHeight + "@" + videoFps); 565 } 566 567 // Create audio constraints. 
568 audioConstraints = new MediaConstraints(); 569 // added for audio performance measurements 570 if (peerConnectionParameters.noAudioProcessing) { 571 Log.d(TAG, "Disabling audio processing"); 572 audioConstraints.mandatory.add( 573 new MediaConstraints.KeyValuePair(AUDIO_ECHO_CANCELLATION_CONSTRAINT, "false")); 574 audioConstraints.mandatory.add( 575 new MediaConstraints.KeyValuePair(AUDIO_AUTO_GAIN_CONTROL_CONSTRAINT, "false")); 576 audioConstraints.mandatory.add( 577 new MediaConstraints.KeyValuePair(AUDIO_HIGH_PASS_FILTER_CONSTRAINT, "false")); 578 audioConstraints.mandatory.add( 579 new MediaConstraints.KeyValuePair(AUDIO_NOISE_SUPPRESSION_CONSTRAINT, "false")); 580 } 581 // Create SDP constraints. 582 sdpMediaConstraints = new MediaConstraints(); 583 sdpMediaConstraints.mandatory.add( 584 new MediaConstraints.KeyValuePair("OfferToReceiveAudio", "true")); 585 sdpMediaConstraints.mandatory.add(new MediaConstraints.KeyValuePair( 586 "OfferToReceiveVideo", Boolean.toString(isVideoCallEnabled()))); 587 } 588 createPeerConnectionInternal()589 private void createPeerConnectionInternal() { 590 if (factory == null || isError) { 591 Log.e(TAG, "Peerconnection factory is not created"); 592 return; 593 } 594 Log.d(TAG, "Create peer connection."); 595 596 queuedRemoteCandidates = new ArrayList<>(); 597 598 PeerConnection.RTCConfiguration rtcConfig = 599 new PeerConnection.RTCConfiguration(signalingParameters.iceServers); 600 // TCP candidates are only useful when connecting to a server that supports 601 // ICE-TCP. 602 rtcConfig.tcpCandidatePolicy = PeerConnection.TcpCandidatePolicy.DISABLED; 603 rtcConfig.bundlePolicy = PeerConnection.BundlePolicy.MAXBUNDLE; 604 rtcConfig.rtcpMuxPolicy = PeerConnection.RtcpMuxPolicy.REQUIRE; 605 rtcConfig.continualGatheringPolicy = PeerConnection.ContinualGatheringPolicy.GATHER_CONTINUALLY; 606 // Use ECDSA encryption. 
607 rtcConfig.keyType = PeerConnection.KeyType.ECDSA; 608 rtcConfig.sdpSemantics = PeerConnection.SdpSemantics.UNIFIED_PLAN; 609 610 peerConnection = factory.createPeerConnection(rtcConfig, pcObserver); 611 612 if (dataChannelEnabled) { 613 DataChannel.Init init = new DataChannel.Init(); 614 init.ordered = peerConnectionParameters.dataChannelParameters.ordered; 615 init.negotiated = peerConnectionParameters.dataChannelParameters.negotiated; 616 init.maxRetransmits = peerConnectionParameters.dataChannelParameters.maxRetransmits; 617 init.maxRetransmitTimeMs = peerConnectionParameters.dataChannelParameters.maxRetransmitTimeMs; 618 init.id = peerConnectionParameters.dataChannelParameters.id; 619 init.protocol = peerConnectionParameters.dataChannelParameters.protocol; 620 dataChannel = peerConnection.createDataChannel("ApprtcDemo data", init); 621 } 622 isInitiator = false; 623 624 // Set INFO libjingle logging. 625 // NOTE: this _must_ happen while `factory` is alive! 626 Logging.enableLogToDebugOutput(Logging.Severity.LS_INFO); 627 628 List<String> mediaStreamLabels = Collections.singletonList("ARDAMS"); 629 if (isVideoCallEnabled()) { 630 peerConnection.addTrack(createVideoTrack(videoCapturer), mediaStreamLabels); 631 // We can add the renderers right away because we don't need to wait for an 632 // answer to get the remote track. 
remoteVideoTrack = getRemoteVideoTrack();
      remoteVideoTrack.setEnabled(renderVideo);
      for (VideoSink remoteSink : remoteSinks) {
        remoteVideoTrack.addSink(remoteSink);
      }
    }
    peerConnection.addTrack(createAudioTrack(), mediaStreamLabels);
    if (isVideoCallEnabled()) {
      findVideoSender();
    }

    if (peerConnectionParameters.aecDump) {
      try {
        // Dump echo-canceller data to external storage; -1 means no size limit
        // is passed to startAecDump.
        ParcelFileDescriptor aecDumpFileDescriptor =
            ParcelFileDescriptor.open(new File(Environment.getExternalStorageDirectory().getPath()
                                          + File.separator + "Download/audio.aecdump"),
                ParcelFileDescriptor.MODE_READ_WRITE | ParcelFileDescriptor.MODE_CREATE
                    | ParcelFileDescriptor.MODE_TRUNCATE);
        factory.startAecDump(aecDumpFileDescriptor.detachFd(), -1);
      } catch (IOException e) {
        Log.e(TAG, "Can not open aecdump file", e);
      }
    }

    if (saveRecordedAudioToFile != null) {
      if (saveRecordedAudioToFile.start()) {
        Log.d(TAG, "Recording input audio to file is activated");
      }
    }
    Log.d(TAG, "Peer connection created.");
  }

  /**
   * Builds a timestamped output file for the RTC event log inside the
   * app-private RTCEVENTLOG_OUTPUT_DIR_NAME directory.
   */
  private File createRtcEventLogOutputFile() {
    // NOTE(review): "hh" is the 12-hour clock with no AM/PM marker, so a
    // morning and an evening log can share a filename. "HH" would be
    // unambiguous — confirm before changing, since it alters filenames.
    DateFormat dateFormat = new SimpleDateFormat("yyyyMMdd_hhmm_ss", Locale.getDefault());
    Date date = new Date();
    final String outputFileName = "event_log_" + dateFormat.format(date) + ".log";
    return new File(
        appContext.getDir(RTCEVENTLOG_OUTPUT_DIR_NAME, Context.MODE_PRIVATE), outputFileName);
  }

  /**
   * Starts the RTC event log if enabled in PeerConnectionParameters.
   * No-op when the peer connection is not created yet or logging is disabled.
   */
  private void maybeCreateAndStartRtcEventLog() {
    if (appContext == null || peerConnection == null) {
      return;
    }
    if (!peerConnectionParameters.enableRtcEventLog) {
      Log.d(TAG, "RtcEventLog is disabled.");
      return;
    }
    rtcEventLog = new RtcEventLog(peerConnection);
    rtcEventLog.start(createRtcEventLogOutputFile());
  }

  // Tears down everything created by this client: data channel, event log,
  // peer connection, sources, capturer, factory, and finally the EGL base.
  // Runs on the executor thread (see close()).
  private void closeInternal() {
    if (factory != null && peerConnectionParameters.aecDump) {
      factory.stopAecDump();
    }
689 Log.d(TAG, "Closing peer connection."); 690 statsTimer.cancel(); 691 if (dataChannel != null) { 692 dataChannel.dispose(); 693 dataChannel = null; 694 } 695 if (rtcEventLog != null) { 696 // RtcEventLog should stop before the peer connection is disposed. 697 rtcEventLog.stop(); 698 rtcEventLog = null; 699 } 700 if (peerConnection != null) { 701 peerConnection.dispose(); 702 peerConnection = null; 703 } 704 Log.d(TAG, "Closing audio source."); 705 if (audioSource != null) { 706 audioSource.dispose(); 707 audioSource = null; 708 } 709 Log.d(TAG, "Stopping capture."); 710 if (videoCapturer != null) { 711 try { 712 videoCapturer.stopCapture(); 713 } catch (InterruptedException e) { 714 throw new RuntimeException(e); 715 } 716 videoCapturerStopped = true; 717 videoCapturer.dispose(); 718 videoCapturer = null; 719 } 720 Log.d(TAG, "Closing video source."); 721 if (videoSource != null) { 722 videoSource.dispose(); 723 videoSource = null; 724 } 725 if (surfaceTextureHelper != null) { 726 surfaceTextureHelper.dispose(); 727 surfaceTextureHelper = null; 728 } 729 if (saveRecordedAudioToFile != null) { 730 Log.d(TAG, "Closing audio file for recorded input audio."); 731 saveRecordedAudioToFile.stop(); 732 saveRecordedAudioToFile = null; 733 } 734 localRender = null; 735 remoteSinks = null; 736 Log.d(TAG, "Closing peer connection factory."); 737 if (factory != null) { 738 factory.dispose(); 739 factory = null; 740 } 741 rootEglBase.release(); 742 Log.d(TAG, "Closing peer connection done."); 743 events.onPeerConnectionClosed(); 744 PeerConnectionFactory.stopInternalTracingCapture(); 745 PeerConnectionFactory.shutdownInternalTracer(); 746 } 747 isHDVideo()748 public boolean isHDVideo() { 749 return isVideoCallEnabled() && videoWidth * videoHeight >= 1280 * 720; 750 } 751 getStats()752 private void getStats() { 753 if (peerConnection == null || isError) { 754 return; 755 } 756 peerConnection.getStats(new RTCStatsCollectorCallback() { 757 @Override 758 public void 
onStatsDelivered(RTCStatsReport report) {
        events.onPeerConnectionStatsReady(report);
      }
    });
  }

  /**
   * Enables or disables periodic stats reporting via
   * PeerConnectionEvents.onPeerConnectionStatsReady.
   *
   * NOTE: java.util.Timer.cancel() is terminal — after stats have been
   * disabled once, a later enableStatsEvents(true, ...) will make schedule()
   * throw IllegalStateException, which is caught and logged below.
   */
  public void enableStatsEvents(boolean enable, int periodMs) {
    if (enable) {
      try {
        statsTimer.schedule(new TimerTask() {
          @Override
          public void run() {
            // Hop to the executor thread; getStats must not run on the timer
            // thread.
            executor.execute(() -> getStats());
          }
        }, 0, periodMs);
      } catch (Exception e) {
        Log.e(TAG, "Can not schedule statistics timer", e);
      }
    } else {
      statsTimer.cancel();
    }
  }

  /** Enables/disables sending of the local audio track (executor thread). */
  public void setAudioEnabled(final boolean enable) {
    executor.execute(() -> {
      enableAudio = enable;
      if (localAudioTrack != null) {
        localAudioTrack.setEnabled(enableAudio);
      }
    });
  }

  /** Enables/disables both local and remote video tracks (executor thread). */
  public void setVideoEnabled(final boolean enable) {
    executor.execute(() -> {
      renderVideo = enable;
      if (localVideoTrack != null) {
        localVideoTrack.setEnabled(renderVideo);
      }
      if (remoteVideoTrack != null) {
        remoteVideoTrack.setEnabled(renderVideo);
      }
    });
  }

  /** Creates an SDP offer; the result is delivered through sdpObserver. */
  public void createOffer() {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        Log.d(TAG, "PC Create OFFER");
        isInitiator = true;
        peerConnection.createOffer(sdpObserver, sdpMediaConstraints);
      }
    });
  }

  /** Creates an SDP answer; the result is delivered through sdpObserver. */
  public void createAnswer() {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        Log.d(TAG, "PC create ANSWER");
        isInitiator = false;
        peerConnection.createAnswer(sdpObserver, sdpMediaConstraints);
      }
    });
  }

  /**
   * Adds a remote ICE candidate. Candidates are queued until both local and
   * remote descriptions are set (queuedRemoteCandidates != null), then added
   * directly.
   */
  public void addRemoteIceCandidate(final IceCandidate candidate) {
    executor.execute(() -> {
      if (peerConnection != null && !isError) {
        if (queuedRemoteCandidates != null) {
          queuedRemoteCandidates.add(candidate);
        } else {
peerConnection.addIceCandidate(candidate, new AddIceObserver() { 829 @Override 830 public void onAddSuccess() { 831 Log.d(TAG, "Candidate " + candidate + " successfully added."); 832 } 833 @Override 834 public void onAddFailure(String error) { 835 Log.d(TAG, "Candidate " + candidate + " addition failed: " + error); 836 } 837 }); 838 } 839 } 840 }); 841 } 842 removeRemoteIceCandidates(final IceCandidate[] candidates)843 public void removeRemoteIceCandidates(final IceCandidate[] candidates) { 844 executor.execute(() -> { 845 if (peerConnection == null || isError) { 846 return; 847 } 848 // Drain the queued remote candidates if there is any so that 849 // they are processed in the proper order. 850 drainCandidates(); 851 peerConnection.removeIceCandidates(candidates); 852 }); 853 } 854 setRemoteDescription(final SessionDescription desc)855 public void setRemoteDescription(final SessionDescription desc) { 856 executor.execute(() -> { 857 if (peerConnection == null || isError) { 858 return; 859 } 860 String sdp = desc.description; 861 if (preferIsac) { 862 sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true); 863 } 864 if (isVideoCallEnabled()) { 865 sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false); 866 } 867 if (peerConnectionParameters.audioStartBitrate > 0) { 868 sdp = setStartBitrate( 869 AUDIO_CODEC_OPUS, false, sdp, peerConnectionParameters.audioStartBitrate); 870 } 871 Log.d(TAG, "Set remote SDP."); 872 SessionDescription sdpRemote = new SessionDescription(desc.type, sdp); 873 peerConnection.setRemoteDescription(sdpObserver, sdpRemote); 874 }); 875 } 876 stopVideoSource()877 public void stopVideoSource() { 878 executor.execute(() -> { 879 if (videoCapturer != null && !videoCapturerStopped) { 880 Log.d(TAG, "Stop video source."); 881 try { 882 videoCapturer.stopCapture(); 883 } catch (InterruptedException e) { 884 } 885 videoCapturerStopped = true; 886 } 887 }); 888 } 889 startVideoSource()890 public void startVideoSource() { 891 
executor.execute(() -> { 892 if (videoCapturer != null && videoCapturerStopped) { 893 Log.d(TAG, "Restart video source."); 894 videoCapturer.startCapture(videoWidth, videoHeight, videoFps); 895 videoCapturerStopped = false; 896 } 897 }); 898 } 899 setVideoMaxBitrate(@ullable final Integer maxBitrateKbps)900 public void setVideoMaxBitrate(@Nullable final Integer maxBitrateKbps) { 901 executor.execute(() -> { 902 if (peerConnection == null || localVideoSender == null || isError) { 903 return; 904 } 905 Log.d(TAG, "Requested max video bitrate: " + maxBitrateKbps); 906 if (localVideoSender == null) { 907 Log.w(TAG, "Sender is not ready."); 908 return; 909 } 910 911 RtpParameters parameters = localVideoSender.getParameters(); 912 if (parameters.encodings.size() == 0) { 913 Log.w(TAG, "RtpParameters are not ready."); 914 return; 915 } 916 917 for (RtpParameters.Encoding encoding : parameters.encodings) { 918 // Null value means no limit. 919 encoding.maxBitrateBps = maxBitrateKbps == null ? null : maxBitrateKbps * BPS_IN_KBPS; 920 } 921 if (!localVideoSender.setParameters(parameters)) { 922 Log.e(TAG, "RtpSender.setParameters failed."); 923 } 924 Log.d(TAG, "Configured max video bitrate to: " + maxBitrateKbps); 925 }); 926 } 927 reportError(final String errorMessage)928 private void reportError(final String errorMessage) { 929 Log.e(TAG, "Peerconnection error: " + errorMessage); 930 executor.execute(() -> { 931 if (!isError) { 932 events.onPeerConnectionError(errorMessage); 933 isError = true; 934 } 935 }); 936 } 937 938 @Nullable createAudioTrack()939 private AudioTrack createAudioTrack() { 940 audioSource = factory.createAudioSource(audioConstraints); 941 localAudioTrack = factory.createAudioTrack(AUDIO_TRACK_ID, audioSource); 942 localAudioTrack.setEnabled(enableAudio); 943 return localAudioTrack; 944 } 945 946 @Nullable createVideoTrack(VideoCapturer capturer)947 private VideoTrack createVideoTrack(VideoCapturer capturer) { 948 surfaceTextureHelper = 949 
SurfaceTextureHelper.create("CaptureThread", rootEglBase.getEglBaseContext()); 950 videoSource = factory.createVideoSource(capturer.isScreencast()); 951 capturer.initialize(surfaceTextureHelper, appContext, videoSource.getCapturerObserver()); 952 capturer.startCapture(videoWidth, videoHeight, videoFps); 953 954 localVideoTrack = factory.createVideoTrack(VIDEO_TRACK_ID, videoSource); 955 localVideoTrack.setEnabled(renderVideo); 956 localVideoTrack.addSink(localRender); 957 return localVideoTrack; 958 } 959 findVideoSender()960 private void findVideoSender() { 961 for (RtpSender sender : peerConnection.getSenders()) { 962 if (sender.track() != null) { 963 String trackType = sender.track().kind(); 964 if (trackType.equals(VIDEO_TRACK_TYPE)) { 965 Log.d(TAG, "Found video sender."); 966 localVideoSender = sender; 967 } 968 } 969 } 970 } 971 972 // Returns the remote VideoTrack, assuming there is only one. getRemoteVideoTrack()973 private @Nullable VideoTrack getRemoteVideoTrack() { 974 for (RtpTransceiver transceiver : peerConnection.getTransceivers()) { 975 MediaStreamTrack track = transceiver.getReceiver().track(); 976 if (track instanceof VideoTrack) { 977 return (VideoTrack) track; 978 } 979 } 980 return null; 981 } 982 getSdpVideoCodecName(PeerConnectionParameters parameters)983 private static String getSdpVideoCodecName(PeerConnectionParameters parameters) { 984 switch (parameters.videoCodec) { 985 case VIDEO_CODEC_VP8: 986 return VIDEO_CODEC_VP8; 987 case VIDEO_CODEC_VP9: 988 return VIDEO_CODEC_VP9; 989 case VIDEO_CODEC_AV1: 990 return VIDEO_CODEC_AV1; 991 case VIDEO_CODEC_H264_HIGH: 992 case VIDEO_CODEC_H264_BASELINE: 993 return VIDEO_CODEC_H264; 994 default: 995 return VIDEO_CODEC_VP8; 996 } 997 } 998 getFieldTrials(PeerConnectionParameters peerConnectionParameters)999 private static String getFieldTrials(PeerConnectionParameters peerConnectionParameters) { 1000 String fieldTrials = ""; 1001 if (peerConnectionParameters.videoFlexfecEnabled) { 1002 fieldTrials 
+= VIDEO_FLEXFEC_FIELDTRIAL; 1003 Log.d(TAG, "Enable FlexFEC field trial."); 1004 } 1005 fieldTrials += VIDEO_VP8_INTEL_HW_ENCODER_FIELDTRIAL; 1006 if (peerConnectionParameters.disableWebRtcAGCAndHPF) { 1007 fieldTrials += DISABLE_WEBRTC_AGC_FIELDTRIAL; 1008 Log.d(TAG, "Disable WebRTC AGC field trial."); 1009 } 1010 return fieldTrials; 1011 } 1012 1013 @SuppressWarnings("StringSplitter") setStartBitrate( String codec, boolean isVideoCodec, String sdp, int bitrateKbps)1014 private static String setStartBitrate( 1015 String codec, boolean isVideoCodec, String sdp, int bitrateKbps) { 1016 String[] lines = sdp.split("\r\n"); 1017 int rtpmapLineIndex = -1; 1018 boolean sdpFormatUpdated = false; 1019 String codecRtpMap = null; 1020 // Search for codec rtpmap in format 1021 // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>] 1022 String regex = "^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"; 1023 Pattern codecPattern = Pattern.compile(regex); 1024 for (int i = 0; i < lines.length; i++) { 1025 Matcher codecMatcher = codecPattern.matcher(lines[i]); 1026 if (codecMatcher.matches()) { 1027 codecRtpMap = codecMatcher.group(1); 1028 rtpmapLineIndex = i; 1029 break; 1030 } 1031 } 1032 if (codecRtpMap == null) { 1033 Log.w(TAG, "No rtpmap for " + codec + " codec"); 1034 return sdp; 1035 } 1036 Log.d(TAG, "Found " + codec + " rtpmap " + codecRtpMap + " at " + lines[rtpmapLineIndex]); 1037 1038 // Check if a=fmtp string already exist in remote SDP for this codec and 1039 // update it with new bitrate parameter. 
1040 regex = "^a=fmtp:" + codecRtpMap + " \\w+=\\d+.*[\r]?$"; 1041 codecPattern = Pattern.compile(regex); 1042 for (int i = 0; i < lines.length; i++) { 1043 Matcher codecMatcher = codecPattern.matcher(lines[i]); 1044 if (codecMatcher.matches()) { 1045 Log.d(TAG, "Found " + codec + " " + lines[i]); 1046 if (isVideoCodec) { 1047 lines[i] += "; " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; 1048 } else { 1049 lines[i] += "; " + AUDIO_CODEC_PARAM_BITRATE + "=" + (bitrateKbps * 1000); 1050 } 1051 Log.d(TAG, "Update remote SDP line: " + lines[i]); 1052 sdpFormatUpdated = true; 1053 break; 1054 } 1055 } 1056 1057 StringBuilder newSdpDescription = new StringBuilder(); 1058 for (int i = 0; i < lines.length; i++) { 1059 newSdpDescription.append(lines[i]).append("\r\n"); 1060 // Append new a=fmtp line if no such line exist for a codec. 1061 if (!sdpFormatUpdated && i == rtpmapLineIndex) { 1062 String bitrateSet; 1063 if (isVideoCodec) { 1064 bitrateSet = 1065 "a=fmtp:" + codecRtpMap + " " + VIDEO_CODEC_PARAM_START_BITRATE + "=" + bitrateKbps; 1066 } else { 1067 bitrateSet = "a=fmtp:" + codecRtpMap + " " + AUDIO_CODEC_PARAM_BITRATE + "=" 1068 + (bitrateKbps * 1000); 1069 } 1070 Log.d(TAG, "Add remote SDP line: " + bitrateSet); 1071 newSdpDescription.append(bitrateSet).append("\r\n"); 1072 } 1073 } 1074 return newSdpDescription.toString(); 1075 } 1076 1077 /** Returns the line number containing "m=audio|video", or -1 if no such line exists. */ findMediaDescriptionLine(boolean isAudio, String[] sdpLines)1078 private static int findMediaDescriptionLine(boolean isAudio, String[] sdpLines) { 1079 final String mediaDescription = isAudio ? "m=audio " : "m=video "; 1080 for (int i = 0; i < sdpLines.length; ++i) { 1081 if (sdpLines[i].startsWith(mediaDescription)) { 1082 return i; 1083 } 1084 } 1085 return -1; 1086 } 1087 joinString( Iterable<? extends CharSequence> s, String delimiter, boolean delimiterAtEnd)1088 private static String joinString( 1089 Iterable<? 
extends CharSequence> s, String delimiter, boolean delimiterAtEnd) { 1090 Iterator<? extends CharSequence> iter = s.iterator(); 1091 if (!iter.hasNext()) { 1092 return ""; 1093 } 1094 StringBuilder buffer = new StringBuilder(iter.next()); 1095 while (iter.hasNext()) { 1096 buffer.append(delimiter).append(iter.next()); 1097 } 1098 if (delimiterAtEnd) { 1099 buffer.append(delimiter); 1100 } 1101 return buffer.toString(); 1102 } 1103 movePayloadTypesToFront( List<String> preferredPayloadTypes, String mLine)1104 private static @Nullable String movePayloadTypesToFront( 1105 List<String> preferredPayloadTypes, String mLine) { 1106 // The format of the media description line should be: m=<media> <port> <proto> <fmt> ... 1107 final List<String> origLineParts = Arrays.asList(mLine.split(" ")); 1108 if (origLineParts.size() <= 3) { 1109 Log.e(TAG, "Wrong SDP media description format: " + mLine); 1110 return null; 1111 } 1112 final List<String> header = origLineParts.subList(0, 3); 1113 final List<String> unpreferredPayloadTypes = 1114 new ArrayList<>(origLineParts.subList(3, origLineParts.size())); 1115 unpreferredPayloadTypes.removeAll(preferredPayloadTypes); 1116 // Reconstruct the line with `preferredPayloadTypes` moved to the beginning of the payload 1117 // types. 1118 final List<String> newLineParts = new ArrayList<>(); 1119 newLineParts.addAll(header); 1120 newLineParts.addAll(preferredPayloadTypes); 1121 newLineParts.addAll(unpreferredPayloadTypes); 1122 return joinString(newLineParts, " ", false /* delimiterAtEnd */); 1123 } 1124 preferCodec(String sdp, String codec, boolean isAudio)1125 private static String preferCodec(String sdp, String codec, boolean isAudio) { 1126 final String[] lines = sdp.split("\r\n"); 1127 final int mLineIndex = findMediaDescriptionLine(isAudio, lines); 1128 if (mLineIndex == -1) { 1129 Log.w(TAG, "No mediaDescription line, so can't prefer " + codec); 1130 return sdp; 1131 } 1132 // A list with all the payload types with name `codec`. 
The payload types are integers in the 1133 // range 96-127, but they are stored as strings here. 1134 final List<String> codecPayloadTypes = new ArrayList<>(); 1135 // a=rtpmap:<payload type> <encoding name>/<clock rate> [/<encoding parameters>] 1136 final Pattern codecPattern = Pattern.compile("^a=rtpmap:(\\d+) " + codec + "(/\\d+)+[\r]?$"); 1137 for (String line : lines) { 1138 Matcher codecMatcher = codecPattern.matcher(line); 1139 if (codecMatcher.matches()) { 1140 codecPayloadTypes.add(codecMatcher.group(1)); 1141 } 1142 } 1143 if (codecPayloadTypes.isEmpty()) { 1144 Log.w(TAG, "No payload types with name " + codec); 1145 return sdp; 1146 } 1147 1148 final String newMLine = movePayloadTypesToFront(codecPayloadTypes, lines[mLineIndex]); 1149 if (newMLine == null) { 1150 return sdp; 1151 } 1152 Log.d(TAG, "Change media description from: " + lines[mLineIndex] + " to " + newMLine); 1153 lines[mLineIndex] = newMLine; 1154 return joinString(Arrays.asList(lines), "\r\n", true /* delimiterAtEnd */); 1155 } 1156 drainCandidates()1157 private void drainCandidates() { 1158 if (queuedRemoteCandidates != null) { 1159 Log.d(TAG, "Add " + queuedRemoteCandidates.size() + " remote candidates"); 1160 for (IceCandidate candidate : queuedRemoteCandidates) { 1161 peerConnection.addIceCandidate(candidate, new AddIceObserver() { 1162 @Override 1163 public void onAddSuccess() { 1164 Log.d(TAG, "Candidate " + candidate + " successfully added."); 1165 } 1166 @Override 1167 public void onAddFailure(String error) { 1168 Log.d(TAG, "Candidate " + candidate + " addition failed: " + error); 1169 } 1170 }); 1171 } 1172 queuedRemoteCandidates = null; 1173 } 1174 } 1175 switchCameraInternal()1176 private void switchCameraInternal() { 1177 if (videoCapturer instanceof CameraVideoCapturer) { 1178 if (!isVideoCallEnabled() || isError) { 1179 Log.e(TAG, 1180 "Failed to switch camera. Video: " + isVideoCallEnabled() + ". 
Error : " + isError); 1181 return; // No video is sent or only one camera is available or error happened. 1182 } 1183 Log.d(TAG, "Switch camera"); 1184 CameraVideoCapturer cameraVideoCapturer = (CameraVideoCapturer) videoCapturer; 1185 cameraVideoCapturer.switchCamera(null); 1186 } else { 1187 Log.d(TAG, "Will not switch camera, video caputurer is not a camera"); 1188 } 1189 } 1190 switchCamera()1191 public void switchCamera() { 1192 executor.execute(this ::switchCameraInternal); 1193 } 1194 changeCaptureFormat(final int width, final int height, final int framerate)1195 public void changeCaptureFormat(final int width, final int height, final int framerate) { 1196 executor.execute(() -> changeCaptureFormatInternal(width, height, framerate)); 1197 } 1198 changeCaptureFormatInternal(int width, int height, int framerate)1199 private void changeCaptureFormatInternal(int width, int height, int framerate) { 1200 if (!isVideoCallEnabled() || isError || videoCapturer == null) { 1201 Log.e(TAG, 1202 "Failed to change capture format. Video: " + isVideoCallEnabled() 1203 + ". Error : " + isError); 1204 return; 1205 } 1206 Log.d(TAG, "changeCaptureFormat: " + width + "x" + height + "@" + framerate); 1207 videoSource.adaptOutputFormat(width, height, framerate); 1208 } 1209 1210 // Implementation detail: observe ICE & stream changes and react accordingly. 
1211 private class PCObserver implements PeerConnection.Observer { 1212 @Override onIceCandidate(final IceCandidate candidate)1213 public void onIceCandidate(final IceCandidate candidate) { 1214 executor.execute(() -> events.onIceCandidate(candidate)); 1215 } 1216 1217 @Override onIceCandidateError(final IceCandidateErrorEvent event)1218 public void onIceCandidateError(final IceCandidateErrorEvent event) { 1219 Log.d(TAG, 1220 "IceCandidateError address: " + event.address + ", port: " + event.port + ", url: " 1221 + event.url + ", errorCode: " + event.errorCode + ", errorText: " + event.errorText); 1222 } 1223 1224 @Override onIceCandidatesRemoved(final IceCandidate[] candidates)1225 public void onIceCandidatesRemoved(final IceCandidate[] candidates) { 1226 executor.execute(() -> events.onIceCandidatesRemoved(candidates)); 1227 } 1228 1229 @Override onSignalingChange(PeerConnection.SignalingState newState)1230 public void onSignalingChange(PeerConnection.SignalingState newState) { 1231 Log.d(TAG, "SignalingState: " + newState); 1232 } 1233 1234 @Override onIceConnectionChange(final PeerConnection.IceConnectionState newState)1235 public void onIceConnectionChange(final PeerConnection.IceConnectionState newState) { 1236 executor.execute(() -> { 1237 Log.d(TAG, "IceConnectionState: " + newState); 1238 if (newState == IceConnectionState.CONNECTED) { 1239 events.onIceConnected(); 1240 } else if (newState == IceConnectionState.DISCONNECTED) { 1241 events.onIceDisconnected(); 1242 } else if (newState == IceConnectionState.FAILED) { 1243 reportError("ICE connection failed."); 1244 } 1245 }); 1246 } 1247 1248 @Override onConnectionChange(final PeerConnection.PeerConnectionState newState)1249 public void onConnectionChange(final PeerConnection.PeerConnectionState newState) { 1250 executor.execute(() -> { 1251 Log.d(TAG, "PeerConnectionState: " + newState); 1252 if (newState == PeerConnectionState.CONNECTED) { 1253 events.onConnected(); 1254 } else if (newState == 
PeerConnectionState.DISCONNECTED) { 1255 events.onDisconnected(); 1256 } else if (newState == PeerConnectionState.FAILED) { 1257 reportError("DTLS connection failed."); 1258 } 1259 }); 1260 } 1261 1262 @Override onIceGatheringChange(PeerConnection.IceGatheringState newState)1263 public void onIceGatheringChange(PeerConnection.IceGatheringState newState) { 1264 Log.d(TAG, "IceGatheringState: " + newState); 1265 } 1266 1267 @Override onIceConnectionReceivingChange(boolean receiving)1268 public void onIceConnectionReceivingChange(boolean receiving) { 1269 Log.d(TAG, "IceConnectionReceiving changed to " + receiving); 1270 } 1271 1272 @Override onSelectedCandidatePairChanged(CandidatePairChangeEvent event)1273 public void onSelectedCandidatePairChanged(CandidatePairChangeEvent event) { 1274 Log.d(TAG, "Selected candidate pair changed because: " + event); 1275 } 1276 1277 @Override onAddStream(final MediaStream stream)1278 public void onAddStream(final MediaStream stream) {} 1279 1280 @Override onRemoveStream(final MediaStream stream)1281 public void onRemoveStream(final MediaStream stream) {} 1282 1283 @Override onDataChannel(final DataChannel dc)1284 public void onDataChannel(final DataChannel dc) { 1285 Log.d(TAG, "New Data channel " + dc.label()); 1286 1287 if (!dataChannelEnabled) 1288 return; 1289 1290 dc.registerObserver(new DataChannel.Observer() { 1291 @Override 1292 public void onBufferedAmountChange(long previousAmount) { 1293 Log.d(TAG, "Data channel buffered amount changed: " + dc.label() + ": " + dc.state()); 1294 } 1295 1296 @Override 1297 public void onStateChange() { 1298 Log.d(TAG, "Data channel state changed: " + dc.label() + ": " + dc.state()); 1299 } 1300 1301 @Override 1302 public void onMessage(final DataChannel.Buffer buffer) { 1303 if (buffer.binary) { 1304 Log.d(TAG, "Received binary msg over " + dc); 1305 return; 1306 } 1307 ByteBuffer data = buffer.data; 1308 final byte[] bytes = new byte[data.capacity()]; 1309 data.get(bytes); 1310 String 
strData = new String(bytes, Charset.forName("UTF-8")); 1311 Log.d(TAG, "Got msg: " + strData + " over " + dc); 1312 } 1313 }); 1314 } 1315 1316 @Override onRenegotiationNeeded()1317 public void onRenegotiationNeeded() { 1318 // No need to do anything; AppRTC follows a pre-agreed-upon 1319 // signaling/negotiation protocol. 1320 } 1321 1322 @Override onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams)1323 public void onAddTrack(final RtpReceiver receiver, final MediaStream[] mediaStreams) {} 1324 1325 @Override onRemoveTrack(final RtpReceiver receiver)1326 public void onRemoveTrack(final RtpReceiver receiver) {} 1327 } 1328 1329 // Implementation detail: handle offer creation/signaling and answer setting, 1330 // as well as adding remote ICE candidates once the answer SDP is set. 1331 private class SDPObserver implements SdpObserver { 1332 @Override onCreateSuccess(final SessionDescription desc)1333 public void onCreateSuccess(final SessionDescription desc) { 1334 if (localDescription != null) { 1335 reportError("Multiple SDP create."); 1336 return; 1337 } 1338 String sdp = desc.description; 1339 if (preferIsac) { 1340 sdp = preferCodec(sdp, AUDIO_CODEC_ISAC, true); 1341 } 1342 if (isVideoCallEnabled()) { 1343 sdp = preferCodec(sdp, getSdpVideoCodecName(peerConnectionParameters), false); 1344 } 1345 final SessionDescription newDesc = new SessionDescription(desc.type, sdp); 1346 localDescription = newDesc; 1347 executor.execute(() -> { 1348 if (peerConnection != null && !isError) { 1349 Log.d(TAG, "Set local SDP from " + desc.type); 1350 peerConnection.setLocalDescription(sdpObserver, newDesc); 1351 } 1352 }); 1353 } 1354 1355 @Override onSetSuccess()1356 public void onSetSuccess() { 1357 executor.execute(() -> { 1358 if (peerConnection == null || isError) { 1359 return; 1360 } 1361 if (isInitiator) { 1362 // For offering peer connection we first create offer and set 1363 // local SDP, then after receiving answer set remote SDP. 
1364 if (peerConnection.getRemoteDescription() == null) { 1365 // We've just set our local SDP so time to send it. 1366 Log.d(TAG, "Local SDP set succesfully"); 1367 events.onLocalDescription(localDescription); 1368 } else { 1369 // We've just set remote description, so drain remote 1370 // and send local ICE candidates. 1371 Log.d(TAG, "Remote SDP set succesfully"); 1372 drainCandidates(); 1373 } 1374 } else { 1375 // For answering peer connection we set remote SDP and then 1376 // create answer and set local SDP. 1377 if (peerConnection.getLocalDescription() != null) { 1378 // We've just set our local SDP so time to send it, drain 1379 // remote and send local ICE candidates. 1380 Log.d(TAG, "Local SDP set succesfully"); 1381 events.onLocalDescription(localDescription); 1382 drainCandidates(); 1383 } else { 1384 // We've just set remote SDP - do nothing for now - 1385 // answer will be created soon. 1386 Log.d(TAG, "Remote SDP set succesfully"); 1387 } 1388 } 1389 }); 1390 } 1391 1392 @Override onCreateFailure(final String error)1393 public void onCreateFailure(final String error) { 1394 reportError("createSDP error: " + error); 1395 } 1396 1397 @Override onSetFailure(final String error)1398 public void onSetFailure(final String error) { 1399 reportError("setSDP error: " + error); 1400 } 1401 } 1402 } 1403