rtp_video_sender.cc (41728B)
/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "call/rtp_video_sender.h"

#include <algorithm>
#include <cmath>
#include <cstddef>
#include <cstdint>
#include <map>
#include <memory>
#include <optional>
#include <string>
#include <utility>
#include <vector>

#include "absl/algorithm/container.h"
#include "absl/base/nullability.h"
#include "absl/strings/string_view.h"
#include "api/array_view.h"
#include "api/call/bitrate_allocation.h"
#include "api/crypto/crypto_options.h"
#include "api/environment/environment.h"
#include "api/fec_controller.h"
#include "api/field_trials_view.h"
#include "api/frame_transformer_interface.h"
#include "api/rtp_headers.h"
#include "api/rtp_parameters.h"
#include "api/scoped_refptr.h"
#include "api/sequence_checker.h"
#include "api/task_queue/pending_task_safety_flag.h"
#include "api/task_queue/task_queue_base.h"
#include "api/transport/rtp/dependency_descriptor.h"
#include "api/units/data_rate.h"
#include "api/units/data_size.h"
#include "api/units/frequency.h"
#include "api/units/time_delta.h"
#include "api/video/encoded_image.h"
#include "api/video/video_bitrate_allocation.h"
#include "api/video/video_codec_type.h"
#include "api/video/video_frame_type.h"
#include "api/video/video_layers_allocation.h"
#include "api/video_codecs/video_codec.h"
#include "api/video_codecs/video_encoder.h"
#include "call/rtp_config.h"
#include "call/rtp_payload_params.h"
#include "call/rtp_transport_controller_send_interface.h"
#include "common_video/frame_counts.h"
#include "common_video/generic_frame_descriptor/generic_frame_info.h"
#include "modules/include/module_fec_types.h"
#include "modules/pacing/packet_router.h"
#include "modules/rtp_rtcp/include/flexfec_sender.h"
#include "modules/rtp_rtcp/include/rtp_rtcp_defines.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "modules/rtp_rtcp/source/rtp_sender_video.h"
#include "modules/rtp_rtcp/source/rtp_sequence_number_map.h"
#include "modules/rtp_rtcp/source/ulpfec_generator.h"
#include "modules/rtp_rtcp/source/video_fec_generator.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/numerics/safe_conversions.h"
#include "rtc_base/synchronization/mutex.h"
#include "rtc_base/trace_event.h"

namespace webrtc {

namespace webrtc_internal_rtp_video_sender {

// Bundles the per-stream RTP/RTCP module, the video packetizer and the
// (optional) FEC generator for one simulcast stream. Ownership of all three
// is transferred into this struct.
RtpStreamSender::RtpStreamSender(
    std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp,
    std::unique_ptr<RTPSenderVideo> sender_video,
    std::unique_ptr<VideoFecGenerator> fec_generator)
    : rtp_rtcp(std::move(rtp_rtcp)),
      sender_video(std::move(sender_video)),
      fec_generator(std::move(fec_generator)) {}

RtpStreamSender::~RtpStreamSender() = default;

}  // namespace webrtc_internal_rtp_video_sender

namespace {
// Minimum number of packets kept in the send-side packet history for NACK
// based retransmission (see SetStorePacketsStatus below).
const int kMinSendSidePacketHistorySize = 600;
// We don't do MTU discovery, so assume that we have the standard ethernet MTU.
const size_t kPathMTU = 1500;

using webrtc_internal_rtp_video_sender::RtpStreamSender;

// Returns true for payload types that carry a picture ID (VP8/VP9, or the
// generic payload when the WebRTC-GenericPictureId field trial is enabled).
// Used below to decide whether a receiver can mark a stream complete without
// retransmitted FEC packets.
bool PayloadTypeSupportsSkippingFecPackets(absl::string_view payload_name,
                                           const FieldTrialsView& trials) {
  const VideoCodecType codecType =
      PayloadStringToCodecType(std::string(payload_name));
  if (codecType == kVideoCodecVP8 || codecType == kVideoCodecVP9) {
    return true;
  }
  if (codecType == kVideoCodecGeneric &&
      trials.IsEnabled("WebRTC-GenericPictureId")) {
    return true;
  }
  return false;
}

// Decides whether RED+ULPFEC should be disabled for this send config, based
// on field trials, whether FlexFEC is active, NACK configuration and the
// consistency of the RED/ULPFEC payload types.
bool ShouldDisableRedAndUlpfec(bool flexfec_enabled,
                               const RtpConfig& rtp_config,
                               const FieldTrialsView& trials) {
  // Consistency of NACK and RED+ULPFEC parameters is checked in this function.
  const bool nack_enabled = rtp_config.nack.rtp_history_ms > 0;

  // Shorthands.
  auto IsRedEnabled = [&]() { return rtp_config.ulpfec.red_payload_type >= 0; };
  auto IsUlpfecEnabled = [&]() {
    return rtp_config.ulpfec.ulpfec_payload_type >= 0;
  };

  bool should_disable_red_and_ulpfec = false;

  if (trials.IsEnabled("WebRTC-DisableUlpFecExperiment")) {
    RTC_LOG(LS_INFO) << "Experiment to disable sending ULPFEC is enabled.";
    should_disable_red_and_ulpfec = true;
  }

  // If enabled, FlexFEC takes priority over RED+ULPFEC.
  if (flexfec_enabled) {
    if (IsUlpfecEnabled()) {
      RTC_LOG(LS_INFO)
          << "Both FlexFEC and ULPFEC are configured. Disabling ULPFEC.";
    }
    should_disable_red_and_ulpfec = true;
  }

  // Payload types without picture ID cannot determine that a stream is
  // complete without retransmitting FEC, so using ULPFEC + NACK for H.264
  // (for instance) is a waste of bandwidth since FEC packets still have to be
  // transmitted. Note that this is not the case with FlexFEC.
  if (nack_enabled && IsUlpfecEnabled() &&
      !PayloadTypeSupportsSkippingFecPackets(rtp_config.payload_name, trials)) {
    RTC_LOG(LS_WARNING)
        << "Transmitting payload type without picture ID using "
           "NACK+ULPFEC is a waste of bandwidth since ULPFEC packets "
           "also have to be retransmitted. Disabling ULPFEC.";
    should_disable_red_and_ulpfec = true;
  }

  // Verify payload types.
  if (IsUlpfecEnabled() ^ IsRedEnabled()) {
    RTC_LOG(LS_WARNING)
        << "Only RED or only ULPFEC enabled, but not both. Disabling both.";
    should_disable_red_and_ulpfec = true;
  }

  return should_disable_red_and_ulpfec;
}

// Creates the FEC generator for the stream at `simulcast_index`, or nullptr
// when no (usable) FEC is configured. FlexFEC takes priority over RED+ULPFEC;
// a previously suspended FlexFEC RTP state is restored if present in
// `suspended_ssrcs`.
// TODO(brandtr): Update this function when we support multistream protection.
std::unique_ptr<VideoFecGenerator> MaybeCreateFecGenerator(
    const Environment& env,
    const RtpConfig& rtp,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    int simulcast_index) {
  // If flexfec is configured that takes priority.
  if (rtp.flexfec.payload_type >= 0) {
    RTC_DCHECK_GE(rtp.flexfec.payload_type, 0);
    RTC_DCHECK_LE(rtp.flexfec.payload_type, 127);
    if (rtp.flexfec.ssrc == 0) {
      RTC_LOG(LS_WARNING) << "FlexFEC is enabled, but no FlexFEC SSRC given. "
                             "Therefore disabling FlexFEC.";
      return nullptr;
    }
    if (rtp.flexfec.protected_media_ssrcs.empty()) {
      RTC_LOG(LS_WARNING)
          << "FlexFEC is enabled, but no protected media SSRC given. "
             "Therefore disabling FlexFEC.";
      return nullptr;
    }

    if (rtp.flexfec.protected_media_ssrcs.size() > 1) {
      RTC_LOG(LS_WARNING)
          << "The supplied FlexfecConfig contained multiple protected "
             "media streams, but our implementation currently only "
             "supports protecting a single media stream. "
             "To avoid confusion, disabling FlexFEC completely.";
      return nullptr;
    }

    if (absl::c_find(rtp.flexfec.protected_media_ssrcs,
                     rtp.ssrcs[simulcast_index]) ==
        rtp.flexfec.protected_media_ssrcs.end()) {
      // Media SSRC not among flexfec protected SSRCs.
      return nullptr;
    }

    // Restore the FlexFEC RTP state (e.g. sequence number) if this SSRC was
    // previously suspended.
    const RtpState* rtp_state = nullptr;
    auto it = suspended_ssrcs.find(rtp.flexfec.ssrc);
    if (it != suspended_ssrcs.end()) {
      rtp_state = &it->second;
    }

    RTC_DCHECK_EQ(1U, rtp.flexfec.protected_media_ssrcs.size());
    return std::make_unique<FlexfecSender>(
        env, rtp.flexfec.payload_type, rtp.flexfec.ssrc,
        rtp.flexfec.protected_media_ssrcs[0], rtp.mid, rtp.extensions,
        RTPSender::FecExtensionSizes(), rtp_state);
  } else if (rtp.ulpfec.red_payload_type >= 0 &&
             rtp.ulpfec.ulpfec_payload_type >= 0 &&
             !ShouldDisableRedAndUlpfec(/*flexfec_enabled=*/false, rtp,
                                        env.field_trials())) {
    // Flexfec not configured, but ulpfec is and is not disabled.
    return std::make_unique<UlpfecGenerator>(env, rtp.ulpfec.red_payload_type,
                                             rtp.ulpfec.ulpfec_payload_type);
  }

  // Not a single FEC is given.
return nullptr;
}

// Builds one RtpStreamSender (RTP/RTCP module + video packetizer + optional
// FEC generator) per SSRC in `rtp_config.ssrcs`. All modules are created in
// the non-sending state, with compound RTCP and a NACK packet history.
std::vector<RtpStreamSender> CreateRtpStreamSenders(
    const Environment& env,
    const RtpConfig& rtp_config,
    const RtpSenderObservers& observers,
    int rtcp_report_interval_ms,
    Transport* send_transport,
    RtpTransportControllerSendInterface* transport,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    RateLimiter* retransmission_rate_limiter,
    FrameEncryptorInterface* frame_encryptor,
    const CryptoOptions& crypto_options,
    scoped_refptr<FrameTransformerInterface> frame_transformer) {
  RTC_DCHECK_GT(rtp_config.ssrcs.size(), 0);

  // Shared configuration; per-stream fields (SSRC, RID, FEC generator, ...)
  // are overwritten inside the loop below.
  RtpRtcpInterface::Configuration configuration;
  configuration.audio = false;
  configuration.receiver_only = false;
  configuration.outgoing_transport = send_transport;
  configuration.intra_frame_callback = observers.intra_frame_callback;
  configuration.rtcp_loss_notification_observer =
      observers.rtcp_loss_notification_observer;
  configuration.network_link_rtcp_observer = transport->GetRtcpObserver();
  configuration.network_state_estimate_observer =
      transport->network_state_estimate_observer();
  configuration.rtt_stats = observers.rtcp_rtt_stats;
  configuration.rtcp_packet_type_counter_observer =
      observers.rtcp_type_observer;
  configuration.report_block_data_observer =
      observers.report_block_data_observer;
  configuration.paced_sender = transport->packet_sender();
  configuration.send_bitrate_observer = observers.bitrate_observer;
  configuration.send_packet_observer = observers.send_packet_observer;
  if (env.field_trials().IsDisabled("WebRTC-DisableRtxRateLimiter")) {
    configuration.retransmission_rate_limiter = retransmission_rate_limiter;
  }
  configuration.rtp_stats_callback = observers.rtp_stats;
  configuration.frame_encryptor = frame_encryptor;
  configuration.require_frame_encryption =
      crypto_options.sframe.require_frame_encryption;
  configuration.extmap_allow_mixed = rtp_config.extmap_allow_mixed;
  configuration.rtcp_report_interval_ms = rtcp_report_interval_ms;
  configuration.enable_send_packet_batching =
      rtp_config.enable_send_packet_batching;

  std::vector<RtpStreamSender> rtp_streams;

  RTC_DCHECK(rtp_config.rtx.ssrcs.empty() ||
             rtp_config.rtx.ssrcs.size() == rtp_config.ssrcs.size());

  // Some streams could have been disabled, but the rids are still there.
  // This will occur when simulcast has been disabled for a codec (e.g. VP9)
  RTC_DCHECK(rtp_config.rids.empty() ||
             rtp_config.rids.size() >= rtp_config.ssrcs.size());

  for (size_t i = 0; i < rtp_config.ssrcs.size(); ++i) {
    RTPSenderVideo::Config video_config;
    configuration.local_media_ssrc = rtp_config.ssrcs[i];

    std::unique_ptr<VideoFecGenerator> fec_generator =
        MaybeCreateFecGenerator(env, rtp_config, suspended_ssrcs, i);
    configuration.fec_generator = fec_generator.get();

    configuration.rtx_send_ssrc =
        rtp_config.GetRtxSsrcAssociatedWithMediaSsrc(rtp_config.ssrcs[i]);
    RTC_DCHECK_EQ(configuration.rtx_send_ssrc.has_value(),
                  !rtp_config.rtx.ssrcs.empty());

    configuration.rid = (i < rtp_config.rids.size()) ? rtp_config.rids[i] : "";

    configuration.need_rtp_packet_infos = rtp_config.lntf.enabled;

    auto rtp_rtcp = std::make_unique<ModuleRtpRtcpImpl2>(env, configuration);
    rtp_rtcp->SetSendingStatus(false);
    rtp_rtcp->SetSendingMediaStatus(false);
    rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);
    // Set NACK.
    rtp_rtcp->SetStorePacketsStatus(true, kMinSendSidePacketHistorySize);

    video_config.clock = &env.clock();
    video_config.rtp_sender = rtp_rtcp->RtpSender();
    video_config.frame_encryptor = frame_encryptor;
    video_config.require_frame_encryption =
        crypto_options.sframe.require_frame_encryption;
    video_config.field_trials = &env.field_trials();
    video_config.enable_retransmit_all_layers =
        !video_config.field_trials->IsDisabled(
            "WebRTC-Video-EnableRetransmitAllLayers");

    const bool using_flexfec =
        fec_generator &&
        fec_generator->GetFecType() == VideoFecGenerator::FecType::kFlexFec;
    const bool should_disable_red_and_ulpfec = ShouldDisableRedAndUlpfec(
        using_flexfec, rtp_config, env.field_trials());
    if (!should_disable_red_and_ulpfec &&
        rtp_config.ulpfec.red_payload_type != -1) {
      video_config.red_payload_type = rtp_config.ulpfec.red_payload_type;
    }
    if (fec_generator) {
      video_config.fec_type = fec_generator->GetFecType();
      video_config.fec_overhead_bytes = fec_generator->MaxPacketOverhead();
    }
    video_config.frame_transformer = frame_transformer;
    video_config.task_queue_factory = &env.task_queue_factory();
    video_config.raw_packetization = rtp_config.raw_payload;

    auto sender_video = std::make_unique<RTPSenderVideo>(video_config);
    rtp_streams.emplace_back(std::move(rtp_rtcp), std::move(sender_video),
                             std::move(fec_generator));
  }
  return rtp_streams;
}

// Returns the codec type for the stream at `simulcast_index`, or nullopt when
// raw payload packetization is configured (no codec-specific handling).
std::optional<VideoCodecType> GetVideoCodecType(const RtpConfig& config,
                                                size_t simulcast_index) {
  auto stream_config = config.GetStreamConfig(simulcast_index);
  if (stream_config.raw_payload) {
    return std::nullopt;
  }
  return PayloadStringToCodecType(stream_config.payload_name);
}

// True if the transport-wide sequence number header extension is negotiated,
// i.e. send-side bandwidth estimation feedback is available.
bool TransportSeqNumExtensionConfigured(const RtpConfig& config) {
  return absl::c_any_of(config.extensions, [](const RtpExtension& ext) {
    return ext.uri == RtpExtension::kTransportSequenceNumberUri;
  });
}

// Returns true when some coded video sequence can be decoded starting with
// this frame without requiring any previous frames.
// e.g. it is the same as a key frame when spatial scalability is not used.
// When spatial scalability is used, then it is true for layer frames of
// a key frame without inter-layer dependencies.
bool IsFirstFrameOfACodedVideoSequence(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  if (encoded_image._frameType != VideoFrameType::kVideoFrameKey) {
    return false;
  }

  if (codec_specific_info != nullptr) {
    if (codec_specific_info->generic_frame_info.has_value()) {
      // This function is used before
      // `codec_specific_info->generic_frame_info->frame_diffs` are calculated,
      // so need to use a more complicated way to check for presence of the
      // dependencies.
      return absl::c_none_of(
          codec_specific_info->generic_frame_info->encoder_buffers,
          [](const CodecBufferUsage& buffer) { return buffer.referenced; });
    }

    if (codec_specific_info->codecType == VideoCodecType::kVideoCodecVP8 ||
        codec_specific_info->codecType == VideoCodecType::kVideoCodecH264 ||
        codec_specific_info->codecType == VideoCodecType::kVideoCodecGeneric) {
      // These codecs do not support intra picture dependencies, so a frame
      // marked as a key frame should be a key frame.
      return true;
    }
  }

  // Without dependencies described in generic format do an educated guess.
  // It might be wrong for VP9 with spatial layer 0 skipped or higher spatial
  // layer not depending on the spatial layer 0. This corner case is
  // unimportant for current usage of this helper function.

  // Use <= to accept both 0 (i.e. the first) and nullopt (i.e. the only).
return encoded_image.SpatialIndex() <= 0;
}

}  // namespace

// Construction wires up one RtpStreamSender per configured SSRC, restores any
// previously suspended RTP/payload state, registers header extensions and
// configures RTX and FEC. Runs on the worker thread; see the note at the
// bottom about thread checker detachment.
RtpVideoSender::RtpVideoSender(
    const Environment& env,
    TaskQueueBase* absl_nonnull transport_queue,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    const std::map<uint32_t, RtpPayloadState>& states,
    const RtpConfig& rtp_config,
    int rtcp_report_interval_ms,
    Transport* send_transport,
    const RtpSenderObservers& observers,
    RtpTransportControllerSendInterface* transport,
    RateLimiter* retransmission_limiter,
    std::unique_ptr<FecController> fec_controller,
    FrameEncryptorInterface* frame_encryptor,
    const CryptoOptions& crypto_options,
    scoped_refptr<FrameTransformerInterface> frame_transformer)
    : env_(env),
      use_frame_rate_for_overhead_(
          env.field_trials().IsEnabled("WebRTC-Video-UseFrameRateForOverhead")),
      has_packet_feedback_(TransportSeqNumExtensionConfigured(rtp_config)),
      transport_queue_(*transport_queue),
      active_(false),
      fec_controller_(std::move(fec_controller)),
      fec_allowed_(true),
      rtp_streams_(CreateRtpStreamSenders(env,
                                          rtp_config,
                                          observers,
                                          rtcp_report_interval_ms,
                                          send_transport,
                                          transport,
                                          suspended_ssrcs,
                                          retransmission_limiter,
                                          frame_encryptor,
                                          crypto_options,
                                          std::move(frame_transformer))),
      rtp_config_(rtp_config),
      transport_(transport),
      independent_frame_ids_(
          env.field_trials().IsDisabled("WebRTC-GenericDescriptorAuth")),
      transport_overhead_bytes_per_packet_(0),
      encoder_target_rate_bps_(0),
      frame_counts_(rtp_config.ssrcs.size()),
      frame_count_observer_(observers.frame_count_observer),
      safety_(PendingTaskSafetyFlag::CreateAttachedToTaskQueue(
          /*alive=*/true,
          transport_queue)) {
  transport_checker_.Detach();
  RTC_DCHECK_EQ(rtp_config_.ssrcs.size(), rtp_streams_.size());
  if (has_packet_feedback_)
    transport_->IncludeOverheadInPacedSender();
  // SSRCs are assumed to be sorted in the same order as `rtp_modules`.
  for (uint32_t ssrc : rtp_config_.ssrcs) {
    // Restore state if it previously existed.
    const RtpPayloadState* state = nullptr;
    auto it = states.find(ssrc);
    if (it != states.end()) {
      state = &it->second;
      shared_frame_id_ = std::max(shared_frame_id_, state->shared_frame_id);
    }
    params_.push_back(RtpPayloadParams(ssrc, state, env.field_trials()));
  }

  // RTP/RTCP initialization.

  for (size_t i = 0; i < rtp_config_.extensions.size(); ++i) {
    const std::string& extension = rtp_config_.extensions[i].uri;
    int id = rtp_config_.extensions[i].id;
    RTC_DCHECK(RtpExtension::IsSupportedForVideo(extension));
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->RegisterRtpHeaderExtension(extension, id);
    }
  }

  ConfigureSsrcs(suspended_ssrcs);

  if (!rtp_config_.mid.empty()) {
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->SetMid(rtp_config_.mid);
    }
  }

  bool fec_enabled = false;
  for (size_t i = 0; i < rtp_streams_.size(); i++) {
    const RtpStreamSender& stream = rtp_streams_[i];
    // Simulcast has one module for each layer. Set the CNAME on all modules.
    stream.rtp_rtcp->SetCNAME(rtp_config_.c_name.c_str());
    stream.rtp_rtcp->SetMaxRtpPacketSize(rtp_config_.max_packet_size);
    stream.rtp_rtcp->RegisterSendPayloadFrequency(
        rtp_config_.GetStreamConfig(i).payload_type,
        kVideoPayloadTypeFrequency);
    if (stream.fec_generator != nullptr) {
      fec_enabled = true;
    }
  }
  // Currently, both ULPFEC and FlexFEC use the same FEC rate calculation
  // logic, so enable that logic if either of those FEC schemes are enabled.
  fec_controller_->SetProtectionMethod(fec_enabled, NackEnabled());

  fec_controller_->SetProtectionCallback(this);

  // Construction happens on the worker thread (see Call::CreateVideoSendStream)
  // but subsequent calls to the RTP state will happen on one of two threads:
  // * The pacer thread for actually sending packets.
  // * The transport thread when tearing down and querying GetRtpState().
  // Detach thread checkers.
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->OnPacketSendingThreadSwitched();
  }
}

RtpVideoSender::~RtpVideoSender() {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  // Deactivate all modules and deregister from feedback before destruction.
  SetActiveModulesLocked(/*sending=*/false);
}

void RtpVideoSender::SetSending(bool enabled) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  if (enabled == active_) {
    return;
  }
  SetActiveModulesLocked(/*sending=*/enabled);
}

// Toggles the sending state of every RTP module and (de)registers this object
// as a stream feedback observer accordingly. Caller must hold `mutex_` (or be
// in a context where it is not needed, e.g. the destructor).
void RtpVideoSender::SetActiveModulesLocked(bool sending) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  if (active_ == sending) {
    return;
  }
  active_ = sending;
  for (const RtpStreamSender& stream : rtp_streams_) {
    SetModuleIsActive(sending, *stream.rtp_rtcp);
  }
  auto* feedback_provider = transport_->GetStreamFeedbackProvider();
  if (!sending) {
    feedback_provider->DeRegisterStreamFeedbackObserver(this);
  } else {
    feedback_provider->RegisterStreamFeedbackObserver(rtp_config_.ssrcs, this);
  }
}

// Flips a single RTP module between sending and not sending, and keeps the
// transport's set of registered sending streams in sync.
void RtpVideoSender::SetModuleIsActive(bool sending,
                                       RtpRtcpInterface& rtp_module) {
  if (rtp_module.SendingMedia() == sending) {
    return;
  }

  // Sends a kRtcpByeCode when going from true to false.
rtp_module.SetSendingStatus(sending);
  rtp_module.SetSendingMediaStatus(sending);
  if (sending) {
    transport_->RegisterSendingRtpStream(rtp_module);
  } else {
    transport_->DeRegisterSendingRtpStream(rtp_module);
  }
}

bool RtpVideoSender::IsActive() {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  return IsActiveLocked();
}

// Caller must hold `mutex_`.
bool RtpVideoSender::IsActiveLocked() {
  return active_ && !rtp_streams_.empty();
}

// Entry point for every encoded frame: picks the RTP stream for the frame's
// simulcast index, updates FEC bookkeeping, maintains the dependency
// structure for the dependency descriptor extension, and hands the frame to
// the packetizer. Returns ERROR_SEND_FAILED when the sender is inactive or
// packetization fails.
EncodedImageCallback::Result RtpVideoSender::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  fec_controller_->UpdateWithEncodedData(encoded_image.size(),
                                         encoded_image._frameType);
  MutexLock lock(&mutex_);
  RTC_DCHECK(!rtp_streams_.empty());
  if (!active_)
    return Result(Result::ERROR_SEND_FAILED);

  shared_frame_id_++;
  size_t simulcast_index = encoded_image.SimulcastIndex().value_or(0);
  RTC_DCHECK_LT(simulcast_index, rtp_streams_.size());

  uint32_t rtp_timestamp =
      encoded_image.RtpTimestamp() +
      rtp_streams_[simulcast_index].rtp_rtcp->StartTimestamp();

  // RTCPSender has its own copy of the timestamp offset, added in
  // RTCPSender::BuildSR, hence we must not add in the offset for this call.
  // TODO(nisse): Delete RTCPSender:timestamp_offset_, and see if we can
  // confine knowledge of the offset to a single place.
  if (!rtp_streams_[simulcast_index].rtp_rtcp->OnSendingRtpFrame(
          encoded_image.RtpTimestamp(), encoded_image.capture_time_ms_,
          rtp_config_.GetStreamConfig(simulcast_index).payload_type,
          encoded_image._frameType == VideoFrameType::kVideoFrameKey)) {
    // The payload router could be active but this module isn't sending.
    return Result(Result::ERROR_SEND_FAILED);
  }

  TimeDelta expected_retransmission_time = TimeDelta::PlusInfinity();
  if (encoded_image.RetransmissionAllowed()) {
    expected_retransmission_time =
        rtp_streams_[simulcast_index].rtp_rtcp->ExpectedRetransmissionTime();
  }

  if (IsFirstFrameOfACodedVideoSequence(encoded_image, codec_specific_info)) {
    // In order to use the dependency descriptor RTP header extension:
    //  - Pass along any `FrameDependencyStructure` templates produced by the
    //    encoder adapter.
    //  - If none were produced the `RtpPayloadParams::*ToGeneric` for the
    //    particular codec have simulated a dependency structure, so provide a
    //    minimal set of templates.
    //  - Otherwise, don't pass along any templates at all which will disable
    //    the generation of a dependency descriptor.
    RTPSenderVideo& sender_video = *rtp_streams_[simulcast_index].sender_video;
    if (codec_specific_info && codec_specific_info->template_structure) {
      sender_video.SetVideoStructure(&*codec_specific_info->template_structure);
    } else if (std::optional<FrameDependencyStructure> structure =
                   params_[simulcast_index].GenericStructure(
                       codec_specific_info)) {
      sender_video.SetVideoStructure(&*structure);
    } else {
      sender_video.SetVideoStructure(nullptr);
    }
  }

  std::optional<int64_t> frame_id;
  if (!independent_frame_ids_) {
    frame_id = shared_frame_id_;
  }

  bool send_result =
      rtp_streams_[simulcast_index].sender_video->SendEncodedImage(
          rtp_config_.GetStreamConfig(simulcast_index).payload_type,
          GetVideoCodecType(rtp_config_, simulcast_index), rtp_timestamp,
          encoded_image,
          params_[simulcast_index].GetRtpVideoHeader(
              encoded_image, codec_specific_info, frame_id),
          expected_retransmission_time, csrcs_);
  if (frame_count_observer_) {
    FrameCounts& counts = frame_counts_[simulcast_index];
    if (encoded_image._frameType == VideoFrameType::kVideoFrameKey) {
      ++counts.key_frames;
    } else if (encoded_image._frameType == VideoFrameType::kVideoFrameDelta) {
      ++counts.delta_frames;
    } else {
      RTC_DCHECK(encoded_image._frameType == VideoFrameType::kEmptyFrame);
    }
    frame_count_observer_->FrameCountUpdated(
        counts, rtp_config_.ssrcs[simulcast_index]);
  }
  if (!send_result)
    return Result(Result::ERROR_SEND_FAILED);

  return Result(Result::OK, rtp_timestamp);
}

// Forwards the (possibly per-simulcast-stream split) bitrate allocation to
// the RTP modules so it can be signaled to the receiver.
void RtpVideoSender::OnBitrateAllocationUpdated(
    const VideoBitrateAllocation& bitrate) {
  RTC_DCHECK_RUN_ON(&transport_checker_);
  MutexLock lock(&mutex_);
  if (IsActiveLocked()) {
    if (rtp_streams_.size() == 1) {
      // If spatial scalability is enabled, it is covered by a single stream.
      rtp_streams_[0].rtp_rtcp->SetVideoBitrateAllocation(bitrate);
    } else {
      std::vector<std::optional<VideoBitrateAllocation>> layer_bitrates =
          bitrate.GetSimulcastAllocations();
      // Simulcast is in use, split the VideoBitrateAllocation into one struct
      // per rtp stream, moving over the temporal layer allocation.
      for (size_t i = 0; i < rtp_streams_.size(); ++i) {
        // The next spatial layer could be used if the current one is
        // inactive.
        if (layer_bitrates[i]) {
          rtp_streams_[i].rtp_rtcp->SetVideoBitrateAllocation(
              *layer_bitrates[i]);
        } else {
          // Signal a 0 bitrate on a simulcast stream.
          rtp_streams_[i].rtp_rtcp->SetVideoBitrateAllocation(
              VideoBitrateAllocation());
        }
      }
    }
  }
}

// Pushes the per-stream layers allocation to the packetizers and re-evaluates
// which RTP modules should be sending based on the active spatial layers.
void RtpVideoSender::OnVideoLayersAllocationUpdated(
    const VideoLayersAllocation& allocation) {
  MutexLock lock(&mutex_);
  if (IsActiveLocked()) {
    for (size_t i = 0; i < rtp_streams_.size(); ++i) {
      VideoLayersAllocation stream_allocation = allocation;
      stream_allocation.rtp_stream_index = i;
      rtp_streams_[i].sender_video->SetVideoLayersAllocation(
          std::move(stream_allocation));
    }

    // Only send video frames on the rtp module if the encoder is configured
    // to send. This is to prevent stray frames to be sent after an encoder
    // has been reconfigured.
    // Reconfiguration of the RtpRtcp modules must happen on the transport
    // queue to avoid races with batch sending of packets.
    std::vector<bool> sending(rtp_streams_.size(), false);
    for (const VideoLayersAllocation::SpatialLayer& layer :
         allocation.active_spatial_layers) {
      if (layer.rtp_stream_index < static_cast<int>(sending.size())) {
        sending[layer.rtp_stream_index] = true;
      }
    }
    transport_queue_.PostTask(
        SafeTask(safety_.flag(), [this, sending = std::move(sending)] {
          RTC_DCHECK_RUN_ON(&transport_checker_);
          RTC_CHECK_EQ(sending.size(), rtp_streams_.size());
          for (size_t i = 0; i < sending.size(); ++i) {
            SetModuleIsActive(sending[i], *rtp_streams_[i].rtp_rtcp);
          }
        }));
  }
}

bool RtpVideoSender::NackEnabled() const {
  const bool nack_enabled = rtp_config_.nack.rtp_history_ms > 0;
  return nack_enabled;
}

// Sums the packetization overhead rate of all currently-sending streams.
DataRate RtpVideoSender::GetPostEncodeOverhead() const {
  DataRate post_encode_overhead = DataRate::Zero();
  for (size_t i = 0; i < rtp_streams_.size(); ++i) {
    if (rtp_streams_[i].rtp_rtcp->SendingMedia()) {
      post_encode_overhead +=
          rtp_streams_[i].sender_video->PostEncodeOverhead();
    }
  }
  return post_encode_overhead;
}

void RtpVideoSender::DeliverRtcp(ArrayView<const uint8_t> packet) {
  // Runs on a network thread.
  for (const RtpStreamSender& stream : rtp_streams_)
    stream.rtp_rtcp->IncomingRtcpPacket(packet);
}

// Restores suspended RTP (and RTX) state per SSRC, builds the SSRC->module
// lookup table, and configures the RTX payload type mappings.
void RtpVideoSender::ConfigureSsrcs(
    const std::map<uint32_t, RtpState>& suspended_ssrcs) {
  // Configure regular SSRCs.
  RTC_CHECK(ssrc_to_rtp_module_.empty());
  for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.ssrcs[i];
    RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();

    // Restore RTP state if previous existed.
    auto it = suspended_ssrcs.find(ssrc);
    if (it != suspended_ssrcs.end())
      rtp_rtcp->SetRtpState(it->second);

    ssrc_to_rtp_module_[ssrc] = rtp_rtcp;
  }

  // Set up RTX if available.
  if (rtp_config_.rtx.ssrcs.empty())
    return;

  RTC_DCHECK_EQ(rtp_config_.rtx.ssrcs.size(), rtp_config_.ssrcs.size());
  for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
    RtpRtcpInterface* const rtp_rtcp = rtp_streams_[i].rtp_rtcp.get();
    auto it = suspended_ssrcs.find(ssrc);
    if (it != suspended_ssrcs.end())
      rtp_rtcp->SetRtxState(it->second);
  }

  // Configure RTX payload types.
  RTC_DCHECK_GE(rtp_config_.rtx.payload_type, 0);
  for (size_t i = 0; i < rtp_streams_.size(); ++i) {
    const RtpStreamSender& stream = rtp_streams_[i];
    RtpStreamConfig stream_config = rtp_config_.GetStreamConfig(i);
    RTC_DCHECK(stream_config.rtx);
    stream.rtp_rtcp->SetRtxSendPayloadType(stream_config.rtx->payload_type,
                                           stream_config.payload_type);
    stream.rtp_rtcp->SetRtxSendStatus(kRtxRetransmitted |
                                      kRtxRedundantPayloads);
  }
  if (rtp_config_.ulpfec.red_payload_type != -1 &&
      rtp_config_.ulpfec.red_rtx_payload_type != -1) {
    // RED packets get their own RTX payload type mapping.
    for (const RtpStreamSender& stream : rtp_streams_) {
      stream.rtp_rtcp->SetRtxSendPayloadType(
          rtp_config_.ulpfec.red_rtx_payload_type,
          rtp_config_.ulpfec.red_payload_type);
    }
  }
}

// Suppresses all RTCP while the network is down; restores the configured RTCP
// mode when it comes back.
void RtpVideoSender::OnNetworkAvailability(bool network_available) {
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetRTCPStatus(network_available ? rtp_config_.rtcp_mode
                                                     : RtcpMode::kOff);
  }
}

// Snapshots the RTP state (sequence numbers etc.) of every media, FEC and RTX
// SSRC so it can be restored if the stream is recreated.
std::map<uint32_t, RtpState> RtpVideoSender::GetRtpStates() const {
  std::map<uint32_t, RtpState> rtp_states;

  for (size_t i = 0; i < rtp_config_.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.ssrcs[i];
    RTC_DCHECK_EQ(ssrc, rtp_streams_[i].rtp_rtcp->SSRC());
    rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtpState();

    // Only happens during shutdown, when RTP module is already inactive,
    // so OK to call fec generator here.
    if (rtp_streams_[i].fec_generator) {
      std::optional<RtpState> fec_state =
          rtp_streams_[i].fec_generator->GetRtpState();
      if (fec_state) {
        uint32_t fec_ssrc = rtp_config_.flexfec.ssrc;
        rtp_states[fec_ssrc] = *fec_state;
      }
    }
  }

  for (size_t i = 0; i < rtp_config_.rtx.ssrcs.size(); ++i) {
    uint32_t ssrc = rtp_config_.rtx.ssrcs[i];
    rtp_states[ssrc] = rtp_streams_[i].rtp_rtcp->GetRtxState();
  }

  return rtp_states;
}

std::map<uint32_t, RtpPayloadState> RtpVideoSender::GetRtpPayloadStates()
    const {
  MutexLock lock(&mutex_);
  std::map<uint32_t, RtpPayloadState> payload_states;
  for (const auto& param : params_) {
    payload_states[param.ssrc()] = param.state();
    payload_states[param.ssrc()].shared_frame_id = shared_frame_id_;
  }
  return payload_states;
}

// Re-caps the maximum RTP packet size so that RTP payload + transport
// overhead stays within the assumed path MTU.
void RtpVideoSender::OnTransportOverheadChanged(
    size_t transport_overhead_bytes_per_packet) {
  MutexLock lock(&mutex_);
  transport_overhead_bytes_per_packet_ = transport_overhead_bytes_per_packet;

  size_t max_rtp_packet_size =
      std::min(rtp_config_.max_packet_size,
               kPathMTU - transport_overhead_bytes_per_packet_);
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetMaxRtpPacketSize(max_rtp_packet_size);
  }
}

// Splits the allocated bitrate into encoder target, FEC protection and
// packetization/transport overhead.
void RtpVideoSender::OnBitrateUpdated(BitrateAllocationUpdate update,
                                      int framerate) {
  // Subtract overhead from bitrate.
  MutexLock lock(&mutex_);
  // Average the expected per-packet overhead across all streams that are
  // currently sending media.
  size_t num_active_streams = 0;
  size_t overhead_bytes_per_packet = 0;
  for (const auto& stream : rtp_streams_) {
    if (stream.rtp_rtcp->SendingMedia()) {
      overhead_bytes_per_packet += stream.rtp_rtcp->ExpectedPerPacketOverhead();
      ++num_active_streams;
    }
  }
  if (num_active_streams > 1) {
    overhead_bytes_per_packet /= num_active_streams;
  }

  DataSize packet_overhead = DataSize::Bytes(
      overhead_bytes_per_packet + transport_overhead_bytes_per_packet_);
  DataSize max_total_packet_size = DataSize::Bytes(
      rtp_config_.max_packet_size + transport_overhead_bytes_per_packet_);
  uint32_t payload_bitrate_bps = update.target_bitrate.bps();
  // With packet feedback the target covers overhead too, so remove the
  // estimated overhead rate before computing FEC rates.
  if (has_packet_feedback_) {
    DataRate overhead_rate =
        CalculateOverheadRate(update.target_bitrate, max_total_packet_size,
                              packet_overhead, Frequency::Hertz(framerate));
    // TODO(srte): We probably should not accept 0 payload bitrate here.
    payload_bitrate_bps =
        saturated_cast<uint32_t>(payload_bitrate_bps - overhead_rate.bps());
  }

  // Get the encoder target rate. It is the estimated network rate -
  // protection overhead.
  // TODO(srte): We should multiply with 255 here.
  encoder_target_rate_bps_ = fec_controller_->UpdateFecRates(
      payload_bitrate_bps, framerate,
      saturated_cast<uint8_t>(update.packet_loss_ratio * 256),
      loss_mask_vector_, update.round_trip_time.ms());
  if (!fec_allowed_) {
    // Ignore the FEC deduction and hand the full payload rate to the encoder.
    encoder_target_rate_bps_ = payload_bitrate_bps;
    // fec_controller_->UpdateFecRates() was still called so as to allow
    // `fec_controller_` to update whatever internal state it might have,
    // since `fec_allowed_` may be toggled back on at any moment.
  }

  // Subtract post encode overhead from the encoder target. If target rate
  // is really low, cap the overhead at 50%. This also avoids the case where
  // `encoder_target_rate_bps_` is 0 due to encoder pause event while the
  // packetization rate is positive since packets are still flowing.
  uint32_t post_encode_overhead_bps = std::min(
      GetPostEncodeOverhead().bps<uint32_t>(), encoder_target_rate_bps_ / 2);
  encoder_target_rate_bps_ -= post_encode_overhead_bps;

  // The loss mask has been consumed by UpdateFecRates() above; reset it for
  // the next feedback interval.
  loss_mask_vector_.clear();

  uint32_t encoder_overhead_rate_bps = 0;
  if (has_packet_feedback_) {
    // TODO(srte): The packet size should probably be the same as in the
    // CalculateOverheadRate call above (just max_total_packet_size), it doesn't
    // make sense to use different packet rates for different overhead
    // calculations.
    DataRate encoder_overhead_rate = CalculateOverheadRate(
        DataRate::BitsPerSec(encoder_target_rate_bps_),
        max_total_packet_size - DataSize::Bytes(overhead_bytes_per_packet),
        packet_overhead, Frequency::Hertz(framerate));
    // Never let the overhead estimate push media_rate past the target.
    encoder_overhead_rate_bps = std::min(
        encoder_overhead_rate.bps<uint32_t>(),
        update.target_bitrate.bps<uint32_t>() - encoder_target_rate_bps_);
  }
  const uint32_t media_rate = encoder_target_rate_bps_ +
                              encoder_overhead_rate_bps +
                              post_encode_overhead_bps;
  RTC_DCHECK_GE(update.target_bitrate, DataRate::BitsPerSec(media_rate));
  // `protection_bitrate_bps_` includes overhead.
  // Whatever target budget is not attributed to media is protection.
  protection_bitrate_bps_ = update.target_bitrate.bps() - media_rate;
}

// Returns the most recent encoder target rate computed by OnBitrateUpdated().
// NOTE(review): read without holding `mutex_` while OnBitrateUpdated() writes
// under it — presumably callers run on the same sequence; confirm.
uint32_t RtpVideoSender::GetPayloadBitrateBps() const {
  return encoder_target_rate_bps_;
}

// Returns the most recent protection (FEC + overhead) rate computed by
// OnBitrateUpdated(). Same unlocked-read caveat as GetPayloadBitrateBps().
uint32_t RtpVideoSender::GetProtectionBitrateBps() const {
  return protection_bitrate_bps_;
}

// Looks up the RTP module owning `ssrc` and returns send info for the given
// sequence numbers; an unknown SSRC yields an empty vector.
std::vector<RtpSequenceNumberMap::Info> RtpVideoSender::GetSentRtpPacketInfos(
    uint32_t ssrc,
    ArrayView<const uint16_t> sequence_numbers) const {
  for (const auto& rtp_stream : rtp_streams_) {
    if (ssrc == rtp_stream.rtp_rtcp->SSRC()) {
      return rtp_stream.rtp_rtcp->GetSentRtpPacketInfos(sequence_numbers);
    }
  }
  return std::vector<RtpSequenceNumberMap::Info>();
}

// Pushes new FEC protection parameters to every stream and reports the summed
// video/FEC/NACK send rates across streams via the out-params. Always
// returns 0.
int RtpVideoSender::ProtectionRequest(const FecProtectionParams* delta_params,
                                      const FecProtectionParams* key_params,
                                      uint32_t* sent_video_rate_bps,
                                      uint32_t* sent_nack_rate_bps,
                                      uint32_t* sent_fec_rate_bps) {
  *sent_video_rate_bps = 0;
  *sent_nack_rate_bps = 0;
  *sent_fec_rate_bps = 0;
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.rtp_rtcp->SetFecProtectionParams(*delta_params, *key_params);

    auto send_bitrate = stream.rtp_rtcp->GetSendRates();
    *sent_video_rate_bps += send_bitrate[RtpPacketMediaType::kVideo].bps();
    *sent_fec_rate_bps +=
        send_bitrate[RtpPacketMediaType::kForwardErrorCorrection].bps();
    *sent_nack_rate_bps +=
        send_bitrate[RtpPacketMediaType::kRetransmission].bps();
  }
  return 0;
}

// Applies the given retransmission setting to every stream's video sender.
void RtpVideoSender::SetRetransmissionMode(int retransmission_mode) {
  MutexLock lock(&mutex_);
  for (const RtpStreamSender& stream : rtp_streams_) {
    stream.sender_video->SetRetransmissionSetting(retransmission_mode);
  }
}

// Toggles whether OnBitrateUpdated() may deduct an FEC budget from the
// encoder target rate.
void RtpVideoSender::SetFecAllowed(bool fec_allowed) {
  MutexLock lock(&mutex_);
  fec_allowed_ = fec_allowed;
}

// Processes transport feedback: records a loss mask for the FEC controller,
// proactively retransmits trailing losses, and prunes acked packets from the
// per-module RTP history. Continues below this chunk.
void RtpVideoSender::OnPacketFeedbackVector(
    std::vector<StreamPacketInfo> packet_feedback_vector) {
  if (fec_controller_->UseLossVectorMask()) {
    MutexLock lock(&mutex_);
    for (const StreamPacketInfo& packet : packet_feedback_vector) {
      loss_mask_vector_.push_back(!packet.received);
    }
  }

  // Map from SSRC to all acked packets for that RTP module.
  std::map<uint32_t, std::vector<uint16_t>> acked_packets_per_ssrc;
  for (const StreamPacketInfo& packet : packet_feedback_vector) {
    if (packet.received && packet.ssrc) {
      acked_packets_per_ssrc[*packet.ssrc].push_back(
          packet.rtp_sequence_number);
    }
  }

  // Map from SSRC to vector of RTP sequence numbers that are indicated as
  // lost by feedback, without being trailed by any received packets.
  std::map<uint32_t, std::vector<uint16_t>> early_loss_detected_per_ssrc;

  for (const StreamPacketInfo& packet : packet_feedback_vector) {
    // Only include new media packets, not retransmissions/padding/fec.
    if (!packet.received && packet.ssrc && !packet.is_retransmission) {
      // Last known lost packet, might not be detectable as lost by remote
      // jitter buffer.
      early_loss_detected_per_ssrc[*packet.ssrc].push_back(
          packet.rtp_sequence_number);
    } else {
      // Packet received, so any loss prior to this is already detectable.
      // NOTE(review): `packet.ssrc` is dereferenced here without a has-value
      // check (the guarded branch above checks it; this branch does not), and
      // this branch also runs for lost retransmissions — confirm feedback
      // entries on this path always carry an SSRC.
      early_loss_detected_per_ssrc.erase(*packet.ssrc);
    }
  }

  // Immediately resend packets that look lost and are not followed by any
  // received packet, since the remote side may not detect that loss itself.
  for (const auto& kv : early_loss_detected_per_ssrc) {
    const uint32_t ssrc = kv.first;
    auto it = ssrc_to_rtp_module_.find(ssrc);
    RTC_CHECK(it != ssrc_to_rtp_module_.end());
    RTPSender* rtp_sender = it->second->RtpSender();
    for (uint16_t sequence_number : kv.second) {
      rtp_sender->ReSendPacket(sequence_number);
    }
  }

  for (const auto& kv : acked_packets_per_ssrc) {
    const uint32_t ssrc = kv.first;
    auto it = ssrc_to_rtp_module_.find(ssrc);
    if (it == ssrc_to_rtp_module_.end()) {
      // No media, likely FEC or padding. Ignore since there's no RTP history to
      // clean up anyway.
      continue;
    }
    // Let the owning module drop acked packets from its RTP history.
    ArrayView<const uint16_t> rtp_sequence_numbers(kv.second);
    it->second->OnPacketsAcknowledged(rtp_sequence_numbers);
  }
}

// Forwards the encoder's resolution and temporal-layer count (plus the
// configured max packet size) to the FEC controller.
void RtpVideoSender::SetEncodingData(size_t width,
                                     size_t height,
                                     size_t num_temporal_layers) {
  fec_controller_->SetEncodingData(width, height, num_temporal_layers,
                                   rtp_config_.max_packet_size);
}

// Stores the CSRC list, truncated to at most kRtpCsrcSize entries.
void RtpVideoSender::SetCsrcs(ArrayView<const uint32_t> csrcs) {
  MutexLock lock(&mutex_);
  csrcs_.assign(csrcs.begin(),
                csrcs.begin() + std::min<size_t>(csrcs.size(), kRtpCsrcSize));
}

// Estimates the overhead bitrate: packet rate (rounded up to whole packets
// per second) times the per-packet overhead. When
// `use_frame_rate_for_overhead_` is set, the packet rate is instead derived
// from whole packets per frame times the frame rate (frame rate clamped to at
// least 1 Hz).
DataRate RtpVideoSender::CalculateOverheadRate(DataRate data_rate,
                                               DataSize packet_size,
                                               DataSize overhead_per_packet,
                                               Frequency framerate) const {
  Frequency packet_rate = data_rate / packet_size;
  if (use_frame_rate_for_overhead_) {
    framerate = std::max(framerate, Frequency::Hertz(1));
    DataSize frame_size = data_rate / framerate;
    int packets_per_frame = ceil(frame_size / packet_size);
    packet_rate = packets_per_frame * framerate;
  }
  return packet_rate.RoundUpTo(Frequency::Hertz(1)) * overhead_per_packet;
}

}  // namespace webrtc