rtp_sender_video_unittest.cc (81326B)
1 /* 2 * Copyright (c) 2019 The WebRTC project authors. All Rights Reserved. 3 * 4 * Use of this source code is governed by a BSD-style license 5 * that can be found in the LICENSE file in the root of the source 6 * tree. An additional intellectual property rights grant can be found 7 * in the file PATENTS. All contributing project authors may 8 * be found in the AUTHORS file in the root of the source tree. 9 */ 10 11 #include "modules/rtp_rtcp/source/rtp_sender_video.h" 12 13 #include <array> 14 #include <cstddef> 15 #include <cstdint> 16 #include <memory> 17 #include <optional> 18 #include <utility> 19 #include <vector> 20 21 #include "absl/memory/memory.h" 22 #include "api/array_view.h" 23 #include "api/call/transport.h" 24 #include "api/environment/environment.h" 25 #include "api/environment/environment_factory.h" 26 #include "api/field_trials_view.h" 27 #include "api/frame_transformer_factory.h" 28 #include "api/frame_transformer_interface.h" 29 #include "api/make_ref_counted.h" 30 #include "api/rtp_headers.h" 31 #include "api/scoped_refptr.h" 32 #include "api/task_queue/task_queue_base.h" 33 #include "api/task_queue/task_queue_factory.h" 34 #include "api/test/mock_frame_encryptor.h" 35 #include "api/test/mock_frame_transformer.h" 36 #include "api/transport/rtp/corruption_detection_message.h" 37 #include "api/transport/rtp/dependency_descriptor.h" 38 #include "api/units/data_rate.h" 39 #include "api/units/time_delta.h" 40 #include "api/units/timestamp.h" 41 #include "api/video/corruption_detection/frame_instrumentation_data.h" 42 #include "api/video/encoded_image.h" 43 #include "api/video/video_codec_constants.h" 44 #include "api/video/video_codec_type.h" 45 #include "api/video/video_frame_type.h" 46 #include "api/video/video_layers_allocation.h" 47 #include "api/video/video_rotation.h" 48 #include "api/video/video_timing.h" 49 #include "modules/rtp_rtcp/include/rtp_cvo.h" 50 #include "modules/rtp_rtcp/include/rtp_header_extension_map.h" 51 #include 
"modules/rtp_rtcp/include/rtp_rtcp_defines.h" 52 #include "modules/rtp_rtcp/source/corruption_detection_extension.h" 53 #include "modules/rtp_rtcp/source/rtcp_packet/nack.h" 54 #include "modules/rtp_rtcp/source/rtcp_packet/receiver_report.h" 55 #include "modules/rtp_rtcp/source/rtcp_packet/report_block.h" 56 #include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h" 57 #include "modules/rtp_rtcp/source/rtp_format_video_generic.h" 58 #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h" 59 #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h" 60 #include "modules/rtp_rtcp/source/rtp_header_extensions.h" 61 #include "modules/rtp_rtcp/source/rtp_packet.h" 62 #include "modules/rtp_rtcp/source/rtp_packet_received.h" 63 #include "modules/rtp_rtcp/source/rtp_rtcp_impl2.h" 64 #include "modules/rtp_rtcp/source/rtp_sender.h" 65 #include "modules/rtp_rtcp/source/rtp_video_layers_allocation_extension.h" 66 #include "modules/video_coding/codecs/h264/include/h264_globals.h" 67 #include "modules/video_coding/codecs/vp8/include/vp8_globals.h" 68 #include "modules/video_coding/codecs/vp9/include/vp9_globals.h" 69 #include "rtc_base/checks.h" 70 #include "rtc_base/rate_limiter.h" 71 #include "rtc_base/thread.h" 72 #include "system_wrappers/include/clock.h" 73 #include "system_wrappers/include/ntp_time.h" 74 #include "test/gmock.h" 75 #include "test/gtest.h" 76 #include "test/time_controller/simulated_time_controller.h" 77 78 namespace webrtc { 79 80 namespace { 81 82 using ::testing::_; 83 using ::testing::ContainerEq; 84 using ::testing::ElementsAre; 85 using ::testing::ElementsAreArray; 86 using ::testing::IsEmpty; 87 using ::testing::NiceMock; 88 using ::testing::Not; 89 using ::testing::ReturnArg; 90 using ::testing::SaveArg; 91 using ::testing::SizeIs; 92 using ::testing::WithArgs; 93 94 enum : int { // The first valid value is 1. 
95 kAbsoluteSendTimeExtensionId = 1, 96 kGenericDescriptorId, 97 kDependencyDescriptorId, 98 kTransmissionTimeOffsetExtensionId, 99 kTransportSequenceNumberExtensionId, 100 kVideoRotationExtensionId, 101 kVideoTimingExtensionId, 102 kAbsoluteCaptureTimeExtensionId, 103 kPlayoutDelayExtensionId, 104 kVideoLayersAllocationExtensionId, 105 kCorruptionDetectionExtensionId, 106 }; 107 108 constexpr int kPayloadType = 100; 109 constexpr VideoCodecType kType = VideoCodecType::kVideoCodecGeneric; 110 constexpr uint32_t kTimestamp = 10; 111 constexpr uint16_t kSeqNum = 33; 112 constexpr uint32_t kSsrc = 725242; 113 constexpr uint32_t kRtxSsrc = 912364; 114 constexpr int kMaxPacketLength = 1500; 115 constexpr Timestamp kStartTime = Timestamp::Millis(123456789); 116 constexpr TimeDelta kDefaultExpectedRetransmissionTime = TimeDelta::Millis(125); 117 118 class LoopbackTransportTest : public Transport { 119 public: 120 LoopbackTransportTest() { 121 receivers_extensions_.Register<TransmissionOffset>( 122 kTransmissionTimeOffsetExtensionId); 123 receivers_extensions_.Register<AbsoluteSendTime>( 124 kAbsoluteSendTimeExtensionId); 125 receivers_extensions_.Register<TransportSequenceNumber>( 126 kTransportSequenceNumberExtensionId); 127 receivers_extensions_.Register<VideoOrientation>(kVideoRotationExtensionId); 128 receivers_extensions_.Register<VideoTimingExtension>( 129 kVideoTimingExtensionId); 130 receivers_extensions_.Register<RtpGenericFrameDescriptorExtension00>( 131 kGenericDescriptorId); 132 receivers_extensions_.Register<RtpDependencyDescriptorExtension>( 133 kDependencyDescriptorId); 134 receivers_extensions_.Register<AbsoluteCaptureTimeExtension>( 135 kAbsoluteCaptureTimeExtensionId); 136 receivers_extensions_.Register<PlayoutDelayLimits>( 137 kPlayoutDelayExtensionId); 138 receivers_extensions_.Register<RtpVideoLayersAllocationExtension>( 139 kVideoLayersAllocationExtensionId); 140 receivers_extensions_.Register<CorruptionDetectionExtension>( 141 
kCorruptionDetectionExtensionId); 142 } 143 144 bool SendRtp(ArrayView<const uint8_t> data, 145 const PacketOptions& /* options */) override { 146 sent_packets_.push_back(RtpPacketReceived(&receivers_extensions_)); 147 EXPECT_TRUE(sent_packets_.back().Parse(data)); 148 return true; 149 } 150 bool SendRtcp(ArrayView<const uint8_t> /* data */, 151 const PacketOptions& /* options */) override { 152 return false; 153 } 154 const RtpPacketReceived& last_sent_packet() { return sent_packets_.back(); } 155 int packets_sent() { return sent_packets_.size(); } 156 const std::vector<RtpPacketReceived>& sent_packets() const { 157 return sent_packets_; 158 } 159 160 private: 161 RtpHeaderExtensionMap receivers_extensions_; 162 std::vector<RtpPacketReceived> sent_packets_; 163 }; 164 165 class TestRtpSenderVideo : public RTPSenderVideo { 166 public: 167 TestRtpSenderVideo(Clock* clock, 168 RTPSender* rtp_sender, 169 const FieldTrialsView& field_trials, 170 bool raw_packetization) 171 : RTPSenderVideo([&] { 172 Config config; 173 config.clock = clock; 174 config.rtp_sender = rtp_sender; 175 config.field_trials = &field_trials; 176 config.raw_packetization = raw_packetization; 177 return config; 178 }()) {} 179 ~TestRtpSenderVideo() override {} 180 181 bool AllowRetransmission(const RTPVideoHeader& header, 182 int32_t retransmission_settings, 183 TimeDelta expected_retransmission_time) { 184 return RTPSenderVideo::AllowRetransmission(GetTemporalId(header), 185 retransmission_settings, 186 expected_retransmission_time); 187 } 188 }; 189 190 class RtpSenderVideoTest : public ::testing::Test { 191 public: 192 explicit RtpSenderVideoTest(bool raw_packetization = false) 193 : fake_clock_(kStartTime), 194 env_(CreateEnvironment(&fake_clock_)), 195 retransmission_rate_limiter_(&fake_clock_, 1000), 196 rtp_module_( 197 env_, 198 {.outgoing_transport = &transport_, 199 .retransmission_rate_limiter = &retransmission_rate_limiter_, 200 .local_media_ssrc = kSsrc, 201 .rtx_send_ssrc = kRtxSsrc, 
202 .rid = "rid"}), 203 rtp_sender_video_( 204 std::make_unique<TestRtpSenderVideo>(&fake_clock_, 205 rtp_module_.RtpSender(), 206 env_.field_trials(), 207 raw_packetization)) { 208 rtp_module_.SetSequenceNumber(kSeqNum); 209 rtp_module_.SetStartTimestamp(0); 210 } 211 212 void UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( 213 int version); 214 215 protected: 216 AutoThread main_thread_; 217 SimulatedClock fake_clock_; 218 const Environment env_; 219 LoopbackTransportTest transport_; 220 RateLimiter retransmission_rate_limiter_; 221 ModuleRtpRtcpImpl2 rtp_module_; 222 std::unique_ptr<TestRtpSenderVideo> rtp_sender_video_; 223 }; 224 225 TEST_F(RtpSenderVideoTest, KeyFrameHasCVO) { 226 uint8_t kFrame[kMaxPacketLength]; 227 rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(), 228 kVideoRotationExtensionId); 229 230 RTPVideoHeader hdr; 231 hdr.rotation = kVideoRotation_0; 232 hdr.frame_type = VideoFrameType::kVideoFrameKey; 233 rtp_sender_video_->SendVideo( 234 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 235 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 236 237 VideoRotation rotation; 238 EXPECT_TRUE( 239 transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation)); 240 EXPECT_EQ(kVideoRotation_0, rotation); 241 } 242 243 TEST_F(RtpSenderVideoTest, TimingFrameHasPacketizationTimstampSet) { 244 uint8_t kFrame[kMaxPacketLength]; 245 const int64_t kPacketizationTimeMs = 100; 246 const int64_t kEncodeStartDeltaMs = 10; 247 const int64_t kEncodeFinishDeltaMs = 50; 248 rtp_module_.RegisterRtpHeaderExtension(VideoTimingExtension::Uri(), 249 kVideoTimingExtensionId); 250 251 const Timestamp kCaptureTimestamp = fake_clock_.CurrentTime(); 252 253 RTPVideoHeader hdr; 254 hdr.video_timing.flags = VideoSendTiming::kTriggeredByTimer; 255 hdr.video_timing.encode_start_delta_ms = kEncodeStartDeltaMs; 256 hdr.video_timing.encode_finish_delta_ms = kEncodeFinishDeltaMs; 257 258 
  // Advance the simulated clock between capture and send so a packetization
  // delay is visible to the timing extension.
  fake_clock_.AdvanceTimeMilliseconds(kPacketizationTimeMs);
  hdr.frame_type = VideoFrameType::kVideoFrameKey;
  rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp,
                               kCaptureTimestamp, kFrame, sizeof(kFrame), hdr,
                               kDefaultExpectedRetransmissionTime, {});
  VideoSendTiming timing;
  EXPECT_TRUE(transport_.last_sent_packet().GetExtension<VideoTimingExtension>(
      &timing));
  EXPECT_EQ(kPacketizationTimeMs, timing.packetization_finish_delta_ms);
  EXPECT_EQ(kEncodeStartDeltaMs, timing.encode_start_delta_ms);
  EXPECT_EQ(kEncodeFinishDeltaMs, timing.encode_finish_delta_ms);
}

TEST_F(RtpSenderVideoTest,
       WriteCorruptionExtensionIfHeaderContainsFrameInstrumentationData) {
  uint8_t kFrame[kMaxPacketLength];
  rtp_module_.RegisterRtpHeaderExtension(CorruptionDetectionExtension::Uri(),
                                         kCorruptionDetectionExtensionId);
  RTPVideoHeader hdr;
  hdr.frame_type = VideoFrameType::kVideoFrameKey;
  FrameInstrumentationData data;
  data.SetSequenceIndex(130);  // 128 + 2
  data.SetStdDev(2.0);
  data.SetLumaErrorThreshold(3);
  data.SetChromaErrorThreshold(2);
  data.SetSampleValues({12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0});
  hdr.frame_instrumentation_data = data;
  CorruptionDetectionMessage message;

  rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

  // Only written on last packet.
  for (const RtpPacketReceived& packet : transport_.sent_packets()) {
    if (&packet == &transport_.last_sent_packet()) {
      EXPECT_TRUE(transport_.last_sent_packet()
                      .GetExtension<CorruptionDetectionExtension>(&message));
    } else {
      EXPECT_FALSE(packet.HasExtension<CorruptionDetectionExtension>());
    }
  }
  // Sequence index 130 == 128 + 2: only the "+ 2" part is carried here, and
  // it is not interpreted as most significant bits.
  EXPECT_EQ(message.sequence_index(), 2);
  EXPECT_FALSE(message.interpret_sequence_index_as_most_significant_bits());
  EXPECT_NEAR(message.std_dev(), 2.0392156862745097, 0.041);  // ~2%
  EXPECT_EQ(message.luma_error_threshold(), 3);
  EXPECT_EQ(message.chroma_error_threshold(), 2);
  EXPECT_THAT(message.sample_values(),
              ElementsAre(12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0, 12.0));
}

TEST_F(RtpSenderVideoTest,
       WriteCorruptionExtensionIfHeaderContainsFrameInstrumentationSyncData) {
  uint8_t kFrame[kMaxPacketLength];
  rtp_module_.RegisterRtpHeaderExtension(CorruptionDetectionExtension::Uri(),
                                         kCorruptionDetectionExtensionId);
  RTPVideoHeader hdr;
  hdr.frame_type = VideoFrameType::kVideoFrameKey;
  // Send data with sequence index divisible by 2^7 and no sample values in
  // order create a sync message with upper bits set.
  hdr.frame_instrumentation_data.emplace().SetSequenceIndex(128);
  CorruptionDetectionMessage message;

  rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {});

  // Only written on last packet.
  for (const RtpPacketReceived& packet : transport_.sent_packets()) {
    if (&packet == &transport_.last_sent_packet()) {
      EXPECT_TRUE(transport_.last_sent_packet()
                      .GetExtension<CorruptionDetectionExtension>(&message));
    } else {
      EXPECT_FALSE(packet.HasExtension<CorruptionDetectionExtension>());
    }
  }
  // 128 == 1 << 7: the message carries the upper bits (value 1) and flags
  // them as most significant bits.
  EXPECT_EQ(message.sequence_index(), 1);
  EXPECT_TRUE(message.interpret_sequence_index_as_most_significant_bits());
  EXPECT_DOUBLE_EQ(message.std_dev(), 0.0);
  EXPECT_EQ(message.luma_error_threshold(), 0);
  EXPECT_EQ(message.chroma_error_threshold(), 0);
  EXPECT_THAT(message.sample_values(), IsEmpty());
}

// A delta frame carries the CVO extension when the rotation changed from the
// previous frame, even if the new rotation is zero.
TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenChanged) {
  uint8_t kFrame[kMaxPacketLength];
  rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(),
                                         kVideoRotationExtensionId);

  RTPVideoHeader hdr;
  hdr.rotation = kVideoRotation_90;
  hdr.frame_type = VideoFrameType::kVideoFrameKey;
  EXPECT_TRUE(rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

  hdr.rotation = kVideoRotation_0;
  hdr.frame_type = VideoFrameType::kVideoFrameDelta;
  EXPECT_TRUE(rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

  VideoRotation rotation;
  EXPECT_TRUE(
      transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation));
  EXPECT_EQ(kVideoRotation_0, rotation);
}

// A delta frame carries the CVO extension when the (unchanged) rotation is
// non-zero.
TEST_F(RtpSenderVideoTest, DeltaFrameHasCVOWhenNonZero) {
  uint8_t kFrame[kMaxPacketLength];
  rtp_module_.RegisterRtpHeaderExtension(VideoOrientation::Uri(),
                                         kVideoRotationExtensionId);

  RTPVideoHeader hdr;
  hdr.rotation = kVideoRotation_90;
  hdr.frame_type = VideoFrameType::kVideoFrameKey;
  EXPECT_TRUE(rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

  hdr.frame_type = VideoFrameType::kVideoFrameDelta;
  EXPECT_TRUE(rtp_sender_video_->SendVideo(
      kPayloadType, kType, kTimestamp + 1, fake_clock_.CurrentTime(), kFrame,
      sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}));

  VideoRotation rotation;
  EXPECT_TRUE(
      transport_.last_sent_packet().GetExtension<VideoOrientation>(&rotation));
  EXPECT_EQ(kVideoRotation_90, rotation);
}

// Make sure rotation is parsed correctly when the Camera (C) and Flip (F) bits
// are set in the CVO byte.
TEST_F(RtpSenderVideoTest, SendVideoWithCameraAndFlipCVO) {
  // Test extracting rotation when Camera (C) and Flip (F) bits are zero.
  EXPECT_EQ(kVideoRotation_0, ConvertCVOByteToVideoRotation(0));
  EXPECT_EQ(kVideoRotation_90, ConvertCVOByteToVideoRotation(1));
  EXPECT_EQ(kVideoRotation_180, ConvertCVOByteToVideoRotation(2));
  EXPECT_EQ(kVideoRotation_270, ConvertCVOByteToVideoRotation(3));
  // Test extracting rotation when Camera (C) and Flip (F) bits are set.
  const int flip_bit = 1 << 2;
  const int camera_bit = 1 << 3;
  EXPECT_EQ(kVideoRotation_0,
            ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 0));
  EXPECT_EQ(kVideoRotation_90,
            ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 1));
  EXPECT_EQ(kVideoRotation_180,
            ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 2));
  EXPECT_EQ(kVideoRotation_270,
            ConvertCVOByteToVideoRotation(flip_bit | camera_bit | 3));
}

// For a generic-codec header, every setting except kRetransmitOff allows
// retransmission.
TEST_F(RtpSenderVideoTest, RetransmissionTypesGeneric) {
  RTPVideoHeader header;
  header.codec = kVideoCodecGeneric;

  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kConditionallyRetransmitHigherLayers,
      kDefaultExpectedRetransmissionTime));
}

TEST_F(RtpSenderVideoTest, RetransmissionTypesH264) {
  RTPVideoHeader header;
  header.video_type_header.emplace<RTPVideoHeaderH264>().packetization_mode =
      H264PacketizationMode::NonInterleaved;
  header.codec = kVideoCodecH264;

  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kConditionallyRetransmitHigherLayers,
      kDefaultExpectedRetransmissionTime));
}

// VP8 temporal layer 0 frames are retransmitted only when a base-layer
// setting is included.
TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8BaseLayer) {
  RTPVideoHeader header;
  header.codec = kVideoCodecVP8;
  auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
  vp8_header.temporalIdx = 0;

  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitHigherLayers | kRetransmitBaseLayer,
      kDefaultExpectedRetransmissionTime));
  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
      header, kConditionallyRetransmitHigherLayers,
      kDefaultExpectedRetransmissionTime));
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
      header, kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers,
      kDefaultExpectedRetransmissionTime));
}

// VP8 frames on temporal layers >= 1 require a higher-layer setting.
TEST_F(RtpSenderVideoTest, RetransmissionTypesVP8HigherLayers) {
  RTPVideoHeader header;
  header.codec = kVideoCodecVP8;

  auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
  for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) {
    vp8_header.temporalIdx = tid;

    EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
    EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
    EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
    EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitHigherLayers | kRetransmitBaseLayer,
        kDefaultExpectedRetransmissionTime));
  }
}

// Same policy as VP8 higher layers, exercised via the VP9 header.
TEST_F(RtpSenderVideoTest, RetransmissionTypesVP9) {
  RTPVideoHeader header;
  header.codec = kVideoCodecVP9;

  auto& vp9_header = header.video_type_header.emplace<RTPVideoHeaderVP9>();
  for (int tid = 1; tid <= kMaxTemporalStreams; ++tid) {
    vp9_header.temporal_idx = tid;

    EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitOff, kDefaultExpectedRetransmissionTime));
    EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitBaseLayer, kDefaultExpectedRetransmissionTime));
    EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitHigherLayers, kDefaultExpectedRetransmissionTime));
    EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(
        header, kRetransmitHigherLayers | kRetransmitBaseLayer,
        kDefaultExpectedRetransmissionTime));
  }
}

TEST_F(RtpSenderVideoTest, ConditionalRetransmit) {
  constexpr TimeDelta kFrameInterval = TimeDelta::Millis(33);
  constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2;
  const uint8_t kSettings =
      kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;

  // Insert VP8 frames for all temporal layers, but stop before the final index.
  RTPVideoHeader header;
  header.codec = kVideoCodecVP8;

  // Fill averaging window to prevent rounding errors.
  constexpr int kNumRepetitions =
      RTPSenderVideo::kTLRateWindowSize / kFrameInterval;
  constexpr std::array kPattern = {0, 2, 1, 2};
  auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
  for (size_t i = 0; i < kPattern.size() * kNumRepetitions; ++i) {
    vp8_header.temporalIdx = kPattern[i % kPattern.size()];
    rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt);
    fake_clock_.AdvanceTime(kFrameInterval);
  }

  // Since we're at the start of the pattern, the next expected frame in TL0 is
  // right now. We will wait at most one expected retransmission time before
  // acknowledging that it did not arrive, which means this frame and the next
  // will not be retransmitted.
  vp8_header.temporalIdx = 1;
  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
  fake_clock_.AdvanceTime(kFrameInterval);
  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
  fake_clock_.AdvanceTime(kFrameInterval);

  // The TL0 frame did not arrive. So allow retransmission.
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
  fake_clock_.AdvanceTime(kFrameInterval);

  // Insert a frame for TL2. We just had frame in TL1, so the next one there is
  // in three frames away. TL0 is still too far in the past. So, allow
  // retransmission.
  vp8_header.temporalIdx = 2;
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
  fake_clock_.AdvanceTime(kFrameInterval);

  // Another TL2, next in TL1 is two frames away. Allow again.
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
  fake_clock_.AdvanceTime(kFrameInterval);

  // Yet another TL2, next in TL1 is now only one frame away, so don't store
  // for retransmission.
  EXPECT_FALSE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
}

TEST_F(RtpSenderVideoTest, ConditionalRetransmitLimit) {
  constexpr TimeDelta kFrameInterval = TimeDelta::Millis(200);
  constexpr TimeDelta kRtt = (kFrameInterval * 3) / 2;
  const int32_t kSettings =
      kRetransmitBaseLayer | kConditionallyRetransmitHigherLayers;

  // Insert VP8 frames for all temporal layers, but stop before the final index.
  RTPVideoHeader header;
  header.codec = kVideoCodecVP8;

  // Fill averaging window to prevent rounding errors.
  constexpr int kNumRepetitions =
      RTPSenderVideo::kTLRateWindowSize / kFrameInterval;
  constexpr std::array kPattern = {0, 2, 2, 2};
  auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
  for (size_t i = 0; i < kPattern.size() * kNumRepetitions; ++i) {
    vp8_header.temporalIdx = kPattern[i % kPattern.size()];

    rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt);
    fake_clock_.AdvanceTime(kFrameInterval);
  }

  // Since we're at the start of the pattern, the next expected frame will be
  // right now in TL0. Put it in TL1 instead. Regular rules would dictate that
  // we don't store for retransmission because we expect a frame in a lower
  // layer, but that last frame in TL1 was a long time ago in absolute terms,
  // so allow retransmission anyway.
  vp8_header.temporalIdx = 1;
  EXPECT_TRUE(rtp_sender_video_->AllowRetransmission(header, kSettings, kRtt));
}

TEST_F(RtpSenderVideoTest,
       ReservesEnoughSpaceForRtxPacketWhenMidAndRsidAreRegistered) {
  constexpr int kMediaPayloadId = 100;
  constexpr int kRtxPayloadId = 101;
  constexpr size_t kMaxPacketSize = 1'000;

  rtp_module_.SetMaxRtpPacketSize(kMaxPacketSize);
  rtp_module_.RegisterRtpHeaderExtension(RtpMid::Uri(), 1);
  rtp_module_.RegisterRtpHeaderExtension(RtpStreamId::Uri(), 2);
  rtp_module_.RegisterRtpHeaderExtension(RepairedRtpStreamId::Uri(), 3);
  rtp_module_.RegisterRtpHeaderExtension(AbsoluteSendTime::Uri(), 4);
  rtp_module_.SetMid("long_mid");
  rtp_module_.SetRtxSendPayloadType(kRtxPayloadId, kMediaPayloadId);
  rtp_module_.SetStorePacketsStatus(/*enable=*/true, 10);
  rtp_module_.SetRtxSendStatus(kRtxRetransmitted);

  RTPVideoHeader header;
  header.codec = kVideoCodecVP8;
  header.frame_type = VideoFrameType::kVideoFrameDelta;
  auto& vp8_header = header.video_type_header.emplace<RTPVideoHeaderVP8>();
  vp8_header.temporalIdx = 0;

  uint8_t kPayload[kMaxPacketSize] = {};
  EXPECT_TRUE(rtp_sender_video_->SendVideo(
      kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0,
      /*capture_time=*/Timestamp::Seconds(1), kPayload, sizeof(kPayload),
      header,
      /*expected_retransmission_time=*/TimeDelta::PlusInfinity(),
      /*csrcs=*/{}));
  ASSERT_THAT(transport_.sent_packets(), Not(IsEmpty()));
  // Ack media ssrc, but not rtx ssrc.
  rtcp::ReceiverReport rr;
  rtcp::ReportBlock rb;
  rb.SetMediaSsrc(kSsrc);
  rb.SetExtHighestSeqNum(transport_.last_sent_packet().SequenceNumber());
  rr.AddReportBlock(rb);
  rtp_module_.IncomingRtcpPacket(rr.Build());

  // Test for various frame size close to `kMaxPacketSize` to catch edge cases
  // when rtx packet barely fit.
  for (size_t frame_size = 800; frame_size < kMaxPacketSize; ++frame_size) {
    SCOPED_TRACE(frame_size);
    ArrayView<const uint8_t> payload(kPayload, frame_size);

    EXPECT_TRUE(rtp_sender_video_->SendVideo(
        kMediaPayloadId, /*codec_type=*/kVideoCodecVP8, /*rtp_timestamp=*/0,
        /*capture_time=*/Timestamp::Seconds(1), payload, frame_size, header,
        /*expected_retransmission_time=*/TimeDelta::Seconds(1), /*csrcs=*/{}));
    const RtpPacketReceived& media_packet = transport_.last_sent_packet();
    EXPECT_EQ(media_packet.Ssrc(), kSsrc);

    // NACK the media packet; the module should answer with an RTX packet
    // that still fits within the configured max packet size.
    rtcp::Nack nack;
    nack.SetMediaSsrc(kSsrc);
    nack.SetPacketIds({media_packet.SequenceNumber()});
    rtp_module_.IncomingRtcpPacket(nack.Build());

    const RtpPacketReceived& rtx_packet = transport_.last_sent_packet();
    EXPECT_EQ(rtx_packet.Ssrc(), kRtxSsrc);
    EXPECT_LE(rtx_packet.size(), kMaxPacketSize);
  }
}

TEST_F(RtpSenderVideoTest, SendsDependencyDescriptorWhenVideoStructureIsSet) {
  const int64_t kFrameId = 100000;
  uint8_t kFrame[100];
  rtp_module_.RegisterRtpHeaderExtension(
      RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId);
  FrameDependencyStructure video_structure;
video_structure.num_decode_targets = 2; 656 video_structure.templates = { 657 FrameDependencyTemplate().S(0).T(0).Dtis("SS"), 658 FrameDependencyTemplate().S(1).T(0).Dtis("-S"), 659 FrameDependencyTemplate().S(1).T(1).Dtis("-D"), 660 }; 661 rtp_sender_video_->SetVideoStructure(&video_structure); 662 663 // Send key frame. 664 RTPVideoHeader hdr; 665 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 666 generic.frame_id = kFrameId; 667 generic.temporal_index = 0; 668 generic.spatial_index = 0; 669 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 670 DecodeTargetIndication::kSwitch}; 671 hdr.frame_type = VideoFrameType::kVideoFrameKey; 672 rtp_sender_video_->SendVideo( 673 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 674 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 675 676 ASSERT_EQ(transport_.packets_sent(), 1); 677 DependencyDescriptor descriptor_key; 678 ASSERT_TRUE(transport_.last_sent_packet() 679 .GetExtension<RtpDependencyDescriptorExtension>( 680 nullptr, &descriptor_key)); 681 ASSERT_TRUE(descriptor_key.attached_structure); 682 EXPECT_EQ(descriptor_key.attached_structure->num_decode_targets, 2); 683 EXPECT_THAT(descriptor_key.attached_structure->templates, SizeIs(3)); 684 EXPECT_EQ(descriptor_key.frame_number, kFrameId & 0xFFFF); 685 EXPECT_EQ(descriptor_key.frame_dependencies.spatial_id, 0); 686 EXPECT_EQ(descriptor_key.frame_dependencies.temporal_id, 0); 687 EXPECT_EQ(descriptor_key.frame_dependencies.decode_target_indications, 688 generic.decode_target_indications); 689 EXPECT_THAT(descriptor_key.frame_dependencies.frame_diffs, IsEmpty()); 690 691 // Send delta frame. 
692 generic.frame_id = kFrameId + 1; 693 generic.temporal_index = 1; 694 generic.spatial_index = 1; 695 generic.dependencies = {kFrameId, kFrameId - 500}; 696 generic.decode_target_indications = {DecodeTargetIndication::kNotPresent, 697 DecodeTargetIndication::kRequired}; 698 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 699 rtp_sender_video_->SendVideo( 700 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 701 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 702 703 EXPECT_EQ(transport_.packets_sent(), 2); 704 DependencyDescriptor descriptor_delta; 705 ASSERT_TRUE( 706 transport_.last_sent_packet() 707 .GetExtension<RtpDependencyDescriptorExtension>( 708 descriptor_key.attached_structure.get(), &descriptor_delta)); 709 EXPECT_EQ(descriptor_delta.attached_structure, nullptr); 710 EXPECT_EQ(descriptor_delta.frame_number, (kFrameId + 1) & 0xFFFF); 711 EXPECT_EQ(descriptor_delta.frame_dependencies.spatial_id, 1); 712 EXPECT_EQ(descriptor_delta.frame_dependencies.temporal_id, 1); 713 EXPECT_EQ(descriptor_delta.frame_dependencies.decode_target_indications, 714 generic.decode_target_indications); 715 EXPECT_THAT(descriptor_delta.frame_dependencies.frame_diffs, 716 ElementsAre(1, 501)); 717 } 718 719 TEST_F(RtpSenderVideoTest, 720 SkipsDependencyDescriptorOnDeltaFrameWhenFailedToAttachToKeyFrame) { 721 const int64_t kFrameId = 100000; 722 uint8_t kFrame[100]; 723 rtp_module_.RegisterRtpHeaderExtension( 724 RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); 725 rtp_module_.SetExtmapAllowMixed(false); 726 FrameDependencyStructure video_structure; 727 video_structure.num_decode_targets = 2; 728 // Use many templates so that key dependency descriptor would be too large 729 // to fit into 16 bytes (max size of one byte header rtp header extension) 730 video_structure.templates = { 731 FrameDependencyTemplate().S(0).T(0).Dtis("SS"), 732 FrameDependencyTemplate().S(1).T(0).Dtis("-S"), 733 
FrameDependencyTemplate().S(1).T(1).Dtis("-D").FrameDiffs({1, 2, 3, 4}), 734 FrameDependencyTemplate().S(1).T(1).Dtis("-D").FrameDiffs({2, 3, 4, 5}), 735 FrameDependencyTemplate().S(1).T(1).Dtis("-D").FrameDiffs({3, 4, 5, 6}), 736 FrameDependencyTemplate().S(1).T(1).Dtis("-D").FrameDiffs({4, 5, 6, 7}), 737 }; 738 rtp_sender_video_->SetVideoStructure(&video_structure); 739 740 // Send key frame. 741 RTPVideoHeader hdr; 742 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 743 generic.frame_id = kFrameId; 744 generic.temporal_index = 0; 745 generic.spatial_index = 0; 746 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 747 DecodeTargetIndication::kSwitch}; 748 hdr.frame_type = VideoFrameType::kVideoFrameKey; 749 rtp_sender_video_->SendVideo( 750 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 751 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 752 753 ASSERT_EQ(transport_.packets_sent(), 1); 754 DependencyDescriptor descriptor_key; 755 ASSERT_FALSE(transport_.last_sent_packet() 756 .HasExtension<RtpDependencyDescriptorExtension>()); 757 758 // Send delta frame. 
759 generic.frame_id = kFrameId + 1; 760 generic.temporal_index = 1; 761 generic.spatial_index = 1; 762 generic.dependencies = {kFrameId, kFrameId - 500}; 763 generic.decode_target_indications = {DecodeTargetIndication::kNotPresent, 764 DecodeTargetIndication::kRequired}; 765 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 766 rtp_sender_video_->SendVideo( 767 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 768 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 769 770 EXPECT_EQ(transport_.packets_sent(), 2); 771 EXPECT_FALSE(transport_.last_sent_packet() 772 .HasExtension<RtpDependencyDescriptorExtension>()); 773 } 774 775 TEST_F(RtpSenderVideoTest, PropagatesChainDiffsIntoDependencyDescriptor) { 776 const int64_t kFrameId = 100000; 777 uint8_t kFrame[100]; 778 rtp_module_.RegisterRtpHeaderExtension( 779 RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); 780 FrameDependencyStructure video_structure; 781 video_structure.num_decode_targets = 2; 782 video_structure.num_chains = 1; 783 video_structure.decode_target_protected_by_chain = {0, 0}; 784 video_structure.templates = { 785 FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}), 786 }; 787 rtp_sender_video_->SetVideoStructure(&video_structure); 788 789 RTPVideoHeader hdr; 790 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 791 generic.frame_id = kFrameId; 792 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 793 DecodeTargetIndication::kSwitch}; 794 generic.chain_diffs = {2}; 795 hdr.frame_type = VideoFrameType::kVideoFrameKey; 796 rtp_sender_video_->SendVideo( 797 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 798 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 799 800 ASSERT_EQ(transport_.packets_sent(), 1); 801 DependencyDescriptor descriptor_key; 802 ASSERT_TRUE(transport_.last_sent_packet() 803 .GetExtension<RtpDependencyDescriptorExtension>( 804 nullptr, 
&descriptor_key)); 805 EXPECT_THAT(descriptor_key.frame_dependencies.chain_diffs, 806 ContainerEq(generic.chain_diffs)); 807 } 808 809 TEST_F(RtpSenderVideoTest, 810 PropagatesActiveDecodeTargetsIntoDependencyDescriptor) { 811 const int64_t kFrameId = 100000; 812 uint8_t kFrame[100]; 813 rtp_module_.RegisterRtpHeaderExtension( 814 RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); 815 FrameDependencyStructure video_structure; 816 video_structure.num_decode_targets = 2; 817 video_structure.num_chains = 1; 818 video_structure.decode_target_protected_by_chain = {0, 0}; 819 video_structure.templates = { 820 FrameDependencyTemplate().S(0).T(0).Dtis("SS").ChainDiffs({1}), 821 }; 822 rtp_sender_video_->SetVideoStructure(&video_structure); 823 824 RTPVideoHeader hdr; 825 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 826 generic.frame_id = kFrameId; 827 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 828 DecodeTargetIndication::kSwitch}; 829 generic.active_decode_targets = 0b01; 830 generic.chain_diffs = {1}; 831 hdr.frame_type = VideoFrameType::kVideoFrameKey; 832 rtp_sender_video_->SendVideo( 833 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 834 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 835 836 ASSERT_EQ(transport_.packets_sent(), 1); 837 DependencyDescriptor descriptor_key; 838 ASSERT_TRUE(transport_.last_sent_packet() 839 .GetExtension<RtpDependencyDescriptorExtension>( 840 nullptr, &descriptor_key)); 841 EXPECT_EQ(descriptor_key.active_decode_targets_bitmask, 0b01u); 842 } 843 844 TEST_F(RtpSenderVideoTest, 845 SetDiffentVideoStructureAvoidsCollisionWithThePreviousStructure) { 846 const int64_t kFrameId = 100000; 847 uint8_t kFrame[100]; 848 rtp_module_.RegisterRtpHeaderExtension( 849 RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); 850 FrameDependencyStructure video_structure1; 851 video_structure1.num_decode_targets = 2; 852 
video_structure1.templates = { 853 FrameDependencyTemplate().S(0).T(0).Dtis("SS"), 854 FrameDependencyTemplate().S(0).T(1).Dtis("D-"), 855 }; 856 FrameDependencyStructure video_structure2; 857 video_structure2.num_decode_targets = 2; 858 video_structure2.templates = { 859 FrameDependencyTemplate().S(0).T(0).Dtis("SS"), 860 FrameDependencyTemplate().S(0).T(1).Dtis("R-"), 861 }; 862 863 // Send 1st key frame. 864 RTPVideoHeader hdr; 865 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 866 generic.frame_id = kFrameId; 867 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 868 DecodeTargetIndication::kSwitch}; 869 hdr.frame_type = VideoFrameType::kVideoFrameKey; 870 rtp_sender_video_->SetVideoStructure(&video_structure1); 871 rtp_sender_video_->SendVideo( 872 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 873 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 874 // Parse 1st extension. 875 ASSERT_EQ(transport_.packets_sent(), 1); 876 DependencyDescriptor descriptor_key1; 877 ASSERT_TRUE(transport_.last_sent_packet() 878 .GetExtension<RtpDependencyDescriptorExtension>( 879 nullptr, &descriptor_key1)); 880 ASSERT_TRUE(descriptor_key1.attached_structure); 881 882 // Send the delta frame. 883 generic.frame_id = kFrameId + 1; 884 generic.temporal_index = 1; 885 generic.decode_target_indications = {DecodeTargetIndication::kDiscardable, 886 DecodeTargetIndication::kNotPresent}; 887 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 888 rtp_sender_video_->SendVideo( 889 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 890 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 891 892 ASSERT_EQ(transport_.packets_sent(), 2); 893 RtpPacket delta_packet = transport_.last_sent_packet(); 894 895 // Send 2nd key frame. 
896 generic.frame_id = kFrameId + 2; 897 generic.decode_target_indications = {DecodeTargetIndication::kSwitch, 898 DecodeTargetIndication::kSwitch}; 899 hdr.frame_type = VideoFrameType::kVideoFrameKey; 900 rtp_sender_video_->SetVideoStructure(&video_structure2); 901 rtp_sender_video_->SendVideo( 902 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 903 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 904 // Parse the 2nd key frame. 905 ASSERT_EQ(transport_.packets_sent(), 3); 906 DependencyDescriptor descriptor_key2; 907 ASSERT_TRUE(transport_.last_sent_packet() 908 .GetExtension<RtpDependencyDescriptorExtension>( 909 nullptr, &descriptor_key2)); 910 ASSERT_TRUE(descriptor_key2.attached_structure); 911 912 // Try to parse the 1st delta frame. It should be parseable using the 913 // structure from the 1st key frame, but not using the structure from the 2nd 914 // key frame. 914 DependencyDescriptor descriptor_delta; 915 EXPECT_TRUE(delta_packet.GetExtension<RtpDependencyDescriptorExtension>( 916 descriptor_key1.attached_structure.get(), &descriptor_delta)); 917 EXPECT_FALSE(delta_packet.GetExtension<RtpDependencyDescriptorExtension>( 918 descriptor_key2.attached_structure.get(), &descriptor_delta)); 919 } 920 921 TEST_F(RtpSenderVideoTest, 922 AuthenticateVideoHeaderWhenDependencyDescriptorExtensionIsUsed) { 923 static constexpr size_t kFrameSize = 100; 924 uint8_t kFrame[kFrameSize] = {1, 2, 3, 4}; 925 926 rtp_module_.RegisterRtpHeaderExtension( 927 RtpDependencyDescriptorExtension::Uri(), kDependencyDescriptorId); 928 auto encryptor = make_ref_counted<NiceMock<MockFrameEncryptor>>(); 929 ON_CALL(*encryptor, GetMaxCiphertextByteSize).WillByDefault(ReturnArg<1>()); 930 ON_CALL(*encryptor, Encrypt) 931 .WillByDefault(WithArgs<3, 5>( 932 [](ArrayView<const uint8_t> frame, size_t* bytes_written) { 933 *bytes_written = frame.size(); 934 return 0; 935 })); 936 RTPSenderVideo::Config config; 937 config.clock = &fake_clock_; 938 config.rtp_sender =
rtp_module_.RtpSender(); 939 config.field_trials = &env_.field_trials(); 940 config.frame_encryptor = encryptor.get(); 941 RTPSenderVideo rtp_sender_video(config); 942 943 FrameDependencyStructure video_structure; 944 video_structure.num_decode_targets = 1; 945 video_structure.templates = {FrameDependencyTemplate().Dtis("S")}; 946 rtp_sender_video.SetVideoStructure(&video_structure); 947 948 // Send key frame. 949 RTPVideoHeader hdr; 950 hdr.frame_type = VideoFrameType::kVideoFrameKey; 951 hdr.generic.emplace().decode_target_indications = 952 video_structure.templates[0].decode_target_indications; 953 954 EXPECT_CALL(*encryptor, 955 Encrypt(_, _, Not(IsEmpty()), ElementsAreArray(kFrame), _, _)); 956 rtp_sender_video.SendVideo(kPayloadType, kType, kTimestamp, 957 fake_clock_.CurrentTime(), kFrame, sizeof(kFrame), 958 hdr, kDefaultExpectedRetransmissionTime, {}); 959 // Double check packet with the dependency descriptor is sent. 960 ASSERT_EQ(transport_.packets_sent(), 1); 961 EXPECT_TRUE(transport_.last_sent_packet() 962 .HasExtension<RtpDependencyDescriptorExtension>()); 963 } 964 965 TEST_F(RtpSenderVideoTest, PopulateGenericFrameDescriptor) { 966 const int64_t kFrameId = 100000; 967 uint8_t kFrame[100]; 968 rtp_module_.RegisterRtpHeaderExtension( 969 RtpGenericFrameDescriptorExtension00::Uri(), kGenericDescriptorId); 970 971 RTPVideoHeader hdr; 972 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 973 generic.frame_id = kFrameId; 974 generic.temporal_index = 3; 975 generic.spatial_index = 2; 976 generic.dependencies.push_back(kFrameId - 1); 977 generic.dependencies.push_back(kFrameId - 500); 978 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 979 rtp_sender_video_->SendVideo( 980 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 981 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 982 983 RtpGenericFrameDescriptor descriptor_wire; 984 EXPECT_EQ(1, transport_.packets_sent()); 985 
ASSERT_TRUE(transport_.last_sent_packet() 986 .GetExtension<RtpGenericFrameDescriptorExtension00>( 987 &descriptor_wire)); 988 EXPECT_EQ(static_cast<uint16_t>(generic.frame_id), descriptor_wire.FrameId()); 989 EXPECT_EQ(generic.temporal_index, descriptor_wire.TemporalLayer()); 990 EXPECT_THAT(descriptor_wire.FrameDependenciesDiffs(), ElementsAre(1, 500)); 991 EXPECT_EQ(descriptor_wire.SpatialLayersBitmask(), 0b0000'0100); 992 } 993 994 void RtpSenderVideoTest:: 995 UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed( 996 int /* version */) { 997 const int64_t kFrameId = 100000; 998 const size_t kFrameSize = 100; 999 uint8_t kFrame[kFrameSize]; 1000 1001 rtp_module_.RegisterRtpHeaderExtension( 1002 RtpGenericFrameDescriptorExtension00::Uri(), kGenericDescriptorId); 1003 1004 RTPVideoHeader hdr; 1005 hdr.codec = kVideoCodecVP8; 1006 RTPVideoHeaderVP8& vp8 = hdr.video_type_header.emplace<RTPVideoHeaderVP8>(); 1007 vp8.pictureId = kFrameId % 0X7FFF; 1008 vp8.tl0PicIdx = 13; 1009 vp8.temporalIdx = 1; 1010 vp8.keyIdx = 2; 1011 RTPVideoHeader::GenericDescriptorInfo& generic = hdr.generic.emplace(); 1012 generic.frame_id = kFrameId; 1013 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1014 rtp_sender_video_->SendVideo(kPayloadType, VideoCodecType::kVideoCodecVP8, 1015 kTimestamp, fake_clock_.CurrentTime(), kFrame, 1016 sizeof(kFrame), hdr, 1017 kDefaultExpectedRetransmissionTime, {}); 1018 1019 ASSERT_EQ(transport_.packets_sent(), 1); 1020 // Expect only minimal 1-byte vp8 descriptor was generated. 
1021 EXPECT_EQ(transport_.last_sent_packet().payload_size(), 1 + kFrameSize); 1022 } 1023 1024 TEST_F(RtpSenderVideoTest, 1025 UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed00) { 1026 UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(0); 1027 } 1028 1029 TEST_F(RtpSenderVideoTest, 1030 UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed01) { 1031 UsesMinimalVp8DescriptorWhenGenericFrameDescriptorExtensionIsUsed(1); 1032 } 1033 1034 TEST_F(RtpSenderVideoTest, VideoLayersAllocationWithResolutionSentOnKeyFrames) { 1035 const size_t kFrameSize = 100; 1036 uint8_t kFrame[kFrameSize]; 1037 rtp_module_.RegisterRtpHeaderExtension( 1038 RtpVideoLayersAllocationExtension::Uri(), 1039 kVideoLayersAllocationExtensionId); 1040 1041 VideoLayersAllocation allocation; 1042 VideoLayersAllocation::SpatialLayer layer; 1043 layer.width = 360; 1044 layer.height = 180; 1045 layer.target_bitrate_per_temporal_layer.push_back( 1046 DataRate::KilobitsPerSec(50)); 1047 allocation.resolution_and_frame_rate_is_valid = true; 1048 allocation.active_spatial_layers.push_back(layer); 1049 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1050 1051 RTPVideoHeader hdr; 1052 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1053 rtp_sender_video_->SendVideo( 1054 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1055 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1056 1057 VideoLayersAllocation sent_allocation; 1058 EXPECT_TRUE( 1059 transport_.last_sent_packet() 1060 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1061 EXPECT_THAT(sent_allocation.active_spatial_layers, ElementsAre(layer)); 1062 1063 // Next key frame also have the allocation. 
1064 rtp_sender_video_->SendVideo( 1065 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1066 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1067 EXPECT_TRUE( 1068 transport_.last_sent_packet() 1069 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1070 } 1071 1072 TEST_F(RtpSenderVideoTest, 1073 VideoLayersAllocationWithoutResolutionSentOnDeltaWhenUpdated) { 1074 const size_t kFrameSize = 100; 1075 uint8_t kFrame[kFrameSize]; 1076 rtp_module_.RegisterRtpHeaderExtension( 1077 RtpVideoLayersAllocationExtension::Uri(), 1078 kVideoLayersAllocationExtensionId); 1079 1080 VideoLayersAllocation allocation; 1081 VideoLayersAllocation::SpatialLayer layer; 1082 layer.width = 360; 1083 layer.height = 180; 1084 allocation.resolution_and_frame_rate_is_valid = true; 1085 layer.target_bitrate_per_temporal_layer.push_back( 1086 DataRate::KilobitsPerSec(50)); 1087 allocation.active_spatial_layers.push_back(layer); 1088 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1089 1090 RTPVideoHeader hdr; 1091 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1092 rtp_sender_video_->SendVideo( 1093 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1094 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1095 EXPECT_TRUE(transport_.last_sent_packet() 1096 .HasExtension<RtpVideoLayersAllocationExtension>()); 1097 1098 // No allocation sent on delta frame unless it has been updated. 1099 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1100 rtp_sender_video_->SendVideo( 1101 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1102 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1103 EXPECT_FALSE(transport_.last_sent_packet() 1104 .HasExtension<RtpVideoLayersAllocationExtension>()); 1105 1106 // Update the allocation. 
1107 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1108 rtp_sender_video_->SendVideo( 1109 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1110 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1111 1112 VideoLayersAllocation sent_allocation; 1113 EXPECT_TRUE( 1114 transport_.last_sent_packet() 1115 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1116 ASSERT_THAT(sent_allocation.active_spatial_layers, SizeIs(1)); 1117 EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid); 1118 EXPECT_THAT(sent_allocation.active_spatial_layers[0] 1119 .target_bitrate_per_temporal_layer, 1120 SizeIs(1)); 1121 } 1122 1123 TEST_F(RtpSenderVideoTest, 1124 VideoLayersAllocationWithResolutionSentOnDeltaWhenSpatialLayerAdded) { 1125 const size_t kFrameSize = 100; 1126 uint8_t kFrame[kFrameSize]; 1127 rtp_module_.RegisterRtpHeaderExtension( 1128 RtpVideoLayersAllocationExtension::Uri(), 1129 kVideoLayersAllocationExtensionId); 1130 1131 VideoLayersAllocation allocation; 1132 allocation.resolution_and_frame_rate_is_valid = true; 1133 VideoLayersAllocation::SpatialLayer layer; 1134 layer.width = 360; 1135 layer.height = 180; 1136 layer.spatial_id = 0; 1137 layer.target_bitrate_per_temporal_layer.push_back( 1138 DataRate::KilobitsPerSec(50)); 1139 allocation.active_spatial_layers.push_back(layer); 1140 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1141 1142 RTPVideoHeader hdr; 1143 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1144 rtp_sender_video_->SendVideo( 1145 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1146 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1147 ASSERT_TRUE(transport_.last_sent_packet() 1148 .HasExtension<RtpVideoLayersAllocationExtension>()); 1149 1150 // Update the allocation. 
1151 layer.width = 640; 1152 layer.height = 320; 1153 layer.spatial_id = 1; 1154 layer.target_bitrate_per_temporal_layer.push_back( 1155 DataRate::KilobitsPerSec(100)); 1156 allocation.active_spatial_layers.push_back(layer); 1157 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1158 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1159 rtp_sender_video_->SendVideo( 1160 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1161 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1162 1163 VideoLayersAllocation sent_allocation; 1164 EXPECT_TRUE( 1165 transport_.last_sent_packet() 1166 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1167 EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(2)); 1168 EXPECT_TRUE(sent_allocation.resolution_and_frame_rate_is_valid); 1169 } 1170 1171 TEST_F(RtpSenderVideoTest, 1172 VideoLayersAllocationWithResolutionSentOnLargeFrameRateChange) { 1173 const size_t kFrameSize = 100; 1174 uint8_t kFrame[kFrameSize]; 1175 rtp_module_.RegisterRtpHeaderExtension( 1176 RtpVideoLayersAllocationExtension::Uri(), 1177 kVideoLayersAllocationExtensionId); 1178 1179 VideoLayersAllocation allocation; 1180 allocation.resolution_and_frame_rate_is_valid = true; 1181 VideoLayersAllocation::SpatialLayer layer; 1182 layer.width = 360; 1183 layer.height = 180; 1184 layer.spatial_id = 0; 1185 layer.frame_rate_fps = 10; 1186 layer.target_bitrate_per_temporal_layer.push_back( 1187 DataRate::KilobitsPerSec(50)); 1188 allocation.active_spatial_layers.push_back(layer); 1189 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1190 1191 RTPVideoHeader hdr; 1192 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1193 rtp_sender_video_->SendVideo( 1194 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1195 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1196 ASSERT_TRUE(transport_.last_sent_packet() 1197 .HasExtension<RtpVideoLayersAllocationExtension>()); 1198 1199 // 
Update frame rate only. 1200 allocation.active_spatial_layers[0].frame_rate_fps = 20; 1201 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1202 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1203 rtp_sender_video_->SendVideo( 1204 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1205 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1206 1207 VideoLayersAllocation sent_allocation; 1208 EXPECT_TRUE( 1209 transport_.last_sent_packet() 1210 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1211 ASSERT_TRUE(sent_allocation.resolution_and_frame_rate_is_valid); 1212 EXPECT_EQ(sent_allocation.active_spatial_layers[0].frame_rate_fps, 20); 1213 } 1214 1215 TEST_F(RtpSenderVideoTest, 1216 VideoLayersAllocationWithoutResolutionSentOnSmallFrameRateChange) { 1217 const size_t kFrameSize = 100; 1218 uint8_t kFrame[kFrameSize]; 1219 rtp_module_.RegisterRtpHeaderExtension( 1220 RtpVideoLayersAllocationExtension::Uri(), 1221 kVideoLayersAllocationExtensionId); 1222 1223 VideoLayersAllocation allocation; 1224 allocation.resolution_and_frame_rate_is_valid = true; 1225 VideoLayersAllocation::SpatialLayer layer; 1226 layer.width = 360; 1227 layer.height = 180; 1228 layer.spatial_id = 0; 1229 layer.frame_rate_fps = 10; 1230 layer.target_bitrate_per_temporal_layer.push_back( 1231 DataRate::KilobitsPerSec(50)); 1232 allocation.active_spatial_layers.push_back(layer); 1233 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1234 1235 RTPVideoHeader hdr; 1236 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1237 rtp_sender_video_->SendVideo( 1238 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1239 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1240 ASSERT_TRUE(transport_.last_sent_packet() 1241 .HasExtension<RtpVideoLayersAllocationExtension>()); 1242 1243 // Update frame rate slightly. 
1244 allocation.active_spatial_layers[0].frame_rate_fps = 9; 1245 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1246 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1247 rtp_sender_video_->SendVideo( 1248 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1249 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1250 1251 VideoLayersAllocation sent_allocation; 1252 EXPECT_TRUE( 1253 transport_.last_sent_packet() 1254 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1255 EXPECT_FALSE(sent_allocation.resolution_and_frame_rate_is_valid); 1256 } 1257 1258 TEST_F(RtpSenderVideoTest, VideoLayersAllocationSentOnDeltaFramesOnlyOnUpdate) { 1259 const size_t kFrameSize = 100; 1260 uint8_t kFrame[kFrameSize]; 1261 rtp_module_.RegisterRtpHeaderExtension( 1262 RtpVideoLayersAllocationExtension::Uri(), 1263 kVideoLayersAllocationExtensionId); 1264 1265 VideoLayersAllocation allocation; 1266 VideoLayersAllocation::SpatialLayer layer; 1267 layer.width = 360; 1268 layer.height = 180; 1269 layer.target_bitrate_per_temporal_layer.push_back( 1270 DataRate::KilobitsPerSec(50)); 1271 allocation.active_spatial_layers.push_back(layer); 1272 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1273 1274 RTPVideoHeader hdr; 1275 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1276 rtp_sender_video_->SendVideo( 1277 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1278 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1279 1280 VideoLayersAllocation sent_allocation; 1281 EXPECT_TRUE( 1282 transport_.last_sent_packet() 1283 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1284 EXPECT_THAT(sent_allocation.active_spatial_layers, SizeIs(1)); 1285 1286 // VideoLayersAllocation not sent on the next delta frame. 
1287 rtp_sender_video_->SendVideo( 1288 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1289 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1290 EXPECT_FALSE(transport_.last_sent_packet() 1291 .HasExtension<RtpVideoLayersAllocationExtension>()); 1292 1293 // Update allocation. VideoLayersAllocation should be sent on the next frame. 1294 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1295 rtp_sender_video_->SendVideo( 1296 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1297 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1298 EXPECT_TRUE( 1299 transport_.last_sent_packet() 1300 .GetExtension<RtpVideoLayersAllocationExtension>(&sent_allocation)); 1301 } 1302 1303 TEST_F(RtpSenderVideoTest, VideoLayersAllocationNotSentOnHigherTemporalLayers) { 1304 const size_t kFrameSize = 100; 1305 uint8_t kFrame[kFrameSize]; 1306 rtp_module_.RegisterRtpHeaderExtension( 1307 RtpVideoLayersAllocationExtension::Uri(), 1308 kVideoLayersAllocationExtensionId); 1309 1310 VideoLayersAllocation allocation; 1311 allocation.resolution_and_frame_rate_is_valid = true; 1312 VideoLayersAllocation::SpatialLayer layer; 1313 layer.width = 360; 1314 layer.height = 180; 1315 layer.target_bitrate_per_temporal_layer.push_back( 1316 DataRate::KilobitsPerSec(50)); 1317 allocation.active_spatial_layers.push_back(layer); 1318 rtp_sender_video_->SetVideoLayersAllocation(allocation); 1319 1320 RTPVideoHeader hdr; 1321 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1322 hdr.codec = VideoCodecType::kVideoCodecVP8; 1323 auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>(); 1324 vp8_header.temporalIdx = 1; 1325 1326 rtp_sender_video_->SendVideo( 1327 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1328 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1329 EXPECT_FALSE(transport_.last_sent_packet() 1330 .HasExtension<RtpVideoLayersAllocationExtension>()); 1331 1332 // Send a delta
frame on tl0. 1333 vp8_header.temporalIdx = 0; 1334 rtp_sender_video_->SendVideo( 1335 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1336 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1337 EXPECT_TRUE(transport_.last_sent_packet() 1338 .HasExtension<RtpVideoLayersAllocationExtension>()); 1339 } 1340 1341 TEST_F(RtpSenderVideoTest, 1342 AbsoluteCaptureTimeNotForwardedWhenImageHasNoCaptureTime) { 1343 uint8_t kFrame[kMaxPacketLength]; 1344 rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), 1345 kAbsoluteCaptureTimeExtensionId); 1346 1347 RTPVideoHeader hdr; 1348 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1349 rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp, 1350 /*capture_time=*/Timestamp::MinusInfinity(), 1351 kFrame, sizeof(kFrame), hdr, 1352 kDefaultExpectedRetransmissionTime, {}); 1353 // No absolute capture time should be set as the capture_time_ms was the 1354 // default value. 1355 for (const RtpPacketReceived& packet : transport_.sent_packets()) { 1356 EXPECT_FALSE(packet.HasExtension<AbsoluteCaptureTimeExtension>()); 1357 } 1358 } 1359 1360 TEST_F(RtpSenderVideoTest, AbsoluteCaptureTime) { 1361 rtp_sender_video_ = std::make_unique<TestRtpSenderVideo>( 1362 &fake_clock_, rtp_module_.RtpSender(), env_.field_trials(), 1363 /*raw_packetization=*/false); 1364 1365 constexpr Timestamp kAbsoluteCaptureTimestamp = Timestamp::Millis(12345678); 1366 uint8_t kFrame[kMaxPacketLength]; 1367 rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), 1368 kAbsoluteCaptureTimeExtensionId); 1369 1370 RTPVideoHeader hdr; 1371 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1372 rtp_sender_video_->SendVideo( 1373 kPayloadType, kType, kTimestamp, kAbsoluteCaptureTimestamp, kFrame, 1374 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1375 1376 std::optional<AbsoluteCaptureTime> absolute_capture_time; 1377 1378 // It is expected that one and only one of the packets 
sent on this video 1379 // frame has absolute capture time header extension. 1380 for (const RtpPacketReceived& packet : transport_.sent_packets()) { 1381 if (absolute_capture_time.has_value()) { 1382 EXPECT_FALSE(packet.HasExtension<AbsoluteCaptureTimeExtension>()); 1383 } else { 1384 absolute_capture_time = 1385 packet.GetExtension<AbsoluteCaptureTimeExtension>(); 1386 } 1387 } 1388 1389 // Verify the capture timestamp and that the clock offset is set to zero. 1390 ASSERT_TRUE(absolute_capture_time.has_value()); 1391 EXPECT_EQ(absolute_capture_time->absolute_capture_timestamp, 1392 Int64MsToUQ32x32( 1393 fake_clock_.ConvertTimestampToNtpTime(kAbsoluteCaptureTimestamp) 1394 .ToMs())); 1395 EXPECT_EQ(absolute_capture_time->estimated_capture_clock_offset, 0); 1396 } 1397 1398 TEST_F(RtpSenderVideoTest, AbsoluteCaptureTimeWithExtensionProvided) { 1399 constexpr AbsoluteCaptureTime kAbsoluteCaptureTime = { 1400 .absolute_capture_timestamp = 123, 1401 .estimated_capture_clock_offset = std::optional<int64_t>(456), 1402 }; 1403 uint8_t kFrame[kMaxPacketLength]; 1404 rtp_module_.RegisterRtpHeaderExtension(AbsoluteCaptureTimeExtension::Uri(), 1405 kAbsoluteCaptureTimeExtensionId); 1406 1407 RTPVideoHeader hdr; 1408 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1409 hdr.absolute_capture_time = kAbsoluteCaptureTime; 1410 rtp_sender_video_->SendVideo(kPayloadType, kType, kTimestamp, 1411 /*capture_time=*/Timestamp::Millis(789), kFrame, 1412 sizeof(kFrame), hdr, 1413 kDefaultExpectedRetransmissionTime, {}); 1414 1415 std::optional<AbsoluteCaptureTime> absolute_capture_time; 1416 1417 // It is expected that one and only one of the packets sent on this video 1418 // frame has absolute capture time header extension. 
1419 for (const RtpPacketReceived& packet : transport_.sent_packets()) { 1420 if (absolute_capture_time.has_value()) { 1421 EXPECT_FALSE(packet.HasExtension<AbsoluteCaptureTimeExtension>()); 1422 } else { 1423 absolute_capture_time = 1424 packet.GetExtension<AbsoluteCaptureTimeExtension>(); 1425 } 1426 } 1427 1428 // Verify the extension. 1429 EXPECT_EQ(absolute_capture_time, kAbsoluteCaptureTime); 1430 } 1431 1432 TEST_F(RtpSenderVideoTest, PopulatesPlayoutDelay) { 1433 // Single packet frames. 1434 constexpr size_t kPacketSize = 123; 1435 uint8_t kFrame[kPacketSize]; 1436 rtp_module_.RegisterRtpHeaderExtension(PlayoutDelayLimits::Uri(), 1437 kPlayoutDelayExtensionId); 1438 const VideoPlayoutDelay kExpectedDelay(TimeDelta::Millis(10), 1439 TimeDelta::Millis(20)); 1440 1441 // Send initial key-frame without playout delay. 1442 RTPVideoHeader hdr; 1443 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1444 hdr.codec = VideoCodecType::kVideoCodecVP8; 1445 auto& vp8_header = hdr.video_type_header.emplace<RTPVideoHeaderVP8>(); 1446 vp8_header.temporalIdx = 0; 1447 1448 rtp_sender_video_->SendVideo( 1449 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1450 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1451 EXPECT_FALSE( 1452 transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>()); 1453 1454 // Set playout delay on a discardable frame. 
hdr.playout_delay = kExpectedDelay; 1456 hdr.frame_type = VideoFrameType::kVideoFrameDelta; 1457 vp8_header.temporalIdx = 1; 1458 rtp_sender_video_->SendVideo( 1459 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1460 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1461 VideoPlayoutDelay received_delay = VideoPlayoutDelay(); 1462 ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>( 1463 &received_delay)); 1464 EXPECT_EQ(received_delay, kExpectedDelay); 1465 1466 // Set playout delay on a non-discardable frame, the extension should still 1467 // be populated since delivery wasn't guaranteed on the last one. 1468 hdr.playout_delay = std::nullopt;  // Indicates "no change". 1469 vp8_header.temporalIdx = 0; 1470 rtp_sender_video_->SendVideo( 1471 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1472 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1473 ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>( 1474 &received_delay)); 1475 EXPECT_EQ(received_delay, kExpectedDelay); 1476 1477 // The next frame does not need the extensions since its delivery has 1478 // already been guaranteed. 1479 rtp_sender_video_->SendVideo( 1480 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1481 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1482 EXPECT_FALSE( 1483 transport_.last_sent_packet().HasExtension<PlayoutDelayLimits>()); 1484 1485 // Insert key-frame, we need to refresh the state here.
1486 hdr.frame_type = VideoFrameType::kVideoFrameKey; 1487 rtp_sender_video_->SendVideo( 1488 kPayloadType, kType, kTimestamp, fake_clock_.CurrentTime(), kFrame, 1489 sizeof(kFrame), hdr, kDefaultExpectedRetransmissionTime, {}); 1490 ASSERT_TRUE(transport_.last_sent_packet().GetExtension<PlayoutDelayLimits>( 1491 &received_delay)); 1492 EXPECT_EQ(received_delay, kExpectedDelay); 1493 } 1494 1495 TEST_F(RtpSenderVideoTest, SendGenericVideo) { 1496 const uint8_t kPayloadTypeGeneric = 127; 1497 const VideoCodecType kCodecType = VideoCodecType::kVideoCodecGeneric; 1498 const uint8_t kPayload[] = {47, 11, 32, 93, 89}; 1499 1500 // Send keyframe. 1501 RTPVideoHeader video_header; 1502 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1503 ASSERT_TRUE(rtp_sender_video_->SendVideo( 1504 kPayloadTypeGeneric, kCodecType, 1234, fake_clock_.CurrentTime(), 1505 kPayload, sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); 1506 1507 ArrayView<const uint8_t> sent_payload = 1508 transport_.last_sent_packet().payload(); 1509 uint8_t generic_header = sent_payload[0]; 1510 EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit); 1511 EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit); 1512 EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kPayload)); 1513 1514 // Send delta frame. 
1515 const uint8_t kDeltaPayload[] = {13, 42, 32, 93, 13}; 1516 video_header.frame_type = VideoFrameType::kVideoFrameDelta; 1517 ASSERT_TRUE(rtp_sender_video_->SendVideo( 1518 kPayloadTypeGeneric, kCodecType, 1234, fake_clock_.CurrentTime(), 1519 kDeltaPayload, sizeof(kDeltaPayload), video_header, 1520 TimeDelta::PlusInfinity(), {})); 1521 1522 sent_payload = sent_payload = transport_.last_sent_packet().payload(); 1523 generic_header = sent_payload[0]; 1524 EXPECT_FALSE(generic_header & RtpFormatVideoGeneric::kKeyFrameBit); 1525 EXPECT_TRUE(generic_header & RtpFormatVideoGeneric::kFirstPacketBit); 1526 EXPECT_THAT(sent_payload.subview(1), ElementsAreArray(kDeltaPayload)); 1527 } 1528 1529 class RtpSenderVideoRawPacketizationTest : public RtpSenderVideoTest { 1530 public: 1531 RtpSenderVideoRawPacketizationTest() : RtpSenderVideoTest(true) {} 1532 }; 1533 1534 TEST_F(RtpSenderVideoRawPacketizationTest, SendRawVideo) { 1535 const uint8_t kPayloadTypeRaw = 111; 1536 const uint8_t kPayload[] = {11, 22, 33, 44, 55}; 1537 1538 // Send a frame. 
1539 RTPVideoHeader video_header; 1540 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1541 ASSERT_TRUE(rtp_sender_video_->SendVideo( 1542 kPayloadTypeRaw, std::nullopt, 1234, fake_clock_.CurrentTime(), kPayload, 1543 sizeof(kPayload), video_header, TimeDelta::PlusInfinity(), {})); 1544 1545 ArrayView<const uint8_t> sent_payload = 1546 transport_.last_sent_packet().payload(); 1547 EXPECT_THAT(sent_payload, ElementsAreArray(kPayload)); 1548 } 1549 1550 TEST_F(RtpSenderVideoRawPacketizationTest, SendVideoWithSetCodecTypeStillRaw) { 1551 const uint8_t kPayloadTypeRaw = 111; 1552 const uint8_t kPayload[] = {11, 22, 33, 44, 55}; 1553 1554 // Send a frame with codectype "generic" 1555 RTPVideoHeader video_header; 1556 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1557 ASSERT_TRUE(rtp_sender_video_->SendVideo( 1558 kPayloadTypeRaw, VideoCodecType::kVideoCodecGeneric, 1234, 1559 fake_clock_.CurrentTime(), kPayload, sizeof(kPayload), video_header, 1560 TimeDelta::PlusInfinity(), {})); 1561 1562 // Should still be packetized as raw. 
1563 EXPECT_THAT(transport_.last_sent_packet().payload(), 1564 ElementsAreArray(kPayload)); 1565 } 1566 1567 class RtpSenderVideoWithFrameTransformerTest : public ::testing::Test { 1568 public: 1569 RtpSenderVideoWithFrameTransformerTest() 1570 : time_controller_(kStartTime), 1571 env_(CreateEnvironment(time_controller_.GetClock(), 1572 time_controller_.GetTaskQueueFactory())), 1573 retransmission_rate_limiter_(time_controller_.GetClock(), 1000), 1574 rtp_module_( 1575 env_, 1576 {.outgoing_transport = &transport_, 1577 .retransmission_rate_limiter = &retransmission_rate_limiter_, 1578 .local_media_ssrc = kSsrc, 1579 .rid = "myrid"}) { 1580 rtp_module_.SetSequenceNumber(kSeqNum); 1581 rtp_module_.SetStartTimestamp(0); 1582 } 1583 1584 std::unique_ptr<RTPSenderVideo> CreateSenderWithFrameTransformer( 1585 scoped_refptr<FrameTransformerInterface> transformer) { 1586 RTPSenderVideo::Config config; 1587 config.clock = time_controller_.GetClock(); 1588 config.rtp_sender = rtp_module_.RtpSender(); 1589 config.field_trials = &env_.field_trials(); 1590 config.frame_transformer = transformer; 1591 config.task_queue_factory = time_controller_.GetTaskQueueFactory(); 1592 return std::make_unique<RTPSenderVideo>(config); 1593 } 1594 1595 protected: 1596 GlobalSimulatedTimeController time_controller_; 1597 const Environment env_; 1598 LoopbackTransportTest transport_; 1599 RateLimiter retransmission_rate_limiter_; 1600 ModuleRtpRtcpImpl2 rtp_module_; 1601 }; 1602 1603 std::unique_ptr<EncodedImage> CreateDefaultEncodedImage() { 1604 const uint8_t data[] = {1, 2, 3, 4}; 1605 auto encoded_image = std::make_unique<EncodedImage>(); 1606 encoded_image->SetEncodedData(EncodedImageBuffer::Create(data, sizeof(data))); 1607 return encoded_image; 1608 } 1609 1610 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1611 CreateSenderRegistersFrameTransformer) { 1612 auto mock_frame_transformer = 1613 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1614 EXPECT_CALL(*mock_frame_transformer, 
1615 RegisterTransformedFrameSinkCallback(_, kSsrc)); 1616 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1617 CreateSenderWithFrameTransformer(mock_frame_transformer); 1618 } 1619 1620 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1621 DestroySenderUnregistersFrameTransformer) { 1622 auto mock_frame_transformer = 1623 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1624 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1625 CreateSenderWithFrameTransformer(mock_frame_transformer); 1626 EXPECT_CALL(*mock_frame_transformer, 1627 UnregisterTransformedFrameSinkCallback(kSsrc)); 1628 rtp_sender_video = nullptr; 1629 } 1630 1631 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1632 SendEncodedImageTransformsFrame) { 1633 auto mock_frame_transformer = 1634 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1635 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1636 CreateSenderWithFrameTransformer(mock_frame_transformer); 1637 auto encoded_image = CreateDefaultEncodedImage(); 1638 RTPVideoHeader video_header; 1639 1640 EXPECT_CALL(*mock_frame_transformer, Transform); 1641 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1642 *encoded_image, video_header, 1643 kDefaultExpectedRetransmissionTime); 1644 } 1645 1646 TEST_F(RtpSenderVideoTest, SendEncodedImageIncludesProvidedCsrcs) { 1647 std::vector<uint32_t> expected_csrcs = {1, 2, 3}; 1648 std::unique_ptr<EncodedImage> encoded_image = CreateDefaultEncodedImage(); 1649 RTPVideoHeader video_header; 1650 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1651 1652 ASSERT_TRUE(rtp_sender_video_->SendEncodedImage( 1653 0, kType, kTimestamp, *encoded_image, video_header, 1654 kDefaultExpectedRetransmissionTime, expected_csrcs)); 1655 1656 ASSERT_GT(transport_.packets_sent(), 0); 1657 std::vector<uint32_t> csrcs = transport_.last_sent_packet().Csrcs(); 1658 EXPECT_EQ(csrcs, expected_csrcs); 1659 } 1660 1661 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1662 
SendEncodedImageIncludesProvidedCsrcs) { 1663 auto mock_frame_transformer = 1664 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1665 scoped_refptr<TransformedFrameCallback> callback; 1666 EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) 1667 .WillOnce(SaveArg<0>(&callback)); 1668 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1669 CreateSenderWithFrameTransformer(mock_frame_transformer); 1670 ASSERT_TRUE(callback); 1671 ON_CALL(*mock_frame_transformer, Transform) 1672 .WillByDefault( 1673 [&callback](std::unique_ptr<TransformableFrameInterface> frame) { 1674 callback->OnTransformedFrame(std::move(frame)); 1675 }); 1676 1677 auto encoded_image = CreateDefaultEncodedImage(); 1678 std::vector<uint32_t> expected_csrcs = {1, 2, 3}; 1679 RTPVideoHeader video_header; 1680 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1681 auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( 1682 "encoder_queue", TaskQueueFactory::Priority::NORMAL); 1683 encoder_queue->PostTask([&] { 1684 rtp_sender_video->SendEncodedImage( 1685 kPayloadType, kType, kTimestamp, *encoded_image, video_header, 1686 kDefaultExpectedRetransmissionTime, expected_csrcs); 1687 }); 1688 time_controller_.AdvanceTime(TimeDelta::Zero()); 1689 1690 ASSERT_GT(transport_.packets_sent(), 0); 1691 std::vector<uint32_t> csrcs = transport_.last_sent_packet().Csrcs(); 1692 EXPECT_EQ(csrcs, expected_csrcs); 1693 } 1694 1695 #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID) 1696 TEST_F(RtpSenderVideoWithFrameTransformerTest, ValidPayloadTypes) { 1697 auto mock_frame_transformer = 1698 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1699 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1700 CreateSenderWithFrameTransformer(mock_frame_transformer); 1701 auto encoded_image = CreateDefaultEncodedImage(); 1702 RTPVideoHeader video_header; 1703 1704 EXPECT_TRUE(rtp_sender_video->SendEncodedImage( 1705 0, kType, kTimestamp, 
*encoded_image, video_header, 1706 kDefaultExpectedRetransmissionTime)); 1707 EXPECT_TRUE(rtp_sender_video->SendEncodedImage( 1708 127, kType, kTimestamp, *encoded_image, video_header, 1709 kDefaultExpectedRetransmissionTime)); 1710 EXPECT_DEATH(rtp_sender_video->SendEncodedImage( 1711 -1, kType, kTimestamp, *encoded_image, video_header, 1712 kDefaultExpectedRetransmissionTime), 1713 ""); 1714 EXPECT_DEATH(rtp_sender_video->SendEncodedImage( 1715 128, kType, kTimestamp, *encoded_image, video_header, 1716 kDefaultExpectedRetransmissionTime), 1717 ""); 1718 } 1719 #endif 1720 1721 TEST_F(RtpSenderVideoWithFrameTransformerTest, OnTransformedFrameSendsVideo) { 1722 auto mock_frame_transformer = 1723 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1724 scoped_refptr<TransformedFrameCallback> callback; 1725 EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) 1726 .WillOnce(SaveArg<0>(&callback)); 1727 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1728 CreateSenderWithFrameTransformer(mock_frame_transformer); 1729 ASSERT_TRUE(callback); 1730 1731 auto encoded_image = CreateDefaultEncodedImage(); 1732 RTPVideoHeader video_header; 1733 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1734 ON_CALL(*mock_frame_transformer, Transform) 1735 .WillByDefault( 1736 [&callback](std::unique_ptr<TransformableFrameInterface> frame) { 1737 callback->OnTransformedFrame(std::move(frame)); 1738 }); 1739 auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( 1740 "encoder_queue", TaskQueueFactory::Priority::NORMAL); 1741 encoder_queue->PostTask([&] { 1742 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1743 *encoded_image, video_header, 1744 kDefaultExpectedRetransmissionTime); 1745 }); 1746 time_controller_.AdvanceTime(TimeDelta::Zero()); 1747 EXPECT_EQ(transport_.packets_sent(), 1); 1748 encoder_queue->PostTask([&] { 1749 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1750 
*encoded_image, video_header, 1751 kDefaultExpectedRetransmissionTime); 1752 }); 1753 time_controller_.AdvanceTime(TimeDelta::Zero()); 1754 EXPECT_EQ(transport_.packets_sent(), 2); 1755 } 1756 1757 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1758 TransformOverheadCorrectlyAccountedFor) { 1759 auto mock_frame_transformer = 1760 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1761 scoped_refptr<TransformedFrameCallback> callback; 1762 EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) 1763 .WillOnce(SaveArg<0>(&callback)); 1764 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1765 CreateSenderWithFrameTransformer(mock_frame_transformer); 1766 ASSERT_TRUE(callback); 1767 1768 auto encoded_image = CreateDefaultEncodedImage(); 1769 RTPVideoHeader video_header; 1770 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1771 ON_CALL(*mock_frame_transformer, Transform) 1772 .WillByDefault( 1773 [&callback](std::unique_ptr<TransformableFrameInterface> frame) { 1774 const uint8_t data[] = {1, 2, 3, 4, 5, 6, 7, 8, 1775 9, 10, 11, 12, 13, 14, 15, 16}; 1776 frame->SetData(data); 1777 callback->OnTransformedFrame(std::move(frame)); 1778 }); 1779 auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( 1780 "encoder_queue", TaskQueueFactory::Priority::NORMAL); 1781 const int kFramesPerSecond = 25; 1782 for (int i = 0; i < kFramesPerSecond; ++i) { 1783 encoder_queue->PostTask([&] { 1784 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1785 *encoded_image, video_header, 1786 kDefaultExpectedRetransmissionTime); 1787 }); 1788 time_controller_.AdvanceTime(TimeDelta::Millis(1000 / kFramesPerSecond)); 1789 } 1790 EXPECT_EQ(transport_.packets_sent(), kFramesPerSecond); 1791 EXPECT_GT(rtp_sender_video->PostEncodeOverhead().bps(), 2200); 1792 } 1793 1794 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1795 TransformableFrameMetadataHasCorrectValue) { 1796 auto mock_frame_transformer = 1797 
make_ref_counted<NiceMock<MockFrameTransformer>>(); 1798 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1799 CreateSenderWithFrameTransformer(mock_frame_transformer); 1800 auto encoded_image = CreateDefaultEncodedImage(); 1801 RTPVideoHeader video_header; 1802 video_header.width = 1280u; 1803 video_header.height = 720u; 1804 RTPVideoHeader::GenericDescriptorInfo& generic = 1805 video_header.generic.emplace(); 1806 generic.frame_id = 10; 1807 generic.temporal_index = 3; 1808 generic.spatial_index = 2; 1809 generic.decode_target_indications = {DecodeTargetIndication::kSwitch}; 1810 generic.dependencies = {5}; 1811 1812 // Check that the transformable frame passed to the frame transformer has the 1813 // correct metadata. 1814 EXPECT_CALL(*mock_frame_transformer, Transform) 1815 .WillOnce( 1816 [](std::unique_ptr<TransformableFrameInterface> transformable_frame) { 1817 auto frame = 1818 absl::WrapUnique(static_cast<TransformableVideoFrameInterface*>( 1819 transformable_frame.release())); 1820 ASSERT_TRUE(frame); 1821 auto metadata = frame->Metadata(); 1822 EXPECT_EQ(frame->Rid(), "myrid"); 1823 EXPECT_EQ(metadata.GetWidth(), 1280u); 1824 EXPECT_EQ(metadata.GetHeight(), 720u); 1825 EXPECT_EQ(metadata.GetFrameId(), 10); 1826 EXPECT_EQ(metadata.GetTemporalIndex(), 3); 1827 EXPECT_EQ(metadata.GetSpatialIndex(), 2); 1828 EXPECT_THAT(metadata.GetFrameDependencies(), ElementsAre(5)); 1829 EXPECT_THAT(metadata.GetDecodeTargetIndications(), 1830 ElementsAre(DecodeTargetIndication::kSwitch)); 1831 }); 1832 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1833 *encoded_image, video_header, 1834 kDefaultExpectedRetransmissionTime); 1835 } 1836 1837 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1838 TransformableFrameHasCorrectCaptureIdentifier) { 1839 auto mock_frame_transformer = 1840 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1841 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1842 CreateSenderWithFrameTransformer(mock_frame_transformer); 
1843 auto encoded_image = CreateDefaultEncodedImage(); 1844 encoded_image->SetPresentationTimestamp(Timestamp::Millis(1)); 1845 RTPVideoHeader video_header; 1846 1847 EXPECT_CALL(*mock_frame_transformer, Transform) 1848 .WillOnce([&encoded_image](std::unique_ptr<TransformableFrameInterface> 1849 transformable_frame) { 1850 auto* frame = static_cast<TransformableVideoFrameInterface*>( 1851 transformable_frame.get()); 1852 ASSERT_TRUE(frame); 1853 EXPECT_EQ(frame->GetPresentationTimestamp(), 1854 encoded_image->PresentationTimestamp()); 1855 }); 1856 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1857 *encoded_image, video_header, 1858 kDefaultExpectedRetransmissionTime); 1859 } 1860 1861 TEST_F(RtpSenderVideoWithFrameTransformerTest, 1862 OnTransformedFrameSendsVideoWhenCloned) { 1863 auto mock_frame_transformer = 1864 make_ref_counted<NiceMock<MockFrameTransformer>>(); 1865 scoped_refptr<TransformedFrameCallback> callback; 1866 EXPECT_CALL(*mock_frame_transformer, RegisterTransformedFrameSinkCallback) 1867 .WillOnce(SaveArg<0>(&callback)); 1868 std::unique_ptr<RTPSenderVideo> rtp_sender_video = 1869 CreateSenderWithFrameTransformer(mock_frame_transformer); 1870 ASSERT_TRUE(callback); 1871 1872 auto encoded_image = CreateDefaultEncodedImage(); 1873 RTPVideoHeader video_header; 1874 video_header.frame_type = VideoFrameType::kVideoFrameKey; 1875 ON_CALL(*mock_frame_transformer, Transform) 1876 .WillByDefault( 1877 [&callback](std::unique_ptr<TransformableFrameInterface> frame) { 1878 auto clone = CloneVideoFrame( 1879 static_cast<TransformableVideoFrameInterface*>(frame.get())); 1880 EXPECT_TRUE(clone); 1881 callback->OnTransformedFrame(std::move(clone)); 1882 }); 1883 auto encoder_queue = time_controller_.GetTaskQueueFactory()->CreateTaskQueue( 1884 "encoder_queue", TaskQueueFactory::Priority::NORMAL); 1885 encoder_queue->PostTask([&] { 1886 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1887 *encoded_image, video_header, 
1888 kDefaultExpectedRetransmissionTime); 1889 }); 1890 time_controller_.AdvanceTime(TimeDelta::Zero()); 1891 EXPECT_EQ(transport_.packets_sent(), 1); 1892 encoder_queue->PostTask([&] { 1893 rtp_sender_video->SendEncodedImage(kPayloadType, kType, kTimestamp, 1894 *encoded_image, video_header, 1895 kDefaultExpectedRetransmissionTime); 1896 }); 1897 time_controller_.AdvanceTime(TimeDelta::Zero()); 1898 EXPECT_EQ(transport_.packets_sent(), 2); 1899 } 1900 1901 } // namespace 1902 } // namespace webrtc