// VideoFrameConverter.h
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef VideoFrameConverter_h
#define VideoFrameConverter_h

#include "ImageContainer.h"
#include "ImageConversion.h"
#include "Pacer.h"
#include "PerformanceRecorder.h"
#include "VideoSegment.h"
#include "api/video/video_frame.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/include/video_frame_buffer_pool.h"
#include "jsapi/RTCStatsReport.h"
#include "media/base/adapted_video_track_source.h"
#include "mozilla/dom/ImageBitmapBinding.h"
#include "mozilla/dom/ImageUtils.h"
#include "nsISupportsImpl.h"
#include "nsThreadUtils.h"

// The number of frame buffers VideoFrameConverter may create before returning
// errors.
// Sometimes these are released synchronously but they can be forwarded all the
// way to the encoder for asynchronous encoding. With a pool size of 5,
// we allow 1 buffer for the current conversion, and 4 buffers to be queued at
// the encoder.
#define CONVERTER_BUFFER_POOL_SIZE 5

extern mozilla::LazyLogModule gMediaPipelineLog;
#define LOG(level, msg, ...) \
  MOZ_LOG(gMediaPipelineLog, level, (msg, ##__VA_ARGS__))

namespace mozilla {

// Compile-time policy selecting whether the converter may drop frames (e.g.
// per libwebrtc SinkWants, or when timestamps regress) or must process every
// frame it is given.
enum class FrameDroppingPolicy {
  Allowed,
  Disabled,
};

// An async video frame format converter.
//
// Input is typically a MediaTrackListener driven by MediaTrackGraph.
//
// Output is exposed through webrtc::AdaptedVideoTrackSource, which implements
// webrtc::VideoSourceInterface<webrtc::VideoFrame>.
template <FrameDroppingPolicy DropPolicy = FrameDroppingPolicy::Allowed>
class VideoFrameConverterImpl : public webrtc::AdaptedVideoTrackSource {
 protected:
  // aTarget: serial event target on which all conversion work and state
  //          mutation happens (asserted throughout via IsOnCurrentThread()).
  // aTimestampMaker: used to translate MozTime frame times to webrtc realtime.
  // aLockScaling: when true, AdaptFrame's crop/scale decisions are overridden
  //               and frames keep their input resolution.
  explicit VideoFrameConverterImpl(
      already_AddRefed<nsISerialEventTarget> aTarget,
      const dom::RTCStatsTimestampMaker& aTimestampMaker, bool aLockScaling)
      : mTimestampMaker(aTimestampMaker),
        mTarget(aTarget),
        mLockScaling(aLockScaling),
        mPacer(MakeAndAddRef<Pacer<FrameToProcess>>(
            do_AddRef(mTarget), mIdleFrameDuplicationInterval)),
        mScalingPool(false, CONVERTER_BUFFER_POOL_SIZE),
        mConversionPool(false, CONVERTER_BUFFER_POOL_SIZE) {
    MOZ_COUNT_CTOR(VideoFrameConverterImpl);
  }

  // AdaptedVideoTrackSource impl -- we don't expect any of these to be called.
  // They are in libwebrtc because they are used by blink to communicate
  // properties from a video track source to their libwebrtc integration layer.
  // We signal this elsewhere.
  void GenerateKeyFrame() override {
    MOZ_CRASH("Unexpected VideoFrameConverterImpl::GenerateKeyFrame");
  }
  SourceState state() const override {
    MOZ_CRASH("Unexpected VideoFrameConverterImpl::state");
  }
  bool remote() const override {
    MOZ_CRASH("Unexpected VideoFrameConverterImpl::remote");
  }
  bool is_screencast() const override {
    MOZ_CRASH("Unexpected VideoFrameConverterImpl::is_screencast");
  }
  std::optional<bool> needs_denoising() const override {
    MOZ_CRASH("Unexpected VideoFrameConverterImpl::needs_denoising");
  }

  // Connects the pacer's output event to QueueForProcessing on mTarget.
  // Called once after construction (see VideoFrameConverter::Create); kept out
  // of the constructor because Connect takes a RefPtr to `this`.
  void RegisterListener() {
    mPacingListener = mPacer->PacedItemEvent().Connect(
        mTarget,
        [self = RefPtr(this)](FrameToProcess&& aFrame, TimeStamp aTime) {
          self->QueueForProcessing(std::move(aFrame.mImage), aTime,
                                   aFrame.mSize, aFrame.mForceBlack);
        });
  }

 public:
  using webrtc::VideoSourceInterface<webrtc::VideoFrame>::AddOrUpdateSink;
  using webrtc::VideoSourceInterface<webrtc::VideoFrame>::RemoveSink;

  // Entry point for frames from the media pipeline. Frames without pixels are
  // ignored; everything else is handed to the pacer, which re-emits frames on
  // mTarget close to their render time (and duplicates them when idle).
  void QueueVideoChunk(const VideoChunk& aChunk, bool aForceBlack) {
    gfx::IntSize size = aChunk.mFrame.GetIntrinsicSize();
    if (size.width == 0 || size.height == 0) {
      return;
    }

    TimeStamp t = aChunk.mTimeStamp;
    MOZ_ASSERT(!t.IsNull());

    mPacer->Enqueue(
        FrameToProcess(aChunk.mFrame.GetImage(), t, size, aForceBlack), t);
  }

  /**
   * An active VideoFrameConverter actively converts queued video frames.
   * While inactive, we keep track of the frame most recently queued for
   * processing, so it can be immediately sent out once activated.
   */
  void SetActive(bool aActive) {
    MOZ_ALWAYS_SUCCEEDS(mTarget->Dispatch(NS_NewRunnableFunction(
        __func__, [self = RefPtr<VideoFrameConverterImpl>(this), this, aActive,
                   time = TimeStamp::Now()] {
          if (mActive == aActive) {
            return;
          }
          LOG(LogLevel::Debug, "VideoFrameConverter %p is now %s", this,
              aActive ? "active" : "inactive");
          mActive = aActive;
          // Serial() == -2 means "no image" (see FrameToProcess::Serial), i.e.
          // nothing has been queued yet, so there is nothing to re-send.
          if (aActive && mLastFrameQueuedForProcessing.Serial() != -2) {
            // After activating, we re-process the last image that was queued
            // for processing so it can be immediately sent. The image is reset
            // so it doesn't get dropped if within the duplicate frame interval.
            QueueForProcessing(std::move(mLastFrameQueuedForProcessing.mImage),
                               std::max(mLastFrameQueuedForProcessing.mTime +
                                            TimeDuration::FromMicroseconds(1),
                                        time),
                               mLastFrameQueuedForProcessing.mSize,
                               mLastFrameQueuedForProcessing.mForceBlack);
          }
        })));
  }

  // Mirrors the sending track's enabled state. While disabled every frame is
  // forced to black (see QueueForProcessing's `aForceBlack || !mTrackEnabled`).
  void SetTrackEnabled(bool aTrackEnabled) {
    MOZ_ALWAYS_SUCCEEDS(mTarget->Dispatch(NS_NewRunnableFunction(
        __func__, [self = RefPtr<VideoFrameConverterImpl>(this), this,
                   aTrackEnabled, time = TimeStamp::Now()] {
          if (mTrackEnabled == aTrackEnabled) {
            return;
          }
          LOG(LogLevel::Debug, "VideoFrameConverterImpl %p Track is now %s",
              this, aTrackEnabled ? "enabled" : "disabled");
          mTrackEnabled = aTrackEnabled;
          if (!aTrackEnabled) {
            // After disabling we immediately send a frame as black, so it can
            // be seen quickly, even if no frames are flowing. If no frame has
            // been queued for processing yet, we use the FrameToProcess default
            // size (640x480).
            QueueForProcessing(/* aImage= */ nullptr,
                               std::max(mLastFrameQueuedForProcessing.mTime +
                                            TimeDuration::FromMicroseconds(1),
                                        time),
                               mLastFrameQueuedForProcessing.mSize,
                               /* aForceBlack= */ true);
          }
        })));
  }

  // Sets the id used to label PerformanceRecorder stages for this converter.
  // Applied asynchronously on mTarget.
  void SetTrackingId(TrackingId aTrackingId) {
    MOZ_ALWAYS_SUCCEEDS(mTarget->Dispatch(NS_NewRunnableFunction(
        __func__, [self = RefPtr<VideoFrameConverterImpl>(this), this,
                   id = std::move(aTrackingId)]() mutable {
          mTrackingId = Some(std::move(id));
        })));
  }

  // Updates how often the last frame is re-sent while no new frames flow.
  // Note the pacer is updated synchronously here while our own member is
  // updated via dispatch to mTarget.
  void SetIdleFrameDuplicationInterval(TimeDuration aInterval) {
    MOZ_ALWAYS_SUCCEEDS(mTarget->Dispatch(NS_NewRunnableFunction(
        __func__, [self = RefPtr(this), this, aInterval] {
          mIdleFrameDuplicationInterval = aInterval;
        })));
    mPacer->SetDuplicationInterval(aInterval);
  }

  // Tears down the pacer, then (on mTarget) disconnects the pacing listener
  // and releases pooled buffers and cached frames.
  void Shutdown() {
    mPacer->Shutdown()->Then(
        mTarget, __func__,
        [self = RefPtr<VideoFrameConverterImpl>(this), this] {
          mPacingListener.DisconnectIfExists();
          mScalingPool.Release();
          mConversionPool.Release();
          mLastFrameQueuedForProcessing = FrameToProcess();
          mLastFrameConverted = Nothing();
        });
  }

 protected:
  // A frame waiting to be converted: the source image (null means "no image",
  // e.g. a forced-black placeholder), its timestamp, intrinsic size, and
  // whether it must be rendered as black.
  struct FrameToProcess {
    FrameToProcess() = default;

    FrameToProcess(RefPtr<layers::Image> aImage, TimeStamp aTime,
                   gfx::IntSize aSize, bool aForceBlack)
        : mImage(std::move(aImage)),
          mTime(aTime),
          mSize(aSize),
          mForceBlack(aForceBlack) {}

    RefPtr<layers::Image> mImage;
    TimeStamp mTime = TimeStamp::Now();
    gfx::IntSize mSize = gfx::IntSize(640, 480);
    bool mForceBlack = false;

    // Identity of the frame for duplicate detection. Black frames all share
    // -1 and imageless frames share -2; otherwise the image's serial is used.
    int32_t Serial() const {
      if (mForceBlack) {
        // Set the last-img check to indicate black.
        // -1 is not a guaranteed invalid serial. See bug 1262134.
        return -1;
      }
      if (!mImage) {
        // Set the last-img check to indicate reset.
        // -2 is not a guaranteed invalid serial. See bug 1262134.
        return -2;
      }
      return mImage->GetSerial();
    }
  };

  // The most recently converted frame, remembered so an identical input frame
  // at the same output resolution can be re-sent without re-converting.
  struct FrameConverted {
    FrameConverted(webrtc::VideoFrame aFrame, gfx::IntSize aOriginalSize,
                   int32_t aSerial)
        : mFrame(std::move(aFrame)),
          mOriginalSize(aOriginalSize),
          mSerial(aSerial) {}

    webrtc::VideoFrame mFrame;
    gfx::IntSize mOriginalSize;
    int32_t mSerial;
  };

  MOZ_COUNTED_DTOR_VIRTUAL(VideoFrameConverterImpl)

  // Records aVideoFrame as the last converted frame and forwards it to the
  // webrtc sinks via AdaptedVideoTrackSource::OnFrame. mTarget only.
  void VideoFrameConverted(const webrtc::VideoFrame& aVideoFrame,
                           gfx::IntSize aOriginalSize, int32_t aSerial) {
    MOZ_ASSERT(mTarget->IsOnCurrentThread());

    LOG(LogLevel::Verbose,
        "VideoFrameConverterImpl %p: Converted a frame. Diff from last: %.3fms",
        this,
        static_cast<double>(aVideoFrame.timestamp_us() -
                            (mLastFrameConverted
                                 ? mLastFrameConverted->mFrame.timestamp_us()
                                 : aVideoFrame.timestamp_us())) /
            1000);

    // Check that time doesn't go backwards
    MOZ_ASSERT_IF(mLastFrameConverted,
                  aVideoFrame.timestamp_us() >
                      mLastFrameConverted->mFrame.timestamp_us());

    mLastFrameConverted =
        Some(FrameConverted(aVideoFrame, aOriginalSize, aSerial));

    OnFrame(aVideoFrame);
  }

  // Filters and re-times incoming frames (dropping non-monotonic timestamps
  // and rate-limiting duplicates), records the frame as the latest queued, and
  // — when active — dispatches it to ProcessVideoFrame. mTarget only.
  void QueueForProcessing(RefPtr<layers::Image> aImage, TimeStamp aTime,
                          gfx::IntSize aSize, bool aForceBlack) {
    MOZ_ASSERT(mTarget->IsOnCurrentThread());

    // A disabled track forces black regardless of the caller's wishes.
    FrameToProcess frame{std::move(aImage), aTime, aSize,
                         aForceBlack || !mTrackEnabled};

    if (frame.mTime <= mLastFrameQueuedForProcessing.mTime) {
      LOG(LogLevel::Debug,
          "VideoFrameConverterImpl %p: Dropping a frame because time did not "
          "progress (%.3fs)",
          this,
          (mLastFrameQueuedForProcessing.mTime - frame.mTime).ToSeconds());
      return;
    }

    if (frame.Serial() == mLastFrameQueuedForProcessing.Serial()) {
      // This is the same frame as the last one. We limit the same-frame rate,
      // and rewrite the time so the frame-gap is in multiples of the
      // duplication interval.
      //
      // The pacer only starts duplicating frames if there is no flow of frames
      // into it. There are other reasons the same frame could repeat here, and
      // at a shorter interval than the duplication interval. For instance after
      // the sender is disabled (SetTrackEnabled) but there is still a flow of
      // frames into the pacer. All disabled frames have the same serial.
      if (auto diff = frame.mTime - mLastFrameQueuedForProcessing.mTime;
          diff >= mIdleFrameDuplicationInterval) {
        auto diff_us = static_cast<int64_t>(diff.ToMicroseconds());
        auto idle_interval_us = static_cast<int64_t>(
            mIdleFrameDuplicationInterval.ToMicroseconds());
        // Integer division: snap the gap down to a whole number of intervals.
        auto multiples = diff_us / idle_interval_us;
        MOZ_ASSERT(multiples > 0);
        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Rewrote time interval for a duplicate "
            "frame from %.3fs to %.3fs",
            this,
            (frame.mTime - mLastFrameQueuedForProcessing.mTime).ToSeconds(),
            (mIdleFrameDuplicationInterval * multiples).ToSeconds());
        frame.mTime = mLastFrameQueuedForProcessing.mTime +
                      (mIdleFrameDuplicationInterval * multiples);
      } else {
        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Dropping a duplicate frame because "
            "the duplication interval (%.3fs) hasn't passed (%.3fs)",
            this, mIdleFrameDuplicationInterval.ToSeconds(),
            (frame.mTime - mLastFrameQueuedForProcessing.mTime).ToSeconds());
        return;
      }
    }

    mLastFrameQueuedForProcessing = std::move(frame);

    if (!mActive) {
      LOG(LogLevel::Debug,
          "VideoFrameConverterImpl %p: Ignoring a frame because we're inactive",
          this);
      return;
    }

    MOZ_ALWAYS_SUCCEEDS(mTarget->Dispatch(NewRunnableMethod<FrameToProcess>(
        "VideoFrameConverterImpl::ProcessVideoFrame", this,
        &VideoFrameConverterImpl::ProcessVideoFrame,
        mLastFrameQueuedForProcessing)));
  }

  // Converts aFrame to an I420 webrtc::VideoFrame, applying libwebrtc's
  // AdaptFrame crop/scale decisions (unless mLockScaling), short-circuiting
  // duplicate frames and black frames, and wrapping YUV420P images without a
  // copy. Ends in VideoFrameConverted() on success or OnFrameDropped()
  // (usually) on failure. mTarget only.
  void ProcessVideoFrame(const FrameToProcess& aFrame) {
    MOZ_ASSERT(mTarget->IsOnCurrentThread());

    // Full pixel-format conversion of aFrame.mImage into a pooled I420
    // buffer. Returns nullptr (frame dropped) if the pool is exhausted or
    // conversion fails.
    auto convert = [this,
                    &aFrame]() -> webrtc::scoped_refptr<webrtc::I420Buffer> {
      webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
          mConversionPool.CreateI420Buffer(aFrame.mSize.width,
                                           aFrame.mSize.height);
      if (!buffer) {
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
        ++mConversionFramesDropped;
#endif
        // NOTE(review): unlike the scaling lambda below, this assert sits
        // outside the #ifdef. Harmless since MOZ_DIAGNOSTIC_ASSERT compiles
        // away when disabled, but the two lambdas are inconsistent.
        MOZ_DIAGNOSTIC_ASSERT(mConversionFramesDropped <= 100,
                              "Conversion buffers must be leaking");
        LOG(LogLevel::Warning,
            "VideoFrameConverterImpl %p: Creating a conversion buffer failed",
            this);
        return nullptr;
      }

#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
      mConversionFramesDropped = 0;
#endif
      PerformanceRecorder<CopyVideoStage> rec(
          "VideoFrameConverterImpl::ConvertToI420"_ns, *mTrackingId,
          buffer->width(), buffer->height());
      nsresult rv = ConvertToI420(aFrame.mImage, buffer->MutableDataY(),
                                  buffer->StrideY(), buffer->MutableDataU(),
                                  buffer->StrideU(), buffer->MutableDataV(),
                                  buffer->StrideV(), aFrame.mSize);

      if (NS_FAILED(rv)) {
        LOG(LogLevel::Warning,
            "VideoFrameConverterImpl %p: Image conversion failed", this);
        return nullptr;
      }
      rec.Record();
      return buffer;
    };

    // Crops and scales aSrc into a pooled buffer of the requested output
    // size. Returns nullptr (frame dropped) if the pool is exhausted.
    auto cropAndScale =
        [this, &aFrame](
            const webrtc::scoped_refptr<webrtc::I420BufferInterface>& aSrc,
            int aCrop_x, int aCrop_y, int aCrop_w, int aCrop_h, int aOut_width,
            int aOut_height)
        -> webrtc::scoped_refptr<webrtc::I420BufferInterface> {
      webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
          mScalingPool.CreateI420Buffer(aOut_width, aOut_height);
      if (!buffer) {
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
        ++mScalingFramesDropped;
        MOZ_DIAGNOSTIC_ASSERT(mScalingFramesDropped <= 100,
                              "Scaling buffers must be leaking");
#endif
        LOG(LogLevel::Warning,
            "VideoFrameConverterImpl %p: Creating a scaling buffer failed",
            this);
        return nullptr;
      }

#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
      mScalingFramesDropped = 0;
#endif
      PerformanceRecorder<CopyVideoStage> rec(
          "VideoFrameConverterImpl::CropAndScale"_ns, *mTrackingId,
          aSrc->width(), aSrc->height());
      LOG(LogLevel::Verbose,
          "VideoFrameConverterImpl %p: Scaling image %d, %dx%d -> %dx%d", this,
          aFrame.Serial(), aFrame.mSize.Width(), aFrame.mSize.Height(),
          aOut_width, aOut_height);
      buffer->CropAndScaleFrom(*aSrc, aCrop_x, aCrop_y, aCrop_w, aCrop_h);
      rec.Record();
      return buffer;
    };

    const webrtc::Timestamp time =
        dom::RTCStatsTimestamp::FromMozTime(mTimestampMaker, aFrame.mTime)
            .ToRealtime();

    const bool sameAsLastConverted =
        mLastFrameConverted && aFrame.Serial() == mLastFrameConverted->mSerial;
    // For a repeat of the last frame, size decisions are based on the size we
    // converted from last time.
    const gfx::IntSize inSize =
        sameAsLastConverted ? mLastFrameConverted->mOriginalSize : aFrame.mSize;

    int crop_x{}, crop_y{}, crop_width{}, crop_height{}, out_width{},
        out_height{};
    // AdaptFrame applies the sinks' resolution/framerate wants; `keep` is
    // false when the frame should be dropped for rate adaptation.
    bool keep =
        AdaptFrame(inSize.Width(), inSize.Height(), time.us(), &out_width,
                   &out_height, &crop_width, &crop_height, &crop_x, &crop_y);

    if (mLockScaling) {
      crop_x = crop_y = 0;
      crop_width = out_width = inSize.Width();
      crop_height = out_height = inSize.Height();
    }

    if (out_width == 0 || out_height == 0) {
      LOG(LogLevel::Verbose,
          "VideoFrameConverterImpl %p: Skipping a frame because it has no "
          "pixels",
          this);
      OnFrameDropped();
      return;
    }

    if constexpr (DropPolicy == FrameDroppingPolicy::Allowed) {
      if (!keep) {
        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Dropping a frame because of SinkWants",
            this);
        // AdaptFrame has already called OnFrameDropped.
        return;
      }
      // A newer frame may have been queued while this one sat in the dispatch
      // queue; prefer the newest.
      if (aFrame.mTime < mLastFrameQueuedForProcessing.mTime) {
        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Dropping a frame that is %.3f seconds "
            "before latest",
            this,
            (mLastFrameQueuedForProcessing.mTime - aFrame.mTime).ToSeconds());
        OnFrameDropped();
        return;
      }
    }

    if (sameAsLastConverted) {
      if (out_width == mLastFrameConverted->mFrame.width() &&
          out_height == mLastFrameConverted->mFrame.height()) {
        // This is the same input frame as last time. Avoid a conversion.
        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Re-converting last frame %d. "
            "Re-using with same resolution.",
            this, aFrame.Serial());
        webrtc::VideoFrame frame = mLastFrameConverted->mFrame;
        frame.set_timestamp_us(time.us());
        VideoFrameConverted(frame, mLastFrameConverted->mOriginalSize,
                            mLastFrameConverted->mSerial);
        return;
      }
    }

    if (aFrame.mForceBlack) {
      // Send a black image.
      webrtc::scoped_refptr<webrtc::I420Buffer> buffer =
          mScalingPool.CreateI420Buffer(out_width, out_height);
      if (!buffer) {
        MOZ_DIAGNOSTIC_CRASH(
            "Buffers not leaving scope except for "
            "reconfig, should never leak");
        LOG(LogLevel::Warning,
            "VideoFrameConverterImpl %p: Creating a buffer for a black video "
            "frame failed",
            this);
        OnFrameDropped();
        return;
      }

      LOG(LogLevel::Verbose,
          "VideoFrameConverterImpl %p: Sending a black video frame. "
          "CropAndScale: %dx%d -> %dx%d",
          this, aFrame.mSize.Width(), aFrame.mSize.Height(), out_width,
          out_height);
      webrtc::I420Buffer::SetBlack(buffer.get());

      VideoFrameConverted(webrtc::VideoFrame::Builder()
                              .set_video_frame_buffer(buffer)
                              .set_timestamp_us(time.us())
                              .build(),
                          inSize, aFrame.Serial());
      return;
    }

    if (!aFrame.mImage) {
      // Don't send anything for null images.
      return;
    }

    MOZ_ASSERT(aFrame.mImage->GetSize() == aFrame.mSize);

    webrtc::scoped_refptr<webrtc::I420BufferInterface> srcFrame;
    RefPtr<layers::PlanarYCbCrImage> image =
        aFrame.mImage->AsPlanarYCbCrImage();
    if (image) {
      dom::ImageUtils utils(image);
      Maybe<dom::ImageBitmapFormat> format = utils.GetFormat();
      if (format.isSome() &&
          format.value() == dom::ImageBitmapFormat::YUV420P &&
          image->GetData()) {
        // Already I420: wrap the planes directly, keeping `image` alive via
        // the capture so the wrapped pointers stay valid.
        const layers::PlanarYCbCrData* data = image->GetData();
        srcFrame = webrtc::WrapI420Buffer(
            aFrame.mImage->GetSize().width, aFrame.mImage->GetSize().height,
            data->mYChannel, data->mYStride, data->mCbChannel,
            data->mCbCrStride, data->mCrChannel, data->mCbCrStride,
            [image] { /* keep reference alive*/ });

        LOG(LogLevel::Verbose,
            "VideoFrameConverterImpl %p: Avoiding a conversion for image %d",
            this, aFrame.Serial());
      }
    }

    if (!srcFrame) {
      srcFrame = convert();
    }

    if (!srcFrame) {
      OnFrameDropped();
      return;
    }

    if (srcFrame->width() == out_width && srcFrame->height() == out_height) {
      LOG(LogLevel::Verbose,
          "VideoFrameConverterImpl %p: Avoiding scaling for image %d, "
          "Dimensions: %dx%d",
          this, aFrame.Serial(), out_width, out_height);
      VideoFrameConverted(webrtc::VideoFrame::Builder()
                              .set_video_frame_buffer(srcFrame)
                              .set_timestamp_us(time.us())
                              .build(),
                          inSize, aFrame.Serial());
      return;
    }

    if (webrtc::scoped_refptr<webrtc::I420BufferInterface> buffer =
            cropAndScale(webrtc::scoped_refptr(srcFrame), crop_x, crop_y,
                         crop_width, crop_height, out_width, out_height)) {
      VideoFrameConverted(webrtc::VideoFrame::Builder()
                              .set_video_frame_buffer(buffer)
                              .set_timestamp_us(time.us())
                              .build(),
                          inSize, aFrame.Serial());
    }
  }

 public:
  const dom::RTCStatsTimestampMaker mTimestampMaker;
  const nsCOMPtr<nsISerialEventTarget> mTarget;
  const bool mLockScaling;

 protected:
  // How often a frame is re-sent while the source is idle. Forever() disables
  // duplication until SetIdleFrameDuplicationInterval is called.
  TimeDuration mIdleFrameDuplicationInterval = TimeDuration::Forever();

  // Used to pace future frames close to their rendering-time. Thread-safe.
  const RefPtr<Pacer<FrameToProcess>> mPacer;

  // Accessed only from mTarget.
  MediaEventListener mPacingListener;
  webrtc::VideoFrameBufferPool mScalingPool;
  webrtc::VideoFrameBufferPool mConversionPool;
  FrameToProcess mLastFrameQueuedForProcessing;
  Maybe<FrameConverted> mLastFrameConverted;
  bool mActive = false;
  bool mTrackEnabled = true;
  Maybe<TrackingId> mTrackingId;
#ifdef MOZ_DIAGNOSTIC_ASSERT_ENABLED
  // Consecutive-failure counters backing the "buffers must be leaking"
  // diagnostics; reset to 0 on every successful buffer acquisition.
  size_t mConversionFramesDropped = 0;
  size_t mScalingFramesDropped = 0;
#endif
};

// Ref-counted, frame-dropping-allowed instantiation of VideoFrameConverterImpl.
// Use Create() so RegisterListener() runs once the object is fully
// constructed and ref-counted.
class VideoFrameConverter
    : public webrtc::RefCountedObject<
          VideoFrameConverterImpl<FrameDroppingPolicy::Allowed>> {
 protected:
  VideoFrameConverter(already_AddRefed<nsISerialEventTarget> aTarget,
                      const dom::RTCStatsTimestampMaker& aTimestampMaker,
                      bool aLockScaling)
      : webrtc::RefCountedObject<VideoFrameConverterImpl>(
            std::move(aTarget), aTimestampMaker, aLockScaling) {}

 public:
  static already_AddRefed<VideoFrameConverter> Create(
      already_AddRefed<nsISerialEventTarget> aTarget,
      const dom::RTCStatsTimestampMaker& aTimestampMaker, bool aLockScaling) {
    RefPtr<VideoFrameConverter> converter = new VideoFrameConverter(
        std::move(aTarget), aTimestampMaker, aLockScaling);
    converter->RegisterListener();
    return converter.forget();
  }
};

}  // namespace mozilla

#undef LOG

#endif  // VideoFrameConverter_h