tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

VideoSink.cpp (27486B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #ifdef XP_WIN
      8 // Include Windows headers required for enabling high precision timers.
      9 // clang-format off
     10 #  include <windows.h>
     11 #  include <mmsystem.h>
     12 // clang-format on
     13 #endif
     14 
     15 #include "VideoSink.h"
     16 
     17 #include "AudioDeviceInfo.h"
     18 #include "MediaQueue.h"
     19 #include "VideoUtils.h"
     20 #include "mozilla/IntegerPrintfMacros.h"
     21 #include "mozilla/ProfilerLabels.h"
     22 #include "mozilla/ProfilerMarkerTypes.h"
     23 #include "mozilla/StaticPrefs_browser.h"
     24 #include "mozilla/StaticPrefs_media.h"
     25 
namespace mozilla {
// Shared log module for the media decoder stack; defined elsewhere.
extern LazyLogModule gMediaDecoderLog;
}

#undef FMT

// FMT prefixes every log line with the address of this VideoSink instance so
// interleaved logs from multiple sinks can be told apart. Only usable inside
// VideoSink member functions (it expands `this`).
#define FMT(x, ...) "VideoSink=%p " x, this, ##__VA_ARGS__
#define VSINK_LOG(x, ...) \
  MOZ_LOG(gMediaDecoderLog, LogLevel::Debug, (FMT(x, ##__VA_ARGS__)))
#define VSINK_LOG_V(x, ...) \
  MOZ_LOG(gMediaDecoderLog, LogLevel::Verbose, (FMT(x, ##__VA_ARGS__)))

namespace mozilla {

using namespace mozilla::layers;

// Minimum update frequency is 1/120th of a second, i.e. half the
// duration of a 60-fps frame.
static const int64_t MIN_UPDATE_INTERVAL_US = 1000000 / (60 * 2);
     45 
     46 static void SetImageToGreenPixel(PlanarYCbCrImage* aImage) {
     47  static uint8_t greenPixel[] = {0x00, 0x00, 0x00};
     48  PlanarYCbCrData data;
     49  data.mYChannel = greenPixel;
     50  data.mCbChannel = greenPixel + 1;
     51  data.mCrChannel = greenPixel + 2;
     52  data.mYStride = data.mCbCrStride = 1;
     53  data.mPictureRect = gfx::IntRect(0, 0, 1, 1);
     54  data.mYUVColorSpace = gfx::YUVColorSpace::BT601;
     55  aImage->CopyData(data);
     56 }
     57 
// Construct a VideoSink.
//  aThread  - the owner thread; all later calls are asserted against it.
//  aAudioSink - wrapped sink that drives the playback clock; must be non-null.
//  aVideoQueue - queue of decoded frames to render (not owned).
//  aContainer - primary output container; may be null (then nothing renders).
//  aFrameStats - accumulator for presented/dropped frame statistics.
//  aVQueueSentToCompositerSize - max frames handed to the compositor at once.
VideoSink::VideoSink(AbstractThread* aThread, MediaSink* aAudioSink,
                     MediaQueue<VideoData>& aVideoQueue,
                     VideoFrameContainer* aContainer,
                     FrameStatistics& aFrameStats,
                     uint32_t aVQueueSentToCompositerSize)
    : mOwnerThread(aThread),
      mAudioSink(aAudioSink),
      mVideoQueue(aVideoQueue),
      mContainer(aContainer),
      mProducerID(ImageContainer::AllocateProducerID()),
      mFrameStats(aFrameStats),
      // Baseline for computing compositor drops that happen after we start.
      mOldCompositorDroppedCount(mContainer ? mContainer->GetDroppedImageCount()
                                            : 0),
      mPendingDroppedCount(0),
      mHasVideo(false),
      mUpdateScheduler(aThread),
      mVideoQueueSendToCompositorSize(aVQueueSentToCompositerSize)
#ifdef XP_WIN
      ,
      mHiResTimersRequested(false)
#endif
{
  MOZ_ASSERT(mAudioSink, "AudioSink should exist.");

  // With this measurement pref set, all frames are replaced by a solid 1x1
  // blank (green) image before being sent to the compositor.
  if (StaticPrefs::browser_measurement_render_anims_and_video_solid() &&
      mContainer) {
    InitializeBlankImage();
    MOZ_ASSERT(mBlankImage, "Blank image should exist.");
  }
}
     88 
VideoSink::~VideoSink() {
#ifdef XP_WIN
  // The timeBeginPeriod(1) request must have been paired with timeEndPeriod
  // (via EnsureHighResTimersOnOnlyIfPlaying) before destruction.
  MOZ_ASSERT(!mHiResTimersRequested);
#endif
}
     94 
     95 RefPtr<VideoSink::EndedPromise> VideoSink::OnEnded(TrackType aType) {
     96  AssertOwnerThread();
     97  MOZ_ASSERT(mAudioSink->IsStarted(), "Must be called after playback starts.");
     98 
     99  if (aType == TrackInfo::kAudioTrack) {
    100    return mAudioSink->OnEnded(aType);
    101  } else if (aType == TrackInfo::kVideoTrack) {
    102    return mEndPromise;
    103  }
    104  return nullptr;
    105 }
    106 
    107 media::TimeUnit VideoSink::GetEndTime(TrackType aType) const {
    108  AssertOwnerThread();
    109  MOZ_ASSERT(mAudioSink->IsStarted(), "Must be called after playback starts.");
    110 
    111  if (aType == TrackInfo::kVideoTrack) {
    112    return mVideoFrameEndTime;
    113  } else if (aType == TrackInfo::kAudioTrack) {
    114    return mAudioSink->GetEndTime(aType);
    115  }
    116  return media::TimeUnit::Zero();
    117 }
    118 
// Current playback position, taken from the audio sink's clock. aTimeStamp
// receives the wall-clock time at which the position was sampled.
media::TimeUnit VideoSink::GetPosition(TimeStamp* aTimeStamp) {
  AssertOwnerThread();
  return mAudioSink->GetPosition(aTimeStamp);
}
    123 
// Whether the audio sink still holds frames that have not been played.
// Only supported for the audio track; forwarded to the audio sink.
bool VideoSink::HasUnplayedFrames(TrackType aType) const {
  AssertOwnerThread();
  MOZ_ASSERT(aType == TrackInfo::kAudioTrack,
             "Not implemented for non audio tracks.");

  return mAudioSink->HasUnplayedFrames(aType);
}
    131 
// Duration of audio buffered but not yet played. Only supported for the
// audio track; forwarded to the audio sink.
media::TimeUnit VideoSink::UnplayedDuration(TrackType aType) const {
  AssertOwnerThread();
  MOZ_ASSERT(aType == TrackInfo::kAudioTrack,
             "Not implemented for non audio tracks.");

  return mAudioSink->UnplayedDuration(aType);
}
    139 
// Forwarded to the audio sink, which owns the playback clock.
void VideoSink::SetPlaybackRate(double aPlaybackRate) {
  AssertOwnerThread();

  mAudioSink->SetPlaybackRate(aPlaybackRate);
}
    145 
// Forwarded to the audio sink; volume has no video-side effect.
void VideoSink::SetVolume(double aVolume) {
  AssertOwnerThread();

  mAudioSink->SetVolume(aVolume);
}
    151 
// Forwarded to the audio sink, which labels its audio stream with the name.
void VideoSink::SetStreamName(const nsAString& aStreamName) {
  AssertOwnerThread();

  mAudioSink->SetStreamName(aStreamName);
}
    157 
// Forwarded to the audio sink; controls pitch correction at non-1x rates.
void VideoSink::SetPreservesPitch(bool aPreservesPitch) {
  AssertOwnerThread();

  mAudioSink->SetPreservesPitch(aPreservesPitch);
}
    163 
// Switch audio output to aDevice; forwarded to the audio sink.
// NOTE(review): unlike the other forwarding methods this one has no
// AssertOwnerThread() — confirm whether that is intentional.
RefPtr<GenericPromise> VideoSink::SetAudioDevice(
    RefPtr<AudioDeviceInfo> aDevice) {
  return mAudioSink->SetAudioDevice(std::move(aDevice));
}
    168 
// Current playback rate, read from the audio sink (the clock owner).
double VideoSink::PlaybackRate() const {
  AssertOwnerThread();

  return mAudioSink->PlaybackRate();
}
    174 
    175 void VideoSink::EnsureHighResTimersOnOnlyIfPlaying() {
    176 #ifdef XP_WIN
    177  const bool needed = IsPlaying();
    178  if (needed == mHiResTimersRequested) {
    179    return;
    180  }
    181  if (needed) {
    182    // Ensure high precision timers are enabled on Windows, otherwise the
    183    // VideoSink isn't woken up at reliable intervals to set the next frame, and
    184    // we drop frames while painting. Note that each call must be matched by a
    185    // corresponding timeEndPeriod() call. Enabling high precision timers causes
    186    // the CPU to wake up more frequently on Windows 7 and earlier, which causes
    187    // more CPU load and battery use. So we only enable high precision timers
    188    // when we're actually playing.
    189    timeBeginPeriod(1);
    190  } else {
    191    timeEndPeriod(1);
    192  }
    193  mHiResTimersRequested = needed;
    194 #endif
    195 }
    196 
// Pause or resume rendering. On pause: cancel the update timer, re-send the
// current frame so the compositor keeps displaying it, and drop cached
// compositor resources. On resume: kick the render loop, since frames may
// already be queued. The audio sink's playing state is updated in between.
void VideoSink::SetPlaying(bool aPlaying) {
  AssertOwnerThread();
  VSINK_LOG_V(" playing (%d) -> (%d)", mAudioSink->IsPlaying(), aPlaying);

  if (!aPlaying) {
    // Reset any update timer if paused.
    mUpdateScheduler.Reset();
    // Since playback is paused, tell compositor to render only current frame.
    TimeStamp nowTime;
    const auto clockTime = mAudioSink->GetPosition(&nowTime);
    RefPtr<VideoData> currentFrame = VideoQueue().PeekFront();
    if (currentFrame) {
      RenderVideoFrames(Span(&currentFrame, 1), clockTime.ToMicroseconds(),
                        nowTime);
    }
    // Release compositor-side caches while paused; they will be rebuilt on
    // the next rendered frame.
    if (mContainer) {
      mContainer->ClearCachedResources();
    }
    if (mSecondaryContainer) {
      mSecondaryContainer->ClearCachedResources();
    }
  }

  mAudioSink->SetPlaying(aPlaying);

  if (mHasVideo && aPlaying) {
    // There's no thread in VideoSink for pulling video frames, need to trigger
    // rendering while becoming playing status. because the VideoQueue may be
    // full already.
    TryUpdateRenderedVideoFrames();
  }

  // Hold the Windows high-resolution timer only while actually playing.
  EnsureHighResTimersOnOnlyIfPlaying();
}
    231 
// Begin playback at aStartTime for the media described by aInfo. Starts the
// wrapped audio sink first; if aInfo has a video track, also sets up the
// video end promise, chains it behind the underlying sink's video-track end
// promise (when one exists), connects queue listeners, and runs the render
// loop once. Returns the audio sink's Start() result.
nsresult VideoSink::Start(const media::TimeUnit& aStartTime,
                          const MediaInfo& aInfo) {
  AssertOwnerThread();
  VSINK_LOG("[%s]", __func__);

  // NOTE(review): rv is captured but video-side setup below proceeds even if
  // the audio sink failed to start — confirm this is intended.
  nsresult rv = mAudioSink->Start(aStartTime, aInfo);

  mHasVideo = aInfo.HasVideo();

  if (mHasVideo) {
    mEndPromise = mEndPromiseHolder.Ensure(__func__);

    // If the underlying MediaSink has an end promise for the video track (which
    // happens when mAudioSink refers to a DecodedStream), we must wait for it
    // to complete before resolving our own end promise. Otherwise, MDSM might
    // stop playback before DecodedStream plays to the end and cause
    // test_streams_element_capture.html to time out.
    RefPtr<EndedPromise> p = mAudioSink->OnEnded(TrackInfo::kVideoTrack);
    if (p) {
      RefPtr<VideoSink> self = this;
      // Both resolve and reject paths do the same cleanup: complete the
      // request, kick the render loop, and try to resolve our end promise.
      p->Then(
           mOwnerThread, __func__,
           [self]() {
             self->mVideoSinkEndRequest.Complete();
             self->TryUpdateRenderedVideoFrames();
             // It is possible the video queue size is 0 and we have no
             // frames to render. However, we need to call
             // MaybeResolveEndPromise() to ensure mEndPromiseHolder is
             // resolved.
             self->MaybeResolveEndPromise();
           },
           [self]() {
             self->mVideoSinkEndRequest.Complete();
             self->TryUpdateRenderedVideoFrames();
             self->MaybeResolveEndPromise();
           })
          ->Track(mVideoSinkEndRequest);
    }

    ConnectListener();
    // Run the render loop at least once so we can resolve the end promise
    // when video duration is 0.
    UpdateRenderedVideoFrames();
  }
  return rv;
}
    278 
// Stop playback. Must only be called after Start(). Cancels the update
// timer, tears down queue listeners, resolves any outstanding end promise
// (stopping counts as reaching the end), and resets per-stream state.
void VideoSink::Stop() {
  AssertOwnerThread();
  MOZ_ASSERT(mAudioSink->IsStarted(), "playback not started.");
  VSINK_LOG("[%s]", __func__);

  mAudioSink->Stop();

  mUpdateScheduler.Reset();
  if (mHasVideo) {
    DisconnectListener();
    mVideoSinkEndRequest.DisconnectIfExists();
    mEndPromiseHolder.ResolveIfExists(true, __func__);
    mEndPromise = nullptr;
  }
  mVideoFrameEndTime = media::TimeUnit::Zero();

  // Release the Windows high-resolution timer request now that playback
  // has stopped.
  EnsureHighResTimersOnOnlyIfPlaying();
}
    297 
// Started state mirrors the wrapped audio sink's.
bool VideoSink::IsStarted() const {
  AssertOwnerThread();

  return mAudioSink->IsStarted();
}
    303 
// Playing state mirrors the wrapped audio sink's.
bool VideoSink::IsPlaying() const {
  AssertOwnerThread();

  return mAudioSink->IsPlaying();
}
    309 
// Final teardown; must be called after playback has stopped. Only the
// audio sink needs explicit shutdown.
void VideoSink::Shutdown() {
  AssertOwnerThread();
  MOZ_ASSERT(!mAudioSink->IsStarted(), "must be called after playback stops.");
  VSINK_LOG("[%s]", __func__);

  mAudioSink->Shutdown();
}
    317 
    318 void VideoSink::OnVideoQueuePushed(const RefPtr<VideoData>& aSample) {
    319  AssertOwnerThread();
    320  // Listen to push event, VideoSink should try rendering ASAP if first frame
    321  // arrives but update scheduler is not triggered yet.
    322  if (!aSample->IsSentToCompositor()) {
    323    // Since we push rendered frames back to the queue, we will receive
    324    // push events for them. We only need to trigger render loop
    325    // when this frame is not rendered yet.
    326    TryUpdateRenderedVideoFrames();
    327  }
    328 }
    329 
    330 void VideoSink::OnVideoQueueFinished() {
    331  AssertOwnerThread();
    332  // Run render loop if the end promise is not resolved yet.
    333  if (!mUpdateScheduler.IsScheduled() && mAudioSink->IsPlaying() &&
    334      !mEndPromiseHolder.IsEmpty()) {
    335    UpdateRenderedVideoFrames();
    336  }
    337 }
    338 
// Repaint the current state into the container(s), e.g. after the video
// track information changes. Re-sends the front frame if one is queued;
// otherwise sends a freshly allocated blank frame so the container is
// non-empty (needed to fire 'loadeddata').
void VideoSink::Redraw(const VideoInfo& aInfo) {
  AUTO_PROFILER_LABEL("VideoSink::Redraw", MEDIA_PLAYBACK);
  AssertOwnerThread();

  // No video track, nothing to draw.
  if (!aInfo.IsValid() || !mContainer) {
    return;
  }

  auto now = TimeStamp::Now();

  RefPtr<VideoData> video = VideoQueue().PeekFront();
  if (video) {
    // With the render-solid measurement pref, substitute the 1x1 blank image
    // for the real frame.
    if (mBlankImage) {
      video->mImage = mBlankImage;
    }
    video->MarkSentToCompositor();
    mContainer->SetCurrentFrame(video->mDisplay, video->mImage, now,
                                media::TimeUnit::Invalid(), video->mTime);
    if (mSecondaryContainer) {
      mSecondaryContainer->SetCurrentFrame(video->mDisplay, video->mImage, now,
                                           media::TimeUnit::Invalid(),
                                           video->mTime);
    }
    return;
  }

  // When we reach here, it means there are no frames in this video track.
  // Draw a blank frame to ensure there is something in the image container
  // to fire 'loadeddata'.

  RefPtr<Image> blank =
      mContainer->GetImageContainer()->CreatePlanarYCbCrImage();
  mContainer->SetCurrentFrame(aInfo.mDisplay, blank, now,
                              media::TimeUnit::Invalid(),
                              media::TimeUnit::Invalid());

  if (mSecondaryContainer) {
    mSecondaryContainer->SetCurrentFrame(aInfo.mDisplay, blank, now,
                                         media::TimeUnit::Invalid(),
                                         media::TimeUnit::Invalid());
  }
}
    382 
    383 void VideoSink::TryUpdateRenderedVideoFrames() {
    384  AUTO_PROFILER_LABEL("VideoSink::TryUpdateRenderedVideoFrames",
    385                      MEDIA_PLAYBACK);
    386  AssertOwnerThread();
    387  if (mUpdateScheduler.IsScheduled() || !mAudioSink->IsPlaying()) {
    388    return;
    389  }
    390  RefPtr<VideoData> v = VideoQueue().PeekFront();
    391  if (!v) {
    392    // No frames to render.
    393    return;
    394  }
    395 
    396  TimeStamp nowTime;
    397  const media::TimeUnit clockTime = mAudioSink->GetPosition(&nowTime);
    398  if (clockTime >= v->mTime) {
    399    // Time to render this frame.
    400    UpdateRenderedVideoFrames();
    401    return;
    402  }
    403 
    404  // If we send this future frame to the compositor now, it will be rendered
    405  // immediately and break A/V sync. Instead, we schedule a timer to send it
    406  // later.
    407  int64_t delta =
    408      (v->mTime - clockTime).ToMicroseconds() / mAudioSink->PlaybackRate();
    409  TimeStamp target = nowTime + TimeDuration::FromMicroseconds(delta);
    410  RefPtr<VideoSink> self = this;
    411  mUpdateScheduler.Ensure(
    412      target, [self]() { self->UpdateRenderedVideoFramesByTimer(); },
    413      [self]() { self->UpdateRenderedVideoFramesByTimer(); });
    414 }
    415 
// Timer callback: mark the scheduled request complete, then run the render
// loop.
void VideoSink::UpdateRenderedVideoFramesByTimer() {
  AssertOwnerThread();
  mUpdateScheduler.CompleteRequest();
  UpdateRenderedVideoFrames();
}
    421 
// Subscribe to the video queue's push/finish events on the owner thread.
// Paired with DisconnectListener() in Stop().
void VideoSink::ConnectListener() {
  AssertOwnerThread();
  mPushListener = VideoQueue().PushEvent().Connect(
      mOwnerThread, this, &VideoSink::OnVideoQueuePushed);
  mFinishListener = VideoQueue().FinishEvent().Connect(
      mOwnerThread, this, &VideoSink::OnVideoQueueFinished);
}
    429 
// Unsubscribe from the video queue events connected in ConnectListener().
void VideoSink::DisconnectListener() {
  AssertOwnerThread();
  mPushListener.Disconnect();
  mFinishListener.Disconnect();
}
    435 
// Hand aFrames to the compositor(s). Each frame's compositor timestamp is
// derived from its media time relative to aClockTime/aClockTimeStamp, scaled
// by the playback rate. Frames with invalid images, negative media times, or
// out-of-order timestamps are skipped — but are still marked as sent to the
// compositor first, so they are not re-considered by later passes.
void VideoSink::RenderVideoFrames(Span<const RefPtr<VideoData>> aFrames,
                                  int64_t aClockTime,
                                  const TimeStamp& aClockTimeStamp) {
  AUTO_PROFILER_LABEL("VideoSink::RenderVideoFrames", MEDIA_PLAYBACK);
  AssertOwnerThread();

  if (aFrames.IsEmpty() || !mContainer) {
    return;
  }

  PROFILER_MARKER("VideoSink::RenderVideoFrames", MEDIA_PLAYBACK, {},
                  VideoSinkRenderMarker, aClockTime);

  AutoTArray<ImageContainer::NonOwningImage, 16> images;
  TimeStamp lastFrameTime;
  double playbackRate = mAudioSink->PlaybackRate();
  for (uint32_t i = 0; i < aFrames.Length(); ++i) {
    VideoData* frame = aFrames[i];
    // Remember the prior state: the PlayVideo marker below is only emitted
    // the first time a frame is sent.
    bool wasSent = frame->IsSentToCompositor();
    frame->MarkSentToCompositor();

    if (!frame->mImage || !frame->mImage->IsValid() ||
        !frame->mImage->GetSize().width || !frame->mImage->GetSize().height) {
      continue;
    }

    if (frame->mTime.IsNegative()) {
      // Frame times before the start time are invalid; drop such frames
      continue;
    }

    // Map media time to a compositor timestamp relative to the clock sample,
    // compressing/stretching by the playback rate.
    MOZ_ASSERT(!aClockTimeStamp.IsNull());
    int64_t delta = frame->mTime.ToMicroseconds() - aClockTime;
    TimeStamp t =
        aClockTimeStamp + TimeDuration::FromMicroseconds(delta / playbackRate);
    if (!lastFrameTime.IsNull() && t <= lastFrameTime) {
      // Timestamps out of order; drop the new frame. In theory we should
      // probably replace the previous frame with the new frame if the
      // timestamps are equal, but this is a corrupt video file already so
      // never mind.
      continue;
    }
    MOZ_ASSERT(!t.IsNull());
    lastFrameTime = t;

    ImageContainer::NonOwningImage* img = images.AppendElement();
    img->mTimeStamp = t;
    img->mImage = frame->mImage;
    // With the render-solid measurement pref, substitute the blank image.
    if (mBlankImage) {
      img->mImage = mBlankImage;
    }
    img->mFrameID = frame->mFrameID;
    img->mProducerID = mProducerID;
    img->mMediaTime = frame->mTime;

    VSINK_LOG_V("playing video frame %" PRId64
                " (id=%x, vq-queued=%zu, clock=%" PRId64 ")",
                frame->mTime.ToMicroseconds(), frame->mFrameID,
                VideoQueue().GetSize(), aClockTime);
    if (!wasSent) {
      PROFILER_MARKER("PlayVideo", MEDIA_PLAYBACK, {}, MediaSampleMarker,
                      frame->mTime.ToMicroseconds(),
                      frame->GetEndTime().ToMicroseconds(),
                      VideoQueue().GetSize());
    }
  }

  if (images.Length() > 0) {
    mContainer->SetCurrentFrames(aFrames[0]->mDisplay, images);

    if (mSecondaryContainer) {
      mSecondaryContainer->SetCurrentFrames(aFrames[0]->mDisplay, images);
    }
  }
}
    511 
// The render loop. Pops frames that have expired relative to the audio
// clock (accumulating drop statistics), sends the next batch of frames to
// the compositor, tries to resolve the end promise, and schedules the next
// run at the following frame's start time.
void VideoSink::UpdateRenderedVideoFrames() {
  AUTO_PROFILER_LABEL("VideoSink::UpdateRenderedVideoFrames", MEDIA_PLAYBACK);
  AssertOwnerThread();
  MOZ_ASSERT(mAudioSink->IsPlaying(), "should be called while playing.");

  // Get the current playback position.
  TimeStamp nowTime;
  const auto clockTime = mAudioSink->GetPosition(&nowTime);
  MOZ_ASSERT(!clockTime.IsNegative(), "Should have positive clock time.");

  uint32_t sentToCompositorCount = 0;
  uint32_t droppedInSink = 0;

  // Skip frames up to the playback position.
  // At least the last frame is retained, even when out of date, because it
  // will be used if no more frames are received before the queue finishes or
  // the video is paused.
  RefPtr<VideoData> lastExpiredFrameInCompositor;
  while (VideoQueue().GetSize() > 1 &&
         clockTime >= VideoQueue().PeekFront()->GetEndTime()) {
    RefPtr<VideoData> frame = VideoQueue().PopFront();
    if (frame->IsSentToCompositor()) {
      lastExpiredFrameInCompositor = frame;
      sentToCompositorCount++;
    } else {
      // Frame expired before it was ever sent: dropped in the sink. Track
      // the cumulative duration for the throttling heuristic below.
      droppedInSink++;
      mDroppedInSinkSequenceDuration += frame->mDuration;
      VSINK_LOG_V("discarding video frame mTime=%" PRId64
                  " clock_time=%" PRId64,
                  frame->mTime.ToMicroseconds(), clockTime.ToMicroseconds());

      // Local profiler marker type describing a frame dropped by the sink.
      struct VideoSinkDroppedFrameMarker {
        static constexpr Span<const char> MarkerTypeName() {
          return MakeStringSpan("VideoSinkDroppedFrame");
        }
        static void StreamJSONMarkerData(
            baseprofiler::SpliceableJSONWriter& aWriter,
            int64_t aSampleStartTimeUs, int64_t aSampleEndTimeUs,
            int64_t aClockTimeUs) {
          aWriter.IntProperty("sampleStartTimeUs", aSampleStartTimeUs);
          aWriter.IntProperty("sampleEndTimeUs", aSampleEndTimeUs);
          aWriter.IntProperty("clockTimeUs", aClockTimeUs);
        }
        static MarkerSchema MarkerTypeDisplay() {
          using MS = MarkerSchema;
          MS schema{MS::Location::MarkerChart, MS::Location::MarkerTable};
          schema.AddKeyLabelFormat("sampleStartTimeUs", "Sample start time",
                                   MS::Format::Microseconds);
          schema.AddKeyLabelFormat("sampleEndTimeUs", "Sample end time",
                                   MS::Format::Microseconds);
          schema.AddKeyLabelFormat("clockTimeUs", "Audio clock time",
                                   MS::Format::Microseconds);
          return schema;
        }
      };
      profiler_add_marker(
          "VideoSinkDroppedFrame", geckoprofiler::category::MEDIA_PLAYBACK, {},
          VideoSinkDroppedFrameMarker{}, frame->mTime.ToMicroseconds(),
          frame->GetEndTime().ToMicroseconds(), clockTime.ToMicroseconds());
    }
  }

  // Fold compositor-side drops into the frame statistics.
  // NOTE(review): mContainer is dereferenced here without the null check
  // used elsewhere in this file — confirm this path is unreachable when
  // mContainer is null.
  if (droppedInSink || sentToCompositorCount) {
    uint32_t totalCompositorDroppedCount = mContainer->GetDroppedImageCount();
    uint32_t droppedInCompositor =
        totalCompositorDroppedCount - mOldCompositorDroppedCount;
    if (droppedInCompositor > 0) {
      mOldCompositorDroppedCount = totalCompositorDroppedCount;
      VSINK_LOG_V("%u video frame previously discarded by compositor",
                  droppedInCompositor);
    }
    mPendingDroppedCount += droppedInCompositor;
    // A compositor drop cancels out a "presented" credit; carry any excess
    // forward in mPendingDroppedCount.
    uint32_t droppedReported = mPendingDroppedCount > sentToCompositorCount
                                   ? sentToCompositorCount
                                   : mPendingDroppedCount;
    mPendingDroppedCount -= droppedReported;

    mFrameStats.Accumulate({0, 0, sentToCompositorCount - droppedReported, 0,
                            droppedInSink, droppedInCompositor});
  }

  AutoTArray<RefPtr<VideoData>, 16> frames;
  RefPtr<VideoData> currentFrame = VideoQueue().PeekFront();
  if (currentFrame) {
    // The presentation end time of the last video frame consumed is the end
    // time of the current frame.
    mVideoFrameEndTime =
        std::max(mVideoFrameEndTime, currentFrame->GetEndTime());

    // Gecko doesn't support VideoPlaybackQuality.totalFrameDelay
    // (bug 962353), and so poor video quality from presenting frames late
    // would not be reported to content.  If frames are late, then throttle
    // the number of frames sent to the compositor, so that the
    // droppedVideoFrames are reported.  Perhaps the reduced number of frames
    // composited might free up some resources for decode.
    if (  // currentFrame is on time, or almost so, or
        currentFrame->GetEndTime() >= clockTime ||
        // there is only one frame in the VideoQueue() because the current
        // frame would have otherwise been removed above.  Send this frame if
        // it has already been sent to the compositor because it has not been
        // dropped and sending it again now, without any preceding frames, will
        // drop references to any preceding frames and update the intrinsic
        // size on the VideoFrameContainer.
        currentFrame->IsSentToCompositor() ||
        // Send this frame if its lateness is less than the duration that has
        // been skipped for throttling, or
        clockTime - currentFrame->GetEndTime() <
            mDroppedInSinkSequenceDuration ||
        // in a talos test for the compositor, which requires that the most
        // recently decoded frame is passed to the compositor so that the
        // compositor has something to composite during the talos test when the
        // decode is stressed.
        StaticPrefs::media_ruin_av_sync_enabled()) {
      mDroppedInSinkSequenceDuration = media::TimeUnit::Zero();
      VideoQueue().GetFirstElements(
          std::max(2u, mVideoQueueSendToCompositorSize), &frames);
    } else if (lastExpiredFrameInCompositor) {
      // Release references to all but the last frame passed to the
      // compositor.  Passing this frame to RenderVideoFrames() as the first
      // in frames also updates the intrinsic size on the VideoFrameContainer
      // to that of this frame.
      frames.AppendElement(lastExpiredFrameInCompositor);
    }
    RenderVideoFrames(Span(frames.Elements(),
                           std::min<size_t>(frames.Length(),
                                            mVideoQueueSendToCompositorSize)),
                      clockTime.ToMicroseconds(), nowTime);
  }

  MaybeResolveEndPromise();

  // Get the timestamp of the next frame. Schedule the next update at
  // the start time of the next frame. If we don't have a next frame,
  // we will run render loops again upon incoming frames.
  if (frames.Length() < 2) {
    return;
  }

  // Never schedule closer than MIN_UPDATE_INTERVAL_US; scale by the playback
  // rate so the wall-clock delay matches media time.
  int64_t nextFrameTime = frames[1]->mTime.ToMicroseconds();
  int64_t delta = std::max(nextFrameTime - clockTime.ToMicroseconds(),
                           MIN_UPDATE_INTERVAL_US);
  TimeStamp target = nowTime + TimeDuration::FromMicroseconds(
                                   delta / mAudioSink->PlaybackRate());

  RefPtr<VideoSink> self = this;
  mUpdateScheduler.Ensure(
      target, [self]() { self->UpdateRenderedVideoFramesByTimer(); },
      [self]() { self->UpdateRenderedVideoFramesByTimer(); });
}
    661 
// Resolve mEndPromise once the video queue is finished and (at most) the
// final frame remains. The final frame is rendered one last time, and if the
// clock has not yet reached the last frame's end time the resolution is
// deferred via the update scheduler.
void VideoSink::MaybeResolveEndPromise() {
  AssertOwnerThread();
  // All frames are rendered, Let's resolve the promise.
  if (VideoQueue().IsFinished() && VideoQueue().GetSize() <= 1 &&
      !mVideoSinkEndRequest.Exists()) {
    TimeStamp nowTime;
    const auto clockTime = mAudioSink->GetPosition(&nowTime);

    if (VideoQueue().GetSize() == 1) {
      // The last frame is no longer required in the VideoQueue().
      RefPtr<VideoData> frame = VideoQueue().PopFront();
      // Ensure that the last frame and its dimensions have been set on the
      // VideoFrameContainer, even if the frame was decoded late.  This also
      // removes references to any other frames currently held by the
      // VideoFrameContainer.
      RenderVideoFrames(Span(&frame, 1), clockTime.ToMicroseconds(), nowTime);
      // Account for this frame: it either absorbs one pending compositor
      // drop or counts as presented.
      if (mPendingDroppedCount > 0) {
        mFrameStats.Accumulate({0, 0, 0, 0, 0, 1});
        mPendingDroppedCount--;
      } else {
        mFrameStats.NotifyPresentedFrame();
      }
    }

    if (clockTime < mVideoFrameEndTime) {
      VSINK_LOG_V(
          "Not reach video end time yet, reschedule timer to resolve "
          "end promise. clockTime=%" PRId64 ", endTime=%" PRId64,
          clockTime.ToMicroseconds(), mVideoFrameEndTime.ToMicroseconds());
      // Wait out the remaining media time (scaled by playback rate) before
      // resolving.
      int64_t delta = (mVideoFrameEndTime - clockTime).ToMicroseconds() /
                      mAudioSink->PlaybackRate();
      TimeStamp target = nowTime + TimeDuration::FromMicroseconds(delta);
      auto resolveEndPromise = [self = RefPtr<VideoSink>(this)]() {
        self->mEndPromiseHolder.ResolveIfExists(true, __func__);
        self->mUpdateScheduler.CompleteRequest();
      };
      // NOTE(review): resolveEndPromise is std::move'd into both arguments;
      // this is only safe if Ensure() takes them by forwarding reference and
      // materializes at most one — confirm against DelayedScheduler::Ensure.
      mUpdateScheduler.Ensure(target, std::move(resolveEndPromise),
                              std::move(resolveEndPromise));
    } else {
      mEndPromiseHolder.ResolveIfExists(true, __func__);
    }
  }
}
    705 
// Install (or clear, by passing null) the secondary output container.
// When replacing an existing secondary container its images are cleared,
// and if playback is currently paused the main container's current frame is
// copied into the new container so it does not start out blank.
void VideoSink::SetSecondaryVideoContainer(VideoFrameContainer* aSecondary) {
  AssertOwnerThread();
  // Clear all images of secondary ImageContainer, when it is removed from
  // VideoSink.
  if (mSecondaryContainer && aSecondary != mSecondaryContainer) {
    ImageContainer* secondaryImageContainer =
        mSecondaryContainer->GetImageContainer();
    secondaryImageContainer->ClearImagesInHost(layers::ClearImagesType::All);
  }
  mSecondaryContainer = aSecondary;
  if (!IsPlaying() && mSecondaryContainer) {
    ImageContainer* mainImageContainer = mContainer->GetImageContainer();
    ImageContainer* secondaryImageContainer =
        mSecondaryContainer->GetImageContainer();
    MOZ_DIAGNOSTIC_ASSERT(mainImageContainer);
    MOZ_DIAGNOSTIC_ASSERT(secondaryImageContainer);

    // If the video isn't currently playing, get the current frame and display
    // that in the secondary container as well.
    AutoLockImage lockImage(mainImageContainer);
    TimeStamp now = TimeStamp::Now();
    if (const auto* owningImage = lockImage.GetOwningImage(now)) {
      AutoTArray<ImageContainer::NonOwningImage, 1> currentFrame;
      // Re-wrap the main container's image under a fresh producer ID so the
      // secondary container treats it as a new stream of images.
      currentFrame.AppendElement(ImageContainer::NonOwningImage(
          owningImage->mImage, now, /* frameID */ 1,
          /* producerId */ ImageContainer::AllocateProducerID(),
          owningImage->mProcessingDuration, owningImage->mMediaTime,
          owningImage->mWebrtcCaptureTime, owningImage->mWebrtcReceiveTime,
          owningImage->mRtpTimestamp));
      secondaryImageContainer->SetCurrentImages(currentFrame);
    }
  }
}
    739 
// Populate aInfo with a snapshot of the sink's state for debugging
// (e.g. about:media). Also lets the wrapped audio sink fill in its section.
void VideoSink::GetDebugInfo(dom::MediaSinkDebugInfo& aInfo) {
  AssertOwnerThread();
  aInfo.mVideoSink.mIsStarted = IsStarted();
  aInfo.mVideoSink.mIsPlaying = IsPlaying();
  aInfo.mVideoSink.mFinished = VideoQueue().IsFinished();
  aInfo.mVideoSink.mSize = VideoQueue().GetSize();
  aInfo.mVideoSink.mVideoFrameEndTime = mVideoFrameEndTime.ToMicroseconds();
  aInfo.mVideoSink.mHasVideo = mHasVideo;
  aInfo.mVideoSink.mVideoSinkEndRequestExists = mVideoSinkEndRequest.Exists();
  aInfo.mVideoSink.mEndPromiseHolderIsEmpty = mEndPromiseHolder.IsEmpty();
  mAudioSink->GetDebugInfo(aInfo);
}
    752 
    753 bool VideoSink::InitializeBlankImage() {
    754  mBlankImage = mContainer->GetImageContainer()->CreatePlanarYCbCrImage();
    755  if (mBlankImage == nullptr) {
    756    return false;
    757  }
    758  SetImageToGreenPixel(mBlankImage->AsPlanarYCbCrImage());
    759  return true;
    760 }
    761 
    762 }  // namespace mozilla