tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

HTMLVideoElement.cpp (34096B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "mozilla/dom/HTMLVideoElement.h"
      8 
      9 #include "mozilla/AppShutdown.h"
     10 #include "mozilla/AsyncEventDispatcher.h"
     11 #include "mozilla/dom/HTMLVideoElementBinding.h"
     12 #ifdef MOZ_WEBRTC
     13 #  include "mozilla/dom/RTCStatsReport.h"
     14 #endif
     15 #include <algorithm>
     16 #include <limits>
     17 
     18 #include "FrameStatistics.h"
     19 #include "ImageContainer.h"
     20 #include "MediaDecoder.h"
     21 #include "MediaDecoderStateMachine.h"
     22 #include "MediaError.h"
     23 #include "VideoFrameContainer.h"
     24 #include "VideoOutput.h"
     25 #include "mozilla/Preferences.h"
     26 #include "mozilla/StaticPrefs_media.h"
     27 #include "mozilla/dom/Performance.h"
     28 #include "mozilla/dom/TimeRanges.h"
     29 #include "mozilla/dom/VideoPlaybackQuality.h"
     30 #include "mozilla/dom/VideoStreamTrack.h"
     31 #include "mozilla/dom/WakeLock.h"
     32 #include "mozilla/dom/power/PowerManagerService.h"
     33 #include "mozilla/gfx/DataSurfaceHelpers.h"
     34 #include "nsError.h"
     35 #include "nsGenericHTMLElement.h"
     36 #include "nsGkAtoms.h"
     37 #include "nsIHttpChannel.h"
     38 #include "nsNodeInfoManager.h"
     39 #include "nsRFPService.h"
     40 #include "nsSize.h"
     41 #include "nsThreadUtils.h"
     42 #include "plbase64.h"
     43 #include "prlock.h"
     44 
     45 extern mozilla::LazyLogModule gMediaElementLog;
     46 #define LOG(msg, ...)                        \
     47  MOZ_LOG(gMediaElementLog, LogLevel::Debug, \
     48          ("HTMLVideoElement=%p, " msg, this, ##__VA_ARGS__))
     49 
     50 nsGenericHTMLElement* NS_NewHTMLVideoElement(
     51    already_AddRefed<mozilla::dom::NodeInfo>&& aNodeInfo,
     52    mozilla::dom::FromParser aFromParser) {
     53  RefPtr<mozilla::dom::NodeInfo> nodeInfo(aNodeInfo);
     54  auto* nim = nodeInfo->NodeInfoManager();
     55  mozilla::dom::HTMLVideoElement* element =
     56      new (nim) mozilla::dom::HTMLVideoElement(nodeInfo.forget());
     57  element->Init();
     58  return element;
     59 }
     60 
     61 namespace mozilla::dom {
     62 
// Creates a copy of this element for aNodeInfo's document (DOM cloneNode
// path). Attribute/state payload is copied by CopyInnerTo(); on failure the
// new node is released when kungFuDeathGrip goes out of scope.
nsresult HTMLVideoElement::Clone(mozilla::dom::NodeInfo* aNodeInfo,
                                 nsINode** aResult) const {
  *aResult = nullptr;
  RefPtr<mozilla::dom::NodeInfo> ni(aNodeInfo);
  auto* nim = ni->NodeInfoManager();
  HTMLVideoElement* it = new (nim) HTMLVideoElement(ni.forget());
  it->Init();
  // Hold the new element alive across CopyInnerTo; ownership is only
  // transferred to *aResult on success.
  nsCOMPtr<nsINode> kungFuDeathGrip = it;
  nsresult rv = const_cast<HTMLVideoElement*>(this)->CopyInnerTo(it);
  if (NS_SUCCEEDED(rv)) {
    kungFuDeathGrip.swap(*aResult);
  }
  return rv;
}
     77 
// Copies this element's inner state to aDest. For clones made into a static
// document (print/print-preview), additionally copies the current video
// frame so the printed page shows the frame instead of a blank box.
// Returns the base-class result; frame-copy failures are logged and
// tolerated (printing proceeds without a frame).
nsresult HTMLVideoElement::CopyInnerTo(Element* aDest) {
  nsresult rv = HTMLMediaElement::CopyInnerTo(aDest);
  NS_ENSURE_SUCCESS(rv, rv);
  HTMLVideoElement* dest = static_cast<HTMLVideoElement*>(aDest);

  // Cloning into a static document indicates we are creating a clone for
  // printing purposes only.
  //
  // If we are making a clone for printing, also clone a frame of video.
  // Avoid using GetVideoFrameContainer on this object, because that may
  // create a new video container for no reason.
  if (aDest->OwnerDoc()->IsStaticDocument() && mVideoFrameContainer) {
    // We can expect the source video to have frames, unless the decoder
    // has been suspended. When that happens, all frames are cleared.
    // Otherwise, even if the media is not seekable, once a frame is
    // decoded there should always be images available.
    //
    // When the decoder is suspended, we will only get fake frames.
    // This situation is similar to when JS needs a frame to use in,
    // eg., nsLayoutUtils::SurfaceFromElement() via drawImage().
    //
    // TODO: As an alternative, we could asynchronously resume decoding
    // and dispatch an event to copy an image when that succeeds.
    //
    // See bug 1295921 for synchronous decoding support:
    // https://bugzilla.mozilla.org/show_bug.cgi?id=1295921#c208
    AutoTArray<ImageContainer::OwningImage, 10> images;
    mVideoFrameContainer->GetImageContainer()->GetCurrentImages(&images);
    if (images.IsEmpty()) {
      LOG("no video images, printing with a suspended video decoder?");
      return rv;
    }

    // Ask for the video frame container only after we know we have a current
    // image from the source.
    // GetVideoFrameContainer might create a new video frame container,
    // which would be pointless if we won't be able to create any video data.
    VideoFrameContainer* const dstVideo = dest->GetVideoFrameContainer();
    NS_ENSURE_TRUE(dstVideo, rv);

    // Make a copy of the first image. This ensures we don't hold onto any
    // output buffers from a decoder while print preview is open.
    RefPtr<gfx::DataSourceSurface> dstSurface(CopyImage(images[0].mImage));
    if (!dstSurface) {
      MOZ_LOG(gMediaElementLog, LogLevel::Error,
              ("failed to copy video image"));
      return rv;
    }
    RefPtr<layers::SourceSurfaceImage> dstImage =
        MakeAndAddRef<layers::SourceSurfaceImage>(dstSurface.get());

    // Invalid time units: the printed clone has no playing timeline.
    dstVideo->SetCurrentFrame(dstImage->GetSize(), dstImage, TimeStamp(),
                              media::TimeUnit::Invalid(),
                              media::TimeUnit::Invalid());
  }
  return rv;
}
    135 
NS_IMPL_ISUPPORTS_CYCLE_COLLECTION_INHERITED_0(HTMLVideoElement,
                                               HTMLMediaElement)

NS_IMPL_CYCLE_COLLECTION_CLASS(HTMLVideoElement)

// Cycle collection: unlink/traverse the strong references this subclass
// holds in addition to HTMLMediaElement's.
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(HTMLVideoElement)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVideoFrameRequestManager)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVisualCloneTarget)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVisualCloneTargetPromise)
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mVisualCloneSource)
  // mSecondaryVideoOutput is not traversed by the macros; drop it manually
  // during unlink so it cannot keep this element alive.
  tmp->mSecondaryVideoOutput = nullptr;
NS_IMPL_CYCLE_COLLECTION_UNLINK_END_INHERITED(HTMLMediaElement)

NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN_INHERITED(HTMLVideoElement,
                                                  HTMLMediaElement)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVideoFrameRequestManager)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVisualCloneTarget)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVisualCloneTargetPromise)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mVisualCloneSource)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END
    156 
// The watch manager is bound to the main thread; it drives the
// secondary-output first-frame watch used for visual cloning.
HTMLVideoElement::HTMLVideoElement(already_AddRefed<NodeInfo>&& aNodeInfo)
    : HTMLMediaElement(std::move(aNodeInfo)),
      mVideoWatchManager(this, AbstractThread::MainThread()) {
  DecoderDoctorLogger::LogConstruction(this);
}
    162 
HTMLVideoElement::~HTMLVideoElement() {
  // Shut down the watch manager first so no watch callbacks fire on a
  // partially-destroyed element.
  mVideoWatchManager.Shutdown();
  DecoderDoctorLogger::LogDestruction(this);
}
    167 
    168 void HTMLVideoElement::UpdateMediaSize(const nsIntSize& aSize) {
    169  HTMLMediaElement::UpdateMediaSize(aSize);
    170  // If we have a clone target, we should update its size as well.
    171  if (mVisualCloneTarget) {
    172    Maybe<nsIntSize> newSize = Some(aSize);
    173    mVisualCloneTarget->Invalidate(ImageSizeChanged::Yes, newSize,
    174                                   ForceInvalidate::Yes);
    175  }
    176 }
    177 
    178 Maybe<CSSIntSize> HTMLVideoElement::GetVideoSize() const {
    179  if (!mMediaInfo.HasVideo()) {
    180    return Nothing();
    181  }
    182 
    183  if (mDisableVideo) {
    184    return Nothing();
    185  }
    186 
    187  CSSIntSize size;
    188  switch (mMediaInfo.mVideo.mRotation) {
    189    case VideoRotation::kDegree_90:
    190    case VideoRotation::kDegree_270: {
    191      size.width = mMediaInfo.mVideo.mDisplay.height;
    192      size.height = mMediaInfo.mVideo.mDisplay.width;
    193      break;
    194    }
    195    case VideoRotation::kDegree_0:
    196    case VideoRotation::kDegree_180:
    197    default: {
    198      size.height = mMediaInfo.mVideo.mDisplay.height;
    199      size.width = mMediaInfo.mVideo.mDisplay.width;
    200      break;
    201    }
    202  }
    203  return Some(size);
    204 }
    205 
// Invalidates this element's rendering, forwards the invalidation to the
// visual clone target's frame container (if cloning), and schedules
// requestVideoFrameCallback() callbacks when a current image exists.
void HTMLVideoElement::Invalidate(ImageSizeChanged aImageSizeChanged,
                                  const Maybe<nsIntSize>& aNewIntrinsicSize,
                                  ForceInvalidate aForceInvalidate) {
  HTMLMediaElement::Invalidate(aImageSizeChanged, aNewIntrinsicSize,
                               aForceInvalidate);
  if (mVisualCloneTarget) {
    VideoFrameContainer* container =
        mVisualCloneTarget->GetVideoFrameContainer();
    if (container) {
      container->Invalidate();
    }
  }

  // No registered video-frame callbacks: nothing more to schedule.
  if (mVideoFrameRequestManager.IsEmpty()) {
    return;
  }

  // Only schedule callbacks when there is actually a frame to report.
  if (RefPtr<ImageContainer> imageContainer = GetImageContainer()) {
    if (imageContainer->HasCurrentImage()) {
      OwnerDoc()->ScheduleVideoFrameCallbacks(this);
    }
  }
}
    229 
    230 bool HTMLVideoElement::ParseAttribute(int32_t aNamespaceID, nsAtom* aAttribute,
    231                                      const nsAString& aValue,
    232                                      nsIPrincipal* aMaybeScriptedPrincipal,
    233                                      nsAttrValue& aResult) {
    234  if (aAttribute == nsGkAtoms::width || aAttribute == nsGkAtoms::height) {
    235    return aResult.ParseHTMLDimension(aValue);
    236  }
    237 
    238  return HTMLMediaElement::ParseAttribute(aNamespaceID, aAttribute, aValue,
    239                                          aMaybeScriptedPrincipal, aResult);
    240 }
    241 
// Maps presentation attributes (width/height, with aspect-ratio mapping)
// plus the common HTML attributes into style declarations.
void HTMLVideoElement::MapAttributesIntoRule(
    MappedDeclarationsBuilder& aBuilder) {
  MapImageSizeAttributesInto(aBuilder, MapAspectRatio::Yes);
  MapCommonAttributesInto(aBuilder);
}
    247 
// Reports which attributes participate in attribute->style mapping; must
// stay in sync with MapAttributesIntoRule above.
NS_IMETHODIMP_(bool)
HTMLVideoElement::IsAttributeMapped(const nsAtom* aAttribute) const {
  static const MappedAttributeEntry attributes[] = {
      {nsGkAtoms::width}, {nsGkAtoms::height}, {nullptr}};

  static const MappedAttributeEntry* const map[] = {attributes,
                                                    sCommonAttributeMap};

  return FindAttributeDependence(aAttribute, map);
}
    258 
// Returns the function the style system uses to map this element's
// presentation attributes into rules.
nsMapRuleToAttributesFunc HTMLVideoElement::GetAttributeMappingFunction()
    const {
  return &MapAttributesIntoRule;
}
    263 
// Tears down any visual-clone relationship before unbinding: a clone source
// ends its own cloning; a clone target notifies chrome (PiP UI) that
// cloning must stop, then ends it. Base-class unbind runs last.
void HTMLVideoElement::UnbindFromTree(UnbindContext& aContext) {
  if (mVisualCloneSource) {
    mVisualCloneSource->EndCloningVisually();
  } else if (mVisualCloneTarget) {
    AsyncEventDispatcher::RunDOMEventWhenSafe(
        *this, u"MozStopPictureInPicture"_ns, CanBubble::eNo,
        ChromeOnlyDispatch::eYes);
    EndCloningVisually();
  }

  HTMLMediaElement::UnbindFromTree(aContext);
}
    276 
    277 nsresult HTMLVideoElement::SetAcceptHeader(nsIHttpChannel* aChannel) {
    278  nsAutoCString value(
    279      "video/webm,"
    280      "video/ogg,"
    281      "video/*;q=0.9,"
    282      "application/ogg;q=0.7,"
    283      "audio/*;q=0.6,*/*;q=0.5");
    284 
    285  return aChannel->SetRequestHeader("Accept"_ns, value, false);
    286 }
    287 
    288 bool HTMLVideoElement::IsInteractiveHTMLContent() const {
    289  return HasAttr(nsGkAtoms::controls) ||
    290         HTMLMediaElement::IsInteractiveHTMLContent();
    291 }
    292 
    293 gfx::IntSize HTMLVideoElement::GetVideoIntrinsicDimensions() {
    294  const auto& sz = mMediaInfo.mVideo.mDisplay;
    295 
    296  // Prefer the size of the container as it's more up to date.
    297  return ToMaybeRef(mVideoFrameContainer.get())
    298      .map([&](auto& aVFC) { return aVFC.CurrentIntrinsicSize().valueOr(sz); })
    299      .valueOr(sz);
    300 }
    301 
    302 uint32_t HTMLVideoElement::VideoWidth() {
    303  if (!HasVideo()) {
    304    return 0;
    305  }
    306  gfx::IntSize size = GetVideoIntrinsicDimensions();
    307  if (mMediaInfo.mVideo.mRotation == VideoRotation::kDegree_90 ||
    308      mMediaInfo.mVideo.mRotation == VideoRotation::kDegree_270) {
    309    return size.height;
    310  }
    311  return size.width;
    312 }
    313 
    314 uint32_t HTMLVideoElement::VideoHeight() {
    315  if (!HasVideo()) {
    316    return 0;
    317  }
    318  gfx::IntSize size = GetVideoIntrinsicDimensions();
    319  if (mMediaInfo.mVideo.mRotation == VideoRotation::kDegree_90 ||
    320      mMediaInfo.mVideo.mRotation == VideoRotation::kDegree_270) {
    321    return size.width;
    322  }
    323  return size.height;
    324 }
    325 
    326 uint32_t HTMLVideoElement::MozParsedFrames() const {
    327  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
    328  if (!IsVideoStatsEnabled()) {
    329    return 0;
    330  }
    331 
    332  if (OwnerDoc()->ShouldResistFingerprinting(
    333          RFPTarget::VideoElementMozFrames)) {
    334    return nsRFPService::GetSpoofedTotalFrames(TotalPlayTime());
    335  }
    336 
    337  return mDecoder ? mDecoder->GetFrameStatistics().GetParsedFrames() : 0;
    338 }
    339 
    340 uint32_t HTMLVideoElement::MozDecodedFrames() const {
    341  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
    342  if (!IsVideoStatsEnabled()) {
    343    return 0;
    344  }
    345 
    346  if (OwnerDoc()->ShouldResistFingerprinting(
    347          RFPTarget::VideoElementMozFrames)) {
    348    return nsRFPService::GetSpoofedTotalFrames(TotalPlayTime());
    349  }
    350 
    351  return mDecoder ? mDecoder->GetFrameStatistics().GetDecodedFrames() : 0;
    352 }
    353 
    354 uint32_t HTMLVideoElement::MozPresentedFrames() {
    355  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
    356  if (!IsVideoStatsEnabled()) {
    357    return 0;
    358  }
    359 
    360  if (OwnerDoc()->ShouldResistFingerprinting(
    361          RFPTarget::VideoElementMozFrames)) {
    362    return nsRFPService::GetSpoofedPresentedFrames(TotalPlayTime(),
    363                                                   VideoWidth(), VideoHeight());
    364  }
    365 
    366  return mDecoder ? mDecoder->GetFrameStatistics().GetPresentedFrames() : 0;
    367 }
    368 
    369 uint32_t HTMLVideoElement::MozPaintedFrames() {
    370  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
    371  if (!IsVideoStatsEnabled()) {
    372    return 0;
    373  }
    374 
    375  if (OwnerDoc()->ShouldResistFingerprinting(
    376          RFPTarget::VideoElementMozFrames)) {
    377    return nsRFPService::GetSpoofedPresentedFrames(TotalPlayTime(),
    378                                                   VideoWidth(), VideoHeight());
    379  }
    380 
    381  layers::ImageContainer* container = GetImageContainer();
    382  return container ? container->GetPaintCount() : 0;
    383 }
    384 
    385 double HTMLVideoElement::MozFrameDelay() {
    386  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
    387 
    388  if (!IsVideoStatsEnabled() || OwnerDoc()->ShouldResistFingerprinting(
    389                                    RFPTarget::VideoElementMozFrameDelay)) {
    390    return 0.0;
    391  }
    392 
    393  VideoFrameContainer* container = GetVideoFrameContainer();
    394  // Hide negative delays. Frame timing tweaks in the compositor (e.g.
    395  // adding a bias value to prevent multiple dropped/duped frames when
    396  // frame times are aligned with composition times) may produce apparent
    397  // negative delay, but we shouldn't report that.
    398  return container ? std::max(0.0, container->GetFrameDelay()) : 0.0;
    399 }
    400 
// Chrome-only accessor: whether the loaded media has an audio track.
bool HTMLVideoElement::MozHasAudio() const {
  MOZ_ASSERT(NS_IsMainThread(), "Should be on main thread.");
  return HasAudio();
}
    405 
// WebIDL glue: wraps this native object with its JS reflector.
JSObject* HTMLVideoElement::WrapNode(JSContext* aCx,
                                     JS::Handle<JSObject*> aGivenProto) {
  return HTMLVideoElement_Binding::Wrap(aCx, this, aGivenProto);
}
    410 
// Implements HTMLVideoElement.getVideoPlaybackQuality(): returns creation
// time plus total/dropped frame counts. Counts are zero when video stats
// are pref-disabled, spoofed under fingerprinting resistance, and scaled
// down proportionally if the 64-bit totals do not fit in 32 bits.
already_AddRefed<VideoPlaybackQuality>
HTMLVideoElement::GetVideoPlaybackQuality() {
  DOMHighResTimeStamp creationTime = 0;
  uint32_t totalFrames = 0;
  uint32_t droppedFrames = 0;

  if (IsVideoStatsEnabled()) {
    if (nsPIDOMWindowInner* window = OwnerDoc()->GetInnerWindow()) {
      Performance* perf = window->GetPerformance();
      if (perf) {
        creationTime = perf->Now();
      }
    }

    if (mDecoder) {
      if (OwnerDoc()->ShouldResistFingerprinting(
              RFPTarget::VideoElementPlaybackQuality)) {
        totalFrames = nsRFPService::GetSpoofedTotalFrames(TotalPlayTime());
        droppedFrames = nsRFPService::GetSpoofedDroppedFrames(
            TotalPlayTime(), VideoWidth(), VideoHeight());
      } else {
        FrameStatistics* stats = &mDecoder->GetFrameStatistics();
        // If the stats counters are no wider than our 32-bit outputs,
        // assign directly; otherwise clamp/scale below.
        if (sizeof(totalFrames) >= sizeof(stats->GetParsedFrames())) {
          totalFrames = stats->GetTotalFrames();
          droppedFrames = stats->GetDroppedFrames();
        } else {
          uint64_t total = stats->GetTotalFrames();
          const auto maxNumber = std::numeric_limits<uint32_t>::max();
          if (total <= maxNumber) {
            totalFrames = uint32_t(total);
            droppedFrames = uint32_t(stats->GetDroppedFrames());
          } else {
            // Too big number(s) -> Resize everything to fit in 32 bits.
            double ratio = double(maxNumber) / double(total);
            totalFrames = maxNumber;  // === total * ratio
            droppedFrames = uint32_t(double(stats->GetDroppedFrames()) * ratio);
          }
        }
      }
      // Dropped-frame reporting can be pref-disabled independently.
      if (!StaticPrefs::media_video_dropped_frame_stats_enabled()) {
        droppedFrames = 0;
      }
    }
  }

  RefPtr<VideoPlaybackQuality> playbackQuality =
      new VideoPlaybackQuality(this, creationTime, totalFrames, droppedFrames);
  return playbackQuality.forget();
}
    460 
// Releases both the base media-element wake lock and this element's
// video-specific screen wake lock.
void HTMLVideoElement::WakeLockRelease() {
  HTMLMediaElement::WakeLockRelease();
  ReleaseVideoWakeLockIfExists();
}
    465 
    466 void HTMLVideoElement::UpdateWakeLock() {
    467  HTMLMediaElement::UpdateWakeLock();
    468  if (!mPaused) {
    469    CreateVideoWakeLockIfNeeded();
    470  } else {
    471    ReleaseVideoWakeLockIfExists();
    472  }
    473 }
    474 
    475 bool HTMLVideoElement::ShouldCreateVideoWakeLock() const {
    476  if (!StaticPrefs::media_video_wakelock()) {
    477    return false;
    478  }
    479  // Only request wake lock for video with audio or video from media
    480  // stream, because non-stream video without audio is often used as a
    481  // background image.
    482  //
    483  // Some web conferencing sites route audio outside the video element,
    484  // and would not be detected unless we check for media stream, so do
    485  // that below.
    486  //
    487  // Media streams generally aren't used as background images, though if
    488  // they were we'd get false positives. If this is an issue, we could
    489  // check for media stream AND document has audio playing (but that was
    490  // tricky to do).
    491  return HasVideo() && (mSrcStream || HasAudio());
    492 }
    493 
// Acquires a "video-playing" screen wake lock when policy allows and one
// isn't already held. No-op during app shutdown (the power manager service
// may be gone) or if the service can't be obtained.
void HTMLVideoElement::CreateVideoWakeLockIfNeeded() {
  if (AppShutdown::IsInOrBeyond(ShutdownPhase::AppShutdownConfirmed)) {
    return;
  }
  if (!mScreenWakeLock && ShouldCreateVideoWakeLock()) {
    RefPtr<power::PowerManagerService> pmService =
        power::PowerManagerService::GetInstance();
    NS_ENSURE_TRUE_VOID(pmService);

    // NOTE(review): rv from NewWakeLock is not inspected; a failure leaves
    // mScreenWakeLock null and we simply retry on the next update.
    ErrorResult rv;
    mScreenWakeLock = pmService->NewWakeLock(u"video-playing"_ns,
                                             OwnerDoc()->GetInnerWindow(), rv);
  }
}
    508 
    509 void HTMLVideoElement::ReleaseVideoWakeLockIfExists() {
    510  if (mScreenWakeLock) {
    511    ErrorResult rv;
    512    mScreenWakeLock->Unlock(rv);
    513    rv.SuppressException();
    514    mScreenWakeLock = nullptr;
    515    return;
    516  }
    517 }
    518 
// Sets (or clears, when null) the element this video visually clones to.
// Returns true on success. Rejects a target that isn't in the composed
// document, and rejects setting a target on an element that is itself a
// clone. The promise resolves once the secondary container is installed.
bool HTMLVideoElement::SetVisualCloneTarget(
    RefPtr<HTMLVideoElement> aVisualCloneTarget,
    RefPtr<Promise> aVisualCloneTargetPromise) {
  MOZ_DIAGNOSTIC_ASSERT(
      !aVisualCloneTarget || aVisualCloneTarget->IsInComposedDoc(),
      "Can't set the clone target to a disconnected video "
      "element.");
  MOZ_DIAGNOSTIC_ASSERT(!mVisualCloneSource,
                        "Can't clone a video element that is already a clone.");
  // Clearing (null target) is always allowed; setting requires a connected
  // target and that we aren't a clone ourselves.
  if (!aVisualCloneTarget ||
      (aVisualCloneTarget->IsInComposedDoc() && !mVisualCloneSource)) {
    mVisualCloneTarget = std::move(aVisualCloneTarget);
    mVisualCloneTargetPromise = std::move(aVisualCloneTargetPromise);
    return true;
  }
  return false;
}
    536 
// Sets (or clears, when null) the element this video is a visual clone of.
// Returns true on success. Rejects a disconnected source, and rejects
// becoming a clone while we are ourselves a clone source.
bool HTMLVideoElement::SetVisualCloneSource(
    RefPtr<HTMLVideoElement> aVisualCloneSource) {
  MOZ_DIAGNOSTIC_ASSERT(
      !aVisualCloneSource || aVisualCloneSource->IsInComposedDoc(),
      "Can't set the clone source to a disconnected video "
      "element.");
  MOZ_DIAGNOSTIC_ASSERT(!mVisualCloneTarget,
                        "Can't clone a video element that is already a "
                        "clone.");
  // Clearing (null source) is always allowed; setting requires a connected
  // source and that we aren't already cloning to a target.
  if (!aVisualCloneSource ||
      (aVisualCloneSource->IsInComposedDoc() && !mVisualCloneTarget)) {
    mVisualCloneSource = std::move(aVisualCloneSource);
    return true;
  }
  return false;
}
    553 
/* static */
// Deep-copies a layers::Image into a standalone data surface so the caller
// does not retain decoder output buffers. Returns null if the image cannot
// be read back (no source surface, no data surface, or mapping fails).
already_AddRefed<gfx::DataSourceSurface> HTMLVideoElement::CopyImage(
    layers::Image* aImage) {
  RefPtr<gfx::SourceSurface> surface = aImage->GetAsSourceSurface();
  if (!surface) {
    return nullptr;
  }

  RefPtr<gfx::DataSourceSurface> data = surface->GetDataSurface();
  if (!data) {
    return nullptr;
  }

  // ScopedMap keeps the pixels mapped (read-only) for the copy below and
  // unmaps on scope exit.
  gfx::DataSourceSurface::ScopedMap read(data, gfx::DataSourceSurface::READ);
  if (!read.IsMapped()) {
    return nullptr;
  }

  RefPtr<gfx::DataSourceSurface> copy = gfx::CreateDataSourceSurfaceFromData(
      data->GetSize(), data->GetFormat(), read.GetData(), read.GetStride());

  MOZ_ASSERT_IF(copy, data->GetSize() == copy->GetSize());
  MOZ_ASSERT_IF(copy, data->GetFormat() == copy->GetFormat());
  return copy.forget();
}
    579 
/* static */
// Whether the media.video_stats.enabled pref allows exposing frame
// statistics (moz* counters and getVideoPlaybackQuality data).
bool HTMLVideoElement::IsVideoStatsEnabled() {
  return StaticPrefs::media_video_stats_enabled();
}
    584 
    585 double HTMLVideoElement::TotalPlayTime() const {
    586  double total = 0.0;
    587 
    588  if (mPlayed) {
    589    uint32_t timeRangeCount = mPlayed->Length();
    590 
    591    for (uint32_t i = 0; i < timeRangeCount; i++) {
    592      double begin = mPlayed->Start(i);
    593      double end = mPlayed->End(i);
    594      total += end - begin;
    595    }
    596 
    597    if (mCurrentPlayRangeStart != -1.0) {
    598      double now = CurrentTime();
    599      if (mCurrentPlayRangeStart != now) {
    600        total += now - mCurrentPlayRangeStart;
    601      }
    602    }
    603  }
    604 
    605  return total;
    606 }
    607 
// Chrome-only entry point for Picture-in-Picture: mirrors this video's
// frames onto aTargetVideo. Returns a promise that resolves once the
// secondary frame container is installed, or throws if either element is
// disconnected or the clone links can't be established.
already_AddRefed<Promise> HTMLVideoElement::CloneElementVisually(
    HTMLVideoElement& aTargetVideo, ErrorResult& aRv) {
  MOZ_ASSERT(IsInComposedDoc(),
             "Can't clone a video that's not bound to a DOM tree.");
  MOZ_ASSERT(aTargetVideo.IsInComposedDoc(),
             "Can't clone to a video that's not bound to a DOM tree.");
  if (!IsInComposedDoc() || !aTargetVideo.IsInComposedDoc()) {
    aRv.Throw(NS_ERROR_UNEXPECTED);
    return nullptr;
  }

  nsPIDOMWindowInner* win = OwnerDoc()->GetInnerWindow();
  if (!win) {
    aRv.Throw(NS_ERROR_UNEXPECTED);
    return nullptr;
  }

  RefPtr<Promise> promise = Promise::Create(win->AsGlobal(), aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  // Do we already have a visual clone target? If so, shut it down.
  if (mVisualCloneTarget) {
    EndCloningVisually();
  }

  // If there's a poster set on the target video, clear it, otherwise
  // it'll display over top of the cloned frames.
  aTargetVideo.UnsetHTMLAttr(nsGkAtoms::poster, aRv);
  if (aRv.Failed()) {
    return nullptr;
  }

  if (!SetVisualCloneTarget(&aTargetVideo, promise)) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Link the reverse edge; roll back our side if the target refuses.
  if (!aTargetVideo.SetVisualCloneSource(this)) {
    mVisualCloneTarget = nullptr;
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  aTargetVideo.SetMediaInfo(mMediaInfo);

  // Update the UA widget (controls) unless tests opt out.
  if (IsInComposedDoc() && !StaticPrefs::media_cloneElementVisually_testing()) {
    NotifyUAWidgetSetupOrChange();
  }

  MaybeBeginCloningVisually();

  return promise.forget();
}
    663 
    664 void HTMLVideoElement::StopCloningElementVisually() {
    665  if (mVisualCloneTarget) {
    666    EndCloningVisually();
    667  }
    668 }
    669 
// Starts feeding frames to the clone target once one is set: decoder-backed
// playback gets a secondary video container; MediaStream-backed playback
// gets a FirstFrameVideoOutput whose first-frame signal we watch so the
// clone promise can resolve.
void HTMLVideoElement::MaybeBeginCloningVisually() {
  if (!mVisualCloneTarget) {
    return;
  }

  if (mDecoder) {
    mDecoder->SetSecondaryVideoContainer(
        mVisualCloneTarget->GetVideoFrameContainer());
    NotifyDecoderActivityChanges();
    UpdateMediaControlAfterPictureInPictureModeChanged();
  } else if (mSrcStream) {
    VideoFrameContainer* container =
        mVisualCloneTarget->GetVideoFrameContainer();
    if (container) {
      mSecondaryVideoOutput = MakeRefPtr<FirstFrameVideoOutput>(
          container, AbstractThread::MainThread());
      // Resolve the clone promise when the first frame renders.
      mVideoWatchManager.Watch(
          mSecondaryVideoOutput->mFirstFrameRendered,
          &HTMLVideoElement::OnSecondaryVideoOutputFirstFrameRendered);
      SetSecondaryMediaStreamRenderer(container, mSecondaryVideoOutput);
    }
    UpdateMediaControlAfterPictureInPictureModeChanged();
  }
}
    694 
// Tears down visual cloning: detaches the secondary container/renderer for
// the active playback path, unwatches the first-frame signal, and clears
// the clone links on both elements.
void HTMLVideoElement::EndCloningVisually() {
  MOZ_ASSERT(mVisualCloneTarget);

  if (mDecoder) {
    mDecoder->SetSecondaryVideoContainer(nullptr);
    NotifyDecoderActivityChanges();
  } else if (mSrcStream) {
    if (mSecondaryVideoOutput) {
      // Must unwatch before dropping the output, or the watch manager would
      // hold a dangling watch target.
      mVideoWatchManager.Unwatch(
          mSecondaryVideoOutput->mFirstFrameRendered,
          &HTMLVideoElement::OnSecondaryVideoOutputFirstFrameRendered);
      mSecondaryVideoOutput = nullptr;
    }
    SetSecondaryMediaStreamRenderer(nullptr);
  }

  // Clear both directions of the clone link; return values are ignored
  // because clearing is always permitted.
  (void)mVisualCloneTarget->SetVisualCloneSource(nullptr);
  (void)SetVisualCloneTarget(nullptr);

  UpdateMediaControlAfterPictureInPictureModeChanged();

  // Update the UA widget (controls) unless tests opt out.
  if (IsInComposedDoc() && !StaticPrefs::media_cloneElementVisually_testing()) {
    NotifyUAWidgetSetupOrChange();
  }
}
    720 
// Called when a secondary video container has been installed downstream.
// If it matches the clone target's container, resolves the pending
// CloneElementVisually promise (asynchronously, to avoid re-entrancy).
void HTMLVideoElement::OnSecondaryVideoContainerInstalled(
    const RefPtr<VideoFrameContainer>& aSecondaryContainer) {
  MOZ_ASSERT(NS_IsMainThread());
  MOZ_DIAGNOSTIC_ASSERT_IF(mVisualCloneTargetPromise, mVisualCloneTarget);
  if (!mVisualCloneTargetPromise) {
    // Clone target was unset.
    return;
  }

  VideoFrameContainer* container = mVisualCloneTarget->GetVideoFrameContainer();
  if (NS_WARN_IF(container != aSecondaryContainer)) {
    // Not the right container.
    return;
  }

  // Resolve on a fresh runnable rather than synchronously.
  NS_DispatchToCurrentThread(NewRunnableMethod(
      "Promise::MaybeResolveWithUndefined", mVisualCloneTargetPromise,
      &Promise::MaybeResolveWithUndefined));
  mVisualCloneTargetPromise = nullptr;
}
    741 
// Watch callback for the media-stream cloning path: the first frame has
// rendered in the secondary output, so treat the clone target's container
// as installed. Only watched while mVisualCloneTarget is set (see
// MaybeBeginCloningVisually/EndCloningVisually).
void HTMLVideoElement::OnSecondaryVideoOutputFirstFrameRendered() {
  OnSecondaryVideoContainerInstalled(
      mVisualCloneTarget->GetVideoFrameContainer());
}
    746 
// Pauses/resumes non-audible autoplay video based on visibility, diverging
// from the spec to match Chrome/Safari power-saving behavior.
void HTMLVideoElement::OnVisibilityChange(Visibility aNewVisibility) {
  HTMLMediaElement::OnVisibilityChange(aNewVisibility);

  // See the alternative part after step 4, but we only pause/resume invisible
  // autoplay for non-audible video, which is different from the spec. This
  // behavior seems aiming to reduce the power consumption without interering
  // users, and Chrome and Safari also chose to do that only for non-audible
  // video, so we want to match them in order to reduce webcompat issue.
  // https://html.spec.whatwg.org/multipage/media.html#ready-states:eligible-for-autoplay-2
  if (!HasAttr(nsGkAtoms::autoplay) || IsAudible()) {
    return;
  }

  if (aNewVisibility == Visibility::ApproximatelyVisible && mPaused &&
      IsEligibleForAutoplay() && AllowedToPlay()) {
    LOG("resume invisible paused autoplay video");
    RunAutoplay();
  }

  // We need to consider the Pip window as well, which won't reflect in the
  // visibility event.
  if ((aNewVisibility == Visibility::ApproximatelyNonVisible &&
       !IsCloningElementVisually()) &&
      mCanAutoplayFlag) {
    LOG("pause non-audible autoplay video when it's invisible");
    PauseInternal();
    // Re-arm the autoplay flag (PauseInternal clears it) so the video can
    // resume automatically when it becomes visible again.
    mCanAutoplayFlag = true;
    return;
  }
}
    777 
// Resets playback state; also forgets the last presented frame ID so
// requestVideoFrameCallback bookkeeping starts fresh.
void HTMLVideoElement::ResetState() {
  HTMLMediaElement::ResetState();
  mLastPresentedFrameID = layers::kContainerFrameID_Invalid;
}
    782 
// Decides whether pending requestVideoFrameCallback callbacks should run on
// this rendering tick. Selects the image from the container that best matches
// the tick window [aNowTime, aNextTickTime], fills |aMd| with its metadata,
// and updates the presented-frame bookkeeping. Returns false (callbacks do
// not fire) when there is no image yet, the best candidate was already
// reported, or the frame is a dummy (empty) frame.
bool HTMLVideoElement::WillFireVideoFrameCallbacks(
    const TimeStamp& aNowTime, const Maybe<TimeStamp>& aNextTickTime,
    VideoFrameCallbackMetadata& aMd) {
  // Attempt to find the next image to be presented on this tick. Note that
  // composited will be accurate only if the element is visible.
  AutoTArray<ImageContainer::OwningImage, 4> images;
  if (RefPtr<layers::ImageContainer> container = GetImageContainer()) {
    container->GetCurrentImages(&images);
  }

  // If we did not find any current images, we must have fired too early, or we
  // are in the process of shutting down. Wait for the next invalidation.
  if (images.IsEmpty()) {
    return false;
  }

  // We are guaranteed that the images are in timestamp order. It is possible we
  // are already behind if the compositor notifications have not been processed
  // yet, so as per the standard, this is a best effort attempt at synchronizing
  // with the state of the GPU process.
  const ImageContainer::OwningImage* selected = nullptr;
  bool composited = false;
  for (const auto& image : images) {
    if (image.mTimeStamp <= aNowTime) {
      // Image should already have been composited. Because we might not be in
      // the display list, we cannot rely upon its mComposited status, and
      // should just assume it has indeed been composited.
      selected = &image;
      composited = true;
    } else if (!aNextTickTime || image.mTimeStamp <= aNextTickTime.ref()) {
      // Image should be the next to be composited. mComposited will be false
      // if the compositor hasn't rendered the frame yet or notified us of the
      // render yet, but it is in progress. If it is true, then we know the
      // next vsync will display the frame.
      selected = &image;
      composited = false;
    } else {
      // Image is for a future composition.
      break;
    }
  }

  // If all of the available images are for future compositions, we must have
  // fired too early. Wait for the next invalidation.
  if (!selected || selected->mFrameID == layers::kContainerFrameID_Invalid ||
      selected->mFrameID == mLastPresentedFrameID) {
    return false;
  }

  // If we have got a dummy frame, then we must have suspended decoding and have
  // no actual frame to present. This should only happen if we raced on
  // requesting a callback, and the media state machine advancing.
  gfx::IntSize frameSize = selected->mImage->GetSize();
  if (NS_WARN_IF(frameSize.IsEmpty())) {
    return false;
  }

  // If we have already displayed the expected frame, we need to make the
  // display time match the presentation time to indicate it is already
  // complete.
  if (composited) {
    aMd.mExpectedDisplayTime = aMd.mPresentationTime;
  }

  MOZ_ASSERT(!frameSize.IsEmpty());

  aMd.mWidth = frameSize.width;
  aMd.mHeight = frameSize.height;

  // If we were not provided a valid media time, then we need to estimate based
  // on the CurrentTime from the element.
  aMd.mMediaTime = selected->mMediaTime.IsValid()
                       ? selected->mMediaTime.ToSeconds()
                       : CurrentTime();

  // If we have a processing duration, we need to round it.
  //
  // https://wicg.github.io/video-rvfc/#security-and-privacy
  //
  // 5. Security and Privacy Considerations.
  // ... processingDuration exposes some under-the-hood performance information
  // about the video pipeline ... We therefore propose a resolution of 100μs,
  // which is still useful for automated quality analysis, but doesn’t offer any
  // new sources of high resolution information.
  if (selected->mProcessingDuration.IsValid()) {
    // ToBase(10000) coarsens the value to 1/10000s (100μs) per the spec note
    // above.
    aMd.mProcessingDuration.Construct(
        selected->mProcessingDuration.ToBase(10000).ToSeconds());
  }

#ifdef MOZ_WEBRTC
  // If given, this is the RTP timestamp from the last packet for the frame.
  if (selected->mRtpTimestamp) {
    aMd.mRtpTimestamp.Construct(*selected->mRtpTimestamp);
  }

  // For remote sources, the capture and receive time are represented as WebRTC
  // timestamps relative to an origin that is specific to the WebRTC session.
  const bool hasCaptureTimeNtp = selected->mWebrtcCaptureTime.is<int64_t>();
  const bool hasReceiveTimeReal = selected->mWebrtcReceiveTime.isSome();
  const auto* tsMaker = mSelectedVideoStreamTrack
                            ? mSelectedVideoStreamTrack->GetTimestampMaker()
                            : nullptr;
  auto* win = OwnerDoc()->GetInnerWindow();
  auto* perf = win ? win->GetPerformance() : nullptr;

  // WebRTC timestamps are converted to document-local high resolution
  // timestamps to have a shared time base (see bug 1937776)
  if (tsMaker && perf) {
    if (hasCaptureTimeNtp) {
      const int64_t capt64 = selected->mWebrtcCaptureTime.as<int64_t>();
      webrtc::Timestamp captTs = webrtc::Timestamp::Millis(capt64);
      aMd.mCaptureTime.Construct(
          RTCStatsTimestamp::FromNtp(*tsMaker, captTs).ToDom() -
          perf->TimeOrigin());
    }

    if (hasReceiveTimeReal) {
      const int64_t recvUs = selected->mWebrtcReceiveTime.value();
      webrtc::Timestamp recvTs = webrtc::Timestamp::Micros(recvUs);
      aMd.mReceiveTime.Construct(
          RTCStatsTimestamp::FromRealtime(*tsMaker, recvTs).ToDom() -
          perf->TimeOrigin());
    }
  }

  // The capture time may already be a high resolution timestamp from the
  // camera pipeline indicating when the sample was captured.
  // Note this may overwrite the NTP-derived capture time constructed above.
  if (perf && selected->mWebrtcCaptureTime.is<TimeStamp>()) {
    aMd.mCaptureTime.Construct(perf->TimeStampToDOMHighResForRendering(
        selected->mWebrtcCaptureTime.as<TimeStamp>()));
  }
#endif

  // Note that if we seek, or restart a video, we may present an earlier frame
  // that we already presented with the same ID. This would cause presented
  // frames to go backwards when it must be monotonically increasing. Presented
  // frames cannot simply increment by 1 each request callback because it is
  // also used by the caller to determine if frames were missed. As such, we
  // will typically use the difference between the current frame and the last
  // presented via the callback, but otherwise assume a single frame due to the
  // seek.
  mPresentedFrames +=
      selected->mFrameID > 1 && selected->mFrameID > mLastPresentedFrameID
          ? selected->mFrameID - mLastPresentedFrameID
          : 1;
  mLastPresentedFrameID = selected->mFrameID;

  // Presented frames is a bit of a misnomer from a rendering perspective,
  // because we still need to advance regardless of composition. Video elements
  // that are outside of the DOM, or are not visible, still advance the video in
  // the background, and presumably the caller still needs some way to know how
  // many frames we have advanced.
  aMd.mPresentedFrames = mPresentedFrames;

  // Defer the "callbacks have run" notification until after the callbacks
  // actually execute on this tick.
  NS_DispatchToMainThread(NewRunnableMethod(
      "HTMLVideoElement::FinishedVideoFrameRequestCallbacks", this,
      &HTMLVideoElement::FinishedVideoFrameRequestCallbacks));

  return true;
}
    943 
    944 void HTMLVideoElement::FinishedVideoFrameRequestCallbacks() {
    945  // After we have executed the rVFC and rAF callbacks, we need to check whether
    946  // or not we have scheduled more. If we did not, then we need to notify the
    947  // decoder, because it may be the only thing keeping the decoder fully active.
    948  if (!HasPendingCallbacks()) {
    949    NotifyDecoderActivityChanges();
    950  }
    951 }
    952 
    953 uint32_t HTMLVideoElement::RequestVideoFrameCallback(
    954    VideoFrameRequestCallback& aCallback, ErrorResult& aRv) {
    955  bool hasPending = HasPendingCallbacks();
    956  uint32_t handle = 0;
    957  aRv = mVideoFrameRequestManager.Schedule(aCallback, &handle);
    958  if (!hasPending && HasPendingCallbacks()) {
    959    NotifyDecoderActivityChanges();
    960  }
    961  return handle;
    962 }
    963 
    964 void HTMLVideoElement::CancelVideoFrameCallback(uint32_t aHandle) {
    965  if (mVideoFrameRequestManager.Cancel(aHandle) && !HasPendingCallbacks()) {
    966    NotifyDecoderActivityChanges();
    967  }
    968 }
    969 
    970 }  // namespace mozilla::dom
    971 
    972 #undef LOG