tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

VideoFrame.cpp (103775B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "mozilla/dom/VideoFrame.h"
      8 
      9 #include <math.h>
     10 
     11 #include <limits>
     12 #include <utility>
     13 
     14 #include "ImageContainer.h"
     15 #include "ImageConversion.h"
     16 #include "MediaResult.h"
     17 #include "VideoColorSpace.h"
     18 #include "js/StructuredClone.h"
     19 #include "mozilla/Maybe.h"
     20 #include "mozilla/ResultVariant.h"
     21 #include "mozilla/StaticPrefs_dom.h"
     22 #include "mozilla/Try.h"
     23 #include "mozilla/UniquePtr.h"
     24 #include "mozilla/dom/BufferSourceBinding.h"
     25 #include "mozilla/dom/CanvasUtils.h"
     26 #include "mozilla/dom/DOMRect.h"
     27 #include "mozilla/dom/HTMLCanvasElement.h"
     28 #include "mozilla/dom/HTMLImageElement.h"
     29 #include "mozilla/dom/HTMLVideoElement.h"
     30 #include "mozilla/dom/ImageBitmap.h"
     31 #include "mozilla/dom/ImageUtils.h"
     32 #include "mozilla/dom/OffscreenCanvas.h"
     33 #include "mozilla/dom/Promise.h"
     34 #include "mozilla/dom/SVGImageElement.h"
     35 #include "mozilla/dom/StructuredCloneHolder.h"
     36 #include "mozilla/dom/StructuredCloneTags.h"
     37 #include "mozilla/dom/UnionTypes.h"
     38 #include "mozilla/dom/VideoFrameBinding.h"
     39 #include "mozilla/gfx/2D.h"
     40 #include "mozilla/gfx/Swizzle.h"
     41 #include "mozilla/layers/LayersSurfaces.h"
     42 #include "mozilla/webgpu/ExternalTexture.h"
     43 #include "nsIPrincipal.h"
     44 #include "nsIURI.h"
     45 #include "nsLayoutUtils.h"
     46 
     47 extern mozilla::LazyLogModule gWebCodecsLog;
     48 
     49 namespace mozilla::dom {
     50 
     51 #ifdef LOG_INTERNAL
     52 #  undef LOG_INTERNAL
     53 #endif  // LOG_INTERNAL
     54 #define LOG_INTERNAL(level, msg, ...) \
     55  MOZ_LOG(gWebCodecsLog, LogLevel::level, (msg, ##__VA_ARGS__))
     56 
     57 #ifdef LOG
     58 #  undef LOG
     59 #endif  // LOG
     60 #define LOG(msg, ...) LOG_INTERNAL(Debug, msg, ##__VA_ARGS__)
     61 
     62 #ifdef LOGW
     63 #  undef LOGW
     64 #endif  // LOGW
     65 #define LOGW(msg, ...) LOG_INTERNAL(Warning, msg, ##__VA_ARGS__)
     66 
     67 #ifdef LOGE
     68 #  undef LOGE
     69 #endif  // LOGE
     70 #define LOGE(msg, ...) LOG_INTERNAL(Error, msg, ##__VA_ARGS__)
     71 
// XPCOM cycle-collection glue for VideoFrame.  VideoFrame participates in the
// cycle collector because it holds a strong reference to its global (mParent)
// and caches a JS wrapper object.
NS_IMPL_CYCLE_COLLECTION_WRAPPERCACHE_CLASS(VideoFrame)
NS_IMPL_CYCLE_COLLECTION_UNLINK_BEGIN(VideoFrame)
  // Eagerly drop the (potentially large) image data when the CC unlinks us.
  tmp->CloseIfNeeded();
  NS_IMPL_CYCLE_COLLECTION_UNLINK(mParent)
  NS_IMPL_CYCLE_COLLECTION_UNLINK_PRESERVED_WRAPPER
NS_IMPL_CYCLE_COLLECTION_UNLINK_END
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_BEGIN(VideoFrame)
  NS_IMPL_CYCLE_COLLECTION_TRAVERSE(mParent)
NS_IMPL_CYCLE_COLLECTION_TRAVERSE_END

NS_IMPL_CYCLE_COLLECTING_ADDREF(VideoFrame)
// VideoFrame should be released as soon as its refcount drops to zero,
// without waiting for async deletion by the cycle collector, since it may hold
// a large-size image.
NS_IMPL_CYCLE_COLLECTING_RELEASE_WITH_LAST_RELEASE(VideoFrame, CloseIfNeeded())
NS_INTERFACE_MAP_BEGIN_CYCLE_COLLECTION(VideoFrame)
  NS_WRAPPERCACHE_INTERFACE_MAP_ENTRY
  NS_INTERFACE_MAP_ENTRY(nsISupports)
NS_INTERFACE_MAP_END
     91 
     92 /*
     93 * The following are helpers to read the image data from the given buffer and
     94 * the format. The data layout is illustrated in the comments for
     95 * `VideoFrame::Format` below.
     96 */
     97 
     98 static int32_t CeilingOfHalf(int32_t aValue) {
     99  MOZ_ASSERT(aValue >= 0);
    100  return aValue / 2 + (aValue % 2);
    101 }
    102 
/*
 * Base reader for planar/semi-planar YUV buffers.  It only knows about the
 * Y plane, which comes first in the buffer with a stride equal to the image
 * width (no per-row padding).  Subclasses add the chroma planes.
 */
class YUVBufferReaderBase {
 public:
  // aBuffer must hold at least mStrideY * mHeight bytes of Y data, followed
  // by whatever planes the concrete subclass expects.
  YUVBufferReaderBase(const Span<uint8_t>& aBuffer, int32_t aWidth,
                      int32_t aHeight)
      : mWidth(aWidth), mHeight(aHeight), mStrideY(aWidth), mBuffer(aBuffer) {}
  virtual ~YUVBufferReaderBase() = default;

  // The Y plane starts at the beginning of the buffer.
  const uint8_t* DataY() const { return mBuffer.data(); }
  const int32_t mWidth;
  const int32_t mHeight;
  const int32_t mStrideY;

 protected:
  // Byte size of the Y plane (stride * height), overflow-checked.
  CheckedInt<size_t> YByteSize() const {
    return CheckedInt<size_t>(mStrideY) * mHeight;
  }

  const Span<uint8_t> mBuffer;
};
    122 
class I420ABufferReader;

/*
 * Reader for I420 (4:2:0 planar) buffers: a Y plane followed by U then V,
 * each chroma plane holding ceil(width/2) x ceil(height/2) samples.
 */
class I420BufferReader : public YUVBufferReaderBase {
 public:
  I420BufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                   int32_t aHeight)
      : YUVBufferReaderBase(aBuffer, aWidth, aHeight),
        mStrideU(CeilingOfHalf(aWidth)),
        mStrideV(CeilingOfHalf(aWidth)) {}
  virtual ~I420BufferReader() = default;

  // The U plane immediately follows the Y plane.
  const uint8_t* DataU() const { return &mBuffer[YByteSize().value()]; }
  // The V plane immediately follows the U plane.
  const uint8_t* DataV() const {
    return &mBuffer[YByteSize().value() + UByteSize().value()];
  }
  virtual I420ABufferReader* AsI420ABufferReader() { return nullptr; }

  const int32_t mStrideU;
  const int32_t mStrideV;

 protected:
  // Byte size of the U plane, overflow-checked.
  CheckedInt<size_t> UByteSize() const {
    return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideU;
  }

  // Byte size of the V plane, symmetric with UByteSize above.
  // NOTE(review): inconsistently named VSize rather than VByteSize; renaming
  // would touch the I420ABufferReader subclass as well.
  CheckedInt<size_t> VSize() const {
    return CheckedInt<size_t>(CeilingOfHalf(mHeight)) * mStrideV;
  }
};
    151 
/*
 * Reader for I420A buffers: an I420 layout followed by a full-resolution
 * alpha plane whose stride equals the Y stride.
 */
class I420ABufferReader final : public I420BufferReader {
 public:
  I420ABufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                    int32_t aHeight)
      : I420BufferReader(aBuffer, aWidth, aHeight), mStrideA(aWidth) {
    MOZ_ASSERT(mStrideA == mStrideY);
  }
  virtual ~I420ABufferReader() = default;

  // The alpha plane sits after the Y, U, and V planes.
  const uint8_t* DataA() const {
    return &mBuffer[YByteSize().value() + UByteSize().value() +
                    VSize().value()];
  }

  virtual I420ABufferReader* AsI420ABufferReader() override { return this; }

  const int32_t mStrideA;
};
    170 
/*
 * Reader for NV12 (4:2:0 semi-planar) buffers: a Y plane followed by a single
 * interleaved CbCr plane whose stride is the width rounded up to an even
 * number of bytes (2 bytes per chroma sample pair).
 */
class NV12BufferReader final : public YUVBufferReaderBase {
 public:
  NV12BufferReader(const Span<uint8_t>& aBuffer, int32_t aWidth,
                   int32_t aHeight)
      : YUVBufferReaderBase(aBuffer, aWidth, aHeight),
        mStrideUV(aWidth + aWidth % 2) {}
  virtual ~NV12BufferReader() = default;

  // The interleaved CbCr plane immediately follows the Y plane.
  const uint8_t* DataUV() const { return &mBuffer[YByteSize().value()]; }

  const int32_t mStrideUV;
};
    183 
/*
 * The following are helpers to create a VideoFrame from a given buffer.
 */
    187 
    188 static Result<RefPtr<gfx::DataSourceSurface>, MediaResult> AllocateBGRASurface(
    189    gfx::DataSourceSurface* aSurface) {
    190  MOZ_ASSERT(aSurface);
    191 
    192  // Memory allocation relies on CreateDataSourceSurfaceWithStride so we still
    193  // need to do this even if the format is SurfaceFormat::BGR{A, X}.
    194 
    195  gfx::DataSourceSurface::ScopedMap surfaceMap(aSurface,
    196                                               gfx::DataSourceSurface::READ);
    197  if (!surfaceMap.IsMapped()) {
    198    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    199                           "The source surface is not readable"_ns));
    200  }
    201 
    202  RefPtr<gfx::DataSourceSurface> bgraSurface =
    203      gfx::Factory::CreateDataSourceSurfaceWithStride(
    204          aSurface->GetSize(), gfx::SurfaceFormat::B8G8R8A8,
    205          surfaceMap.GetStride());
    206  if (!bgraSurface) {
    207    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    208                           "Failed to allocate a BGRA surface"_ns));
    209  }
    210 
    211  gfx::DataSourceSurface::ScopedMap bgraMap(bgraSurface,
    212                                            gfx::DataSourceSurface::WRITE);
    213  if (!bgraMap.IsMapped()) {
    214    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    215                           "The allocated BGRA surface is not writable"_ns));
    216  }
    217 
    218  gfx::SwizzleData(surfaceMap.GetData(), surfaceMap.GetStride(),
    219                   aSurface->GetFormat(), bgraMap.GetData(),
    220                   bgraMap.GetStride(), bgraSurface->GetFormat(),
    221                   bgraSurface->GetSize());
    222 
    223  return bgraSurface;
    224 }
    225 
    226 static Result<RefPtr<layers::Image>, MediaResult> CreateImageFromSourceSurface(
    227    gfx::SourceSurface* aSource) {
    228  MOZ_ASSERT(aSource);
    229 
    230  if (aSource->GetSize().IsEmpty()) {
    231    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    232                           "Surface has non positive width or height"_ns));
    233  }
    234 
    235  RefPtr<gfx::DataSourceSurface> surface = aSource->GetDataSurface();
    236  if (!surface) {
    237    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    238                           "Failed to get the data surface"_ns));
    239  }
    240 
    241  // Gecko favors BGRA so we convert surface into BGRA format first.
    242  RefPtr<gfx::DataSourceSurface> bgraSurface =
    243      MOZ_TRY(AllocateBGRASurface(surface));
    244 
    245  return RefPtr<layers::Image>(
    246      new layers::SourceSurfaceImage(bgraSurface.get()));
    247 }
    248 
    249 static Result<RefPtr<layers::Image>, MediaResult> CreateImageFromRawData(
    250    const gfx::IntSize& aSize, int32_t aStride, gfx::SurfaceFormat aFormat,
    251    const Span<uint8_t>& aBuffer) {
    252  MOZ_ASSERT(!aSize.IsEmpty());
    253 
    254  // Wrap the source buffer into a DataSourceSurface.
    255  RefPtr<gfx::DataSourceSurface> surface =
    256      gfx::Factory::CreateWrappingDataSourceSurface(aBuffer.data(), aStride,
    257                                                    aSize, aFormat);
    258  if (!surface) {
    259    return Err(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    260                           "Failed to wrap the raw data into a surface"_ns));
    261  }
    262 
    263  // Gecko favors BGRA so we convert surface into BGRA format first.
    264  RefPtr<gfx::DataSourceSurface> bgraSurface =
    265      MOZ_TRY(AllocateBGRASurface(surface));
    266  MOZ_ASSERT(bgraSurface);
    267 
    268  return RefPtr<layers::Image>(
    269      new layers::SourceSurfaceImage(bgraSurface.get()));
    270 }
    271 
    272 static Result<RefPtr<layers::Image>, MediaResult> CreateRGBAImageFromBuffer(
    273    const VideoFrame::Format& aFormat, const gfx::IntSize& aSize,
    274    const Span<uint8_t>& aBuffer) {
    275  const gfx::SurfaceFormat format = aFormat.ToSurfaceFormat();
    276  MOZ_ASSERT(format == gfx::SurfaceFormat::R8G8B8A8 ||
    277             format == gfx::SurfaceFormat::R8G8B8X8 ||
    278             format == gfx::SurfaceFormat::B8G8R8A8 ||
    279             format == gfx::SurfaceFormat::B8G8R8X8);
    280  // TODO: Use aFormat.SampleBytes() instead?
    281  CheckedInt<int32_t> stride(BytesPerPixel(format));
    282  stride *= aSize.Width();
    283  if (!stride.isValid()) {
    284    return Err(MediaResult(NS_ERROR_INVALID_ARG,
    285                           "Image size exceeds implementation's limit"_ns));
    286  }
    287  return CreateImageFromRawData(aSize, stride.value(), format, aBuffer);
    288 }
    289 
    290 static Result<RefPtr<layers::Image>, MediaResult> CreateYUVImageFromBuffer(
    291    const VideoFrame::Format& aFormat,
    292    const VideoColorSpaceInternal& aColorSpace, const gfx::IntSize& aSize,
    293    const Span<uint8_t>& aBuffer) {
    294  if (aFormat.PixelFormat() == VideoPixelFormat::I420 ||
    295      aFormat.PixelFormat() == VideoPixelFormat::I420A) {
    296    UniquePtr<I420BufferReader> reader;
    297    if (aFormat.PixelFormat() == VideoPixelFormat::I420) {
    298      reader.reset(
    299          new I420BufferReader(aBuffer, aSize.Width(), aSize.Height()));
    300    } else {
    301      reader.reset(
    302          new I420ABufferReader(aBuffer, aSize.Width(), aSize.Height()));
    303    }
    304 
    305    layers::PlanarYCbCrData data;
    306    data.mPictureRect = gfx::IntRect(0, 0, reader->mWidth, reader->mHeight);
    307 
    308    // Y plane.
    309    data.mYChannel = const_cast<uint8_t*>(reader->DataY());
    310    data.mYStride = reader->mStrideY;
    311    data.mYSkip = 0;
    312    // Cb plane.
    313    data.mCbChannel = const_cast<uint8_t*>(reader->DataU());
    314    data.mCbSkip = 0;
    315    // Cr plane.
    316    data.mCrChannel = const_cast<uint8_t*>(reader->DataV());
    317    data.mCbSkip = 0;
    318    // A plane.
    319    if (aFormat.PixelFormat() == VideoPixelFormat::I420A) {
    320      data.mAlpha.emplace();
    321      data.mAlpha->mChannel =
    322          const_cast<uint8_t*>(reader->AsI420ABufferReader()->DataA());
    323      data.mAlpha->mSize = data.mPictureRect.Size();
    324      // No values for mDepth and mPremultiplied.
    325    }
    326 
    327    // CbCr plane vector.
    328    MOZ_RELEASE_ASSERT(reader->mStrideU == reader->mStrideV);
    329    data.mCbCrStride = reader->mStrideU;
    330    data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
    331    // Color settings.
    332    if (aColorSpace.mFullRange) {
    333      data.mColorRange = ToColorRange(aColorSpace.mFullRange.value());
    334    }
    335    MOZ_RELEASE_ASSERT(aColorSpace.mMatrix);
    336    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.value());
    337    if (aColorSpace.mTransfer) {
    338      data.mTransferFunction =
    339          ToTransferFunction(aColorSpace.mTransfer.value());
    340    }
    341    if (aColorSpace.mPrimaries) {
    342      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.value());
    343    }
    344 
    345    RefPtr<layers::PlanarYCbCrImage> image =
    346        new layers::RecyclingPlanarYCbCrImage(new layers::BufferRecycleBin());
    347    nsresult r = image->CopyData(data);
    348    if (NS_FAILED(r)) {
    349      return Err(MediaResult(
    350          r,
    351          nsPrintfCString(
    352              "Failed to create I420%s image",
    353              (aFormat.PixelFormat() == VideoPixelFormat::I420A ? "A" : ""))));
    354    }
    355    // Manually cast type to make Result work.
    356    return RefPtr<layers::Image>(image.forget());
    357  }
    358 
    359  if (aFormat.PixelFormat() == VideoPixelFormat::NV12) {
    360    NV12BufferReader reader(aBuffer, aSize.Width(), aSize.Height());
    361 
    362    layers::PlanarYCbCrData data;
    363    data.mPictureRect = gfx::IntRect(0, 0, reader.mWidth, reader.mHeight);
    364 
    365    // Y plane.
    366    data.mYChannel = const_cast<uint8_t*>(reader.DataY());
    367    data.mYStride = reader.mStrideY;
    368    data.mYSkip = 0;
    369    // Cb plane.
    370    data.mCbChannel = const_cast<uint8_t*>(reader.DataUV());
    371    data.mCbSkip = 1;
    372    // Cr plane.
    373    data.mCrChannel = data.mCbChannel + 1;
    374    data.mCrSkip = 1;
    375    // CbCr plane vector.
    376    data.mCbCrStride = reader.mStrideUV;
    377    data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
    378    // Color settings.
    379    if (aColorSpace.mFullRange) {
    380      data.mColorRange = ToColorRange(aColorSpace.mFullRange.value());
    381    }
    382    MOZ_RELEASE_ASSERT(aColorSpace.mMatrix);
    383    data.mYUVColorSpace = ToColorSpace(aColorSpace.mMatrix.value());
    384    if (aColorSpace.mTransfer) {
    385      data.mTransferFunction =
    386          ToTransferFunction(aColorSpace.mTransfer.value());
    387    }
    388    if (aColorSpace.mPrimaries) {
    389      data.mColorPrimaries = ToPrimaries(aColorSpace.mPrimaries.value());
    390    }
    391 
    392    RefPtr<layers::NVImage> image = new layers::NVImage();
    393    nsresult r = image->SetData(data);
    394    if (NS_FAILED(r)) {
    395      return Err(MediaResult(r, "Failed to create NV12 image"_ns));
    396    }
    397    // Manually cast type to make Result work.
    398    return RefPtr<layers::Image>(image.forget());
    399  }
    400 
    401  return Err(MediaResult(
    402      NS_ERROR_DOM_NOT_SUPPORTED_ERR,
    403      nsPrintfCString("%s is unsupported",
    404                      dom::GetEnumString(aFormat.PixelFormat()).get())));
    405 }
    406 
    407 static Result<RefPtr<layers::Image>, MediaResult> CreateImageFromBuffer(
    408    const VideoFrame::Format& aFormat,
    409    const VideoColorSpaceInternal& aColorSpace, const gfx::IntSize& aSize,
    410    const Span<uint8_t>& aBuffer) {
    411  switch (aFormat.PixelFormat()) {
    412    case VideoPixelFormat::I420:
    413    case VideoPixelFormat::I420A:
    414    case VideoPixelFormat::NV12:
    415      return CreateYUVImageFromBuffer(aFormat, aColorSpace, aSize, aBuffer);
    416    case VideoPixelFormat::I420P10:
    417    case VideoPixelFormat::I420P12:
    418    case VideoPixelFormat::I420AP10:
    419    case VideoPixelFormat::I420AP12:
    420    case VideoPixelFormat::I422:
    421    case VideoPixelFormat::I422P10:
    422    case VideoPixelFormat::I422P12:
    423    case VideoPixelFormat::I422A:
    424    case VideoPixelFormat::I422AP10:
    425    case VideoPixelFormat::I422AP12:
    426    case VideoPixelFormat::I444:
    427    case VideoPixelFormat::I444P10:
    428    case VideoPixelFormat::I444P12:
    429    case VideoPixelFormat::I444A:
    430    case VideoPixelFormat::I444AP10:
    431    case VideoPixelFormat::I444AP12:
    432      // Not yet support for now.
    433      break;
    434    case VideoPixelFormat::RGBA:
    435    case VideoPixelFormat::RGBX:
    436    case VideoPixelFormat::BGRA:
    437    case VideoPixelFormat::BGRX:
    438      return CreateRGBAImageFromBuffer(aFormat, aSize, aBuffer);
    439  }
    440  return Err(MediaResult(
    441      NS_ERROR_DOM_NOT_SUPPORTED_ERR,
    442      nsPrintfCString("%s is unsupported",
    443                      dom::GetEnumString(aFormat.PixelFormat()).get())));
    444 }
    445 
/*
 * The following are helpers defined in
 * https://w3c.github.io/webcodecs/#videoframe-algorithms
 */
    450 
    451 static bool IsSameOrigin(nsIGlobalObject* aGlobal, const VideoFrame& aFrame) {
    452  MOZ_ASSERT(aGlobal);
    453  MOZ_ASSERT(aFrame.GetParentObject());
    454 
    455  nsIPrincipal* principalX = aGlobal->PrincipalOrNull();
    456  nsIPrincipal* principalY = aFrame.GetParentObject()->PrincipalOrNull();
    457 
    458  // If both of VideoFrames are created in worker, they are in the same origin
    459  // domain.
    460  if (!principalX) {
    461    return !principalY;
    462  }
    463  // Otherwise, check their domains.
    464  return principalX->Equals(principalY);
    465 }
    466 
    467 static bool IsSameOrigin(nsIGlobalObject* aGlobal,
    468                         HTMLVideoElement& aVideoElement) {
    469  MOZ_ASSERT(aGlobal);
    470 
    471  // If CORS is in use, consider the video source is same-origin.
    472  if (aVideoElement.GetCORSMode() != CORS_NONE) {
    473    return true;
    474  }
    475 
    476  // Otherwise, check if video source has cross-origin redirect or not.
    477  if (aVideoElement.HadCrossOriginRedirects()) {
    478    return false;
    479  }
    480 
    481  // Finally, compare the VideoFrame's domain and video's one.
    482  nsIPrincipal* principal = aGlobal->PrincipalOrNull();
    483  nsCOMPtr<nsIPrincipal> elementPrincipal =
    484      aVideoElement.GetCurrentVideoPrincipal();
    485  // <video> cannot be created in worker, so it should have a valid principal.
    486  if (NS_WARN_IF(!elementPrincipal) || !principal) {
    487    return false;
    488  }
    489  return principal->Subsumes(elementPrincipal);
    490 }
    491 
    492 // A sub-helper to convert DOMRectInit to gfx::IntRect.
    493 static Result<gfx::IntRect, nsCString> ToIntRect(const DOMRectInit& aRectInit) {
    494  auto EQ = [](const double& a, const double& b) {
    495    constexpr double e = std::numeric_limits<double>::epsilon();
    496    return std::fabs(a - b) <= e;
    497  };
    498  auto GT = [&](const double& a, const double& b) {
    499    return !EQ(a, b) && a > b;
    500  };
    501 
    502  // Make sure the double values are in the gfx::IntRect's valid range, before
    503  // checking the spec's valid range. The double's infinity value is larger than
    504  // gfx::IntRect's max value so it will be filtered out here.
    505  constexpr double MAX = static_cast<double>(
    506      std::numeric_limits<decltype(gfx::IntRect::x)>::max());
    507  constexpr double MIN = static_cast<double>(
    508      std::numeric_limits<decltype(gfx::IntRect::x)>::min());
    509  if (GT(aRectInit.mX, MAX) || GT(MIN, aRectInit.mX)) {
    510    return Err("x is out of the valid range"_ns);
    511  }
    512  if (GT(aRectInit.mY, MAX) || GT(MIN, aRectInit.mY)) {
    513    return Err("y is out of the valid range"_ns);
    514  }
    515  if (GT(aRectInit.mWidth, MAX) || GT(MIN, aRectInit.mWidth)) {
    516    return Err("width is out of the valid range"_ns);
    517  }
    518  if (GT(aRectInit.mHeight, MAX) || GT(MIN, aRectInit.mHeight)) {
    519    return Err("height is out of the valid range"_ns);
    520  }
    521 
    522  gfx::IntRect rect(
    523      static_cast<decltype(gfx::IntRect::x)>(aRectInit.mX),
    524      static_cast<decltype(gfx::IntRect::y)>(aRectInit.mY),
    525      static_cast<decltype(gfx::IntRect::width)>(aRectInit.mWidth),
    526      static_cast<decltype(gfx::IntRect::height)>(aRectInit.mHeight));
    527  // Check the spec's valid range.
    528  if (rect.X() < 0) {
    529    return Err("x must be non-negative"_ns);
    530  }
    531  if (rect.Y() < 0) {
    532    return Err("y must be non-negative"_ns);
    533  }
    534  if (rect.Width() <= 0) {
    535    return Err("width must be positive"_ns);
    536  }
    537  if (rect.Height() <= 0) {
    538    return Err("height must be positive"_ns);
    539  }
    540 
    541  return rect;
    542 }
    543 
// A sub-helper to convert a (width, height) pair to gfx::IntSize.
    545 static Result<gfx::IntSize, nsCString> ToIntSize(const uint32_t& aWidth,
    546                                                 const uint32_t& aHeight) {
    547  // Make sure the given values are in the gfx::IntSize's valid range, before
    548  // checking the spec's valid range.
    549  constexpr uint32_t MAX = static_cast<uint32_t>(
    550      std::numeric_limits<decltype(gfx::IntRect::width)>::max());
    551  if (aWidth > MAX) {
    552    return Err("Width exceeds the implementation's range"_ns);
    553  }
    554  if (aHeight > MAX) {
    555    return Err("Height exceeds the implementation's range"_ns);
    556  }
    557 
    558  gfx::IntSize size(static_cast<decltype(gfx::IntRect::width)>(aWidth),
    559                    static_cast<decltype(gfx::IntRect::height)>(aHeight));
    560  // Check the spec's valid range.
    561  if (size.Width() <= 0) {
    562    return Err("Width must be positive"_ns);
    563  }
    564  if (size.Height() <= 0) {
    565    return Err("Height must be positive"_ns);
    566  }
    567  return size;
    568 }
    569 
    570 // A sub-helper to make sure visible range is in the picture.
    571 static Result<Ok, nsCString> ValidateVisibility(
    572    const gfx::IntRect& aVisibleRect, const gfx::IntSize& aPicSize) {
    573  MOZ_ASSERT(aVisibleRect.X() >= 0);
    574  MOZ_ASSERT(aVisibleRect.Y() >= 0);
    575  MOZ_ASSERT(aVisibleRect.Width() > 0);
    576  MOZ_ASSERT(aVisibleRect.Height() > 0);
    577 
    578  const auto w = CheckedInt<uint32_t>(aVisibleRect.Width()) + aVisibleRect.X();
    579  if (w.value() > static_cast<uint32_t>(aPicSize.Width())) {
    580    return Err(
    581        "Sum of visible rectangle's x and width exceeds the picture's width"_ns);
    582  }
    583 
    584  const auto h = CheckedInt<uint32_t>(aVisibleRect.Height()) + aVisibleRect.Y();
    585  if (h.value() > static_cast<uint32_t>(aPicSize.Height())) {
    586    return Err(
    587        "Sum of visible rectangle's y and height exceeds the picture's height"_ns);
    588  }
    589 
    590  return Ok();
    591 }
    592 
    593 // A sub-helper to check and get display{Width, Height} in
    594 // VideoFrame(Buffer)Init.
template <class T>
static Result<Maybe<gfx::IntSize>, nsCString> MaybeGetDisplaySize(
    const T& aInit) {
  // The spec requires displayWidth and displayHeight to be given together.
  if (aInit.mDisplayWidth.WasPassed() != aInit.mDisplayHeight.WasPassed()) {
    return Err(
        "displayWidth and displayHeight cannot be set without the other"_ns);
  }

  // Nothing() when neither dimension was passed; otherwise a validated size.
  Maybe<gfx::IntSize> displaySize;
  if (aInit.mDisplayWidth.WasPassed() && aInit.mDisplayHeight.WasPassed()) {
    displaySize.emplace(MOZ_TRY(
        ToIntSize(aInit.mDisplayWidth.Value(), aInit.mDisplayHeight.Value())
            .mapErr([](nsCString error) {
              // Prefix the error so it names the offending attribute,
              // e.g. "displayWidth must be positive".
              error.Insert("display", 0);
              return error;
            })));
  }
  return displaySize;
}
    614 
    615 // https://w3c.github.io/webcodecs/#valid-videoframebufferinit
static Result<
    std::tuple<gfx::IntSize, Maybe<gfx::IntRect>, Maybe<gfx::IntSize>>,
    nsCString>
ValidateVideoFrameBufferInit(const VideoFrameBufferInit& aInit) {
  // codedWidth/codedHeight are required; prefix errors with "coded" so they
  // name the offending attribute (e.g. "codedWidth must be positive").
  gfx::IntSize codedSize =
      MOZ_TRY(ToIntSize(aInit.mCodedWidth, aInit.mCodedHeight)
                  .mapErr([](nsCString error) {
                    error.Insert("coded", 0);
                    return error;
                  }));

  // visibleRect is optional; when present it must be a valid rectangle that
  // lies entirely inside the coded size.
  Maybe<gfx::IntRect> visibleRect;
  if (aInit.mVisibleRect.WasPassed()) {
    visibleRect.emplace(MOZ_TRY(
        ToIntRect(aInit.mVisibleRect.Value()).mapErr([](nsCString error) {
          error.Insert("visibleRect's ", 0);
          return error;
        })));
    MOZ_TRY(ValidateVisibility(visibleRect.ref(), codedSize));
  }

  // displayWidth/displayHeight must be passed together, or not at all.
  Maybe<gfx::IntSize> displaySize = MOZ_TRY(MaybeGetDisplaySize(aInit));

  return std::make_tuple(codedSize, visibleRect, displaySize);
}
    641 
    642 // https://w3c.github.io/webcodecs/#videoframe-verify-rect-offset-alignment
    643 static Result<Ok, nsCString> VerifyRectOffsetAlignment(
    644    const Maybe<VideoFrame::Format>& aFormat, const gfx::IntRect& aRect) {
    645  if (!aFormat) {
    646    return Ok();
    647  }
    648  for (const VideoFrame::Format::Plane& p : aFormat->Planes()) {
    649    const gfx::IntSize sample = aFormat->SampleSize(p);
    650    if (aRect.X() % sample.Width() != 0) {
    651      return Err("Mismatch between format and given left offset"_ns);
    652    }
    653 
    654    if (aRect.Y() % sample.Height() != 0) {
    655      return Err("Mismatch between format and given top offset"_ns);
    656    }
    657  }
    658  return Ok();
    659 }
    660 
    661 // https://w3c.github.io/webcodecs/#videoframe-parse-visible-rect
static Result<gfx::IntRect, MediaResult> ParseVisibleRect(
    const gfx::IntRect& aDefaultRect, const Maybe<gfx::IntRect>& aOverrideRect,
    const gfx::IntSize& aCodedSize, const VideoFrame::Format& aFormat) {
  // The default rect must already have been validated by the caller.
  MOZ_ASSERT(ValidateVisibility(aDefaultRect, aCodedSize).isOk());

  gfx::IntRect rect = aDefaultRect;
  if (aOverrideRect) {
    // Skip checking overrideRect's width and height here. They should be
    // checked before reaching here, and ValidateVisibility will assert it.

    // The override must still fit inside the coded size.
    MOZ_TRY(ValidateVisibility(aOverrideRect.ref(), aCodedSize)
                .mapErr([](const nsCString& error) {
                  return MediaResult(NS_ERROR_INVALID_ARG, error);
                }));
    rect = *aOverrideRect;
  }

  // The chosen rect's offsets must land on sample boundaries for every plane
  // of the format (e.g. even x/y for 4:2:0 chroma planes).
  MOZ_TRY(VerifyRectOffsetAlignment(Some(aFormat), rect)
              .mapErr([](const nsCString& error) {
                return MediaResult(NS_ERROR_INVALID_ARG, error);
              }));

  return rect;
}
    686 
    687 // https://w3c.github.io/webcodecs/#computed-plane-layout
// Per-plane bookkeeping for copying VideoFrame data into a destination
// buffer: where the plane's bytes go, and which source samples are read.
struct ComputedPlaneLayout {
  // The offset from the beginning of the buffer in one plane.
  uint32_t mDestinationOffset = 0;
  // The stride of the image data in one plane.
  uint32_t mDestinationStride = 0;
  // Sample count of picture's top offset (a.k.a samples of y).
  uint32_t mSourceTop = 0;
  // Sample count of the picture's height.
  uint32_t mSourceHeight = 0;
  // Byte count of the picture's left offset (a.k.a bytes of x).
  uint32_t mSourceLeftBytes = 0;
  // Byte count of the picture's width.
  uint32_t mSourceWidthBytes = 0;
};
    702 
    703 // https://w3c.github.io/webcodecs/#combined-buffer-layout
    704 struct CombinedBufferLayout {
    705  CombinedBufferLayout() : mAllocationSize(0) {}
    706  CombinedBufferLayout(uint32_t aAllocationSize,
    707                       nsTArray<ComputedPlaneLayout>&& aLayout)
    708      : mAllocationSize(aAllocationSize),
    709        mComputedLayouts(std::move(aLayout)) {}
    710  uint32_t mAllocationSize = 0;
    711  nsTArray<ComputedPlaneLayout> mComputedLayouts;
    712 };
    713 
    714 // https://w3c.github.io/webcodecs/#videoframe-compute-layout-and-allocation-size
    715 static Result<CombinedBufferLayout, MediaResult> ComputeLayoutAndAllocationSize(
    716    const gfx::IntRect& aRect, const VideoFrame::Format& aFormat,
    717    const Sequence<PlaneLayout>* aPlaneLayouts) {
    718  nsTArray<VideoFrame::Format::Plane> planes = aFormat.Planes();
    719 
    720  if (aPlaneLayouts && aPlaneLayouts->Length() != planes.Length()) {
    721    return Err(MediaResult(NS_ERROR_INVALID_ARG,
    722                           "Mismatch between format and layout"_ns));
    723  }
    724 
    725  uint32_t minAllocationSize = 0;
    726  nsTArray<ComputedPlaneLayout> layouts;
    727  nsTArray<uint32_t> endOffsets;
    728 
    729  for (size_t i = 0; i < planes.Length(); ++i) {
    730    const VideoFrame::Format::Plane& p = planes[i];
    731    const gfx::IntSize sampleSize = aFormat.SampleSize(p);
    732    MOZ_RELEASE_ASSERT(!sampleSize.IsEmpty());
    733 
    734    // aRect's x, y, width, and height are int32_t, and sampleSize's width and
    735    // height >= 1, so (aRect.* / sampleSize.*) must be in int32_t range.
    736 
    737    CheckedUint32 sourceTop(aRect.Y());
    738    sourceTop /= sampleSize.Height();
    739    MOZ_RELEASE_ASSERT(sourceTop.isValid());
    740 
    741    CheckedUint32 sourceHeight(aRect.Height());
    742    sourceHeight /= sampleSize.Height();
    743    MOZ_RELEASE_ASSERT(sourceHeight.isValid());
    744 
    745    CheckedUint32 sourceLeftBytes(aRect.X());
    746    sourceLeftBytes /= sampleSize.Width();
    747    MOZ_RELEASE_ASSERT(sourceLeftBytes.isValid());
    748    sourceLeftBytes *= aFormat.SampleBytes(p);
    749    if (!sourceLeftBytes.isValid()) {
    750      return Err(MediaResult(
    751          NS_ERROR_INVALID_ARG,
    752          nsPrintfCString(
    753              "The parsed-rect's x-offset is too large for %s plane",
    754              aFormat.PlaneName(p))));
    755    }
    756 
    757    CheckedUint32 sourceWidthBytes(aRect.Width());
    758    sourceWidthBytes /= sampleSize.Width();
    759    MOZ_RELEASE_ASSERT(sourceWidthBytes.isValid());
    760    sourceWidthBytes *= aFormat.SampleBytes(p);
    761    if (!sourceWidthBytes.isValid()) {
    762      return Err(MediaResult(
    763          NS_ERROR_INVALID_ARG,
    764          nsPrintfCString("The parsed-rect's width is too large for %s plane",
    765                          aFormat.PlaneName(p))));
    766    }
    767 
    768    ComputedPlaneLayout layout{.mDestinationOffset = 0,
    769                               .mDestinationStride = 0,
    770                               .mSourceTop = sourceTop.value(),
    771                               .mSourceHeight = sourceHeight.value(),
    772                               .mSourceLeftBytes = sourceLeftBytes.value(),
    773                               .mSourceWidthBytes = sourceWidthBytes.value()};
    774    if (aPlaneLayouts) {
    775      const PlaneLayout& planeLayout = aPlaneLayouts->ElementAt(i);
    776      if (planeLayout.mStride < layout.mSourceWidthBytes) {
    777        return Err(
    778            MediaResult(NS_ERROR_INVALID_ARG,
    779                        nsPrintfCString("The stride in %s plane is too small",
    780                                        aFormat.PlaneName(p))));
    781      }
    782      layout.mDestinationOffset = planeLayout.mOffset;
    783      layout.mDestinationStride = planeLayout.mStride;
    784    } else {
    785      layout.mDestinationOffset = minAllocationSize;
    786      layout.mDestinationStride = layout.mSourceWidthBytes;
    787    }
    788 
    789    const CheckedInt<uint32_t> planeSize =
    790        CheckedInt<uint32_t>(layout.mDestinationStride) * layout.mSourceHeight;
    791    if (!planeSize.isValid()) {
    792      return Err(MediaResult(NS_ERROR_INVALID_ARG,
    793                             "Invalid layout with an over-sized plane"_ns));
    794    }
    795    const CheckedInt<uint32_t> planeEnd = planeSize + layout.mDestinationOffset;
    796    if (!planeEnd.isValid()) {
    797      return Err(
    798          MediaResult(NS_ERROR_INVALID_ARG,
    799                      "Invalid layout with the out-out-bound offset"_ns));
    800    }
    801    endOffsets.AppendElement(planeEnd.value());
    802 
    803    minAllocationSize = std::max(minAllocationSize, planeEnd.value());
    804 
    805    for (size_t j = 0; j < i; ++j) {
    806      const ComputedPlaneLayout& earlier = layouts[j];
    807      // If the current data's end is smaller or equal to the previous one's
    808      // head, or if the previous data's end is smaller or equal to the current
    809      // one's head, then they do not overlap. Otherwise, they do.
    810      if (endOffsets[i] > earlier.mDestinationOffset &&
    811          endOffsets[j] > layout.mDestinationOffset) {
    812        return Err(MediaResult(NS_ERROR_INVALID_ARG,
    813                               "Invalid layout with the overlapped planes"_ns));
    814      }
    815    }
    816    layouts.AppendElement(layout);
    817  }
    818 
    819  return CombinedBufferLayout(minAllocationSize, std::move(layouts));
    820 }
    821 
    822 // https://w3c.github.io/webcodecs/#videoframe-verify-rect-size-alignment
    823 static MediaResult VerifyRectSizeAlignment(const VideoFrame::Format& aFormat,
    824                                           const gfx::IntRect& aRect) {
    825  for (const VideoFrame::Format::Plane& p : aFormat.Planes()) {
    826    const gfx::IntSize sample = aFormat.SampleSize(p);
    827    if (aRect.Width() % sample.Width() != 0) {
    828      return MediaResult(NS_ERROR_INVALID_ARG,
    829                         "Mismatch between format and given rect's width"_ns);
    830    }
    831 
    832    if (aRect.Height() % sample.Height() != 0) {
    833      return MediaResult(NS_ERROR_INVALID_ARG,
    834                         "Mismatch between format and given rect's height"_ns);
    835    }
    836  }
    837  return MediaResult(NS_OK);
    838 }
    839 
    840 // https://w3c.github.io/webcodecs/#videoframe-parse-videoframecopytooptions
    841 static Result<CombinedBufferLayout, MediaResult> ParseVideoFrameCopyToOptions(
    842    const VideoFrameCopyToOptions& aOptions, const gfx::IntRect& aVisibleRect,
    843    const gfx::IntSize& aCodedSize, const VideoFrame::Format& aFormat) {
    844  Maybe<gfx::IntRect> overrideRect;
    845  if (aOptions.mRect.WasPassed()) {
    846    // TODO: We handle some edge cases that spec misses:
    847    // https://github.com/w3c/webcodecs/issues/513
    848    // This comment should be removed once the issue is resolved.
    849    overrideRect.emplace(
    850        MOZ_TRY(ToIntRect(aOptions.mRect.Value()).mapErr([](nsCString error) {
    851          error.Insert("rect's ", 0);
    852          return MediaResult(NS_ERROR_INVALID_ARG, error);
    853        })));
    854 
    855    MediaResult r = VerifyRectSizeAlignment(aFormat, overrideRect.ref());
    856    if (NS_FAILED(r.Code())) {
    857      return Err(r);
    858    }
    859  }
    860 
    861  gfx::IntRect parsedRect = MOZ_TRY(
    862      ParseVisibleRect(aVisibleRect, overrideRect, aCodedSize, aFormat));
    863 
    864  const Sequence<PlaneLayout>* optLayout = OptionalToPointer(aOptions.mLayout);
    865 
    866  VideoFrame::Format format(aFormat);
    867  if (aOptions.mFormat.WasPassed()) {
    868    if (aOptions.mFormat.Value() != VideoPixelFormat::RGBA &&
    869        aOptions.mFormat.Value() != VideoPixelFormat::RGBX &&
    870        aOptions.mFormat.Value() != VideoPixelFormat::BGRA &&
    871        aOptions.mFormat.Value() != VideoPixelFormat::BGRX) {
    872      nsAutoCString error(dom::GetEnumString(aOptions.mFormat.Value()).get());
    873      error.Append(" is unsupported in ParseVideoFrameCopyToOptions");
    874      return Err(MediaResult(NS_ERROR_DOM_NOT_SUPPORTED_ERR, error));
    875    }
    876    format = VideoFrame::Format(aOptions.mFormat.Value());
    877  }
    878 
    879  return ComputeLayoutAndAllocationSize(parsedRect, format, optLayout);
    880 }
    881 
    882 static bool IsYUVFormat(const VideoPixelFormat& aFormat) {
    883  switch (aFormat) {
    884    case VideoPixelFormat::I420:
    885    case VideoPixelFormat::I420P10:
    886    case VideoPixelFormat::I420P12:
    887    case VideoPixelFormat::I420A:
    888    case VideoPixelFormat::I420AP10:
    889    case VideoPixelFormat::I420AP12:
    890    case VideoPixelFormat::I422:
    891    case VideoPixelFormat::I422P10:
    892    case VideoPixelFormat::I422P12:
    893    case VideoPixelFormat::I422A:
    894    case VideoPixelFormat::I422AP10:
    895    case VideoPixelFormat::I422AP12:
    896    case VideoPixelFormat::I444:
    897    case VideoPixelFormat::I444P10:
    898    case VideoPixelFormat::I444P12:
    899    case VideoPixelFormat::I444A:
    900    case VideoPixelFormat::I444AP10:
    901    case VideoPixelFormat::I444AP12:
    902    case VideoPixelFormat::NV12:
    903      return true;
    904    case VideoPixelFormat::RGBA:
    905    case VideoPixelFormat::RGBX:
    906    case VideoPixelFormat::BGRA:
    907    case VideoPixelFormat::BGRX:
    908      return false;
    909  }
    910  return false;
    911 }
    912 
    913 // https://w3c.github.io/webcodecs/#videoframe-pick-color-space
    914 static VideoColorSpaceInternal PickColorSpace(
    915    const VideoColorSpaceInit* aInitColorSpace,
    916    const VideoPixelFormat& aFormat) {
    917  VideoColorSpaceInternal colorSpace;
    918  if (aInitColorSpace) {
    919    colorSpace = VideoColorSpaceInternal(*aInitColorSpace);
    920    // By spec, we MAY replace null members of aInitColorSpace with guessed
    921    // values so we can always use these in CreateYUVImageFromBuffer.
    922    if (IsYUVFormat(aFormat) && colorSpace.mMatrix.isNothing()) {
    923      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Bt709);
    924    }
    925    return colorSpace;
    926  }
    927 
    928  switch (aFormat) {
    929    case VideoPixelFormat::I420:
    930    case VideoPixelFormat::I420P10:
    931    case VideoPixelFormat::I420P12:
    932    case VideoPixelFormat::I420A:
    933    case VideoPixelFormat::I420AP10:
    934    case VideoPixelFormat::I420AP12:
    935    case VideoPixelFormat::I422:
    936    case VideoPixelFormat::I422P10:
    937    case VideoPixelFormat::I422P12:
    938    case VideoPixelFormat::I422A:
    939    case VideoPixelFormat::I422AP10:
    940    case VideoPixelFormat::I422AP12:
    941    case VideoPixelFormat::I444:
    942    case VideoPixelFormat::I444P10:
    943    case VideoPixelFormat::I444P12:
    944    case VideoPixelFormat::I444A:
    945    case VideoPixelFormat::I444AP10:
    946    case VideoPixelFormat::I444AP12:
    947    case VideoPixelFormat::NV12:
    948      // https://w3c.github.io/webcodecs/#rec709-color-space
    949      colorSpace.mFullRange.emplace(false);
    950      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Bt709);
    951      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
    952      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Bt709);
    953      break;
    954    case VideoPixelFormat::RGBA:
    955    case VideoPixelFormat::RGBX:
    956    case VideoPixelFormat::BGRA:
    957    case VideoPixelFormat::BGRX:
    958      // https://w3c.github.io/webcodecs/#srgb-color-space
    959      colorSpace.mFullRange.emplace(true);
    960      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
    961      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
    962      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
    963      break;
    964  }
    965 
    966  return colorSpace;
    967 }
    968 
    969 // https://w3c.github.io/webcodecs/#validate-videoframeinit
    970 static Result<std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>>, nsCString>
    971 ValidateVideoFrameInit(const VideoFrameInit& aInit,
    972                       const Maybe<VideoFrame::Format>& aFormat,
    973                       const gfx::IntSize& aCodedSize) {
    974  if (aCodedSize.Width() <= 0 || aCodedSize.Height() <= 0) {
    975    return Err("codedWidth and codedHeight must be positive"_ns);
    976  }
    977 
    978  Maybe<gfx::IntRect> visibleRect;
    979  if (aInit.mVisibleRect.WasPassed()) {
    980    visibleRect.emplace(MOZ_TRY(
    981        ToIntRect(aInit.mVisibleRect.Value()).mapErr([](nsCString error) {
    982          error.Insert("visibleRect's ", 0);
    983          return error;
    984        })));
    985    MOZ_TRY(ValidateVisibility(visibleRect.ref(), aCodedSize));
    986 
    987    MOZ_TRY(VerifyRectOffsetAlignment(aFormat, visibleRect.ref()));
    988  }
    989 
    990  Maybe<gfx::IntSize> displaySize = MOZ_TRY(MaybeGetDisplaySize(aInit));
    991 
    992  return std::make_pair(visibleRect, displaySize);
    993 }
    994 
    995 // https://w3c.github.io/webcodecs/#dom-videoframe-videoframe-data-init
    996 template <class T>
    997 static Result<RefPtr<VideoFrame>, MediaResult> CreateVideoFrameFromBuffer(
    998    nsIGlobalObject* aGlobal, const T& aBuffer,
    999    const VideoFrameBufferInit& aInit) {
   1000  if (aInit.mColorSpace.WasPassed() &&
   1001      !aInit.mColorSpace.Value().mTransfer.IsNull() &&
   1002      aInit.mColorSpace.Value().mTransfer.Value() ==
   1003          VideoTransferCharacteristics::Linear) {
   1004    return Err(MediaResult(NS_ERROR_DOM_NOT_SUPPORTED_ERR,
   1005                           "linear RGB is not supported"_ns));
   1006  }
   1007 
   1008  std::tuple<gfx::IntSize, Maybe<gfx::IntRect>, Maybe<gfx::IntSize>> init =
   1009      MOZ_TRY(ValidateVideoFrameBufferInit(aInit).mapErr([](nsCString error) {
   1010        return MediaResult(NS_ERROR_INVALID_ARG, error);
   1011      }));
   1012  gfx::IntSize codedSize = std::get<0>(init);
   1013  Maybe<gfx::IntRect> visibleRect = std::get<1>(init);
   1014  Maybe<gfx::IntSize> displaySize = std::get<2>(init);
   1015 
   1016  VideoFrame::Format format(aInit.mFormat);
   1017  // TODO: Spec doesn't ask for this in ctor but Pixel Format does. See
   1018  // https://github.com/w3c/webcodecs/issues/512
   1019  // This comment should be removed once the issue is resolved.
   1020  if (!format.IsValidSize(codedSize)) {
   1021    return Err(MediaResult(NS_ERROR_INVALID_ARG,
   1022                           "coded width and/or height is invalid"_ns));
   1023  }
   1024 
   1025  gfx::IntRect parsedRect = MOZ_TRY(ParseVisibleRect(
   1026      gfx::IntRect({0, 0}, codedSize), visibleRect, codedSize, format));
   1027 
   1028  const Sequence<PlaneLayout>* optLayout = OptionalToPointer(aInit.mLayout);
   1029 
   1030  CombinedBufferLayout combinedLayout =
   1031      MOZ_TRY(ComputeLayoutAndAllocationSize(parsedRect, format, optLayout));
   1032 
   1033  Maybe<uint64_t> duration = OptionalToMaybe(aInit.mDuration);
   1034 
   1035  VideoColorSpaceInternal colorSpace =
   1036      PickColorSpace(OptionalToPointer(aInit.mColorSpace), aInit.mFormat);
   1037 
   1038  RefPtr<layers::Image> data = MOZ_TRY(aBuffer.ProcessFixedData(
   1039      [&](const Span<uint8_t>& aData)
   1040          -> Result<RefPtr<layers::Image>, MediaResult> {
   1041        if (aData.Length() <
   1042            static_cast<size_t>(combinedLayout.mAllocationSize)) {
   1043          return Err(MediaResult(NS_ERROR_INVALID_ARG, "data is too small"_ns));
   1044        }
   1045 
   1046        // TODO: If codedSize is (3, 3) and visibleRect is (0, 0, 1, 1) but
   1047        // the data is 2 x 2 RGBA buffer (2 x 2 x 4 bytes), it pass the
   1048        // above check. In this case, we can crop it to a 1 x 1-codedSize
   1049        // image (Bug 1782128).
   1050        if (aData.Length() < format.ByteCount(codedSize)) {
   1051          return Err(MediaResult(NS_ERROR_INVALID_ARG, "data is too small"_ns));
   1052        }
   1053 
   1054        return CreateImageFromBuffer(format, colorSpace, codedSize, aData);
   1055      }));
   1056 
   1057  MOZ_ASSERT(data);
   1058  MOZ_ASSERT(data->GetSize() == codedSize);
   1059 
   1060  // By spec, we should set visible* here. But if we don't change the image,
   1061  // visible* is same as parsedRect here. The display{Width, Height} is
   1062  // visible{Width, Height} if it's not set.
   1063 
   1064  return MakeRefPtr<VideoFrame>(aGlobal, data, Some(aInit.mFormat), codedSize,
   1065                                parsedRect,
   1066                                displaySize ? *displaySize : parsedRect.Size(),
   1067                                duration, aInit.mTimestamp, colorSpace);
   1068 }
   1069 
   1070 template <class T>
   1071 static already_AddRefed<VideoFrame> CreateVideoFrameFromBuffer(
   1072    const GlobalObject& aGlobal, const T& aBuffer,
   1073    const VideoFrameBufferInit& aInit, ErrorResult& aRv) {
   1074  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
   1075  if (!global) {
   1076    aRv.Throw(NS_ERROR_FAILURE);
   1077    return nullptr;
   1078  }
   1079 
   1080  auto r = CreateVideoFrameFromBuffer(global, aBuffer, aInit);
   1081  if (r.isErr()) {
   1082    MediaResult err = r.unwrapErr();
   1083    if (err.Code() == NS_ERROR_DOM_NOT_SUPPORTED_ERR) {
   1084      aRv.ThrowNotSupportedError(err.Message());
   1085    } else {
   1086      aRv.ThrowTypeError(err.Message());
   1087    }
   1088    return nullptr;
   1089  }
   1090  return r.unwrap().forget();
   1091 }
   1092 
   1093 // https://w3c.github.io/webcodecs/#videoframe-initialize-visible-rect-and-display-size
   1094 static void InitializeVisibleRectAndDisplaySize(
   1095    Maybe<gfx::IntRect>& aVisibleRect, Maybe<gfx::IntSize>& aDisplaySize,
   1096    gfx::IntRect aDefaultVisibleRect, gfx::IntSize aDefaultDisplaySize) {
   1097  if (!aVisibleRect) {
   1098    aVisibleRect.emplace(aDefaultVisibleRect);
   1099  }
   1100  if (!aDisplaySize) {
   1101    double wScale = static_cast<double>(aDefaultDisplaySize.Width()) /
   1102                    aDefaultVisibleRect.Width();
   1103    double hScale = static_cast<double>(aDefaultDisplaySize.Height()) /
   1104                    aDefaultVisibleRect.Height();
   1105    double w = wScale * aVisibleRect->Width();
   1106    double h = hScale * aVisibleRect->Height();
   1107    aDisplaySize.emplace(gfx::IntSize(static_cast<uint32_t>(round(w)),
   1108                                      static_cast<uint32_t>(round(h))));
   1109  }
   1110 }
   1111 
   1112 // https://w3c.github.io/webcodecs/#videoframe-initialize-frame-with-resource-and-size
   1113 static Result<already_AddRefed<VideoFrame>, nsCString>
   1114 InitializeFrameWithResourceAndSize(nsIGlobalObject* aGlobal,
   1115                                   const VideoFrameInit& aInit,
   1116                                   already_AddRefed<layers::Image> aImage) {
   1117  MOZ_ASSERT(aInit.mTimestamp.WasPassed());
   1118 
   1119  RefPtr<layers::Image> image(aImage);
   1120  MOZ_ASSERT(image);
   1121 
   1122  RefPtr<gfx::SourceSurface> surface = image->GetAsSourceSurface();
   1123  Maybe<VideoFrame::Format> format =
   1124      SurfaceFormatToVideoPixelFormat(surface->GetFormat())
   1125          .map([](const VideoPixelFormat& aFormat) {
   1126            return VideoFrame::Format(aFormat);
   1127          });
   1128 
   1129  std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>> init =
   1130      MOZ_TRY(ValidateVideoFrameInit(aInit, format, image->GetSize()));
   1131  Maybe<gfx::IntRect> visibleRect = init.first;
   1132  Maybe<gfx::IntSize> displaySize = init.second;
   1133 
   1134  if (format && aInit.mAlpha == AlphaOption::Discard) {
   1135    format->MakeOpaque();
   1136    // Keep the alpha data in image for now until it's being rendered.
   1137    // TODO: The alpha will still be rendered if the format is unrecognized
   1138    // since no additional flag keeping this request. Should spec address what
   1139    // to do in this case?
   1140  }
   1141 
   1142  InitializeVisibleRectAndDisplaySize(visibleRect, displaySize,
   1143                                      gfx::IntRect({0, 0}, image->GetSize()),
   1144                                      image->GetSize());
   1145 
   1146  Maybe<uint64_t> duration = OptionalToMaybe(aInit.mDuration);
   1147 
   1148  VideoColorSpaceInternal colorSpace;
   1149  if (IsYUVFormat(
   1150          SurfaceFormatToVideoPixelFormat(surface->GetFormat()).ref())) {
   1151    colorSpace = FallbackColorSpaceForVideoContent();
   1152  } else {
   1153    colorSpace = FallbackColorSpaceForWebContent();
   1154  }
   1155  return MakeAndAddRef<VideoFrame>(
   1156      aGlobal, image, format ? Some(format->PixelFormat()) : Nothing(),
   1157      image->GetSize(), visibleRect.value(), displaySize.value(), duration,
   1158      aInit.mTimestamp.Value(), colorSpace);
   1159 }
   1160 
   1161 // https://w3c.github.io/webcodecs/#videoframe-initialize-frame-from-other-frame
   1162 static Result<already_AddRefed<VideoFrame>, nsCString>
   1163 InitializeFrameFromOtherFrame(nsIGlobalObject* aGlobal, VideoFrameData&& aData,
   1164                              const VideoFrameInit& aInit) {
   1165  MOZ_ASSERT(aGlobal);
   1166  MOZ_ASSERT(aData.mImage);
   1167 
   1168  Maybe<VideoFrame::Format> format =
   1169      aData.mFormat ? Some(VideoFrame::Format(*aData.mFormat)) : Nothing();
   1170  if (format && aInit.mAlpha == AlphaOption::Discard) {
   1171    format->MakeOpaque();
   1172    // Keep the alpha data in image for now until it's being rendered.
   1173    // TODO: The alpha will still be rendered if the format is unrecognized
   1174    // since no additional flag keeping this request. Should spec address what
   1175    // to do in this case?
   1176  }
   1177 
   1178  std::pair<Maybe<gfx::IntRect>, Maybe<gfx::IntSize>> init =
   1179      MOZ_TRY(ValidateVideoFrameInit(aInit, format, aData.mImage->GetSize()));
   1180  Maybe<gfx::IntRect> visibleRect = init.first;
   1181  Maybe<gfx::IntSize> displaySize = init.second;
   1182 
   1183  InitializeVisibleRectAndDisplaySize(visibleRect, displaySize,
   1184                                      aData.mVisibleRect, aData.mDisplaySize);
   1185 
   1186  Maybe<uint64_t> duration = OptionalToMaybe(aInit.mDuration);
   1187 
   1188  int64_t timestamp = aInit.mTimestamp.WasPassed() ? aInit.mTimestamp.Value()
   1189                                                   : aData.mTimestamp;
   1190 
   1191  return MakeAndAddRef<VideoFrame>(
   1192      aGlobal, aData.mImage, format ? Some(format->PixelFormat()) : Nothing(),
   1193      aData.mImage->GetSize(), *visibleRect, *displaySize, duration, timestamp,
   1194      aData.mColorSpace);
   1195 }
   1196 
   1197 static void CloneConfiguration(RootedDictionary<VideoFrameCopyToOptions>& aDest,
   1198                               const VideoFrameCopyToOptions& aSrc) {
   1199  if (aSrc.mColorSpace.WasPassed()) {
   1200    aDest.mColorSpace.Construct(aSrc.mColorSpace.Value());
   1201  }
   1202 
   1203  if (aSrc.mFormat.WasPassed()) {
   1204    aDest.mFormat.Construct(aSrc.mFormat.Value());
   1205  }
   1206 
   1207  if (aSrc.mLayout.WasPassed()) {
   1208    aDest.mLayout.Construct(aSrc.mLayout.Value());
   1209  }
   1210 
   1211  if (aSrc.mRect.WasPassed()) {
   1212    aDest.mRect.Construct(aSrc.mRect.Value());
   1213  }
   1214 }
   1215 
   1216 // Convert the aImage to an image with aColorSpace color space in aFormat
   1217 // format.
static Result<RefPtr<layers::Image>, MediaResult> ConvertToRGBAImage(
    const RefPtr<layers::Image>& aImage, const VideoPixelFormat& aFormat,
    const PredefinedColorSpace& aColorSpace) {
  MOZ_ASSERT(aImage);

  // Only the 8-bit-per-channel RGB family is a valid conversion target.
  if (aFormat != VideoPixelFormat::RGBA && aFormat != VideoPixelFormat::RGBX &&
      aFormat != VideoPixelFormat::BGRA && aFormat != VideoPixelFormat::BGRX) {
    return Err(MediaResult(
        NS_ERROR_INVALID_ARG,
        nsPrintfCString("Image conversion into %s format is invalid",
                        dom::GetEnumString(aFormat).get())));
  }

  // Stride is 4 bytes per pixel; guard width * 4 against int32 overflow.
  CheckedInt32 stride(aImage->GetSize().Width());
  stride *= 4;
  if (!stride.isValid()) {
    return Err(
        MediaResult(NS_ERROR_INVALID_ARG, "The image width is too big"_ns));
  }

  // Total byte size = stride * height, checked in size_t.
  CheckedInt<size_t> size(stride.value());
  size *= aImage->GetSize().Height();
  if (!size.isValid()) {
    return Err(
        MediaResult(NS_ERROR_INVALID_ARG, "The image size is too big"_ns));
  }

  UniquePtr<uint8_t[]> buffer(new uint8_t[size.value()]);
  // NOTE(review): plain operator new[] throws on failure rather than
  // returning null, so this branch looks unreachable; a fallible allocation
  // (e.g. MakeUniqueFallible) would be needed for it to fire — confirm.
  if (!buffer) {
    return Err(MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "Failed to allocate buffer for converted image"_ns));
  }

  // Bug 1906717: Optimize YUV-to-RGBA with specified color space.

  VideoFrame::Format format(aFormat);
  gfx::SurfaceFormat surfaceFormat = format.ToSurfaceFormat();

  // Convert the source image's pixels (whatever their format) into the
  // requested RGB layout, assuming an sRGB destination for now.
  nsresult r =
      ConvertToRGBA(aImage.get(), surfaceFormat, buffer.get(), stride.value());
  if (NS_FAILED(r)) {
    return Err(
        MediaResult(r, nsPrintfCString("Failed to convert into %s image",
                                       dom::GetEnumString(aFormat).get())));
  }

  // A Display-P3 target needs an extra in-place sRGB -> P3 conversion pass.
  if (aColorSpace == PredefinedColorSpace::Display_p3) {
    r = ConvertSRGBBufferToDisplayP3(buffer.get(), surfaceFormat, buffer.get(),
                                     aImage->GetSize().Width(),
                                     aImage->GetSize().Height());
    if (NS_FAILED(r)) {
      return Err(MediaResult(
          r, nsPrintfCString("Failed to convert image from srgb into %s color",
                             dom::GetEnumString(aColorSpace).get())));
    }
  }

  // Wrap the converted bytes into a new layers::Image.
  Span<uint8_t> data(buffer.get(), size.value());
  return CreateImageFromRawData(aImage->GetSize(), stride.value(),
                                surfaceFormat, data);
}
   1279 
   1280 static VideoColorSpaceInternal ConvertToColorSpace(
   1281    const PredefinedColorSpace& aColorSpace) {
   1282  VideoColorSpaceInternal colorSpace;
   1283  switch (aColorSpace) {
   1284    case PredefinedColorSpace::Srgb:
   1285      // https://w3c.github.io/webcodecs/#srgb-color-space
   1286      colorSpace.mFullRange.emplace(true);
   1287      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
   1288      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Bt709);
   1289      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
   1290      break;
   1291    case PredefinedColorSpace::Display_p3:
   1292      colorSpace.mFullRange.emplace(true);
   1293      colorSpace.mMatrix.emplace(VideoMatrixCoefficients::Rgb);
   1294      colorSpace.mPrimaries.emplace(VideoColorPrimaries::Smpte432);
   1295      colorSpace.mTransfer.emplace(VideoTransferCharacteristics::Iec61966_2_1);
   1296      break;
   1297  }
   1298  MOZ_ASSERT(colorSpace.mFullRange.isSome());
   1299  return colorSpace;
   1300 }
   1301 
   1302 /*
   1303 * Helper classes carrying VideoFrame data
   1304 */
   1305 
// Plain carrier of everything needed to (re)construct a VideoFrame from an
// existing one; stores the image pointer and copies the metadata.
VideoFrameData::VideoFrameData(layers::Image* aImage,
                               const Maybe<VideoPixelFormat>& aFormat,
                               gfx::IntRect aVisibleRect,
                               gfx::IntSize aDisplaySize,
                               Maybe<uint64_t> aDuration, int64_t aTimestamp,
                               const VideoColorSpaceInternal& aColorSpace)
    : mImage(aImage),
      mFormat(aFormat),
      mVisibleRect(aVisibleRect),
      mDisplaySize(aDisplaySize),
      mDuration(aDuration),
      mTimestamp(aTimestamp),
      mColorSpace(aColorSpace) {}
   1319 
// Extends VideoFrameData with the coded size, which deserialization needs to
// reconstruct the frame's full geometry.
VideoFrameSerializedData::VideoFrameSerializedData(const VideoFrameData& aData,
                                                   gfx::IntSize aCodedSize)
    : VideoFrameData(aData), mCodedSize(aCodedSize) {}
   1323 
   1324 /*
   1325 * W3C Webcodecs VideoFrame implementation
   1326 */
   1327 
// Main constructor: wraps an existing layers::Image as a WebCodecs VideoFrame
// with the given geometry, timing, and color space. aFormat may be Nothing
// when the image's pixel layout has no VideoPixelFormat equivalent.
VideoFrame::VideoFrame(nsIGlobalObject* aParent,
                       const RefPtr<layers::Image>& aImage,
                       const Maybe<VideoPixelFormat>& aFormat,
                       gfx::IntSize aCodedSize, gfx::IntRect aVisibleRect,
                       gfx::IntSize aDisplaySize,
                       const Maybe<uint64_t>& aDuration, int64_t aTimestamp,
                       const VideoColorSpaceInternal& aColorSpace)
    : mParent(aParent),
      mCodedSize(aCodedSize),
      mVisibleRect(aVisibleRect),
      mDisplaySize(aDisplaySize),
      mDuration(aDuration),
      mTimestamp(aTimestamp),
      mColorSpace(aColorSpace) {
  MOZ_ASSERT(mParent);
  LOG("VideoFrame %p ctor", this);
  // The image and its (possibly missing) format live together in mResource.
  mResource.emplace(
      Resource(aImage, aFormat.map([](const VideoPixelFormat& aPixelFormat) {
        return VideoFrame::Format(aPixelFormat);
      })));
  if (!mResource->mFormat) {
    LOGW("Create a VideoFrame with an unrecognized image format");
  }
  // Begin the auto-close tracking (defined elsewhere in this file).
  StartAutoClose();
}
   1353 
// Deserialization constructor: rebuilds a frame from the data captured by
// structured clone/transfer.
VideoFrame::VideoFrame(nsIGlobalObject* aParent,
                       const VideoFrameSerializedData& aData)
    : mParent(aParent),
      mCodedSize(aData.mCodedSize),
      mVisibleRect(aData.mVisibleRect),
      mDisplaySize(aData.mDisplaySize),
      mDuration(aData.mDuration),
      mTimestamp(aData.mTimestamp),
      mColorSpace(aData.mColorSpace) {
  MOZ_ASSERT(mParent);
  LOG("VideoFrame %p ctor (from serialized data)", this);
  // Rewrap the serialized image with its (possibly missing) format.
  mResource.emplace(Resource(
      aData.mImage, aData.mFormat.map([](const VideoPixelFormat& aPixelFormat) {
        return VideoFrame::Format(aPixelFormat);
      })));
  if (!mResource->mFormat) {
    LOGW("Create a VideoFrame with an unrecognized image format");
  }
  // Begin the auto-close tracking (defined elsewhere in this file).
  StartAutoClose();
}
   1374 
// Copy constructor: shares the underlying Resource (image + format) and
// copies all metadata; used by clone().
VideoFrame::VideoFrame(const VideoFrame& aOther)
    : mParent(aOther.mParent),
      mResource(aOther.mResource),
      mCodedSize(aOther.mCodedSize),
      mVisibleRect(aOther.mVisibleRect),
      mDisplaySize(aOther.mDisplaySize),
      mDuration(aOther.mDuration),
      mTimestamp(aOther.mTimestamp),
      mColorSpace(aOther.mColorSpace) {
  MOZ_ASSERT(mParent);
  LOG("VideoFrame %p copy ctor", this);
  // Begin the auto-close tracking (defined elsewhere in this file).
  StartAutoClose();
}
   1388 
// A frame must have been closed (its resource released) before destruction.
VideoFrame::~VideoFrame() {
  MOZ_ASSERT(IsClosed());
  LOG("VideoFrame %p dtor", this);
}
   1393 
// Returns the global this frame belongs to; required by the WebIDL bindings.
nsIGlobalObject* VideoFrame::GetParentObject() const {
  AssertIsOnOwningThread();

  return mParent.get();
}
   1399 
// Creates the JS reflector for this VideoFrame via the generated binding.
JSObject* VideoFrame::WrapObject(JSContext* aCx,
                                 JS::Handle<JSObject*> aGivenProto) {
  AssertIsOnOwningThread();

  return VideoFrame_Binding::Wrap(aCx, this, aGivenProto);
}
   1406 
   1407 /* static */
   1408 bool VideoFrame::PrefEnabled(JSContext* aCx, JSObject* aObj) {
   1409  return (StaticPrefs::dom_media_webcodecs_enabled() ||
   1410          StaticPrefs::dom_media_webcodecs_image_decoder_enabled()) &&
   1411         !nsRFPService::IsWebCodecsRFPTargetEnabled(aCx);
   1412 }
   1413 
   1414 // The following constructors are defined in
   1415 // https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
   1416 
/* static */
// VideoFrame(HTMLImageElement, VideoFrameInit): validates the image's
// usability and origin, grabs its first frame as a surface, and builds the
// frame from that surface.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, HTMLImageElement& aImageElement,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability.
  if (aImageElement.State().HasState(ElementState::BROKEN)) {
    aRv.ThrowInvalidStateError("The image's state is broken");
    return nullptr;
  }
  if (!aImageElement.Complete()) {
    aRv.ThrowInvalidStateError("The image is not completely loaded yet");
    return nullptr;
  }
  if (aImageElement.NaturalWidth() == 0) {
    aRv.ThrowInvalidStateError("The image has a width of 0");
    return nullptr;
  }
  if (aImageElement.NaturalHeight() == 0) {
    aRv.ThrowInvalidStateError("The image has a height of 0");
    return nullptr;
  }

  // If the origin of HTMLImageElement's image data is not same origin with the
  // entry settings object's origin, then throw a SecurityError DOMException.
  SurfaceFromElementResult res = nsLayoutUtils::SurfaceFromElement(
      &aImageElement, nsLayoutUtils::SFE_WANT_FIRST_FRAME_IF_IMAGE);
  if (res.mIsWriteOnly) {
    // Being write-only implies its image is cross-origin w/out CORS headers.
    aRv.ThrowSecurityError("The image is not same-origin");
    return nullptr;
  }

  RefPtr<gfx::SourceSurface> surface = res.GetSourceSurface();
  if (NS_WARN_IF(!surface)) {
    aRv.ThrowInvalidStateError("The image's surface acquisition failed");
    return nullptr;
  }

  // Spec requires a timestamp when constructing from an image element.
  if (!aInit.mTimestamp.WasPassed()) {
    aRv.ThrowTypeError("Missing timestamp");
    return nullptr;
  }

  // Wrap the surface in a layers::Image and finish via the shared helper;
  // helper failures surface as TypeErrors.
  RefPtr<layers::SourceSurfaceImage> image =
      new layers::SourceSurfaceImage(surface.get());
  auto r = InitializeFrameWithResourceAndSize(global, aInit, image.forget());
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1475 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from an SVGImageElement; mirrors the
// HTMLImageElement overload (usability check, same-origin check, required
// timestamp, then frame initialization).
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, SVGImageElement& aSVGImageElement,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability.
  if (aSVGImageElement.State().HasState(ElementState::BROKEN)) {
    aRv.ThrowInvalidStateError("The SVG's state is broken");
    return nullptr;
  }

  // If the origin of SVGImageElement's image data is not same origin with the
  // entry settings object's origin, then throw a SecurityError DOMException.
  SurfaceFromElementResult res = nsLayoutUtils::SurfaceFromElement(
      &aSVGImageElement, nsLayoutUtils::SFE_WANT_FIRST_FRAME_IF_IMAGE);
  if (res.mIsWriteOnly) {
    // Being write-only implies its image is cross-origin w/out CORS headers.
    aRv.ThrowSecurityError("The SVG is not same-origin");
    return nullptr;
  }

  RefPtr<gfx::SourceSurface> surface = res.GetSourceSurface();
  if (NS_WARN_IF(!surface)) {
    aRv.ThrowInvalidStateError("The SVG's surface acquisition failed");
    return nullptr;
  }

  // `timestamp` is required when constructing from a CanvasImageSource.
  if (!aInit.mTimestamp.WasPassed()) {
    aRv.ThrowTypeError("Missing timestamp");
    return nullptr;
  }

  RefPtr<layers::SourceSurfaceImage> image =
      new layers::SourceSurfaceImage(surface.get());
  auto r = InitializeFrameWithResourceAndSize(global, aInit, image.forget());
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1522 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from an HTMLCanvasElement. Unlike the image
// overloads, the acquired surface is routed through
// CreateImageFromSourceSurface before frame initialization.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, HTMLCanvasElement& aCanvasElement,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability: a zero-sized canvas has no pixels to capture.
  if (aCanvasElement.Width() == 0) {
    aRv.ThrowInvalidStateError("The canvas has a width of 0");
    return nullptr;
  }

  if (aCanvasElement.Height() == 0) {
    aRv.ThrowInvalidStateError("The canvas has a height of 0");
    return nullptr;
  }

  // If the origin of HTMLCanvasElement's image data is not same origin with the
  // entry settings object's origin, then throw a SecurityError DOMException.
  SurfaceFromElementResult res = nsLayoutUtils::SurfaceFromElement(
      &aCanvasElement, nsLayoutUtils::SFE_WANT_FIRST_FRAME_IF_IMAGE);
  if (res.mIsWriteOnly) {
    // Being write-only implies its image is cross-origin w/out CORS headers.
    aRv.ThrowSecurityError("The canvas is not same-origin");
    return nullptr;
  }

  RefPtr<gfx::SourceSurface> surface = res.GetSourceSurface();
  if (NS_WARN_IF(!surface)) {
    aRv.ThrowInvalidStateError("The canvas' surface acquisition failed");
    return nullptr;
  }

  // `timestamp` is required when constructing from a CanvasImageSource.
  if (!aInit.mTimestamp.WasPassed()) {
    aRv.ThrowTypeError("Missing timestamp");
    return nullptr;
  }

  auto imageResult = CreateImageFromSourceSurface(surface);
  if (imageResult.isErr()) {
    auto err = imageResult.unwrapErr();
    aRv.ThrowTypeError(err.Message());
    return nullptr;
  }

  RefPtr<layers::Image> image = imageResult.unwrap();
  auto frameResult =
      InitializeFrameWithResourceAndSize(global, aInit, image.forget());
  if (frameResult.isErr()) {
    aRv.ThrowTypeError(frameResult.unwrapErr());
    return nullptr;
  }
  return frameResult.unwrap();
}
   1581 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from the current image of an HTMLVideoElement.
// No timestamp is required in `aInit`; the element's playback position is
// used instead.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, HTMLVideoElement& aVideoElement,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  aVideoElement.LogVisibility(
      mozilla::dom::HTMLVideoElement::CallerAPI::CREATE_VIDEOFRAME);

  // Check the usability: the element must have media metadata and a decoded
  // image available.
  if (aVideoElement.NetworkState() == HTMLMediaElement_Binding::NETWORK_EMPTY) {
    aRv.ThrowInvalidStateError("The video has not been initialized yet");
    return nullptr;
  }
  if (aVideoElement.ReadyState() <= HTMLMediaElement_Binding::HAVE_METADATA) {
    aRv.ThrowInvalidStateError("The video is not ready yet");
    return nullptr;
  }
  RefPtr<layers::Image> image = aVideoElement.GetCurrentImage();
  if (!image) {
    aRv.ThrowInvalidStateError("The video doesn't have any image yet");
    return nullptr;
  }

  // If the origin of HTMLVideoElement's image data is not same origin with the
  // entry settings object's origin, then throw a SecurityError DOMException.
  if (!IsSameOrigin(global.get(), aVideoElement)) {
    aRv.ThrowSecurityError("The video is not same-origin");
    return nullptr;
  }

  // Map the layers image format (if recognizable) to a VideoPixelFormat.
  const ImageUtils imageUtils(image);
  Maybe<dom::ImageBitmapFormat> f = imageUtils.GetFormat();
  Maybe<VideoPixelFormat> format =
      f.isSome() ? ImageBitmapFormatToVideoPixelFormat(f.value()) : Nothing();

  // TODO: Retrive/infer the duration, and colorspace.
  // NOTE(review): CurrentTime() is the playback position in seconds, while a
  // VideoFrame timestamp is specified in microseconds — confirm that this
  // bare cast (no seconds-to-microseconds scaling) is intentional.
  auto r = InitializeFrameFromOtherFrame(
      global.get(),
      VideoFrameData(image.get(), format, image->GetPictureRect(),
                     image->GetSize(), Nothing(),
                     static_cast<int64_t>(aVideoElement.CurrentTime()), {}),
      aInit);
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1635 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from an OffscreenCanvas; same shape as the
// HTMLCanvasElement overload but uses SurfaceFromOffscreenCanvas.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, OffscreenCanvas& aOffscreenCanvas,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability: a zero-sized canvas has no pixels to capture.
  if (aOffscreenCanvas.Width() == 0) {
    aRv.ThrowInvalidStateError("The canvas has a width of 0");
    return nullptr;
  }
  if (aOffscreenCanvas.Height() == 0) {
    aRv.ThrowInvalidStateError("The canvas has a height of 0");
    return nullptr;
  }

  // If the origin of the OffscreenCanvas's image data is not same origin with
  // the entry settings object's origin, then throw a SecurityError
  // DOMException.
  SurfaceFromElementResult res = nsLayoutUtils::SurfaceFromOffscreenCanvas(
      &aOffscreenCanvas, nsLayoutUtils::SFE_WANT_FIRST_FRAME_IF_IMAGE);
  if (res.mIsWriteOnly) {
    // Being write-only implies its image is cross-origin w/out CORS headers.
    aRv.ThrowSecurityError("The canvas is not same-origin");
    return nullptr;
  }

  RefPtr<gfx::SourceSurface> surface = res.GetSourceSurface();
  if (NS_WARN_IF(!surface)) {
    aRv.ThrowInvalidStateError("The canvas' surface acquisition failed");
    return nullptr;
  }

  // `timestamp` is required when constructing from a CanvasImageSource.
  if (!aInit.mTimestamp.WasPassed()) {
    aRv.ThrowTypeError("Missing timestamp");
    return nullptr;
  }

  RefPtr<layers::SourceSurfaceImage> image =
      new layers::SourceSurfaceImage(surface.get());
  auto r = InitializeFrameWithResourceAndSize(global, aInit, image.forget());
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1687 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from an ImageBitmap via its clone data (surface +
// write-only flag). Throws InvalidStateError for a closed/unusable bitmap,
// SecurityError for cross-origin pixels, TypeError for a missing timestamp.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, ImageBitmap& aImageBitmap,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability.
  UniquePtr<ImageBitmapCloneData> data = aImageBitmap.ToCloneData();
  if (!data || !data->mSurface) {
    aRv.ThrowInvalidStateError(
        "The ImageBitmap is closed or its surface acquisition failed");
    return nullptr;
  }

  // If the origin of the ImageBitmap's image data is not same origin with the
  // entry settings object's origin, then throw a SecurityError DOMException.
  if (data->mWriteOnly) {
    // Being write-only implies its image is cross-origin w/out CORS headers.
    aRv.ThrowSecurityError("The ImageBitmap is not same-origin");
    return nullptr;
  }

  if (!aInit.mTimestamp.WasPassed()) {
    aRv.ThrowTypeError("Missing timestamp");
    return nullptr;
  }

  RefPtr<layers::SourceSurfaceImage> image =
      new layers::SourceSurfaceImage(data->mSurface.get());
  // TODO: Take care of data->mAlphaType
  auto r = InitializeFrameWithResourceAndSize(global, aInit, image.forget());
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1729 
/* static */
// https://w3c.github.io/webcodecs/#dom-videoframe-videoframe
// Constructs a VideoFrame from another (open) VideoFrame. The underlying
// image resource is shared by reference, not copied; `aInit` may override
// geometry/timing via InitializeFrameFromOtherFrame.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, VideoFrame& aVideoFrame,
    const VideoFrameInit& aInit, ErrorResult& aRv) {
  nsCOMPtr<nsIGlobalObject> global = do_QueryInterface(aGlobal.GetAsSupports());
  if (!global) {
    aRv.Throw(NS_ERROR_FAILURE);
    return nullptr;
  }

  // Check the usability: the source frame must not be closed.
  if (!aVideoFrame.mResource) {
    aRv.ThrowInvalidStateError(
        "The VideoFrame is closed or no image found there");
    return nullptr;
  }
  // An open frame's invariants: its image matches the coded size, and all
  // geometry is non-empty.
  MOZ_ASSERT(aVideoFrame.mResource->mImage->GetSize() ==
             aVideoFrame.mCodedSize);
  MOZ_ASSERT(!aVideoFrame.mCodedSize.IsEmpty());
  MOZ_ASSERT(!aVideoFrame.mVisibleRect.IsEmpty());
  MOZ_ASSERT(!aVideoFrame.mDisplaySize.IsEmpty());

  // If the origin of the VideoFrame is not same origin with the entry settings
  // object's origin, then throw a SecurityError DOMException.
  if (!IsSameOrigin(global.get(), aVideoFrame)) {
    aRv.ThrowSecurityError("The VideoFrame is not same-origin");
    return nullptr;
  }

  auto r = InitializeFrameFromOtherFrame(
      global.get(), aVideoFrame.GetVideoFrameData(), aInit);
  if (r.isErr()) {
    aRv.ThrowTypeError(r.unwrapErr());
    return nullptr;
  }
  return r.unwrap();
}
   1767 
   1768 // The following constructors are defined in
   1769 // https://w3c.github.io/webcodecs/#dom-videoframe-videoframe-data-init
   1770 
/* static */
// Buffer-based constructor (ArrayBufferView overload); delegates to the
// shared CreateVideoFrameFromBuffer helper.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, const ArrayBufferView& aBufferView,
    const VideoFrameBufferInit& aInit, ErrorResult& aRv) {
  return CreateVideoFrameFromBuffer(aGlobal, aBufferView, aInit, aRv);
}
   1777 
/* static */
// Buffer-based constructor (ArrayBuffer overload); delegates to the shared
// CreateVideoFrameFromBuffer helper.
already_AddRefed<VideoFrame> VideoFrame::Constructor(
    const GlobalObject& aGlobal, const ArrayBuffer& aBuffer,
    const VideoFrameBufferInit& aInit, ErrorResult& aRv) {
  return CreateVideoFrameFromBuffer(aGlobal, aBuffer, aInit, aRv);
}
   1784 
   1785 // https://w3c.github.io/webcodecs/#dom-videoframe-format
   1786 Nullable<VideoPixelFormat> VideoFrame::GetFormat() const {
   1787  AssertIsOnOwningThread();
   1788 
   1789  return mResource ? MaybeToNullable(mResource->TryPixelFormat())
   1790                   : Nullable<VideoPixelFormat>();
   1791 }
   1792 
   1793 // https://w3c.github.io/webcodecs/#dom-videoframe-codedwidth
   1794 uint32_t VideoFrame::CodedWidth() const {
   1795  AssertIsOnOwningThread();
   1796 
   1797  return static_cast<uint32_t>(mCodedSize.Width());
   1798 }
   1799 
   1800 // https://w3c.github.io/webcodecs/#dom-videoframe-codedheight
   1801 uint32_t VideoFrame::CodedHeight() const {
   1802  AssertIsOnOwningThread();
   1803 
   1804  return static_cast<uint32_t>(mCodedSize.Height());
   1805 }
   1806 
   1807 // https://w3c.github.io/webcodecs/#dom-videoframe-codedrect
   1808 already_AddRefed<DOMRectReadOnly> VideoFrame::GetCodedRect() const {
   1809  AssertIsOnOwningThread();
   1810 
   1811  return mResource
   1812             ? MakeAndAddRef<DOMRectReadOnly>(
   1813                   mParent, 0.0f, 0.0f, static_cast<double>(mCodedSize.Width()),
   1814                   static_cast<double>(mCodedSize.Height()))
   1815             : nullptr;
   1816 }
   1817 
   1818 // https://w3c.github.io/webcodecs/#dom-videoframe-visiblerect
   1819 already_AddRefed<DOMRectReadOnly> VideoFrame::GetVisibleRect() const {
   1820  AssertIsOnOwningThread();
   1821 
   1822  return mResource ? MakeAndAddRef<DOMRectReadOnly>(
   1823                         mParent, static_cast<double>(mVisibleRect.X()),
   1824                         static_cast<double>(mVisibleRect.Y()),
   1825                         static_cast<double>(mVisibleRect.Width()),
   1826                         static_cast<double>(mVisibleRect.Height()))
   1827                   : nullptr;
   1828 }
   1829 
   1830 // https://w3c.github.io/webcodecs/#dom-videoframe-displaywidth
   1831 uint32_t VideoFrame::DisplayWidth() const {
   1832  AssertIsOnOwningThread();
   1833 
   1834  return static_cast<uint32_t>(mDisplaySize.Width());
   1835 }
   1836 
   1837 // https://w3c.github.io/webcodecs/#dom-videoframe-displayheight
   1838 uint32_t VideoFrame::DisplayHeight() const {
   1839  AssertIsOnOwningThread();
   1840 
   1841  return static_cast<uint32_t>(mDisplaySize.Height());
   1842 }
   1843 
// https://w3c.github.io/webcodecs/#dom-videoframe-duration
// Presentation duration of the frame (microseconds per spec), or null if
// none was provided at construction.
Nullable<uint64_t> VideoFrame::GetDuration() const {
  AssertIsOnOwningThread();
  return MaybeToNullable(mDuration);
}
   1849 
// https://w3c.github.io/webcodecs/#dom-videoframe-timestamp
// Presentation timestamp of the frame (microseconds per spec).
int64_t VideoFrame::Timestamp() const {
  AssertIsOnOwningThread();

  return mTimestamp;
}
   1856 
   1857 // https://w3c.github.io/webcodecs/#dom-videoframe-colorspace
   1858 already_AddRefed<VideoColorSpace> VideoFrame::ColorSpace() const {
   1859  AssertIsOnOwningThread();
   1860 
   1861  return MakeAndAddRef<VideoColorSpace>(mParent,
   1862                                        mColorSpace.ToColorSpaceInit());
   1863 }
   1864 
// https://w3c.github.io/webcodecs/#dom-videoframe-allocationsize
// Computes the minimum byte size of a destination buffer for CopyTo() with
// the given options. Throws InvalidStateError if closed; returns 0 on any
// error path.
uint32_t VideoFrame::AllocationSize(const VideoFrameCopyToOptions& aOptions,
                                    ErrorResult& aRv) {
  AssertIsOnOwningThread();

  if (!mResource) {
    aRv.ThrowInvalidStateError("No media resource in VideoFrame");
    return 0;
  }

  // NOTE(review): CopyTo() maps an unrecognized format to NotSupportedError,
  // while this throws AbortError — confirm the divergence is intentional.
  if (!mResource->mFormat) {
    aRv.ThrowAbortError("The VideoFrame image format is not VideoPixelFormat");
    return 0;
  }

  // Validate the options against this frame's geometry and compute the
  // combined plane layout; map parse failures to the spec's exception types.
  auto r = ParseVideoFrameCopyToOptions(aOptions, mVisibleRect, mCodedSize,
                                        mResource->mFormat.ref());
  if (r.isErr()) {
    MediaResult error = r.unwrapErr();
    if (error.Code() == NS_ERROR_DOM_NOT_SUPPORTED_ERR) {
      aRv.ThrowNotSupportedError(error.Message());
    } else {
      aRv.ThrowTypeError(error.Message());
    }
    return 0;
  }
  CombinedBufferLayout layout = r.unwrap();

  return layout.mAllocationSize;
}
   1895 
// https://w3c.github.io/webcodecs/#dom-videoframe-copyto
// Copies the frame's pixel data into `aDestination` according to `aOptions`,
// resolving the returned promise with the per-plane layouts. If an RGB output
// format/color space differing from the source is requested, the frame is
// first converted to an intermediate RGB frame and the copy is re-dispatched
// to it.
already_AddRefed<Promise> VideoFrame::CopyTo(
    const AllowSharedBufferSource& aDestination,
    const VideoFrameCopyToOptions& aOptions, ErrorResult& aRv) {
  AssertIsOnOwningThread();

  if (!mResource) {
    aRv.ThrowInvalidStateError("No media resource in VideoFrame");
    return nullptr;
  }

  if (!mResource->mFormat) {
    aRv.ThrowNotSupportedError("VideoFrame's image format is unrecognized");
    return nullptr;
  }

  RefPtr<Promise> p = Promise::Create(mParent.get(), aRv);
  if (NS_WARN_IF(aRv.Failed())) {
    return p.forget();
  }

  // From here on, errors reject the promise instead of throwing on aRv.
  auto r = ParseVideoFrameCopyToOptions(aOptions, mVisibleRect, mCodedSize,
                                        mResource->mFormat.ref());
  if (r.isErr()) {
    MediaResult error = r.unwrapErr();
    if (error.Code() == NS_ERROR_DOM_NOT_SUPPORTED_ERR) {
      p->MaybeRejectWithNotSupportedError(error.Message());
    } else {
      p->MaybeRejectWithTypeError(error.Message());
    }
    return p.forget();
  }
  CombinedBufferLayout layout = r.unwrap();

  if (aOptions.mFormat.WasPassed() &&
      (aOptions.mFormat.Value() == VideoPixelFormat::RGBA ||
       aOptions.mFormat.Value() == VideoPixelFormat::RGBX ||
       aOptions.mFormat.Value() == VideoPixelFormat::BGRA ||
       aOptions.mFormat.Value() == VideoPixelFormat::BGRX)) {
    // By [1], if color space is not set, use "srgb".
    // [1]:
    // https://w3c.github.io/webcodecs/#dom-videoframecopytooptions-colorspace
    PredefinedColorSpace colorSpace = aOptions.mColorSpace.WasPassed()
                                          ? aOptions.mColorSpace.Value()
                                          : PredefinedColorSpace::Srgb;

    // Only convert when the source differs in format or color space; the
    // converted frame then handles the copy with the format option cleared
    // (its native format already matches the request).
    if (mResource->mFormat->PixelFormat() != aOptions.mFormat.Value() ||
        mColorSpace != ConvertToColorSpace(colorSpace)) {
      AutoJSAPI jsapi;
      if (!jsapi.Init(mParent.get())) {
        p->MaybeRejectWithTypeError("Failed to get JS context");
        return p.forget();
      }

      RootedDictionary<VideoFrameCopyToOptions> options(jsapi.cx());
      CloneConfiguration(options, aOptions);
      options.mFormat.Reset();

      RefPtr<VideoFrame> rgbFrame =
          ConvertToRGBFrame(aOptions.mFormat.Value(), colorSpace);
      if (!rgbFrame) {
        p->MaybeRejectWithTypeError(
            "Failed to convert videoframe in the defined format");
        return p.forget();
      }
      return rgbFrame->CopyTo(aDestination, options, aRv);
    }
  }

  return ProcessTypedArraysFixed(aDestination, [&](const Span<uint8_t>& aData) {
    if (aData.size_bytes() < layout.mAllocationSize) {
      p->MaybeRejectWithTypeError("Destination buffer is too small");
      return p.forget();
    }

    Sequence<PlaneLayout> planeLayouts;

    nsTArray<Format::Plane> planes = mResource->mFormat->Planes();
    MOZ_ASSERT(layout.mComputedLayouts.Length() == planes.Length());

    // TODO: These jobs can be run in a thread pool (bug 1780656) to unblock
    // the current thread.
    for (size_t i = 0; i < layout.mComputedLayouts.Length(); ++i) {
      ComputedPlaneLayout& l = layout.mComputedLayouts[i];
      uint32_t destinationOffset = l.mDestinationOffset;

      PlaneLayout* pl = planeLayouts.AppendElement(fallible);
      if (!pl) {
        p->MaybeRejectWithTypeError("Out of memory");
        return p.forget();
      }
      pl->mOffset = l.mDestinationOffset;
      pl->mStride = l.mDestinationStride;

      // Copy pixels of `size` starting from `origin` on planes[i] to
      // `aDestination`. Byte counts are converted to sample counts by
      // dividing by the plane's bytes-per-sample.
      gfx::IntPoint origin(
          l.mSourceLeftBytes / mResource->mFormat->SampleBytes(planes[i]),
          l.mSourceTop);
      gfx::IntSize size(
          l.mSourceWidthBytes / mResource->mFormat->SampleBytes(planes[i]),
          l.mSourceHeight);
      if (!mResource->CopyTo(planes[i], {origin, size},
                             aData.From(destinationOffset),
                             static_cast<size_t>(l.mDestinationStride))) {
        p->MaybeRejectWithTypeError(
            nsPrintfCString("Failed to copy image data in %s plane",
                            mResource->mFormat->PlaneName(planes[i])));
        return p.forget();
      }
    }

    MOZ_ASSERT(layout.mComputedLayouts.Length() == planes.Length());

    p->MaybeResolve(planeLayouts);
    return p.forget();
  });
}
   2014 
   2015 // https://w3c.github.io/webcodecs/#dom-videoframe-clone
   2016 already_AddRefed<VideoFrame> VideoFrame::Clone(ErrorResult& aRv) const {
   2017  AssertIsOnOwningThread();
   2018 
   2019  if (!mResource) {
   2020    aRv.ThrowInvalidStateError("No media resource in the VideoFrame now");
   2021    return nullptr;
   2022  }
   2023  // The VideoFrame's data must be shared instead of copied:
   2024  // https://w3c.github.io/webcodecs/#raw-media-memory-model-reference-counting
   2025  return MakeAndAddRef<VideoFrame>(*this);
   2026 }
   2027 
   2028 // https://w3c.github.io/webcodecs/#close-videoframe
   2029 void VideoFrame::Close() {
   2030  AssertIsOnOwningThread();
   2031  LOG("VideoFrame %p is closed", this);
   2032 
   2033  mResource.reset();
   2034  mCodedSize = gfx::IntSize();
   2035  mVisibleRect = gfx::IntRect();
   2036  mDisplaySize = gfx::IntSize();
   2037  mColorSpace = VideoColorSpaceInternal();
   2038 
   2039  for (const auto& weakExternalTexture : mWebGPUExternalTextures) {
   2040    if (auto* externalTexture = weakExternalTexture.get()) {
   2041      externalTexture->Expire();
   2042    }
   2043  }
   2044  mWebGPUExternalTextures.Clear();
   2045 
   2046  StopAutoClose();
   2047 }
   2048 
// True once Close() has run (or the frame was transferred): no resource held.
bool VideoFrame::IsClosed() const { return !mResource; }
   2050 
// ShutdownWatcher callback: force-close the frame at shutdown if still open.
void VideoFrame::OnShutdown() { CloseIfNeeded(); }
   2052 
// Returns a strong reference to the underlying layers::Image, or nullptr if
// the frame is closed.
already_AddRefed<layers::Image> VideoFrame::GetImage() const {
  if (!mResource) {
    return nullptr;
  }
  return do_AddRef(mResource->mImage);
}
   2059 
// Registers a WebGPU external texture backed by this frame so Close() can
// expire it. Held as a WeakPtr, so a destroyed texture is simply skipped.
void VideoFrame::TrackWebGPUExternalTexture(
    WeakPtr<webgpu::ExternalTexture> aExternalTexture) {
  mWebGPUExternalTextures.AppendElement(aExternalTexture);
}
   2064 
   2065 nsCString VideoFrame::ToString() const {
   2066  nsCString rv;
   2067 
   2068  if (IsClosed()) {
   2069    rv.AppendPrintf("VideoFrame (closed)");
   2070    return rv;
   2071  }
   2072 
   2073  Maybe<VideoPixelFormat> format = mResource->TryPixelFormat();
   2074  rv.AppendPrintf(
   2075      "VideoFrame ts: %" PRId64
   2076      ", %s, coded[%dx%d] visible[%dx%d], display[%dx%d] color: %s",
   2077      mTimestamp,
   2078      format ? dom::GetEnumString(*format).get() : "unknown pixel format",
   2079      mCodedSize.width, mCodedSize.height, mVisibleRect.width,
   2080      mVisibleRect.height, mDisplaySize.width, mDisplaySize.height,
   2081      mColorSpace.ToString().get());
   2082 
   2083  if (mDuration) {
   2084    rv.AppendPrintf(" dur: %" PRId64, mDuration.value());
   2085  }
   2086 
   2087  return rv;
   2088 }
   2089 
// https://w3c.github.io/webcodecs/#ref-for-deserialization-steps%E2%91%A0
/* static */
// Deserialization step: rebuilds a VideoFrame from serialized data and
// returns its JS reflector (nullptr on failure).
JSObject* VideoFrame::ReadStructuredClone(
    JSContext* aCx, nsIGlobalObject* aGlobal, JSStructuredCloneReader* aReader,
    const VideoFrameSerializedData& aData) {
  JS::Rooted<JS::Value> value(aCx, JS::NullValue());
  // To avoid a rooting hazard error from returning a raw JSObject* before
  // running the RefPtr destructor, RefPtr needs to be destructed before
  // returning the raw JSObject*, which is why the RefPtr<VideoFrame> is created
  // in the scope below. Otherwise, the static analysis infers the RefPtr cannot
  // be safely destructed while the unrooted return JSObject* is on the stack.
  {
    RefPtr<VideoFrame> frame = MakeAndAddRef<VideoFrame>(aGlobal, aData);
    if (!GetOrCreateDOMReflector(aCx, frame, &value) || !value.isObject()) {
      return nullptr;
    }
  }
  return value.toObjectOrNull();
}
   2109 
// https://w3c.github.io/webcodecs/#ref-for-serialization-steps%E2%91%A0
// Serialization step: appends this frame's data to the holder's list and
// writes its index into the clone stream. Returns false (serialization
// failure) for a closed frame.
bool VideoFrame::WriteStructuredClone(JSStructuredCloneWriter* aWriter,
                                      StructuredCloneHolder* aHolder) const {
  AssertIsOnOwningThread();

  if (!mResource) {
    return false;
  }

  // Indexing the image and send the index to the receiver.
  const uint32_t index = aHolder->VideoFrames().Length();
  // The serialization is limited to the same process scope so it's ok to
  // serialize a reference instead of a copy.
  aHolder->VideoFrames().AppendElement(
      VideoFrameSerializedData(GetVideoFrameData(), mCodedSize));

  return !NS_WARN_IF(!JS_WriteUint32Pair(aWriter, SCTAG_DOM_VIDEOFRAME, index));
}
   2128 
   2129 // https://w3c.github.io/webcodecs/#ref-for-transfer-steps%E2%91%A0
   2130 UniquePtr<VideoFrame::TransferredData> VideoFrame::Transfer() {
   2131  AssertIsOnOwningThread();
   2132 
   2133  if (!mResource) {
   2134    return nullptr;
   2135  }
   2136 
   2137  auto frame = MakeUnique<TransferredData>(GetVideoFrameData(), mCodedSize);
   2138  Close();
   2139  return frame;
   2140 }
   2141 
// https://w3c.github.io/webcodecs/#ref-for-transfer-receiving-steps%E2%91%A0
/* static */
// Transfer-receiving step: reconstructs a VideoFrame in the receiving global
// from the data captured by Transfer().
already_AddRefed<VideoFrame> VideoFrame::FromTransferred(
    nsIGlobalObject* aGlobal, TransferredData* aData) {
  MOZ_ASSERT(aData);

  return MakeAndAddRef<VideoFrame>(aGlobal, *aData);
}
   2150 
// Snapshots the frame's image, format, geometry, timing, and color space for
// cloning/serialization. Callers must ensure mResource is non-null.
VideoFrameData VideoFrame::GetVideoFrameData() const {
  return VideoFrameData(mResource->mImage.get(), mResource->TryPixelFormat(),
                        mVisibleRect, mDisplaySize, mDuration, mTimestamp,
                        mColorSpace);
}
   2156 
// Converts this frame's image into the requested RGB(A/X) format and color
// space, returning a new VideoFrame whose coded/visible geometry equals this
// frame's visible region. Returns nullptr on conversion failure.
already_AddRefed<VideoFrame> VideoFrame::ConvertToRGBFrame(
    const VideoPixelFormat& aFormat, const PredefinedColorSpace& aColorSpace) {
  MOZ_ASSERT(
      aFormat == VideoPixelFormat::RGBA || aFormat == VideoPixelFormat::RGBX ||
      aFormat == VideoPixelFormat::BGRA || aFormat == VideoPixelFormat::BGRX);
  MOZ_ASSERT(mResource);

  auto r = ConvertToRGBAImage(mResource->mImage, aFormat, aColorSpace);
  if (r.isErr()) {
    MediaResult err = r.unwrapErr();
    LOGE("VideoFrame %p, failed to convert image into %s format: %s", this,
         dom::GetEnumString(aFormat).get(), err.Description().get());
    return nullptr;
  }
  const RefPtr<layers::Image> img = r.unwrap();

  // TODO: https://github.com/w3c/webcodecs/issues/817
  // spec doesn't mention how the display size is set. Use the original one for
  // now.

  return MakeAndAddRef<VideoFrame>(
      mParent.get(), img, Some(aFormat), mVisibleRect.Size(),
      gfx::IntRect{{0, 0}, mVisibleRect.Size()}, mDisplaySize, mDuration,
      mTimestamp, ConvertToColorSpace(aColorSpace));
}
   2182 
// Begins watching for shutdown so the frame's resource is released even if
// the page never calls close(). If the watcher cannot be created, the frame
// is closed immediately rather than risking a leak at shutdown.
void VideoFrame::StartAutoClose() {
  AssertIsOnOwningThread();

  mShutdownWatcher = media::ShutdownWatcher::Create(this);
  if (NS_WARN_IF(!mShutdownWatcher)) {
    LOG("VideoFrame %p, cannot monitor resource release, watcher %p", this,
        mShutdownWatcher.get());
    Close();
    return;
  }

  LOG("VideoFrame %p, start monitoring resource release, watcher %p", this,
      mShutdownWatcher.get());
}
   2196 
   2197 void VideoFrame::StopAutoClose() {
   2198  AssertIsOnOwningThread();
   2199 
   2200  if (mShutdownWatcher) {
   2201    LOG("VideoFrame %p, stop monitoring resource release, watcher %p", this,
   2202        mShutdownWatcher.get());
   2203    mShutdownWatcher->Destroy();
   2204    mShutdownWatcher = nullptr;
   2205  }
   2206 }
   2207 
   2208 void VideoFrame::CloseIfNeeded() {
   2209  AssertIsOnOwningThread();
   2210 
   2211  LOG("VideoFrame %p, needs to close itself? %s", this,
   2212      IsClosed() ? "no" : "yes");
   2213  if (!IsClosed()) {
   2214    LOG("Close VideoFrame %p obligatorily", this);
   2215    Close();
   2216  }
   2217 }
   2218 
   2219 /*
   2220 * VideoFrame::Format
   2221 *
   2222 * This class wraps a VideoPixelFormat defined in [1] and provides some
   2223 * utilities for the VideoFrame's functions. Each sample in the format is 8
   2224 * bits. The pixel layouts for a 4 x 2 image in the spec are illustrated below:
   2225 * [1] https://w3c.github.io/webcodecs/#pixel-format
   2226 *
   2227 * I420 - 3 planes: Y, U, V (YUV 4:2:0)
   2228 * ------
   2229 *     <- width ->
   2230 *  Y: Y1 Y2 Y3 Y4 ^ height
   2231 *     Y5 Y6 Y7 Y8 v
   2232 *  U: U1    U2      => 1/2 Y's width, 1/2 Y's height
   2233 *  V: V1    V2      => 1/2 Y's width, 1/2 Y's height
   2234 *
   2235 * If Y plane's (width, height) is (640, 480), then both U and V planes' size is
   2236 * (320, 240), and the total bytes of Y plane and U/V planes are 640 x 480 and
   2237 * 320 x 240 respectively
   2238 *
   2239 * High bit-depth variants:
   2240 * 1) I420P10: 10-bit YUV 4:2:0 Planar, 10 bits per channel, but often stored in
   2241 *    16-bit (2-byte) containers for alignment purposes
   2242 *    Total bytes of Y plane and U/V planes are 640 x 480 x 2 and 320 x 240 x 2
   2243 *    respectively
   2244 * 2) I420P12: 12-bit YUV 4:2:0 Planar, 12 bits per channel, but often stored in
   2245 *    16-bit (2-byte) containers for alignment purposes
   2246 *    Total bytes of Y plane and U/V planes are 640 x 480 x 2 and 320 x 240 x 2
   2247 *    respectively
   2248 *
   2249 * NV12 - 2 planes: Y, UV (YUV 4:2:0 with interleaved UV)
   2250 * ------
   2251 *     <- width ->
   2252 *  Y: Y1 Y2 Y3 Y4 ^ height
   2253 *     Y5 Y6 Y7 Y8 v
   2254 * UV: U1,V1 U2,V2 => 1/2 Y's width, 1/2 Y's height
   2255 *
   2256 * If Y plane's (width, height) is (640, 480), then UV plane size is (320, 240),
   2257 * and the total bytes of UV plane is (320 * 240 * 2), since each UV pair
   2258 * consists of 2 bytes (1 byte for U and 1 byte for V)
   2259 *
   2260 * I420A - 4 planes: Y, U, V, A (YUV 4:2:0 with Alpha)
   2261 * ------
   2262 *     <- width ->
   2263 *  Y: Y1 Y2 Y3 Y4 ^ height
   2264 *     Y5 Y6 Y7 Y8 v
   2265 *  U: U1    U2      => 1/2 Y's width, 1/2 Y's height
   2266 *  V: V1    V2      => 1/2 Y's width, 1/2 Y's height
   2267 *  A: A1 A2 A3 A4   => Y's width, Y's height
   2268 *     A5 A6 A7 A8
   2269 *
   2270 * If Y plane's (width, height) is (640, 480), then A plane's size is (640,
   2271 * 480), and both U and V planes' size is (320, 240)
   2272 *
   2273 * High bit-depth variants:
   2274 * 1) I420AP10: 10-bit YUV 4:2:0 Planar with Alpha, 10 bits per channel, but
   2275 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2276 *    Total bytes of Y/A plane and U/V planes are 640 x 480 x 2 and 320 x 240 x
   2277 *    2 respectively
   2278 * 2) I420AP12: 12-bit YUV 4:2:0 Planar with Alpha, 12 bits per channel, but
   2279 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2280 *    Total bytes of Y/A plane and U/V planes are 640 x 480 x 2 and 320 x 240 x
   2281 *    2 respectively
   2282 *
   2283 * I422 - 3 planes: Y, U, V (YUV 4:2:2)
   2284 * ------
   2285 *     <- width ->
   2286 *  Y: Y1 Y2 Y3 Y4 ^ height
   2287 *     Y5 Y6 Y7 Y8 v
   2288 *  U: U1    U2      => 1/2 Y's width, Y's height
   2289 *     U3    U4
   2290 *  V: V1    V2      => 1/2 Y's width, Y's height
   2291 *     V3    V4
   2292 *
   2293 * If Y plane's (width, height) is (640, 480), then both U and V planes' size is
   2294 * (320, 480), and the total bytes of Y plane and U/V planes are 640 x 480 and
   2295 * 320 x 480 respectively
   2296 *
   2297 * High bit-depth variants:
   2298 * 1) I422P10: 10-bit YUV 4:2:2 Planar, 10 bits per channel, but often stored in
   2299 *    16-bit (2-byte) containers for alignment purposes
   2300 *    Total bytes of Y plane and U/V planes are 640 x 480 x 2 and 320 x 480 x 2
   2301 *    respectively
   2302 * 2) I422P12: 12-bit YUV 4:2:2 Planar, 12 bits per channel, but often stored in
   2303 *    16-bit (2-byte) containers for alignment purposes
   2304 *    Total bytes of Y plane and U/V planes are 640 x 480 x 2 and 320 x 480 x 2
   2305 *    respectively
   2306 *
   2307 * I422A - 4 planes: Y, U, V, A (YUV 4:2:2 with Alpha)
   2308 * ------
   2309 *     <- width ->
   2310 *  Y: Y1 Y2 Y3 Y4 ^ height
   2311 *     Y5 Y6 Y7 Y8 v
   2312 *  U: U1    U2      => 1/2 Y's width, Y's height
   2313 *  V: V1    V2      => 1/2 Y's width, Y's height
   2314 *  A: A1 A2 A3 A4   => Y's width, Y's height
   2315 *     A5 A6 A7 A8
   2316 *
   2317 * If Y plane's (width, height) is (640, 480), then A plane's size is (640,
   2318 * 480), and both U and V planes' size is (320, 480)
   2319 *
   2320 * High bit-depth variants:
   2321 * 1) I422AP10: 10-bit YUV 4:2:2 Planar with Alpha, 10 bits per channel, but
   2322 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2323 *    Total bytes of Y/A plane and U/V planes are 640 x 480 x 2 and 320 x 480 x
   2324 *    2 respectively
   2325 * 2) I422AP12: 12-bit YUV 4:2:2 Planar with Alpha, 12 bits per channel, but
   2326 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2327 *    Total bytes of Y/A plane and U/V planes are 640 x 480 x 2 and 320 x 480 x
   2328 *    2 respectively
   2329 *
   2330 * I444 - 3 planes: Y, U, V (YUV 4:4:4)
   2331 * ------
   2332 *     <- width ->
   2333 *  Y: Y1 Y2 Y3 Y4 ^ height
   2334 *     Y5 Y6 Y7 Y8 v
   2335 *  U: U1 U2 U3 U4   => Y's width, Y's height
   2336 *     U5 U6 U7 U8
   2337 *  V: V1 V2 V3 V4   => Y's width, Y's height
   2338 *     V5 V6 V7 V8
   2339 *
   2340 * If Y plane's (width, height) is (640, 480), then both U and V planes' size is
   2341 * (640, 480), and the total bytes of Y plane and U/V planes are 640 x 480 each
   2342 *
   2343 * High bit-depth variants:
   2344 * 1) I444P10: 10-bit YUV 4:4:4 Planar, 10 bits per channel, but often stored in
   2345 *    16-bit (2-byte) containers for alignment purposes
   2346 *    Total bytes of all planes are 640 x 480 x 2
   2347 * 2) I444P12: 12-bit YUV 4:4:4 Planar, 12 bits per channel, but often stored in
   2348 *    16-bit (2-byte) containers for alignment purposes
   2349 *    Total bytes of all planes are 640 x 480 x 2
   2350 *
   2351 * I444A - 4 planes: Y, U, V, A (YUV 4:4:4 with Alpha)
   2352 * ------
   2353 *     <- width ->
   2354 *  Y: Y1 Y2 Y3 Y4 ^ height
   2355 *     Y5 Y6 Y7 Y8 v
   2356 *  U: U1 U2 U3 U4   => Y's width, Y's height
   2357 *     U5 U6 U7 U8
   2358 *  V: V1 V2 V3 V4   => Y's width, Y's height
   2359 *     V5 V6 V7 V8
   2360 *  A: A1 A2 A3 A4   => Y's width, Y's height
   2361 *     A5 A6 A7 A8
   2362 *
   2363 * If Y plane's (width, height) is (640, 480), then A plane's size is (640,
   2364 * 480), and both U and V planes' size is (640, 480).
   2365 *
   2366 * High bit-depth variants:
   2367 * 1) I444AP10: 10-bit YUV 4:4:4 Planar with Alpha, 10 bits per channel, but
   2368 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2369 *    Total bytes of all planes are 640 x 480 x 2
   2370 * 2) I444AP12: 12-bit YUV 4:4:4 Planar with Alpha, 12 bits per channel, but
   2371 *    often stored in 16-bit (2-byte) containers for alignment purposes
   2372 *    Total bytes of all planes are 640 x 480 x 2
   2373 *
   2374 * RGBA - 1 plane encoding 3 colors: Red, Green, Blue, and an Alpha value
   2375 * ------
   2376 *     <---------------------- width ---------------------->
   2377 *     R1 G1 B1 A1 | R2 G2 B2 A2 | R3 G3 B3 A3 | R4 G4 B4 A4 ^ height
   2378 *     R5 G5 B5 A5 | R6 G6 B6 A6 | R7 G7 B7 A7 | R8 G8 B8 A8 v
   2379 *
* RGBX - 1 plane encoding 3 colors: Red, Green, Blue, and a padding value
   2381 *      This is the opaque version of RGBA
   2382 * ------
   2383 *     <---------------------- width ---------------------->
   2384 *     R1 G1 B1 X1 | R2 G2 B2 X2 | R3 G3 B3 X3 | R4 G4 B4 X4 ^ height
   2385 *     R5 G5 B5 X5 | R6 G6 B6 X6 | R7 G7 B7 X7 | R8 G8 B8 X8 v
   2386 *
   2387 * BGRA - 1 plane encoding 3 colors: Blue, Green, Red, and an Alpha value
   2388 * ------
   2389 *     <---------------------- width ---------------------->
   2390 *     B1 G1 R1 A1 | B2 G2 R2 A2 | B3 G3 R3 A3 | B4 G4 R4 A4 ^ height
   2391 *     B5 G5 R5 A5 | B6 G6 R6 A6 | B7 G7 R7 A7 | B8 G8 R8 A8 v
   2392 *
* BGRX - 1 plane encoding 3 colors: Blue, Green, Red, and a padding value
   2394 *      This is the opaque version of BGRA
   2395 * ------
   2396 *     <---------------------- width ---------------------->
   2397 *     B1 G1 R1 X1 | B2 G2 R2 X2 | B3 G3 R3 X3 | B4 G4 R4 X4 ^ height
   2398 *     B5 G5 R5 X5 | B6 G6 R6 X6 | B7 G7 R7 X7 | B8 G8 R8 X8 v
   2399 */
   2400 
// Wraps a WebCodecs VideoPixelFormat; all other Format methods derive their
// answers from this single enum value.
VideoFrame::Format::Format(const VideoPixelFormat& aFormat)
    : mFormat(aFormat) {}
   2403 
// Accessor for the wrapped WebCodecs pixel-format enum.
const VideoPixelFormat& VideoFrame::Format::PixelFormat() const {
  return mFormat;
}
   2407 
   2408 gfx::SurfaceFormat VideoFrame::Format::ToSurfaceFormat() const {
   2409  gfx::SurfaceFormat format = gfx::SurfaceFormat::UNKNOWN;
   2410  switch (mFormat) {
   2411    case VideoPixelFormat::I420:
   2412    case VideoPixelFormat::I420P10:
   2413    case VideoPixelFormat::I420P12:
   2414    case VideoPixelFormat::I420A:
   2415    case VideoPixelFormat::I420AP10:
   2416    case VideoPixelFormat::I420AP12:
   2417    case VideoPixelFormat::I422:
   2418    case VideoPixelFormat::I422P10:
   2419    case VideoPixelFormat::I422P12:
   2420    case VideoPixelFormat::I422A:
   2421    case VideoPixelFormat::I422AP10:
   2422    case VideoPixelFormat::I422AP12:
   2423    case VideoPixelFormat::I444:
   2424    case VideoPixelFormat::I444P10:
   2425    case VideoPixelFormat::I444P12:
   2426    case VideoPixelFormat::I444A:
   2427    case VideoPixelFormat::I444AP10:
   2428    case VideoPixelFormat::I444AP12:
   2429    case VideoPixelFormat::NV12:
   2430      // Not yet support for now.
   2431      break;
   2432    case VideoPixelFormat::RGBA:
   2433      format = gfx::SurfaceFormat::R8G8B8A8;
   2434      break;
   2435    case VideoPixelFormat::RGBX:
   2436      format = gfx::SurfaceFormat::R8G8B8X8;
   2437      break;
   2438    case VideoPixelFormat::BGRA:
   2439      format = gfx::SurfaceFormat::B8G8R8A8;
   2440      break;
   2441    case VideoPixelFormat::BGRX:
   2442      format = gfx::SurfaceFormat::B8G8R8X8;
   2443      break;
   2444  }
   2445  return format;
   2446 }
   2447 
   2448 void VideoFrame::Format::MakeOpaque() {
   2449  switch (mFormat) {
   2450    case VideoPixelFormat::I420A:
   2451      mFormat = VideoPixelFormat::I420;
   2452      return;
   2453    case VideoPixelFormat::I420AP10:
   2454      mFormat = VideoPixelFormat::I420P10;
   2455      return;
   2456    case VideoPixelFormat::I420AP12:
   2457      mFormat = VideoPixelFormat::I420P12;
   2458      return;
   2459    case VideoPixelFormat::RGBA:
   2460      mFormat = VideoPixelFormat::RGBX;
   2461      return;
   2462    case VideoPixelFormat::BGRA:
   2463      mFormat = VideoPixelFormat::BGRX;
   2464      return;
   2465    case VideoPixelFormat::I422A:
   2466      mFormat = VideoPixelFormat::I422;
   2467      return;
   2468    case VideoPixelFormat::I422AP10:
   2469      mFormat = VideoPixelFormat::I422P10;
   2470      return;
   2471    case VideoPixelFormat::I422AP12:
   2472      mFormat = VideoPixelFormat::I422P12;
   2473      return;
   2474    case VideoPixelFormat::I444A:
   2475      mFormat = VideoPixelFormat::I444;
   2476      return;
   2477    case VideoPixelFormat::I444AP10:
   2478      mFormat = VideoPixelFormat::I444P10;
   2479      return;
   2480    case VideoPixelFormat::I444AP12:
   2481      mFormat = VideoPixelFormat::I444P12;
   2482      return;
   2483    case VideoPixelFormat::I420:
   2484    case VideoPixelFormat::I420P10:
   2485    case VideoPixelFormat::I420P12:
   2486    case VideoPixelFormat::I422:
   2487    case VideoPixelFormat::I422P10:
   2488    case VideoPixelFormat::I422P12:
   2489    case VideoPixelFormat::I444:
   2490    case VideoPixelFormat::I444P10:
   2491    case VideoPixelFormat::I444P12:
   2492    case VideoPixelFormat::NV12:
   2493    case VideoPixelFormat::RGBX:
   2494    case VideoPixelFormat::BGRX:
   2495      return;
   2496  }
   2497  MOZ_ASSERT_UNREACHABLE("unsupported format");
   2498 }
   2499 
   2500 nsTArray<VideoFrame::Format::Plane> VideoFrame::Format::Planes() const {
   2501  switch (mFormat) {
   2502    case VideoPixelFormat::I420:
   2503    case VideoPixelFormat::I420P10:
   2504    case VideoPixelFormat::I420P12:
   2505    case VideoPixelFormat::I422:
   2506    case VideoPixelFormat::I422P10:
   2507    case VideoPixelFormat::I422P12:
   2508    case VideoPixelFormat::I444:
   2509    case VideoPixelFormat::I444P10:
   2510    case VideoPixelFormat::I444P12:
   2511      return {Plane::Y, Plane::U, Plane::V};
   2512    case VideoPixelFormat::I420A:
   2513    case VideoPixelFormat::I420AP10:
   2514    case VideoPixelFormat::I420AP12:
   2515    case VideoPixelFormat::I422A:
   2516    case VideoPixelFormat::I422AP10:
   2517    case VideoPixelFormat::I422AP12:
   2518    case VideoPixelFormat::I444A:
   2519    case VideoPixelFormat::I444AP10:
   2520    case VideoPixelFormat::I444AP12:
   2521      return {Plane::Y, Plane::U, Plane::V, Plane::A};
   2522    case VideoPixelFormat::NV12:
   2523      return {Plane::Y, Plane::UV};
   2524    case VideoPixelFormat::RGBA:
   2525    case VideoPixelFormat::RGBX:
   2526    case VideoPixelFormat::BGRA:
   2527    case VideoPixelFormat::BGRX:
   2528      return {Plane::RGBA};
   2529  }
   2530  MOZ_ASSERT_UNREACHABLE("unsupported format");
   2531  return {};
   2532 }
   2533 
   2534 const char* VideoFrame::Format::PlaneName(const Plane& aPlane) const {
   2535  switch (aPlane) {
   2536    case Format::Plane::Y:  // and RGBA
   2537      return IsYUV() ? "Y" : "RGBA";
   2538    case Format::Plane::U:  // and UV
   2539      MOZ_ASSERT(IsYUV());
   2540      return mFormat == VideoPixelFormat::NV12 ? "UV" : "U";
   2541    case Format::Plane::V:
   2542      MOZ_ASSERT(IsYUV());
   2543      return "V";
   2544    case Format::Plane::A:
   2545      MOZ_ASSERT(IsYUV());
   2546      return "A";
   2547  }
   2548  MOZ_ASSERT_UNREACHABLE("invalid plane");
   2549  return "Unknown";
   2550 }
   2551 
   2552 uint32_t VideoFrame::Format::SampleBytes(const Plane& aPlane) const {
   2553  switch (mFormat) {
   2554    case VideoPixelFormat::I420:
   2555    case VideoPixelFormat::I420A:
   2556    case VideoPixelFormat::I422:
   2557    case VideoPixelFormat::I422A:
   2558    case VideoPixelFormat::I444:
   2559    case VideoPixelFormat::I444A:
   2560      return 1;  // 8 bits/sample on the Y, U, V, A plane.
   2561    case VideoPixelFormat::I420P10:
   2562    case VideoPixelFormat::I420P12:
   2563    case VideoPixelFormat::I420AP10:
   2564    case VideoPixelFormat::I420AP12:
   2565    case VideoPixelFormat::I422P10:
   2566    case VideoPixelFormat::I422P12:
   2567    case VideoPixelFormat::I422AP10:
   2568    case VideoPixelFormat::I422AP12:
   2569    case VideoPixelFormat::I444P10:
   2570    case VideoPixelFormat::I444P12:
   2571    case VideoPixelFormat::I444AP10:
   2572    case VideoPixelFormat::I444AP12:
   2573      return 2;  // 10 or 12 bits/sample on the Y, U, V, A plane.
   2574    case VideoPixelFormat::NV12:
   2575      switch (aPlane) {
   2576        case Plane::Y:
   2577          return 1;  // 8 bits/sample on the Y plane
   2578        case Plane::UV:
   2579          return 2;  // Interleaved U and V values on the UV plane.
   2580        case Plane::V:
   2581        case Plane::A:
   2582          MOZ_ASSERT_UNREACHABLE("invalid plane");
   2583      }
   2584      return 0;
   2585    case VideoPixelFormat::RGBA:
   2586    case VideoPixelFormat::RGBX:
   2587    case VideoPixelFormat::BGRA:
   2588    case VideoPixelFormat::BGRX:
   2589      return 4;  // 8 bits/sample, 32 bits/pixel
   2590  }
   2591  MOZ_ASSERT_UNREACHABLE("unsupported format");
   2592  return 0;
   2593 }
   2594 
   2595 gfx::IntSize VideoFrame::Format::SampleSize(const Plane& aPlane) const {
   2596  // The sample width and height refers to
   2597  // https://w3c.github.io/webcodecs/#sub-sampling-factor
   2598  switch (aPlane) {
   2599    case Plane::Y:  // and RGBA
   2600    case Plane::A:
   2601      return gfx::IntSize(1, 1);
   2602    case Plane::U:  // and UV
   2603    case Plane::V:
   2604      switch (mFormat) {
   2605        case VideoPixelFormat::I420:
   2606        case VideoPixelFormat::I420P10:
   2607        case VideoPixelFormat::I420P12:
   2608        case VideoPixelFormat::I420A:
   2609        case VideoPixelFormat::I420AP10:
   2610        case VideoPixelFormat::I420AP12:
   2611        case VideoPixelFormat::NV12:
   2612          return gfx::IntSize(2, 2);
   2613        case VideoPixelFormat::I422:
   2614        case VideoPixelFormat::I422P10:
   2615        case VideoPixelFormat::I422P12:
   2616        case VideoPixelFormat::I422A:
   2617        case VideoPixelFormat::I422AP10:
   2618        case VideoPixelFormat::I422AP12:
   2619          return gfx::IntSize(2, 1);
   2620        case VideoPixelFormat::I444:
   2621        case VideoPixelFormat::I444P10:
   2622        case VideoPixelFormat::I444P12:
   2623        case VideoPixelFormat::I444A:
   2624        case VideoPixelFormat::I444AP10:
   2625        case VideoPixelFormat::I444AP12:
   2626          return gfx::IntSize(1, 1);
   2627        case VideoPixelFormat::RGBA:
   2628        case VideoPixelFormat::RGBX:
   2629        case VideoPixelFormat::BGRA:
   2630        case VideoPixelFormat::BGRX:
   2631          MOZ_ASSERT_UNREACHABLE("invalid format");
   2632          return {0, 0};
   2633      }
   2634  }
   2635  MOZ_ASSERT_UNREACHABLE("invalid plane");
   2636  return {0, 0};
   2637 }
   2638 
   2639 bool VideoFrame::Format::IsValidSize(const gfx::IntSize& aSize) const {
   2640  switch (mFormat) {
   2641    case VideoPixelFormat::I420:
   2642    case VideoPixelFormat::I420P10:
   2643    case VideoPixelFormat::I420P12:
   2644    case VideoPixelFormat::I420A:
   2645    case VideoPixelFormat::I420AP10:
   2646    case VideoPixelFormat::I420AP12:
   2647    case VideoPixelFormat::NV12:
   2648      return (aSize.Width() % 2 == 0) && (aSize.Height() % 2 == 0);
   2649    case VideoPixelFormat::I422:
   2650    case VideoPixelFormat::I422P10:
   2651    case VideoPixelFormat::I422P12:
   2652    case VideoPixelFormat::I422A:
   2653    case VideoPixelFormat::I422AP10:
   2654    case VideoPixelFormat::I422AP12:
   2655      return aSize.Height() % 2 == 0;
   2656    case VideoPixelFormat::I444:
   2657    case VideoPixelFormat::I444P10:
   2658    case VideoPixelFormat::I444P12:
   2659    case VideoPixelFormat::I444A:
   2660    case VideoPixelFormat::I444AP10:
   2661    case VideoPixelFormat::I444AP12:
   2662    case VideoPixelFormat::RGBA:
   2663    case VideoPixelFormat::RGBX:
   2664    case VideoPixelFormat::BGRA:
   2665    case VideoPixelFormat::BGRX:
   2666      return true;
   2667  }
   2668  MOZ_ASSERT_UNREACHABLE("unsupported format");
   2669  return false;
   2670 }
   2671 
   2672 size_t VideoFrame::Format::ByteCount(const gfx::IntSize& aSize) const {
   2673  MOZ_ASSERT(IsValidSize(aSize));
   2674 
   2675  CheckedInt<size_t> bytes;
   2676 
   2677  for (const Format::Plane& p : Planes()) {
   2678    const gfx::IntSize factor = SampleSize(p);
   2679 
   2680    gfx::IntSize planeSize{aSize.Width() / factor.Width(),
   2681                           aSize.Height() / factor.Height()};
   2682 
   2683    CheckedInt<size_t> planeBytes(planeSize.Width());
   2684    planeBytes *= planeSize.Height();
   2685    planeBytes *= SampleBytes(p);
   2686 
   2687    bytes += planeBytes;
   2688  }
   2689 
   2690  return bytes.value();
   2691 }
   2692 
// True when the wrapped format is a YUV layout; delegates to the IsYUVFormat
// helper (defined elsewhere in this file).
bool VideoFrame::Format::IsYUV() const { return IsYUVFormat(mFormat); }
   2694 
   2695 /*
   2696 * VideoFrame::Resource
   2697 */
   2698 
   2699 VideoFrame::Resource::Resource(const RefPtr<layers::Image>& aImage,
   2700                               Maybe<class Format>&& aFormat)
   2701    : mImage(aImage), mFormat(aFormat) {
   2702  MOZ_ASSERT(mImage);
   2703 }
   2704 
// Copy constructor: shares the underlying image reference (RefPtr copy) and
// duplicates the format descriptor. The source must hold a non-null image.
VideoFrame::Resource::Resource(const Resource& aOther)
    : mImage(aOther.mImage), mFormat(aOther.mFormat) {
  MOZ_ASSERT(mImage);
}
   2709 
   2710 Maybe<VideoPixelFormat> VideoFrame::Resource::TryPixelFormat() const {
   2711  return mFormat ? Some(mFormat->PixelFormat()) : Nothing();
   2712 }
   2713 
// Returns the number of bytes per row of `aPlane`, computed from the image's
// width and the format's per-sample byte count (there is no extra row
// padding in this computation). Requires a known pixel format;
// release-asserts otherwise.
uint32_t VideoFrame::Resource::Stride(const Format::Plane& aPlane) const {
  MOZ_RELEASE_ASSERT(mFormat);

  CheckedInt<uint32_t> width(mImage->GetSize().Width());
  switch (aPlane) {
    case Format::Plane::Y:  // and RGBA
    case Format::Plane::A:
      switch (mFormat->PixelFormat()) {
        case VideoPixelFormat::I420:
        case VideoPixelFormat::I420P10:
        case VideoPixelFormat::I420P12:
        case VideoPixelFormat::I420A:
        case VideoPixelFormat::I420AP10:
        case VideoPixelFormat::I420AP12:
        case VideoPixelFormat::I422:
        case VideoPixelFormat::I422P10:
        case VideoPixelFormat::I422P12:
        case VideoPixelFormat::I422A:
        case VideoPixelFormat::I422AP10:
        case VideoPixelFormat::I422AP12:
        case VideoPixelFormat::I444:
        case VideoPixelFormat::I444P10:
        case VideoPixelFormat::I444P12:
        case VideoPixelFormat::I444A:
        case VideoPixelFormat::I444AP10:
        case VideoPixelFormat::I444AP12:
        case VideoPixelFormat::NV12:
        case VideoPixelFormat::RGBA:
        case VideoPixelFormat::RGBX:
        case VideoPixelFormat::BGRA:
        case VideoPixelFormat::BGRX:
          // Y/A (and packed RGB) planes are full resolution: one sample per
          // pixel column. value() crashes if the product overflowed uint32_t.
          return (width * mFormat->SampleBytes(aPlane)).value();
      }
      return 0;
    case Format::Plane::U:  // and UV
    case Format::Plane::V:
      switch (mFormat->PixelFormat()) {
        case VideoPixelFormat::I420:
        case VideoPixelFormat::I420P10:
        case VideoPixelFormat::I420P12:
        case VideoPixelFormat::I420A:
        case VideoPixelFormat::I420AP10:
        case VideoPixelFormat::I420AP12:
        case VideoPixelFormat::I422:
        case VideoPixelFormat::I422P10:
        case VideoPixelFormat::I422P12:
        case VideoPixelFormat::I422A:
        case VideoPixelFormat::I422AP10:
        case VideoPixelFormat::I422AP12:
        case VideoPixelFormat::NV12:
          // Horizontally sub-sampled chroma: half the width, rounded up for
          // odd widths.
          return (((width + 1) / 2) * mFormat->SampleBytes(aPlane)).value();
        case VideoPixelFormat::I444:
        case VideoPixelFormat::I444P10:
        case VideoPixelFormat::I444P12:
        case VideoPixelFormat::I444A:
        case VideoPixelFormat::I444AP10:
        case VideoPixelFormat::I444AP12:
          // 4:4:4 chroma is full width.
          return (width * mFormat->SampleBytes(aPlane)).value();
        case VideoPixelFormat::RGBA:
        case VideoPixelFormat::RGBX:
        case VideoPixelFormat::BGRA:
        case VideoPixelFormat::BGRX:
          // Packed RGB formats have no chroma planes.
          MOZ_ASSERT_UNREACHABLE("invalid format");
      }
      return 0;
  }
  MOZ_ASSERT_UNREACHABLE("invalid plane");
  return 0;
}
   2783 
// Copies the samples of `aPlane` that fall inside `aRect` (expressed in this
// plane's sample coordinates) into `aPlaneDest`, advancing the destination by
// `aDestinationStride` bytes per row. Returns false when the format is
// unknown, the offset arithmetic overflows, or the underlying image data
// cannot be accessed or converted.
bool VideoFrame::Resource::CopyTo(const Format::Plane& aPlane,
                                  const gfx::IntRect& aRect,
                                  Span<uint8_t>&& aPlaneDest,
                                  size_t aDestinationStride) const {
  if (!mFormat) {
    return false;
  }

  // Row-wise copier: seeks to the rect's top-left sample in the source plane,
  // then copies aRect.Width() samples per row for aRect.Height() rows.
  auto copyPlane = [&](const uint8_t* aPlaneData) {
    MOZ_ASSERT(aPlaneData);

    // Byte offset of the first sample to copy within the source plane.
    CheckedInt<size_t> offset(aRect.Y());
    offset *= Stride(aPlane);
    offset += aRect.X() * mFormat->SampleBytes(aPlane);
    if (!offset.isValid()) {
      return false;
    }

    // Bytes to copy per row.
    CheckedInt<size_t> elementsBytes(aRect.Width());
    elementsBytes *= mFormat->SampleBytes(aPlane);
    if (!elementsBytes.isValid()) {
      return false;
    }

    aPlaneData += offset.value();
    for (int32_t row = 0; row < aRect.Height(); ++row) {
      PodCopy(aPlaneDest.data(), aPlaneData, elementsBytes.value());
      aPlaneData += Stride(aPlane);
      // Spec asks to move `aDestinationStride` bytes instead of
      // `Stride(aPlane)` forward.
      aPlaneDest = aPlaneDest.From(aDestinationStride);
    }
    return true;
  };

  // Planar YUV images expose their channels directly.
  if (mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    switch (aPlane) {
      case Format::Plane::Y:
        return copyPlane(mImage->AsPlanarYCbCrImage()->GetData()->mYChannel);
      case Format::Plane::U:
        return copyPlane(mImage->AsPlanarYCbCrImage()->GetData()->mCbChannel);
      case Format::Plane::V:
        return copyPlane(mImage->AsPlanarYCbCrImage()->GetData()->mCrChannel);
      case Format::Plane::A:
        // NOTE(review): this assert only admits I420A, yet Format also models
        // I420AP10/12, I422A*, and I444A* — confirm those alpha variants can
        // never reach here, or this assert is stale.
        MOZ_ASSERT(mFormat->PixelFormat() == VideoPixelFormat::I420A);
        MOZ_ASSERT(mImage->AsPlanarYCbCrImage()->GetData()->mAlpha);
        return copyPlane(
            mImage->AsPlanarYCbCrImage()->GetData()->mAlpha->mChannel);
    }
    // NOTE(review): no `return false` here, so an out-of-enum plane value
    // would fall through to the RGB surface path below — confirm intended.
    MOZ_ASSERT_UNREACHABLE("invalid plane");
  }

  // Bi-planar NV12 images: Y channel plus an interleaved Cb/Cr channel.
  if (mImage->GetFormat() == ImageFormat::NV_IMAGE) {
    switch (aPlane) {
      case Format::Plane::Y:
        return copyPlane(mImage->AsNVImage()->GetData()->mYChannel);
      case Format::Plane::UV:
        return copyPlane(mImage->AsNVImage()->GetData()->mCbChannel);
      case Format::Plane::V:
      case Format::Plane::A:
        MOZ_ASSERT_UNREACHABLE("invalid plane");
    }
    return false;
  }

  // Attempt to copy data from the underlying SourceSurface. Only copying from
  // RGB format to RGB format is supported.

  RefPtr<gfx::SourceSurface> surface = GetSourceSurface(mImage.get());
  if (NS_WARN_IF(!surface)) {
    LOGE("Failed to get SourceSurface from the image");
    return false;
  }

  RefPtr<gfx::DataSourceSurface> dataSurface = surface->GetDataSurface();
  if (NS_WARN_IF(!dataSurface)) {
    LOGE("Failed to get DataSourceSurface from the SourceSurface");
    return false;
  }

  gfx::DataSourceSurface::ScopedMap map(dataSurface,
                                        gfx::DataSourceSurface::READ);
  if (NS_WARN_IF(!map.IsMapped())) {
    LOGE("Failed to map the DataSourceSurface");
    return false;
  }

  const gfx::SurfaceFormat format = dataSurface->GetFormat();

  // Only RGBA-plane copies between 32-bit RGB surface layouts are handled.
  if (aPlane != Format::Plane::RGBA ||
      (format != gfx::SurfaceFormat::R8G8B8A8 &&
       format != gfx::SurfaceFormat::R8G8B8X8 &&
       format != gfx::SurfaceFormat::B8G8R8A8 &&
       format != gfx::SurfaceFormat::B8G8R8X8)) {
    LOGE("The conversion between RGB and non-RGB is unsupported");
    return false;
  }

  // The mImage's format can be different from mFormat (since Gecko prefers
  // BGRA). To get the data in the matched format, we create a temp buffer
  // holding the image data in that format and then copy them to `aDestination`.
  const gfx::SurfaceFormat f = mFormat->ToSurfaceFormat();
  MOZ_ASSERT(
      f == gfx::SurfaceFormat::R8G8B8A8 || f == gfx::SurfaceFormat::R8G8B8X8 ||
      f == gfx::SurfaceFormat::B8G8R8A8 || f == gfx::SurfaceFormat::B8G8R8X8);

  // TODO: We could use Factory::CreateWrappingDataSourceSurface to wrap
  // `aDestination` to avoid extra copy.
  RefPtr<gfx::DataSourceSurface> tempSurface =
      gfx::Factory::CreateDataSourceSurfaceWithStride(dataSurface->GetSize(), f,
                                                      map.GetStride());
  if (NS_WARN_IF(!tempSurface)) {
    LOGE("Failed to create a temporary DataSourceSurface");
    return false;
  }

  gfx::DataSourceSurface::ScopedMap tempMap(tempSurface,
                                            gfx::DataSourceSurface::WRITE);
  if (NS_WARN_IF(!tempMap.IsMapped())) {
    LOGE("Failed to map the temporary DataSourceSurface");
    return false;
  }

  // Re-order channels from the surface's native layout into mFormat's layout.
  if (!gfx::SwizzleData(map.GetData(), map.GetStride(),
                        dataSurface->GetFormat(), tempMap.GetData(),
                        tempMap.GetStride(), tempSurface->GetFormat(),
                        tempSurface->GetSize())) {
    LOGE("Failed to write data into temporary DataSourceSurface");
    return false;
  }

  return copyPlane(tempMap.GetData());
}
   2917 
   2918 #undef LOGW
   2919 #undef LOGE
   2920 #undef LOG_INTERNAL
   2921 
   2922 }  // namespace mozilla::dom