tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

nsAVIFDecoder.cpp (78583B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*-
      2 *
      3 * This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "ImageLogging.h"  // Must appear first
      8 
      9 #include "nsAVIFDecoder.h"
     10 
     11 #include <aom/aomdx.h>
     12 
     13 #include "DAV1DDecoder.h"
     14 #include "gfxPlatform.h"
     15 #include "YCbCrUtils.h"
     16 #include "libyuv.h"
     17 
     18 #include "SurfacePipeFactory.h"
     19 
     20 #include "mozilla/glean/ImageDecodersMetrics.h"
     21 #include "mozilla/UniquePtrExtensions.h"
     22 
     23 using namespace mozilla::gfx;
     24 
     25 namespace mozilla {
     26 
     27 namespace image {
     28 
     29 static LazyLogModule sAVIFLog("AVIFDecoder");
     30 
     31 static Maybe<IntSize> GetImageSize(const Mp4parseAvifInfo& aInfo) {
     32  // Note this does not take cropping via CleanAperture (clap) into account
     33  const struct Mp4parseImageSpatialExtents* ispe = aInfo.spatial_extents;
     34 
     35  if (ispe) {
     36    // Decoder::PostSize takes int32_t, but ispe contains uint32_t
     37    CheckedInt<int32_t> width = ispe->image_width;
     38    CheckedInt<int32_t> height = ispe->image_height;
     39 
     40    if (width.isValid() && height.isValid()) {
     41      return Some(IntSize{width.value(), height.value()});
     42    }
     43  }
     44 
     45  return Nothing();
     46 }
     47 
     48 // Translate the MIAF/HEIF-based orientation transforms (imir, irot) into
     49 // ImageLib's representation. Note that the interpretation of imir was reversed
     50 // Between HEIF (ISO 23008-12:2017) and ISO/IEC 23008-12:2017/DAmd 2. This is
     51 // handled by mp4parse. See mp4parse::read_imir for details.
     52 Orientation GetImageOrientation(const Mp4parseAvifInfo& aInfo) {
     53  // Per MIAF (ISO/IEC 23000-22:2019) § 7.3.6.7
     54  //   These properties, if used, shall be indicated to be applied in the
     55  //   following order: clean aperture first, then rotation, then mirror.
     56  // The Orientation type does the same order, but opposite rotation direction
     57 
     58  const Mp4parseIrot heifRot = aInfo.image_rotation;
     59  const Mp4parseImir* heifMir = aInfo.image_mirror;
     60  Angle mozRot;
     61  Flip mozFlip;
     62 
     63  if (!heifMir) {  // No mirroring
     64    mozFlip = Flip::Unflipped;
     65 
     66    switch (heifRot) {
     67      case MP4PARSE_IROT_D0:
     68        // ⥠ UPWARDS HARPOON WITH BARB LEFT FROM BAR
     69        mozRot = Angle::D0;
     70        break;
     71      case MP4PARSE_IROT_D90:
     72        // ⥞ LEFTWARDS HARPOON WITH BARB DOWN FROM BAR
     73        mozRot = Angle::D270;
     74        break;
     75      case MP4PARSE_IROT_D180:
     76        // ⥝ DOWNWARDS HARPOON WITH BARB RIGHT FROM BAR
     77        mozRot = Angle::D180;
     78        break;
     79      case MP4PARSE_IROT_D270:
     80        // ⥛  RIGHTWARDS HARPOON WITH BARB UP FROM BAR
     81        mozRot = Angle::D90;
     82        break;
     83      default:
     84        MOZ_ASSERT_UNREACHABLE();
     85    }
     86  } else {
     87    MOZ_ASSERT(heifMir);
     88    mozFlip = Flip::Horizontal;
     89 
     90    enum class HeifFlippedOrientation : uint8_t {
     91      IROT_D0_IMIR_V = (MP4PARSE_IROT_D0 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
     92      IROT_D0_IMIR_H = (MP4PARSE_IROT_D0 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
     93      IROT_D90_IMIR_V = (MP4PARSE_IROT_D90 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
     94      IROT_D90_IMIR_H = (MP4PARSE_IROT_D90 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
     95      IROT_D180_IMIR_V = (MP4PARSE_IROT_D180 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
     96      IROT_D180_IMIR_H = (MP4PARSE_IROT_D180 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
     97      IROT_D270_IMIR_V = (MP4PARSE_IROT_D270 << 1) | MP4PARSE_IMIR_LEFT_RIGHT,
     98      IROT_D270_IMIR_H = (MP4PARSE_IROT_D270 << 1) | MP4PARSE_IMIR_TOP_BOTTOM,
     99    };
    100 
    101    HeifFlippedOrientation heifO =
    102        HeifFlippedOrientation((heifRot << 1) | *heifMir);
    103 
    104    switch (heifO) {
    105      case HeifFlippedOrientation::IROT_D0_IMIR_V:
    106      case HeifFlippedOrientation::IROT_D180_IMIR_H:
    107        // ⥜ UPWARDS HARPOON WITH BARB RIGHT FROM BAR
    108        mozRot = Angle::D0;
    109        break;
    110      case HeifFlippedOrientation::IROT_D270_IMIR_V:
    111      case HeifFlippedOrientation::IROT_D90_IMIR_H:
    112        // ⥚ LEFTWARDS HARPOON WITH BARB UP FROM BAR
    113        mozRot = Angle::D90;
    114        break;
    115      case HeifFlippedOrientation::IROT_D180_IMIR_V:
    116      case HeifFlippedOrientation::IROT_D0_IMIR_H:
    117        // ⥡ DOWNWARDS HARPOON WITH BARB LEFT FROM BAR
    118        mozRot = Angle::D180;
    119        break;
    120      case HeifFlippedOrientation::IROT_D90_IMIR_V:
    121      case HeifFlippedOrientation::IROT_D270_IMIR_H:
    122        // ⥟ RIGHTWARDS HARPOON WITH BARB DOWN FROM BAR
    123        mozRot = Angle::D270;
    124        break;
    125      default:
    126        MOZ_ASSERT_UNREACHABLE();
    127    }
    128  }
    129 
    130  MOZ_LOG(sAVIFLog, LogLevel::Debug,
    131          ("GetImageOrientation: (rot%d, imir(%s)) -> (Angle%d, "
    132           "Flip%d)",
    133           static_cast<int>(heifRot),
    134           heifMir ? (*heifMir == MP4PARSE_IMIR_LEFT_RIGHT ? "left-right"
    135                                                           : "top-bottom")
    136                   : "none",
    137           static_cast<int>(mozRot), static_cast<int>(mozFlip)));
    138  return Orientation{mozRot, mozFlip};
    139 }
    140 nsresult AVIFDecoderStream::ReadAt(int64_t offset, void* data, size_t size,
    141                                   size_t* bytes_read) {
    142  size = std::min(size, size_t(mBuffer->length() - offset));
    143 
    144  if (size <= 0) {
    145    return NS_ERROR_DOM_MEDIA_RANGE_ERR;
    146  }
    147 
    148  memcpy(data, mBuffer->begin() + offset, size);
    149  *bytes_read = size;
    150  return NS_OK;
    151 }
    152 
    153 bool AVIFDecoderStream::Length(int64_t* size) {
    154  *size =
    155      static_cast<int64_t>(std::min<uint64_t>(mBuffer->length(), INT64_MAX));
    156  return true;
    157 }
    158 
// Returns a direct pointer into the buffer covering [aOffset, aOffset+aSize),
// or nullptr when the range is unavailable (callers then fall back to
// ReadAt).
const uint8_t* AVIFDecoderStream::GetContiguousAccess(int64_t aOffset,
                                                     size_t aSize) {
  // NOTE(review): `>=` also rejects a range ending exactly at the buffer end
  // (aOffset + aSize == length), which would be a valid access. This is
  // conservative but safe — returning nullptr merely forces the ReadAt
  // fallback — confirm whether `>` was intended.
  if (aOffset + aSize >= mBuffer->length()) {
    return nullptr;
  }

  return mBuffer->begin() + aOffset;
}
    167 
// Destructor only logs; owned resources (parser handle, sample iterators)
// are released by the member destructors.
AVIFParser::~AVIFParser() {
  MOZ_LOG(sAVIFLog, LogLevel::Debug, ("Destroy AVIFParser=%p", this));
}
    171 
    172 Mp4parseStatus AVIFParser::Create(const Mp4parseIo* aIo, ByteStream* aBuffer,
    173                                  UniquePtr<AVIFParser>& aParserOut,
    174                                  bool aAllowSequences,
    175                                  bool aAnimateAVIFMajor) {
    176  MOZ_ASSERT(aIo);
    177  MOZ_ASSERT(!aParserOut);
    178 
    179  UniquePtr<AVIFParser> p(new AVIFParser(aIo));
    180  Mp4parseStatus status = p->Init(aBuffer, aAllowSequences, aAnimateAVIFMajor);
    181 
    182  if (status == MP4PARSE_STATUS_OK) {
    183    MOZ_ASSERT(p->mParser);
    184    aParserOut = std::move(p);
    185  }
    186 
    187  return status;
    188 }
    189 
    190 uint32_t AVIFParser::GetFrameCount() {
    191  MOZ_ASSERT(mParser);
    192 
    193  // Note that because this consumes the frame iterators, this can only be
    194  // requested for metadata decodes. Since we had to partially decode the
    195  // first frame to determine the size, we need to add one to the result.
    196  // This means we return 0 for 1 frame, 1 for 2 frames, etc.
    197 
    198  if (!IsAnimated()) {
    199    return 0;
    200  }
    201 
    202  uint32_t frameCount = 0;
    203  while (true) {
    204    RefPtr<MediaRawData> header = mColorSampleIter->GetNextHeader();
    205    if (!header) {
    206      break;
    207    }
    208 
    209    if (mAlphaSampleIter) {
    210      header = mAlphaSampleIter->GetNextHeader();
    211      if (!header) {
    212        break;
    213      }
    214    }
    215 
    216    ++frameCount;
    217  }
    218 
    219  return frameCount;
    220 }
    221 
// Retrieves the next image (color sample plus optional alpha sample) into
// aImage. For animated AVIFs this advances the sample iterators and returns
// OutputAvailable while more frames remain, Complete on the final frame, or
// NoSamples if a track runs dry. For still images it returns the primary
// (pitm) item and Complete, or a parser status / NoSamples on failure.
nsAVIFDecoder::DecodeResult AVIFParser::GetImage(AVIFImage& aImage) {
  MOZ_ASSERT(mParser);

  // If the AVIF is animated, get next frame and yield if sequence is not done.
  if (IsAnimated()) {
    aImage.mColorImage = mColorSampleIter->GetNext().unwrapOr(nullptr);

    if (!aImage.mColorImage) {
      return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
    }

    aImage.mFrameNum = mFrameNum++;
    // Clamp the per-frame duration to the int32 range FrameTimeout accepts.
    int64_t durationMs = aImage.mColorImage->mDuration.ToMilliseconds();
    aImage.mDuration = FrameTimeout::FromRawMilliseconds(
        static_cast<int32_t>(std::min<int64_t>(durationMs, INT32_MAX)));

    if (mAlphaSampleIter) {
      aImage.mAlphaImage = mAlphaSampleIter->GetNext().unwrapOr(nullptr);
      if (!aImage.mAlphaImage) {
        return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
      }
    }

    // The color and alpha tracks must advance in lockstep; if one ends before
    // the other, abort rather than continue with missing planes.
    bool hasNext = mColorSampleIter->HasNext();
    if (mAlphaSampleIter && (hasNext != mAlphaSampleIter->HasNext())) {
      MOZ_LOG(
          sAVIFLog, LogLevel::Warning,
          ("[this=%p] The %s sequence ends before frame %d, aborting decode.",
           this, hasNext ? "alpha" : "color", mFrameNum));
      return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
    }
    if (!hasNext) {
      return AsVariant(nsAVIFDecoder::NonDecoderResult::Complete);
    }
    return AsVariant(nsAVIFDecoder::NonDecoderResult::OutputAvailable);
  }

  if (!mInfo.has_primary_item) {
    return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
  }

  // If the AVIF is not animated, get the pitm image and return Complete.
  Mp4parseAvifImage image = {};
  Mp4parseStatus status = mp4parse_avif_get_image(mParser.get(), &image);
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] mp4parse_avif_get_image -> %d; primary_item length: "
           "%zu, alpha_item length: %zu",
           this, status, image.primary_image.length, image.alpha_image.length));
  if (status != MP4PARSE_STATUS_OK) {
    return AsVariant(status);
  }

  // Ideally has_primary_item and no errors would guarantee primary_image.data
  // exists but it doesn't so we check it too.
  if (!image.primary_image.data) {
    return AsVariant(nsAVIFDecoder::NonDecoderResult::NoSamples);
  }

  // Copy the item bytes into MediaRawData objects we own.
  RefPtr<MediaRawData> colorImage =
      new MediaRawData(image.primary_image.data, image.primary_image.length);
  RefPtr<MediaRawData> alphaImage = nullptr;

  if (image.alpha_image.length) {
    alphaImage =
        new MediaRawData(image.alpha_image.data, image.alpha_image.length);
  }

  // A still image is a single frame displayed forever.
  aImage.mFrameNum = 0;
  aImage.mDuration = FrameTimeout::Forever();
  aImage.mColorImage = colorImage;
  aImage.mAlphaImage = alphaImage;
  return AsVariant(nsAVIFDecoder::NonDecoderResult::Complete);
}
    295 
// Constructor (reached via AVIFParser::Create). Stores the I/O callbacks and
// logs the compliance-strictness pref in effect at creation time.
AVIFParser::AVIFParser(const Mp4parseIo* aIo) : mIo(aIo) {
  MOZ_ASSERT(mIo);
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("Create AVIFParser=%p, image.avif.compliance_strictness: %d", this,
           StaticPrefs::image_avif_compliance_strictness()));
}
    302 
    303 static Mp4parseStatus CreateSampleIterator(
    304    Mp4parseAvifParser* aParser, ByteStream* aBuffer, uint32_t trackID,
    305    UniquePtr<SampleIterator>& aIteratorOut) {
    306  Mp4parseByteData data;
    307  uint64_t timescale;
    308  Mp4parseStatus rv =
    309      mp4parse_avif_get_indice_table(aParser, trackID, &data, &timescale);
    310  if (rv != MP4PARSE_STATUS_OK) {
    311    return rv;
    312  }
    313 
    314  UniquePtr<IndiceWrapper> wrapper = MakeUnique<IndiceWrapper>(data);
    315  RefPtr<MP4SampleIndex> index = new MP4SampleIndex(
    316      *wrapper, aBuffer, trackID, false, AssertedCast<int32_t>(timescale));
    317  aIteratorOut = MakeUnique<SampleIterator>(index);
    318  return MP4PARSE_STATUS_OK;
    319 }
    320 
// Parses the container: creates the mp4parse parser, reads the AVIF info
// into mInfo, and — when the file contains a sequence we want to animate —
// sets up sample iterators for the color track and (if present) the alpha
// track. Returns the first failing mp4parse status, or MP4PARSE_STATUS_OK.
Mp4parseStatus AVIFParser::Init(ByteStream* aBuffer, bool aAllowSequences,
                               bool aAnimateAVIFMajor) {
// Early-return helper so each mp4parse call site stays a single line.
#define CHECK_MP4PARSE_STATUS(v)     \
 do {                               \
   if ((v) != MP4PARSE_STATUS_OK) { \
     return v;                      \
   }                                \
 } while (false)

  MOZ_ASSERT(!mParser);

  Mp4parseAvifParser* parser = nullptr;
  Mp4parseStatus status =
      mp4parse_avif_new(mIo,
                        static_cast<enum Mp4parseStrictness>(
                            StaticPrefs::image_avif_compliance_strictness()),
                        &parser);
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] mp4parse_avif_new status: %d", this, status));
  CHECK_MP4PARSE_STATUS(status);
  MOZ_ASSERT(parser);
  mParser.reset(parser);

  status = mp4parse_avif_get_info(mParser.get(), &mInfo);
  CHECK_MP4PARSE_STATUS(status);

  // Decide whether to treat the file as an animated sequence. Sequences can
  // be disabled outright, and files whose major brand is not "avis" are
  // treated as still images unless aAnimateAVIFMajor is set.
  bool useSequence = mInfo.has_sequence;
  if (useSequence) {
    if (!aAllowSequences) {
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] AVIF sequences disabled", this));
      useSequence = false;
    } else if (!aAnimateAVIFMajor &&
               !!memcmp(mInfo.major_brand, "avis", sizeof(mInfo.major_brand))) {
      useSequence = false;
      MOZ_LOG(sAVIFLog, LogLevel::Debug,
              ("[this=%p] AVIF prefers still image", this));
    }
  }

  if (useSequence) {
    status = CreateSampleIterator(parser, aBuffer, mInfo.color_track_id,
                                  mColorSampleIter);
    CHECK_MP4PARSE_STATUS(status);
    MOZ_ASSERT(mColorSampleIter);

    // A zero alpha_track_id is treated as "no alpha track".
    if (mInfo.alpha_track_id) {
      status = CreateSampleIterator(parser, aBuffer, mInfo.alpha_track_id,
                                    mAlphaSampleIter);
      CHECK_MP4PARSE_STATUS(status);
      MOZ_ASSERT(mAlphaSampleIter);
    }
  }

  return status;
}
    377 
    378 bool AVIFParser::IsAnimated() const { return !!mColorSampleIter; }
    379 
    380 // The gfx::YUVColorSpace value is only used in the conversion from YUV -> RGB.
    381 // Typically this comes directly from the CICP matrix_coefficients value, but
    382 // certain values require additionally considering the colour_primaries value.
    383 // See `gfxUtils::CicpToColorSpace` for details. We return a gfx::YUVColorSpace
    384 // rather than CICP::MatrixCoefficients, since that's what
    385 // `gfx::ConvertYCbCrATo[A]RGB` uses. `aBitstreamColorSpaceFunc` abstracts the
    386 // fact that different decoder libraries require different methods for
    387 // extracting the CICP values from the AV1 bitstream and we don't want to do
    388 // that work unnecessarily because in addition to wasted effort, it would make
    389 // the logging more confusing.
    390 template <typename F>
    391 static gfx::YUVColorSpace GetAVIFColorSpace(
    392    const Mp4parseNclxColourInformation* aNclx, F&& aBitstreamColorSpaceFunc) {
    393  return ToMaybe(aNclx)
    394      .map([=](const auto& nclx) {
    395        return gfxUtils::CicpToColorSpace(
    396            static_cast<CICP::MatrixCoefficients>(nclx.matrix_coefficients),
    397            static_cast<CICP::ColourPrimaries>(nclx.colour_primaries),
    398            sAVIFLog);
    399      })
    400      .valueOrFrom(aBitstreamColorSpaceFunc)
    401      .valueOr(gfx::YUVColorSpace::BT601);
    402 }
    403 
    404 static gfx::ColorRange GetAVIFColorRange(
    405    const Mp4parseNclxColourInformation* aNclx,
    406    const gfx::ColorRange av1ColorRange) {
    407  return ToMaybe(aNclx)
    408      .map([=](const auto& nclx) {
    409        return aNclx->full_range_flag ? gfx::ColorRange::FULL
    410                                      : gfx::ColorRange::LIMITED;
    411      })
    412      .valueOr(av1ColorRange);
    413 }
    414 
    415 void AVIFDecodedData::SetCicpValues(
    416    const Mp4parseNclxColourInformation* aNclx,
    417    const gfx::CICP::ColourPrimaries aAv1ColourPrimaries,
    418    const gfx::CICP::TransferCharacteristics aAv1TransferCharacteristics,
    419    const gfx::CICP::MatrixCoefficients aAv1MatrixCoefficients) {
    420  auto cp = CICP::ColourPrimaries::CP_UNSPECIFIED;
    421  auto tc = CICP::TransferCharacteristics::TC_UNSPECIFIED;
    422  auto mc = CICP::MatrixCoefficients::MC_UNSPECIFIED;
    423 
    424  if (aNclx) {
    425    cp = static_cast<CICP::ColourPrimaries>(aNclx->colour_primaries);
    426    tc = static_cast<CICP::TransferCharacteristics>(
    427        aNclx->transfer_characteristics);
    428    mc = static_cast<CICP::MatrixCoefficients>(aNclx->matrix_coefficients);
    429  }
    430 
    431  if (cp == CICP::ColourPrimaries::CP_UNSPECIFIED) {
    432    if (aAv1ColourPrimaries != CICP::ColourPrimaries::CP_UNSPECIFIED) {
    433      cp = aAv1ColourPrimaries;
    434      MOZ_LOG(sAVIFLog, LogLevel::Info,
    435              ("Unspecified colour_primaries value specified in colr box, "
    436               "using AV1 sequence header (%hhu)",
    437               cp));
    438    } else {
    439      cp = CICP::ColourPrimaries::CP_BT709;
    440      MOZ_LOG(sAVIFLog, LogLevel::Warning,
    441              ("Unspecified colour_primaries value specified in colr box "
    442               "or AV1 sequence header, using fallback value (%hhu)",
    443               cp));
    444    }
    445  } else if (cp != aAv1ColourPrimaries) {
    446    MOZ_LOG(sAVIFLog, LogLevel::Warning,
    447            ("colour_primaries mismatch: colr box = %hhu, AV1 "
    448             "sequence header = %hhu, using colr box",
    449             cp, aAv1ColourPrimaries));
    450  }
    451 
    452  if (tc == CICP::TransferCharacteristics::TC_UNSPECIFIED) {
    453    if (aAv1TransferCharacteristics !=
    454        CICP::TransferCharacteristics::TC_UNSPECIFIED) {
    455      tc = aAv1TransferCharacteristics;
    456      MOZ_LOG(sAVIFLog, LogLevel::Info,
    457              ("Unspecified transfer_characteristics value specified in "
    458               "colr box, using AV1 sequence header (%hhu)",
    459               tc));
    460    } else {
    461      tc = CICP::TransferCharacteristics::TC_SRGB;
    462      MOZ_LOG(sAVIFLog, LogLevel::Warning,
    463              ("Unspecified transfer_characteristics value specified in "
    464               "colr box or AV1 sequence header, using fallback value (%hhu)",
    465               tc));
    466    }
    467  } else if (tc != aAv1TransferCharacteristics) {
    468    MOZ_LOG(sAVIFLog, LogLevel::Warning,
    469            ("transfer_characteristics mismatch: colr box = %hhu, "
    470             "AV1 sequence header = %hhu, using colr box",
    471             tc, aAv1TransferCharacteristics));
    472  }
    473 
    474  if (mc == CICP::MatrixCoefficients::MC_UNSPECIFIED) {
    475    if (aAv1MatrixCoefficients != CICP::MatrixCoefficients::MC_UNSPECIFIED) {
    476      mc = aAv1MatrixCoefficients;
    477      MOZ_LOG(sAVIFLog, LogLevel::Info,
    478              ("Unspecified matrix_coefficients value specified in "
    479               "colr box, using AV1 sequence header (%hhu)",
    480               mc));
    481    } else {
    482      mc = CICP::MatrixCoefficients::MC_BT601;
    483      MOZ_LOG(sAVIFLog, LogLevel::Warning,
    484              ("Unspecified matrix_coefficients value specified in "
    485               "colr box or AV1 sequence header, using fallback value (%hhu)",
    486               mc));
    487    }
    488  } else if (mc != aAv1MatrixCoefficients) {
    489    MOZ_LOG(sAVIFLog, LogLevel::Warning,
    490            ("matrix_coefficients mismatch: colr box = %hhu, "
    491             "AV1 sequence header = %hhu, using colr box",
    492             mc, aAv1TransferCharacteristics));
    493  }
    494 
    495  mColourPrimaries = cp;
    496  mTransferCharacteristics = tc;
    497  mMatrixCoefficients = mc;
    498 }
    499 
// AV1 decoding backed by the dav1d library. Color and alpha are decoded with
// two independent dav1d contexts, since the alpha plane is carried as a
// separate AV1 bitstream.
class Dav1dDecoder final : AVIFDecoderInterface {
 public:
  ~Dav1dDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy Dav1dDecoder=%p", this));

    if (mColorContext) {
      dav1d_close(&mColorContext);
      // dav1d_close is expected to null out the context pointer.
      MOZ_ASSERT(!mColorContext);
    }

    if (mAlphaContext) {
      dav1d_close(&mAlphaContext);
      MOZ_ASSERT(!mAlphaContext);
    }
  }

  // Factory: creates and initializes a decoder. Ownership is transferred to
  // aDecoder only when initialization succeeds (r == 0).
  static DecodeResult Create(UniquePtr<AVIFDecoderInterface>& aDecoder,
                             bool aHasAlpha) {
    UniquePtr<Dav1dDecoder> d(new Dav1dDecoder());
    Dav1dResult r = d->Init(aHasAlpha);
    if (r == 0) {
      aDecoder.reset(d.release());
    }
    return AsVariant(r);
  }

  // Decodes one frame's color sample and, if present, its alpha sample, and
  // stores the converted result in mDecodedData. Asserts that no data has
  // been decoded yet (one Decode per call sequence on this instance).
  DecodeResult Decode(bool aShouldSendTelemetry,
                      const Mp4parseAvifInfo& aAVIFInfo,
                      const AVIFImage& aSamples) override {
    MOZ_ASSERT(mColorContext);
    MOZ_ASSERT(!mDecodedData);
    MOZ_ASSERT(aSamples.mColorImage);

    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("[this=%p] Decoding color", this));

    OwnedDav1dPicture colorPic = OwnedDav1dPicture(new Dav1dPicture());
    OwnedDav1dPicture alphaPic = nullptr;
    Dav1dResult r = GetPicture(*mColorContext, *aSamples.mColorImage,
                               colorPic.get(), aShouldSendTelemetry);
    if (r != 0) {
      return AsVariant(r);
    }

    if (aSamples.mAlphaImage) {
      MOZ_ASSERT(mAlphaContext);
      MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("[this=%p] Decoding alpha", this));

      alphaPic = OwnedDav1dPicture(new Dav1dPicture());
      r = GetPicture(*mAlphaContext, *aSamples.mAlphaImage, alphaPic.get(),
                     aShouldSendTelemetry);
      if (r != 0) {
        return AsVariant(r);
      }

      // Per § 4 of the AVIF spec
      // https://aomediacodec.github.io/av1-avif/#auxiliary-images: An AV1
      // Alpha Image Item […] shall be encoded with the same bit depth as the
      // associated master AV1 Image Item
      if (colorPic->p.bpc != alphaPic->p.bpc) {
        return AsVariant(NonDecoderResult::AlphaYColorDepthMismatch);
      }

      // The downstream conversion assumes matching Y-plane strides between
      // the color picture and the alpha plane.
      if (colorPic->stride[0] != alphaPic->stride[0]) {
        return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
      }
    }

    MOZ_ASSERT_IF(!alphaPic, !aAVIFInfo.premultiplied_alpha);
    mDecodedData = Dav1dPictureToDecodedData(
        aAVIFInfo.nclx_colour_information, std::move(colorPic),
        std::move(alphaPic), aAVIFInfo.premultiplied_alpha);

    return AsVariant(r);
  }

 private:
  // Construction happens only via Create.
  explicit Dav1dDecoder() {
    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create Dav1dDecoder=%p", this));
  }

  // Opens the color context and, when needed, the alpha context. Returns 0
  // on success or the first failing dav1d error code.
  Dav1dResult Init(bool aHasAlpha) {
    MOZ_ASSERT(!mColorContext);
    MOZ_ASSERT(!mAlphaContext);

    Dav1dSettings settings;
    dav1d_default_settings(&settings);
    settings.all_layers = 0;
    settings.max_frame_delay = 1;
    // TODO: tune settings a la DAV1DDecoder for AV1 (Bug 1681816)

    Dav1dResult r = dav1d_open(&mColorContext, &settings);
    if (r != 0) {
      return r;
    }
    MOZ_ASSERT(mColorContext);

    if (aHasAlpha) {
      r = dav1d_open(&mAlphaContext, &settings);
      if (r != 0) {
        return r;
      }
      MOZ_ASSERT(mAlphaContext);
    }

    return 0;
  }

  // Runs one sample's bytes through dav1d: wrap the data, send it, and fetch
  // the decoded picture. Returns 0 on success, a dav1d error code otherwise.
  static Dav1dResult GetPicture(Dav1dContext& aContext,
                                const MediaRawData& aBytes,
                                Dav1dPicture* aPicture,
                                bool aShouldSendTelemetry) {
    MOZ_ASSERT(aPicture);

    Dav1dData dav1dData;
    Dav1dResult r = dav1d_data_wrap(&dav1dData, aBytes.Data(), aBytes.Size(),
                                    Dav1dFreeCallback_s, nullptr);

    MOZ_LOG(
        sAVIFLog, r == 0 ? LogLevel::Verbose : LogLevel::Error,
        ("dav1d_data_wrap(%p, %zu) -> %d", dav1dData.data, dav1dData.sz, r));

    if (r != 0) {
      return r;
    }

    r = dav1d_send_data(&aContext, &dav1dData);

    MOZ_LOG(sAVIFLog, r == 0 ? LogLevel::Debug : LogLevel::Error,
            ("dav1d_send_data -> %d", r));

    if (r != 0) {
      return r;
    }

    r = dav1d_get_picture(&aContext, aPicture);

    MOZ_LOG(sAVIFLog, r == 0 ? LogLevel::Debug : LogLevel::Error,
            ("dav1d_get_picture -> %d", r));

    // We already have the avif::decode_result metric to record all the
    // successful calls, so only bother recording what type of errors we see
    // via events. Unlike AOM, dav1d returns an int, not an enum, so this is
    // the easiest way to see if we're getting unexpected behavior to
    // investigate.
    if (aShouldSendTelemetry && r != 0) {
      mozilla::glean::avif::Dav1dGetPictureReturnValueExtra extra = {
          .value = Some(nsPrintfCString("%d", r)),
      };
      mozilla::glean::avif::dav1d_get_picture_return_value.Record(Some(extra));
    }

    return r;
  }

  // A dummy callback for dav1d_data_wrap
  static void Dav1dFreeCallback_s(const uint8_t* aBuf, void* aCookie) {
    // The buf is managed by the mParser inside Dav1dDecoder itself. Do
    // nothing here.
  }

  // Converts the decoded picture(s) into an AVIFDecodedData; defined
  // elsewhere in this file.
  static UniquePtr<AVIFDecodedData> Dav1dPictureToDecodedData(
      const Mp4parseNclxColourInformation* aNclx, OwnedDav1dPicture aPicture,
      OwnedDav1dPicture aAlphaPlane, bool aPremultipliedAlpha);

  // Decoder contexts; alpha is only opened when the image has an alpha item.
  Dav1dContext* mColorContext = nullptr;
  Dav1dContext* mAlphaContext = nullptr;
};
    667 
// Constructor/destructor only log; the cloned plane buffer set up by
// CloneFrom is released by the member destructors.
OwnedAOMImage::OwnedAOMImage() {
  MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create OwnedAOMImage=%p", this));
}

OwnedAOMImage::~OwnedAOMImage() {
  MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy OwnedAOMImage=%p", this));
}
    675 
    676 bool OwnedAOMImage::CloneFrom(aom_image_t* aImage, bool aIsAlpha) {
    677  MOZ_ASSERT(aImage);
    678  MOZ_ASSERT(!mImage);
    679  MOZ_ASSERT(!mBuffer);
    680 
    681  uint8_t* srcY = aImage->planes[AOM_PLANE_Y];
    682  int yStride = aImage->stride[AOM_PLANE_Y];
    683  int yHeight = aom_img_plane_height(aImage, AOM_PLANE_Y);
    684  size_t yBufSize = yStride * yHeight;
    685 
    686  // If aImage is alpha plane. The data is located in Y channel.
    687  if (aIsAlpha) {
    688    mBuffer = MakeUniqueFallible<uint8_t[]>(yBufSize);
    689    if (!mBuffer) {
    690      return false;
    691    }
    692    uint8_t* destY = mBuffer.get();
    693    memcpy(destY, srcY, yBufSize);
    694    mImage.emplace(*aImage);
    695    mImage->planes[AOM_PLANE_Y] = destY;
    696 
    697    return true;
    698  }
    699 
    700  uint8_t* srcCb = aImage->planes[AOM_PLANE_U];
    701  int cbStride = aImage->stride[AOM_PLANE_U];
    702  int cbHeight = aom_img_plane_height(aImage, AOM_PLANE_U);
    703  size_t cbBufSize = cbStride * cbHeight;
    704 
    705  uint8_t* srcCr = aImage->planes[AOM_PLANE_V];
    706  int crStride = aImage->stride[AOM_PLANE_V];
    707  int crHeight = aom_img_plane_height(aImage, AOM_PLANE_V);
    708  size_t crBufSize = crStride * crHeight;
    709 
    710  mBuffer = MakeUniqueFallible<uint8_t[]>(yBufSize + cbBufSize + crBufSize);
    711  if (!mBuffer) {
    712    return false;
    713  }
    714 
    715  uint8_t* destY = mBuffer.get();
    716  uint8_t* destCb = destY + yBufSize;
    717  uint8_t* destCr = destCb + cbBufSize;
    718 
    719  memcpy(destY, srcY, yBufSize);
    720  memcpy(destCb, srcCb, cbBufSize);
    721  memcpy(destCr, srcCr, crBufSize);
    722 
    723  mImage.emplace(*aImage);
    724  mImage->planes[AOM_PLANE_Y] = destY;
    725  mImage->planes[AOM_PLANE_U] = destCb;
    726  mImage->planes[AOM_PLANE_V] = destCr;
    727 
    728  return true;
    729 }
    730 
    731 /* static */
    732 OwnedAOMImage* OwnedAOMImage::CopyFrom(aom_image_t* aImage, bool aIsAlpha) {
    733  MOZ_ASSERT(aImage);
    734  UniquePtr<OwnedAOMImage> img(new OwnedAOMImage());
    735  if (!img->CloneFrom(aImage, aIsAlpha)) {
    736    return nullptr;
    737  }
    738  return img.release();
    739 }
    740 
    741 class AOMDecoder final : AVIFDecoderInterface {
    742 public:
    743  ~AOMDecoder() {
    744    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Destroy AOMDecoder=%p", this));
    745 
    746    if (mColorContext.isSome()) {
    747      aom_codec_err_t r = aom_codec_destroy(mColorContext.ptr());
    748      MOZ_LOG(sAVIFLog, LogLevel::Debug,
    749              ("[this=%p] aom_codec_destroy -> %d", this, r));
    750    }
    751 
    752    if (mAlphaContext.isSome()) {
    753      aom_codec_err_t r = aom_codec_destroy(mAlphaContext.ptr());
    754      MOZ_LOG(sAVIFLog, LogLevel::Debug,
    755              ("[this=%p] aom_codec_destroy -> %d", this, r));
    756    }
    757  }
    758 
    759  static DecodeResult Create(UniquePtr<AVIFDecoderInterface>& aDecoder,
    760                             bool aHasAlpha) {
    761    UniquePtr<AOMDecoder> d(new AOMDecoder());
    762    aom_codec_err_t e = d->Init(aHasAlpha);
    763    if (e == AOM_CODEC_OK) {
    764      aDecoder.reset(d.release());
    765    }
    766    return AsVariant(AOMResult(e));
    767  }
    768 
    769  DecodeResult Decode(bool aShouldSendTelemetry,
    770                      const Mp4parseAvifInfo& aAVIFInfo,
    771                      const AVIFImage& aSamples) override {
    772    MOZ_ASSERT(mColorContext.isSome());
    773    MOZ_ASSERT(!mDecodedData);
    774    MOZ_ASSERT(aSamples.mColorImage);
    775 
    776    aom_image_t* aomImg = nullptr;
    777    DecodeResult r = GetImage(*mColorContext, *aSamples.mColorImage, &aomImg,
    778                              aShouldSendTelemetry);
    779    if (!IsDecodeSuccess(r)) {
    780      return r;
    781    }
    782    MOZ_ASSERT(aomImg);
    783 
    784    // The aomImg will be released in next GetImage call (aom_codec_decode
    785    // actually). The GetImage could be called again immediately if parsedImg
    786    // contains alpha data. Therefore, we need to copy the image and manage it
    787    // by AOMDecoder itself.
    788    OwnedAOMImage* clonedImg = OwnedAOMImage::CopyFrom(aomImg, false);
    789    if (!clonedImg) {
    790      return AsVariant(NonDecoderResult::OutOfMemory);
    791    }
    792    mOwnedImage.reset(clonedImg);
    793 
    794    if (aSamples.mAlphaImage) {
    795      MOZ_ASSERT(mAlphaContext.isSome());
    796 
    797      aom_image_t* alphaImg = nullptr;
    798      r = GetImage(*mAlphaContext, *aSamples.mAlphaImage, &alphaImg,
    799                   aShouldSendTelemetry);
    800      if (!IsDecodeSuccess(r)) {
    801        return r;
    802      }
    803      MOZ_ASSERT(alphaImg);
    804 
    805      OwnedAOMImage* clonedAlphaImg = OwnedAOMImage::CopyFrom(alphaImg, true);
    806      if (!clonedAlphaImg) {
    807        return AsVariant(NonDecoderResult::OutOfMemory);
    808      }
    809      mOwnedAlphaPlane.reset(clonedAlphaImg);
    810 
    811      // Per § 4 of the AVIF spec
    812      // https://aomediacodec.github.io/av1-avif/#auxiliary-images: An AV1
    813      // Alpha Image Item […] shall be encoded with the same bit depth as the
    814      // associated master AV1 Image Item
    815      MOZ_ASSERT(mOwnedImage->GetImage() && mOwnedAlphaPlane->GetImage());
    816      if (mOwnedImage->GetImage()->bit_depth !=
    817          mOwnedAlphaPlane->GetImage()->bit_depth) {
    818        return AsVariant(NonDecoderResult::AlphaYColorDepthMismatch);
    819      }
    820 
    821      if (mOwnedImage->GetImage()->stride[AOM_PLANE_Y] !=
    822          mOwnedAlphaPlane->GetImage()->stride[AOM_PLANE_Y]) {
    823        return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
    824      }
    825    }
    826 
    827    MOZ_ASSERT_IF(!mOwnedAlphaPlane, !aAVIFInfo.premultiplied_alpha);
    828    mDecodedData = AOMImageToToDecodedData(
    829        aAVIFInfo.nclx_colour_information, std::move(mOwnedImage),
    830        std::move(mOwnedAlphaPlane), aAVIFInfo.premultiplied_alpha);
    831 
    832    return r;
    833  }
    834 
    835 private:
    836  explicit AOMDecoder() {
    837    MOZ_LOG(sAVIFLog, LogLevel::Verbose, ("Create AOMDecoder=%p", this));
    838  }
    839 
    840  aom_codec_err_t Init(bool aHasAlpha) {
    841    MOZ_ASSERT(mColorContext.isNothing());
    842    MOZ_ASSERT(mAlphaContext.isNothing());
    843 
    844    aom_codec_iface_t* iface = aom_codec_av1_dx();
    845 
    846    // Init color decoder context
    847    mColorContext.emplace();
    848    aom_codec_err_t r = aom_codec_dec_init(
    849        mColorContext.ptr(), iface, /* cfg = */ nullptr, /* flags = */ 0);
    850 
    851    MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
    852            ("[this=%p] color decoder: aom_codec_dec_init -> %d, name = %s",
    853             this, r, mColorContext->name));
    854 
    855    if (r != AOM_CODEC_OK) {
    856      mColorContext.reset();
    857      return r;
    858    }
    859 
    860    if (aHasAlpha) {
    861      // Init alpha decoder context
    862      mAlphaContext.emplace();
    863      r = aom_codec_dec_init(mAlphaContext.ptr(), iface, /* cfg = */ nullptr,
    864                             /* flags = */ 0);
    865 
    866      MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
    867              ("[this=%p] color decoder: aom_codec_dec_init -> %d, name = %s",
    868               this, r, mAlphaContext->name));
    869 
    870      if (r != AOM_CODEC_OK) {
    871        mAlphaContext.reset();
    872        return r;
    873      }
    874    }
    875 
    876    return r;
    877  }
    878 
    879  static DecodeResult GetImage(aom_codec_ctx_t& aContext,
    880                               const MediaRawData& aData, aom_image_t** aImage,
    881                               bool aShouldSendTelemetry) {
    882    aom_codec_err_t r =
    883        aom_codec_decode(&aContext, aData.Data(), aData.Size(), nullptr);
    884 
    885    MOZ_LOG(sAVIFLog, r == AOM_CODEC_OK ? LogLevel::Verbose : LogLevel::Error,
    886            ("aom_codec_decode -> %d", r));
    887 
    888    if (aShouldSendTelemetry) {
    889      switch (r) {
    890        case AOM_CODEC_OK:
    891          // No need to record any telemetry for the common case
    892          break;
    893        case AOM_CODEC_ERROR:
    894          mozilla::glean::avif::aom_decode_error
    895              .EnumGet(glean::avif::AomDecodeErrorLabel::eError)
    896              .Add();
    897          break;
    898        case AOM_CODEC_MEM_ERROR:
    899          mozilla::glean::avif::aom_decode_error
    900              .EnumGet(glean::avif::AomDecodeErrorLabel::eMemError)
    901              .Add();
    902          break;
    903        case AOM_CODEC_ABI_MISMATCH:
    904          mozilla::glean::avif::aom_decode_error
    905              .EnumGet(glean::avif::AomDecodeErrorLabel::eAbiMismatch)
    906              .Add();
    907          break;
    908        case AOM_CODEC_INCAPABLE:
    909          mozilla::glean::avif::aom_decode_error
    910              .EnumGet(glean::avif::AomDecodeErrorLabel::eIncapable)
    911              .Add();
    912          break;
    913        case AOM_CODEC_UNSUP_BITSTREAM:
    914          mozilla::glean::avif::aom_decode_error
    915              .EnumGet(glean::avif::AomDecodeErrorLabel::eUnsupBitstream)
    916              .Add();
    917          break;
    918        case AOM_CODEC_UNSUP_FEATURE:
    919          mozilla::glean::avif::aom_decode_error
    920              .EnumGet(glean::avif::AomDecodeErrorLabel::eUnsupFeature)
    921              .Add();
    922          break;
    923        case AOM_CODEC_CORRUPT_FRAME:
    924          mozilla::glean::avif::aom_decode_error
    925              .EnumGet(glean::avif::AomDecodeErrorLabel::eCorruptFrame)
    926              .Add();
    927          break;
    928        case AOM_CODEC_INVALID_PARAM:
    929          mozilla::glean::avif::aom_decode_error
    930              .EnumGet(glean::avif::AomDecodeErrorLabel::eInvalidParam)
    931              .Add();
    932          break;
    933        default:
    934          MOZ_ASSERT_UNREACHABLE(
    935              "Unknown aom_codec_err_t value from aom_codec_decode");
    936      }
    937    }
    938 
    939    if (r != AOM_CODEC_OK) {
    940      return AsVariant(AOMResult(r));
    941    }
    942 
    943    aom_codec_iter_t iter = nullptr;
    944    aom_image_t* img = aom_codec_get_frame(&aContext, &iter);
    945 
    946    MOZ_LOG(sAVIFLog, img == nullptr ? LogLevel::Error : LogLevel::Verbose,
    947            ("aom_codec_get_frame -> %p", img));
    948 
    949    if (img == nullptr) {
    950      return AsVariant(AOMResult(NonAOMCodecError::NoFrame));
    951    }
    952 
    953    const CheckedInt<int> decoded_width = img->d_w;
    954    const CheckedInt<int> decoded_height = img->d_h;
    955 
    956    if (!decoded_height.isValid() || !decoded_width.isValid()) {
    957      MOZ_LOG(sAVIFLog, LogLevel::Debug,
    958              ("image dimensions can't be stored in int: d_w: %u, "
    959               "d_h: %u",
    960               img->d_w, img->d_h));
    961      return AsVariant(AOMResult(NonAOMCodecError::SizeOverflow));
    962    }
    963 
    964    *aImage = img;
    965    return AsVariant(AOMResult(r));
    966  }
    967 
    968  static UniquePtr<AVIFDecodedData> AOMImageToToDecodedData(
    969      const Mp4parseNclxColourInformation* aNclx,
    970      UniquePtr<OwnedAOMImage> aImage, UniquePtr<OwnedAOMImage> aAlphaPlane,
    971      bool aPremultipliedAlpha);
    972 
    973  Maybe<aom_codec_ctx_t> mColorContext;
    974  Maybe<aom_codec_ctx_t> mAlphaContext;
    975  UniquePtr<OwnedAOMImage> mOwnedImage;
    976  UniquePtr<OwnedAOMImage> mOwnedAlphaPlane;
    977 };
    978 
/* static */
// Wraps a decoded Dav1dPicture (plus optional alpha plane picture) in an
// AVIFDecodedData, filling in plane pointers/strides, chroma subsampling,
// YUV color space, CICP values, color range, and alpha parameters. Takes
// ownership of both pictures: they are moved into the returned data so the
// plane memory outlives the raw pointers stored here.
UniquePtr<AVIFDecodedData> Dav1dDecoder::Dav1dPictureToDecodedData(
    const Mp4parseNclxColourInformation* aNclx, OwnedDav1dPicture aPicture,
    OwnedDav1dPicture aAlphaPlane, bool aPremultipliedAlpha) {
  MOZ_ASSERT(aPicture);

  // Downstream stores dimensions as int; verify dav1d agrees at compile time.
  static_assert(std::is_same<int, decltype(aPicture->p.w)>::value);
  static_assert(std::is_same<int, decltype(aPicture->p.h)>::value);

  UniquePtr<AVIFDecodedData> data = MakeUnique<AVIFDecodedData>();

  data->mRenderSize.emplace(aPicture->frame_hdr->render_width,
                            aPicture->frame_hdr->render_height);

  // Plane pointers reference aPicture's buffers; ownership of the picture is
  // transferred into `data` at the bottom of this function.
  data->mYChannel = static_cast<uint8_t*>(aPicture->data[0]);
  data->mYStride = aPicture->stride[0];
  data->mYSkip = aPicture->stride[0] - aPicture->p.w;
  data->mCbChannel = static_cast<uint8_t*>(aPicture->data[1]);
  data->mCrChannel = static_cast<uint8_t*>(aPicture->data[2]);
  data->mCbCrStride = aPicture->stride[1];

  switch (aPicture->p.layout) {
    case DAV1D_PIXEL_LAYOUT_I400:  // Monochrome, so no Cb or Cr channels
      break;
    case DAV1D_PIXEL_LAYOUT_I420:
      data->mChromaSubsampling = ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
      break;
    case DAV1D_PIXEL_LAYOUT_I422:
      data->mChromaSubsampling = ChromaSubsampling::HALF_WIDTH;
      break;
    case DAV1D_PIXEL_LAYOUT_I444:
      break;
    default:
      MOZ_ASSERT_UNREACHABLE("Unknown pixel layout");
  }

  // NOTE(review): the chroma skips are computed from the full-width p.w even
  // for subsampled layouts; presumably consumers derive the chroma row width
  // from mChromaSubsampling rather than these skips — confirm against
  // YCbCrUtils usage.
  data->mCbSkip = aPicture->stride[1] - aPicture->p.w;
  data->mCrSkip = aPicture->stride[1] - aPicture->p.w;
  data->mPictureRect = IntRect(0, 0, aPicture->p.w, aPicture->p.h);
  data->mStereoMode = StereoMode::MONO;
  data->mColorDepth = ColorDepthForBitDepth(aPicture->p.bpc);

  MOZ_ASSERT(aPicture->p.bpc == BitDepthForColorDepth(data->mColorDepth));

  // Prefer the colr box (nclx); fall back to the AV1 sequence header only
  // when the box doesn't determine a color space.
  data->mYUVColorSpace = GetAVIFColorSpace(aNclx, [&]() {
    MOZ_LOG(sAVIFLog, LogLevel::Info,
            ("YUVColorSpace cannot be determined from colr box, using AV1 "
             "sequence header"));
    return DAV1DDecoder::GetColorSpace(*aPicture, sAVIFLog);
  });

  auto av1ColourPrimaries = CICP::ColourPrimaries::CP_UNSPECIFIED;
  auto av1TransferCharacteristics =
      CICP::TransferCharacteristics::TC_UNSPECIFIED;
  auto av1MatrixCoefficients = CICP::MatrixCoefficients::MC_UNSPECIFIED;

  MOZ_ASSERT(aPicture->seq_hdr);
  auto& seq_hdr = *aPicture->seq_hdr;

  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("seq_hdr.color_description_present: %d",
           seq_hdr.color_description_present));
  // CICP values from the bitstream are only meaningful when the sequence
  // header explicitly carries a color description.
  if (seq_hdr.color_description_present) {
    av1ColourPrimaries = static_cast<CICP::ColourPrimaries>(seq_hdr.pri);
    av1TransferCharacteristics =
        static_cast<CICP::TransferCharacteristics>(seq_hdr.trc);
    av1MatrixCoefficients = static_cast<CICP::MatrixCoefficients>(seq_hdr.mtrx);
  }

  data->SetCicpValues(aNclx, av1ColourPrimaries, av1TransferCharacteristics,
                      av1MatrixCoefficients);

  // colr box wins over the bitstream's color_range when present.
  gfx::ColorRange av1ColorRange =
      seq_hdr.color_range ? gfx::ColorRange::FULL : gfx::ColorRange::LIMITED;
  data->mColorRange = GetAVIFColorRange(aNclx, av1ColorRange);

  auto colorPrimaries =
      gfxUtils::CicpToColorPrimaries(data->mColourPrimaries, sAVIFLog);
  if (colorPrimaries.isSome()) {
    data->mColorPrimaries = *colorPrimaries;
  }

  if (aAlphaPlane) {
    // Alpha lives in the Y plane of its own picture and must share the color
    // image's Y stride (checked by the caller before conversion).
    MOZ_ASSERT(aAlphaPlane->stride[0] == data->mYStride);
    data->mAlpha.emplace();
    data->mAlpha->mChannel = static_cast<uint8_t*>(aAlphaPlane->data[0]);
    data->mAlpha->mSize = gfx::IntSize(aAlphaPlane->p.w, aAlphaPlane->p.h);
    data->mAlpha->mPremultiplied = aPremultipliedAlpha;
  }

  // Keep the pictures alive for as long as the plane pointers above are used.
  data->mColorDav1d = std::move(aPicture);
  data->mAlphaDav1d = std::move(aAlphaPlane);

  return data;
}
   1074 
/* static */
// libaom counterpart of Dav1dPictureToDecodedData: wraps the owned decoded
// aom image (plus optional alpha image) in an AVIFDecodedData, filling in
// plane pointers/strides, chroma subsampling, YUV color space, CICP values,
// color range, and alpha parameters. Takes ownership of both OwnedAOMImages
// so the plane memory outlives the raw pointers stored here.
UniquePtr<AVIFDecodedData> AOMDecoder::AOMImageToToDecodedData(
    const Mp4parseNclxColourInformation* aNclx, UniquePtr<OwnedAOMImage> aImage,
    UniquePtr<OwnedAOMImage> aAlphaPlane, bool aPremultipliedAlpha) {
  aom_image_t* colorImage = aImage->GetImage();
  aom_image_t* alphaImage = aAlphaPlane ? aAlphaPlane->GetImage() : nullptr;

  // Sanity-check the plane geometry libaom handed back: strides must cover
  // the plane widths and the two chroma planes must agree with each other.
  MOZ_ASSERT(colorImage);
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_Y] >=
             aom_img_plane_width(colorImage, AOM_PLANE_Y));
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_U] ==
             colorImage->stride[AOM_PLANE_V]);
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_U] >=
             aom_img_plane_width(colorImage, AOM_PLANE_U));
  MOZ_ASSERT(colorImage->stride[AOM_PLANE_V] >=
             aom_img_plane_width(colorImage, AOM_PLANE_V));
  MOZ_ASSERT(aom_img_plane_width(colorImage, AOM_PLANE_U) ==
             aom_img_plane_width(colorImage, AOM_PLANE_V));
  MOZ_ASSERT(aom_img_plane_height(colorImage, AOM_PLANE_U) ==
             aom_img_plane_height(colorImage, AOM_PLANE_V));

  UniquePtr<AVIFDecodedData> data = MakeUnique<AVIFDecodedData>();

  data->mRenderSize.emplace(colorImage->r_w, colorImage->r_h);

  // Plane pointers reference colorImage's buffers; ownership of the images
  // is transferred into `data` at the bottom of this function. Skips are
  // derived per plane from the actual plane widths here (unlike the dav1d
  // path above).
  data->mYChannel = colorImage->planes[AOM_PLANE_Y];
  data->mYStride = colorImage->stride[AOM_PLANE_Y];
  data->mYSkip = colorImage->stride[AOM_PLANE_Y] -
                 aom_img_plane_width(colorImage, AOM_PLANE_Y);
  data->mCbChannel = colorImage->planes[AOM_PLANE_U];
  data->mCrChannel = colorImage->planes[AOM_PLANE_V];
  data->mCbCrStride = colorImage->stride[AOM_PLANE_U];
  data->mCbSkip = colorImage->stride[AOM_PLANE_U] -
                  aom_img_plane_width(colorImage, AOM_PLANE_U);
  data->mCrSkip = colorImage->stride[AOM_PLANE_V] -
                  aom_img_plane_width(colorImage, AOM_PLANE_V);
  data->mPictureRect = gfx::IntRect(0, 0, colorImage->d_w, colorImage->d_h);
  data->mStereoMode = StereoMode::MONO;
  data->mColorDepth = ColorDepthForBitDepth(colorImage->bit_depth);

  // Map the x/y chroma shifts onto the subsampling enum; AV1 only permits
  // 4:2:0 (1,1), 4:2:2 (1,0), and 4:4:4 / monochrome (0,0).
  if (colorImage->x_chroma_shift == 1 && colorImage->y_chroma_shift == 1) {
    data->mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
  } else if (colorImage->x_chroma_shift == 1 &&
             colorImage->y_chroma_shift == 0) {
    data->mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
  } else if (colorImage->x_chroma_shift != 0 ||
             colorImage->y_chroma_shift != 0) {
    MOZ_ASSERT_UNREACHABLE("unexpected chroma shifts");
  }

  MOZ_ASSERT(colorImage->bit_depth == BitDepthForColorDepth(data->mColorDepth));

  auto av1ColourPrimaries = static_cast<CICP::ColourPrimaries>(colorImage->cp);
  auto av1TransferCharacteristics =
      static_cast<CICP::TransferCharacteristics>(colorImage->tc);
  auto av1MatrixCoefficients =
      static_cast<CICP::MatrixCoefficients>(colorImage->mc);

  // Prefer the colr box (nclx); fall back to the bitstream's CICP values
  // only when the box doesn't determine a color space.
  data->mYUVColorSpace = GetAVIFColorSpace(aNclx, [=]() {
    MOZ_LOG(sAVIFLog, LogLevel::Info,
            ("YUVColorSpace cannot be determined from colr box, using AV1 "
             "sequence header"));
    return gfxUtils::CicpToColorSpace(av1MatrixCoefficients, av1ColourPrimaries,
                                      sAVIFLog);
  });

  gfx::ColorRange av1ColorRange;
  if (colorImage->range == AOM_CR_STUDIO_RANGE) {
    av1ColorRange = gfx::ColorRange::LIMITED;
  } else {
    MOZ_ASSERT(colorImage->range == AOM_CR_FULL_RANGE);
    av1ColorRange = gfx::ColorRange::FULL;
  }
  // colr box wins over the bitstream's range when present.
  data->mColorRange = GetAVIFColorRange(aNclx, av1ColorRange);

  data->SetCicpValues(aNclx, av1ColourPrimaries, av1TransferCharacteristics,
                      av1MatrixCoefficients);

  auto colorPrimaries =
      gfxUtils::CicpToColorPrimaries(data->mColourPrimaries, sAVIFLog);
  if (colorPrimaries.isSome()) {
    data->mColorPrimaries = *colorPrimaries;
  }

  if (alphaImage) {
    // Alpha lives in the Y plane of its own image and must share the color
    // image's Y stride (checked by AOMDecoder::Decode before conversion).
    MOZ_ASSERT(alphaImage->stride[AOM_PLANE_Y] == data->mYStride);
    data->mAlpha.emplace();
    data->mAlpha->mChannel = alphaImage->planes[AOM_PLANE_Y];
    data->mAlpha->mSize = gfx::IntSize(alphaImage->d_w, alphaImage->d_h);
    data->mAlpha->mPremultiplied = aPremultipliedAlpha;
  }

  // Keep the images alive for as long as the plane pointers above are used.
  data->mColorAOM = std::move(aImage);
  data->mAlphaAOM = std::move(aAlphaPlane);

  return data;
}
   1172 
   1173 // Wrapper to allow rust to call our read adaptor.
   1174 intptr_t nsAVIFDecoder::ReadSource(uint8_t* aDestBuf, uintptr_t aDestBufSize,
   1175                                   void* aUserData) {
   1176  MOZ_ASSERT(aDestBuf);
   1177  MOZ_ASSERT(aUserData);
   1178 
   1179  MOZ_LOG(sAVIFLog, LogLevel::Verbose,
   1180          ("AVIF ReadSource, aDestBufSize: %zu", aDestBufSize));
   1181 
   1182  auto* decoder = reinterpret_cast<nsAVIFDecoder*>(aUserData);
   1183 
   1184  MOZ_ASSERT(decoder->mReadCursor);
   1185 
   1186  size_t bufferLength = decoder->mBufferedData.end() - decoder->mReadCursor;
   1187  size_t n_bytes = std::min(aDestBufSize, bufferLength);
   1188 
   1189  MOZ_LOG(
   1190      sAVIFLog, LogLevel::Verbose,
   1191      ("AVIF ReadSource, %zu bytes ready, copying %zu", bufferLength, n_bytes));
   1192 
   1193  memcpy(aDestBuf, decoder->mReadCursor, n_bytes);
   1194  decoder->mReadCursor += n_bytes;
   1195 
   1196  return n_bytes;
   1197 }
   1198 
// Constructor: forwards to the Decoder base class and logs creation; all
// real work happens lazily in DoDecode.
nsAVIFDecoder::nsAVIFDecoder(RasterImage* aImage) : Decoder(aImage) {
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] nsAVIFDecoder::nsAVIFDecoder", this));
}
   1203 
// Destructor: logging only; owned members (parser, decoder, buffers) clean
// themselves up via their own destructors.
nsAVIFDecoder::~nsAVIFDecoder() {
  MOZ_LOG(sAVIFLog, LogLevel::Debug,
          ("[this=%p] nsAVIFDecoder::~nsAVIFDecoder", this));
}
   1208 
   1209 LexerResult nsAVIFDecoder::DoDecode(SourceBufferIterator& aIterator,
   1210                                    IResumable* aOnResume) {
   1211  MOZ_LOG(sAVIFLog, LogLevel::Info,
   1212          ("[this=%p] nsAVIFDecoder::DoDecode start", this));
   1213 
   1214  DecodeResult result = DoDecodeInternal(aIterator, aOnResume);
   1215 
   1216  RecordDecodeResultTelemetry(result);
   1217 
   1218  if (result.is<NonDecoderResult>()) {
   1219    NonDecoderResult r = result.as<NonDecoderResult>();
   1220    if (r == NonDecoderResult::NeedMoreData) {
   1221      return LexerResult(Yield::NEED_MORE_DATA);
   1222    }
   1223    if (r == NonDecoderResult::OutputAvailable) {
   1224      MOZ_ASSERT(HasSize());
   1225      return LexerResult(Yield::OUTPUT_AVAILABLE);
   1226    }
   1227    if (r == NonDecoderResult::Complete) {
   1228      MOZ_ASSERT(HasSize());
   1229      return LexerResult(TerminalState::SUCCESS);
   1230    }
   1231    return LexerResult(TerminalState::FAILURE);
   1232  }
   1233 
   1234  MOZ_ASSERT(result.is<Dav1dResult>() || result.is<AOMResult>() ||
   1235             result.is<Mp4parseStatus>());
   1236  // If IsMetadataDecode(), a successful parse should return
   1237  // NonDecoderResult::MetadataOk or else continue to the decode stage
   1238  MOZ_ASSERT_IF(result.is<Mp4parseStatus>(),
   1239                result.as<Mp4parseStatus>() != MP4PARSE_STATUS_OK);
   1240  auto rv = LexerResult(IsDecodeSuccess(result) ? TerminalState::SUCCESS
   1241                                                : TerminalState::FAILURE);
   1242  MOZ_LOG(sAVIFLog, LogLevel::Info,
   1243          ("[this=%p] nsAVIFDecoder::DoDecode end", this));
   1244  return rv;
   1245 }
   1246 
   1247 Mp4parseStatus nsAVIFDecoder::CreateParser() {
   1248  if (!mParser) {
   1249    Mp4parseIo io = {nsAVIFDecoder::ReadSource, this};
   1250    mBufferStream = new AVIFDecoderStream(&mBufferedData);
   1251 
   1252    Mp4parseStatus status = AVIFParser::Create(
   1253        &io, mBufferStream.get(), mParser,
   1254        bool(GetDecoderFlags() & DecoderFlags::AVIF_SEQUENCES_ENABLED),
   1255        bool(GetDecoderFlags() & DecoderFlags::AVIF_ANIMATE_AVIF_MAJOR));
   1256 
   1257    if (status != MP4PARSE_STATUS_OK) {
   1258      return status;
   1259    }
   1260 
   1261    const Mp4parseAvifInfo& info = mParser->GetInfo();
   1262    mIsAnimated = mParser->IsAnimated();
   1263    mHasAlpha = mIsAnimated ? !!info.alpha_track_id : info.has_alpha_item;
   1264  }
   1265 
   1266  return MP4PARSE_STATUS_OK;
   1267 }
   1268 
   1269 nsAVIFDecoder::DecodeResult nsAVIFDecoder::CreateDecoder() {
   1270  if (!mDecoder) {
   1271    DecodeResult r = StaticPrefs::image_avif_use_dav1d()
   1272                         ? Dav1dDecoder::Create(mDecoder, mHasAlpha)
   1273                         : AOMDecoder::Create(mDecoder, mHasAlpha);
   1274 
   1275    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1276            ("[this=%p] Create %sDecoder %ssuccessfully", this,
   1277             StaticPrefs::image_avif_use_dav1d() ? "Dav1d" : "AOM",
   1278             IsDecodeSuccess(r) ? "" : "un"));
   1279 
   1280    return r;
   1281  }
   1282 
   1283  return StaticPrefs::image_avif_use_dav1d()
   1284             ? DecodeResult(Dav1dResult(0))
   1285             : DecodeResult(AOMResult(AOM_CODEC_OK));
   1286 }
   1287 
// Records all telemetry available in the AVIF metadata, called only once during
// the metadata decode to avoid multiple counts.
static void RecordMetadataTelem(const Mp4parseAvifInfo& aInfo) {
  // pasp (pixel aspect ratio) box: classify as invalid (zero spacing),
  // square, nonsquare, or absent.
  if (aInfo.pixel_aspect_ratio) {
    const uint32_t& h_spacing = aInfo.pixel_aspect_ratio->h_spacing;
    const uint32_t& v_spacing = aInfo.pixel_aspect_ratio->v_spacing;

    if (h_spacing == 0 || v_spacing == 0) {
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eInvalid)
          .Add();
    } else if (h_spacing == v_spacing) {
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eSquare)
          .Add();
    } else {
      mozilla::glean::avif::pasp
          .EnumGet(mozilla::glean::avif::PaspLabel::eNonsquare)
          .Add();
    }
  } else {
    mozilla::glean::avif::pasp.EnumGet(mozilla::glean::avif::PaspLabel::eAbsent)
        .Add();
  }

  // ftyp major brand: only "avif" and "avis" are distinguished; everything
  // else is lumped into "other". Compares exactly sizeof(major_brand) bytes.
  const auto& major_brand = aInfo.major_brand;
  if (!memcmp(major_brand, "avif", sizeof(major_brand))) {
    glean::avif::major_brand.EnumGet(glean::avif::MajorBrandLabel::eAvif).Add();
  } else if (!memcmp(major_brand, "avis", sizeof(major_brand))) {
    glean::avif::major_brand.EnumGet(glean::avif::MajorBrandLabel::eAvis).Add();
  } else {
    glean::avif::major_brand.EnumGet(glean::avif::MajorBrandLabel::eOther)
        .Add();
  }

  glean::avif::sequence
      .EnumGet(aInfo.has_sequence ? glean::avif::SequenceLabel::ePresent
                                  : glean::avif::SequenceLabel::eAbsent)
      .Add();

// Records presence/absence of one feature flagged in the parser's
// unsupported-features bitfield.
// NOTE(review): this macro is not #undef'd here; presumably nothing later in
// the file reuses the name — confirm.
#define FEATURE_RECORD_GLEAN(metric, metricLabel, fourcc)        \
  mozilla::glean::avif::metric                                   \
      .EnumGet(aInfo.unsupported_features_bitfield &             \
                       (1 << MP4PARSE_FEATURE_##fourcc)          \
                   ? mozilla::glean::avif::metricLabel::ePresent \
                   : mozilla::glean::avif::metricLabel::eAbsent) \
      .Add()
  FEATURE_RECORD_GLEAN(a1lx, A1lxLabel, A1LX);
  FEATURE_RECORD_GLEAN(a1op, A1opLabel, A1OP);
  FEATURE_RECORD_GLEAN(clap, ClapLabel, CLAP);
  FEATURE_RECORD_GLEAN(grid, GridLabel, GRID);
  FEATURE_RECORD_GLEAN(ipro, IproLabel, IPRO);
  FEATURE_RECORD_GLEAN(lsel, LselLabel, LSEL);

  // colr box contents: nclx (CICP) and/or an ICC profile may be present.
  if (aInfo.nclx_colour_information && aInfo.icc_colour_information.data) {
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eBoth)
        .Add();
  } else if (aInfo.nclx_colour_information) {
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eNclx)
        .Add();
  } else if (aInfo.icc_colour_information.data) {
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eIcc)
        .Add();
  } else {
    mozilla::glean::avif::colr.EnumGet(mozilla::glean::avif::ColrLabel::eAbsent)
        .Add();
  }
}
   1356 
   1357 static void RecordPixiTelemetry(uint8_t aPixiBitDepth,
   1358                                uint8_t aBitstreamBitDepth,
   1359                                const char* aItemName) {
   1360  if (aPixiBitDepth == 0) {
   1361    mozilla::glean::avif::pixi.EnumGet(mozilla::glean::avif::PixiLabel::eAbsent)
   1362        .Add();
   1363 
   1364  } else if (aPixiBitDepth == aBitstreamBitDepth) {
   1365    mozilla::glean::avif::pixi.EnumGet(mozilla::glean::avif::PixiLabel::eValid)
   1366        .Add();
   1367 
   1368  } else {
   1369    MOZ_LOG(sAVIFLog, LogLevel::Error,
   1370            ("%s item pixi bit depth (%hhu) doesn't match "
   1371             "bitstream (%hhu)",
   1372             aItemName, aPixiBitDepth, aBitstreamBitDepth));
   1373    mozilla::glean::avif::pixi
   1374        .EnumGet(mozilla::glean::avif::PixiLabel::eBitstreamMismatch)
   1375        .Add();
   1376  }
   1377 }
   1378 
   1379 // This telemetry depends on the results of decoding.
   1380 // These data must be recorded only on the first frame decoded after metadata
   1381 // decode finishes.
   1382 static void RecordFrameTelem(bool aAnimated, const Mp4parseAvifInfo& aInfo,
   1383                             const AVIFDecodedData& aData) {
   1384  mozilla::glean::avif::yuv_color_space
   1385      .EnumGet(static_cast<mozilla::glean::avif::YuvColorSpaceLabel>(
   1386          aData.mYUVColorSpace))
   1387      .Add();
   1388  mozilla::glean::avif::bit_depth
   1389      .EnumGet(
   1390          static_cast<mozilla::glean::avif::BitDepthLabel>(aData.mColorDepth))
   1391      .Add();
   1392 
   1393  RecordPixiTelemetry(
   1394      aAnimated ? aInfo.color_track_bit_depth : aInfo.primary_item_bit_depth,
   1395      BitDepthForColorDepth(aData.mColorDepth), "color");
   1396 
   1397  if (aData.mAlpha) {
   1398    mozilla::glean::avif::alpha
   1399        .EnumGet(mozilla::glean::avif::AlphaLabel::ePresent)
   1400        .Add();
   1401    RecordPixiTelemetry(
   1402        aAnimated ? aInfo.alpha_track_bit_depth : aInfo.alpha_item_bit_depth,
   1403        BitDepthForColorDepth(aData.mColorDepth), "alpha");
   1404  } else {
   1405    mozilla::glean::avif::alpha
   1406        .EnumGet(mozilla::glean::avif::AlphaLabel::eAbsent)
   1407        .Add();
   1408  }
   1409 
   1410  if (CICP::IsReserved(aData.mColourPrimaries)) {
   1411    mozilla::glean::avif::cicp_cp
   1412        .EnumGet(mozilla::glean::avif::CicpCpLabel::eReservedRest)
   1413        .Add();
   1414  } else {
   1415    mozilla::glean::avif::cicp_cp.EnumGet(
   1416        static_cast<mozilla::glean::avif::CicpCpLabel>(aData.mColourPrimaries));
   1417  }
   1418 
   1419  if (CICP::IsReserved(aData.mTransferCharacteristics)) {
   1420    mozilla::glean::avif::cicp_tc
   1421        .EnumGet(mozilla::glean::avif::CicpTcLabel::eReserved)
   1422        .Add();
   1423  } else {
   1424    mozilla::glean::avif::cicp_tc.EnumGet(
   1425        static_cast<mozilla::glean::avif::CicpTcLabel>(
   1426            aData.mTransferCharacteristics));
   1427  }
   1428 
   1429  if (CICP::IsReserved(aData.mMatrixCoefficients)) {
   1430    mozilla::glean::avif::cicp_mc
   1431        .EnumGet(mozilla::glean::avif::CicpMcLabel::eReserved)
   1432        .Add();
   1433  } else {
   1434    mozilla::glean::avif::cicp_mc.EnumGet(
   1435        static_cast<mozilla::glean::avif::CicpMcLabel>(
   1436            aData.mMatrixCoefficients));
   1437  }
   1438 }
   1439 
   1440 nsAVIFDecoder::DecodeResult nsAVIFDecoder::DoDecodeInternal(
   1441    SourceBufferIterator& aIterator, IResumable* aOnResume) {
   1442  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1443          ("[this=%p] nsAVIFDecoder::DoDecodeInternal", this));
   1444 
   1445  // Since the SourceBufferIterator doesn't guarantee a contiguous buffer,
   1446  // but the current mp4parse-rust implementation requires it, always buffer
   1447  // locally. This keeps the code simpler at the cost of some performance, but
   1448  // this implementation is only experimental, so we don't want to spend time
   1449  // optimizing it prematurely.
   1450  while (!mReadCursor) {
   1451    SourceBufferIterator::State state =
   1452        aIterator.AdvanceOrScheduleResume(SIZE_MAX, aOnResume);
   1453 
   1454    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1455            ("[this=%p] After advance, iterator state is %d", this, state));
   1456 
   1457    switch (state) {
   1458      case SourceBufferIterator::WAITING:
   1459        return AsVariant(NonDecoderResult::NeedMoreData);
   1460 
   1461      case SourceBufferIterator::COMPLETE:
   1462        mReadCursor = mBufferedData.begin();
   1463        break;
   1464 
   1465      case SourceBufferIterator::READY: {  // copy new data to buffer
   1466        MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1467                ("[this=%p] SourceBufferIterator ready, %zu bytes available",
   1468                 this, aIterator.Length()));
   1469 
   1470        bool appendSuccess =
   1471            mBufferedData.append(aIterator.Data(), aIterator.Length());
   1472 
   1473        if (!appendSuccess) {
   1474          MOZ_LOG(sAVIFLog, LogLevel::Error,
   1475                  ("[this=%p] Failed to append %zu bytes to buffer", this,
   1476                   aIterator.Length()));
   1477        }
   1478 
   1479        break;
   1480      }
   1481 
   1482      default:
   1483        MOZ_ASSERT_UNREACHABLE("unexpected SourceBufferIterator state");
   1484    }
   1485  }
   1486 
   1487  Mp4parseStatus parserStatus = CreateParser();
   1488 
   1489  if (parserStatus != MP4PARSE_STATUS_OK) {
   1490    return AsVariant(parserStatus);
   1491  }
   1492 
   1493  const Mp4parseAvifInfo& parsedInfo = mParser->GetInfo();
   1494 
   1495  if (parsedInfo.icc_colour_information.data) {
   1496    const auto& icc = parsedInfo.icc_colour_information;
   1497    MOZ_LOG(
   1498        sAVIFLog, LogLevel::Debug,
   1499        ("[this=%p] colr type ICC: %zu bytes %p", this, icc.length, icc.data));
   1500  }
   1501 
   1502  if (IsMetadataDecode()) {
   1503    RecordMetadataTelem(parsedInfo);
   1504  }
   1505 
   1506  if (parsedInfo.nclx_colour_information) {
   1507    const auto& nclx = *parsedInfo.nclx_colour_information;
   1508    MOZ_LOG(
   1509        sAVIFLog, LogLevel::Debug,
   1510        ("[this=%p] colr type CICP: cp/tc/mc/full-range %u/%u/%u/%s", this,
   1511         nclx.colour_primaries, nclx.transfer_characteristics,
   1512         nclx.matrix_coefficients, nclx.full_range_flag ? "true" : "false"));
   1513  }
   1514 
   1515  if (!parsedInfo.icc_colour_information.data &&
   1516      !parsedInfo.nclx_colour_information) {
   1517    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1518            ("[this=%p] colr box not present", this));
   1519  }
   1520 
   1521  AVIFImage parsedImage;
   1522  DecodeResult r = mParser->GetImage(parsedImage);
   1523  if (!IsDecodeSuccess(r)) {
   1524    return r;
   1525  }
   1526  bool isDone =
   1527      !IsMetadataDecode() && r == DecodeResult(NonDecoderResult::Complete);
   1528 
   1529  if (mIsAnimated) {
   1530    PostIsAnimated(parsedImage.mDuration);
   1531 
   1532    switch (mParser->GetInfo().loop_mode) {
   1533      case MP4PARSE_AVIF_LOOP_MODE_LOOP_BY_COUNT: {
   1534        auto loopCount = mParser->GetInfo().loop_count;
   1535        PostLoopCount(loopCount > INT32_MAX ? -1
   1536                                            : static_cast<int32_t>(loopCount));
   1537        break;
   1538      }
   1539      case MP4PARSE_AVIF_LOOP_MODE_LOOP_INFINITELY:
   1540      case MP4PARSE_AVIF_LOOP_MODE_NO_EDITS:
   1541      default:
   1542        PostLoopCount(-1);
   1543        break;
   1544    }
   1545  }
   1546  if (mHasAlpha) {
   1547    PostHasTransparency();
   1548  }
   1549 
   1550  Orientation orientation = StaticPrefs::image_avif_apply_transforms()
   1551                                ? GetImageOrientation(parsedInfo)
   1552                                : Orientation{};
   1553  // TODO: Orientation should probably also apply to animated AVIFs.
   1554  if (mIsAnimated) {
   1555    orientation = Orientation{};
   1556  }
   1557 
   1558  Maybe<IntSize> ispeImageSize = GetImageSize(parsedInfo);
   1559 
   1560  bool sendDecodeTelemetry = IsMetadataDecode();
   1561  if (ispeImageSize.isSome()) {
   1562    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1563            ("[this=%p] Parser returned image size %d x %d (%d/%d bit)", this,
   1564             ispeImageSize->width, ispeImageSize->height,
   1565             mIsAnimated ? parsedInfo.color_track_bit_depth
   1566                         : parsedInfo.primary_item_bit_depth,
   1567             mIsAnimated ? parsedInfo.alpha_track_bit_depth
   1568                         : parsedInfo.alpha_item_bit_depth));
   1569    PostSize(ispeImageSize->width, ispeImageSize->height, orientation);
   1570    if (WantsFrameCount()) {
   1571      // Note that this consumes the frame iterators, so this can only be
   1572      // requested for metadata decodes. Since we had to partially decode the
   1573      // first frame to determine the size, we need to add one to the result.
   1574      PostFrameCount(mParser->GetFrameCount() + 1);
   1575    }
   1576    if (IsMetadataDecode()) {
   1577      MOZ_LOG(
   1578          sAVIFLog, LogLevel::Debug,
   1579          ("[this=%p] Finishing metadata decode without image decode", this));
   1580      return AsVariant(NonDecoderResult::Complete);
   1581    }
   1582    // If we're continuing to decode here, this means we skipped decode
   1583    // telemetry for the metadata decode pass. Send it this time.
   1584    sendDecodeTelemetry = true;
   1585  } else {
   1586    MOZ_LOG(sAVIFLog, LogLevel::Error,
   1587            ("[this=%p] Parser returned no image size, decoding...", this));
   1588  }
   1589 
   1590  r = CreateDecoder();
   1591  if (!IsDecodeSuccess(r)) {
   1592    return r;
   1593  }
   1594  MOZ_ASSERT(mDecoder);
   1595  r = mDecoder->Decode(sendDecodeTelemetry, parsedInfo, parsedImage);
   1596  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1597          ("[this=%p] Decoder%s->Decode() %s", this,
   1598           StaticPrefs::image_avif_use_dav1d() ? "Dav1d" : "AOM",
   1599           IsDecodeSuccess(r) ? "succeeds" : "fails"));
   1600 
   1601  if (!IsDecodeSuccess(r)) {
   1602    return r;
   1603  }
   1604 
   1605  UniquePtr<AVIFDecodedData> decodedData = mDecoder->GetDecodedData();
   1606 
   1607  MOZ_ASSERT_IF(mHasAlpha, decodedData->mAlpha.isSome());
   1608 
   1609  MOZ_ASSERT(decodedData->mColourPrimaries !=
   1610             CICP::ColourPrimaries::CP_UNSPECIFIED);
   1611  MOZ_ASSERT(decodedData->mTransferCharacteristics !=
   1612             CICP::TransferCharacteristics::TC_UNSPECIFIED);
   1613  MOZ_ASSERT(decodedData->mColorRange <= gfx::ColorRange::_Last);
   1614  MOZ_ASSERT(decodedData->mYUVColorSpace <= gfx::YUVColorSpace::_Last);
   1615 
   1616  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1617          ("[this=%p] decodedData.mColorRange: %hhd", this,
   1618           static_cast<uint8_t>(decodedData->mColorRange)));
   1619 
   1620  // Technically it's valid but we don't handle it now (Bug 1682318).
   1621  if (decodedData->mAlpha &&
   1622      decodedData->mAlpha->mSize != decodedData->YDataSize()) {
   1623    return AsVariant(NonDecoderResult::AlphaYSizeMismatch);
   1624  }
   1625 
   1626  bool isFirstFrame = GetFrameCount() == 0;
   1627 
   1628  if (!HasSize()) {
   1629    MOZ_ASSERT(isFirstFrame);
   1630    MOZ_LOG(
   1631        sAVIFLog, LogLevel::Error,
   1632        ("[this=%p] Using decoded image size: %d x %d", this,
   1633         decodedData->mPictureRect.width, decodedData->mPictureRect.height));
   1634    PostSize(decodedData->mPictureRect.width, decodedData->mPictureRect.height,
   1635             orientation);
   1636    if (WantsFrameCount()) {
   1637      // Note that this consumes the frame iterators, so this can only be
   1638      // requested for metadata decodes. Since we had to partially decode the
   1639      // first frame to determine the size, we need to add one to the result.
   1640      PostFrameCount(mParser->GetFrameCount() + 1);
   1641    }
   1642    mozilla::glean::avif::ispe.EnumGet(mozilla::glean::avif::IspeLabel::eAbsent)
   1643        .Add();
   1644  } else {
   1645    // Verify that the bitstream hasn't changed the image size compared to
   1646    // either the ispe box or the previous frames.
   1647    IntSize expectedSize = GetImageMetadata()
   1648                               .GetOrientation()
   1649                               .ToUnoriented(Size())
   1650                               .ToUnknownSize();
   1651    if (decodedData->mPictureRect.width != expectedSize.width ||
   1652        decodedData->mPictureRect.height != expectedSize.height) {
   1653      if (isFirstFrame) {
   1654        MOZ_LOG(
   1655            sAVIFLog, LogLevel::Error,
   1656            ("[this=%p] Metadata image size doesn't match decoded image size: "
   1657             "(%d x %d) != (%d x %d)",
   1658             this, ispeImageSize->width, ispeImageSize->height,
   1659             decodedData->mPictureRect.width,
   1660             decodedData->mPictureRect.height));
   1661        mozilla::glean::avif::ispe
   1662            .EnumGet(mozilla::glean::avif::IspeLabel::eBitstreamMismatch)
   1663            .Add();
   1664 
   1665        return AsVariant(NonDecoderResult::MetadataImageSizeMismatch);
   1666      }
   1667 
   1668      MOZ_LOG(
   1669          sAVIFLog, LogLevel::Error,
   1670          ("[this=%p] Frame size has changed in the bitstream: "
   1671           "(%d x %d) != (%d x %d)",
   1672           this, expectedSize.width, expectedSize.height,
   1673           decodedData->mPictureRect.width, decodedData->mPictureRect.height));
   1674      return AsVariant(NonDecoderResult::FrameSizeChanged);
   1675    }
   1676 
   1677    if (isFirstFrame) {
   1678      mozilla::glean::avif::ispe
   1679          .EnumGet(mozilla::glean::avif::IspeLabel::eValid)
   1680          .Add();
   1681    }
   1682  }
   1683 
   1684  if (IsMetadataDecode()) {
   1685    return AsVariant(NonDecoderResult::Complete);
   1686  }
   1687 
   1688  IntSize rgbSize = decodedData->mPictureRect.Size();
   1689 
   1690  if (parsedImage.mFrameNum == 0) {
   1691    RecordFrameTelem(mIsAnimated, parsedInfo, *decodedData);
   1692  }
   1693 
   1694  if (decodedData->mRenderSize &&
   1695      decodedData->mRenderSize->ToUnknownSize() != rgbSize) {
   1696    // This may be supported by allowing all metadata decodes to decode a frame
   1697    // and get the render size from the bitstream. However it's unlikely to be
   1698    // used often.
   1699    return AsVariant(NonDecoderResult::RenderSizeMismatch);
   1700  }
   1701 
   1702  // Read color profile
   1703  if (mCMSMode != CMSMode::Off) {
   1704    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1705            ("[this=%p] Processing color profile", this));
   1706 
   1707    // See comment on AVIFDecodedData
   1708    if (parsedInfo.icc_colour_information.data) {
   1709      // same profile for every frame of image, only create it once
   1710      if (!mInProfile) {
   1711        const auto& icc = parsedInfo.icc_colour_information;
   1712        mInProfile = qcms_profile_from_memory(icc.data, icc.length);
   1713      }
   1714    } else {
   1715      // potentially different profile every frame, destroy the old one
   1716      if (mInProfile) {
   1717        if (mTransform) {
   1718          qcms_transform_release(mTransform);
   1719          mTransform = nullptr;
   1720        }
   1721        qcms_profile_release(mInProfile);
   1722        mInProfile = nullptr;
   1723      }
   1724 
   1725      const auto& cp = decodedData->mColourPrimaries;
   1726      const auto& tc = decodedData->mTransferCharacteristics;
   1727 
   1728      if (CICP::IsReserved(cp)) {
   1729        MOZ_LOG(sAVIFLog, LogLevel::Error,
   1730                ("[this=%p] colour_primaries reserved value (%hhu) is invalid; "
   1731                 "failing",
   1732                 this, cp));
   1733        return AsVariant(NonDecoderResult::InvalidCICP);
   1734      }
   1735 
   1736      if (CICP::IsReserved(tc)) {
   1737        MOZ_LOG(sAVIFLog, LogLevel::Error,
   1738                ("[this=%p] transfer_characteristics reserved value (%hhu) is "
   1739                 "invalid; failing",
   1740                 this, tc));
   1741        return AsVariant(NonDecoderResult::InvalidCICP);
   1742      }
   1743 
   1744      MOZ_ASSERT(cp != CICP::ColourPrimaries::CP_UNSPECIFIED &&
   1745                 !CICP::IsReserved(cp));
   1746      MOZ_ASSERT(tc != CICP::TransferCharacteristics::TC_UNSPECIFIED &&
   1747                 !CICP::IsReserved(tc));
   1748 
   1749      mInProfile =
   1750          qcms_profile_create_cicp(cp, ChooseTransferCharacteristics(tc));
   1751    }
   1752 
   1753    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1754            ("[this=%p] mInProfile %p", this, mInProfile));
   1755  } else {
   1756    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1757            ("[this=%p] CMSMode::Off, skipping color profile", this));
   1758  }
   1759 
   1760  if (mInProfile && GetCMSOutputProfile() && !mTransform) {
   1761    auto intent = static_cast<qcms_intent>(gfxPlatform::GetRenderingIntent());
   1762    qcms_data_type inType;
   1763    qcms_data_type outType;
   1764 
   1765    // If we're not mandating an intent, use the one from the image.
   1766    if (gfxPlatform::GetRenderingIntent() == -1) {
   1767      intent = qcms_profile_get_rendering_intent(mInProfile);
   1768    }
   1769 
   1770    uint32_t profileSpace = qcms_profile_get_color_space(mInProfile);
   1771    if (profileSpace != icSigGrayData) {
   1772      mUsePipeTransform = true;
   1773      // When we convert the data to rgb we always pass either B8G8R8A8 or
   1774      // B8G8R8X8 to ConvertYCbCrToRGB32. After that we input the data to the
   1775      // surface pipe where qcms happens in the pipeline. So when the data gets
   1776      // to qcms it will always be in our preferred format and so
   1777      // gfxPlatform::GetCMSOSRGBAType is the correct type.
   1778      inType = gfxPlatform::GetCMSOSRGBAType();
   1779      outType = inType;
   1780    } else {
   1781      // We can't use SurfacePipe to do the color management (it can't handle
   1782      // grayscale data), we have to do it ourselves on the grayscale data
   1783      // before passing the now RGB data to SurfacePipe.
   1784      mUsePipeTransform = false;
   1785      if (mHasAlpha) {
   1786        inType = QCMS_DATA_GRAYA_8;
   1787        outType = gfxPlatform::GetCMSOSRGBAType();
   1788      } else {
   1789        inType = QCMS_DATA_GRAY_8;
   1790        outType = gfxPlatform::GetCMSOSRGBAType();
   1791      }
   1792    }
   1793 
   1794    mTransform = qcms_transform_create(mInProfile, inType,
   1795                                       GetCMSOutputProfile(), outType, intent);
   1796  }
   1797 
   1798  // Get suggested format and size. Note that GetYCbCrToRGBDestFormatAndSize
   1799  // force format to be B8G8R8X8 if it's not.
   1800  gfx::SurfaceFormat format = SurfaceFormat::OS_RGBX;
   1801  gfx::GetYCbCrToRGBDestFormatAndSize(*decodedData, format, rgbSize);
   1802  if (mHasAlpha) {
   1803    // We would use libyuv to do the YCbCrA -> ARGB convertion, which only
   1804    // works for B8G8R8A8.
   1805    format = SurfaceFormat::B8G8R8A8;
   1806  }
   1807 
   1808  const int bytesPerPixel = BytesPerPixel(format);
   1809 
   1810  const CheckedInt rgbStride = CheckedInt<int>(rgbSize.width) * bytesPerPixel;
   1811  const CheckedInt rgbBufLength = rgbStride * rgbSize.height;
   1812 
   1813  if (!rgbStride.isValid() || !rgbBufLength.isValid()) {
   1814    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1815            ("[this=%p] overflow calculating rgbBufLength: rbgSize.width: %d, "
   1816             "rgbSize.height: %d, "
   1817             "bytesPerPixel: %u",
   1818             this, rgbSize.width, rgbSize.height, bytesPerPixel));
   1819    return AsVariant(NonDecoderResult::SizeOverflow);
   1820  }
   1821 
   1822  UniquePtr<uint8_t[]> rgbBuf =
   1823      MakeUniqueFallible<uint8_t[]>(rgbBufLength.value());
   1824  if (!rgbBuf) {
   1825    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1826            ("[this=%p] allocation of %u-byte rgbBuf failed", this,
   1827             rgbBufLength.value()));
   1828    return AsVariant(NonDecoderResult::OutOfMemory);
   1829  }
   1830 
   1831  PremultFunc premultOp = nullptr;
   1832  const auto wantPremultiply =
   1833      !bool(GetSurfaceFlags() & SurfaceFlags::NO_PREMULTIPLY_ALPHA);
   1834  if (decodedData->mAlpha) {
   1835    const bool& hasPremultiply = decodedData->mAlpha->mPremultiplied;
   1836    if (mTransform) {
   1837      // Color management needs to be done on non-premult data, so
   1838      // ConvertYCbCrToRGB32 needs to produce non-premult data, then color
   1839      // management can happen (either here for grayscale data, or in surface
   1840      // pipe otherwise) and then later in the surface pipe we will convert to
   1841      // premult if needed.
   1842      if (hasPremultiply) {
   1843        premultOp = libyuv::ARGBUnattenuate;
   1844      }
   1845    } else {
   1846      // no color management, so premult conversion (if needed) can be done by
   1847      // ConvertYCbCrToRGB32 before surface pipe
   1848      if (wantPremultiply && !hasPremultiply) {
   1849        premultOp = libyuv::ARGBAttenuate;
   1850      } else if (!wantPremultiply && hasPremultiply) {
   1851        premultOp = libyuv::ARGBUnattenuate;
   1852      }
   1853    }
   1854  }
   1855 
   1856  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1857          ("[this=%p] calling gfx::ConvertYCbCrToRGB32 premultOp: %p", this,
   1858           premultOp));
   1859  nsresult result = gfx::ConvertYCbCrToRGB32(*decodedData, format, rgbBuf.get(),
   1860                                             rgbStride.value(), premultOp);
   1861  if (!NS_SUCCEEDED(result)) {
   1862    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1863            ("[this=%p] ConvertYCbCrToRGB32 failure", this));
   1864    return AsVariant(NonDecoderResult::ConvertYCbCrFailure);
   1865  }
   1866 
   1867  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1868          ("[this=%p] calling SurfacePipeFactory::CreateSurfacePipe", this));
   1869 
   1870  SurfacePipeFlags pipeFlags = SurfacePipeFlags();
   1871  if (decodedData->mAlpha && mTransform) {
   1872    // we know data is non-premult in this case, see above, so if we
   1873    // wantPremultiply then we have to ask the surface pipe to convert for us
   1874    if (wantPremultiply) {
   1875      pipeFlags |= SurfacePipeFlags::PREMULTIPLY_ALPHA;
   1876    }
   1877  }
   1878 
   1879  Maybe<SurfacePipe> pipe = Nothing();
   1880  auto* transform = mUsePipeTransform ? mTransform : nullptr;
   1881 
   1882  if (mIsAnimated) {
   1883    SurfaceFormat outFormat =
   1884        decodedData->mAlpha ? SurfaceFormat::OS_RGBA : SurfaceFormat::OS_RGBX;
   1885    Maybe<AnimationParams> animParams;
   1886    if (!IsFirstFrameDecode()) {
   1887      animParams.emplace(FullFrame().ToUnknownRect(), parsedImage.mDuration,
   1888                         parsedImage.mFrameNum, BlendMethod::SOURCE,
   1889                         DisposalMethod::CLEAR_ALL);
   1890    }
   1891    pipe = SurfacePipeFactory::CreateSurfacePipe(
   1892        this, Size(), OutputSize(), FullFrame(), format, outFormat, animParams,
   1893        transform, pipeFlags);
   1894  } else {
   1895    pipe = SurfacePipeFactory::CreateReorientSurfacePipe(
   1896        this, Size(), OutputSize(), format, format, transform, GetOrientation(),
   1897        pipeFlags);
   1898  }
   1899 
   1900  if (pipe.isNothing()) {
   1901    MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1902            ("[this=%p] could not initialize surface pipe", this));
   1903    return AsVariant(NonDecoderResult::PipeInitError);
   1904  }
   1905 
   1906  MOZ_LOG(sAVIFLog, LogLevel::Debug, ("[this=%p] writing to surface", this));
   1907  const uint8_t* endOfRgbBuf = {rgbBuf.get() + rgbBufLength.value()};
   1908  WriteState writeBufferResult = WriteState::NEED_MORE_DATA;
   1909  uint8_t* grayLine = nullptr;
   1910  int32_t multiplier = 1;
   1911  if (mTransform && !mUsePipeTransform) {
   1912    if (mHasAlpha) {
   1913      multiplier = 2;
   1914    }
   1915    // We know this calculation doesn't overflow because rgbStride is a larger
   1916    // value and is valid here.
   1917    grayLine = new uint8_t[multiplier * rgbSize.width];
   1918  }
   1919  for (uint8_t* rowPtr = rgbBuf.get(); rowPtr < endOfRgbBuf;
   1920       rowPtr += rgbStride.value()) {
   1921    if (mTransform && !mUsePipeTransform) {
   1922      // format is B8G8R8A8 or B8G8R8X8, so 1 offset picks G
   1923      for (int32_t i = 0; i < rgbSize.width; i++) {
   1924        grayLine[multiplier * i] = rowPtr[i * bytesPerPixel + 1];
   1925        if (mHasAlpha) {
   1926          grayLine[multiplier * i + 1] = rowPtr[i * bytesPerPixel + 3];
   1927        }
   1928      }
   1929      qcms_transform_data(mTransform, grayLine, rowPtr, rgbSize.width);
   1930    }
   1931 
   1932    writeBufferResult = pipe->WriteBuffer(reinterpret_cast<uint32_t*>(rowPtr));
   1933 
   1934    Maybe<SurfaceInvalidRect> invalidRect = pipe->TakeInvalidRect();
   1935    if (invalidRect) {
   1936      PostInvalidation(invalidRect->mInputSpaceRect,
   1937                       Some(invalidRect->mOutputSpaceRect));
   1938    }
   1939 
   1940    if (writeBufferResult == WriteState::FAILURE) {
   1941      MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1942              ("[this=%p] error writing rowPtr to surface pipe", this));
   1943 
   1944    } else if (writeBufferResult == WriteState::FINISHED) {
   1945      MOZ_ASSERT(rowPtr + rgbStride.value() == endOfRgbBuf);
   1946    }
   1947  }
   1948  if (mTransform && !mUsePipeTransform) {
   1949    delete[] grayLine;
   1950  }
   1951 
   1952  MOZ_LOG(sAVIFLog, LogLevel::Debug,
   1953          ("[this=%p] writing to surface complete", this));
   1954 
   1955  if (writeBufferResult == WriteState::FINISHED) {
   1956    PostFrameStop(mHasAlpha ? Opacity::SOME_TRANSPARENCY
   1957                            : Opacity::FULLY_OPAQUE);
   1958 
   1959    if (!mIsAnimated || IsFirstFrameDecode()) {
   1960      PostDecodeDone();
   1961      return DecodeResult(NonDecoderResult::Complete);
   1962    }
   1963 
   1964    if (isDone) {
   1965      PostDecodeDone();
   1966      return DecodeResult(NonDecoderResult::Complete);
   1967    }
   1968 
   1969    return DecodeResult(NonDecoderResult::OutputAvailable);
   1970  }
   1971 
   1972  return AsVariant(NonDecoderResult::WriteBufferError);
   1973 }
   1974 
   1975 /* static */
   1976 bool nsAVIFDecoder::IsDecodeSuccess(const DecodeResult& aResult) {
   1977  return aResult == DecodeResult(NonDecoderResult::OutputAvailable) ||
   1978         aResult == DecodeResult(NonDecoderResult::Complete) ||
   1979         aResult == DecodeResult(Dav1dResult(0)) ||
   1980         aResult == DecodeResult(AOMResult(AOM_CODEC_OK));
   1981 }
   1982 
   1983 void nsAVIFDecoder::RecordDecodeResultTelemetry(
   1984    const nsAVIFDecoder::DecodeResult& aResult) {
   1985  if (aResult.is<Mp4parseStatus>()) {
   1986    switch (aResult.as<Mp4parseStatus>()) {
   1987      case MP4PARSE_STATUS_OK:
   1988        MOZ_ASSERT_UNREACHABLE(
   1989            "Expect NonDecoderResult, Dav1dResult or AOMResult");
   1990        return;
   1991      case MP4PARSE_STATUS_BAD_ARG:
   1992      case MP4PARSE_STATUS_INVALID:
   1993      case MP4PARSE_STATUS_UNSUPPORTED:
   1994      case MP4PARSE_STATUS_EOF:
   1995      case MP4PARSE_STATUS_IO:
   1996        mozilla::glean::avif::decode_result
   1997            .EnumGet(glean::avif::DecodeResultLabel::eParseError)
   1998            .Add();
   1999        return;
   2000      case MP4PARSE_STATUS_OOM:
   2001        mozilla::glean::avif::decode_result
   2002            .EnumGet(glean::avif::DecodeResultLabel::eOutOfMemory)
   2003            .Add();
   2004        return;
   2005      case MP4PARSE_STATUS_MISSING_AVIF_OR_AVIS_BRAND:
   2006        mozilla::glean::avif::decode_result
   2007            .EnumGet(glean::avif::DecodeResultLabel::eMissingBrand)
   2008            .Add();
   2009        return;
   2010      case MP4PARSE_STATUS_FTYP_NOT_FIRST:
   2011        mozilla::glean::avif::decode_result
   2012            .EnumGet(glean::avif::DecodeResultLabel::eFtypNotFirst)
   2013            .Add();
   2014        return;
   2015      case MP4PARSE_STATUS_NO_IMAGE:
   2016        mozilla::glean::avif::decode_result
   2017            .EnumGet(glean::avif::DecodeResultLabel::eNoImage)
   2018            .Add();
   2019        return;
   2020      case MP4PARSE_STATUS_MOOV_BAD_QUANTITY:
   2021        mozilla::glean::avif::decode_result
   2022            .EnumGet(glean::avif::DecodeResultLabel::eMultipleMoov)
   2023            .Add();
   2024        return;
   2025      case MP4PARSE_STATUS_MOOV_MISSING:
   2026        mozilla::glean::avif::decode_result
   2027            .EnumGet(glean::avif::DecodeResultLabel::eNoMoov)
   2028            .Add();
   2029        return;
   2030      case MP4PARSE_STATUS_LSEL_NO_ESSENTIAL:
   2031        mozilla::glean::avif::decode_result
   2032            .EnumGet(glean::avif::DecodeResultLabel::eLselNoEssential)
   2033            .Add();
   2034        return;
   2035      case MP4PARSE_STATUS_A1OP_NO_ESSENTIAL:
   2036        mozilla::glean::avif::decode_result
   2037            .EnumGet(glean::avif::DecodeResultLabel::eA1opNoEssential)
   2038            .Add();
   2039        return;
   2040      case MP4PARSE_STATUS_A1LX_ESSENTIAL:
   2041        mozilla::glean::avif::decode_result
   2042            .EnumGet(glean::avif::DecodeResultLabel::eA1lxEssential)
   2043            .Add();
   2044        return;
   2045      case MP4PARSE_STATUS_TXFORM_NO_ESSENTIAL:
   2046        mozilla::glean::avif::decode_result
   2047            .EnumGet(glean::avif::DecodeResultLabel::eTxformNoEssential)
   2048            .Add();
   2049        return;
   2050      case MP4PARSE_STATUS_PITM_MISSING:
   2051        mozilla::glean::avif::decode_result
   2052            .EnumGet(glean::avif::DecodeResultLabel::eNoPrimaryItem)
   2053            .Add();
   2054        return;
   2055      case MP4PARSE_STATUS_IMAGE_ITEM_TYPE:
   2056        mozilla::glean::avif::decode_result
   2057            .EnumGet(glean::avif::DecodeResultLabel::eImageItemType)
   2058            .Add();
   2059        return;
   2060      case MP4PARSE_STATUS_ITEM_TYPE_MISSING:
   2061        mozilla::glean::avif::decode_result
   2062            .EnumGet(glean::avif::DecodeResultLabel::eItemTypeMissing)
   2063            .Add();
   2064        return;
   2065      case MP4PARSE_STATUS_CONSTRUCTION_METHOD:
   2066        mozilla::glean::avif::decode_result
   2067            .EnumGet(glean::avif::DecodeResultLabel::eConstructionMethod)
   2068            .Add();
   2069        return;
   2070      case MP4PARSE_STATUS_PITM_NOT_FOUND:
   2071        mozilla::glean::avif::decode_result
   2072            .EnumGet(glean::avif::DecodeResultLabel::eItemLocNotFound)
   2073            .Add();
   2074        return;
   2075      case MP4PARSE_STATUS_IDAT_MISSING:
   2076        mozilla::glean::avif::decode_result
   2077            .EnumGet(glean::avif::DecodeResultLabel::eNoItemDataBox)
   2078            .Add();
   2079        return;
   2080      default:
   2081        mozilla::glean::avif::decode_result
   2082            .EnumGet(glean::avif::DecodeResultLabel::eUncategorized)
   2083            .Add();
   2084        return;
   2085    }
   2086 
   2087    MOZ_LOG(sAVIFLog, LogLevel::Error,
   2088            ("[this=%p] unexpected Mp4parseStatus value: %d", this,
   2089             aResult.as<Mp4parseStatus>()));
   2090    MOZ_ASSERT(false, "unexpected Mp4parseStatus value");
   2091    mozilla::glean::avif::decode_result
   2092        .EnumGet(glean::avif::DecodeResultLabel::eInvalidParseStatus)
   2093        .Add();
   2094 
   2095  } else if (aResult.is<NonDecoderResult>()) {
   2096    switch (aResult.as<NonDecoderResult>()) {
   2097      case NonDecoderResult::NeedMoreData:
   2098        return;
   2099      case NonDecoderResult::OutputAvailable:
   2100        return;
   2101      case NonDecoderResult::Complete:
   2102        return;
   2103      case NonDecoderResult::SizeOverflow:
   2104        mozilla::glean::avif::decode_result
   2105            .EnumGet(glean::avif::DecodeResultLabel::eSizeOverflow)
   2106            .Add();
   2107        return;
   2108      case NonDecoderResult::OutOfMemory:
   2109        mozilla::glean::avif::decode_result
   2110            .EnumGet(glean::avif::DecodeResultLabel::eOutOfMemory)
   2111            .Add();
   2112        return;
   2113      case NonDecoderResult::PipeInitError:
   2114        mozilla::glean::avif::decode_result
   2115            .EnumGet(glean::avif::DecodeResultLabel::ePipeInitError)
   2116            .Add();
   2117        return;
   2118      case NonDecoderResult::WriteBufferError:
   2119        mozilla::glean::avif::decode_result
   2120            .EnumGet(glean::avif::DecodeResultLabel::eWriteBufferError)
   2121            .Add();
   2122        return;
   2123      case NonDecoderResult::AlphaYSizeMismatch:
   2124 
   2125        mozilla::glean::avif::decode_result
   2126            .EnumGet(glean::avif::DecodeResultLabel::eAlphaYSzMismatch)
   2127            .Add();
   2128        return;
   2129      case NonDecoderResult::AlphaYColorDepthMismatch:
   2130        mozilla::glean::avif::decode_result
   2131            .EnumGet(glean::avif::DecodeResultLabel::eAlphaYBpcMismatch)
   2132            .Add();
   2133        return;
   2134      case NonDecoderResult::MetadataImageSizeMismatch:
   2135        mozilla::glean::avif::decode_result
   2136            .EnumGet(glean::avif::DecodeResultLabel::eIspeMismatch)
   2137            .Add();
   2138        return;
   2139      case NonDecoderResult::RenderSizeMismatch:
   2140        mozilla::glean::avif::decode_result
   2141            .EnumGet(glean::avif::DecodeResultLabel::eRenderSizeMismatch)
   2142            .Add();
   2143        return;
   2144      case NonDecoderResult::FrameSizeChanged:
   2145        mozilla::glean::avif::decode_result
   2146            .EnumGet(glean::avif::DecodeResultLabel::eFrameSizeChanged)
   2147            .Add();
   2148        return;
   2149      case NonDecoderResult::InvalidCICP:
   2150        mozilla::glean::avif::decode_result
   2151            .EnumGet(glean::avif::DecodeResultLabel::eInvalidCicp)
   2152            .Add();
   2153        return;
   2154      case NonDecoderResult::NoSamples:
   2155        mozilla::glean::avif::decode_result
   2156            .EnumGet(glean::avif::DecodeResultLabel::eNoSamples)
   2157            .Add();
   2158        return;
   2159      case NonDecoderResult::ConvertYCbCrFailure:
   2160        mozilla::glean::avif::decode_result
   2161            .EnumGet(glean::avif::DecodeResultLabel::eConvertycbcrFailure)
   2162            .Add();
   2163        return;
   2164    }
   2165    MOZ_ASSERT_UNREACHABLE("unknown NonDecoderResult");
   2166  } else {
   2167    MOZ_ASSERT(aResult.is<Dav1dResult>() || aResult.is<AOMResult>());
   2168    if (aResult.is<Dav1dResult>()) {
   2169      mozilla::glean::avif::decoder.EnumGet(glean::avif::DecoderLabel::eDav1d)
   2170          .Add();
   2171    } else {
   2172      mozilla::glean::avif::decoder.EnumGet(glean::avif::DecoderLabel::eAom)
   2173          .Add();
   2174    }
   2175 
   2176    if (IsDecodeSuccess(aResult)) {
   2177      mozilla::glean::avif::decode_result
   2178          .EnumGet(glean::avif::DecodeResultLabel::eSuccess)
   2179          .Add();
   2180    } else {
   2181      mozilla::glean::avif::decode_result
   2182          .EnumGet(glean::avif::DecodeResultLabel::eDecodeError)
   2183          .Add();
   2184    }
   2185  }
   2186 }
   2187 
   2188 Maybe<glean::impl::MemoryDistributionMetric> nsAVIFDecoder::SpeedMetric()
   2189    const {
   2190  return Some(glean::image_decode::speed_avif);
   2191 }
   2192 
   2193 }  // namespace image
   2194 }  // namespace mozilla