tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

AOMDecoder.cpp (38063B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "AOMDecoder.h"
      8 
      9 #include <aom/aom_image.h>
     10 #include <aom/aomdx.h>
     11 #include <stdint.h>
     12 
     13 #include <algorithm>
     14 
     15 #include "BitReader.h"
     16 #include "BitWriter.h"
     17 #include "ImageContainer.h"
     18 #include "MediaResult.h"
     19 #include "TimeUnits.h"
     20 #include "VideoUtils.h"
     21 #include "gfx2DGlue.h"
     22 #include "gfxUtils.h"
     23 #include "mozilla/PodOperations.h"
     24 #include "mozilla/SyncRunnable.h"
     25 #include "mozilla/TaskQueue.h"
     26 #include "nsError.h"
     27 #include "nsThreadUtils.h"
     28 #include "prsystem.h"
     29 
#undef LOG
// Debug logging helpers. All of these funnel into the PDM (Platform Decoder
// Module) log, prepending the current function name.
#define LOG(arg, ...)                                                  \
 DDMOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, "::%s: " arg, __func__, \
           ##__VA_ARGS__)
// Logs a message together with the human-readable libaom error string and
// its numeric code.
#define LOG_RESULT(code, message, ...)                                        \
 DDMOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, "::%s: %s (code %d) " message, \
           __func__, aom_codec_err_to_string(code), (int)code, ##__VA_ARGS__)
// Like LOG_RESULT, but for contexts where `this` is not the decoder; the
// decoder instance is passed explicitly as _this.
#define LOGEX_RESULT(_this, code, message, ...)         \
 DDMOZ_LOGEX(_this, sPDMLog, mozilla::LogLevel::Debug, \
             "::%s: %s (code %d) " message, __func__,  \
             aom_codec_err_to_string(code), (int)code, ##__VA_ARGS__)
// Like LOG_RESULT, but usable from static member functions (no decoder
// instance available).
#define LOG_STATIC_RESULT(code, message, ...)                 \
 MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug,                  \
         ("AOMDecoder::%s: %s (code %d) " message, __func__, \
          aom_codec_err_to_string(code), (int)code, ##__VA_ARGS__))

// Asserts that a BitReader/BitWriter is currently on a byte boundary.
#define ASSERT_BYTE_ALIGNED(bitIO) MOZ_ASSERT((bitIO).BitCount() % 8 == 0)
     47 
     48 namespace mozilla {
     49 
     50 using namespace gfx;
     51 using namespace layers;
     52 using gfx::CICP::ColourPrimaries;
     53 using gfx::CICP::MatrixCoefficients;
     54 using gfx::CICP::TransferCharacteristics;
     55 
     56 static MediaResult InitContext(AOMDecoder& aAOMDecoder, aom_codec_ctx_t* aCtx,
     57                               const VideoInfo& aInfo) {
     58  aom_codec_iface_t* dx = aom_codec_av1_dx();
     59  if (!dx) {
     60    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     61                       RESULT_DETAIL("Couldn't get AV1 decoder interface."));
     62  }
     63 
     64  size_t decode_threads = 2;
     65  if (aInfo.mDisplay.width >= 2048) {
     66    decode_threads = 8;
     67  } else if (aInfo.mDisplay.width >= 1024) {
     68    decode_threads = 4;
     69  }
     70  decode_threads = std::min(decode_threads, GetNumberOfProcessors());
     71 
     72  aom_codec_dec_cfg_t config;
     73  PodZero(&config);
     74  config.threads = static_cast<unsigned int>(decode_threads);
     75  config.w = config.h = 0;  // set after decode
     76  config.allow_lowbitdepth = true;
     77 
     78  aom_codec_flags_t flags = 0;
     79 
     80  auto res = aom_codec_dec_init(aCtx, dx, &config, flags);
     81  if (res != AOM_CODEC_OK) {
     82    LOGEX_RESULT(&aAOMDecoder, res, "Codec initialization failed, res=%d",
     83                 int(res));
     84    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
     85                       RESULT_DETAIL("AOM error initializing AV1 decoder: %s",
     86                                     aom_codec_err_to_string(res)));
     87  }
     88  return NS_OK;
     89 }
     90 
// Constructs the decoder: captures the image container, creates a dedicated
// task queue on the platform-decoder thread pool, copies the stream's video
// config, and records the optional tracking id used for performance
// recording. The libaom context is only zeroed here; it is initialized later
// in Init().
AOMDecoder::AOMDecoder(const CreateDecoderParams& aParams)
    : mImageContainer(aParams.mImageContainer),
      mTaskQueue(TaskQueue::Create(
          GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), "AOMDecoder")),
      mInfo(aParams.VideoConfig()),
      mTrackingId(aParams.mTrackingId) {
  PodZero(&mCodec);
}
     99 
// Trivial destructor: the codec context is destroyed in Shutdown(), not here.
AOMDecoder::~AOMDecoder() = default;
    101 
    102 RefPtr<ShutdownPromise> AOMDecoder::Shutdown() {
    103  RefPtr<AOMDecoder> self = this;
    104  return InvokeAsync(mTaskQueue, __func__, [self]() {
    105    AUTO_PROFILER_LABEL("AOMDecoder::Shutdown", MEDIA_PLAYBACK);
    106    auto res = aom_codec_destroy(&self->mCodec);
    107    if (res != AOM_CODEC_OK) {
    108      LOGEX_RESULT(self.get(), res, "aom_codec_destroy");
    109    }
    110    return self->mTaskQueue->BeginShutdown();
    111  });
    112 }
    113 
    114 RefPtr<MediaDataDecoder::InitPromise> AOMDecoder::Init() {
    115  AUTO_PROFILER_LABEL("AOMDecoder::Init", MEDIA_PLAYBACK);
    116  MediaResult rv = InitContext(*this, &mCodec, mInfo);
    117  if (NS_FAILED(rv)) {
    118    return AOMDecoder::InitPromise::CreateAndReject(rv, __func__);
    119  }
    120  return AOMDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
    121                                                   __func__);
    122 }
    123 
// Flushes the decoder on its task queue. Finalizes any in-flight performance
// recording (the max timestamp closes out all pending records) and resolves
// immediately; no decoder state besides the recorder is touched here.
RefPtr<MediaDataDecoder::FlushPromise> AOMDecoder::Flush() {
  return InvokeAsync(mTaskQueue, __func__, [this, self = RefPtr(this)]() {
    AUTO_PROFILER_LABEL("AOMDecoder::Flush", MEDIA_PLAYBACK);
    // Close out every queued performance record.
    mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
    return FlushPromise::CreateAndResolve(true, __func__);
  });
}
    131 
// UniquePtr dtor wrapper for aom_image_t: releases the image via
// aom_img_free() when the owning UniquePtr is destroyed.
struct AomImageFree {
  void operator()(aom_image_t* img) { aom_img_free(img); }
};
    136 
// Decodes one compressed sample (must run on the task queue) and converts
// every frame libaom produces into a VideoData. Resolves with the decoded
// frames (possibly none) or rejects with NS_ERROR_DOM_MEDIA_DECODE_ERR.
RefPtr<MediaDataDecoder::DecodePromise> AOMDecoder::ProcessDecode(
    MediaRawData* aSample) {
  AUTO_PROFILER_LABEL("AOMDecoder::ProcessDecode", MEDIA_PLAYBACK);
  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());

#if defined(DEBUG)
  // Cross-check the demuxer's keyframe flag against libaom's own parse.
  NS_ASSERTION(
      IsKeyframe(*aSample) == aSample->mKeyframe,
      "AOM Decode Keyframe error sample->mKeyframe and si.si_kf out of sync");
#endif

  // Flags describing this decode for the performance recorder.
  MediaInfoFlag flag = MediaInfoFlag::None;
  flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
                              : MediaInfoFlag::NonKeyFrame);
  flag |= MediaInfoFlag::SoftwareDecoding;
  flag |= MediaInfoFlag::VIDEO_AV1;

  // Performance recording only happens when a tracking id was supplied.
  mTrackingId.apply([&](const auto& aId) {
    mPerformanceRecorder.Start(aSample->mTimecode.ToMicroseconds(),
                               "AOMDecoder"_ns, aId, flag);
  });

  // Feed the entire sample to libaom; any non-zero status is a decode error.
  if (aom_codec_err_t r = aom_codec_decode(&mCodec, aSample->Data(),
                                           aSample->Size(), nullptr)) {
    LOG_RESULT(r, "Decode error!");
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                    RESULT_DETAIL("AOM error decoding AV1 sample: %s",
                                  aom_codec_err_to_string(r))),
        __func__);
  }

  aom_codec_iter_t iter = nullptr;
  aom_image_t* img;
  // NOTE(review): img8 is never used in this function — possibly a leftover
  // from an 8-bit conversion path; confirm before removing.
  UniquePtr<aom_image_t, AomImageFree> img8;
  DecodedData results;

  // Collect every frame this sample produced (typically one).
  while ((img = aom_codec_get_frame(&mCodec, &iter))) {
    NS_ASSERTION(
        img->fmt == AOM_IMG_FMT_I420 || img->fmt == AOM_IMG_FMT_I42016 ||
            img->fmt == AOM_IMG_FMT_I444 || img->fmt == AOM_IMG_FMT_I44416,
        "AV1 image format not I420 or I444");

    // Chroma shifts are rounded down as per the decoding examples in the SDK
    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = img->planes[0];
    b.mPlanes[0].mStride = img->stride[0];
    b.mPlanes[0].mHeight = img->d_h;
    b.mPlanes[0].mWidth = img->d_w;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = img->planes[1];
    b.mPlanes[1].mStride = img->stride[1];
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = img->planes[2];
    b.mPlanes[2].mStride = img->stride[2];
    b.mPlanes[2].mSkip = 0;

    // Chroma plane dimensions depend on the subsampling mode.
    if (img->fmt == AOM_IMG_FMT_I420 || img->fmt == AOM_IMG_FMT_I42016) {
      // 4:2:0 — chroma planes are half size in both dimensions (round up
      // for odd luma sizes via the +1 before the shift).
      b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;

      b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;

      b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
    } else if (img->fmt == AOM_IMG_FMT_I444 || img->fmt == AOM_IMG_FMT_I44416) {
      // 4:4:4 — chroma planes are full size.
      b.mPlanes[1].mHeight = img->d_h;
      b.mPlanes[1].mWidth = img->d_w;

      b.mPlanes[2].mHeight = img->d_h;
      b.mPlanes[2].mWidth = img->d_w;
    } else {
      LOG("AOM Unknown image format");
      return DecodePromise::CreateAndReject(
          MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                      RESULT_DETAIL("AOM Unknown image format")),
          __func__);
    }

    // 8-bit is the buffer's default depth; only 10/12-bit are set explicitly.
    if (img->bit_depth == 10) {
      b.mColorDepth = ColorDepth::COLOR_10;
    } else if (img->bit_depth == 12) {
      b.mColorDepth = ColorDepth::COLOR_12;
    }

    // Map CICP matrix coefficients to our YUV color space; fall back to a
    // frame-size-based default for anything unrecognized.
    switch (img->mc) {
      case AOM_CICP_MC_BT_601:
        b.mYUVColorSpace = YUVColorSpace::BT601;
        break;
      case AOM_CICP_MC_BT_2020_NCL:
      case AOM_CICP_MC_BT_2020_CL:
        b.mYUVColorSpace = YUVColorSpace::BT2020;
        break;
      case AOM_CICP_MC_BT_709:
        b.mYUVColorSpace = YUVColorSpace::BT709;
        break;
      default:
        b.mYUVColorSpace = DefaultColorSpace({img->d_w, img->d_h});
        break;
    }
    b.mColorRange = img->range == AOM_CR_FULL_RANGE ? ColorRange::FULL
                                                    : ColorRange::LIMITED;

    // Map CICP colour primaries; unknown values default to BT.709.
    switch (img->cp) {
      case AOM_CICP_CP_BT_709:
        b.mColorPrimaries = ColorSpace2::BT709;
        break;
      case AOM_CICP_CP_BT_2020:
        b.mColorPrimaries = ColorSpace2::BT2020;
        break;
      default:
        b.mColorPrimaries = ColorSpace2::BT709;
        break;
    }

    // Copy the planes into an image wrapped in a VideoData carrying the
    // sample's timing information.
    Result<already_AddRefed<VideoData>, MediaResult> r =
        VideoData::CreateAndCopyData(
            mInfo, mImageContainer, aSample->mOffset, aSample->mTime,
            aSample->mDuration, b, aSample->mKeyframe, aSample->mTimecode,
            mInfo.ScaledImageRect(img->d_w, img->d_h), nullptr);

    if (r.isErr()) {
      MediaResult rs = r.unwrapErr();
      LOG("VideoData::CreateAndCopyData error (source %ux%u display %ux%u "
          "picture %ux%u)  - %s: %s",
          img->d_w, img->d_h, mInfo.mDisplay.width, mInfo.mDisplay.height,
          mInfo.mImage.width, mInfo.mImage.height, rs.ErrorName().get(),
          rs.Message().get());

      return DecodePromise::CreateAndReject(std::move(rs), __func__);
    }

    RefPtr<VideoData> v = r.unwrap();
    MOZ_ASSERT(v);

    // Record the completed decode stage with format/color metadata.
    mPerformanceRecorder.Record(
        aSample->mTimecode.ToMicroseconds(), [&](DecodeStage& aStage) {
          aStage.SetResolution(mInfo.mImage.width, mInfo.mImage.height);
          auto format = [&]() -> Maybe<DecodeStage::ImageFormat> {
            switch (img->fmt) {
              case AOM_IMG_FMT_I420:
              case AOM_IMG_FMT_I42016:
                return Some(DecodeStage::YUV420P);
              case AOM_IMG_FMT_I444:
              case AOM_IMG_FMT_I44416:
                return Some(DecodeStage::YUV444P);
              default:
                return Nothing();
            }
          }();
          format.apply([&](auto& aFmt) { aStage.SetImageFormat(aFmt); });
          aStage.SetYUVColorSpace(b.mYUVColorSpace);
          aStage.SetColorRange(b.mColorRange);
          aStage.SetColorDepth(b.mColorDepth);
          aStage.SetStartTimeAndEndTime(v->mTime.ToMicroseconds(),
                                        v->GetEndTime().ToMicroseconds());
        });
    results.AppendElement(std::move(v));
  }
  return DecodePromise::CreateAndResolve(std::move(results), __func__);
}
    300 
// Public entry point: bounces the sample to the task queue, where the real
// work happens in ProcessDecode().
RefPtr<MediaDataDecoder::DecodePromise> AOMDecoder::Decode(
    MediaRawData* aSample) {
  return InvokeAsync<MediaRawData*>(mTaskQueue, this, __func__,
                                    &AOMDecoder::ProcessDecode, aSample);
}
    306 
    307 RefPtr<MediaDataDecoder::DecodePromise> AOMDecoder::Drain() {
    308  return InvokeAsync(mTaskQueue, __func__, [] {
    309    AUTO_PROFILER_LABEL("AOMDecoder::Drain", MEDIA_PLAYBACK);
    310    return DecodePromise::CreateAndResolve(DecodedData(), __func__);
    311  });
    312 }
    313 
    314 /* static */
    315 bool AOMDecoder::IsAV1(const nsACString& aMimeType) {
    316  return aMimeType.EqualsLiteral("video/av1");
    317 }
    318 
    319 /* static */
    320 bool AOMDecoder::IsMainProfile(const MediaByteBuffer* aBox) {
    321  if (!aBox || aBox->IsEmpty()) {
    322    return false;
    323  }
    324  AV1SequenceInfo av1Info;
    325  MediaResult seqHdrResult;
    326  TryReadAV1CBox(aBox, av1Info, seqHdrResult);
    327  return seqHdrResult.Code() == NS_OK && av1Info.mProfile == 0;
    328 }
    329 
    330 /* static */
    331 bool AOMDecoder::IsKeyframe(Span<const uint8_t> aBuffer) {
    332  aom_codec_stream_info_t info;
    333  PodZero(&info);
    334 
    335  auto res = aom_codec_peek_stream_info(aom_codec_av1_dx(), aBuffer.Elements(),
    336                                        aBuffer.Length(), &info);
    337  if (res != AOM_CODEC_OK) {
    338    LOG_STATIC_RESULT(
    339        res, "couldn't get keyframe flag with aom_codec_peek_stream_info");
    340    return false;
    341  }
    342 
    343  return bool(info.is_kf);
    344 }
    345 
    346 /* static */
    347 gfx::IntSize AOMDecoder::GetFrameSize(Span<const uint8_t> aBuffer) {
    348  aom_codec_stream_info_t info;
    349  PodZero(&info);
    350 
    351  auto res = aom_codec_peek_stream_info(aom_codec_av1_dx(), aBuffer.Elements(),
    352                                        aBuffer.Length(), &info);
    353  if (res != AOM_CODEC_OK) {
    354    LOG_STATIC_RESULT(
    355        res, "couldn't get frame size with aom_codec_peek_stream_info");
    356  }
    357 
    358  return gfx::IntSize(info.w, info.h);
    359 }
    360 
/* static */
// Returns an iterator over the OBUs (open bitstream units) in aData. The
// iterator only borrows the span, so aData must outlive it.
AOMDecoder::OBUIterator AOMDecoder::ReadOBUs(const Span<const uint8_t>& aData) {
  return OBUIterator(aData);
}
    365 
    366 void AOMDecoder::OBUIterator::UpdateNext() {
    367  // If mGoNext is not set, we don't need to load a new OBU.
    368  if (!mGoNext) {
    369    return;
    370  }
    371  // Check if we've reached the end of the data. Allow mGoNext to stay true so
    372  // that HasNext() will return false.
    373  if (mPosition >= mData.Length()) {
    374    return;
    375  }
    376  mGoNext = false;
    377 
    378  // If retrieving the next OBU fails, reset the current OBU and set the
    379  // position past the end of the data so that HasNext() returns false.
    380  auto resetExit = MakeScopeExit([&]() {
    381    mCurrent = OBUInfo();
    382    mPosition = mData.Length();
    383  });
    384 
    385  auto subspan = mData.Subspan(mPosition, mData.Length() - mPosition);
    386  BitReader br(subspan.Elements(), subspan.Length() * 8);
    387  OBUInfo temp;
    388 
    389  // AV1 spec available at:
    390  // https://aomediacodec.github.io/av1-spec/
    391  // or https://aomediacodec.github.io/av1-spec/av1-spec.pdf
    392 
    393  // begin open_bitstream_unit( )
    394  // https://aomediacodec.github.io/av1-spec/#general-obu-syntax
    395 
    396  // begin obu_header( )
    397  // https://aomediacodec.github.io/av1-spec/#obu-header-syntax
    398  br.ReadBit();  // obu_forbidden_bit
    399  temp.mType = static_cast<OBUType>(br.ReadBits(4));
    400  if (!temp.IsValid()) {
    401    // Non-fatal error, unknown OBUs can be skipped as long as the size field
    402    // is properly specified.
    403    NS_WARNING(nsPrintfCString("Encountered unknown OBU type (%" PRIu8
    404                               ", OBU may be invalid",
    405                               static_cast<uint8_t>(temp.mType))
    406                   .get());
    407  }
    408  temp.mExtensionFlag = br.ReadBit();
    409  bool hasSizeField = br.ReadBit();
    410  br.ReadBit();  // obu_reserved_1bit
    411 
    412  // begin obu_extension_header( ) (5.3.3)
    413  if (temp.mExtensionFlag) {
    414    if (br.BitsLeft() < 8) {
    415      mResult = MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
    416                            "Not enough bits left for an OBU extension header");
    417      return;
    418    }
    419    br.ReadBits(3);  // temporal_id
    420    br.ReadBits(2);  // spatial_id
    421    br.ReadBits(3);  // extension_header_reserved_3bits
    422  }
    423  // end obu_extension_header( )
    424  // end obu_header( )
    425 
    426  // Get the size of the remaining OBU data attached to the header in
    427  // bytes.
    428  size_t size;
    429  if (hasSizeField) {
    430    if (br.BitsLeft() < 8) {
    431      mResult = MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
    432                            "Not enough bits left for an OBU size field");
    433      return;
    434    }
    435    CheckedUint32 checkedSize = br.ReadULEB128().toChecked<uint32_t>();
    436    // Spec requires that the value ULEB128 reads is (1 << 32) - 1 or below.
    437    // See leb128(): https://aomediacodec.github.io/av1-spec/#leb128
    438    if (!checkedSize.isValid()) {
    439      mResult =
    440          MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, "OBU size was too large");
    441      return;
    442    }
    443    size = checkedSize.value();
    444  } else {
    445    // This case should rarely happen in practice. To support the Annex B
    446    // format in the specification, we would have to parse every header type
    447    // to skip over them, but this allows us to at least iterate once to
    448    // retrieve the first OBU in the data.
    449    size = mData.Length() - 1 - temp.mExtensionFlag;
    450  }
    451 
    452  if (br.BitsLeft() / 8 < size) {
    453    mResult = MediaResult(
    454        NS_ERROR_DOM_MEDIA_DECODE_ERR,
    455        nsPrintfCString("Size specified by the OBU header (%zu) is more "
    456                        "than the actual remaining OBU data (%zu)",
    457                        size, br.BitsLeft() / 8)
    458            .get());
    459    return;
    460  }
    461 
    462  ASSERT_BYTE_ALIGNED(br);
    463 
    464  size_t bytes = br.BitCount() / 8;
    465  temp.mContents = mData.Subspan(mPosition + bytes, size);
    466  mCurrent = temp;
    467  // end open_bitstream_unit( )
    468 
    469  mPosition += bytes + size;
    470  resetExit.release();
    471  mResult = NS_OK;
    472 }
    473 
    474 /* static */
    475 already_AddRefed<MediaByteBuffer> AOMDecoder::CreateOBU(
    476    const OBUType aType, const Span<const uint8_t>& aContents) {
    477  RefPtr<MediaByteBuffer> buffer = new MediaByteBuffer();
    478 
    479  BitWriter bw(buffer);
    480  bw.WriteBits(0, 1);  // obu_forbidden_bit
    481  bw.WriteBits(static_cast<uint8_t>(aType), 4);
    482  bw.WriteBit(false);  // obu_extension_flag
    483  bw.WriteBit(true);   // obu_has_size_field
    484  bw.WriteBits(0, 1);  // obu_reserved_1bit
    485  ASSERT_BYTE_ALIGNED(bw);
    486  bw.WriteULEB128(aContents.Length());
    487  ASSERT_BYTE_ALIGNED(bw);
    488 
    489  buffer->AppendElements(aContents.Elements(), aContents.Length());
    490  return buffer.forget();
    491 }
    492 
/* static */
// Parses the last sequence header OBU found in aSample and fills aDestInfo
// with profile, operating points, frame size, bit depth, monochrome flag,
// color description, subsampling, and chroma sample position. Returns NS_OK
// on success, NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA when no sequence header is
// present, or a decode error for malformed data. aDestInfo is only written
// once the whole header (including trailing bits) has parsed cleanly.
MediaResult AOMDecoder::ReadSequenceHeaderInfo(
    const Span<const uint8_t>& aSample, AV1SequenceInfo& aDestInfo) {
  // We need to get the last sequence header OBU, the specification does not
  // limit a temporal unit to one sequence header.
  OBUIterator iter = ReadOBUs(aSample);
  OBUInfo seqOBU;

  while (true) {
    if (!iter.HasNext()) {
      // Pass along the error from parsing the OBU.
      MediaResult result = iter.GetResult();
      if (result.Code() != NS_OK) {
        return result;
      }
      break;
    }
    OBUInfo obu = iter.Next();
    if (obu.mType == OBUType::SequenceHeader) {
      seqOBU = obu;
    }
  }

  // seqOBU keeps its default (invalid) type when no sequence header was seen.
  if (seqOBU.mType != OBUType::SequenceHeader) {
    return NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA;
  }

  // Sequence header syntax is specified here:
  // https://aomediacodec.github.io/av1-spec/#sequence-header-obu-syntax
  // Section 5.5: Sequence header OBU syntax

  // See also Section 6.4: Sequence header OBU semantics
  // https://aomediacodec.github.io/av1-spec/#sequence-header-obu-semantics
  // This section defines all the fields used in the sequence header.
  BitReader br(seqOBU.mContents.Elements(), seqOBU.mContents.Length() * 8);
  AV1SequenceInfo tempInfo;

  // begin sequence_header_obu( )
  // https://aomediacodec.github.io/av1-spec/#general-sequence-header-obu-syntax
  tempInfo.mProfile = br.ReadBits(3);
  const bool still_picture = br.ReadBit();
  const bool reduced_still_picture_header = br.ReadBit();
  // The spec requires reduced_still_picture_header to imply still_picture.
  if (!still_picture && reduced_still_picture_header) {
    return MediaResult(
        NS_ERROR_DOM_MEDIA_DECODE_ERR,
        "reduced_still_picture is true while still_picture is false");
  }

  if (reduced_still_picture_header) {
    // Reduced headers carry exactly one implicit operating point.
    OperatingPoint op;
    op.mLayers = 0;
    op.mLevel = br.ReadBits(5);  // seq_level_idx[0]
    op.mTier = 0;
    tempInfo.mOperatingPoints.SetCapacity(1);
    tempInfo.mOperatingPoints.AppendElement(op);
  } else {
    bool decoder_model_info_present_flag;
    uint8_t operating_points_cnt_minus_1;
    uint8_t buffer_delay_length_minus_1;
    if (br.ReadBit()) {  // timing_info_present_flag
      // begin timing_info( )
      // https://aomediacodec.github.io/av1-spec/#timing-info-syntax
      br.ReadBits(32);     // num_units_in_display_tick
      br.ReadBits(32);     // time_scale
      if (br.ReadBit()) {  // equal_picture_interval
        br.ReadUE();       // num_ticks_per_picture_minus_1
      }
      // end timing_info( )

      decoder_model_info_present_flag = br.ReadBit();
      if (decoder_model_info_present_flag) {
        // begin decoder_model_info( )
        // https://aomediacodec.github.io/av1-spec/#decoder-model-info-syntax
        buffer_delay_length_minus_1 = br.ReadBits(5);
        br.ReadBits(32);  // num_units_in_decoding_tick
        br.ReadBits(5);   // buffer_removal_time_length_minus_1
        br.ReadBits(5);   // frame_presentation_time_length_minus_1
        // end decoder_model_info( )
      }
    } else {
      decoder_model_info_present_flag = false;
    }

    bool initial_display_delay_present_flag = br.ReadBit();
    operating_points_cnt_minus_1 = br.ReadBits(5);
    tempInfo.mOperatingPoints.SetCapacity(operating_points_cnt_minus_1 + 1);
    for (uint8_t i = 0; i <= operating_points_cnt_minus_1; i++) {
      OperatingPoint op;
      op.mLayers = br.ReadBits(12);  // operating_point_idc[ i ]
      op.mLevel = br.ReadBits(5);    // seq_level_idx[ i ]
      // seq_tier is only coded for levels above 7 (i.e. level 4.0+).
      op.mTier = op.mLevel > 7 ? br.ReadBits(1) : 0;
      if (decoder_model_info_present_flag) {
        if (br.ReadBit()) {  // decoder_model_present_for_this_op[ i ]
          // begin operating_parameters_info()
          // https://aomediacodec.github.io/av1-spec/#operating-parameters-info-syntax
          uint8_t n = buffer_delay_length_minus_1 + 1;
          br.ReadBits(n);  // decoder_buffer_delay[ op ]
          br.ReadBits(n);  // encoder_buffer_delay[ op ]
          br.ReadBit();    // low_delay_mode_flag[ op ]
          // end operating_parameters_info()
        }
      }
      if (initial_display_delay_present_flag) {
        if (br.ReadBit()) {  // initial_display_delay_present_for_this_op[ i ]
          br.ReadBits(4);    // initial_display_delay_minus_1[ i ]
        }
      }
      tempInfo.mOperatingPoints.AppendElement(op);
    }
  }

  // Maximum coded frame dimensions; fields are variable-width.
  uint8_t frame_width_bits_minus_1 = br.ReadBits(4);
  uint8_t frame_height_bits_minus_1 = br.ReadBits(4);
  uint32_t max_frame_width_minus_1 = br.ReadBits(frame_width_bits_minus_1 + 1);
  uint32_t max_frame_height_minus_1 =
      br.ReadBits(frame_height_bits_minus_1 + 1);
  tempInfo.mImage =
      gfx::IntSize(max_frame_width_minus_1 + 1, max_frame_height_minus_1 + 1);

  if (!reduced_still_picture_header) {
    if (br.ReadBit()) {  // frame_id_numbers_present_flag
      br.ReadBits(4);    // delta_frame_id_length_minus_2
      br.ReadBits(3);    // additional_frame_id_length_minus_1
    }
  }

  br.ReadBit();  // use_128x128_superblock
  br.ReadBit();  // enable_filter_intra
  br.ReadBit();  // enable_intra_edge_filter

  if (reduced_still_picture_header) {
    // Reduced headers imply all of these; no bits are coded.
    // enable_interintra_compound = 0
    // enable_masked_compound = 0
    // enable_warped_motion = 0
    // enable_dual_filter = 0
    // enable_order_hint = 0
    // enable_jnt_comp = 0
    // enable_ref_frame_mvs = 0
    // seq_force_screen_content_tools = SELECT_SCREEN_CONTENT_TOOLS
    // seq_force_integer_mv = SELECT_INTEGER_MV
    // OrderHintBits = 0
  } else {
    br.ReadBit();  // enable_interintra_compound
    br.ReadBit();  // enable_masked_compound
    br.ReadBit();  // enable_warped_motion
    br.ReadBit();  // enable_dual_filter

    const bool enable_order_hint = br.ReadBit();
    if (enable_order_hint) {
      br.ReadBit();  // enable_jnt_comp
      br.ReadBit();  // enable_ref_frame_mvs
    }

    uint8_t seq_choose_screen_content_tools = br.ReadBit();
    if (seq_choose_screen_content_tools) {
      seq_choose_screen_content_tools = 2;  // SELECT_SCREEN_CONTENT_TOOLS
    } else {
      // NOTE(review): per the spec this second bit is
      // seq_force_screen_content_tools; the variable is reused here for both
      // values, which matches the spec's bit layout.
      seq_choose_screen_content_tools = br.ReadBits(1);
    }
    if (seq_choose_screen_content_tools > 0) {
      if (!br.ReadBit()) {  // seq_choose_integer_mv
        br.ReadBit();       // seq_force_integer_mv
      }
    }

    if (enable_order_hint) {
      br.ReadBits(3);  // order_hint_bits_minus_1
    }
  }

  br.ReadBit();  // enable_superres
  br.ReadBit();  // enable_cdef
  br.ReadBit();  // enable_restoration

  // begin color_config( )
  // https://aomediacodec.github.io/av1-spec/#color-config-syntax
  const bool highBitDepth = br.ReadBit();
  if (tempInfo.mProfile == 2 && highBitDepth) {
    // Only Professional profile distinguishes 10- from 12-bit here.
    const bool twelveBit = br.ReadBit();
    tempInfo.mBitDepth = twelveBit ? 12 : 10;
  } else {
    tempInfo.mBitDepth = highBitDepth ? 10 : 8;
  }

  // Profile 1 (High) never codes mono_chrome; it is always color.
  tempInfo.mMonochrome = tempInfo.mProfile == 1 ? false : br.ReadBit();

  VideoColorSpace* colors = &tempInfo.mColorSpace;

  if (br.ReadBit()) {  // color_description_present_flag
    colors->mPrimaries = static_cast<ColourPrimaries>(br.ReadBits(8));
    colors->mTransfer = static_cast<TransferCharacteristics>(br.ReadBits(8));
    colors->mMatrix = static_cast<MatrixCoefficients>(br.ReadBits(8));
  } else {
    colors->mPrimaries = ColourPrimaries::CP_UNSPECIFIED;
    colors->mTransfer = TransferCharacteristics::TC_UNSPECIFIED;
    colors->mMatrix = MatrixCoefficients::MC_UNSPECIFIED;
  }

  if (tempInfo.mMonochrome) {
    // Monochrome: only a range bit is coded; chroma is absent (treated as
    // fully subsampled).
    colors->mRange = br.ReadBit() ? ColorRange::FULL : ColorRange::LIMITED;
    tempInfo.mSubsamplingX = true;
    tempInfo.mSubsamplingY = true;
    tempInfo.mChromaSamplePosition = ChromaSamplePosition::Unknown;
  } else if (colors->mPrimaries == ColourPrimaries::CP_BT709 &&
             colors->mTransfer == TransferCharacteristics::TC_SRGB &&
             colors->mMatrix == MatrixCoefficients::MC_IDENTITY) {
    // sRGB special case: full range, 4:4:4, no further bits.
    colors->mRange = ColorRange::FULL;
    tempInfo.mSubsamplingX = false;
    tempInfo.mSubsamplingY = false;
  } else {
    colors->mRange = br.ReadBit() ? ColorRange::FULL : ColorRange::LIMITED;
    // Subsampling is fixed by profile, except 12-bit Professional which
    // codes it explicitly.
    switch (tempInfo.mProfile) {
      case 0:
        tempInfo.mSubsamplingX = true;
        tempInfo.mSubsamplingY = true;
        break;
      case 1:
        tempInfo.mSubsamplingX = false;
        tempInfo.mSubsamplingY = false;
        break;
      case 2:
        if (tempInfo.mBitDepth == 12) {
          tempInfo.mSubsamplingX = br.ReadBit();
          tempInfo.mSubsamplingY =
              tempInfo.mSubsamplingX ? br.ReadBit() : false;
        } else {
          tempInfo.mSubsamplingX = true;
          tempInfo.mSubsamplingY = false;
        }
        break;
    }
    // chroma_sample_position is only coded for 4:2:0 streams.
    tempInfo.mChromaSamplePosition =
        tempInfo.mSubsamplingX && tempInfo.mSubsamplingY
            ? static_cast<ChromaSamplePosition>(br.ReadBits(2))
            : ChromaSamplePosition::Unknown;
  }

  br.ReadBit();  // separate_uv_delta_q
  // end color_config( )

  br.ReadBit();  // film_grain_params_present
  // end sequence_header_obu( )

  // begin trailing_bits( )
  // https://aomediacodec.github.io/av1-spec/#trailing-bits-syntax
  if (br.BitsLeft() > 8) {
    NS_WARNING(
        "AV1 sequence header finished reading with more than "
        "a byte of aligning bits, may indicate an error");
  }
  // Ensure that data is read correctly by checking trailing bits.
  bool correct = br.ReadBit();
  correct &= br.ReadBits(br.BitsLeft() % 8) == 0;
  while (br.BitsLeft() > 0) {
    correct &= br.ReadBits(8) == 0;
  }
  if (!correct) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       "AV1 sequence header was corrupted");
  }
  // end trailing_bits( )

  aDestInfo = tempInfo;
  return NS_OK;
}
    758 
    759 /* static */
    760 already_AddRefed<MediaByteBuffer> AOMDecoder::CreateSequenceHeader(
    761    const AV1SequenceInfo& aInfo, nsresult& aResult) {
    762  aResult = NS_ERROR_FAILURE;
    763 
    764  RefPtr<MediaByteBuffer> seqHdrBuffer = new MediaByteBuffer();
    765  BitWriter bw(seqHdrBuffer);
    766 
    767  // See 5.5.1: General sequence header OBU syntax
    768  // https://aomediacodec.github.io/av1-spec/#general-sequence-header-obu-syntax
    769  bw.WriteBits(aInfo.mProfile, 3);
    770  bw.WriteBit(false);  // still_picture
    771  bw.WriteBit(false);  // reduced_still_picture_header
    772 
    773  bw.WriteBit(false);  // timing_info_present_flag
    774  // if ( timing_info_present_flag ) {...}
    775  bw.WriteBit(false);  // initial_display_delay_present_flag
    776 
    777  size_t opCount = aInfo.mOperatingPoints.Length();
    778  bw.WriteBits(opCount - 1, 5);  // operating_points_cnt_minus_1
    779  for (size_t i = 0; i < opCount; i++) {
    780    OperatingPoint op = aInfo.mOperatingPoints[i];
    781    bw.WriteBits(op.mLayers, 12);  // operating_point_idc[ i ]
    782    bw.WriteBits(op.mLevel, 5);
    783    if (op.mLevel > 7) {
    784      bw.WriteBits(op.mTier, 1);
    785    } else {
    786      // seq_tier[ i ] = 0
    787      if (op.mTier != 0) {
    788        NS_WARNING("Operating points cannot specify tier for levels under 8.");
    789        return nullptr;
    790      }
    791    }
    792    // if ( decoder_model_info_present_flag ) {...}
    793    // else
    794    //   decoder_model_info_present_for_this_op[ i ] = 0
    795    // if ( initial_display_delay_present_flag ) {...}
    796  }
    797 
    798  if (aInfo.mImage.IsEmpty()) {
    799    NS_WARNING("Sequence header requires a valid image size");
    800    return nullptr;
    801  }
    802  auto getBits = [](int32_t value) {
    803    uint8_t bit = 0;
    804    do {
    805      value >>= 1;
    806      bit++;
    807    } while (value > 0);
    808    return bit;
    809  };
    810  uint8_t bitsW = getBits(aInfo.mImage.Width());
    811  uint8_t bitsH = getBits(aInfo.mImage.Height());
    812  bw.WriteBits(bitsW - 1, 4);
    813  bw.WriteBits(bitsH - 1, 4);
    814  bw.WriteBits(aInfo.mImage.Width() - 1, bitsW);
    815  bw.WriteBits(aInfo.mImage.Height() - 1, bitsH);
    816 
    817  // if ( !reduced_still_picture_header )
    818  bw.WriteBit(false);  // frame_id_numbers_present_flag
    819  // if ( frame_id_numbers_present_flag ) {...}
    820  // end if ( !reduced_still_picture_header )
    821 
    822  // Values below are derived from a 1080p YouTube AV1 stream.
    823  // The values are unused currently for determining the usable
    824  // decoder, and are only included to allow successful validation
    825  // of the generated sequence header.
    826 
    827  bw.WriteBit(true);  // use_128x128_superblock
    828  bw.WriteBit(true);  // enable_filter_intra
    829  bw.WriteBit(true);  // enable_intra_edge_filter
    830 
    831  // if ( !reduced_still_picture_header)
    832  bw.WriteBit(false);  // enable_interintra_compound
    833  bw.WriteBit(true);   // enable_masked_compound
    834  bw.WriteBit(true);   // enable_warped_motion
    835  bw.WriteBit(false);  // enable_dual_filter
    836 
    837  bw.WriteBit(true);  // enable_order_hint
    838  // if ( enable_order_hint )
    839  bw.WriteBit(false);  // enable_jnt_comp
    840  bw.WriteBit(true);   // enable_ref_frame_mvs
    841  // end if ( enable_order_hint )
    842 
    843  bw.WriteBit(true);  // seq_choose_screen_content_tools
    844  // if ( seq_choose_screen_content_tools )
    845  //   seq_force_screen_content_tools = SELECT_SCREEN_CONTENT_TOOLS (2)
    846  // else
    847  //   seq_force_screen_content_tools = f(1)
    848 
    849  // if ( seq_force_screen_content_tools > 0 )
    850  bw.WriteBit(true);  // seq_choose_integer_mv
    851  // if ( !seq_choose_integer_mv ) {...}
    852  // end if ( seq_force_screen_content_tools > 0 )
    853 
    854  // if ( enable_order_hint )
    855  bw.WriteBits(6, 3);  // order_hint_bits_minus_1
    856  // end if ( enable_order_hint )
    857  // end if ( !reduced_still_picture_header )
    858 
    859  bw.WriteBit(false);  // enable_superres
    860  bw.WriteBit(false);  // enable_cdef
    861  bw.WriteBit(true);   // enable_restoration
    862 
    863  // Begin color_config( )
    864  // https://aomediacodec.github.io/av1-spec/#color-config-syntax
    865  bool highBitDepth = aInfo.mBitDepth >= 10;
    866  bw.WriteBit(highBitDepth);
    867 
    868  if (aInfo.mBitDepth == 12 && aInfo.mProfile != 2) {
    869    NS_WARNING("Profile must be 2 for 12-bit");
    870    return nullptr;
    871  }
    872  if (aInfo.mProfile == 2 && highBitDepth) {
    873    bw.WriteBit(aInfo.mBitDepth == 12);  // twelve_bit
    874  }
    875 
    876  if (aInfo.mMonochrome && aInfo.mProfile == 1) {
    877    NS_WARNING("Profile 1 does not support monochrome");
    878    return nullptr;
    879  }
    880  if (aInfo.mProfile != 1) {
    881    bw.WriteBit(aInfo.mMonochrome);
    882  }
    883 
    884  const VideoColorSpace colors = aInfo.mColorSpace;
    885  bool colorsPresent =
    886      colors.mPrimaries != ColourPrimaries::CP_UNSPECIFIED ||
    887      colors.mTransfer != TransferCharacteristics::TC_UNSPECIFIED ||
    888      colors.mMatrix != MatrixCoefficients::MC_UNSPECIFIED;
    889  bw.WriteBit(colorsPresent);
    890 
    891  if (colorsPresent) {
    892    bw.WriteBits(static_cast<uint8_t>(colors.mPrimaries), 8);
    893    bw.WriteBits(static_cast<uint8_t>(colors.mTransfer), 8);
    894    bw.WriteBits(static_cast<uint8_t>(colors.mMatrix), 8);
    895  }
    896 
    897  if (aInfo.mMonochrome) {
    898    if (!aInfo.mSubsamplingX || !aInfo.mSubsamplingY) {
    899      NS_WARNING("Monochrome requires 4:0:0 subsampling");
    900      return nullptr;
    901    }
    902    if (aInfo.mChromaSamplePosition != ChromaSamplePosition::Unknown) {
    903      NS_WARNING(
    904          "Cannot specify chroma sample position on monochrome sequence");
    905      return nullptr;
    906    }
    907    bw.WriteBit(colors.mRange == ColorRange::FULL);
    908  } else if (colors.mPrimaries == ColourPrimaries::CP_BT709 &&
    909             colors.mTransfer == TransferCharacteristics::TC_SRGB &&
    910             colors.mMatrix == MatrixCoefficients::MC_IDENTITY) {
    911    if (aInfo.mSubsamplingX || aInfo.mSubsamplingY ||
    912        colors.mRange != ColorRange::FULL ||
    913        aInfo.mChromaSamplePosition != ChromaSamplePosition::Unknown) {
    914      NS_WARNING("sRGB requires 4:4:4 subsampling with full color range");
    915      return nullptr;
    916    }
    917  } else {
    918    bw.WriteBit(colors.mRange == ColorRange::FULL);
    919    switch (aInfo.mProfile) {
    920      case 0:
    921        if (!aInfo.mSubsamplingX || !aInfo.mSubsamplingY) {
    922          NS_WARNING("Main Profile requires 4:2:0 subsampling");
    923          return nullptr;
    924        }
    925        break;
    926      case 1:
    927        if (aInfo.mSubsamplingX || aInfo.mSubsamplingY) {
    928          NS_WARNING("High Profile requires 4:4:4 subsampling");
    929          return nullptr;
    930        }
    931        break;
    932      case 2:
    933        if (aInfo.mBitDepth == 12) {
    934          bw.WriteBit(aInfo.mSubsamplingX);
    935          if (aInfo.mSubsamplingX) {
    936            bw.WriteBit(aInfo.mSubsamplingY);
    937          }
    938        } else {
    939          if (!aInfo.mSubsamplingX || aInfo.mSubsamplingY) {
    940            NS_WARNING(
    941                "Professional Profile < 12-bit requires 4:2:2 subsampling");
    942            return nullptr;
    943          }
    944        }
    945        break;
    946    }
    947 
    948    if (aInfo.mSubsamplingX && aInfo.mSubsamplingY) {
    949      bw.WriteBits(static_cast<uint8_t>(aInfo.mChromaSamplePosition), 2);
    950    } else {
    951      if (aInfo.mChromaSamplePosition != ChromaSamplePosition::Unknown) {
    952        NS_WARNING("Only 4:2:0 subsampling can specify chroma position");
    953        return nullptr;
    954      }
    955    }
    956  }
    957 
    958  bw.WriteBit(false);  // separate_uv_delta_q
    959  // end color_config( )
    960 
    961  bw.WriteBit(true);  // film_grain_params_present
    962 
    963  // trailing_bits( )
    964  // https://aomediacodec.github.io/av1-spec/#trailing-bits-syntax
    965  size_t numTrailingBits = 8 - (bw.BitCount() % 8);
    966  bw.WriteBit(true);
    967  bw.WriteBits(0, numTrailingBits - 1);
    968  ASSERT_BYTE_ALIGNED(bw);
    969 
    970  Span<const uint8_t> seqHdr(seqHdrBuffer->Elements(), seqHdrBuffer->Length());
    971  aResult = NS_OK;
    972  return CreateOBU(OBUType::SequenceHeader, seqHdr);
    973 }
    974 
    975 /* static */
    976 void AOMDecoder::TryReadAV1CBox(const MediaByteBuffer* aBox,
    977                                AV1SequenceInfo& aDestInfo,
    978                                MediaResult& aSeqHdrResult) {
    979  // See av1C specification:
    980  // https://aomediacodec.github.io/av1-isobmff/#av1codecconfigurationbox-section
    981  BitReader br(aBox);
    982 
    983  br.ReadBits(8);  // marker, version
    984 
    985  aDestInfo.mProfile = br.ReadBits(3);
    986 
    987  OperatingPoint op;
    988  op.mLevel = br.ReadBits(5);
    989  op.mTier = br.ReadBits(1);
    990  aDestInfo.mOperatingPoints.AppendElement(op);
    991 
    992  bool highBitDepth = br.ReadBit();
    993  bool twelveBit = br.ReadBit();
    994  aDestInfo.mBitDepth = highBitDepth ? twelveBit ? 12 : 10 : 8;
    995 
    996  aDestInfo.mMonochrome = br.ReadBit();
    997  aDestInfo.mSubsamplingX = br.ReadBit();
    998  aDestInfo.mSubsamplingY = br.ReadBit();
    999  aDestInfo.mChromaSamplePosition =
   1000      static_cast<ChromaSamplePosition>(br.ReadBits(2));
   1001 
   1002  br.ReadBits(3);  // reserved
   1003  br.ReadBit();    // initial_presentation_delay_present
   1004  br.ReadBits(4);  // initial_presentation_delay_minus_one or reserved
   1005 
   1006  ASSERT_BYTE_ALIGNED(br);
   1007 
   1008  size_t skipBytes = br.BitCount() / 8;
   1009  Span<const uint8_t> obus(aBox->Elements() + skipBytes,
   1010                           aBox->Length() - skipBytes);
   1011 
   1012  // Minimum possible OBU header size
   1013  if (obus.Length() < 1) {
   1014    aSeqHdrResult = NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA;
   1015    return;
   1016  }
   1017 
   1018  // If present, the sequence header will be redundant to some values, but any
   1019  // values stored in it should be treated as more accurate than av1C.
   1020  aSeqHdrResult = ReadSequenceHeaderInfo(obus, aDestInfo);
   1021 }
   1022 
   1023 /* static */
   1024 void AOMDecoder::WriteAV1CBox(const AV1SequenceInfo& aInfo,
   1025                              MediaByteBuffer* aDestBox, bool& aHasSeqHdr) {
   1026  aHasSeqHdr = false;
   1027 
   1028  BitWriter bw(aDestBox);
   1029 
   1030  bw.WriteBit(true);   // marker
   1031  bw.WriteBits(1, 7);  // version
   1032 
   1033  bw.WriteBits(aInfo.mProfile, 3);
   1034 
   1035  MOZ_DIAGNOSTIC_ASSERT(aInfo.mOperatingPoints.Length() > 0);
   1036  bw.WriteBits(aInfo.mOperatingPoints[0].mLevel, 5);
   1037  bw.WriteBits(aInfo.mOperatingPoints[0].mTier, 1);
   1038 
   1039  bw.WriteBit(aInfo.mBitDepth >= 10);  // high_bitdepth
   1040  bw.WriteBit(aInfo.mBitDepth == 12);  // twelve_bit
   1041 
   1042  bw.WriteBit(aInfo.mMonochrome);
   1043  bw.WriteBit(aInfo.mSubsamplingX);
   1044  bw.WriteBit(aInfo.mSubsamplingY);
   1045  bw.WriteBits(static_cast<uint8_t>(aInfo.mChromaSamplePosition), 2);
   1046 
   1047  bw.WriteBits(0, 3);  // reserved
   1048  bw.WriteBit(false);  // initial_presentation_delay_present
   1049  bw.WriteBits(0, 4);  // initial_presentation_delay_minus_one or reserved
   1050 
   1051  ASSERT_BYTE_ALIGNED(bw);
   1052 
   1053  nsresult rv;
   1054  RefPtr<MediaByteBuffer> seqHdrBuffer = CreateSequenceHeader(aInfo, rv);
   1055 
   1056  if (NS_SUCCEEDED(rv)) {
   1057    aDestBox->AppendElements(seqHdrBuffer->Elements(), seqHdrBuffer->Length());
   1058    aHasSeqHdr = true;
   1059  }
   1060 }
   1061 
   1062 /* static */
   1063 Maybe<AOMDecoder::AV1SequenceInfo> AOMDecoder::CreateSequenceInfoFromCodecs(
   1064    const nsAString& aCodec) {
   1065  AV1SequenceInfo info;
   1066  OperatingPoint op;
   1067  uint8_t chromaSamplePosition;
   1068  if (!ExtractAV1CodecDetails(aCodec, info.mProfile, op.mLevel, op.mTier,
   1069                              info.mBitDepth, info.mMonochrome,
   1070                              info.mSubsamplingX, info.mSubsamplingY,
   1071                              chromaSamplePosition, info.mColorSpace)) {
   1072    return Nothing();
   1073  }
   1074  info.mOperatingPoints.AppendElement(op);
   1075  info.mChromaSamplePosition =
   1076      static_cast<ChromaSamplePosition>(chromaSamplePosition);
   1077  return Some(info);
   1078 }
   1079 
   1080 /* static */
   1081 bool AOMDecoder::SetVideoInfo(VideoInfo* aDestInfo, const nsAString& aCodec) {
   1082  Maybe<AV1SequenceInfo> info = CreateSequenceInfoFromCodecs(aCodec);
   1083  if (info.isNothing()) {
   1084    return false;
   1085  }
   1086 
   1087  if (!aDestInfo->mImage.IsEmpty()) {
   1088    info->mImage = aDestInfo->mImage;
   1089  }
   1090 
   1091  RefPtr<MediaByteBuffer> extraData = new MediaByteBuffer();
   1092  bool hasSeqHdr;
   1093  WriteAV1CBox(info.value(), extraData, hasSeqHdr);
   1094  aDestInfo->mExtraData = extraData;
   1095  return true;
   1096 }
   1097 
   1098 }  // namespace mozilla
   1099 #undef LOG
   1100 #undef ASSERT_BYTE_ALIGNED