tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

TestMediaDataEncoder.cpp (42434B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim: set ts=2 et sw=2 tw=80: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include <algorithm>
      8 
      9 #include "AnnexB.h"
     10 #include "BufferReader.h"
     11 #include "H264.h"
     12 #include "ImageContainer.h"
     13 #include "PEMFactory.h"
     14 #include "TimeUnits.h"
     15 #include "VPXDecoder.h"
     16 #include "VideoUtils.h"
     17 #include "gtest/gtest.h"
     18 #include "mozilla/AbstractThread.h"
     19 #include "mozilla/Preferences.h"
     20 #include "mozilla/SpinEventLoopUntil.h"
     21 #include "mozilla/gtest/WaitFor.h"
     22 #include "mozilla/media/MediaUtils.h"  // For media::Await
     23 
#ifdef MOZ_WIDGET_ANDROID
// Create/init a H.264 encoder and check if it's SW.
// Returns from the *calling test* when only a software encoder is available,
// so HW-only scenarios (e.g. 4K) are skipped on Android SW builds.
#  define SKIP_IF_ANDROID_SW()                                                \
    do {                                                                      \
      RefPtr<MediaDataEncoder> e = CreateH264Encoder(                         \
          Usage::Record,                                                      \
          EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),       \
          kImageSize, ScalabilityMode::None, AsVariant(kH264SpecificAnnexB)); \
      if (EnsureInit(e)) {                                                    \
        nsCString dummy;                                                      \
        bool isSW = !e->IsHardwareAccelerated(dummy);                         \
        WaitForShutdown(e);                                                   \
        if (isSW) {                                                           \
          return;                                                             \
        }                                                                     \
      }                                                                       \
    } while (0)
#else
// Non-Android platforms: nothing to skip.
#  define SKIP_IF_ANDROID_SW() \
    do {                       \
    } while (0)
#endif
     46 
// Run `test` only when at least one platform encoder module reports support
// for `codecType`; otherwise the test body is silently skipped.
#define RUN_IF_SUPPORTED(codecType, test)         \
  do {                                            \
    RefPtr<PEMFactory> f(new PEMFactory());       \
    if (!f->SupportsCodec(codecType).isEmpty()) { \
      test();                                     \
    }                                             \
  } while (0)
     54 
// Evaluate `expr` (a Result-yielding expression); on error, record a gtest
// failure via EXPECT_TRUE(false) and return from the enclosing void function.
// On success, yield the unwrapped value (GCC/Clang statement-expression).
#define GET_OR_RETURN_ON_ERROR(expr)                      \
  __extension__({                                         \
    auto mozTryVarTempResult = ::mozilla::ToResult(expr); \
    if (MOZ_UNLIKELY(mozTryVarTempResult.isErr())) {      \
      EXPECT_TRUE(false);                                 \
      return;                                             \
    }                                                     \
    mozTryVarTempResult.unwrap();                         \
  })
     64 
// Shared test parameters.
#define BLOCK_SIZE 64  // chessboard square size, in pixels
#define NUM_FRAMES 150UL
#define FRAME_RATE 30
#define FRAME_DURATION (1000000 / FRAME_RATE)  // per-frame duration in us
#define BIT_RATE (1000 * 1000)  // 1Mbps
#define BIT_RATE_MODE BitrateMode::Variable
#define KEYFRAME_INTERVAL FRAME_RATE  // 1 keyframe per second
     72 
using namespace mozilla;

// Frame sizes exercised by the tests: SD and 4K/UHD.
static gfx::IntSize kImageSize(640, 480);
static gfx::IntSize kImageSize4K(3840, 2160);
// Set codec to avc1.42001E - Base profile, constraint 0, level 30.
MOZ_RUNINIT const H264Specific kH264SpecificAnnexB(H264_PROFILE_BASE,
                                                   H264_LEVEL::H264_LEVEL_3,
                                                   H264BitStreamFormat::ANNEXB);
// Same profile/level, but AVCC (length-prefixed NALU) bitstream format.
MOZ_RUNINIT const H264Specific kH264SpecificAVCC(H264_PROFILE_BASE,
                                                 H264_LEVEL::H264_LEVEL_3,
                                                 H264BitStreamFormat::AVC);
     84 
     85 class MediaDataEncoderTest : public testing::Test {
     86 protected:
     87  void SetUp() override {
     88    mData.Init(kImageSize);
     89    mData4K.Init(kImageSize4K);
     90  }
     91 
     92  void TearDown() override {
     93    mData.Deinit();
     94    mData4K.Deinit();
     95  }
     96 
     97 public:
     98  struct FrameSource final {
     99    gfx::IntSize mSize = gfx::IntSize(0, 0);
    100    layers::PlanarYCbCrData mYUV;
    101    UniquePtr<uint8_t[]> mBuffer;
    102    RefPtr<layers::BufferRecycleBin> mRecycleBin;
    103    int16_t mColorStep = 4;
    104 
    105    gfx::IntSize GetSize() const { return mSize; }
    106 
    107    void Init(const gfx::IntSize& aSize) {
    108      mSize = aSize;
    109      mYUV.mPictureRect = gfx::IntRect(0, 0, aSize.width, aSize.height);
    110      mYUV.mYStride = aSize.width;
    111      mYUV.mCbCrStride = (aSize.width + 1) / 2;
    112      mYUV.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
    113      auto ySize = mYUV.YDataSize();
    114      auto cbcrSize = mYUV.CbCrDataSize();
    115      size_t bufferSize =
    116          mYUV.mYStride * ySize.height + 2 * mYUV.mCbCrStride * cbcrSize.height;
    117      mBuffer = MakeUnique<uint8_t[]>(bufferSize);
    118      std::fill_n(mBuffer.get(), bufferSize, 0x7F);
    119      mYUV.mYChannel = mBuffer.get();
    120      mYUV.mCbChannel = mYUV.mYChannel + mYUV.mYStride * ySize.height;
    121      mYUV.mCrChannel = mYUV.mCbChannel + mYUV.mCbCrStride * cbcrSize.height;
    122      mYUV.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
    123      mRecycleBin = new layers::BufferRecycleBin();
    124    }
    125 
    126    void Deinit() {
    127      mBuffer.reset();
    128      mRecycleBin = nullptr;
    129      mSize = gfx::IntSize(0, 0);
    130    }
    131 
    132    already_AddRefed<MediaData> GetFrame(const size_t aIndex) {
    133      Draw(aIndex);
    134      RefPtr<layers::PlanarYCbCrImage> img =
    135          new layers::RecyclingPlanarYCbCrImage(mRecycleBin);
    136      img->CopyData(mYUV);
    137      RefPtr<MediaData> frame = VideoData::CreateFromImage(
    138          kImageSize, 0,
    139          // The precise time unit should be media::TimeUnit(1, FRAME_RATE)
    140          // instead of media::TimeUnit(FRAME_DURATION, USECS_PER_S)
    141          // (FRAME_DURATION microseconds), but this setting forces us to take
    142          // care some potential rounding issue, e.g., when converting to a time
    143          // unit based in FRAME_RATE by TimeUnit::ToTicksAtRate(FRAME_RATE),
    144          // the time unit would be calculated from 999990 / 1000000, which
    145          // could be zero.
    146          media::TimeUnit::FromMicroseconds(AssertedCast<int64_t>(aIndex) *
    147                                            FRAME_DURATION),
    148          media::TimeUnit::FromMicroseconds(FRAME_DURATION), img,
    149          (aIndex & 0xF) == 0,
    150          media::TimeUnit::FromMicroseconds(AssertedCast<int64_t>(aIndex) *
    151                                            FRAME_DURATION));
    152      return frame.forget();
    153    }
    154 
    155    void DrawChessboard(uint8_t* aAddr, const size_t aWidth,
    156                        const size_t aHeight, const size_t aOffset) {
    157      uint8_t pixels[2][BLOCK_SIZE];
    158      size_t x = aOffset % BLOCK_SIZE;
    159      if ((aOffset / BLOCK_SIZE) & 1) {
    160        x = BLOCK_SIZE - x;
    161      }
    162      for (size_t i = 0; i < x; i++) {
    163        pixels[0][i] = 0x00;
    164        pixels[1][i] = 0xFF;
    165      }
    166      for (size_t i = x; i < BLOCK_SIZE; i++) {
    167        pixels[0][i] = 0xFF;
    168        pixels[1][i] = 0x00;
    169      }
    170 
    171      uint8_t* p = aAddr;
    172      for (size_t row = 0; row < aHeight; row++) {
    173        for (size_t col = 0; col < aWidth; col += BLOCK_SIZE) {
    174          memcpy(p, pixels[((row / BLOCK_SIZE) + (col / BLOCK_SIZE)) % 2],
    175                 BLOCK_SIZE);
    176          p += BLOCK_SIZE;
    177        }
    178      }
    179    }
    180 
    181    void Draw(const size_t aIndex) {
    182      auto ySize = mYUV.YDataSize();
    183      DrawChessboard(mYUV.mYChannel, ySize.width, ySize.height, aIndex << 1);
    184      int16_t color = AssertedCast<int16_t>(mYUV.mCbChannel[0] + mColorStep);
    185      if (color > 255 || color < 0) {
    186        mColorStep = AssertedCast<int16_t>(-mColorStep);
    187        color = AssertedCast<int16_t>(mYUV.mCbChannel[0] + mColorStep);
    188      }
    189 
    190      size_t size = (mYUV.mCrChannel - mYUV.mCbChannel);
    191 
    192      std::fill_n(mYUV.mCbChannel, size, static_cast<uint8_t>(color));
    193      std::fill_n(mYUV.mCrChannel, size, 0xFF - static_cast<uint8_t>(color));
    194    }
    195  };
    196 
    197 public:
    198  FrameSource mData;
    199  FrameSource mData4K;
    200 };
    201 
    202 already_AddRefed<MediaDataEncoder> CreateVideoEncoder(
    203    CodecType aCodec, Usage aUsage, EncoderConfig::SampleFormat aFormat,
    204    gfx::IntSize aSize, ScalabilityMode aScalabilityMode,
    205    const EncoderConfig::CodecSpecific& aSpecific) {
    206  RefPtr<PEMFactory> f(new PEMFactory());
    207 
    208  if (f->SupportsCodec(aCodec).isEmpty()) {
    209    return nullptr;
    210  }
    211 
    212  const EncoderConfig config(
    213      aCodec, aSize, aUsage, aFormat, FRAME_RATE /* FPS */,
    214      KEYFRAME_INTERVAL /* keyframe interval */, BIT_RATE /* bitrate */, 0, 0,
    215      BIT_RATE_MODE, HardwarePreference::None /* hardware preference */,
    216      aScalabilityMode, aSpecific);
    217  if (f->Supports(config).isEmpty()) {
    218    return nullptr;
    219  }
    220 
    221  const RefPtr<TaskQueue> taskQueue(
    222      TaskQueue::Create(GetMediaThreadPool(MediaThreadType::PLATFORM_ENCODER),
    223                        "TestMediaDataEncoder"));
    224  RefPtr<MediaDataEncoder> e = f->CreateEncoder(config, taskQueue);
    225  return e.forget();
    226 }
    227 
    228 static bool EnsureInit(const RefPtr<MediaDataEncoder>& aEncoder) {
    229  if (!aEncoder) {
    230    return false;
    231  }
    232  auto r = WaitFor(aEncoder->Init());
    233  return r.isOk();
    234 }
    235 
    236 void WaitForShutdown(const RefPtr<MediaDataEncoder>& aEncoder) {
    237  MOZ_RELEASE_ASSERT(aEncoder);
    238 
    239  Maybe<bool> result;
    240  // media::Await() supports exclusive promises only, but ShutdownPromise is
    241  // not.
    242  aEncoder->Shutdown()->Then(
    243      AbstractThread::MainThread(), __func__,
    244      [&result](bool rv) {
    245        EXPECT_TRUE(rv);
    246        result = Some(true);
    247      },
    248      []() { FAIL() << "Shutdown should never be rejected"; });
    249  SpinEventLoopUntil("TestMediaDataEncoder.cpp:WaitForShutdown"_ns,
    250                     [&result]() { return result; });
    251 }
    252 
    253 static Result<MediaDataEncoder::EncodedData, MediaResult> Drain(
    254    const RefPtr<MediaDataEncoder>& aEncoder) {
    255  MOZ_RELEASE_ASSERT(aEncoder);
    256 
    257  size_t pending = 0;
    258  MediaDataEncoder::EncodedData output;
    259  do {
    260    MediaDataEncoder::EncodedData data = MOZ_TRY(WaitFor(aEncoder->Drain()));
    261    pending = data.Length();
    262    output.AppendElements(std::move(data));
    263  } while (pending > 0);
    264 
    265  return output;
    266 }
    267 
// Outcome of an encode run: everything the encoder produced, plus how many
// of the *input* frames were flagged as keyframes.
struct EncodeResult {
  MediaDataEncoder::EncodedData mEncodedData;
  size_t mInputKeyframes = 0;
};
    272 static Result<EncodeResult, MediaResult> EncodeWithInputStats(
    273    const RefPtr<MediaDataEncoder>& aEncoder, const size_t aNumFrames,
    274    MediaDataEncoderTest::FrameSource& aSource) {
    275  MOZ_RELEASE_ASSERT(aEncoder);
    276 
    277  size_t inputKeyframes = 0;
    278  MediaDataEncoder::EncodedData output;
    279  for (size_t i = 0; i < aNumFrames; i++) {
    280    RefPtr<MediaData> frame = aSource.GetFrame(i);
    281    if (frame->mKeyframe) {
    282      inputKeyframes++;
    283    }
    284    output.AppendElements(MOZ_TRY(WaitFor(aEncoder->Encode(frame))));
    285  }
    286  output.AppendElements(std::move(MOZ_TRY(Drain(aEncoder))));
    287  return EncodeResult{std::move(output), inputKeyframes};
    288 }
    289 
    290 static Result<MediaDataEncoder::EncodedData, MediaResult> Encode(
    291    const RefPtr<MediaDataEncoder>& aEncoder, const size_t aNumFrames,
    292    MediaDataEncoderTest::FrameSource& aSource) {
    293  EncodeResult r = MOZ_TRY(EncodeWithInputStats(aEncoder, aNumFrames, aSource));
    294  return std::move(r.mEncodedData);
    295 }
    296 
    297 static Result<EncodeResult, MediaResult> EncodeBatchWithInputStats(
    298    const RefPtr<MediaDataEncoder>& aEncoder, const size_t aTotalNumFrames,
    299    MediaDataEncoderTest::FrameSource& aSource, const size_t aBatchSize) {
    300  if (aBatchSize == 0 || aTotalNumFrames == 0) {
    301    return Err(MediaResult(
    302        NS_ERROR_INVALID_ARG,
    303        "Batch size and total number of frames must be greater than 0"));
    304  }
    305 
    306  size_t inputKeyframes = 0;
    307  MediaDataEncoder::EncodedData output;
    308  nsTArray<RefPtr<MediaData>> frames;
    309  for (size_t i = 0; i < aTotalNumFrames; i++) {
    310    RefPtr<MediaData> frame = aSource.GetFrame(i);
    311    frames.AppendElement(frame);
    312    if (frame->mKeyframe) {
    313      inputKeyframes++;
    314    }
    315    if (frames.Length() == aBatchSize || i == aTotalNumFrames - 1) {
    316      nsTArray<RefPtr<MediaData>> batch = std::move(frames);
    317      output.AppendElements(
    318          MOZ_TRY(WaitFor(aEncoder->Encode(std::move(batch)))));
    319    }
    320  }
    321  MOZ_RELEASE_ASSERT(frames.IsEmpty());
    322 
    323  output.AppendElements(std::move(MOZ_TRY(Drain(aEncoder))));
    324  return EncodeResult{std::move(output), inputKeyframes};
    325 }
    326 
    327 template <typename T>
    328 size_t GetKeyFrameCount(const T& aData) {
    329  size_t count = 0;
    330  for (auto sample : aData) {
    331    if (sample->mKeyframe) {
    332      count++;
    333    }
    334  }
    335  return count;
    336 }
    337 
// Return the NALU length-prefix width (in bytes) declared by the sample's
// AVCC configuration, or the parse error if the sample has no valid config.
Result<uint8_t, nsresult> GetNALUSize(const mozilla::MediaRawData* aSample) {
  return AVCCConfig::Parse(aSample).map(
      [](AVCCConfig config) { return config.NALUSize(); });
}
    342 
// Walk the sample as a sequence of length-prefixed NAL units and verify the
// length fields are consistent with the buffer. aNALUSize is the width of
// each length prefix in bytes; values outside 1-4 yield NS_ERROR_INVALID_ARG.
Result<Ok, nsresult> IsValidAVCC(const mozilla::MediaRawData* aSample,
                                 uint8_t aNALUSize) {
  BufferReader reader(aSample->Data(), aSample->Size());
  while (reader.Remaining() >= aNALUSize) {
    uint32_t nalLen;
    // Read the length prefix at the advertised width.
    switch (aNALUSize) {
      case 1:
        nalLen = MOZ_TRY(reader.ReadU8());
        break;
      case 2:
        nalLen = MOZ_TRY(reader.ReadU16());
        break;
      case 3:
        nalLen = MOZ_TRY(reader.ReadU24());
        break;
      case 4:
        nalLen = MOZ_TRY(reader.ReadU32());
        break;
      default:
        return Err(NS_ERROR_INVALID_ARG);
    }
    // The NAL payload itself must fit in the remaining buffer.
    const uint8_t* p = reader.Read(nalLen);
    if (!p) {
      return Err(NS_ERROR_ILLEGAL_VALUE);
    }
  }
  return Ok();
}
    371 
// CreateVideoEncoder specialized for H.264, with test-friendly defaults
// (Realtime usage, YUV420P input, SD size, no scalability, AnnexB output).
static already_AddRefed<MediaDataEncoder> CreateH264Encoder(
    Usage aUsage = Usage::Realtime,
    EncoderConfig::SampleFormat aFormat =
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    gfx::IntSize aSize = kImageSize,
    ScalabilityMode aScalabilityMode = ScalabilityMode::None,
    const EncoderConfig::CodecSpecific& aSpecific =
        AsVariant(kH264SpecificAnnexB)) {
  return CreateVideoEncoder(CodecType::H264, aUsage, aFormat, aSize,
                            aScalabilityMode, aSpecific);
}
    383 
// An H.264 encoder can be created with the default configuration.
TEST_F(MediaDataEncoderTest, H264Create) {
  RUN_IF_SUPPORTED(CodecType::H264, []() {
    RefPtr<MediaDataEncoder> e = CreateH264Encoder();
    EXPECT_TRUE(e);
    WaitForShutdown(e);
  });
}
    391 
// H.264 encoder creation requires codec-specific data; init succeeds when
// it is supplied.
TEST_F(MediaDataEncoderTest, H264Inits) {
  RUN_IF_SUPPORTED(CodecType::H264, []() {
    // w/o codec specific: should fail for h264.
    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
        Usage::Realtime,
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
        kImageSize, ScalabilityMode::None, AsVariant(void_t{}));
    EXPECT_FALSE(e);

    // w/ codec specific
    e = CreateH264Encoder();
    EXPECT_TRUE(EnsureInit(e));
    WaitForShutdown(e);
  });
}
    407 
    408 static void H264EncodesTest(Usage aUsage,
    409                            const EncoderConfig::CodecSpecific& aSpecific,
    410                            MediaDataEncoderTest::FrameSource& aFrameSource) {
    411  ASSERT_TRUE(aSpecific.is<H264Specific>());
    412  ASSERT_TRUE(aSpecific.as<H264Specific>().mFormat ==
    413                  H264BitStreamFormat::ANNEXB ||
    414              aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC);
    415 
    416  RUN_IF_SUPPORTED(CodecType::H264, [&]() {
    417    bool isAVCC =
    418        aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC;
    419 
    420    // Encode one frame and output in AnnexB/AVCC format.
    421    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
    422        aUsage, EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    423        aFrameSource.GetSize(), ScalabilityMode::None, aSpecific);
    424    EXPECT_TRUE(EnsureInit(e));
    425    MediaDataEncoder::EncodedData output =
    426        GET_OR_RETURN_ON_ERROR(Encode(e, 1UL, aFrameSource));
    427    EXPECT_EQ(output.Length(), 1UL);
    428    EXPECT_TRUE(isAVCC ? AnnexB::IsAVCC(output[0])
    429                       : AnnexB::IsAnnexB(*output[0]));
    430    WaitForShutdown(e);
    431    output.Clear();
    432 
    433    // Encode multiple frames and output in AnnexB/AVCC format.
    434    e = CreateH264Encoder(
    435        aUsage, EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    436        aFrameSource.GetSize(), ScalabilityMode::None, aSpecific);
    437    EXPECT_TRUE(EnsureInit(e));
    438    const bool is4KOrLarger = kImageSize4K <= aFrameSource.GetSize();
    439    const size_t numFrames = NUM_FRAMES / (is4KOrLarger ? 3 : 1);
    440    EncodeResult r = GET_OR_RETURN_ON_ERROR(
    441        EncodeWithInputStats(e, numFrames, aFrameSource));
    442    output = std::move(r.mEncodedData);
    443    if (aUsage == Usage::Realtime && is4KOrLarger) {
    444      // Realtime encoding may drop frames for large frame sizes.
    445      EXPECT_LE(output.Length(), numFrames);
    446    } else {
    447      EXPECT_EQ(output.Length(), numFrames);
    448    }
    449    EXPECT_GE(GetKeyFrameCount(output), r.mInputKeyframes);
    450    if (isAVCC) {
    451      uint8_t naluSize = GetNALUSize(output[0]).unwrapOr(0);
    452      EXPECT_GT(naluSize, 0);
    453      EXPECT_LE(naluSize, 4);
    454      for (auto frame : output) {
    455        if (frame->mExtraData && !frame->mExtraData->IsEmpty()) {
    456          naluSize = GetNALUSize(frame).unwrapOr(0);
    457          EXPECT_GT(naluSize, 0);
    458          EXPECT_LE(naluSize, 4);
    459        }
    460        EXPECT_TRUE(IsValidAVCC(frame, naluSize).isOk());
    461      }
    462    } else {
    463      for (auto frame : output) {
    464        EXPECT_TRUE(AnnexB::IsAnnexB(*frame));
    465      }
    466    }
    467 
    468    WaitForShutdown(e);
    469  });
    470 };
    471 
// H264EncodesTest coverage for each combination of usage (Record/Realtime),
// bitstream format (AnnexB/AVCC), and frame size (SD/4K).
TEST_F(MediaDataEncoderTest, H264EncodesAnnexBRecord) {
  H264EncodesTest(Usage::Record, AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodesAnnexBRealtime) {
  H264EncodesTest(Usage::Realtime, AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodesAVCCRecord) {
  H264EncodesTest(Usage::Record, AsVariant(kH264SpecificAVCC), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodesAVCCRealtime) {
  H264EncodesTest(Usage::Realtime, AsVariant(kH264SpecificAVCC), mData);
}

TEST_F(MediaDataEncoderTest, H264Encodes4KAnnexBRecord) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodesTest(Usage::Record, AsVariant(kH264SpecificAnnexB), mData4K);
}

TEST_F(MediaDataEncoderTest, H264Encodes4KAnnexBRealtime) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodesTest(Usage::Realtime, AsVariant(kH264SpecificAnnexB), mData4K);
}

TEST_F(MediaDataEncoderTest, H264Encodes4KAVCCRecord) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodesTest(Usage::Record, AsVariant(kH264SpecificAVCC), mData4K);
}

TEST_F(MediaDataEncoderTest, H264Encodes4KAVCCRealtime) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodesTest(Usage::Realtime, AsVariant(kH264SpecificAVCC), mData4K);
}
    507 
    508 static void H264EncodeBatchTest(
    509    Usage aUsage, const EncoderConfig::CodecSpecific& aSpecific,
    510    MediaDataEncoderTest::FrameSource& aFrameSource) {
    511  ASSERT_TRUE(aSpecific.is<H264Specific>());
    512  ASSERT_TRUE(aSpecific.as<H264Specific>().mFormat ==
    513                  H264BitStreamFormat::ANNEXB ||
    514              aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC);
    515 
    516  RUN_IF_SUPPORTED(CodecType::H264, [&]() {
    517    bool isAVCC =
    518        aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC;
    519 
    520    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
    521        aUsage, EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    522        aFrameSource.GetSize(), ScalabilityMode::None, aSpecific);
    523    EXPECT_TRUE(EnsureInit(e));
    524 
    525    const bool is4KOrLarger = kImageSize4K <= aFrameSource.GetSize();
    526    const size_t numFrames = NUM_FRAMES / (is4KOrLarger ? 3 : 1);
    527    constexpr size_t batchSize = 6;
    528    EncodeResult r = GET_OR_RETURN_ON_ERROR(
    529        EncodeBatchWithInputStats(e, numFrames, aFrameSource, batchSize));
    530    MediaDataEncoder::EncodedData output = std::move(r.mEncodedData);
    531    if (aUsage == Usage::Realtime && is4KOrLarger) {
    532      // Realtime encoding may drop frames for large frame sizes.
    533      EXPECT_LE(output.Length(), numFrames);
    534    } else {
    535      EXPECT_EQ(output.Length(), numFrames);
    536    }
    537    EXPECT_GE(GetKeyFrameCount(output), r.mInputKeyframes);
    538    if (isAVCC) {
    539      uint8_t naluSize = GetNALUSize(output[0]).unwrapOr(0);
    540      EXPECT_GT(naluSize, 0);
    541      EXPECT_LE(naluSize, 4);
    542      for (auto frame : output) {
    543        if (frame->mExtraData && !frame->mExtraData->IsEmpty()) {
    544          naluSize = GetNALUSize(frame).unwrapOr(0);
    545          EXPECT_GT(naluSize, 0);
    546          EXPECT_LE(naluSize, 4);
    547        }
    548        EXPECT_TRUE(IsValidAVCC(frame, naluSize).isOk());
    549      }
    550    } else {
    551      for (auto frame : output) {
    552        EXPECT_TRUE(AnnexB::IsAnnexB(*frame));
    553      }
    554    }
    555 
    556    WaitForShutdown(e);
    557  });
    558 };
    559 
// H264EncodeBatchTest coverage for each combination of usage, bitstream
// format, and frame size.
TEST_F(MediaDataEncoderTest, H264EncodeBatchAnnexBRecord) {
  H264EncodeBatchTest(Usage::Record, AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatchAnnexBRealtime) {
  H264EncodeBatchTest(Usage::Realtime, AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatchAVCCRecord) {
  H264EncodeBatchTest(Usage::Record, AsVariant(kH264SpecificAVCC), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatchAVCCRealtime) {
  H264EncodeBatchTest(Usage::Realtime, AsVariant(kH264SpecificAVCC), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatch4KAnnexBRecord) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodeBatchTest(Usage::Record, AsVariant(kH264SpecificAnnexB), mData4K);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatch4KAnnexBRealtime) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodeBatchTest(Usage::Realtime, AsVariant(kH264SpecificAnnexB), mData4K);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatch4KAVCCRecord) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodeBatchTest(Usage::Record, AsVariant(kH264SpecificAVCC), mData4K);
}

TEST_F(MediaDataEncoderTest, H264EncodeBatch4KAVCCRealtime) {
  SKIP_IF_ANDROID_SW();  // Android SW can't encode 4K.
  H264EncodeBatchTest(Usage::Realtime, AsVariant(kH264SpecificAVCC), mData4K);
}
    595 
    596 #if !defined(ANDROID)
// Verify an encoder can be reused after a drain: two consecutive full encode
// passes must each produce one output per input frame.
static void H264EncodeAfterDrainTest(
    Usage aUsage, const EncoderConfig::CodecSpecific& aSpecific,
    MediaDataEncoderTest::FrameSource& aFrameSource) {
  ASSERT_TRUE(aSpecific.is<H264Specific>());
  ASSERT_TRUE(aSpecific.as<H264Specific>().mFormat ==
                  H264BitStreamFormat::ANNEXB ||
              aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC);

  RUN_IF_SUPPORTED(CodecType::H264, [&]() {
    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
        aUsage, EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
        aFrameSource.GetSize(), ScalabilityMode::None, aSpecific);

    EXPECT_TRUE(EnsureInit(e));

    // First pass: encode and drain (Encode() drains internally).
    MediaDataEncoder::EncodedData output =
        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, aFrameSource));
    EXPECT_EQ(output.Length(), NUM_FRAMES);

    // Second pass on the same encoder instance.
    output = GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, aFrameSource));
    EXPECT_EQ(output.Length(), NUM_FRAMES);

    WaitForShutdown(e);
  });
}
    622 
// H264EncodeAfterDrainTest coverage for each usage/format combination (SD
// only; these tests are compiled out on Android).
TEST_F(MediaDataEncoderTest, H264EncodeAfterDrainAnnexBRecord) {
  H264EncodeAfterDrainTest(Usage::Record, AsVariant(kH264SpecificAnnexB),
                           mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeAfterDrainAnnexBRealtime) {
  H264EncodeAfterDrainTest(Usage::Realtime, AsVariant(kH264SpecificAnnexB),
                           mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeAfterDrainAVCCRecord) {
  H264EncodeAfterDrainTest(Usage::Record, AsVariant(kH264SpecificAVCC), mData);
}

TEST_F(MediaDataEncoderTest, H264EncodeAfterDrainAVCCRealtime) {
  H264EncodeAfterDrainTest(Usage::Realtime, AsVariant(kH264SpecificAVCC),
                           mData);
}
    641 
// Drain mid-stream (after every fifth frame) and verify the total output
// across all drains still matches the number of input frames.
static void H264InterleavedEncodeAndDrainTest(
    Usage aUsage, const EncoderConfig::CodecSpecific& aSpecific,
    MediaDataEncoderTest::FrameSource& aFrameSource) {
  ASSERT_TRUE(aSpecific.is<H264Specific>());
  ASSERT_TRUE(aSpecific.as<H264Specific>().mFormat ==
                  H264BitStreamFormat::ANNEXB ||
              aSpecific.as<H264Specific>().mFormat == H264BitStreamFormat::AVC);

  RUN_IF_SUPPORTED(CodecType::H264, [&]() {
    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
        aUsage, EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
        aFrameSource.GetSize(), ScalabilityMode::None, aSpecific);

    EXPECT_TRUE(EnsureInit(e));

    MediaDataEncoder::EncodedData output;
    for (size_t i = 0; i < NUM_FRAMES; i++) {
      RefPtr<MediaData> frame = aFrameSource.GetFrame(i);
      output.AppendElements(GET_OR_RETURN_ON_ERROR(WaitFor(e->Encode(frame))));
      // Interleave a full drain every five frames.
      if (i % 5 == 0) {
        output.AppendElements(GET_OR_RETURN_ON_ERROR(Drain(e)));
      }
    }
    // Final drain flushes whatever remains buffered.
    output.AppendElements(GET_OR_RETURN_ON_ERROR(Drain(e)));

    EXPECT_EQ(output.Length(), NUM_FRAMES);

    WaitForShutdown(e);
  });
}
    672 
// H264InterleavedEncodeAndDrainTest coverage for each usage/format
// combination (SD only; compiled out on Android).
TEST_F(MediaDataEncoderTest, H264InterleavedEncodeAndDrainAnnexBRecord) {
  H264InterleavedEncodeAndDrainTest(Usage::Record,
                                    AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264InterleavedEncodeAndDrainAnnexBRealtime) {
  H264InterleavedEncodeAndDrainTest(Usage::Realtime,
                                    AsVariant(kH264SpecificAnnexB), mData);
}

TEST_F(MediaDataEncoderTest, H264InterleavedEncodeAndDrainAVCCRecord) {
  H264InterleavedEncodeAndDrainTest(Usage::Record, AsVariant(kH264SpecificAVCC),
                                    mData);
}

TEST_F(MediaDataEncoderTest, H264InterleavedEncodeAndDrainAVCCRealtime) {
  H264InterleavedEncodeAndDrainTest(Usage::Realtime,
                                    AsVariant(kH264SpecificAVCC), mData);
}
    692 #endif
    693 
// Every encoded frame must carry a strictly positive duration.
TEST_F(MediaDataEncoderTest, H264Duration) {
  RUN_IF_SUPPORTED(CodecType::H264, [this]() {
    RefPtr<MediaDataEncoder> e = CreateH264Encoder();
    EXPECT_TRUE(EnsureInit(e));
    MediaDataEncoder::EncodedData output =
        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    EXPECT_EQ(output.Length(), NUM_FRAMES);
    for (const auto& frame : output) {
      EXPECT_GT(frame->mDuration, media::TimeUnit::Zero());
    }
    WaitForShutdown(e);
  });
}
    707 
// Encoder creation must be rejected for degenerate frame sizes (zero width
// and/or zero height).
TEST_F(MediaDataEncoderTest, H264InvalidSize) {
  RUN_IF_SUPPORTED(CodecType::H264, []() {
    RefPtr<MediaDataEncoder> e0x0 = CreateH264Encoder(
        Usage::Realtime,
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P), {0, 0},
        ScalabilityMode::None, AsVariant(kH264SpecificAnnexB));
    EXPECT_EQ(e0x0, nullptr);

    RefPtr<MediaDataEncoder> e0x1 = CreateH264Encoder(
        Usage::Realtime,
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P), {0, 1},
        ScalabilityMode::None, AsVariant(kH264SpecificAnnexB));
    EXPECT_EQ(e0x1, nullptr);

    RefPtr<MediaDataEncoder> e1x0 = CreateH264Encoder(
        Usage::Realtime,
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P), {1, 0},
        ScalabilityMode::None, AsVariant(kH264SpecificAnnexB));
    EXPECT_EQ(e1x0, nullptr);
  });
}
    729 
    730 #if !defined(ANDROID)
    731 TEST_F(MediaDataEncoderTest, H264AVCC) {
    732  RUN_IF_SUPPORTED(CodecType::H264, [this]() {
    733    // Encod frames in avcC format.
    734    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
    735        Usage::Record,
    736        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    737        kImageSize, ScalabilityMode::None, AsVariant(kH264SpecificAVCC));
    738    EXPECT_TRUE(EnsureInit(e));
    739    MediaDataEncoder::EncodedData output =
    740        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    741    EXPECT_EQ(output.Length(), NUM_FRAMES);
    742    for (auto frame : output) {
    743      EXPECT_FALSE(AnnexB::IsAnnexB(*frame));
    744      if (frame->mKeyframe) {
    745        // The extradata may be included at the beginning, whenever it changes,
    746        // or with every keyframe to support robust seeking or decoder resets.
    747        if (frame->mExtraData && !frame->mExtraData->IsEmpty()) {
    748          EXPECT_TRUE(AnnexB::IsAVCC(frame));
    749          AVCCConfig config = AVCCConfig::Parse(frame).unwrap();
    750          EXPECT_EQ(config.mAVCProfileIndication,
    751                    static_cast<decltype(config.mAVCProfileIndication)>(
    752                        kH264SpecificAVCC.mProfile));
    753          EXPECT_EQ(config.mAVCLevelIndication,
    754                    static_cast<decltype(config.mAVCLevelIndication)>(
    755                        kH264SpecificAVCC.mLevel));
    756        }
    757      }
    758    }
    759    WaitForShutdown(e);
    760  });
    761 }
    762 #endif
    763 
    764 // For Android HW encoder only.
    765 #ifdef MOZ_WIDGET_ANDROID
    766 TEST_F(MediaDataEncoderTest, AndroidNotSupportedSize) {
    767  SKIP_IF_ANDROID_SW();
    768  RUN_IF_SUPPORTED(CodecType::H264, []() {
    769    RefPtr<MediaDataEncoder> e = CreateH264Encoder(
    770        Usage::Realtime,
    771        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P), {1, 1},
    772        ScalabilityMode::None, AsVariant(kH264SpecificAnnexB));
    773    EXPECT_NE(e, nullptr);
    774    EXPECT_FALSE(EnsureInit(e));
    775  });
    776 }
    777 #endif
    778 
    779 #if !(defined(MOZ_WIDGET_GTK) && defined(__i386__))
// Convenience wrapper around CreateVideoEncoder for VP8. All parameters
// default to the common test configuration: realtime usage, YUV420P input,
// kImageSize frames, no scalability, and default VP8Specific settings.
static already_AddRefed<MediaDataEncoder> CreateVP8Encoder(
    Usage aUsage = Usage::Realtime,
    EncoderConfig::SampleFormat aFormat =
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    gfx::IntSize aSize = kImageSize,
    ScalabilityMode aScalabilityMode = ScalabilityMode::None,
    const EncoderConfig::CodecSpecific& aSpecific = AsVariant(VP8Specific())) {
  return CreateVideoEncoder(CodecType::VP8, aUsage, aFormat, aSize,
                            aScalabilityMode, aSpecific);
}
    790 
// Convenience wrapper around CreateVideoEncoder for VP9. All parameters
// default to the common test configuration: realtime usage, YUV420P input,
// kImageSize frames, no scalability, and default VP9Specific settings.
static already_AddRefed<MediaDataEncoder> CreateVP9Encoder(
    Usage aUsage = Usage::Realtime,
    EncoderConfig::SampleFormat aFormat =
        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    gfx::IntSize aSize = kImageSize,
    ScalabilityMode aScalabilityMode = ScalabilityMode::None,
    const EncoderConfig::CodecSpecific& aSpecific = AsVariant(VP9Specific())) {
  return CreateVideoEncoder(CodecType::VP9, aUsage, aFormat, aSize,
                            aScalabilityMode, aSpecific);
}
    801 
    802 TEST_F(MediaDataEncoderTest, VP8Create) {
    803  RUN_IF_SUPPORTED(CodecType::VP8, []() {
    804    RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
    805    EXPECT_TRUE(e);
    806    WaitForShutdown(e);
    807  });
    808 }
    809 
    810 TEST_F(MediaDataEncoderTest, VP8Inits) {
    811  RUN_IF_SUPPORTED(CodecType::VP8, []() {
    812    // w/o codec specific.
    813    RefPtr<MediaDataEncoder> e = CreateVP8Encoder(
    814        Usage::Realtime,
    815        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    816        kImageSize, ScalabilityMode::None, AsVariant(void_t{}));
    817    EXPECT_TRUE(EnsureInit(e));
    818    WaitForShutdown(e);
    819 
    820    // w/ codec specific
    821    e = CreateVP8Encoder();
    822    EXPECT_TRUE(EnsureInit(e));
    823    WaitForShutdown(e);
    824  });
    825 }
    826 
    827 TEST_F(MediaDataEncoderTest, VP8Encodes) {
    828  RUN_IF_SUPPORTED(CodecType::VP8, [this]() {
    829    // Encode one VPX frame.
    830    RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
    831    EXPECT_TRUE(EnsureInit(e));
    832    MediaDataEncoder::EncodedData output =
    833        GET_OR_RETURN_ON_ERROR(Encode(e, 1UL, mData));
    834    EXPECT_EQ(output.Length(), 1UL);
    835    VPXDecoder::VPXStreamInfo info;
    836    EXPECT_TRUE(
    837        VPXDecoder::GetStreamInfo(*output[0], info, VPXDecoder::Codec::VP8));
    838    EXPECT_EQ(info.mKeyFrame, output[0]->mKeyframe);
    839    if (info.mKeyFrame) {
    840      EXPECT_EQ(info.mImage, kImageSize);
    841    }
    842    WaitForShutdown(e);
    843 
    844    // Encode multiple VPX frames.
    845    e = CreateVP8Encoder();
    846    EXPECT_TRUE(EnsureInit(e));
    847    output = GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    848    EXPECT_EQ(output.Length(), NUM_FRAMES);
    849    for (auto frame : output) {
    850      VPXDecoder::VPXStreamInfo info;
    851      EXPECT_TRUE(
    852          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP8));
    853      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
    854      if (info.mKeyFrame) {
    855        EXPECT_EQ(info.mImage, kImageSize);
    856      }
    857    }
    858    WaitForShutdown(e);
    859  });
    860 }
    861 
    862 TEST_F(MediaDataEncoderTest, VP8Duration) {
    863  RUN_IF_SUPPORTED(CodecType::VP8, [this]() {
    864    RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
    865    EXPECT_TRUE(EnsureInit(e));
    866    MediaDataEncoder::EncodedData output =
    867        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    868    EXPECT_EQ(output.Length(), NUM_FRAMES);
    869    for (const auto& frame : output) {
    870      EXPECT_GT(frame->mDuration, media::TimeUnit::Zero());
    871    }
    872    WaitForShutdown(e);
    873  });
    874 }
    875 
    876 #  if !defined(ANDROID)
    877 TEST_F(MediaDataEncoderTest, VP8EncodeAfterDrain) {
    878  RUN_IF_SUPPORTED(CodecType::VP8, [this]() {
    879    RefPtr<MediaDataEncoder> e = CreateVP8Encoder();
    880    EXPECT_TRUE(EnsureInit(e));
    881 
    882    MediaDataEncoder::EncodedData output =
    883        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    884    EXPECT_EQ(output.Length(), NUM_FRAMES);
    885    for (auto frame : output) {
    886      VPXDecoder::VPXStreamInfo info;
    887      EXPECT_TRUE(
    888          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP8));
    889      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
    890      if (info.mKeyFrame) {
    891        EXPECT_EQ(info.mImage, kImageSize);
    892      }
    893    }
    894    output.Clear();
    895 
    896    output = GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    897    EXPECT_EQ(output.Length(), NUM_FRAMES);
    898    for (auto frame : output) {
    899      VPXDecoder::VPXStreamInfo info;
    900      EXPECT_TRUE(
    901          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP8));
    902      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
    903      if (info.mKeyFrame) {
    904        EXPECT_EQ(info.mImage, kImageSize);
    905      }
    906    }
    907 
    908    WaitForShutdown(e);
    909  });
    910 }
    911 
    912 TEST_F(MediaDataEncoderTest, VP8EncodeWithScalabilityModeL1T2) {
    913  RUN_IF_SUPPORTED(CodecType::VP8, [this]() {
    914    VP8Specific specific(VPXComplexity::Normal, /* mComplexity */
    915                         true,                  /* mResilience */
    916                         2,                     /* mNumTemporalLayers */
    917                         true,                  /* mDenoising */
    918                         false,                 /* mAutoResize */
    919                         false                  /* mFrameDropping */
    920    );
    921    RefPtr<MediaDataEncoder> e = CreateVP8Encoder(
    922        Usage::Realtime,
    923        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    924        kImageSize, ScalabilityMode::L1T2, AsVariant(specific));
    925    EXPECT_TRUE(EnsureInit(e));
    926 
    927    const nsTArray<uint8_t> pattern({0, 1});
    928    MediaDataEncoder::EncodedData output =
    929        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    930    int temporal_idx = 0;
    931    EXPECT_EQ(output.Length(), NUM_FRAMES);
    932    for (size_t i = 0; i < output.Length(); ++i) {
    933      const RefPtr<MediaRawData> frame = output[i];
    934      if (frame->mKeyframe) {
    935        temporal_idx = 0;
    936      }
    937      EXPECT_TRUE(frame->mTemporalLayerId);
    938      size_t idx = temporal_idx++ % pattern.Length();
    939      EXPECT_EQ(frame->mTemporalLayerId.value(), pattern[idx]);
    940    }
    941    WaitForShutdown(e);
    942  });
    943 }
    944 
    945 TEST_F(MediaDataEncoderTest, VP8EncodeWithScalabilityModeL1T3) {
    946  RUN_IF_SUPPORTED(CodecType::VP8, [this]() {
    947    VP8Specific specific(VPXComplexity::Normal, /* mComplexity */
    948                         true,                  /* mResilience */
    949                         3,                     /* mNumTemporalLayers */
    950                         true,                  /* mDenoising */
    951                         false,                 /* mAutoResize */
    952                         false                  /* mFrameDropping */
    953    );
    954    RefPtr<MediaDataEncoder> e = CreateVP8Encoder(
    955        Usage::Realtime,
    956        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    957        kImageSize, ScalabilityMode::L1T3, AsVariant(specific));
    958    EXPECT_TRUE(EnsureInit(e));
    959 
    960    const nsTArray<uint8_t> pattern({0, 2, 1, 2});
    961    MediaDataEncoder::EncodedData output =
    962        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
    963    EXPECT_EQ(output.Length(), NUM_FRAMES);
    964    int temporal_idx = 0;
    965    for (size_t i = 0; i < output.Length(); ++i) {
    966      const RefPtr<MediaRawData> frame = output[i];
    967      if (frame->mKeyframe) {
    968        temporal_idx = 0;
    969      }
    970      EXPECT_TRUE(frame->mTemporalLayerId);
    971      size_t idx = temporal_idx++ % pattern.Length();
    972      EXPECT_EQ(frame->mTemporalLayerId.value(), pattern[idx]);
    973    }
    974    WaitForShutdown(e);
    975  });
    976 }
    977 #  endif
    978 
    979 TEST_F(MediaDataEncoderTest, VP9Create) {
    980  RUN_IF_SUPPORTED(CodecType::VP9, []() {
    981    RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
    982    EXPECT_TRUE(e);
    983    WaitForShutdown(e);
    984  });
    985 }
    986 
    987 TEST_F(MediaDataEncoderTest, VP9Inits) {
    988  RUN_IF_SUPPORTED(CodecType::VP9, []() {
    989    // w/o codec specific.
    990    RefPtr<MediaDataEncoder> e = CreateVP9Encoder(
    991        Usage::Realtime,
    992        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
    993        kImageSize, ScalabilityMode::None, AsVariant(void_t{}));
    994    EXPECT_TRUE(EnsureInit(e));
    995    WaitForShutdown(e);
    996 
    997    // w/ codec specific
    998    e = CreateVP9Encoder();
    999    EXPECT_TRUE(EnsureInit(e));
   1000    WaitForShutdown(e);
   1001  });
   1002 }
   1003 
   1004 TEST_F(MediaDataEncoderTest, VP9Encodes) {
   1005  RUN_IF_SUPPORTED(CodecType::VP9, [this]() {
   1006    RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
   1007    EXPECT_TRUE(EnsureInit(e));
   1008    MediaDataEncoder::EncodedData output =
   1009        GET_OR_RETURN_ON_ERROR(Encode(e, 1UL, mData));
   1010    EXPECT_EQ(output.Length(), 1UL);
   1011    VPXDecoder::VPXStreamInfo info;
   1012    EXPECT_TRUE(
   1013        VPXDecoder::GetStreamInfo(*output[0], info, VPXDecoder::Codec::VP9));
   1014    EXPECT_EQ(info.mKeyFrame, output[0]->mKeyframe);
   1015    if (info.mKeyFrame) {
   1016      EXPECT_EQ(info.mImage, kImageSize);
   1017    }
   1018    WaitForShutdown(e);
   1019 
   1020    e = CreateVP9Encoder();
   1021    EXPECT_TRUE(EnsureInit(e));
   1022    output = GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1023    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1024    for (auto frame : output) {
   1025      VPXDecoder::VPXStreamInfo info;
   1026      EXPECT_TRUE(
   1027          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP9));
   1028      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
   1029      if (info.mKeyFrame) {
   1030        EXPECT_EQ(info.mImage, kImageSize);
   1031      }
   1032    }
   1033    WaitForShutdown(e);
   1034  });
   1035 }
   1036 
   1037 TEST_F(MediaDataEncoderTest, VP9Duration) {
   1038  RUN_IF_SUPPORTED(CodecType::VP9, [this]() {
   1039    RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
   1040    EXPECT_TRUE(EnsureInit(e));
   1041    MediaDataEncoder::EncodedData output =
   1042        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1043    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1044    for (const auto& frame : output) {
   1045      EXPECT_GT(frame->mDuration, media::TimeUnit::Zero());
   1046    }
   1047    WaitForShutdown(e);
   1048  });
   1049 }
   1050 
   1051 #  if !defined(ANDROID)
   1052 TEST_F(MediaDataEncoderTest, VP9EncodeAfterDrain) {
   1053  RUN_IF_SUPPORTED(CodecType::VP9, [this]() {
   1054    RefPtr<MediaDataEncoder> e = CreateVP9Encoder();
   1055    EXPECT_TRUE(EnsureInit(e));
   1056 
   1057    MediaDataEncoder::EncodedData output =
   1058        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1059    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1060    for (auto frame : output) {
   1061      VPXDecoder::VPXStreamInfo info;
   1062      EXPECT_TRUE(
   1063          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP9));
   1064      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
   1065      if (info.mKeyFrame) {
   1066        EXPECT_EQ(info.mImage, kImageSize);
   1067      }
   1068    }
   1069    output.Clear();
   1070 
   1071    output = GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1072    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1073    for (auto frame : output) {
   1074      VPXDecoder::VPXStreamInfo info;
   1075      EXPECT_TRUE(
   1076          VPXDecoder::GetStreamInfo(*frame, info, VPXDecoder::Codec::VP9));
   1077      EXPECT_EQ(info.mKeyFrame, frame->mKeyframe);
   1078      if (info.mKeyFrame) {
   1079        EXPECT_EQ(info.mImage, kImageSize);
   1080      }
   1081    }
   1082 
   1083    WaitForShutdown(e);
   1084  });
   1085 }
   1086 
   1087 TEST_F(MediaDataEncoderTest, VP9EncodeWithScalabilityModeL1T2) {
   1088  RUN_IF_SUPPORTED(CodecType::VP9, [this]() {
   1089    VP9Specific specific(VPXComplexity::Normal, /* mComplexity */
   1090                         true,                  /* mResilience */
   1091                         2,                     /* mNumTemporalLayers */
   1092                         true,                  /* mDenoising */
   1093                         false,                 /* mAutoResize */
   1094                         false,                 /* mFrameDropping */
   1095                         true,                  /* mAdaptiveQp */
   1096                         1,                     /* mNumSpatialLayers */
   1097                         false                  /* mFlexible */
   1098    );
   1099 
   1100    RefPtr<MediaDataEncoder> e = CreateVP9Encoder(
   1101        Usage::Realtime,
   1102        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
   1103        kImageSize, ScalabilityMode::L1T2, AsVariant(specific));
   1104    EXPECT_TRUE(EnsureInit(e));
   1105 
   1106    const nsTArray<uint8_t> pattern({0, 1});
   1107    MediaDataEncoder::EncodedData output =
   1108        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1109    int temporal_idx = 0;
   1110    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1111    for (size_t i = 0; i < output.Length(); ++i) {
   1112      const RefPtr<MediaRawData> frame = output[i];
   1113      if (frame->mKeyframe) {
   1114        temporal_idx = 0;
   1115      }
   1116      EXPECT_TRUE(frame->mTemporalLayerId);
   1117      size_t idx = temporal_idx++ % pattern.Length();
   1118      EXPECT_EQ(frame->mTemporalLayerId.value(), pattern[idx]);
   1119    }
   1120    WaitForShutdown(e);
   1121  });
   1122 }
   1123 
   1124 TEST_F(MediaDataEncoderTest, VP9EncodeWithScalabilityModeL1T3) {
   1125  RUN_IF_SUPPORTED(CodecType::VP9, [this]() {
   1126    VP9Specific specific(VPXComplexity::Normal, /* mComplexity */
   1127                         true,                  /* mResilience */
   1128                         3,                     /* mNumTemporalLayers */
   1129                         true,                  /* mDenoising */
   1130                         false,                 /* mAutoResize */
   1131                         false,                 /* mFrameDropping */
   1132                         true,                  /* mAdaptiveQp */
   1133                         1,                     /* mNumSpatialLayers */
   1134                         false                  /* mFlexible */
   1135    );
   1136 
   1137    RefPtr<MediaDataEncoder> e = CreateVP9Encoder(
   1138        Usage::Realtime,
   1139        EncoderConfig::SampleFormat(dom::ImageBitmapFormat::YUV420P),
   1140        kImageSize, ScalabilityMode::L1T3, AsVariant(specific));
   1141    EXPECT_TRUE(EnsureInit(e));
   1142 
   1143    const nsTArray<uint8_t> pattern({0, 2, 1, 2});
   1144    MediaDataEncoder::EncodedData output =
   1145        GET_OR_RETURN_ON_ERROR(Encode(e, NUM_FRAMES, mData));
   1146    int temporal_idx = 0;
   1147    EXPECT_EQ(output.Length(), NUM_FRAMES);
   1148    for (size_t i = 0; i < output.Length(); ++i) {
   1149      const RefPtr<MediaRawData> frame = output[i];
   1150      if (frame->mKeyframe) {
   1151        temporal_idx = 0;
   1152      }
   1153      EXPECT_TRUE(frame->mTemporalLayerId);
   1154      size_t idx = temporal_idx++ % pattern.Length();
   1155      EXPECT_EQ(frame->mTemporalLayerId.value(), pattern[idx]);
   1156    }
   1157    WaitForShutdown(e);
   1158  });
   1159 }
   1160 #  endif
   1161 #endif
   1162 
   1163 #undef BLOCK_SIZE
   1164 #undef GET_OR_RETURN_ON_ERROR
   1165 #undef RUN_IF_SUPPORTED