tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

FFmpegVideoDecoder.cpp (90527B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "FFmpegVideoDecoder.h"
      8 
      9 #include "EncoderConfig.h"
     10 #include "FFmpegLibWrapper.h"
     11 #include "FFmpegLog.h"
     12 #include "FFmpegUtils.h"
     13 #include "ImageContainer.h"
     14 #include "MP4Decoder.h"
     15 #include "MediaInfo.h"
     16 #include "VALibWrapper.h"
     17 #include "VPXDecoder.h"
     18 #include "VideoUtils.h"
     19 #if LIBAVCODEC_VERSION_MAJOR >= 58
     20 #  include "libavutil/buffer.h"
     21 #  include "libavutil/frame.h"
     22 #  include "libavutil/hwcontext.h"
     23 #  include "libavutil/pixfmt.h"
     24 #endif
     25 #include "mozilla/UniquePtr.h"
     26 #include "mozilla/gfx/gfxVars.h"
     27 #include "mozilla/layers/KnowsCompositor.h"
     28 #include "nsPrintfCString.h"
     29 #if LIBAVCODEC_VERSION_MAJOR >= 57
     30 #  include "mozilla/layers/TextureClient.h"
     31 #endif
     32 #if LIBAVCODEC_VERSION_MAJOR >= 58
     33 #  include "mozilla/ProfilerMarkers.h"
     34 #endif
     35 #ifdef MOZ_USE_HWDECODE
     36 #  include "H264.h"
     37 #  include "H265.h"
     38 #endif
     39 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
     40 #  include "FFmpegVideoFramePool.h"
     41 #  include "mozilla/layers/DMABUFSurfaceImage.h"
     42 #  include "va/va.h"
     43 #endif
     44 
     45 #if defined(MOZ_AV1) && \
     46    (defined(FFVPX_VERSION) || LIBAVCODEC_VERSION_MAJOR >= 59)
     47 #  define FFMPEG_AV1_DECODE 1
     48 #  include "AOMDecoder.h"
     49 #endif
     50 
     51 #if LIBAVCODEC_VERSION_MAJOR < 54
     52 #  define AV_PIX_FMT_YUV420P PIX_FMT_YUV420P
     53 #  define AV_PIX_FMT_YUVJ420P PIX_FMT_YUVJ420P
     54 #  define AV_PIX_FMT_YUV420P10LE PIX_FMT_YUV420P10LE
     55 #  define AV_PIX_FMT_YUV422P PIX_FMT_YUV422P
     56 #  define AV_PIX_FMT_YUV422P10LE PIX_FMT_YUV422P10LE
     57 #  define AV_PIX_FMT_YUV444P PIX_FMT_YUV444P
     58 #  define AV_PIX_FMT_YUVJ444P PIX_FMT_YUVJ444P
     59 #  define AV_PIX_FMT_YUV444P10LE PIX_FMT_YUV444P10LE
     60 #  define AV_PIX_FMT_GBRP PIX_FMT_GBRP
     61 #  define AV_PIX_FMT_GBRP10LE PIX_FMT_GBRP10LE
     62 #  define AV_PIX_FMT_NONE PIX_FMT_NONE
     63 #  define AV_PIX_FMT_VAAPI_VLD PIX_FMT_VAAPI_VLD
     64 #endif
     65 #if LIBAVCODEC_VERSION_MAJOR > 58
     66 #  define AV_PIX_FMT_VAAPI_VLD AV_PIX_FMT_VAAPI
     67 #endif
     68 #include "mozilla/StaticPrefs_gfx.h"
     69 #include "mozilla/StaticPrefs_media.h"
     70 #include "mozilla/TaskQueue.h"
     71 #include "nsThreadUtils.h"
     72 #include "prsystem.h"
     73 
     74 #ifdef XP_WIN
     75 #  include "mozilla/gfx/DeviceManagerDx.h"
     76 #endif
     77 
     78 #ifdef MOZ_ENABLE_D3D11VA
     79 #  include "D3D11TextureWrapper.h"
     80 #  include "DXVA2Manager.h"
     81 #  include "ffvpx/hwcontext_d3d11va.h"
     82 #endif
     83 
     84 #ifdef MOZ_WIDGET_ANDROID
     85 #  include "ffvpx/hwcontext_mediacodec.h"
     86 #  include "ffvpx/mediacodec.h"
     87 #  include "mozilla/java/CodecProxyWrappers.h"
     88 #  include "mozilla/java/GeckoSurfaceWrappers.h"
     89 #  include "mozilla/java/SampleBufferWrappers.h"
     90 #  include "mozilla/java/SampleWrappers.h"
     91 #  include "mozilla/java/SurfaceAllocatorWrappers.h"
     92 #  include "mozilla/layers/TextureClientOGL.h"
     93 #endif
     94 
     95 #if defined(MOZ_WIDGET_ANDROID) && defined(FFVPX_VERSION)
     96 #  include "mozilla/MediaDrmRemoteCDMParent.h"
     97 #endif
     98 
     99 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
    100 // Forward declare from va.h
    101 typedef int VAStatus;
    102 #  define VA_EXPORT_SURFACE_READ_ONLY 0x0001
    103 #  define VA_EXPORT_SURFACE_SEPARATE_LAYERS 0x0004
    104 #  define VA_STATUS_SUCCESS 0x00000000
    105 #endif
    106 
    107 // Use some extra HW frames for potential rendering lags.
    108 // AV1 and VP9 can have maximum 8 frames for reference frames, so 1 base + 8
    109 // references.
    110 #define EXTRA_HW_FRAMES 9
    111 
    112 #if LIBAVCODEC_VERSION_MAJOR >= 57 && LIBAVUTIL_VERSION_MAJOR >= 56
    113 #  define CUSTOMIZED_BUFFER_ALLOCATION 1
    114 #endif
    115 
    116 #define AV_LOG_DEBUG 48
    117 
    118 typedef mozilla::layers::Image Image;
    119 typedef mozilla::layers::PlanarYCbCrImage PlanarYCbCrImage;
    120 typedef mozilla::layers::BufferRecycleBin BufferRecycleBin;
    121 
    122 namespace mozilla {
    123 
    124 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
    125 constinit nsTArray<AVCodecID>
    126    FFmpegVideoDecoder<LIBAV_VER>::mAcceleratedFormats;
    127 #endif
    128 
    129 using media::TimeUnit;
    130 
    131 /**
    132 * FFmpeg calls back to this function with a list of pixel formats it supports.
    133 * We choose a pixel format that we support and return it.
    134 * For now, we just look for YUV420P, YUVJ420P, YUV444 and YUVJ444 as
    135 * those are the only non-HW accelerated format supported by FFmpeg's H264 and
    136 * VP9 decoder.
    137 */
    138 static AVPixelFormat ChoosePixelFormat(AVCodecContext* aCodecContext,
    139                                       const AVPixelFormat* aFormats) {
    140  FFMPEGV_LOG("Choosing FFmpeg pixel format for video decoding.");
    141  for (; *aFormats > -1; aFormats++) {
    142    switch (*aFormats) {
    143      case AV_PIX_FMT_YUV420P:
    144        FFMPEGV_LOG("Requesting pixel format YUV420P.");
    145        return AV_PIX_FMT_YUV420P;
    146      case AV_PIX_FMT_YUVJ420P:
    147        FFMPEGV_LOG("Requesting pixel format YUVJ420P.");
    148        return AV_PIX_FMT_YUVJ420P;
    149      case AV_PIX_FMT_YUV420P10LE:
    150        FFMPEGV_LOG("Requesting pixel format YUV420P10LE.");
    151        return AV_PIX_FMT_YUV420P10LE;
    152      case AV_PIX_FMT_YUV422P:
    153        FFMPEGV_LOG("Requesting pixel format YUV422P.");
    154        return AV_PIX_FMT_YUV422P;
    155      case AV_PIX_FMT_YUV422P10LE:
    156        FFMPEGV_LOG("Requesting pixel format YUV422P10LE.");
    157        return AV_PIX_FMT_YUV422P10LE;
    158      case AV_PIX_FMT_YUV444P:
    159        FFMPEGV_LOG("Requesting pixel format YUV444P.");
    160        return AV_PIX_FMT_YUV444P;
    161      case AV_PIX_FMT_YUVJ444P:
    162        FFMPEGV_LOG("Requesting pixel format YUVJ444P.");
    163        return AV_PIX_FMT_YUVJ444P;
    164      case AV_PIX_FMT_YUV444P10LE:
    165        FFMPEGV_LOG("Requesting pixel format YUV444P10LE.");
    166        return AV_PIX_FMT_YUV444P10LE;
    167 #if LIBAVCODEC_VERSION_MAJOR >= 57
    168      case AV_PIX_FMT_YUV420P12LE:
    169        FFMPEGV_LOG("Requesting pixel format YUV420P12LE.");
    170        return AV_PIX_FMT_YUV420P12LE;
    171      case AV_PIX_FMT_YUV422P12LE:
    172        FFMPEGV_LOG("Requesting pixel format YUV422P12LE.");
    173        return AV_PIX_FMT_YUV422P12LE;
    174      case AV_PIX_FMT_YUV444P12LE:
    175        FFMPEGV_LOG("Requesting pixel format YUV444P12LE.");
    176        return AV_PIX_FMT_YUV444P12LE;
    177 #endif
    178      case AV_PIX_FMT_GBRP:
    179        FFMPEGV_LOG("Requesting pixel format GBRP.");
    180        return AV_PIX_FMT_GBRP;
    181      case AV_PIX_FMT_GBRP10LE:
    182        FFMPEGV_LOG("Requesting pixel format GBRP10LE.");
    183        return AV_PIX_FMT_GBRP10LE;
    184      default:
    185        break;
    186    }
    187  }
    188 
    189  NS_WARNING("FFmpeg does not share any supported pixel formats.");
    190  return AV_PIX_FMT_NONE;
    191 }
    192 
    193 #ifdef MOZ_USE_HWDECODE
    194 static AVPixelFormat ChooseVAAPIPixelFormat(AVCodecContext* aCodecContext,
    195                                            const AVPixelFormat* aFormats) {
    196  FFMPEGV_LOG("Choosing FFmpeg pixel format for VA-API video decoding.");
    197  for (; *aFormats > -1; aFormats++) {
    198    switch (*aFormats) {
    199      case AV_PIX_FMT_VAAPI_VLD:
    200        FFMPEGV_LOG("Requesting pixel format VAAPI_VLD");
    201        return AV_PIX_FMT_VAAPI_VLD;
    202      default:
    203        break;
    204    }
    205  }
    206  NS_WARNING("FFmpeg does not share any supported pixel formats.");
    207  return AV_PIX_FMT_NONE;
    208 }
    209 
    210 static AVPixelFormat ChooseV4L2PixelFormat(AVCodecContext* aCodecContext,
    211                                           const AVPixelFormat* aFormats) {
    212  FFMPEGV_LOG("Choosing FFmpeg pixel format for V4L2 video decoding.");
    213  for (; *aFormats > -1; aFormats++) {
    214    switch (*aFormats) {
    215      case AV_PIX_FMT_DRM_PRIME:
    216        FFMPEGV_LOG("Requesting pixel format DRM PRIME");
    217        return AV_PIX_FMT_DRM_PRIME;
    218      default:
    219        break;
    220    }
    221  }
    222  NS_WARNING("FFmpeg does not share any supported V4L2 pixel formats.");
    223  return AV_PIX_FMT_NONE;
    224 }
    225 
    226 static AVPixelFormat ChooseD3D11VAPixelFormat(AVCodecContext* aCodecContext,
    227                                              const AVPixelFormat* aFormats) {
    228 #  ifdef MOZ_ENABLE_D3D11VA
    229  FFMPEGV_LOG("Choosing FFmpeg pixel format for D3D11VA video decoding %d. ",
    230              *aFormats);
    231  for (; *aFormats > -1; aFormats++) {
    232    switch (*aFormats) {
    233      case AV_PIX_FMT_D3D11:
    234        FFMPEGV_LOG("Requesting pixel format D3D11");
    235        return AV_PIX_FMT_D3D11;
    236      default:
    237        break;
    238    }
    239  }
    240  NS_WARNING("FFmpeg does not share any supported D3D11 pixel formats.");
    241 #  endif  // MOZ_ENABLE_D3D11VA
    242  return AV_PIX_FMT_NONE;
    243 }
    244 #endif
    245 
    246 #ifdef MOZ_USE_HWDECODE
    247 static AVPixelFormat ChooseMediaCodecPixelFormat(
    248    AVCodecContext* aCodecContext, const AVPixelFormat* aFormats) {
    249 #  ifdef MOZ_WIDGET_ANDROID
    250  FFMPEGV_LOG("Choosing FFmpeg pixel format for MediaCodec video decoding %d. ",
    251              *aFormats);
    252  for (; *aFormats > -1; aFormats++) {
    253    switch (*aFormats) {
    254      case AV_PIX_FMT_MEDIACODEC:
    255        FFMPEGV_LOG("Requesting pixel format MediaCodec");
    256        return AV_PIX_FMT_MEDIACODEC;
    257      default:
    258        break;
    259    }
    260  }
    261  NS_WARNING("FFmpeg does not share any supported MediaCodec pixel formats.");
    262 #  endif  // MOZ_WIDGET_ANDROID
    263  return AV_PIX_FMT_NONE;
    264 }
    265 #endif
    266 
    267 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
    268 static void VAAPIDisplayReleaseCallback(struct AVHWDeviceContext* hwctx) {
    269  auto displayHolder = static_cast<VADisplayHolder*>(hwctx->user_opaque);
    270  displayHolder->Release();
    271 }
    272 
// Allocates and initializes mVAAPIDeviceContext (an AVBufferRef wrapping an
// AVHWDeviceContext of type VA-API), points it at the shared VADisplay, and
// attaches a new reference to mCodecContext->hw_device_ctx.
// Returns false on any failure, in which case the partially-built context is
// unref'd by the scope-exit guard.
bool FFmpegVideoDecoder<LIBAV_VER>::CreateVAAPIDeviceContext() {
  mVAAPIDeviceContext = mLib->av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_VAAPI);
  if (!mVAAPIDeviceContext) {
    FFMPEG_LOG("  av_hwdevice_ctx_alloc failed.");
    return false;
  }

  // Unref the device context on every early-error return below; disarmed on
  // the success path.
  auto releaseVAAPIcontext =
      MakeScopeExit([&] { mLib->av_buffer_unref(&mVAAPIDeviceContext); });

  AVHWDeviceContext* hwctx = (AVHWDeviceContext*)mVAAPIDeviceContext->data;
  AVVAAPIDeviceContext* vactx = (AVVAAPIDeviceContext*)hwctx->hwctx;

  RefPtr displayHolder = VADisplayHolder::GetSingleton();
  if (!displayHolder) {
    return false;
  }

  mDisplay = displayHolder->Display();
  // Transfer our strong reference on the holder into the hw context; it is
  // released in VAAPIDisplayReleaseCallback when FFmpeg frees the context.
  hwctx->user_opaque = displayHolder.forget().take();
  hwctx->free = VAAPIDisplayReleaseCallback;

  vactx->display = mDisplay;
  if (mLib->av_hwdevice_ctx_init(mVAAPIDeviceContext) < 0) {
    FFMPEG_LOG("  av_hwdevice_ctx_init failed.");
    return false;
  }

  // Success: give the codec context its own reference and keep ours alive by
  // disarming the cleanup guard.
  mCodecContext->hw_device_ctx = mLib->av_buffer_ref(mVAAPIDeviceContext);
  releaseVAAPIcontext.release();
  return true;
}
    305 
    306 void FFmpegVideoDecoder<LIBAV_VER>::AdjustHWDecodeLogging() {
    307  if (!getenv("LIBVA_MESSAGING_LEVEL")) {
    308    if (MOZ_LOG_TEST(sFFmpegVideoLog, LogLevel::Debug)) {
    309      setenv("LIBVA_MESSAGING_LEVEL", "1", false);
    310    } else if (MOZ_LOG_TEST(sFFmpegVideoLog, LogLevel::Info)) {
    311      setenv("LIBVA_MESSAGING_LEVEL", "2", false);
    312    } else {
    313      setenv("LIBVA_MESSAGING_LEVEL", "0", false);
    314    }
    315  }
    316 }
    317 
// Sets up the VA-API hardware decoding pipeline: verifies the codec is
// accelerated (against the cached mAcceleratedFormats list when present),
// finds the hardware AVCodec, builds the codec context + VA-API device
// context, and opens the decoder. Holds sMutex for the whole sequence since
// mAcceleratedFormats is shared static state.
// Returns NS_OK on success; on any failure all partially-created FFmpeg state
// is torn down by the scope-exit guard.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitVAAPIDecoder() {
  FFMPEG_LOG("Initialising VA-API FFmpeg decoder");

  StaticMutexAutoLock mon(sMutex);

  // mAcceleratedFormats is already configured so check supported
  // formats before we do anything.
  if (mAcceleratedFormats.Length()) {
    if (!IsFormatAccelerated(mCodecID)) {
      FFMPEG_LOG("  Format %s is not accelerated",
                 mLib->avcodec_get_name(mCodecID));
      return NS_ERROR_NOT_AVAILABLE;
    } else {
      FFMPEG_LOG("  Format %s is accelerated",
                 mLib->avcodec_get_name(mCodecID));
    }
  }

  if (!mLib->IsVAAPIAvailable()) {
    FFMPEG_LOG("  libva library or symbols are missing.");
    return NS_ERROR_NOT_AVAILABLE;
  }

  AVCodec* codec =
      FindVideoHardwareAVCodec(mLib, mCodecID, AV_HWDEVICE_TYPE_VAAPI);
  if (!codec) {
    FFMPEG_LOG("  couldn't find ffmpeg VA-API decoder");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }
  // This logic is mirrored in FFmpegDecoderModule::Supports. We prefer to use
  // our own OpenH264 decoder through the plugin over ffmpeg by default due to
  // broken decoding with some versions. openh264 has broken decoding of some
  // h264 videos so don't use it unless explicitly allowed for now.
  if (!strcmp(codec->name, "libopenh264") &&
      !StaticPrefs::media_ffmpeg_allow_openh264()) {
    FFMPEG_LOG("  unable to find codec (openh264 disabled by pref)");
    return MediaResult(
        NS_ERROR_DOM_MEDIA_FATAL_ERR,
        RESULT_DETAIL("unable to find codec (openh264 disabled by pref)"));
  }
  FFMPEG_LOG("  codec %s : %s", codec->name, codec->long_name);

  if (!(mCodecContext = mLib->avcodec_alloc_context3(codec))) {
    FFMPEG_LOG("  couldn't init VA-API ffmpeg context");
    return NS_ERROR_OUT_OF_MEMORY;
  }
  // Let FFmpeg callbacks (e.g. get_format) find their way back to us.
  mCodecContext->opaque = this;

  InitHWCodecContext(ContextType::VAAPI);

  // MOZ_REQUIRES isn't recognized in MakeScopeExit, but InitVAAPIDecoder
  // already locks sMutex at the start, so just escape thread analysis.
  auto releaseVAAPIdecoder = MakeScopeExit([&]() MOZ_NO_THREAD_SAFETY_ANALYSIS {
    if (mVAAPIDeviceContext) {
      mLib->av_buffer_unref(&mVAAPIDeviceContext);
    }
    ReleaseCodecContext();
  });

  if (!CreateVAAPIDeviceContext()) {
    FFMPEG_LOG("  Failed to create VA-API device context");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  MediaResult ret = AllocateExtraData();
  if (NS_FAILED(ret)) {
    return ret;
  }

  if (mLib->avcodec_open2(mCodecContext, codec, nullptr) < 0) {
    FFMPEG_LOG("  Couldn't initialise VA-API decoder");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  // First successful init in this process: probe and cache the accelerated
  // format list, then re-check our own codec against it.
  if (mAcceleratedFormats.IsEmpty()) {
    mAcceleratedFormats = GetAcceleratedFormats();
    if (!IsFormatAccelerated(mCodecID)) {
      FFMPEG_LOG("  Format %s is not accelerated",
                 mLib->avcodec_get_name(mCodecID));
      return NS_ERROR_NOT_AVAILABLE;
    }
  }

  AdjustHWDecodeLogging();

  FFMPEG_LOG("  VA-API FFmpeg init successful");
  // Success: disarm the cleanup guard so the contexts stay alive.
  releaseVAAPIdecoder.release();
  return NS_OK;
}
    407 
// Sets up the V4L2-DRM hardware decoding pipeline: finds the V4L2 wrapper
// AVCodec, builds and opens the codec context, and records the codec in
// mAcceleratedFormats. Holds sMutex for the whole sequence since
// mAcceleratedFormats is shared static state.
// Returns NS_OK on success; on failure the codec context is torn down by the
// scope-exit guard.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitV4L2Decoder() {
  FFMPEG_LOG("Initialising V4L2-DRM FFmpeg decoder");

  StaticMutexAutoLock mon(sMutex);

  // mAcceleratedFormats is already configured so check supported
  // formats before we do anything.
  if (mAcceleratedFormats.Length()) {
    if (!IsFormatAccelerated(mCodecID)) {
      FFMPEG_LOG("  Format %s is not accelerated",
                 mLib->avcodec_get_name(mCodecID));
      return NS_ERROR_NOT_AVAILABLE;
    }
    FFMPEG_LOG("  Format %s is accelerated", mLib->avcodec_get_name(mCodecID));
  }

  // Select the appropriate v4l2 codec
  AVCodec* codec = FindVideoHardwareAVCodec(mLib, mCodecID);
  if (!codec) {
    FFMPEG_LOG("No appropriate v4l2 codec found");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }
  FFMPEG_LOG("  V4L2 codec %s : %s", codec->name, codec->long_name);

  if (!(mCodecContext = mLib->avcodec_alloc_context3(codec))) {
    FFMPEG_LOG("  couldn't init HW ffmpeg context");
    return NS_ERROR_OUT_OF_MEMORY;
  }
  // Let FFmpeg callbacks (e.g. get_format) find their way back to us.
  mCodecContext->opaque = this;

  InitHWCodecContext(ContextType::V4L2);

  // Disable cropping in FFmpeg.  Because our frames are opaque DRM buffers
  // FFmpeg can't actually crop them and it tries to do so by just modifying
  // the width and height.  This causes problems because V4L2 outputs a single
  // buffer/layer/plane with all three planes stored contiguously.  We need to
  // know the offsets to each plane, and if FFmpeg applies cropping (and then
  // we can't find out what the original uncropped width/height was) then we
  // can't work out the offsets.
  mCodecContext->apply_cropping = 0;

  // MOZ_REQUIRES isn't recognized in MakeScopeExit, but InitV4L2Decoder
  // already locks sMutex at the start, so just escape thread analysis.
  auto releaseDecoder = MakeScopeExit(
      [&]() MOZ_NO_THREAD_SAFETY_ANALYSIS { ReleaseCodecContext(); });

  MediaResult ret = AllocateExtraData();
  if (NS_FAILED(ret)) {
    return ret;
  }

  if (mLib->avcodec_open2(mCodecContext, codec, nullptr) < 0) {
    FFMPEG_LOG("  Couldn't initialise V4L2 decoder");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  // Set mAcceleratedFormats
  if (mAcceleratedFormats.IsEmpty()) {
    // FFmpeg does not correctly report that the V4L2 wrapper decoders are
    // hardware accelerated, but we know they always are.  If we've gotten
    // this far then we know this codec has a V4L2 wrapper decoder and so is
    // accelerated.
    mAcceleratedFormats.AppendElement(mCodecID);
  }

  AdjustHWDecodeLogging();

  FFMPEG_LOG("  V4L2 FFmpeg init successful");
  mUsingV4L2 = true;
  // Success: disarm the cleanup guard so the codec context stays alive.
  releaseDecoder.release();
  return NS_OK;
}
    480 #endif
    481 
    482 #if LIBAVCODEC_VERSION_MAJOR < 58
// Start with empty timestamp history: fault counters zeroed and the
// "last seen" timestamps at INT64_MIN so the first sample can never be
// counted as out of order.
FFmpegVideoDecoder<LIBAV_VER>::PtsCorrectionContext::PtsCorrectionContext()
    : mNumFaultyPts(0),
      mNumFaultyDts(0),
      mLastPts(INT64_MIN),
      mLastDts(INT64_MIN) {}
    488 
    489 int64_t FFmpegVideoDecoder<LIBAV_VER>::PtsCorrectionContext::GuessCorrectPts(
    490    int64_t aPts, int64_t aDts) {
    491  int64_t pts = AV_NOPTS_VALUE;
    492 
    493  if (aDts != int64_t(AV_NOPTS_VALUE)) {
    494    mNumFaultyDts += aDts <= mLastDts;
    495    mLastDts = aDts;
    496  }
    497  if (aPts != int64_t(AV_NOPTS_VALUE)) {
    498    mNumFaultyPts += aPts <= mLastPts;
    499    mLastPts = aPts;
    500  }
    501  if ((mNumFaultyPts <= mNumFaultyDts || aDts == int64_t(AV_NOPTS_VALUE)) &&
    502      aPts != int64_t(AV_NOPTS_VALUE)) {
    503    pts = aPts;
    504  } else {
    505    pts = aDts;
    506  }
    507  return pts;
    508 }
    509 
    510 void FFmpegVideoDecoder<LIBAV_VER>::PtsCorrectionContext::Reset() {
    511  mNumFaultyPts = 0;
    512  mNumFaultyDts = 0;
    513  mLastPts = INT64_MIN;
    514  mLastDts = INT64_MIN;
    515 }
    516 #endif
    517 
    518 #if defined(MOZ_USE_HWDECODE)
    519 bool FFmpegVideoDecoder<LIBAV_VER>::ShouldDisableHWDecoding(
    520    bool aDisableHardwareDecoding) const {
    521 #  ifdef MOZ_WIDGET_ANDROID
    522 #    ifdef FFVPX_VERSION
    523  // We only support decrypt and decode with MediaCodec.
    524  if (mCDM) {
    525    FFMPEG_LOG("CDM requires platform decoder");
    526    return false;
    527  }
    528 #    endif
    529  switch (mCodecID) {
    530    case AV_CODEC_ID_H264:
    531    case AV_CODEC_ID_HEVC:
    532      // We only support decoding H264/HEVC with MediaCodec.
    533      FFMPEG_LOG("Codec %s requires platform decoder",
    534                 AVCodecToString(mCodecID));
    535      return false;
    536    case AV_CODEC_ID_AV1:
    537      // We only support main profile AV1 with MediaCodec. See bug 1967752.
    538      if (!AOMDecoder::IsMainProfile(mInfo.mExtraData)) {
    539        FFMPEG_LOG("Cannot use platfrom decoder AV1 without main profile");
    540        return true;
    541      }
    542      break;
    543    default:
    544      break;
    545  }
    546 #  endif
    547 
    548 #  if defined(MOZ_WIDGET_GTK) || defined(MOZ_WIDGET_ANDROID)
    549  bool supported = false;
    550  switch (mCodecID) {
    551    case AV_CODEC_ID_H264:
    552      supported = gfx::gfxVars::UseH264HwDecode();
    553      break;
    554    case AV_CODEC_ID_VP8:
    555      supported = gfx::gfxVars::UseVP8HwDecode();
    556      break;
    557    case AV_CODEC_ID_VP9:
    558      supported = gfx::gfxVars::UseVP9HwDecode();
    559      break;
    560    case AV_CODEC_ID_AV1:
    561      supported = gfx::gfxVars::UseAV1HwDecode();
    562      break;
    563    case AV_CODEC_ID_HEVC:
    564      supported = gfx::gfxVars::UseHEVCHwDecode();
    565      break;
    566    default:
    567      break;
    568  }
    569  if (!supported) {
    570    FFMPEG_LOG("Codec %s is not accelerated", AVCodecToString(mCodecID));
    571    return true;
    572  }
    573  if (!XRE_IsRDDProcess()) {
    574    FFMPEG_LOG("Platform decoder works in RDD process only");
    575    return true;
    576  }
    577 #  endif
    578 
    579 #  ifdef MOZ_WIDGET_GTK
    580  bool isHardwareWebRenderUsed = mImageAllocator &&
    581                                 (mImageAllocator->GetCompositorBackendType() ==
    582                                  layers::LayersBackend::LAYERS_WR) &&
    583                                 !mImageAllocator->UsingSoftwareWebRender();
    584  if (!isHardwareWebRenderUsed) {
    585    FFMPEG_LOG("Hardware WebRender is off, VAAPI is disabled");
    586    return true;
    587  }
    588 #  endif
    589  return aDisableHardwareDecoding;
    590 }
    591 #endif
    592 
    593 #if defined(MOZ_WIDGET_GTK) && defined(MOZ_USE_HWDECODE)
    594 bool FFmpegVideoDecoder<LIBAV_VER>::UploadSWDecodeToDMABuf() const {
    595  // Use direct DMABuf upload for GL backend Wayland compositor only.
    596  return mImageAllocator && (mImageAllocator->GetCompositorBackendType() ==
    597                                 layers::LayersBackend::LAYERS_WR &&
    598                             !mImageAllocator->UsingSoftwareWebRender() &&
    599                             mImageAllocator->GetWebRenderCompositorType() ==
    600                                 layers::WebRenderCompositor::WAYLAND);
    601 }
    602 #endif
    603 
// Constructs the decoder, copies the codec extradata, and — when hardware
// decoding is permitted — fully initializes the HW decoder right here (see
// InitHWDecoderIfAllowed); Init() then only has to set up the SW path.
FFmpegVideoDecoder<LIBAV_VER>::FFmpegVideoDecoder(
    const FFmpegLibWrapper* aLib, const VideoInfo& aConfig,
    KnowsCompositor* aAllocator, ImageContainer* aImageContainer,
    bool aLowLatency, bool aDisableHardwareDecoding, bool a8BitOutput,
    Maybe<TrackingId> aTrackingId, PRemoteCDMActor* aCDM)
    : FFmpegDataDecoder(aLib, GetCodecId(aConfig.mMimeType), aCDM),
      mImageAllocator(aAllocator),
      mImageContainer(aImageContainer),
      mInfo(aConfig),
#ifdef MOZ_USE_HWDECODE
      mHardwareDecodingDisabled(
          ShouldDisableHWDecoding(aDisableHardwareDecoding)),
#endif  // MOZ_USE_HWDECODE
      mLowLatency(aLowLatency),
      mTrackingId(std::move(aTrackingId)),
      // Value may be changed later when codec is known after initialization.
      m8BitOutput(a8BitOutput) {
  FFMPEG_LOG("FFmpegVideoDecoder::FFmpegVideoDecoder MIME %s Codec ID %d",
             aConfig.mMimeType.get(), mCodecID);
  // Use a new MediaByteBuffer as the object will be modified during
  // initialization.
  mExtraData = new MediaByteBuffer;
  mExtraData->AppendElements(*aConfig.mExtraData);
#if defined(MOZ_WIDGET_GTK) && defined(MOZ_USE_HWDECODE)
  mUploadSWDecodeToDMABuf = UploadSWDecodeToDMABuf();
#endif
#ifdef MOZ_USE_HWDECODE
  InitHWDecoderIfAllowed();
#endif  // MOZ_USE_HWDECODE
}
    634 
// Destructor. When customized buffer allocation is in use, all shmem-backed
// images handed to FFmpeg must already have been released before we get here.
FFmpegVideoDecoder<LIBAV_VER>::~FFmpegVideoDecoder() {
#ifdef CUSTOMIZED_BUFFER_ALLOCATION
  MOZ_DIAGNOSTIC_ASSERT(mAllocatedImages.IsEmpty(),
                        "Should release all shmem buffers before destroy!");
#endif
}
    641 
    642 #ifdef MOZ_USE_HWDECODE
// Tries each compiled-in hardware backend in preference order (VA-API, then
// V4L2, then D3D11VA, then Android MediaCodec) and stops at the first one
// that initializes successfully. Does nothing when hardware decoding was
// disabled at construction time. Falling through every backend leaves the
// decoder on the software path.
void FFmpegVideoDecoder<LIBAV_VER>::InitHWDecoderIfAllowed() {
  if (mHardwareDecodingDisabled) {
    return;
  }

#  ifdef MOZ_ENABLE_VAAPI
  if (NS_SUCCEEDED(InitVAAPIDecoder())) {
    return;
  }
#  endif  // MOZ_ENABLE_VAAPI

#  ifdef MOZ_ENABLE_V4L2
  // VAAPI didn't work or is disabled, so try V4L2 with DRM
  if (NS_SUCCEEDED(InitV4L2Decoder())) {
    return;
  }
#  endif  // MOZ_ENABLE_V4L2

#  ifdef MOZ_ENABLE_D3D11VA
  // D3D11VA decoding only runs in the GPU process.
  if (XRE_IsGPUProcess() && NS_SUCCEEDED(InitD3D11VADecoder())) {
    return;
  }
#  endif  // MOZ_ENABLE_D3D11VA

#  ifdef MOZ_WIDGET_ANDROID
  // MediaCodec runs in the RDD process (or in the parent when running gtests).
  if ((XRE_IsRDDProcess() ||
       (XRE_IsParentProcess() && PR_GetEnv("MOZ_RUN_GTEST"))) &&
      NS_SUCCEEDED(InitMediaCodecDecoder())) {
    return;
  }
#  endif
}
    675 #endif  // MOZ_USE_HWDECODE
    676 
    677 static bool ShouldEnable8BitConversion(const struct AVCodec* aCodec) {
    678  return 0 == strncmp(aCodec->name, "libdav1d", 8) ||
    679         0 == strncmp(aCodec->name, "vp9", 3);
    680 }
    681 
    682 RefPtr<MediaDataDecoder::InitPromise> FFmpegVideoDecoder<LIBAV_VER>::Init() {
    683  AUTO_PROFILER_LABEL("FFmpegVideoDecoder::Init", MEDIA_PLAYBACK);
    684  FFMPEG_LOG("FFmpegVideoDecoder, init, IsHardwareAccelerated=%d\n",
    685             IsHardwareAccelerated());
    686  // We've finished the HW decoder initialization in the ctor.
    687  if (IsHardwareAccelerated()) {
    688    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
    689  }
    690  MediaResult rv = InitSWDecoder(nullptr);
    691  if (NS_FAILED(rv)) {
    692    return InitPromise::CreateAndReject(rv, __func__);
    693  }
    694  m8BitOutput = m8BitOutput && ShouldEnable8BitConversion(mCodecContext->codec);
    695  if (m8BitOutput) {
    696    FFMPEG_LOG("Enable 8-bit output for %s", mCodecContext->codec->name);
    697    m8BitRecycleBin = MakeRefPtr<BufferRecycleBin>();
    698  }
    699  return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
    700 }
    701 
    702 static gfx::ColorRange GetColorRange(enum AVColorRange& aColorRange) {
    703  return aColorRange == AVCOL_RANGE_JPEG ? gfx::ColorRange::FULL
    704                                         : gfx::ColorRange::LIMITED;
    705 }
    706 
    707 static bool IsYUVFormat(const AVPixelFormat& aFormat) {
    708  return aFormat != AV_PIX_FMT_GBRP && aFormat != AV_PIX_FMT_GBRP10LE;
    709 }
    710 
    711 static gfx::YUVColorSpace TransferAVColorSpaceToColorSpace(
    712    const AVColorSpace aSpace, const AVPixelFormat aFormat,
    713    const gfx::IntSize& aSize) {
    714  if (!IsYUVFormat(aFormat)) {
    715    return gfx::YUVColorSpace::Identity;
    716  }
    717  switch (aSpace) {
    718 #if LIBAVCODEC_VERSION_MAJOR >= 55
    719    case AVCOL_SPC_BT2020_NCL:
    720    case AVCOL_SPC_BT2020_CL:
    721      return gfx::YUVColorSpace::BT2020;
    722 #endif
    723    case AVCOL_SPC_BT709:
    724      return gfx::YUVColorSpace::BT709;
    725    case AVCOL_SPC_SMPTE170M:
    726    case AVCOL_SPC_BT470BG:
    727      return gfx::YUVColorSpace::BT601;
    728    default:
    729      return DefaultColorSpace(aSize);
    730  }
    731 }
    732 
    733 #ifdef CUSTOMIZED_BUFFER_ALLOCATION
    734 static int GetVideoBufferWrapper(struct AVCodecContext* aCodecContext,
    735                                 AVFrame* aFrame, int aFlags) {
    736  auto* decoder =
    737      static_cast<FFmpegVideoDecoder<LIBAV_VER>*>(aCodecContext->opaque);
    738  int rv = decoder->GetVideoBuffer(aCodecContext, aFrame, aFlags);
    739  return rv < 0 ? decoder->GetVideoBufferDefault(aCodecContext, aFrame, aFlags)
    740                : rv;
    741 }
    742 
    743 static void ReleaseVideoBufferWrapper(void* opaque, uint8_t* data) {
    744  if (opaque) {
    745    FFMPEGV_LOG("ReleaseVideoBufferWrapper: PlanarYCbCrImage=%p", opaque);
    746    RefPtr<ImageBufferWrapper> image = static_cast<ImageBufferWrapper*>(opaque);
    747    image->ReleaseBuffer();
    748  }
    749 }
    750 
    751 static bool IsColorFormatSupportedForUsingCustomizedBuffer(
    752    const AVPixelFormat& aFormat) {
    753 #  if XP_WIN
    754  // Currently the web render doesn't support uploading R16 surface, so we can't
    755  // use the shmem texture for 10 bit+ videos which would be uploaded by the
    756  // web render. See Bug 1751498.
    757  return aFormat == AV_PIX_FMT_YUV420P || aFormat == AV_PIX_FMT_YUVJ420P ||
    758         aFormat == AV_PIX_FMT_YUV444P || aFormat == AV_PIX_FMT_YUVJ444P;
    759 #  else
    760  // For now, we only support for YUV420P, YUVJ420P, YUV444P and YUVJ444P which
    761  // are the only non-HW accelerated format supported by FFmpeg's H264 and VP9
    762  // decoder.
    763  return aFormat == AV_PIX_FMT_YUV420P || aFormat == AV_PIX_FMT_YUVJ420P ||
    764         aFormat == AV_PIX_FMT_YUV420P10LE ||
    765         aFormat == AV_PIX_FMT_YUV420P12LE || aFormat == AV_PIX_FMT_YUV444P ||
    766         aFormat == AV_PIX_FMT_YUVJ444P || aFormat == AV_PIX_FMT_YUV444P10LE ||
    767         aFormat == AV_PIX_FMT_YUV444P12LE;
    768 #  endif
    769 }
    770 
    771 static bool IsYUV420Sampling(const AVPixelFormat& aFormat) {
    772  return aFormat == AV_PIX_FMT_YUV420P || aFormat == AV_PIX_FMT_YUVJ420P ||
    773         aFormat == AV_PIX_FMT_YUV420P10LE || aFormat == AV_PIX_FMT_YUV420P12LE;
    774 }
    775 
    776 #  if defined(MOZ_WIDGET_GTK)
    777 bool FFmpegVideoDecoder<LIBAV_VER>::IsLinuxHDR() const {
    778  if (!mInfo.mColorPrimaries || !mInfo.mTransferFunction) {
    779    return false;
    780  }
    781  return mInfo.mColorPrimaries.value() == gfx::ColorSpace2::BT2020 &&
    782         (mInfo.mTransferFunction.value() == gfx::TransferFunction::PQ ||
    783          mInfo.mTransferFunction.value() == gfx::TransferFunction::HLG);
    784 }
    785 #  endif
    786 
// Sizes and allocates a shmem buffer for aImage matching the codec's padded
// (aligned) plane dimensions, then returns the TextureClient wrapping it, or
// nullptr if the buffer could not be created. Called from GetVideoBuffer()
// so FFmpeg can decode directly into shared memory.
layers::TextureClient*
FFmpegVideoDecoder<LIBAV_VER>::AllocateTextureClientForImage(
    struct AVCodecContext* aCodecContext, PlanarYCbCrImage* aImage) {
  MOZ_ASSERT(
      IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt));

  // FFmpeg will store images with color depth > 8 bits in 16 bits with extra
  // padding.
  const int32_t bytesPerChannel =
      GetColorDepth(aCodecContext->pix_fmt) == gfx::ColorDepth::COLOR_8 ? 1 : 2;

  // If adjusted Ysize is larger than the actual image size (coded_width *
  // coded_height), that means ffmpeg decoder needs extra padding on both width
  // and height. If that happens, the planes will need to be cropped later in
  // order to avoid visible incorrect border on the right and bottom of the
  // actual image.
  //
  // Here are examples of various sizes video in YUV420P format, the width and
  // height would need to be adjusted in order to align padding.
  //
  // Eg1. video (1920*1080)
  // plane Y
  // width 1920 height 1080 -> adjusted-width 1920 adjusted-height 1088
  // plane Cb/Cr
  // width 960  height  540 -> adjusted-width 1024 adjusted-height 544
  //
  // Eg2. video (2560*1440)
  // plane Y
  // width 2560 height 1440 -> adjusted-width 2560 adjusted-height 1440
  // plane Cb/Cr
  // width 1280 height  720 -> adjusted-width 1280 adjusted-height 736
  layers::PlanarYCbCrData data;
  const auto yDims =
      gfx::IntSize{aCodecContext->coded_width, aCodecContext->coded_height};
  auto paddedYSize = yDims;
  // Let FFmpeg round the Y plane up to its required alignment.
  mLib->avcodec_align_dimensions(aCodecContext, &paddedYSize.width,
                                 &paddedYSize.height);
  data.mYStride = paddedYSize.Width() * bytesPerChannel;

  MOZ_ASSERT(
      IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt));
  // Chroma planes start at Y dimensions and are halved only for 4:2:0
  // formats; 4:4:4 keeps full-size chroma.
  auto uvDims = yDims;
  if (IsYUV420Sampling(aCodecContext->pix_fmt)) {
    uvDims.width = (uvDims.width + 1) / 2;
    uvDims.height = (uvDims.height + 1) / 2;
    data.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
  }
  auto paddedCbCrSize = uvDims;
  mLib->avcodec_align_dimensions(aCodecContext, &paddedCbCrSize.width,
                                 &paddedCbCrSize.height);
  data.mCbCrStride = paddedCbCrSize.Width() * bytesPerChannel;

  // Setting other attributes
  data.mPictureRect = gfx::IntRect(
      mInfo.ScaledImageRect(aCodecContext->width, aCodecContext->height)
          .TopLeft(),
      gfx::IntSize(aCodecContext->width, aCodecContext->height));
  data.mStereoMode = mInfo.mStereoMode;
  // Prefer the color space signalled by the bitstream; fall back to the
  // container metadata, then to a size-based default.
  if (aCodecContext->colorspace != AVCOL_SPC_UNSPECIFIED) {
    data.mYUVColorSpace = TransferAVColorSpaceToColorSpace(
        aCodecContext->colorspace, aCodecContext->pix_fmt,
        data.mPictureRect.Size());
  } else {
    data.mYUVColorSpace = mInfo.mColorSpace
                              ? *mInfo.mColorSpace
                              : DefaultColorSpace(data.mPictureRect.Size());
  }
  data.mColorDepth = GetColorDepth(aCodecContext->pix_fmt);
  data.mColorRange = GetColorRange(aCodecContext->color_range);

  FFMPEG_LOGV(
      "Created plane data, YSize=(%d, %d), CbCrSize=(%d, %d), "
      "CroppedYSize=(%d, %d), CroppedCbCrSize=(%d, %d), ColorDepth=%hhu",
      paddedYSize.Width(), paddedYSize.Height(), paddedCbCrSize.Width(),
      paddedCbCrSize.Height(), data.YPictureSize().Width(),
      data.YPictureSize().Height(), data.CbCrPictureSize().Width(),
      data.CbCrPictureSize().Height(), static_cast<uint8_t>(data.mColorDepth));

  // Allocate a shmem buffer for image.
  if (NS_FAILED(aImage->CreateEmptyBuffer(data, paddedYSize, paddedCbCrSize))) {
    return nullptr;
  }
  return aImage->GetTextureClient(mImageAllocator);
}
    871 
// get_buffer2 implementation: tries to point FFmpeg's decode target directly
// at a shmem-backed PlanarYCbCrImage so decoded frames can be shared with
// the compositor without an extra copy. Returns 0 on success or a negative
// AVERROR, in which case the wrapper falls back to FFmpeg's default
// allocator (see GetVideoBufferWrapper above).
int FFmpegVideoDecoder<LIBAV_VER>::GetVideoBuffer(
    struct AVCodecContext* aCodecContext, AVFrame* aFrame, int aFlags) {
  FFMPEG_LOGV("GetVideoBuffer: aCodecContext=%p aFrame=%p", aCodecContext,
              aFrame);
  if (!StaticPrefs::media_ffmpeg_customized_buffer_allocation()) {
    return AVERROR(EINVAL);
  }

  // A previous attempt already concluded shmem buffers can't be used for
  // this decode; don't keep retrying.
  if (mIsUsingShmemBufferForDecode && !*mIsUsingShmemBufferForDecode) {
    return AVERROR(EINVAL);
  }

  // Codec doesn't support custom allocator.
  if (!(aCodecContext->codec->capabilities & AV_CODEC_CAP_DR1)) {
    return AVERROR(EINVAL);
  }

  // Pre-allocation is only for sw decoding. During decoding, ffmpeg decoder
  // will need to reference decoded frames, if those frames are on shmem buffer,
  // then it would cause a need to read CPU data from GPU, which is slow.
  if (IsHardwareAccelerated()) {
    return AVERROR(EINVAL);
  }

#  if defined(MOZ_WIDGET_GTK) && defined(MOZ_USE_HWDECODE)
  if (mUploadSWDecodeToDMABuf) {
    FFMPEG_LOG("DMABuf upload doesn't use shm buffers");
    return AVERROR(EINVAL);
  }
#  endif

  if (!IsColorFormatSupportedForUsingCustomizedBuffer(aCodecContext->pix_fmt)) {
    FFMPEG_LOG("Not support color format %d", aCodecContext->pix_fmt);
    return AVERROR(EINVAL);
  }

  if (aCodecContext->lowres != 0) {
    FFMPEG_LOG("Not support low resolution decoding");
    return AVERROR(EINVAL);
  }

  const gfx::IntSize size(aCodecContext->width, aCodecContext->height);
  int rv = mLib->av_image_check_size(size.Width(), size.Height(), 0, nullptr);
  if (rv < 0) {
    FFMPEG_LOG("Invalid image size");
    return rv;
  }

  // Total byte size FFmpeg needs for a frame of this format with 32-byte
  // plane alignment; CheckedInt32 guards against overflow.
  CheckedInt32 dataSize = mLib->av_image_get_buffer_size(
      aCodecContext->pix_fmt, aCodecContext->coded_width,
      aCodecContext->coded_height, 32);
  if (!dataSize.isValid()) {
    FFMPEG_LOG("Data size overflow!");
    return AVERROR(EINVAL);
  }

  if (!mImageContainer) {
    FFMPEG_LOG("No Image container!");
    return AVERROR(EINVAL);
  }

  RefPtr<PlanarYCbCrImage> image = mImageContainer->CreatePlanarYCbCrImage();
  if (!image) {
    FFMPEG_LOG("Failed to create YCbCr image");
    return AVERROR(EINVAL);
  }
  image->SetColorDepth(mInfo.mColorDepth);

  RefPtr<layers::TextureClient> texture =
      AllocateTextureClientForImage(aCodecContext, image);
  if (!texture) {
    FFMPEG_LOG("Failed to allocate a texture client");
    return AVERROR(EINVAL);
  }

  if (!texture->Lock(layers::OpenMode::OPEN_WRITE)) {
    FFMPEG_LOG("Failed to lock the texture");
    return AVERROR(EINVAL);
  }
  auto autoUnlock = MakeScopeExit([&] { texture->Unlock(); });

  layers::MappedYCbCrTextureData mapped;
  if (!texture->BorrowMappedYCbCrData(mapped)) {
    FFMPEG_LOG("Failed to borrow mapped data for the texture");
    return AVERROR(EINVAL);
  }

  // Point the AVFrame's plane pointers and strides at the mapped shmem.
  aFrame->data[0] = mapped.y.data;
  aFrame->data[1] = mapped.cb.data;
  aFrame->data[2] = mapped.cr.data;

  aFrame->linesize[0] = mapped.y.stride;
  aFrame->linesize[1] = mapped.cb.stride;
  aFrame->linesize[2] = mapped.cr.stride;

  aFrame->width = aCodecContext->coded_width;
  aFrame->height = aCodecContext->coded_height;
  aFrame->format = aCodecContext->pix_fmt;
  aFrame->extended_data = aFrame->data;
#  if LIBAVCODEC_VERSION_MAJOR < 61
  aFrame->reordered_opaque = aCodecContext->reordered_opaque;
#  endif
  MOZ_ASSERT(aFrame->data[0] && aFrame->data[1] && aFrame->data[2]);

  // This will hold a reference to image, and the reference would be dropped
  // when ffmpeg tells us that the buffer is no longer needed.
  auto imageWrapper = MakeRefPtr<ImageBufferWrapper>(image.get(), this);
  aFrame->buf[0] =
      mLib->av_buffer_create(aFrame->data[0], dataSize.value(),
                             ReleaseVideoBufferWrapper, imageWrapper.get(), 0);
  if (!aFrame->buf[0]) {
    FFMPEG_LOG("Failed to allocate buffer");
    return AVERROR(EINVAL);
  }

  FFMPEG_LOG("Created av buffer, buf=%p, data=%p, image=%p, sz=%d",
             aFrame->buf[0], aFrame->data[0], imageWrapper.get(),
             dataSize.value());
  mAllocatedImages.Insert(imageWrapper.get());
  mIsUsingShmemBufferForDecode = Some(true);
  return 0;
}
    994 #endif
    995 
// Configures the software-decode AVCodecContext: frame dimensions, thread
// count (mirroring libvpx's heuristic), low-latency flags, pixel-format
// negotiation callback, and optionally the custom shmem buffer allocator.
void FFmpegVideoDecoder<LIBAV_VER>::InitCodecContext() {
  mCodecContext->width = mInfo.mImage.width;
  mCodecContext->height = mInfo.mImage.height;

  // We use the same logic as libvpx in determining the number of threads to use
  // so that we end up behaving in the same fashion when using ffmpeg as
  // we would otherwise cause various crashes (see bug 1236167)
  int decode_threads = 1;
  if (mInfo.mDisplay.width >= 2048) {
    decode_threads = 8;
  } else if (mInfo.mDisplay.width >= 1024) {
    decode_threads = 4;
  } else if (mInfo.mDisplay.width >= 320) {
    decode_threads = 2;
  }

  if (mLowLatency) {
    mCodecContext->flags |= AV_CODEC_FLAG_LOW_DELAY;
    // ffvp9 and ffvp8 at this stage do not support slice threading, but it may
    // help with the h264 decoder if there's ever one.
    mCodecContext->thread_type = FF_THREAD_SLICE;
  } else {
    // Cap the thread count at (cores - 1), but never drop below one thread.
    decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors() - 1);
    decode_threads = std::max(decode_threads, 1);
    mCodecContext->thread_count = decode_threads;
    if (decode_threads > 1) {
      mCodecContext->thread_type = FF_THREAD_SLICE | FF_THREAD_FRAME;
    }
  }

  // FFmpeg will call back to this to negotiate a video pixel format.
  mCodecContext->get_format = ChoosePixelFormat;
#ifdef CUSTOMIZED_BUFFER_ALLOCATION
  FFMPEG_LOG("Set get_buffer2 for customized buffer allocation");
  mCodecContext->get_buffer2 = GetVideoBufferWrapper;
  // opaque lets the static wrapper recover this decoder instance.
  mCodecContext->opaque = this;
#  if FF_API_THREAD_SAFE_CALLBACKS
  mCodecContext->thread_safe_callbacks = 1;
#  endif
#endif
}
   1037 
   1038 nsCString FFmpegVideoDecoder<LIBAV_VER>::GetCodecName() const {
   1039 #if LIBAVCODEC_VERSION_MAJOR > 53
   1040  return nsCString(mLib->avcodec_descriptor_get(mCodecID)->name);
   1041 #else
   1042  return nsLiteralCString("FFmpegAudioDecoder");
   1043 #endif
   1044 }
   1045 
   1046 #ifdef MOZ_USE_HWDECODE
   1047 void FFmpegVideoDecoder<LIBAV_VER>::InitHWCodecContext(ContextType aType) {
   1048  mCodecContext->width = mInfo.mImage.width;
   1049  mCodecContext->height = mInfo.mImage.height;
   1050  mCodecContext->thread_count = 1;
   1051 
   1052  switch (aType) {
   1053    case ContextType::V4L2:
   1054      mCodecContext->get_format = ChooseV4L2PixelFormat;
   1055      break;
   1056    case ContextType::VAAPI:
   1057      mCodecContext->get_format = ChooseVAAPIPixelFormat;
   1058      break;
   1059    case ContextType::D3D11VA:
   1060      MOZ_DIAGNOSTIC_ASSERT(aType == ContextType::D3D11VA);
   1061      mCodecContext->get_format = ChooseD3D11VAPixelFormat;
   1062      break;
   1063    case ContextType::MediaCodec:
   1064      mCodecContext->get_format = ChooseMediaCodecPixelFormat;
   1065      break;
   1066    default:
   1067      break;
   1068  }
   1069 
   1070  if (mCodecID == AV_CODEC_ID_H264) {
   1071    mCodecContext->extra_hw_frames =
   1072        H264::ComputeMaxRefFrames(mInfo.mExtraData);
   1073  } else if (mCodecID == AV_CODEC_ID_HEVC) {
   1074    mCodecContext->extra_hw_frames =
   1075        H265::ComputeMaxRefFrames(mInfo.mExtraData);
   1076  } else {
   1077    mCodecContext->extra_hw_frames = EXTRA_HW_FRAMES;
   1078  }
   1079  if (mLowLatency) {
   1080    mCodecContext->flags |= AV_CODEC_FLAG_LOW_DELAY;
   1081  }
   1082 }
   1083 #endif
   1084 
// Returns the presentation timestamp of a decoded frame. Newer FFmpeg
// exposes it directly as AVFrame::pts; older versions only propagated the
// originating packet's pts via pkt_pts.
static int64_t GetFramePts(const AVFrame* aFrame) {
#if LIBAVCODEC_VERSION_MAJOR > 57
  return aFrame->pts;
#else
  return aFrame->pkt_pts;
#endif
}
   1092 
// Whether the decoded frame is a keyframe. FFmpeg 62+ replaced the
// deprecated AVFrame::key_frame field with AV_FRAME_FLAG_KEY.
static bool IsKeyFrame(const AVFrame* aFrame) {
#if LIBAVCODEC_VERSION_MAJOR > 61
  return !!(aFrame->flags & AV_FRAME_FLAG_KEY);
#else
  return !!aFrame->key_frame;
#endif
}
   1100 
#if LIBAVCODEC_VERSION_MAJOR >= 58
// Marks the beginning of a decode operation; UpdateDecodeTimes() measures
// elapsed wall-clock time from this point.
void FFmpegVideoDecoder<LIBAV_VER>::DecodeStats::DecodeStart() {
  mDecodeStart = TimeStamp::Now();
}
   1105 
// True once more than mMaxLateDecodedFrames frames have taken longer to
// decode than they take to play; DoDecode() uses this to fall back from HW
// to SW decoding.
bool FFmpegVideoDecoder<LIBAV_VER>::DecodeStats::IsDecodingSlow() const {
  return mDecodedFramesLate > mMaxLateDecodedFrames;
}
   1109 
   1110 void FFmpegVideoDecoder<LIBAV_VER>::DecodeStats::UpdateDecodeTimes(
   1111    int64_t aDuration) {
   1112  TimeStamp now = TimeStamp::Now();
   1113  float decodeTime = (now - mDecodeStart).ToMilliseconds();
   1114  mDecodeStart = now;
   1115 
   1116  const float frameDuration = aDuration / 1000.0f;
   1117  if (frameDuration <= 0.0f) {
   1118    FFMPEGV_LOG("Incorrect frame duration, skipping decode stats.");
   1119    return;
   1120  }
   1121 
   1122  mDecodedFrames++;
   1123  mAverageFrameDuration =
   1124      (mAverageFrameDuration * (mDecodedFrames - 1) + frameDuration) /
   1125      mDecodedFrames;
   1126  mAverageFrameDecodeTime =
   1127      (mAverageFrameDecodeTime * (mDecodedFrames - 1) + decodeTime) /
   1128      mDecodedFrames;
   1129 
   1130  FFMPEGV_LOG(
   1131      "Frame decode takes %.2f ms average decode time %.2f ms frame duration "
   1132      "%.2f average frame duration %.2f decoded %d frames\n",
   1133      decodeTime, mAverageFrameDecodeTime, frameDuration, mAverageFrameDuration,
   1134      mDecodedFrames);
   1135 
   1136  // Frame duration and frame decode times may vary and may not
   1137  // neccessarily lead to video playback failure.
   1138  //
   1139  // Checks frame decode time and recent frame duration and also
   1140  // frame decode time and average frame duration (video fps).
   1141  //
   1142  // Log a problem only if both indicators fails.
   1143  if (decodeTime > frameDuration && decodeTime > mAverageFrameDuration) {
   1144    PROFILER_MARKER_TEXT("FFmpegVideoDecoder::DoDecode", MEDIA_PLAYBACK, {},
   1145                         "frame decode takes too long");
   1146    mDecodedFramesLate++;
   1147    mLastDelayedFrameNum = mDecodedFrames;
   1148    FFMPEGV_LOG("  slow decode: failed to decode in time (decoded late %d)",
   1149                mDecodedFramesLate);
   1150  } else if (mLastDelayedFrameNum) {
   1151    // Reset mDecodedFramesLate in case of correct decode during
   1152    // mDelayedFrameReset period.
   1153    float correctPlaybackTime =
   1154        (mDecodedFrames - mLastDelayedFrameNum) * mAverageFrameDuration;
   1155    if (correctPlaybackTime > mDelayedFrameReset) {
   1156      FFMPEGV_LOG("  mLastFramePts reset due to seamless decode period");
   1157      mDecodedFramesLate = 0;
   1158      mLastDelayedFrameNum = 0;
   1159    }
   1160  }
   1161 }
   1162 #endif
   1163 
// Feeds one compressed sample (or a null "drain" packet when aData is null)
// to FFmpeg and appends every decoded frame to aResults. Returns
// NS_ERROR_DOM_MEDIA_END_OF_STREAM once the codec is fully drained, and
// NS_ERROR_NOT_AVAILABLE when draining must be resumed later (output buffer
// shortage). *aGotFrame (if non-null) reports whether at least one frame
// was produced.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::DoDecode(
    MediaRawData* aSample, uint8_t* aData, int aSize, bool* aGotFrame,
    MediaDataDecoder::DecodedData& aResults) {
  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
  AVPacket* packet;

#if LIBAVCODEC_VERSION_MAJOR >= 61
  // Newer FFmpeg requires heap-allocated packets; freed on scope exit.
  packet = mLib->av_packet_alloc();
  auto raii = MakeScopeExit([&]() { mLib->av_packet_free(&packet); });
#else
  AVPacket packet_mem;
  packet = &packet_mem;
  mLib->av_init_packet(packet);
#endif

#if LIBAVCODEC_VERSION_MAJOR >= 58
  mDecodeStats.DecodeStart();
#endif

  // Wrap the sample's payload and timing into the AVPacket (no copy).
  packet->data = aData;
  packet->size = aSize;
  packet->dts = aSample->mTimecode.ToMicroseconds();
  packet->pts = aSample->mTime.ToMicroseconds();
  packet->flags = aSample->mKeyframe ? AV_PKT_FLAG_KEY : 0;
  packet->pos = aSample->mOffset;

  // Start a profiler stage for this packet describing codec, keyframe-ness
  // and HW vs. SW decoding, when media telemetry tracks this decoder.
  mTrackingId.apply([&](const auto& aId) {
    MediaInfoFlag flag = MediaInfoFlag::None;
    flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
                                : MediaInfoFlag::NonKeyFrame);
    flag |= (IsHardwareAccelerated() ? MediaInfoFlag::HardwareDecoding
                                     : MediaInfoFlag::SoftwareDecoding);
    switch (mCodecID) {
      case AV_CODEC_ID_H264:
        flag |= MediaInfoFlag::VIDEO_H264;
        break;
#if LIBAVCODEC_VERSION_MAJOR >= 54
      case AV_CODEC_ID_VP8:
        flag |= MediaInfoFlag::VIDEO_VP8;
        break;
#endif
#if LIBAVCODEC_VERSION_MAJOR >= 55
      case AV_CODEC_ID_VP9:
        flag |= MediaInfoFlag::VIDEO_VP9;
        break;
      case AV_CODEC_ID_HEVC:
        flag |= MediaInfoFlag::VIDEO_HEVC;
        break;
#endif
#ifdef FFMPEG_AV1_DECODE
      case AV_CODEC_ID_AV1:
        flag |= MediaInfoFlag::VIDEO_AV1;
        break;
#endif
      default:
        break;
    }
    mPerformanceRecorder.Start(
        packet->dts,
        nsPrintfCString("FFmpegVideoDecoder(%d)", LIBAVCODEC_VERSION_MAJOR),
        aId, flag);
  });

#if defined(MOZ_WIDGET_ANDROID) && defined(USING_MOZFFVPX)
  MediaResult ret = MaybeAttachCryptoInfo(aSample, packet);
  if (NS_FAILED(ret)) {
    return ret;
  }
#endif

#ifdef MOZ_FFMPEG_USE_INPUT_INFO_MAP
#  ifdef MOZ_WIDGET_ANDROID
  if (IsHardwareAccelerated())
#  endif
  {
    InsertInputInfo(aSample);
  }
#endif

#if LIBAVCODEC_VERSION_MAJOR >= 58
#  ifdef MOZ_WIDGET_ANDROID
  if (!aData) {
    mShouldResumeDrain = true;
  }
#  endif
  // Only send the packet if it carries data, or if we haven't yet put the
  // codec into draining state with a null packet.
  if (aData || !mHasSentDrainPacket) {
    packet->duration = aSample->mDuration.ToMicroseconds();
    int res = mLib->avcodec_send_packet(mCodecContext, packet);
    if (res < 0) {
      // In theory, avcodec_send_packet could sent -EAGAIN should its internal
      // buffers be full. In practice this can't happen as we only feed one
      // frame at a time, and we immediately call avcodec_receive_frame right
      // after.
      char errStr[AV_ERROR_MAX_STRING_SIZE];
      mLib->av_strerror(res, errStr, AV_ERROR_MAX_STRING_SIZE);
      FFMPEG_LOG("avcodec_send_packet error: %s", errStr);
      nsresult rv;
      if (res == int(AVERROR_EOF)) {
        rv = MaybeQueueDrain(aResults) ? NS_ERROR_DOM_MEDIA_END_OF_STREAM
                                       : NS_ERROR_NOT_AVAILABLE;
      } else {
        rv = NS_ERROR_DOM_MEDIA_DECODE_ERR;
      }
      return MediaResult(
          rv, RESULT_DETAIL("avcodec_send_packet error: %s", errStr));
    }
  }
  if (!aData) {
    // On some platforms (e.g. Android), there are a limited number of output
    // buffers available. When draining, we may reach this limit, so we must
    // return what we have, and allow the caller to try again. We don't need to
    // resend the null packet in that case since the codec is still in the
    // draining state.
    mHasSentDrainPacket = true;
  }
  if (aGotFrame) {
    *aGotFrame = false;
  }
  // Pull every frame the codec has ready; the loop exits via EAGAIN (needs
  // more input), EOF (fully drained), or a decode error.
  do {
    if (!PrepareFrame()) {
      NS_WARNING("FFmpeg decoder failed to allocate frame.");
      return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
    }

#  if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
    // Release unused VA-API surfaces before avcodec_receive_frame() as
    // ffmpeg recycles VASurface for HW decoding.
    if (mVideoFramePool) {
      mVideoFramePool->ReleaseUnusedVAAPIFrames();
    }
#  endif

    int res = mLib->avcodec_receive_frame(mCodecContext, mFrame);
    // Byte offset of the frame in the stream; the source field moved
    // between FFmpeg versions.
    int64_t fpos =
#  if LIBAVCODEC_VERSION_MAJOR > 61
        packet->pos;
#  else
        mFrame->pkt_pos;
#  endif
    if (res == int(AVERROR_EOF)) {
      if (MaybeQueueDrain(aResults)) {
        FFMPEG_LOG("  Output buffer shortage.");
        return NS_ERROR_NOT_AVAILABLE;
      }
      FFMPEG_LOG("  End of stream.");
      return NS_ERROR_DOM_MEDIA_END_OF_STREAM;
    }
    if (res == AVERROR(EAGAIN)) {
      // The codec needs more input before it can emit another frame.
      return NS_OK;
    }
    if (res < 0) {
      char errStr[AV_ERROR_MAX_STRING_SIZE];
      mLib->av_strerror(res, errStr, AV_ERROR_MAX_STRING_SIZE);
      FFMPEG_LOG("  avcodec_receive_frame error: %s", errStr);
      return MediaResult(
          NS_ERROR_DOM_MEDIA_DECODE_ERR,
          RESULT_DETAIL("avcodec_receive_frame error: %s", errStr));
    }

    MediaResult rv;
#  ifdef MOZ_USE_HWDECODE
    if (IsHardwareAccelerated()) {
#    ifdef MOZ_WIDGET_GTK
      mDecodeStats.UpdateDecodeTimes(Duration(mFrame));
      if (mDecodeStats.IsDecodingSlow() &&
          !StaticPrefs::media_ffmpeg_disable_software_fallback()) {
        PROFILER_MARKER_TEXT("FFmpegVideoDecoder::DoDecode", MEDIA_PLAYBACK, {},
                             "Fallback to SW decode");
        FFMPEG_LOG("  HW decoding is slow, switching back to SW decode");
        return MediaResult(
            NS_ERROR_DOM_MEDIA_DECODE_ERR,
            RESULT_DETAIL("HW decoding is slow, switching back to SW decode"));
      }
      if (mUsingV4L2) {
        rv = CreateImageV4L2(fpos, GetFramePts(mFrame), Duration(mFrame),
                             aResults);
      } else {
        rv = CreateImageVAAPI(fpos, GetFramePts(mFrame), Duration(mFrame),
                              aResults);
      }

      // If VA-API/V4L2 playback failed, just quit. Decoder is going to be
      // restarted without hardware acceleration
      if (NS_FAILED(rv)) {
        // Explicitly remove dmabuf surface pool as it's configured
        // for VA-API/V4L2 support.
        mVideoFramePool = nullptr;
        return rv;
      }
#    elif defined(MOZ_ENABLE_D3D11VA)
      mDecodeStats.UpdateDecodeTimes(Duration(mFrame));
      rv = CreateImageD3D11(fpos, GetFramePts(mFrame), Duration(mFrame),
                            aResults);
#    elif defined(MOZ_WIDGET_ANDROID)
      InputInfo info(aSample);
      info.mTimecode = -1;
      TakeInputInfo(mFrame, info);
      mDecodeStats.UpdateDecodeTimes(info.mDuration);
      rv = CreateImageMediaCodec(fpos, GetFramePts(mFrame), info.mTimecode,
                                 info.mDuration, aResults);
#    else
      mDecodeStats.UpdateDecodeTimes(Duration(mFrame));
      return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                         RESULT_DETAIL("No HW decoding implementation!"));
#    endif
    } else
#  endif
    {
      // Software path: wrap the decoded planes into a VideoData image.
      mDecodeStats.UpdateDecodeTimes(Duration(mFrame));
      rv = CreateImage(fpos, GetFramePts(mFrame), Duration(mFrame), aResults);
    }
    if (NS_FAILED(rv)) {
      return rv;
    }

    RecordFrame(aSample, aResults.LastElement());
    if (aGotFrame) {
      *aGotFrame = true;
    }
  } while (true);
#else
  // Legacy (libavcodec < 58) synchronous decode path: one packet in, at
  // most one frame out via avcodec_decode_video2().
  if (!PrepareFrame()) {
    NS_WARNING("FFmpeg decoder failed to allocate frame.");
    return MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__);
  }

  // Required with old version of FFmpeg/LibAV
  mFrame->reordered_opaque = AV_NOPTS_VALUE;

  int decoded;
  int bytesConsumed =
      mLib->avcodec_decode_video2(mCodecContext, mFrame, &decoded, packet);

  FFMPEG_LOG(
      "DoDecodeFrame:decode_video: rv=%d decoded=%d "
      "(Input: pts(%" PRId64 ") dts(%" PRId64 ") Output: pts(%" PRId64
      ") "
      "opaque(%" PRId64 ") pts(%" PRId64 ") pkt_dts(%" PRId64 "))",
      bytesConsumed, decoded, packet->pts, packet->dts, mFrame->pts,
      mFrame->reordered_opaque, mFrame->pts, mFrame->pkt_dts);

  if (bytesConsumed < 0) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("FFmpeg video error: %d", bytesConsumed));
  }

  if (!decoded) {
    if (aGotFrame) {
      *aGotFrame = false;
    }
    return NS_OK;
  }

  // If we've decoded a frame then we need to output it
  int64_t pts =
      mPtsContext.GuessCorrectPts(GetFramePts(mFrame), mFrame->pkt_dts);

  InputInfo info(aSample);
  TakeInputInfo(mFrame, info);

  MediaResult rv = CreateImage(aSample->mOffset, pts, info.mDuration, aResults);
  if (NS_FAILED(rv)) {
    return rv;
  }

  mTrackingId.apply(
      [&](const auto&) { RecordFrame(aSample, aResults.LastElement()); });

  if (aGotFrame) {
    *aGotFrame = true;
  }
  return rv;
#endif
}
   1438 
// Records per-frame decode telemetry (resolution, pixel format, color
// metadata, start/end times) into the performance recorder, keyed on the
// output sample's timecode.
void FFmpegVideoDecoder<LIBAV_VER>::RecordFrame(const MediaRawData* aSample,
                                                const MediaData* aData) {
  mPerformanceRecorder.Record(
      aData->mTimecode.ToMicroseconds(), [&](auto& aStage) {
        aStage.SetResolution(mFrame->width, mFrame->height);
        // Map FFmpeg's pixel format onto the recorder's image-format enum;
        // unrecognized formats are simply not recorded.
        auto format = [&]() -> Maybe<DecodeStage::ImageFormat> {
          switch (mCodecContext->pix_fmt) {
            case AV_PIX_FMT_YUV420P:
            case AV_PIX_FMT_YUVJ420P:
            case AV_PIX_FMT_YUV420P10LE:
#if LIBAVCODEC_VERSION_MAJOR >= 57
            case AV_PIX_FMT_YUV420P12LE:
#endif
              return Some(DecodeStage::YUV420P);
            case AV_PIX_FMT_YUV422P:
            case AV_PIX_FMT_YUV422P10LE:
#if LIBAVCODEC_VERSION_MAJOR >= 57
            case AV_PIX_FMT_YUV422P12LE:
#endif
              return Some(DecodeStage::YUV422P);
            case AV_PIX_FMT_YUV444P:
            case AV_PIX_FMT_YUVJ444P:
            case AV_PIX_FMT_YUV444P10LE:
#if LIBAVCODEC_VERSION_MAJOR >= 57
            case AV_PIX_FMT_YUV444P12LE:
#endif
              return Some(DecodeStage::YUV444P);
            case AV_PIX_FMT_GBRP:
            case AV_PIX_FMT_GBRP10LE:
              return Some(DecodeStage::GBRP);
            case AV_PIX_FMT_VAAPI_VLD:
              return Some(DecodeStage::VAAPI_SURFACE);
#ifdef MOZ_ENABLE_D3D11VA
            case AV_PIX_FMT_D3D11:
              return Some(DecodeStage::D3D11_SURFACE);
#endif
            default:
              return Nothing();
          }
        }();
        format.apply([&](auto& aFmt) { aStage.SetImageFormat(aFmt); });
        aStage.SetColorDepth(GetColorDepth(mCodecContext->pix_fmt));
        aStage.SetYUVColorSpace(GetFrameColorSpace());
        aStage.SetColorRange(GetFrameColorRange());
        aStage.SetStartTimeAndEndTime(aSample->mTime.ToMicroseconds(),
                                      aSample->GetEndTime().ToMicroseconds());
      });
}
   1487 
#ifdef MOZ_WIDGET_ANDROID
// Re-runs ProcessDrain() after output buffers were exhausted mid-drain.
// No-op when the drain promise has already been resolved.
void FFmpegVideoDecoder<LIBAV_VER>::ResumeDrain() {
  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());

  if (mDrainPromise.IsEmpty()) {
    FFMPEG_LOGV("Resume drain but promise already fulfilled");
    return;
  }

  FFMPEG_LOGV("Resume drain");
  mShouldResumeDrain = true;
  ProcessDrain();
}
   1501 
   1502 void FFmpegVideoDecoder<LIBAV_VER>::QueueResumeDrain() {
   1503  if (!mShouldResumeDrain.exchange(false)) {
   1504    return;
   1505  }
   1506 
   1507  MOZ_ALWAYS_SUCCEEDS(mTaskQueue->Dispatch(NS_NewRunnableFunction(
   1508      __func__, [self = RefPtr{this}] { self->ResumeDrain(); })));
   1509 }
   1510 #endif
   1511 
// Returns true when draining must be retried later: on Android/MediaCodec
// builds, if no frames were produced and the codec has not yet signalled
// EOS, the caller should come back for more output. Everywhere else (and
// once EOS is reached) returns false.
bool FFmpegVideoDecoder<LIBAV_VER>::MaybeQueueDrain(
    const MediaDataDecoder::DecodedData& aData) {
#if defined(MOZ_WIDGET_ANDROID) && defined(USING_MOZFFVPX)
  if (aData.IsEmpty() && mMediaCodecDeviceContext &&
      !mLib->moz_avcodec_mediacodec_is_eos(mCodecContext)) {
    FFMPEGV_LOG("Schedule drain");
    return true;
  }
  mShouldResumeDrain = false;
#endif
  return false;
}
   1524 
// Maps an FFmpeg software pixel format to its bit depth. For opaque HW
// surface formats (VAAPI/D3D11) the depth comes from the track's MediaInfo
// instead, since the pixel format alone doesn't encode it.
gfx::ColorDepth FFmpegVideoDecoder<LIBAV_VER>::GetColorDepth(
    const AVPixelFormat& aFormat) const {
  switch (aFormat) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
      return gfx::ColorDepth::COLOR_8;
    case AV_PIX_FMT_YUV420P10LE:
    case AV_PIX_FMT_YUV422P10LE:
    case AV_PIX_FMT_YUV444P10LE:
    case AV_PIX_FMT_GBRP10LE:
      return gfx::ColorDepth::COLOR_10;
#if LIBAVCODEC_VERSION_MAJOR >= 57
    case AV_PIX_FMT_YUV420P12LE:
    case AV_PIX_FMT_YUV422P12LE:
    case AV_PIX_FMT_YUV444P12LE:
      return gfx::ColorDepth::COLOR_12;
#endif
#ifdef MOZ_ENABLE_D3D11VA
    case AV_PIX_FMT_D3D11:
#endif
    case AV_PIX_FMT_VAAPI_VLD:
      return mInfo.mColorDepth;
    default:
      MOZ_ASSERT_UNREACHABLE("Not supported format?");
      return gfx::ColorDepth::COLOR_8;
  }
}
   1555 
// Returns the YUV color space of the current frame. Older FFmpeg versions
// expose the value through an accessor function rather than a struct field;
// unspecified values are resolved by TransferAVColorSpaceToColorSpace()
// using the pixel format and frame size.
gfx::YUVColorSpace FFmpegVideoDecoder<LIBAV_VER>::GetFrameColorSpace() const {
  AVColorSpace colorSpace = AVCOL_SPC_UNSPECIFIED;
#if LIBAVCODEC_VERSION_MAJOR > 58
  colorSpace = mFrame->colorspace;
#else
  // The accessor may be absent from very old library builds.
  if (mLib->av_frame_get_colorspace) {
    colorSpace = (AVColorSpace)mLib->av_frame_get_colorspace(mFrame);
  }
#endif
  return TransferAVColorSpaceToColorSpace(
      colorSpace, (AVPixelFormat)mFrame->format,
      gfx::IntSize{mFrame->width, mFrame->height});
}
   1569 
   1570 gfx::ColorSpace2 FFmpegVideoDecoder<LIBAV_VER>::GetFrameColorPrimaries() const {
   1571  AVColorPrimaries colorPrimaries = AVCOL_PRI_UNSPECIFIED;
   1572 #if LIBAVCODEC_VERSION_MAJOR > 57
   1573  colorPrimaries = mFrame->color_primaries;
   1574 #endif
   1575  switch (colorPrimaries) {
   1576 #if LIBAVCODEC_VERSION_MAJOR >= 55
   1577    case AVCOL_PRI_BT2020:
   1578      return gfx::ColorSpace2::BT2020;
   1579 #endif
   1580    case AVCOL_PRI_BT709:
   1581      return gfx::ColorSpace2::BT709;
   1582    default:
   1583      return gfx::ColorSpace2::BT709;
   1584  }
   1585 }
   1586 
// Returns the color range (full vs. limited) of the current frame. Like
// GetFrameColorSpace(), newer libavcodec exposes it on AVFrame directly,
// older versions via an optional accessor.
gfx::ColorRange FFmpegVideoDecoder<LIBAV_VER>::GetFrameColorRange() const {
  AVColorRange range = AVCOL_RANGE_UNSPECIFIED;
#if LIBAVCODEC_VERSION_MAJOR > 58
  range = mFrame->color_range;
#else
  if (mLib->av_frame_get_color_range) {
    range = (AVColorRange)mLib->av_frame_get_color_range(mFrame);
  }
#endif
  return GetColorRange(range);
}
   1598 
   1599 gfx::SurfaceFormat FFmpegVideoDecoder<LIBAV_VER>::GetSurfaceFormat() const {
   1600  switch (mInfo.mColorDepth) {
   1601    case gfx::ColorDepth::COLOR_8:
   1602      return gfx::SurfaceFormat::NV12;
   1603    case gfx::ColorDepth::COLOR_10:
   1604      return gfx::SurfaceFormat::P010;
   1605    default:
   1606      MOZ_ASSERT_UNREACHABLE("Unexpected surface type");
   1607      return gfx::SurfaceFormat::NV12;
   1608  }
   1609 }
   1610 
   1611 #if defined(MOZ_WIDGET_GTK) && defined(MOZ_USE_HWDECODE)
   1612 // Convert AVChromaLocation to
   1613 // wp_color_representation_surface_v1_chroma_location
   1614 static uint32_t AVChromaLocationToWPChromaLocation(uint32_t aAVChromaLocation) {
   1615  switch (aAVChromaLocation) {
   1616    case AVCHROMA_LOC_UNSPECIFIED:
   1617    default:
   1618      return 0;  // No chroma location specified
   1619    case AVCHROMA_LOC_LEFT:
   1620      return 1;
   1621    case AVCHROMA_LOC_CENTER:
   1622      return 2;
   1623    case AVCHROMA_LOC_TOPLEFT:
   1624      return 3;
   1625    case AVCHROMA_LOC_TOP:
   1626      return 4;
   1627    case AVCHROMA_LOC_BOTTOMLEFT:
   1628      return 5;
   1629    case AVCHROMA_LOC_BOTTOM:
   1630      return 6;
   1631  }
   1632 }
   1633 #endif
   1634 
// Wraps the decoded software frame (mFrame) into a VideoData and appends it
// to aResults. Depending on build configuration and runtime state the frame
// is either (a) wrapped zero-copy from the shmem buffer FFmpeg decoded into,
// (b) uploaded into a DMABuf surface (Linux/GTK), or (c) copied into a new
// image via VideoData::CreateAndCopyData as the fallback.
//
// @param aOffset   byte offset of the input sample (passed through).
// @param aPts      presentation time in microseconds.
// @param aDuration frame duration in microseconds.
// @param aResults  receives the created VideoData on success.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImage(
    int64_t aOffset, int64_t aPts, int64_t aDuration,
    MediaDataDecoder::DecodedData& aResults) {
  FFMPEG_LOG("Got one frame output with pts=%" PRId64 " dts=%" PRId64
             " duration=%" PRId64,
             aPts, mFrame->pkt_dts, aDuration);

  // Describe the decoded planes (pointers, strides, dimensions) so the rest
  // of the pipeline doesn't have to know FFmpeg's frame layout.
  VideoData::QuantizableBuffer b;
  b.mPlanes[0].mData = mFrame->data[0];
  b.mPlanes[1].mData = mFrame->data[1];
  b.mPlanes[2].mData = mFrame->data[2];

  b.mPlanes[0].mStride = mFrame->linesize[0];
  b.mPlanes[1].mStride = mFrame->linesize[1];
  b.mPlanes[2].mStride = mFrame->linesize[2];

  b.mPlanes[0].mSkip = 0;
  b.mPlanes[1].mSkip = 0;
  b.mPlanes[2].mSkip = 0;

  b.mPlanes[0].mWidth = mFrame->width;
  b.mPlanes[0].mHeight = mFrame->height;
  // 4:4:4 and planar-RGB formats: chroma planes are full resolution.
  if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV444P ||
      mCodecContext->pix_fmt == AV_PIX_FMT_YUV444P10LE ||
      mCodecContext->pix_fmt == AV_PIX_FMT_GBRP ||
      mCodecContext->pix_fmt == AV_PIX_FMT_GBRP10LE
#if LIBAVCODEC_VERSION_MAJOR >= 57
      || mCodecContext->pix_fmt == AV_PIX_FMT_YUV444P12LE
#endif
  ) {
    b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = mFrame->width;
    b.mPlanes[1].mHeight = b.mPlanes[2].mHeight = mFrame->height;
    if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV444P10LE ||
        mCodecContext->pix_fmt == AV_PIX_FMT_GBRP10LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_10;
    }
#if LIBAVCODEC_VERSION_MAJOR >= 57
    else if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV444P12LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_12;
    }
#endif
  } else if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P ||
             mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P10LE
#if LIBAVCODEC_VERSION_MAJOR >= 57
             || mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P12LE
#endif
  ) {
    // 4:2:2: chroma is halved horizontally only.
    b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
    b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = (mFrame->width + 1) >> 1;
    b.mPlanes[1].mHeight = b.mPlanes[2].mHeight = mFrame->height;
    if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P10LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_10;
    }
#if LIBAVCODEC_VERSION_MAJOR >= 57
    else if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV422P12LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_12;
    }
#endif
  } else {
    // Default: 4:2:0, chroma halved in both dimensions.
    b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
    b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = (mFrame->width + 1) >> 1;
    b.mPlanes[1].mHeight = b.mPlanes[2].mHeight = (mFrame->height + 1) >> 1;
    if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV420P10LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_10;
    }
#if LIBAVCODEC_VERSION_MAJOR >= 57
    else if (mCodecContext->pix_fmt == AV_PIX_FMT_YUV420P12LE) {
      b.mColorDepth = gfx::ColorDepth::COLOR_12;
    }
#endif
  }
  b.mYUVColorSpace = GetFrameColorSpace();
  b.mColorRange = GetFrameColorRange();

  RefPtr<VideoData> v;
#ifdef CUSTOMIZED_BUFFER_ALLOCATION
  // Some platforms cannot use the shmem buffer directly and must fall
  // through to the copying path below.
  bool requiresCopy = false;
#  ifdef XP_MACOSX
  // Bug 1765388: macOS needs to generate a MacIOSurfaceImage in order to
  // properly display HDR video. The later call to ::CreateAndCopyData does
  // that. If this shared memory buffer path also generated a
  // MacIOSurfaceImage, then we could use it for HDR.
  requiresCopy = (b.mColorDepth != gfx::ColorDepth::COLOR_8);
#  endif
#  ifdef MOZ_WIDGET_ANDROID
  // Some Android devices can only render 8-bit images and cannot use high
  // bit-depth decoded data directly.
  requiresCopy = m8BitOutput && b.mColorDepth != gfx::ColorDepth::COLOR_8;
#  endif
  // Zero-copy path: the frame buffer already lives in shared memory; wrap
  // the ImageBufferWrapper that was attached to the AVBuffer at allocation
  // time instead of copying the pixels.
  if (mIsUsingShmemBufferForDecode && *mIsUsingShmemBufferForDecode &&
      !requiresCopy) {
    RefPtr<ImageBufferWrapper> wrapper = static_cast<ImageBufferWrapper*>(
        mLib->av_buffer_get_opaque(mFrame->buf[0]));
    MOZ_ASSERT(wrapper);
    FFMPEG_LOGV("Create a video data from a shmem image=%p", wrapper.get());
    v = VideoData::CreateFromImage(
        mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
        TimeUnit::FromMicroseconds(aDuration), wrapper->AsImage(),
        IsKeyFrame(mFrame), TimeUnit::FromMicroseconds(-1));
  }
#endif
#if defined(MOZ_WIDGET_GTK) && defined(MOZ_USE_HWDECODE)
  // Linux upload path: copy the software frame into a pooled DMABuf surface
  // so the compositor can consume it directly.
  if (mUploadSWDecodeToDMABuf) {
    MOZ_DIAGNOSTIC_ASSERT(!v);
    if (!mVideoFramePool) {
      mVideoFramePool = MakeUnique<VideoFramePool<LIBAV_VER>>(10);
    }
    const auto yuvData = layers::PlanarYCbCrData::From(b);
    if (yuvData) {
      auto surface =
          mVideoFramePool->GetVideoFrameSurface(*yuvData, mCodecContext);
      if (surface) {
        // Propagate colorimetry onto the surface; primaries/transfer are
        // optional in the config and only set when known.
        surface->SetYUVColorSpace(GetFrameColorSpace());
        surface->SetColorRange(GetFrameColorRange());
        if (mInfo.mColorPrimaries) {
          surface->SetColorPrimaries(mInfo.mColorPrimaries.value());
        }
        if (mInfo.mTransferFunction) {
          surface->SetTransferFunction(mInfo.mTransferFunction.value());
        }
        surface->SetWPChromaLocation(
            AVChromaLocationToWPChromaLocation(mFrame->chroma_location));
        FFMPEG_LOGV(
            "Uploaded frame DMABuf surface UID %d HDR %d color space %s/%s "
            "transfer %s",
            surface->GetDMABufSurface()->GetUID(), IsLinuxHDR(),
            YUVColorSpaceToString(GetFrameColorSpace()),
            mInfo.mColorPrimaries
                ? ColorSpace2ToString(mInfo.mColorPrimaries.value())
                : "unknown",
            mInfo.mTransferFunction
                ? TransferFunctionToString(mInfo.mTransferFunction.value())
                : "unknown");
        v = VideoData::CreateFromImage(
            mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
            TimeUnit::FromMicroseconds(aDuration), surface->GetAsImage(),
            IsKeyFrame(mFrame), TimeUnit::FromMicroseconds(-1));
      } else {
        // Upload failed: leave v null so the copying fallback runs.
        FFMPEG_LOG("Failed to uploaded video data to DMABuf");
      }
    } else {
      FFMPEG_LOG("Failed to convert PlanarYCbCrData");
    }
  }
#endif
  // Fallback: copy the planes into a freshly allocated image.
  if (!v) {
    // Down-convert to 8 bits per channel first if the platform requires it.
    if (m8BitOutput && b.mColorDepth != gfx::ColorDepth::COLOR_8) {
      MediaResult ret = b.To8BitPerChannel(m8BitRecycleBin);
      if (NS_FAILED(ret.Code())) {
        FFMPEG_LOG("%s: %s", __func__, ret.Message().get());
        return ret;
      }
    }
    Result<already_AddRefed<VideoData>, MediaResult> r =
        VideoData::CreateAndCopyData(
            mInfo, mImageContainer, aOffset, TimeUnit::FromMicroseconds(aPts),
            TimeUnit::FromMicroseconds(aDuration), b, IsKeyFrame(mFrame),
            TimeUnit::FromMicroseconds(mFrame->pkt_dts),
            mInfo.ScaledImageRect(mFrame->width, mFrame->height),
            mImageAllocator);
    if (r.isErr()) {
      return r.unwrapErr();
    }
    v = r.unwrap();
  }
  MOZ_ASSERT(v);
  aResults.AppendElement(std::move(v));
  return NS_OK;
}
   1804 
   1805 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
// Exports the VA-API surface backing the current frame as a DRM PRIME
// descriptor into aVaDesc. Returns false when the export itself fails;
// a vaSyncSurface() failure is only logged and not treated as fatal.
bool FFmpegVideoDecoder<LIBAV_VER>::GetVAAPISurfaceDescriptor(
    VADRMPRIMESurfaceDescriptor* aVaDesc) {
  // For VAAPI frames FFmpeg stores the VASurfaceID in data[3].
  VASurfaceID surface_id = (VASurfaceID)(uintptr_t)mFrame->data[3];
  VAStatus vas = VALibWrapper::sFuncs.vaExportSurfaceHandle(
      mDisplay, surface_id, VA_SURFACE_ATTRIB_MEM_TYPE_DRM_PRIME_2,
      VA_EXPORT_SURFACE_READ_ONLY | VA_EXPORT_SURFACE_SEPARATE_LAYERS, aVaDesc);
  if (vas != VA_STATUS_SUCCESS) {
    FFMPEG_LOG("GetVAAPISurfaceDescriptor(): vaExportSurfaceHandle failed");
    return false;
  }
  // Make sure decoding into the surface has finished before it is consumed.
  vas = VALibWrapper::sFuncs.vaSyncSurface(mDisplay, surface_id);
  if (vas != VA_STATUS_SUCCESS) {
    FFMPEG_LOG("GetVAAPISurfaceDescriptor(): vaSyncSurface failed");
  }
  return true;
}
   1822 
// Creates a VideoData from a VA-API hardware frame: exports the surface as
// a DRM PRIME descriptor, wraps it in a pooled DMABuf frame surface, tags
// colorimetry, and appends the resulting image to aResults.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageVAAPI(
    int64_t aOffset, int64_t aPts, int64_t aDuration,
    MediaDataDecoder::DecodedData& aResults) {
  VADRMPRIMESurfaceDescriptor vaDesc;
  if (!GetVAAPISurfaceDescriptor(&vaDesc)) {
    return MediaResult(
        NS_ERROR_DOM_MEDIA_DECODE_ERR,
        RESULT_DETAIL("Unable to get frame by vaExportSurfaceHandle()"));
  }
  // The exported descriptor must be released on every exit path.
  auto releaseSurfaceDescriptor = MakeScopeExit(
      [&] { DMABufSurfaceYUV::ReleaseVADRMPRIMESurfaceDescriptor(vaDesc); });

  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
  if (!mVideoFramePool) {
    // Size the pool to match FFmpeg's own hardware frame pool.
    AVHWFramesContext* context =
        (AVHWFramesContext*)mCodecContext->hw_frames_ctx->data;
    mVideoFramePool =
        MakeUnique<VideoFramePool<LIBAV_VER>>(context->initial_pool_size);
  }
  auto surface = mVideoFramePool->GetVideoFrameSurface(
      vaDesc, mFrame->width, mFrame->height, mCodecContext, mFrame, mLib);
  if (!surface) {
    FFMPEG_LOG("CreateImageVAAPI(): failed to get VideoFrameSurface");
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("VAAPI dmabuf allocation error"));
  }

  // Propagate colorimetry; primaries/transfer are optional in the config.
  surface->SetYUVColorSpace(GetFrameColorSpace());
  surface->SetColorRange(GetFrameColorRange());
  if (mInfo.mColorPrimaries) {
    surface->SetColorPrimaries(mInfo.mColorPrimaries.value());
  }
  if (mInfo.mTransferFunction) {
    surface->SetTransferFunction(mInfo.mTransferFunction.value());
  }

  FFMPEG_LOG("VA-API frame pts=%" PRId64 " dts=%" PRId64 " duration=%" PRId64
             " color space %s/%s transfer %s",
             aPts, mFrame->pkt_dts, aDuration,
             YUVColorSpaceToString(GetFrameColorSpace()),
             mInfo.mColorPrimaries
                 ? ColorSpace2ToString(mInfo.mColorPrimaries.value())
                 : "unknown",
             mInfo.mTransferFunction
                 ? TransferFunctionToString(mInfo.mTransferFunction.value())
                 : "unknown");
  RefPtr<VideoData> vp = VideoData::CreateFromImage(
      mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
      TimeUnit::FromMicroseconds(aDuration), surface->GetAsImage(),
      IsKeyFrame(mFrame), TimeUnit::FromMicroseconds(mFrame->pkt_dts));

  if (!vp) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("VAAPI image allocation error"));
  }

  aResults.AppendElement(std::move(vp));
  return NS_OK;
}
   1882 
// Creates a VideoData from a V4L2 (DRM PRIME) hardware frame. Unlike the
// VAAPI path, the DRM descriptor is provided directly in the frame data and
// its file descriptors remain owned by FFmpeg.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageV4L2(
    int64_t aOffset, int64_t aPts, int64_t aDuration,
    MediaDataDecoder::DecodedData& aResults) {
  FFMPEG_LOG("V4L2 Got one frame output with pts=%" PRId64 " dts=%" PRId64
             " duration=%" PRId64,
             aPts, mFrame->pkt_dts, aDuration);

  // For DRM PRIME frames the descriptor is stored in data[0].
  AVDRMFrameDescriptor* desc = (AVDRMFrameDescriptor*)mFrame->data[0];
  if (!desc) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("Missing DRM PRIME descriptor in frame"));
  }

  // Note that the FDs in desc are owned by FFmpeg and it will reuse them
  // each time the same buffer is dequeued in future.  So we shouldn't close
  // them and so don't setup a clean-up handler for desc.

  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
  if (!mVideoFramePool) {
    // With the V4L2 wrapper codec we can't see the capture buffer pool size.
    // But, this value is only used for deciding when we are running out of
    // free buffers and so should start copying them.  So a rough estimate
    // is sufficient, and the codec defaults to 20 capture buffers.
    mVideoFramePool = MakeUnique<VideoFramePool<LIBAV_VER>>(20);
  }

  auto surface = mVideoFramePool->GetVideoFrameSurface(
      *desc, mFrame->width, mFrame->height, mCodecContext, mFrame, mLib);
  if (!surface) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("V4L2 dmabuf allocation error"));
  }
  surface->SetYUVColorSpace(GetFrameColorSpace());
  surface->SetColorRange(GetFrameColorRange());

  RefPtr<VideoData> vp = VideoData::CreateFromImage(
      mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
      TimeUnit::FromMicroseconds(aDuration), surface->GetAsImage(),
      IsKeyFrame(mFrame), TimeUnit::FromMicroseconds(mFrame->pkt_dts));

  if (!vp) {
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("V4L2 image creation error"));
  }

  aResults.AppendElement(std::move(vp));
  return NS_OK;
}
   1931 #endif
   1932 
// Resets decoder-side bookkeeping (drain state, pts context, duration map,
// pooled DMABuf frames, performance recorder) and then delegates to the
// base-class flush, which handles the underlying codec.
RefPtr<MediaDataDecoder::FlushPromise>
FFmpegVideoDecoder<LIBAV_VER>::ProcessFlush() {
  FFMPEG_LOG("ProcessFlush()");
  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
#if LIBAVCODEC_VERSION_MAJOR >= 58
  mHasSentDrainPacket = false;
#endif
#if LIBAVCODEC_VERSION_MAJOR < 58
  mPtsContext.Reset();
#endif
#ifdef MOZ_FFMPEG_USE_DURATION_MAP
  mDurationMap.Clear();
#endif
#if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
  if (mVideoFramePool) {
    mVideoFramePool->FlushFFmpegFrames();
  }
#endif
  // Record with a max-int64 sentinel timestamp — presumably this flushes any
  // in-flight measurements; confirm against PerformanceRecorder semantics.
  mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
  return FFmpegDataDecoder::ProcessFlush();
}
   1954 
   1955 #ifdef MOZ_WIDGET_ANDROID
   1956 Maybe<MediaDataDecoder::PropertyValue> FFmpegVideoDecoder<
   1957    LIBAV_VER>::GetDecodeProperty(MediaDataDecoder::PropertyName aName) const {
   1958  // If we are using a software decoder, then we aren't subject to platform
   1959  // limits. If we don't have mCodecContext, assume worst case.
   1960  if (mCodecContext) {
   1961    if (const auto* codec = mCodecContext->codec) {
   1962      if (!(codec->capabilities & AV_CODEC_CAP_HARDWARE)) {
   1963        return MediaDataDecoder::GetDecodeProperty(aName);
   1964      }
   1965    }
   1966  }
   1967 
   1968  // Android has limited amount of output buffers. See Bug 794747.
   1969  static constexpr uint32_t kNumOutputBuffers = 3;
   1970  // SurfaceTexture can have only one current/renderable image at a time.
   1971  // See Bug 1299068
   1972  static constexpr uint32_t kNumCurrentImages = 1;
   1973  switch (aName) {
   1974    case PropertyName::MaxNumVideoBuffers:
   1975      [[fallthrough]];
   1976    case PropertyName::MinNumVideoBuffers:
   1977      return Some(PropertyValue(kNumOutputBuffers));
   1978    case PropertyName::MaxNumCurrentImages:
   1979      return Some(PropertyValue(kNumCurrentImages));
   1980    default:
   1981      return MediaDataDecoder::GetDecodeProperty(aName);
   1982  }
   1983 }
   1984 #endif
   1985 
// Maps a video MIME type to the corresponding FFmpeg codec ID, gated on the
// libavcodec version that introduced each codec. Returns AV_CODEC_ID_NONE
// for types not handled by this decoder.
AVCodecID FFmpegVideoDecoder<LIBAV_VER>::GetCodecId(
    const nsACString& aMimeType) {
  if (MP4Decoder::IsH264(aMimeType)) {
    return AV_CODEC_ID_H264;
  }

#if LIBAVCODEC_VERSION_MAJOR >= 55
  // HEVC support arrived in libavcodec 55.
  if (MP4Decoder::IsHEVC(aMimeType)) {
    return AV_CODEC_ID_HEVC;
  }
#endif

  if (aMimeType.EqualsLiteral("video/x-vnd.on2.vp6")) {
    return AV_CODEC_ID_VP6F;
  }

#if LIBAVCODEC_VERSION_MAJOR >= 54
  if (VPXDecoder::IsVP8(aMimeType)) {
    return AV_CODEC_ID_VP8;
  }
#endif

#if LIBAVCODEC_VERSION_MAJOR >= 55
  if (VPXDecoder::IsVP9(aMimeType)) {
    return AV_CODEC_ID_VP9;
  }
#endif

#if defined(FFMPEG_AV1_DECODE)
  // AV1 requires bundled ffvpx or libavcodec >= 59 (see FFMPEG_AV1_DECODE).
  if (AOMDecoder::IsAV1(aMimeType)) {
    return AV_CODEC_ID_AV1;
  }
#endif

  return AV_CODEC_ID_NONE;
}
   2022 
// Tears down platform hardware-decode state (frame pool and hardware device
// contexts) before delegating to the base-class shutdown.
void FFmpegVideoDecoder<LIBAV_VER>::ProcessShutdown() {
  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());
#if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
  // Drop the DMABuf frame pool before releasing the VAAPI device context.
  mVideoFramePool = nullptr;
  if (IsHardwareAccelerated()) {
    mLib->av_buffer_unref(&mVAAPIDeviceContext);
  }
#endif
#ifdef MOZ_ENABLE_D3D11VA
  if (IsHardwareAccelerated()) {
    AVHWDeviceContext* hwctx =
        reinterpret_cast<AVHWDeviceContext*>(mD3D11VADeviceContext->data);
    AVD3D11VADeviceContext* d3d11vactx =
        reinterpret_cast<AVD3D11VADeviceContext*>(hwctx->hwctx);
    // The D3D11 device was borrowed from mDXVA2Manager (see
    // InitD3D11VADecoder); null it before unref so the context teardown
    // doesn't touch a device this code does not own.
    d3d11vactx->device = nullptr;
    mLib->av_buffer_unref(&mD3D11VADeviceContext);
    mD3D11VADeviceContext = nullptr;
  }
#endif
  FFmpegDataDecoder<LIBAV_VER>::ProcessShutdown();
}
   2044 
// Reports whether a hardware decode path is active for the current platform
// (V4L2/VAAPI on GTK, D3D11VA on Windows, MediaCodec on Android).
// Note: aFailureReason is never populated by this implementation.
bool FFmpegVideoDecoder<LIBAV_VER>::IsHardwareAccelerated(
    nsACString& aFailureReason) const {
#if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
  return mUsingV4L2 || !!mVAAPIDeviceContext;
#elif defined(MOZ_ENABLE_D3D11VA)
  return !!mD3D11VADeviceContext;
#elif defined(MOZ_WIDGET_ANDROID)
  return !!mMediaCodecDeviceContext;
#else
  return false;
#endif
}
   2057 
   2058 #if defined(MOZ_USE_HWDECODE) && defined(MOZ_WIDGET_GTK)
   2059 bool FFmpegVideoDecoder<LIBAV_VER>::IsFormatAccelerated(
   2060    AVCodecID aCodecID) const {
   2061  for (const auto& format : mAcceleratedFormats) {
   2062    if (format == aCodecID) {
   2063      return true;
   2064    }
   2065  }
   2066  return false;
   2067 }
   2068 
   2069 // See ffmpeg / vaapi_decode.c how CodecID is mapped to VAProfile.
static const struct {
  enum AVCodecID codec_id;
  VAProfile va_profile;
  // Human-readable profile name, used for logging only.
  char name[100];
} vaapi_profile_map[] = {
#  define MAP(c, v, n) {AV_CODEC_ID_##c, VAProfile##v, n}
    MAP(H264, H264ConstrainedBaseline, "H264ConstrainedBaseline"),
    MAP(H264, H264Main, "H264Main"),
    MAP(H264, H264High, "H264High"),
    MAP(VP8, VP8Version0_3, "VP8Version0_3"),
    MAP(VP9, VP9Profile0, "VP9Profile0"),
    MAP(VP9, VP9Profile2, "VP9Profile2"),
    MAP(AV1, AV1Profile0, "AV1Profile0"),
    MAP(AV1, AV1Profile1, "AV1Profile1"),
    MAP(HEVC, HEVCMain, "HEVCMain"),
    MAP(HEVC, HEVCMain10, "HEVCMain10"),
    // NOTE(review): this row labels VAProfileHEVCMain10 as "HEVCMain12" and
    // duplicates the entry above. Both lookup helpers below match on
    // va_profile and return the first hit, so this row is effectively dead.
    // Possibly intended to be VAProfileHEVCMain12 — confirm against the
    // minimum supported libva version before changing.
    MAP(HEVC, HEVCMain10, "HEVCMain12"),
#  undef MAP
};
   2089 
   2090 static AVCodecID VAProfileToCodecID(VAProfile aVAProfile) {
   2091  for (const auto& profile : vaapi_profile_map) {
   2092    if (profile.va_profile == aVAProfile) {
   2093      return profile.codec_id;
   2094    }
   2095  }
   2096  return AV_CODEC_ID_NONE;
   2097 }
   2098 
   2099 static const char* VAProfileName(VAProfile aVAProfile) {
   2100  for (const auto& profile : vaapi_profile_map) {
   2101    if (profile.va_profile == aVAProfile) {
   2102      return profile.name;
   2103    }
   2104  }
   2105  return nullptr;
   2106 }
   2107 
   2108 // This code is adopted from mpv project va-api routine
   2109 // determine_working_formats()
// Checks whether the given VAAPI hardware configuration can output a pixel
// format we can consume (NV12 or YUV420P) and, if so, adds aCodecID to
// aCodecList (deduplicated). On logging builds, every advertised format is
// logged before deciding.
void FFmpegVideoDecoder<LIBAV_VER>::AddAcceleratedFormats(
    nsTArray<AVCodecID>& aCodecList, AVCodecID aCodecID,
    AVVAAPIHWConfig* hwconfig) {
  AVHWFramesConstraints* fc =
      mLib->av_hwdevice_get_hwframe_constraints(mVAAPIDeviceContext, hwconfig);
  if (!fc) {
    FFMPEG_LOG("    failed to retrieve libavutil frame constraints");
    return;
  }
  // Constraints must be freed on every exit path.
  auto autoRelease =
      MakeScopeExit([&] { mLib->av_hwframe_constraints_free(&fc); });

  bool foundSupportedFormat = false;
  for (int n = 0;
       fc->valid_sw_formats && fc->valid_sw_formats[n] != AV_PIX_FMT_NONE;
       n++) {
#  ifdef MOZ_LOGGING
    char formatDesc[1000];
    FFMPEG_LOG("    codec %s format %s", mLib->avcodec_get_name(aCodecID),
               mLib->av_get_pix_fmt_string(formatDesc, sizeof(formatDesc),
                                           fc->valid_sw_formats[n]));
#  endif
    if (fc->valid_sw_formats[n] == AV_PIX_FMT_NV12 ||
        fc->valid_sw_formats[n] == AV_PIX_FMT_YUV420P) {
      foundSupportedFormat = true;
      // On logging builds keep iterating so every format gets logged.
#  ifndef MOZ_LOGGING
      break;
#  endif
    }
  }

  if (!foundSupportedFormat) {
    FFMPEG_LOG("    %s target pixel format is not supported!",
               mLib->avcodec_get_name(aCodecID));
    return;
  }

  if (!aCodecList.Contains(aCodecID)) {
    aCodecList.AppendElement(aCodecID);
  }
}
   2151 
// Probes the VA-API display for every advertised profile/entrypoint pair and
// returns the list of codec IDs for which hardware decode to a supported
// pixel format is available.
nsTArray<AVCodecID> FFmpegVideoDecoder<LIBAV_VER>::GetAcceleratedFormats() {
  FFMPEG_LOG("FFmpegVideoDecoder::GetAcceleratedFormats()");

  VAProfile* profiles = nullptr;
  VAEntrypoint* entryPoints = nullptr;

  // NOTE(review): this invokes the nsTArray capacity constructor with
  // AV_CODEC_ID_NONE (value 0) — presumably intended as "empty array";
  // confirm the intent.
  nsTArray<AVCodecID> supportedHWCodecs(AV_CODEC_ID_NONE);
#  ifdef MOZ_LOGGING
  // Log the final list on every return path.
  auto printCodecs = MakeScopeExit([&] {
    FFMPEG_LOG("  Supported accelerated formats:");
    for (unsigned i = 0; i < supportedHWCodecs.Length(); i++) {
      FFMPEG_LOG("      %s", mLib->avcodec_get_name(supportedHWCodecs[i]));
    }
  });
#  endif

  AVVAAPIHWConfig* hwconfig =
      mLib->av_hwdevice_hwconfig_alloc(mVAAPIDeviceContext);
  if (!hwconfig) {
    FFMPEG_LOG("  failed to get AVVAAPIHWConfig");
    return supportedHWCodecs;
  }
  // The raw profile/entrypoint arrays and hwconfig are freed on every exit.
  auto autoRelease = MakeScopeExit([&] {
    delete[] profiles;
    delete[] entryPoints;
    mLib->av_freep(&hwconfig);
  });

  int maxProfiles = vaMaxNumProfiles(mDisplay);
  int maxEntryPoints = vaMaxNumEntrypoints(mDisplay);
  if (MOZ_UNLIKELY(maxProfiles <= 0 || maxEntryPoints <= 0)) {
    return supportedHWCodecs;
  }

  profiles = new VAProfile[maxProfiles];
  int numProfiles = 0;
  VAStatus status = vaQueryConfigProfiles(mDisplay, profiles, &numProfiles);
  if (status != VA_STATUS_SUCCESS) {
    FFMPEG_LOG("  vaQueryConfigProfiles() failed %s", vaErrorStr(status));
    return supportedHWCodecs;
  }
  // Defensive clamp in case the driver reports more than it promised.
  numProfiles = std::min(numProfiles, maxProfiles);

  entryPoints = new VAEntrypoint[maxEntryPoints];
  for (int p = 0; p < numProfiles; p++) {
    VAProfile profile = profiles[p];

    // Skip profiles that don't map to a codec we handle.
    AVCodecID codecID = VAProfileToCodecID(profile);
    if (codecID == AV_CODEC_ID_NONE) {
      continue;
    }

    int numEntryPoints = 0;
    status = vaQueryConfigEntrypoints(mDisplay, profile, entryPoints,
                                      &numEntryPoints);
    if (status != VA_STATUS_SUCCESS) {
      FFMPEG_LOG("  vaQueryConfigEntrypoints() failed: '%s' for profile %d",
                 vaErrorStr(status), (int)profile);
      continue;
    }
    numEntryPoints = std::min(numEntryPoints, maxEntryPoints);

    FFMPEG_LOG("  Profile %s:", VAProfileName(profile));
    for (int e = 0; e < numEntryPoints; e++) {
      // Create a throwaway config for this profile/entrypoint so the frame
      // constraints can be queried for it.
      VAConfigID config = VA_INVALID_ID;
      status = vaCreateConfig(mDisplay, profile, entryPoints[e], nullptr, 0,
                              &config);
      if (status != VA_STATUS_SUCCESS) {
        FFMPEG_LOG("  vaCreateConfig() failed: '%s' for profile %d",
                   vaErrorStr(status), (int)profile);
        continue;
      }
      hwconfig->config_id = config;
      AddAcceleratedFormats(supportedHWCodecs, codecID, hwconfig);
      vaDestroyConfig(mDisplay, config);
    }
  }

  return supportedHWCodecs;
}
   2232 
   2233 #endif
   2234 
   2235 #ifdef MOZ_ENABLE_D3D11VA
// Initializes a D3D11VA-backed FFmpeg decoder in the GPU process: finds the
// hardware codec, allocates the codec context, creates the D3D11 device
// context (device borrowed from a freshly created DXVA2Manager), and opens
// the codec. On any failure all partially created state is torn down by the
// scope-exit handler.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitD3D11VADecoder() {
  MOZ_DIAGNOSTIC_ASSERT(XRE_IsGPUProcess());
  FFMPEG_LOG("Initialising D3D11VA FFmpeg decoder");
  StaticMutexAutoLock mon(sMutex);

  if (!mImageAllocator || !mImageAllocator->SupportsD3D11()) {
    FFMPEG_LOG("  no KnowsCompositor or it doesn't support D3D11");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  // Only up to 10-bit content is handled on this path.
  if (mInfo.mColorDepth > gfx::ColorDepth::COLOR_10) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("not supported color depth"));
  }

  AVCodec* codec = FindVideoHardwareAVCodec(mLib, mCodecID);
  if (!codec) {
    FFMPEG_LOG("  couldn't find d3d11va decoder for %s",
               AVCodecToString(mCodecID));
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("unable to find codec"));
  }
  FFMPEG_LOG("  codec %s : %s", codec->name, codec->long_name);

  if (!(mCodecContext = mLib->avcodec_alloc_context3(codec))) {
    FFMPEG_LOG("  couldn't init d3d11va ffmpeg context");
    return NS_ERROR_OUT_OF_MEMORY;
  }
  mCodecContext->opaque = this;
  InitHWCodecContext(ContextType::D3D11VA);

  // MOZ_REQUIRES isn't recognized in MakeScopeExit, but InitD3D11VADecoder
  // already locks sMutex at the start, so just escape thread analysis.
  auto releaseResources = MakeScopeExit([&]() MOZ_NO_THREAD_SAFETY_ANALYSIS {
    ReleaseCodecContext();
    if (mD3D11VADeviceContext) {
      AVHWDeviceContext* hwctx =
          reinterpret_cast<AVHWDeviceContext*>(mD3D11VADeviceContext->data);
      AVD3D11VADeviceContext* d3d11vactx =
          reinterpret_cast<AVD3D11VADeviceContext*>(hwctx->hwctx);
      // Null the borrowed device before unref so teardown doesn't touch it.
      d3d11vactx->device = nullptr;
      mLib->av_buffer_unref(&mD3D11VADeviceContext);
      mD3D11VADeviceContext = nullptr;
    }
    mDXVA2Manager.reset();
  });

  FFMPEG_LOG("  creating device context");
  mD3D11VADeviceContext = mLib->av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_D3D11VA);
  if (!mD3D11VADeviceContext) {
    FFMPEG_LOG("  av_hwdevice_ctx_alloc failed.");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  nsAutoCString failureReason;
  mDXVA2Manager.reset(
      DXVA2Manager::CreateD3D11DXVA(mImageAllocator, failureReason));
  if (!mDXVA2Manager) {
    FFMPEG_LOG("  failed to create dxva manager.");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  ID3D11Device* device = mDXVA2Manager->GetD3D11Device();
  if (!device) {
    FFMPEG_LOG("  failed to get D3D11 device.");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  // Hand the DXVA2Manager-owned device to FFmpeg's device context.
  AVHWDeviceContext* hwctx = (AVHWDeviceContext*)mD3D11VADeviceContext->data;
  AVD3D11VADeviceContext* d3d11vactx = (AVD3D11VADeviceContext*)hwctx->hwctx;
  d3d11vactx->device = device;

  if (mLib->av_hwdevice_ctx_init(mD3D11VADeviceContext) < 0) {
    FFMPEG_LOG("  av_hwdevice_ctx_init failed.");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  mCodecContext->hw_device_ctx = mLib->av_buffer_ref(mD3D11VADeviceContext);
  MediaResult ret = AllocateExtraData();
  if (NS_FAILED(ret)) {
    FFMPEG_LOG("  failed to allocate extradata.");
    return ret;
  }

  if (mLib->avcodec_open2(mCodecContext, codec, nullptr) < 0) {
    FFMPEG_LOG("  avcodec_open2 failed for d3d11va decoder");
    return NS_ERROR_DOM_MEDIA_FATAL_ERR;
  }

  FFMPEG_LOG("  D3D11VA FFmpeg init successful");
  // Success: disarm the cleanup handler so the created state is kept.
  releaseResources.release();
  return NS_OK;
}
   2329 
   2330 MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageD3D11(
   2331    int64_t aOffset, int64_t aPts, int64_t aDuration,
   2332    MediaDataDecoder::DecodedData& aResults) {
   2333  MOZ_DIAGNOSTIC_ASSERT(mFrame);
   2334  MOZ_DIAGNOSTIC_ASSERT(mDXVA2Manager);
   2335 
   2336  gfx::TransferFunction transferFunction =
   2337      mInfo.mTransferFunction.refOr(gfx::TransferFunction::BT709);
   2338  bool isHDR = transferFunction == gfx::TransferFunction::PQ ||
   2339               transferFunction == gfx::TransferFunction::HLG;
   2340  HRESULT hr = mDXVA2Manager->ConfigureForSize(
   2341      GetSurfaceFormat(), GetFrameColorSpace(), GetFrameColorRange(),
   2342      mInfo.mColorDepth,
   2343      mInfo.mTransferFunction.refOr(gfx::TransferFunction::BT709),
   2344      mFrame->width, mFrame->height);
   2345  if (FAILED(hr)) {
   2346    nsPrintfCString msg("Failed to configure DXVA2Manager, hr=%lx", hr);
   2347    FFMPEG_LOG("%s", msg.get());
   2348    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, msg);
   2349  }
   2350 
   2351  if (!mFrame->data[0]) {
   2352    nsPrintfCString msg("Frame data shouldn't be null!");
   2353    FFMPEG_LOG("%s", msg.get());
   2354    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, msg);
   2355  }
   2356 
   2357  ID3D11Resource* resource = reinterpret_cast<ID3D11Resource*>(mFrame->data[0]);
   2358  RefPtr<ID3D11Texture2D> texture;
   2359  hr = resource->QueryInterface(
   2360      static_cast<ID3D11Texture2D**>(getter_AddRefs(texture)));
   2361  if (FAILED(hr)) {
   2362    nsPrintfCString msg("Failed to get ID3D11Texture2D, hr=%lx", hr);
   2363    FFMPEG_LOG("%s", msg.get());
   2364    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, msg);
   2365  }
   2366 
   2367  D3D11_TEXTURE2D_DESC desc;
   2368  texture->GetDesc(&desc);
   2369 
   2370  auto format = [&]() {
   2371    if (desc.Format == DXGI_FORMAT_P010) {
   2372      return gfx::SurfaceFormat::P010;
   2373    }
   2374    if (desc.Format == DXGI_FORMAT_P016) {
   2375      return gfx::SurfaceFormat::P016;
   2376    }
   2377    if (isHDR) {
   2378      return gfx::SurfaceFormat::P010;
   2379    }
   2380    MOZ_ASSERT(desc.Format == DXGI_FORMAT_NV12);
   2381    return gfx::SurfaceFormat::NV12;
   2382  }();
   2383 
   2384  RefPtr<Image> image;
   2385  gfx::IntRect pictureRegion =
   2386      mInfo.ScaledImageRect(mFrame->width, mFrame->height);
   2387  UINT index = (uintptr_t)mFrame->data[1];
   2388 
   2389  // TODO(https://bugzilla.mozilla.org/show_bug.cgi?id=2008886)
   2390  // Currently the zero-copy path supports NV12 but not P010 so it can't do HDR
   2391  // yet, this can be implemented in future.
   2392  if (format == gfx::SurfaceFormat::NV12 && CanUseZeroCopyVideoFrame()) {
   2393    mNumOfHWTexturesInUse++;
   2394    FFMPEGV_LOG("CreateImageD3D11, zero copy, index=%u (texInUse=%u), isHDR=%u",
   2395                index, mNumOfHWTexturesInUse.load(), (unsigned int)isHDR);
   2396    hr = mDXVA2Manager->WrapTextureWithImage(
   2397        new D3D11TextureWrapper(
   2398            mFrame, mLib, texture, format, index,
   2399            [self = RefPtr<FFmpegVideoDecoder>(this), this]() {
   2400              MOZ_ASSERT(mNumOfHWTexturesInUse > 0);
   2401              mNumOfHWTexturesInUse--;
   2402            }),
   2403        pictureRegion, getter_AddRefs(image));
   2404  } else {
   2405    FFMPEGV_LOG("CreateImageD3D11, copy output to a shared texture, isHDR=%u",
   2406                (unsigned int)isHDR);
   2407    hr = mDXVA2Manager->CopyToImage(texture, index, pictureRegion,
   2408                                    getter_AddRefs(image));
   2409  }
   2410  if (FAILED(hr)) {
   2411    nsPrintfCString msg("Failed to create a D3D image");
   2412    FFMPEG_LOG("%s", msg.get());
   2413    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, msg);
   2414  }
   2415  MOZ_ASSERT(image);
   2416 
   2417  RefPtr<VideoData> v = VideoData::CreateFromImage(
   2418      mInfo.mDisplay, aOffset, TimeUnit::FromMicroseconds(aPts),
   2419      TimeUnit::FromMicroseconds(aDuration), image, IsKeyFrame(mFrame),
   2420      TimeUnit::FromMicroseconds(mFrame->pkt_dts));
   2421  if (!v) {
   2422    nsPrintfCString msg("D3D image allocation error");
   2423    FFMPEG_LOG("%s", msg.get());
   2424    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, msg);
   2425  }
   2426  aResults.AppendElement(std::move(v));
   2427  return NS_OK;
   2428 }
   2429 
   2430 bool FFmpegVideoDecoder<LIBAV_VER>::CanUseZeroCopyVideoFrame() const {
   2431  // When zero-copy is available, we use a hybrid approach that combines
   2432  // zero-copy and texture copying. This prevents scenarios where all
   2433  // zero-copy frames remain unreleased, which could block ffmpeg from
   2434  // allocating new textures for subsequent frames. Zero-copy should only be
   2435  // used when there is sufficient space available in the texture pool.
   2436  return gfx::gfxVars::HwDecodedVideoZeroCopy() && mImageAllocator &&
   2437         mImageAllocator->UsingHardwareWebRender() && mDXVA2Manager &&
   2438         mDXVA2Manager->SupportsZeroCopyNV12Texture() &&
   2439         mNumOfHWTexturesInUse <= EXTRA_HW_FRAMES / 2;
   2440 }
   2441 #endif
   2442 
   2443 #ifdef MOZ_WIDGET_ANDROID
// Sets up a MediaCodec-backed hardware FFmpeg decoder on Android: finds the
// hardware codec, allocates the codec context and a MEDIACODEC hw device
// context, binds a Java surface to it, then opens the codec. On any failure
// the scope-exit handler below tears down everything allocated so far.
MediaResult FFmpegVideoDecoder<LIBAV_VER>::InitMediaCodecDecoder() {
  FFMPEG_LOG("Initialising MediaCodec FFmpeg decoder");
  StaticMutexAutoLock mon(sMutex);

  // Bail out early for depths above 10-bit; this path doesn't support them.
  if (mInfo.mColorDepth > gfx::ColorDepth::COLOR_10) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("not supported color depth"));
  }

  AVCodec* codec =
      FindHardwareAVCodec(mLib, mCodecID, AV_HWDEVICE_TYPE_MEDIACODEC);
  if (!codec) {
    FFMPEG_LOG("  couldn't find MediaCodec decoder for %s",
               AVCodecToString(mCodecID));
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("unable to find codec"));
  }
  FFMPEG_LOG("  codec %s : %s", codec->name, codec->long_name);

  if (!(mCodecContext = mLib->avcodec_alloc_context3(codec))) {
    FFMPEG_LOG("  couldn't alloc_context3 for MediaCodec");
    return MediaResult(NS_ERROR_OUT_OF_MEMORY,
                       RESULT_DETAIL("unable to alloc codec context"));
  }
  // Store `this` so ffmpeg callbacks can find the decoder instance.
  mCodecContext->opaque = this;
  InitHWCodecContext(ContextType::MediaCodec);

  // MOZ_REQUIRES isn't recognized in MakeScopeExit, but InitMediaCodecDecoder
  // already locks sMutex at the start, so just escape thread analysis.
  // Releases the codec context and the hw device context unless init
  // succeeds (see releaseResources.release() at the end).
  auto releaseResources = MakeScopeExit([&]() MOZ_NO_THREAD_SAFETY_ANALYSIS {
    ReleaseCodecContext();
    if (mMediaCodecDeviceContext) {
      mLib->av_buffer_unref(&mMediaCodecDeviceContext);
    }
  });

  FFMPEG_LOG("  creating device context");
  mMediaCodecDeviceContext =
      mLib->av_hwdevice_ctx_alloc(AV_HWDEVICE_TYPE_MEDIACODEC);
  if (!mMediaCodecDeviceContext) {
    FFMPEG_LOG("  av_hwdevice_ctx_alloc failed.");
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("unable to alloc hwdevice context"));
  }

  AVHWDeviceContext* hwctx = (AVHWDeviceContext*)mMediaCodecDeviceContext->data;
  AVMediaCodecDeviceContext* mediacodecctx =
      (AVMediaCodecDeviceContext*)hwctx->hwctx;

  // Acquire a Java surface sized to the stream; decoded frames are rendered
  // into its SurfaceTexture.
  mSurface =
      java::GeckoSurface::LocalRef(java::SurfaceAllocator::AcquireSurface(
          mInfo.mImage.width, mInfo.mImage.height, false));
  if (!mSurface) {
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("unable to acquire Java surface"));
  }

  // Kept so CreateImageMediaCodec can wrap output frames with this surface.
  mSurfaceHandle = mSurface->GetHandle();

  JNIEnv* const env = jni::GetEnvForThread();
  ANativeWindow* native_window =
      ANativeWindow_fromSurface(env, mSurface->GetSurface().Get());

  // Hand the surface/native window to the MediaCodec device context.
  // NOTE(review): native_window ownership appears to transfer to the device
  // context here — confirm no explicit ANativeWindow_release is needed.
  mediacodecctx->surface = mSurface->GetSurface().Get();
  mediacodecctx->native_window = native_window;
  mediacodecctx->create_window = 0;  // default -- useful when encoding?

  if (mLib->av_hwdevice_ctx_init(mMediaCodecDeviceContext) < 0) {
    FFMPEG_LOG("  av_hwdevice_ctx_init failed.");
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("unable to init hwdevice context"));
  }

  // The codec context takes its own reference on the device context.
  mCodecContext->hw_device_ctx = mLib->av_buffer_ref(mMediaCodecDeviceContext);

  MediaResult ret = AllocateExtraData();
  if (NS_FAILED(ret)) {
    FFMPEG_LOG("  failed to allocate extradata.");
    return ret;
  }

#  ifdef USING_MOZFFVPX
  ret = MaybeAttachCDM();
  if (NS_FAILED(ret)) {
    FFMPEG_LOG("  failed to attach CDM.");
    return ret;
  }
#  endif

  if (mLib->avcodec_open2(mCodecContext, codec, nullptr) < 0) {
    FFMPEG_LOG("  avcodec_open2 failed for MediaCodec decoder");
    return MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                       RESULT_DETAIL("avcodec_open2 failed for MediaCodec"));
  }

  FFMPEG_LOG("  MediaCodec FFmpeg init successful");
  // Success: disarm the cleanup so the contexts stay alive for decoding.
  releaseResources.release();
  return NS_OK;
}
   2543 
   2544 MediaResult FFmpegVideoDecoder<LIBAV_VER>::CreateImageMediaCodec(
   2545    int64_t aOffset, int64_t aPts, int64_t aTimecode, int64_t aDuration,
   2546    MediaDataDecoder::DecodedData& aResults) {
   2547  MOZ_DIAGNOSTIC_ASSERT(mFrame);
   2548 
   2549  auto img = MakeRefPtr<layers::SurfaceTextureImage>(
   2550      mSurfaceHandle, gfx::IntSize(mFrame->width, mFrame->height),
   2551      false /* NOT continuous */, gl::OriginPos::BottomLeft, mInfo.HasAlpha(),
   2552      false /* force color space stuff */,
   2553      /* aTransformOverride */ Nothing());
   2554 
   2555  class CompositeListener final
   2556      : public layers::SurfaceTextureImage::SetCurrentCallback {
   2557   public:
   2558    CompositeListener() = default;
   2559 
   2560    ~CompositeListener() override { MaybeRelease(/* aRender */ false); }
   2561 
   2562    bool Init(FFmpegVideoDecoder<LIBAV_VER>* aDecoder, AVFrame* aFrame) {
   2563      if (NS_WARN_IF(!aFrame) || NS_WARN_IF(!aFrame->buf[0])) {
   2564        return false;
   2565      }
   2566      mFrame = aDecoder->mLib->av_frame_clone(aFrame);
   2567      if (NS_WARN_IF(!mFrame)) {
   2568        return false;
   2569      }
   2570      mDecoder = aDecoder;
   2571      return true;
   2572    }
   2573 
   2574    void operator()(void) override { MaybeRelease(/* aRender */ true); }
   2575 
   2576    void MaybeRelease(bool aRender) {
   2577      if (!mDecoder) {
   2578        return;
   2579      }
   2580      for (int i = 0; i < AV_NUM_DATA_POINTERS; ++i) {
   2581        if (mFrame->data[i]) {
   2582          mDecoder->mLib->av_mediacodec_release_buffer(
   2583              (AVMediaCodecBuffer*)mFrame->data[i], aRender ? 1 : 0);
   2584        }
   2585      }
   2586      mDecoder->mLib->av_frame_free(&mFrame);
   2587      mDecoder->QueueResumeDrain();
   2588      mDecoder = nullptr;
   2589    }
   2590 
   2591    RefPtr<FFmpegVideoDecoder<LIBAV_VER>> mDecoder;
   2592    AVFrame* mFrame = nullptr;
   2593  };
   2594 
   2595  auto listener = MakeUnique<CompositeListener>();
   2596  if (!listener->Init(this, mFrame)) {
   2597    FFMPEG_LOG("  CreateImageMediaCodec failed to init listener");
   2598    return NS_ERROR_INVALID_ARG;
   2599  }
   2600 
   2601  img->RegisterSetCurrentCallback(std::move(listener));
   2602 
   2603  RefPtr<VideoData> v = VideoData::CreateFromImage(
   2604      {mFrame->width, mFrame->height}, aOffset,
   2605      TimeUnit::FromMicroseconds(aPts), TimeUnit::FromMicroseconds(aDuration),
   2606      img.forget().downcast<layers::Image>(), mFrame->flags & AV_FRAME_FLAG_KEY,
   2607      TimeUnit::FromMicroseconds(aTimecode));
   2608 
   2609  aResults.AppendElement(std::move(v));
   2610  return NS_OK;
   2611 }
   2612 #endif  // MOZ_WIDGET_ANDROID
   2613 
   2614 #if MOZ_USE_HWDECODE
   2615 /* static */ AVCodec* FFmpegVideoDecoder<LIBAV_VER>::FindVideoHardwareAVCodec(
   2616    const FFmpegLibWrapper* aLib, AVCodecID aCodec,
   2617    AVHWDeviceType aDeviceType) {
   2618 #  ifdef MOZ_WIDGET_GTK
   2619  if (aDeviceType == AV_HWDEVICE_TYPE_NONE) {
   2620    switch (aCodec) {
   2621      case AV_CODEC_ID_H264:
   2622        return aLib->avcodec_find_decoder_by_name("h264_v4l2m2m");
   2623      case AV_CODEC_ID_VP8:
   2624        return aLib->avcodec_find_decoder_by_name("vp8_v4l2m2m");
   2625      case AV_CODEC_ID_VP9:
   2626        return aLib->avcodec_find_decoder_by_name("vp9_v4l2m2m");
   2627      case AV_CODEC_ID_HEVC:
   2628        return aLib->avcodec_find_decoder_by_name("hevc_v4l2m2m");
   2629      default:
   2630        return nullptr;
   2631    }
   2632  }
   2633 #  endif
   2634  return FindHardwareAVCodec(aLib, aCodec, aDeviceType);
   2635 }
   2636 #endif
   2637 
   2638 }  // namespace mozilla