tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

WMFVideoMFTManager.cpp (41108B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "WMFVideoMFTManager.h"
      8 
      9 #include <cguid.h>
     10 #include <psapi.h>
     11 
     12 #include <algorithm>
     13 
     14 #include "DXVA2Manager.h"
     15 #include "GMPUtils.h"  // For SplitAt. TODO: Move SplitAt to a central place.
     16 #include "IMFYCbCrImage.h"
     17 #include "ImageContainer.h"
     18 #include "MediaInfo.h"
     19 #include "MediaTelemetryConstants.h"
     20 #include "VideoUtils.h"
     21 #include "WMFDecoderModule.h"
     22 #include "WMFUtils.h"
     23 #include "gfx2DGlue.h"
     24 #include "gfxWindowsPlatform.h"
     25 #include "mozilla/AbstractThread.h"
     26 #include "mozilla/ClearOnShutdown.h"
     27 #include "mozilla/Logging.h"
     28 #include "mozilla/SchedulerGroup.h"
     29 #include "mozilla/StaticPrefs_gfx.h"
     30 #include "mozilla/StaticPrefs_media.h"
     31 #include "mozilla/SyncRunnable.h"
     32 #include "mozilla/gfx/DeviceManagerDx.h"
     33 #include "mozilla/gfx/gfxVars.h"
     34 #include "mozilla/glean/DomMediaPlatformsWmfMetrics.h"
     35 #include "mozilla/layers/FenceD3D11.h"
     36 #include "mozilla/layers/LayersTypes.h"
     37 #include "mozilla/mscom/EnsureMTA.h"
     38 #include "nsPrintfCString.h"
     39 #include "nsThreadUtils.h"
     40 #include "nsWindowsHelpers.h"
     41 
     42 #define LOG(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, (__VA_ARGS__))
     43 #define LOGV(...) MOZ_LOG(sPDMLog, mozilla::LogLevel::Verbose, (__VA_ARGS__))
     44 
     45 using mozilla::layers::Image;
     46 using mozilla::layers::IMFYCbCrImage;
     47 using mozilla::layers::LayerManager;
     48 using mozilla::layers::LayersBackend;
     49 using mozilla::media::TimeUnit;
     50 
     51 namespace mozilla {
     52 
     53 LayersBackend GetCompositorBackendType(
     54    layers::KnowsCompositor* aKnowsCompositor) {
     55  if (aKnowsCompositor) {
     56    return aKnowsCompositor->GetCompositorBackendType();
     57  }
     58  return LayersBackend::LAYERS_NONE;
     59 }
     60 
// Captures the stream configuration and compositor/image-container handles.
// Heavyweight setup (MFT creation, DXVA negotiation) is deferred to Init().
WMFVideoMFTManager::WMFVideoMFTManager(
    const VideoInfo& aConfig, layers::KnowsCompositor* aKnowsCompositor,
    layers::ImageContainer* aImageContainer, float aFramerate,
    const CreateDecoderParams::OptionSet& aOptions, bool aDXVAEnabled,
    Maybe<TrackingId> aTrackingId)
    : mVideoInfo(aConfig),
      mImageSize(aConfig.mImage),
      mStreamType(GetStreamTypeFromMimeType(aConfig.mMimeType)),
      mSoftwareImageSize(aConfig.mImage),
      mSoftwarePictureSize(aConfig.mImage),
      mVideoStride(0),
      mColorSpace(aConfig.mColorSpace),
      mColorRange(aConfig.mColorRange),
      mColorDepth(aConfig.mColorDepth),
      mImageContainer(aImageContainer),
      mKnowsCompositor(aKnowsCompositor),
      // DXVA is only used when the caller allows it AND the decoder options
      // don't explicitly forbid hardware decoding.
      mDXVAEnabled(aDXVAEnabled &&
                   !aOptions.contains(
                       CreateDecoderParams::Option::HardwareDecoderNotAllowed)),
      mZeroCopyNV12Texture(false),
      mFramerate(aFramerate),
      mLowLatency(aOptions.contains(CreateDecoderParams::Option::LowLatency)),
      mKeepOriginalPts(
          aOptions.contains(CreateDecoderParams::Option::KeepOriginalPts)),
      mTrackingId(std::move(aTrackingId))
// mVideoStride, mVideoWidth, mVideoHeight, mUseHwAccel are initialized in
// Init().
{
  MOZ_COUNT_CTOR(WMFVideoMFTManager);

  // The V and U planes are stored 16-row-aligned, so we need to add padding
  // to the row heights to ensure the Y'CbCr planes are referenced properly.
  // This value is only used with software decoder.
  if (mSoftwareImageSize.height % 16 != 0) {
    mSoftwareImageSize.height += 16 - (mSoftwareImageSize.height % 16);
  }
}
     98 
WMFVideoMFTManager::~WMFVideoMFTManager() {
  // Balances the constructor's MOZ_COUNT_CTOR for leak-checking bookkeeping.
  MOZ_COUNT_DTOR(WMFVideoMFTManager);
}
    102 
// Maps the stream type (derived from the config's MIME type) to the
// corresponding Media Foundation video subtype GUID used for the MFT input
// type; returns GUID_NULL for unrecognized stream types.
// NOTE(review): the previous "/* static */" annotation here was wrong -- this
// reads mStreamType, so it must be a non-static member function.
const GUID& WMFVideoMFTManager::GetMediaSubtypeGUID() {
  MOZ_ASSERT(StreamTypeIsVideo(mStreamType));
  switch (mStreamType) {
    case WMFStreamType::H264:
      return MFVideoFormat_H264;
    case WMFStreamType::VP8:
      return MFVideoFormat_VP80;
    case WMFStreamType::VP9:
      return MFVideoFormat_VP90;
    case WMFStreamType::AV1:
      return MFVideoFormat_AV1;
    case WMFStreamType::HEVC:
      return MFVideoFormat_HEVC;
    default:
      return GUID_NULL;
  };
}
    121 
    122 bool WMFVideoMFTManager::InitializeDXVA() {
    123  // If we use DXVA but aren't running with a D3D layer manager then the
    124  // readback of decoded video frames from GPU to CPU memory grinds painting
    125  // to a halt, and makes playback performance *worse*.
    126  if (!mDXVAEnabled) {
    127    mDXVAFailureReason.AssignLiteral(
    128        "Hardware video decoding disabled or blacklisted");
    129    return false;
    130  }
    131  MOZ_ASSERT(!mDXVA2Manager);
    132  if (!mKnowsCompositor || !mKnowsCompositor->SupportsD3D11()) {
    133    mDXVAFailureReason.AssignLiteral("Unsupported layers backend");
    134    return false;
    135  }
    136 
    137  if (!XRE_IsRDDProcess() && !XRE_IsGPUProcess()) {
    138    mDXVAFailureReason.AssignLiteral(
    139        "DXVA only supported in RDD or GPU process");
    140    return false;
    141  }
    142 
    143  bool d3d11 = true;
    144  if (!StaticPrefs::media_wmf_dxva_d3d11_enabled()) {
    145    mDXVAFailureReason = nsPrintfCString(
    146        "D3D11: %s is false",
    147        StaticPrefs::GetPrefName_media_wmf_dxva_d3d11_enabled());
    148    d3d11 = false;
    149  }
    150 
    151  if (d3d11) {
    152    mDXVAFailureReason.AppendLiteral("D3D11: ");
    153    mDXVA2Manager.reset(
    154        DXVA2Manager::CreateD3D11DXVA(mKnowsCompositor, mDXVAFailureReason));
    155    if (mDXVA2Manager) {
    156      return true;
    157    }
    158  }
    159 
    160  return mDXVA2Manager != nullptr;
    161 }
    162 
    163 MediaResult WMFVideoMFTManager::ValidateVideoInfo() {
    164  NS_ENSURE_TRUE(StreamTypeIsVideo(mStreamType),
    165                 MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    166                             RESULT_DETAIL("Invalid stream type")));
    167  switch (mStreamType) {
    168    case WMFStreamType::H264:
    169      if (!StaticPrefs::media_wmf_allow_unsupported_resolutions()) {
    170        // The WMF H.264 decoder is documented to have a minimum resolution
    171        // 48x48 pixels for resolution, but we won't enable hw decoding for the
    172        // resolution < 132 pixels. It's assumed the software decoder doesn't
    173        // have this limitation, but it still might have maximum resolution
    174        // limitation.
    175        // https://msdn.microsoft.com/en-us/library/windows/desktop/dd797815(v=vs.85).aspx
    176        static const int32_t MAX_H264_PIXEL_COUNT = 4096 * 2304;
    177        const CheckedInt32 pixelCount =
    178            CheckedInt32(mVideoInfo.mImage.width) * mVideoInfo.mImage.height;
    179 
    180        if (!pixelCount.isValid() ||
    181            pixelCount.value() > MAX_H264_PIXEL_COUNT) {
    182          mIsValid = false;
    183          return MediaResult(
    184              NS_ERROR_DOM_MEDIA_FATAL_ERR,
    185              RESULT_DETAIL("Can't decode H.264 stream because its "
    186                            "resolution is out of the maximum limitation"));
    187        }
    188      }
    189      break;
    190    default:
    191      break;
    192  }
    193 
    194  return NS_OK;
    195 }
    196 
// Top-level initialization: validates the configuration, then runs
// InitInternal() on an MTA thread as required by the IMFTransform interface.
MediaResult WMFVideoMFTManager::Init() {
  AUTO_PROFILER_LABEL("WMFVideoMFTManager::Init", MEDIA_PLAYBACK);
  // Reject configurations known to be undecodable before touching any MFT
  // machinery.
  MediaResult result = ValidateVideoInfo();
  if (NS_FAILED(result)) {
    return result;
  }

  // InitInternal() indirectly calls IMFTransform interface and should run on
  // MTA thread.
  // https://msdn.microsoft.com/en-us/library/windows/desktop/ee892371(v=vs.85).aspx#components
  mozilla::mscom::EnsureMTA([&]() { result = InitInternal(); });
  if (NS_SUCCEEDED(result) && mDXVA2Manager) {
    // If we had some failures but eventually made it work,
    // make sure we preserve the messages.
    mDXVAFailureReason.AppendLiteral("Using D3D11 API");
  }

  return result;
}
    216 
// Creates and configures the MFT decoder, negotiating DXVA hardware
// acceleration when possible. May call itself once more (with mDXVAEnabled
// cleared) if the DXVA path turns out to be unsupported for this stream.
// Must run on an MTA thread (see Init()).
MediaResult WMFVideoMFTManager::InitInternal() {
  // The H264 SanityTest uses a 132x132 videos to determine if DXVA can be used.
  // so we want to use the software decoder for videos with lower resolutions.
  static const int MIN_H264_HW_WIDTH = 132;
  static const int MIN_H264_HW_HEIGHT = 132;

  mUseHwAccel = false;  // default value; changed if D3D setup succeeds.
  bool useDxva = true;

  // Small H.264 videos go straight to the software path.
  if (mStreamType == WMFStreamType::H264 &&
      (mVideoInfo.ImageRect().width < MIN_H264_HW_WIDTH ||
       mVideoInfo.ImageRect().height < MIN_H264_HW_HEIGHT)) {
    useDxva = false;
    mDXVAFailureReason = nsPrintfCString(
        "H264 video resolution too low: %" PRIu32 "x%" PRIu32,
        mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
  }

  if (useDxva) {
    useDxva = InitializeDXVA();
  }

  RefPtr<MFTDecoder> decoder = new MFTDecoder();
  RETURN_PARAM_IF_FAILED(
      WMFDecoderModule::CreateMFTDecoder(mStreamType, decoder),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Can't create the MFT decoder.")));

  RefPtr<IMFAttributes> attr(decoder->GetAttributes());
  UINT32 aware = 0;
  if (attr) {
    // MF_SA_D3D_AWARE tells us whether the MFT can accept a D3D device
    // manager at all; checked further below before enabling DXVA.
    attr->GetUINT32(MF_SA_D3D_AWARE, &aware);
    attr->SetUINT32(CODECAPI_AVDecNumWorkerThreads,
                    WMFDecoderModule::GetNumDecoderThreads());
    bool lowLatency = StaticPrefs::media_wmf_low_latency_enabled();
    if (mLowLatency || lowLatency) {
      // Best-effort: low latency mode is optional and failure is only logged.
      HRESULT hr = attr->SetUINT32(CODECAPI_AVLowLatencyMode, TRUE);
      if (SUCCEEDED(hr)) {
        LOG("Enabling Low Latency Mode");
      } else {
        LOG("Couldn't enable Low Latency Mode");
      }
    }

    // TODO(https://bugzilla.mozilla.org/show_bug.cgi?id=2008886)
    // The zero-copy implementation doesn't support P010 for HDR video yet, only
    // NV12 - change this when it is implemented.
    if (gfx::gfxVars::HwDecodedVideoZeroCopy() && mKnowsCompositor &&
        mKnowsCompositor->UsingHardwareWebRender() && mDXVA2Manager &&
        mDXVA2Manager->SupportsZeroCopyNV12Texture() &&
        mColorDepth == gfx::ColorDepth::COLOR_8 && !IsHDR()) {
      mZeroCopyNV12Texture = true;
      const int kOutputBufferSize = 10;

      // Each picture buffer can store a sample, plus one in
      // pending_output_samples_. The decoder adds this number to the number of
      // reference pictures it expects to need and uses that to determine the
      // array size of the output texture.
      const int kMaxOutputSamples = kOutputBufferSize + 1;
      attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT_PROGRESSIVE,
                      kMaxOutputSamples);
      attr->SetUINT32(MF_SA_MINIMUM_OUTPUT_SAMPLE_COUNT, kMaxOutputSamples);
    }
  }

  if (useDxva) {
    if (aware) {
      // TODO: Test if I need this anywhere... Maybe on Vista?
      // hr = attr->SetUINT32(CODECAPI_AVDecVideoAcceleration_H264, TRUE);
      // NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
      MOZ_ASSERT(mDXVA2Manager);
      // Hand the DXVA device manager to the MFT; success here is what
      // actually enables the hardware path.
      ULONG_PTR manager = ULONG_PTR(mDXVA2Manager->GetDXVADeviceManager());
      HRESULT hr =
          decoder->SendMFTMessage(MFT_MESSAGE_SET_D3D_MANAGER, manager);
      if (SUCCEEDED(hr)) {
        mUseHwAccel = true;
      } else {
        mDXVAFailureReason = nsPrintfCString(
            "MFT_MESSAGE_SET_D3D_MANAGER failed with code %lX", hr);
      }
    } else {
      mDXVAFailureReason.AssignLiteral(
          "Decoder returned false for MF_SA_D3D_AWARE");
    }
  }

  if (!mDXVAFailureReason.IsEmpty()) {
    // DXVA failure reason being set can mean that D3D11 failed, or that DXVA is
    // entirely disabled.
    LOG("DXVA failure: %s", mDXVAFailureReason.get());
  }

  if (!mUseHwAccel) {
    if (mDXVA2Manager) {
      // Either mDXVAEnabled was set to false prior the second call to
      // InitInternal() due to CanUseDXVA() returning false, or
      // MFT_MESSAGE_SET_D3D_MANAGER failed
      mDXVA2Manager.reset();
    }
    // These codecs are only decoded via WMF when hardware acceleration is
    // available; fail fatally so another (software) PDM can take over.
    if (mStreamType == WMFStreamType::VP9 ||
        mStreamType == WMFStreamType::VP8 ||
        mStreamType == WMFStreamType::AV1 ||
        mStreamType == WMFStreamType::HEVC) {
      return MediaResult(
          NS_ERROR_DOM_MEDIA_FATAL_ERR,
          RESULT_DETAIL("Use VP8/VP9/AV1 MFT only if HW acceleration "
                        "is available."));
    }
    glean::media::decoder_backend_used.AccumulateSingleSample(
        uint32_t(media::MediaDecoderBackend::WMFSoftware));
  }

  // Note that some HDR videos are 8bit, and end up decoding to NV12/YV12,
  // rather than the more obvious P010, and the decoder won't let us force P010.
  // See https://bugzilla.mozilla.org/show_bug.cgi?id=2008887
  const GUID& outputSubType = GetOutputSubtype();
  LOG("Created a video decoder, useDxva=%s, streamType=%s, outputSubType=%s, "
      "isHDR=%u",
      mUseHwAccel ? "Yes" : "No", EnumValueToString(mStreamType),
      GetSubTypeStr(outputSubType).get(), (unsigned int)IsHDR());

  mDecoder = decoder;
  RETURN_PARAM_IF_FAILED(
      SetDecoderMediaTypes(outputSubType),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to set the decoder media types")));

  RefPtr<IMFMediaType> inputType;
  RETURN_PARAM_IF_FAILED(
      mDecoder->GetInputMediaType(inputType),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to get the input media type")));

  RefPtr<IMFMediaType> outputType;
  RETURN_PARAM_IF_FAILED(
      mDecoder->GetOutputMediaType(outputType),
      MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                  RESULT_DETAIL("Fail to get the output media type")));

  if (mUseHwAccel && !CanUseDXVA(inputType, outputType)) {
    LOG("DXVA manager determined that the input type was unsupported in "
        "hardware, retrying init without DXVA.");
    mDXVAEnabled = false;
    // DXVA initialization with current decoder actually failed,
    // re-do initialization.
    return InitInternal();
  }

  LOG("Video Decoder initialized, Using DXVA: %s",
      (mUseHwAccel ? "Yes" : "No"));

  // Now we need to convert the video decode output to a display format.
  if (mUseHwAccel) {
    RETURN_PARAM_IF_FAILED(
        mDXVA2Manager->ConfigureForSize(
            outputType,
            mColorSpace.refOr(
                DefaultColorSpace({mImageSize.width, mImageSize.height})),
            mColorRange, mColorDepth,
            mVideoInfo.mTransferFunction.refOr(gfx::TransferFunction::BT709),
            mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height),
        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                    RESULT_DETAIL("Fail to configure image size for "
                                  "DXVA2Manager.")));
  } else {
    // Software path: remember the stride so CreateBasicVideoFrame() can lay
    // out the Y'CbCr planes when no IMF2DBuffer is available.
    GetDefaultStride(outputType, mVideoInfo.ImageRect().width, &mVideoStride);
  }
  LOG("WMFVideoMFTManager frame geometry stride=%u picture=(%d, %d, %d, %d) "
      "display=(%d,%d)",
      mVideoStride, mVideoInfo.ImageRect().x, mVideoInfo.ImageRect().y,
      mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height,
      mVideoInfo.mDisplay.width, mVideoInfo.mDisplay.height);

  if (!mUseHwAccel) {
    RefPtr<ID3D11Device> device = gfx::DeviceManagerDx::Get()->GetImageDevice();
    if (device) {
      mIMFUsable = true;
    }
  }
  return MediaResult(NS_OK);
}
    398 
    399 HRESULT
    400 WMFVideoMFTManager::SetDecoderMediaTypes(const GUID& aFallbackSubType) {
    401  // Setup the input/output media types.
    402  RefPtr<IMFMediaType> inputType;
    403  RETURN_IF_FAILED(wmf::MFCreateMediaType(getter_AddRefs(inputType)));
    404  RETURN_IF_FAILED(inputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
    405  RETURN_IF_FAILED(inputType->SetGUID(MF_MT_SUBTYPE, GetMediaSubtypeGUID()));
    406  RETURN_IF_FAILED(inputType->SetUINT32(
    407      MF_MT_INTERLACE_MODE, MFVideoInterlace_MixedInterlaceOrProgressive));
    408  RETURN_IF_FAILED(
    409      inputType->SetUINT32(MF_MT_INTERLACE_MODE, MFVideoInterlace_Progressive));
    410  RETURN_IF_FAILED(MFSetAttributeSize(inputType, MF_MT_FRAME_SIZE,
    411                                      mVideoInfo.ImageRect().width,
    412                                      mVideoInfo.ImageRect().height));
    413  UINT32 fpsDenominator = 1000;
    414  UINT32 fpsNumerator = static_cast<uint32_t>(mFramerate * fpsDenominator);
    415  if (fpsNumerator > 0) {
    416    RETURN_IF_FAILED(MFSetAttributeRatio(inputType, MF_MT_FRAME_RATE,
    417                                         fpsNumerator, fpsDenominator));
    418  }
    419 
    420  RefPtr<IMFMediaType> outputType;
    421  RETURN_IF_FAILED(wmf::MFCreateMediaType(getter_AddRefs(outputType)));
    422  RETURN_IF_FAILED(outputType->SetGUID(MF_MT_MAJOR_TYPE, MFMediaType_Video));
    423  RETURN_IF_FAILED(MFSetAttributeSize(outputType, MF_MT_FRAME_SIZE,
    424                                      mVideoInfo.ImageRect().width,
    425                                      mVideoInfo.ImageRect().height));
    426  if (fpsNumerator > 0) {
    427    RETURN_IF_FAILED(MFSetAttributeRatio(outputType, MF_MT_FRAME_RATE,
    428                                         fpsNumerator, fpsDenominator));
    429  }
    430 
    431  RETURN_IF_FAILED(outputType->SetGUID(MF_MT_SUBTYPE, GetOutputSubtype()));
    432 
    433  if (mZeroCopyNV12Texture) {
    434    RefPtr<IMFAttributes> attr(mDecoder->GetOutputStreamAttributes());
    435    if (attr) {
    436      RETURN_IF_FAILED(attr->SetUINT32(MF_SA_D3D11_SHARED_WITHOUT_MUTEX, TRUE));
    437      RETURN_IF_FAILED(
    438          attr->SetUINT32(MF_SA_D3D11_BINDFLAGS,
    439                          D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_DECODER));
    440    }
    441  }
    442  return mDecoder->SetMediaTypes(inputType, outputType, aFallbackSubType);
    443 }
    444 
    445 HRESULT
    446 WMFVideoMFTManager::Input(MediaRawData* aSample) {
    447  if (!mIsValid) {
    448    return E_FAIL;
    449  }
    450 
    451  if (!mDecoder) {
    452    // This can happen during shutdown.
    453    return E_FAIL;
    454  }
    455 
    456  mTrackingId.apply([&](const auto& aId) {
    457    MediaInfoFlag flag = MediaInfoFlag::None;
    458    flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
    459                                : MediaInfoFlag::NonKeyFrame);
    460    flag |= (mUseHwAccel ? MediaInfoFlag::HardwareDecoding
    461                         : MediaInfoFlag::SoftwareDecoding);
    462    switch (mStreamType) {
    463      case WMFStreamType::H264:
    464        flag |= MediaInfoFlag::VIDEO_H264;
    465        break;
    466      case WMFStreamType::VP8:
    467        flag |= MediaInfoFlag::VIDEO_VP8;
    468        break;
    469      case WMFStreamType::VP9:
    470        flag |= MediaInfoFlag::VIDEO_VP9;
    471        break;
    472      case WMFStreamType::AV1:
    473        flag |= MediaInfoFlag::VIDEO_AV1;
    474        break;
    475      case WMFStreamType::HEVC:
    476        flag |= MediaInfoFlag::VIDEO_HEVC;
    477        break;
    478      default:
    479        break;
    480    };
    481    mPerformanceRecorder.Start(aSample->mTime.ToMicroseconds(),
    482                               "WMFVideoDecoder"_ns, aId, flag);
    483  });
    484 
    485  RefPtr<IMFSample> inputSample;
    486  HRESULT hr = mDecoder->CreateInputSample(
    487      aSample->Data(), uint32_t(aSample->Size()),
    488      aSample->mTime.ToMicroseconds(), aSample->mDuration.ToMicroseconds(),
    489      &inputSample);
    490  NS_ENSURE_TRUE(SUCCEEDED(hr) && inputSample != nullptr, hr);
    491  LOGV("WMFVIdeoMFTManager(%p)::Input: %s", this,
    492       aSample->mDuration.ToString().get());
    493 
    494  if (!mColorSpace && aSample->mTrackInfo) {
    495    // The colorspace definition is found in the H264 SPS NAL, available out of
    496    // band, while for VP9 it's only available within the VP9 bytestream.
    497    // The info would have been updated by the MediaChangeMonitor.
    498    mColorSpace = aSample->mTrackInfo->GetAsVideoInfo()->mColorSpace;
    499    mColorRange = aSample->mTrackInfo->GetAsVideoInfo()->mColorRange;
    500  }
    501  mLastDuration = aSample->mDuration;
    502 
    503  if (mKeepOriginalPts) {
    504    mPTSQueue.InsertElementSorted(aSample->mTime.ToMicroseconds());
    505  }
    506 
    507  // Forward sample data to the decoder.
    508  return mDecoder->Input(inputSample);
    509 }
    510 
// The MFTransforms we use for decoding H264 and AV1 video will silently fall
// back to software decoding (even if we've negotiated DXVA) if the GPU
// doesn't support decoding the given codec and resolution. It will then upload
// the software decoded frames into d3d textures to preserve behaviour.
//
// Unfortunately this seems to cause corruption (see bug 1193547) and is
// slow because the upload is done into a non-shareable texture and requires
// us to copy it.
//
// This code tests if the given codec and resolution can be supported directly
// on the GPU, and makes sure we only ask the MFT for DXVA if it can be
// supported properly.
//
// Ideally we'd know the framerate during initialization and would also ensure
// that new decoders are created if the resolution changes. Then we could move
// this check into Init and consolidate the main thread blocking code.
//
// Returns true when the DXVA manager reports the negotiated input/output
// types are decodable directly in hardware.
bool WMFVideoMFTManager::CanUseDXVA(IMFMediaType* aInputType,
                                    IMFMediaType* aOutputType) {
  MOZ_ASSERT(mDXVA2Manager);
  // Check if we're able to use hardware decoding for the current codec config.
  return mDXVA2Manager->SupportsConfig(mVideoInfo, aInputType, aOutputType);
}
    533 
    534 TimeUnit WMFVideoMFTManager::GetSampleDurationOrLastKnownDuration(
    535    IMFSample* aSample) const {
    536  TimeUnit duration = GetSampleDuration(aSample);
    537  if (!duration.IsValid()) {
    538    // WMF returned a non-success code (likely duration unknown, but the API
    539    // also allows for other, unspecified codes).
    540    LOG("Got unknown sample duration -- bad return code. Using mLastDuration.");
    541  } else if (duration == TimeUnit::Zero()) {
    542    // Duration is zero. WMF uses this to indicate an unknown duration.
    543    LOG("Got unknown sample duration -- zero duration returned. Using "
    544        "mLastDuration.");
    545  } else if (duration.IsNegative()) {
    546    // A negative duration will cause issues up the stack. It's also unclear
    547    // why this would happen, but the API allows for it by returning a signed
    548    // int, so we handle it here.
    549    LOG("Got negative sample duration: %f seconds. Using mLastDuration "
    550        "instead.",
    551        duration.ToSeconds());
    552  } else {
    553    // We got a duration without any problems.
    554    return duration;
    555  }
    556 
    557  return mLastDuration;
    558 }
    559 
// Converts a software-decoded IMFSample into a VideoData, mapping the
// decoder's YV12/P010/P016 buffer layout onto a YCbCrBuffer and either
// copying it (CreateAndCopyData) or wrapping it in a PlanarYCbCrImage.
// On success, *aOutVideoData receives an addref'd VideoData (may be null if
// CreateAndCopyData failed) and S_OK is returned.
HRESULT
WMFVideoMFTManager::CreateBasicVideoFrame(IMFSample* aSample,
                                          int64_t aStreamOffset,
                                          VideoData** aOutVideoData) {
  NS_ENSURE_TRUE(aSample, E_POINTER);
  NS_ENSURE_TRUE(aOutVideoData, E_POINTER);

  *aOutVideoData = nullptr;

  HRESULT hr;
  RefPtr<IMFMediaBuffer> buffer;

  // Must convert to contiguous buffer to use IMD2DBuffer interface.
  hr = aSample->ConvertToContiguousBuffer(getter_AddRefs(buffer));
  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);

  // Try and use the IMF2DBuffer interface if available, otherwise fallback
  // to the IMFMediaBuffer interface. Apparently IMF2DBuffer is more efficient,
  // but only some systems (Windows 8?) support it.
  BYTE* data = nullptr;
  LONG stride = 0;
  RefPtr<IMF2DBuffer> twoDBuffer;
  hr = buffer->QueryInterface(
      static_cast<IMF2DBuffer**>(getter_AddRefs(twoDBuffer)));
  if (SUCCEEDED(hr)) {
    hr = twoDBuffer->Lock2D(&data, &stride);
    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
  } else {
    hr = buffer->Lock(&data, nullptr, nullptr);
    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    // IMFMediaBuffer carries no stride info; fall back on the stride
    // negotiated in InitInternal().
    stride = mVideoStride;
  }

  const GUID& subType = mDecoder->GetOutputMediaSubType();
  MOZ_DIAGNOSTIC_ASSERT(subType == MFVideoFormat_YV12 ||
                        subType == MFVideoFormat_P010 ||
                        subType == MFVideoFormat_P016);
  const gfx::ColorDepth colorDepth = subType == MFVideoFormat_YV12
                                         ? gfx::ColorDepth::COLOR_8
                                         : gfx::ColorDepth::COLOR_16;

  // YV12, planar format (3 planes): [YYYY....][VVVV....][UUUU....]
  // i.e., Y, then V, then U.
  // P010, P016 planar format (2 planes) [YYYY....][UVUV...]
  // See
  // https://docs.microsoft.com/en-us/windows/desktop/medfound/10-bit-and-16-bit-yuv-video-formats
  VideoData::YCbCrBuffer b;

  // mSoftwareImageSize was padded to a multiple of 16 rows in the ctor.
  const uint32_t videoWidth = mSoftwareImageSize.width;
  const uint32_t videoHeight = mSoftwareImageSize.height;

  // Y (Y') plane
  b.mPlanes[0].mData = data;
  b.mPlanes[0].mStride = stride;
  b.mPlanes[0].mHeight = videoHeight;
  b.mPlanes[0].mWidth = videoWidth;
  b.mPlanes[0].mSkip = 0;

  MOZ_DIAGNOSTIC_ASSERT(mSoftwareImageSize.height % 16 == 0,
                        "decoded height must be 16 bytes aligned");
  const uint32_t y_size = stride * mSoftwareImageSize.height;
  const uint32_t v_size = stride * mSoftwareImageSize.height / 4;
  const uint32_t halfStride = (stride + 1) / 2;
  const uint32_t halfHeight = (videoHeight + 1) / 2;
  const uint32_t halfWidth = (videoWidth + 1) / 2;

  if (subType == MFVideoFormat_YV12) {
    // YV12 stores V before U, hence U starts after both Y and V.
    // U plane (Cb)
    b.mPlanes[1].mData = data + y_size + v_size;
    b.mPlanes[1].mStride = halfStride;
    b.mPlanes[1].mHeight = halfHeight;
    b.mPlanes[1].mWidth = halfWidth;
    b.mPlanes[1].mSkip = 0;

    // V plane (Cr)
    b.mPlanes[2].mData = data + y_size;
    b.mPlanes[2].mStride = halfStride;
    b.mPlanes[2].mHeight = halfHeight;
    b.mPlanes[2].mWidth = halfWidth;
    b.mPlanes[2].mSkip = 0;
  } else {
    // P010/P016: interleaved UV plane of 16-bit samples; mSkip = 1 makes the
    // reader step over the other chroma component each sample.
    // U plane (Cb)
    b.mPlanes[1].mData = data + y_size;
    b.mPlanes[1].mStride = stride;
    b.mPlanes[1].mHeight = halfHeight;
    b.mPlanes[1].mWidth = halfWidth;
    b.mPlanes[1].mSkip = 1;

    // V plane (Cr)
    b.mPlanes[2].mData = data + y_size + sizeof(short);
    b.mPlanes[2].mStride = stride;
    b.mPlanes[2].mHeight = halfHeight;
    b.mPlanes[2].mWidth = halfWidth;
    b.mPlanes[2].mSkip = 1;
  }

  b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;

  // YuvColorSpace
  b.mYUVColorSpace =
      mColorSpace.refOr(DefaultColorSpace({videoWidth, videoHeight}));
  b.mColorDepth = colorDepth;
  b.mColorRange = mColorRange;

  TimeUnit pts = GetSampleTime(aSample);
  if (!pts.IsValid() && mKeepOriginalPts) {
    LOG("Couldn't get pts from IMFSample, falling back on container pts");
    pts = TimeUnit::Zero();
  }
  TimeUnit duration = GetSampleDurationOrLastKnownDuration(aSample);
  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
  gfx::IntRect pictureRegion = mVideoInfo.ScaledImageRect(
      mSoftwarePictureSize.width, mSoftwarePictureSize.height);

  // Copy path: used for >8-bit content or when D3D11/IMF images aren't
  // usable; the locked buffer is released before returning.
  if (colorDepth != gfx::ColorDepth::COLOR_8 || !mKnowsCompositor ||
      !mKnowsCompositor->SupportsD3D11() || !mIMFUsable) {
    Result<already_AddRefed<VideoData>, MediaResult> r =
        VideoData::CreateAndCopyData(
            mVideoInfo, mImageContainer, aStreamOffset, pts, duration, b, false,
            TimeUnit::FromMicroseconds(-1), pictureRegion, mKnowsCompositor);
    RefPtr<VideoData> v = r.unwrapOr(nullptr);
    if (twoDBuffer) {
      twoDBuffer->Unlock2D();
    } else {
      buffer->Unlock();
    }
    v.forget(aOutVideoData);
    return S_OK;
  }

  RefPtr<layers::PlanarYCbCrImage> image;
  RefPtr<ID3D11Device> device = gfx::DeviceManagerDx::Get()->GetImageDevice();
  if (XRE_IsGPUProcess() && layers::FenceD3D11::IsSupported(device)) {
    // Store YCbCr to 3 ID3D11Texture2Ds
    image = new IMFYCbCrImage(buffer, twoDBuffer, mKnowsCompositor,
                              mImageContainer);
    VideoData::SetVideoDataToImage(image, mVideoInfo, b, pictureRegion, false);
  } else {
    // Store YCbCr to shmem
    image = mImageContainer->CreatePlanarYCbCrImage();
    VideoData::SetVideoDataToImage(image, mVideoInfo, b, pictureRegion, true);
  }

  RefPtr<VideoData> v = VideoData::CreateFromImage(
      mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false,
      TimeUnit::FromMicroseconds(-1));

  // Report decode-stage details for the profiler.
  mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) {
    aStage.SetColorDepth(b.mColorDepth);
    aStage.SetColorRange(b.mColorRange);
    aStage.SetYUVColorSpace(b.mYUVColorSpace);
    if (subType == MFVideoFormat_NV12) {
      aStage.SetImageFormat(DecodeStage::NV12);
    } else if (subType == MFVideoFormat_YV12) {
      aStage.SetImageFormat(DecodeStage::YV12);
    } else if (subType == MFVideoFormat_P010) {
      aStage.SetImageFormat(DecodeStage::P010);
    } else if (subType == MFVideoFormat_P016) {
      aStage.SetImageFormat(DecodeStage::P016);
    }
    aStage.SetResolution(videoWidth, videoHeight);
    aStage.SetStartTimeAndEndTime(v->mTime.ToMicroseconds(),
                                  v->GetEndTime().ToMicroseconds());
  });

  v.forget(aOutVideoData);
  return S_OK;
}
    728 
    729 HRESULT
    730 WMFVideoMFTManager::CreateD3DVideoFrame(IMFSample* aSample,
    731                                        int64_t aStreamOffset,
    732                                        VideoData** aOutVideoData) {
    733  NS_ENSURE_TRUE(aSample, E_POINTER);
    734  NS_ENSURE_TRUE(aOutVideoData, E_POINTER);
    735  NS_ENSURE_TRUE(mDXVA2Manager, E_ABORT);
    736  NS_ENSURE_TRUE(mUseHwAccel, E_ABORT);
    737 
    738  *aOutVideoData = nullptr;
    739  HRESULT hr;
    740 
    741  gfx::IntRect pictureRegion =
    742      mVideoInfo.ScaledImageRect(mImageSize.width, mImageSize.height);
    743  RefPtr<Image> image;
    744  if (mZeroCopyNV12Texture && mDXVA2Manager->SupportsZeroCopyNV12Texture()) {
    745    hr = mDXVA2Manager->WrapTextureWithImage(aSample, pictureRegion,
    746                                             getter_AddRefs(image));
    747  } else {
    748    hr = mDXVA2Manager->CopyToImage(aSample, pictureRegion,
    749                                    getter_AddRefs(image));
    750    NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    751  }
    752  NS_ENSURE_TRUE(image, E_FAIL);
    753 
    754  gfx::IntSize size = image->GetSize();
    755 
    756  TimeUnit pts = GetSampleTime(aSample);
    757  if (!pts.IsValid() && mKeepOriginalPts) {
    758    LOG("Couldn't get pts from IMFSample, falling back on container pts");
    759    pts = TimeUnit::Zero();
    760  }
    761  NS_ENSURE_TRUE(pts.IsValid(), E_FAIL);
    762  TimeUnit duration = GetSampleDurationOrLastKnownDuration(aSample);
    763  NS_ENSURE_TRUE(duration.IsValid(), E_FAIL);
    764  RefPtr<VideoData> v = VideoData::CreateFromImage(
    765      mVideoInfo.mDisplay, aStreamOffset, pts, duration, image.forget(), false,
    766      TimeUnit::FromMicroseconds(-1));
    767 
    768  NS_ENSURE_TRUE(v, E_FAIL);
    769 
    770  mPerformanceRecorder.Record(pts.ToMicroseconds(), [&](DecodeStage& aStage) {
    771    aStage.SetColorDepth(mVideoInfo.mColorDepth);
    772    aStage.SetColorRange(mColorRange);
    773    aStage.SetYUVColorSpace(mColorSpace.refOr(
    774        DefaultColorSpace({mImageSize.width, mImageSize.height})));
    775    const GUID& subType = mDecoder->GetOutputMediaSubType();
    776    if (subType == MFVideoFormat_NV12) {
    777      aStage.SetImageFormat(DecodeStage::NV12);
    778    } else if (subType == MFVideoFormat_YV12) {
    779      aStage.SetImageFormat(DecodeStage::YV12);
    780    } else if (subType == MFVideoFormat_P010) {
    781      aStage.SetImageFormat(DecodeStage::P010);
    782    } else if (subType == MFVideoFormat_P016) {
    783      aStage.SetImageFormat(DecodeStage::P016);
    784    }
    785    aStage.SetResolution(size.width, size.height);
    786    aStage.SetStartTimeAndEndTime(v->mTime.ToMicroseconds(),
    787                                  v->GetEndTime().ToMicroseconds());
    788  });
    789 
    790  v.forget(aOutVideoData);
    791  return S_OK;
    792 }
    793 
    794 // Blocks until decoded sample is produced by the decoder.
    795 HRESULT
    796 WMFVideoMFTManager::Output(int64_t aStreamOffset, RefPtr<MediaData>& aOutData) {
    797  RefPtr<IMFSample> sample;
    798  HRESULT hr;
    799  aOutData = nullptr;
    800  int typeChangeCount = 0;
    801 
    802  // Loop until we decode a sample, or an unexpected error that we can't
    803  // handle occurs.
    804  while (true) {
    805    hr = mDecoder->Output(&sample);
    806    if (hr == MF_E_TRANSFORM_NEED_MORE_INPUT) {
    807      LOGV("WMFVideoMFTManager(%p)::Output: need more input", this);
    808      return MF_E_TRANSFORM_NEED_MORE_INPUT;
    809    }
    810 
    811    if (hr == MF_E_TRANSFORM_STREAM_CHANGE) {
    812      LOGV("WMFVideoMFTManager(%p)::Output: transform stream change", this);
    813      MOZ_ASSERT(!sample);
    814      // Video stream output type change, probably geometric aperture change or
    815      // pixel type.
    816      // We must reconfigure the decoder output type.
    817 
    818      // Attempt to find an appropriate SubType for video decoding:
    819      // * If the video is SDR we prefer decoding in 8bit formats (NV12 for HW,
    820      //   YV12 for SW decode), if that decoder is unavailable we can use the
    821      //   10bit formats but they are more memory bandwidth intensive.
    822      // * If the video is HDR, we want to prefer the 10bit formats (P010/P016)
    823      //   because HDR videos typically use PQ transfer function which requires
    824      //   10bit to avoid severe banding artifacts, this probably matters less
    825      //   for HLG transfer function but that seems to be uncommon.
    826      //
    827      // Note that we deliberately pass GUID_NULL for aFallbackSubType to avoid
    828      // the full fallback logic - on the final attempt we specify two preferred
    829      // subtypes which will pick anything if both fail to be found; see
    830      // MFTDecoder::SetDecoderOutputType for the full logic.
    831      //
    832      // Conversion from this subtype to a display-ready format (e.g. BGRA8)
    833      // will be handled in DXVA2Manager below.
    834      const GUID& SDRSubType =
    835          mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
    836      bool preferP010 = mColorDepth > gfx::ColorDepth::COLOR_8 || IsHDR();
    837      if (preferP010) {
    838        if (FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
    839                        MFVideoFormat_P010, GUID_NULL))) &&
    840            FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
    841                        MFVideoFormat_P016, SDRSubType)))) {
    842          LOG("No suitable output format found");
    843          return hr;
    844        }
    845      } else {
    846        if (FAILED((hr = (mDecoder->FindDecoderOutputTypeWithSubtype(
    847                        SDRSubType, GUID_NULL)))) &&
    848            FAILED((hr = mDecoder->FindDecoderOutputTypeWithSubtype(
    849                        MFVideoFormat_P010, MFVideoFormat_P016)))) {
    850          LOG("No suitable output format found");
    851          return hr;
    852        }
    853      }
    854 
    855      RefPtr<IMFMediaType> outputType;
    856      hr = mDecoder->GetOutputMediaType(outputType);
    857      NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    858 
    859      // Now we need to convert the video decode output to a display format.
    860      if (mUseHwAccel) {
    861        hr = mDXVA2Manager->ConfigureForSize(
    862            outputType,
    863            mColorSpace.refOr(
    864                DefaultColorSpace({mImageSize.width, mImageSize.height})),
    865            mColorRange, mColorDepth,
    866            mVideoInfo.mTransferFunction.refOr(gfx::TransferFunction::BT709),
    867            mVideoInfo.ImageRect().width, mVideoInfo.ImageRect().height);
    868        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    869      } else {
    870        // The stride may have changed, recheck for it.
    871        hr = GetDefaultStride(outputType, mVideoInfo.ImageRect().width,
    872                              &mVideoStride);
    873        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    874 
    875        UINT32 width = 0, height = 0;
    876        hr = MFGetAttributeSize(outputType, MF_MT_FRAME_SIZE, &width, &height);
    877        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    878        NS_ENSURE_TRUE(width <= MAX_VIDEO_WIDTH, E_FAIL);
    879        NS_ENSURE_TRUE(height <= MAX_VIDEO_HEIGHT, E_FAIL);
    880        mSoftwareImageSize = gfx::IntSize(width, height);
    881 
    882        gfx::IntRect picture;
    883        hr = GetPictureRegion(outputType, picture);
    884        NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    885        MOZ_ASSERT(picture.width != 0 && picture.height != 0);
    886        mSoftwarePictureSize = gfx::IntSize(picture.width, picture.height);
    887        LOG("Output stream change, image size=[%ux%u], picture=[%u,%u]",
    888            mSoftwareImageSize.width, mSoftwareImageSize.height,
    889            mSoftwarePictureSize.width, mSoftwarePictureSize.height);
    890      }
    891      // Catch infinite loops, but some decoders perform at least 2 stream
    892      // changes on consecutive calls, so be permissive.
    893      // 100 is arbitrarily > 2.
    894      NS_ENSURE_TRUE(typeChangeCount < 100, MF_E_TRANSFORM_STREAM_CHANGE);
    895      // Loop back and try decoding again...
    896      ++typeChangeCount;
    897      continue;
    898    }
    899 
    900    if (SUCCEEDED(hr)) {
    901      if (!sample) {
    902        LOG("Video MFTDecoder returned success but no output!");
    903        // On some machines/input the MFT returns success but doesn't output
    904        // a video frame. If we detect this, try again, but only up to a
    905        // point; after 250 failures, give up. Note we count all failures
    906        // over the life of the decoder, as we may end up exiting with a
    907        // NEED_MORE_INPUT and coming back to hit the same error. So just
    908        // counting with a local variable (like typeChangeCount does) may
    909        // not work in this situation.
    910        ++mNullOutputCount;
    911        if (mNullOutputCount > 250) {
    912          LOG("Excessive Video MFTDecoder returning success but no output; "
    913              "giving up");
    914          mGotExcessiveNullOutput = true;
    915          return E_FAIL;
    916        }
    917        continue;
    918      }
    919      TimeUnit pts = GetSampleTime(sample);
    920      if (!pts.IsValid() && mKeepOriginalPts) {
    921        LOG("Couldn't get pts from IMFSample, falling back on container pts");
    922        pts = TimeUnit::Zero();
    923      }
    924      LOG("WMFVIdeoMFTManager(%p)::Output: %s", this, pts.ToString().get());
    925      TimeUnit duration = GetSampleDurationOrLastKnownDuration(sample);
    926 
    927      // AV1 MFT fix: Sample duration after seeking is always equal to the
    928      // sample time, for some reason. Set it to last duration instead.
    929      if (mStreamType == WMFStreamType::AV1 && duration == pts) {
    930        LOG("Video sample duration (%" PRId64 ") matched timestamp (%" PRId64
    931            "), setting to previous sample duration (%" PRId64 ") instead.",
    932            pts.ToMicroseconds(), duration.ToMicroseconds(),
    933            mLastDuration.ToMicroseconds());
    934        duration = mLastDuration;
    935        sample->SetSampleDuration(UsecsToHNs(duration.ToMicroseconds()));
    936      }
    937 
    938      if (!pts.IsValid() || !duration.IsValid()) {
    939        return E_FAIL;
    940      }
    941      if (mSeekTargetThreshold.isSome()) {
    942        if ((pts + duration) < mSeekTargetThreshold.ref()) {
    943          LOG("Dropping video frame which pts (%" PRId64 " + %" PRId64
    944              ") is smaller than seek target (%" PRId64 ").",
    945              pts.ToMicroseconds(), duration.ToMicroseconds(),
    946              mSeekTargetThreshold->ToMicroseconds());
    947          // It is necessary to clear the pointer to release the previous output
    948          // buffer.
    949          sample = nullptr;
    950          continue;
    951        }
    952        mSeekTargetThreshold.reset();
    953      }
    954      break;
    955    }
    956    // Else unexpected error so bail.
    957    NS_WARNING("WMFVideoMFTManager::Output() unexpected error");
    958    return hr;
    959  }
    960 
    961  RefPtr<VideoData> frame;
    962  if (mUseHwAccel) {
    963    hr = CreateD3DVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
    964  } else {
    965    hr = CreateBasicVideoFrame(sample, aStreamOffset, getter_AddRefs(frame));
    966  }
    967  // Frame should be non null only when we succeeded.
    968  MOZ_ASSERT((frame != nullptr) == SUCCEEDED(hr));
    969  NS_ENSURE_TRUE(SUCCEEDED(hr), hr);
    970  NS_ENSURE_TRUE(frame, E_FAIL);
    971 
    972  if (mKeepOriginalPts) {
    973    MOZ_ASSERT(!mPTSQueue.IsEmpty());
    974    int64_t originalPts = mPTSQueue[0];
    975    mPTSQueue.RemoveElementAt(0);
    976    LOG("Overriding decoded pts of %s with original pts of %" PRId64,
    977        frame->mTime.ToString().get(), originalPts);
    978    frame->mTime = TimeUnit::FromMicroseconds(originalPts);
    979  }
    980 
    981  aOutData = frame;
    982 
    983  if (mNullOutputCount) {
    984    mGotValidOutputAfterNullOutput = true;
    985  }
    986 
    987  return S_OK;
    988 }
    989 
// Flushes the underlying MFT (via the base class) and finalizes any pending
// performance-recorder entries.
void WMFVideoMFTManager::Flush() {
  MFTManager::Flush();
  // INT64_MAX is used as a past-the-end timestamp so that every outstanding
  // decode-stage record is flushed out of the recorder.
  mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
}
    994 
// Releases the decoder and the DXVA manager. The DXVA manager is notified
// first, while mDecoder is still alive — the ordering here is deliberate.
void WMFVideoMFTManager::Shutdown() {
  if (mDXVA2Manager) {
    mDXVA2Manager->BeforeShutdownVideoMFTDecoder();
  }
  mDecoder = nullptr;
  mDXVA2Manager.reset();
}
   1002 
   1003 bool WMFVideoMFTManager::IsHardwareAccelerated(
   1004    nsACString& aFailureReason) const {
   1005  aFailureReason = mDXVAFailureReason;
   1006  return mDecoder && mUseHwAccel;
   1007 }
   1008 
   1009 nsCString WMFVideoMFTManager::GetDescriptionName() const {
   1010  nsCString failureReason;
   1011  bool hw = IsHardwareAccelerated(failureReason);
   1012 
   1013  const char* formatName = [&]() {
   1014    if (!mDecoder) {
   1015      return "not initialized";
   1016    }
   1017    GUID format = mDecoder->GetOutputMediaSubType();
   1018    if (format == MFVideoFormat_NV12) {
   1019      if (!gfx::DeviceManagerDx::Get()->CanUseNV12()) {
   1020        return "nv12->argb32";
   1021      }
   1022      return "nv12";
   1023    }
   1024    if (format == MFVideoFormat_P010) {
   1025      if (!gfx::DeviceManagerDx::Get()->CanUseP010()) {
   1026        return "p010->a2rgb10";
   1027      }
   1028      return "p010";
   1029    }
   1030    if (format == MFVideoFormat_P016) {
   1031      if (!gfx::DeviceManagerDx::Get()->CanUseP016()) {
   1032        return "p016->argb16f";
   1033      }
   1034      return "p016";
   1035    }
   1036    if (format == MFVideoFormat_YV12) {
   1037      return "yv12";
   1038    }
   1039    return "unknown";
   1040  }();
   1041 
   1042  const char* dxvaName = [&]() {
   1043    if (!mDXVA2Manager) {
   1044      return "no DXVA";
   1045    }
   1046    return "D3D11";
   1047  }();
   1048 
   1049  return nsPrintfCString("wmf %s codec %s video decoder - %s, %s",
   1050                         EnumValueToString(mStreamType),
   1051                         hw ? "hardware" : "software", dxvaName, formatName);
   1052 }
   1053 nsCString WMFVideoMFTManager::GetCodecName() const {
   1054  switch (mStreamType) {
   1055    case WMFStreamType::H264:
   1056      return "h264"_ns;
   1057    case WMFStreamType::VP8:
   1058      return "vp8"_ns;
   1059    case WMFStreamType::VP9:
   1060      return "vp9"_ns;
   1061    case WMFStreamType::AV1:
   1062      return "av1"_ns;
   1063    case WMFStreamType::HEVC:
   1064      return "hevc"_ns;
   1065    default:
   1066      return "unknown"_ns;
   1067  };
   1068 }
   1069 
   1070 bool WMFVideoMFTManager::UseZeroCopyVideoFrame() const {
   1071  if (mZeroCopyNV12Texture && mDXVA2Manager &&
   1072      mDXVA2Manager->SupportsZeroCopyNV12Texture()) {
   1073    return true;
   1074  }
   1075  return false;
   1076 }
   1077 
   1078 GUID WMFVideoMFTManager::GetOutputSubtype() const {
   1079  switch (mVideoInfo.mColorDepth) {
   1080    case gfx::ColorDepth::COLOR_8:
   1081      return mUseHwAccel ? MFVideoFormat_NV12 : MFVideoFormat_YV12;
   1082    case gfx::ColorDepth::COLOR_10:
   1083      return MFVideoFormat_P010;
   1084    case gfx::ColorDepth::COLOR_12:
   1085    case gfx::ColorDepth::COLOR_16:
   1086      return MFVideoFormat_P016;
   1087    default:
   1088      MOZ_ASSERT_UNREACHABLE("Unexpected color depth");
   1089      return GUID_NULL;
   1090  }
   1091 }
   1092 
   1093 }  // namespace mozilla