tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

RemoteDataDecoder.cpp (44258B)


      1 /* This Source Code Form is subject to the terms of the Mozilla Public
      2 * License, v. 2.0. If a copy of the MPL was not distributed with this
      3 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      4 
      5 #include "RemoteDataDecoder.h"
      6 
      7 #include <jni.h>
      8 
      9 #include "AndroidBridge.h"
     10 #include "AndroidBuild.h"
     11 #include "AndroidDecoderModule.h"
     12 #include "EMEDecoderModule.h"
     13 #include "GLImages.h"
     14 #include "JavaCallbacksSupport.h"
     15 #include "MediaCodec.h"
     16 #include "MediaData.h"
     17 #include "MediaInfo.h"
     18 #include "PerformanceRecorder.h"
     19 #include "SimpleMap.h"
     20 #include "VPXDecoder.h"
     21 #include "VideoUtils.h"
     22 #include "mozilla/Maybe.h"
     23 #include "mozilla/Mutex.h"
     24 #include "mozilla/fallible.h"
     25 #include "mozilla/gfx/Matrix.h"
     26 #include "mozilla/gfx/Types.h"
     27 #include "mozilla/java/CodecProxyWrappers.h"
     28 #include "mozilla/java/GeckoSurfaceWrappers.h"
     29 #include "mozilla/java/SampleBufferWrappers.h"
     30 #include "mozilla/java/SampleWrappers.h"
     31 #include "mozilla/java/SurfaceAllocatorWrappers.h"
     32 #include "nsPromiseFlatString.h"
     33 #include "nsThreadUtils.h"
     34 #include "prlog.h"
     35 
     36 #undef LOG
     37 #define LOG(arg, ...)                                         \
     38  MOZ_LOG(sAndroidDecoderModuleLog, mozilla::LogLevel::Debug, \
     39          ("RemoteDataDecoder(%p)::%s: " arg, this, __func__, ##__VA_ARGS__))
     40 
     41 using namespace mozilla;
     42 using namespace mozilla::gl;
     43 using media::TimeUnit;
     44 
     45 namespace mozilla {
     46 
     47 // Hold a reference to the output buffer until we're ready to release it back to
     48 // the Java codec (for rendering or not).
     49 class RenderOrReleaseOutput {
     50 public:
     51  RenderOrReleaseOutput(java::CodecProxy::Param aCodec,
     52                        java::Sample::Param aSample)
     53      : mMutex("AndroidRenderOrReleaseOutput"),
     54        mCodec(aCodec),
     55        mSample(aSample) {}
     56 
     57  virtual ~RenderOrReleaseOutput() { ReleaseOutput(false); }
     58 
     59 protected:
     60  void ReleaseOutput(bool aToRender) {
     61    MutexAutoLock lock(mMutex);
     62    if (mCodec && mSample) {
     63      mCodec->ReleaseOutput(mSample, aToRender);
     64      mCodec = nullptr;
     65      mSample = nullptr;
     66    }
     67  }
     68 
     69 private:
     70  Mutex mMutex;
     71  java::CodecProxy::GlobalRef mCodec MOZ_GUARDED_BY(mMutex);
     72  java::Sample::GlobalRef mSample MOZ_GUARDED_BY(mMutex);
     73 };
     74 
     75 static bool areSmpte432ColorPrimariesBuggy() {
     76  if (jni::GetAPIVersion() >= 34) {
     77    const auto socManufacturer =
     78        java::sdk::Build::SOC_MANUFACTURER()->ToString();
     79    if (socManufacturer.EqualsASCII("Google")) {
     80      return true;
     81    }
     82  }
     83  return false;
     84 }
     85 
     86 static bool areBT709ColorPrimariesMisreported() {
     87  if (jni::GetAPIVersion() >= 33) {
     88    const auto socModel = java::sdk::Build::SOC_MODEL()->ToString();
     89    if (socModel.EqualsASCII("Tensor") || socModel.EqualsASCII("GS201")) {
     90      return true;
     91    }
     92  }
     93  return false;
     94 }
     95 
// Video half of RemoteDataDecoder: drives a remote Android MediaCodec via
// java::CodecProxy, rendering decoded frames into a GeckoSurface that is
// wrapped as a SurfaceTextureImage for the compositor. Most entry points
// must run on mThread (the serial event target captured in Init()); Java
// callbacks arriving on binder threads are re-dispatched there.
class RemoteVideoDecoder final : public RemoteDataDecoder {
 public:
  // Render the output to the surface when the frame is sent
  // to compositor, or release it if not presented.
  class CompositeListener
      : private RenderOrReleaseOutput,
        public layers::SurfaceTextureImage::SetCurrentCallback {
   public:
    CompositeListener(java::CodecProxy::Param aCodec,
                      java::Sample::Param aSample)
        : RenderOrReleaseOutput(aCodec, aSample) {}

    // Called when the image becomes current on the SurfaceTexture: render.
    void operator()(void) override { ReleaseOutput(true); }
  };

  // Per-input-sample metadata recorded at Decode() time and looked up by
  // presentation timestamp when the corresponding output frame arrives.
  class InputInfo {
   public:
    InputInfo() = default;

    InputInfo(const int64_t aDurationUs, const gfx::IntSize& aImageSize,
              const gfx::IntSize& aDisplaySize)
        : mDurationUs(aDurationUs),
          mImageSize(aImageSize),
          mDisplaySize(aDisplaySize) {}

    int64_t mDurationUs = {};
    gfx::IntSize mImageSize = {};
    gfx::IntSize mDisplaySize = {};
  };

  // Receives Java codec callbacks (binder thread) and forwards them to the
  // owning decoder, which re-dispatches to mThread where needed.
  class CallbacksSupport final : public JavaCallbacksSupport {
   public:
    explicit CallbacksSupport(RemoteVideoDecoder* aDecoder)
        : mDecoder(aDecoder) {}

    void HandleInput(int64_t aTimestamp, bool aProcessed) override {
      mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
    }

    void HandleOutput(java::Sample::Param aSample,
                      java::SampleBuffer::Param aBuffer) override {
      MOZ_ASSERT(!aBuffer, "Video sample should be bufferless");
      // aSample will be implicitly converted into a GlobalRef.
      mDecoder->ProcessOutput(aSample);
    }

    // Extracts color format (mandatory), range and standard (both optional)
    // from the new MediaFormat and forwards them to the decoder.
    void HandleOutputFormatChanged(
        java::sdk::MediaFormat::Param aFormat) override {
      int32_t colorFormat = 0;
      aFormat->GetInteger(java::sdk::MediaFormat::KEY_COLOR_FORMAT,
                          &colorFormat);
      if (colorFormat == 0) {
        mDecoder->Error(
            MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("Invalid color format:%d", colorFormat)));
        return;
      }

      Maybe<int32_t> colorRange;
      {
        int32_t range = 0;
        if (NS_SUCCEEDED(aFormat->GetInteger(
                java::sdk::MediaFormat::KEY_COLOR_RANGE, &range))) {
          colorRange.emplace(range);
        }
      }

      Maybe<int32_t> colorSpace;
      {
        int32_t space = 0;
        if (NS_SUCCEEDED(aFormat->GetInteger(
                java::sdk::MediaFormat::KEY_COLOR_STANDARD, &space))) {
          colorSpace.emplace(space);
        }
      }

      mDecoder->ProcessOutputFormatChange(colorFormat, colorRange, colorSpace);
    }

    void HandleError(const MediaResult& aError) override {
      mDecoder->Error(aError);
    }

    friend class RemoteDataDecoder;

   private:
    RemoteVideoDecoder* mDecoder;
  };

  RemoteVideoDecoder(const VideoInfo& aConfig,
                     java::sdk::MediaFormat::Param aFormat,
                     const nsString& aDrmStubId, Maybe<TrackingId> aTrackingId)
      : RemoteDataDecoder(MediaData::Type::VIDEO_DATA, aConfig.mMimeType,
                          aFormat, aDrmStubId),
        mConfig(aConfig),
        mTrackingId(std::move(aTrackingId)) {}

  ~RemoteVideoDecoder() {
    // Return the output surface to the allocator pool.
    if (mSurface) {
      java::SurfaceAllocator::DisposeSurface(mSurface);
    }
  }

  // Acquires an output surface, registers the JNI callbacks, and creates the
  // Java codec proxy. Must be called on the thread that will own the decoder.
  RefPtr<InitPromise> Init() override {
    mThread = GetCurrentSerialEventTarget();
    java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
    if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
        !bufferInfo) {
      return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
    }
    mInputBufferInfo = bufferInfo;

    mSurface =
        java::GeckoSurface::LocalRef(java::SurfaceAllocator::AcquireSurface(
            mConfig.mImage.width, mConfig.mImage.height, false));
    if (!mSurface) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }

    mSurfaceHandle = mSurface->GetHandle();

    // Register native methods.
    JavaCallbacksSupport::Init();

    mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
    if (!mJavaCallbacks) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    JavaCallbacksSupport::AttachNative(
        mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

    mJavaDecoder = java::CodecProxy::Create(
        false,  // false indicates to create a decoder and true denotes encoder
        mFormat, mSurface, mJavaCallbacks, mDrmStubId);
    if (mJavaDecoder == nullptr) {
      return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                          __func__);
    }
    mIsCodecSupportAdaptivePlayback =
        mJavaDecoder->IsAdaptivePlaybackSupported();
    mIsHardwareAccelerated = mJavaDecoder->IsHardwareAccelerated();

    // Pre-compute the flags reported to the performance recorder.
    mMediaInfoFlag = MediaInfoFlag::None;
    mMediaInfoFlag |= mIsHardwareAccelerated ? MediaInfoFlag::HardwareDecoding
                                             : MediaInfoFlag::SoftwareDecoding;
    if (mMimeType.EqualsLiteral("video/mp4") ||
        mMimeType.EqualsLiteral("video/avc")) {
      mMediaInfoFlag |= MediaInfoFlag::VIDEO_H264;
    } else if (mMimeType.EqualsLiteral("video/vp8")) {
      mMediaInfoFlag |= MediaInfoFlag::VIDEO_VP8;
    } else if (mMimeType.EqualsLiteral("video/vp9")) {
      mMediaInfoFlag |= MediaInfoFlag::VIDEO_VP9;
    } else if (mMimeType.EqualsLiteral("video/av1")) {
      mMediaInfoFlag |= MediaInfoFlag::VIDEO_AV1;
    }
    return InitPromise::CreateAndResolve(TrackInfo::kVideoTrack, __func__);
  }

  // Drops per-sample bookkeeping and seek state before flushing the codec.
  RefPtr<MediaDataDecoder::FlushPromise> Flush() override {
    AssertOnThread();
    mInputInfos.Clear();
    mSeekTarget.reset();
    mLatestOutputTime.reset();
    // Close out any in-flight performance recordings.
    mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
    return RemoteDataDecoder::Flush();
  }

  nsCString GetCodecName() const override {
    if (mMediaInfoFlag & MediaInfoFlag::VIDEO_H264) {
      return "h264"_ns;
    }
    if (mMediaInfoFlag & MediaInfoFlag::VIDEO_VP8) {
      return "vp8"_ns;
    }
    if (mMediaInfoFlag & MediaInfoFlag::VIDEO_VP9) {
      return "vp9"_ns;
    }
    if (mMediaInfoFlag & MediaInfoFlag::VIDEO_AV1) {
      return "av1"_ns;
    }
    return "unknown"_ns;
  }

  // Records per-sample size/duration info (keyed by timestamp) so the
  // matching output frame can be constructed, then forwards to the base.
  RefPtr<MediaDataDecoder::DecodePromise> Decode(
      MediaRawData* aSample) override {
    AssertOnThread();

    if (NeedsNewDecoder()) {
      return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER,
                                            __func__);
    }

    // Mid-stream config changes ride along on the sample; fall back to the
    // config we were created with.
    const VideoInfo* config =
        aSample->mTrackInfo ? aSample->mTrackInfo->GetAsVideoInfo() : &mConfig;
    MOZ_ASSERT(config);

    mTrackingId.apply([&](const auto& aId) {
      MediaInfoFlag flag = mMediaInfoFlag;
      flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
                                  : MediaInfoFlag::NonKeyFrame);
      mPerformanceRecorder.Start(aSample->mTime.ToMicroseconds(),
                                 "AndroidDecoder"_ns, aId, flag);
    });

    InputInfo info(aSample->mDuration.ToMicroseconds(), config->mImage,
                   config->mDisplay);
    mInputInfos.Insert(aSample->mTime.ToMicroseconds(), info);
    return RemoteDataDecoder::Decode(aSample);
  }

  bool SupportDecoderRecycling() const override {
    return mIsCodecSupportAdaptivePlayback;
  }

  // Stores the seek target on mThread; IsUsefulData() uses it to drop
  // frames that end before the target.
  void SetSeekThreshold(const TimeUnit& aTime) override {
    auto setter = [self = RefPtr{this}, aTime] {
      if (aTime.IsValid()) {
        self->mSeekTarget = Some(aTime);
      } else {
        self->mSeekTarget.reset();
      }
    };
    if (mThread->IsOnCurrentThread()) {
      setter();
    } else {
      nsCOMPtr<nsIRunnable> runnable = NS_NewRunnableFunction(
          "RemoteVideoDecoder::SetSeekThreshold", std::move(setter));
      nsresult rv = mThread->Dispatch(runnable.forget());
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      (void)rv;
    }
  }

  // Filters decoded output: drops frames older than the last emitted frame
  // and frames entirely before the pending seek target.
  bool IsUsefulData(const RefPtr<MediaData>& aSample) override {
    AssertOnThread();

    if (mLatestOutputTime && aSample->mTime < mLatestOutputTime.value()) {
      return false;
    }

    const TimeUnit endTime = aSample->GetEndTime();
    if (mSeekTarget && endTime <= mSeekTarget.value()) {
      return false;
    }

    mSeekTarget.reset();
    mLatestOutputTime = Some(endTime);
    return true;
  }

  bool IsHardwareAccelerated(nsACString& aFailureReason) const override {
    return mIsHardwareAccelerated;
  }

  ConversionRequired NeedsConversion() const override {
    return ConversionRequired::kNeedAnnexB;
  }

  Maybe<MediaDataDecoder::PropertyValue> GetDecodeProperty(
      MediaDataDecoder::PropertyName aName) const override {
    // Android has limited amount of output buffers. See Bug 794747.
    static constexpr uint32_t kNumOutputBuffers = 3;
    // SurfaceTexture can have only one current/renderable image at a time.
    // See Bug 1299068
    static constexpr uint32_t kNumCurrentImages = 1;
    switch (aName) {
      case PropertyName::MaxNumVideoBuffers:
        [[fallthrough]];
      case PropertyName::MinNumVideoBuffers:
        return Some(PropertyValue(kNumOutputBuffers));
      case PropertyName::MaxNumCurrentImages:
        return Some(PropertyValue(kNumCurrentImages));
      default:
        return MediaDataDecoder::GetDecodeProperty(aName);
    }
  }

 private:
  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  //
  // Turns a decoded Java sample into a VideoData wrapping a
  // SurfaceTextureImage, applying color-space workarounds, and feeds it to
  // the base class. Re-dispatches itself onto mThread when called from a
  // binder thread.
  void ProcessOutput(java::Sample::GlobalRef&& aSample) {
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv =
          mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&>(
              "RemoteVideoDecoder::ProcessOutput", this,
              &RemoteVideoDecoder::ProcessOutput, std::move(aSample)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      (void)rv;
      return;
    }

    AssertOnThread();
    if (GetState() == State::SHUTDOWN) {
      aSample->Dispose();
      return;
    }

    // Owns the sample until the compositor either presents or discards it.
    UniquePtr<layers::SurfaceTextureImage::SetCurrentCallback> releaseSample(
        new CompositeListener(mJavaDecoder, aSample));

    // If our output surface has been released (due to the GPU process crashing)
    // then request a new decoder, which will in turn allocate a new
    // Surface. This is usually handled by the Error() callback, but on some
    // devices (or at least on the emulator) the java decoder does not raise an
    // error when the Surface is released. So we raise this error here as well.
    if (NeedsNewDecoder()) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER,
                        RESULT_DETAIL("VideoCallBack::HandleOutput")));
      return;
    }

    java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    int32_t flags;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    if (!ok) {
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                        RESULT_DETAIL("VideoCallBack::HandleOutput")));
      return;
    }

    InputInfo inputInfo;
    ok = mInputInfos.Find(presentationTimeUs, inputInfo);
    bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
    if (!ok && !isEOS) {
      LOG("No corresponding input");
      // Ignore output with no corresponding input.
      return;
    }

    LOG("flags=%" PRIx32 " size=%" PRIi32 " presentationTimeUs=%" PRIi64, flags,
        size, presentationTimeUs);
    if (ok && (size > 0 || presentationTimeUs >= 0)) {
      bool forceBT709ColorSpace = false;
      // On certain devices SMPTE 432 color primaries are rendered incorrectly,
      // so we force BT709 to be used instead.
      // Color space 10 comes from the video in bug 1866020 and corresponds to
      // libstagefright's kColorStandardDCI_P3.
      // 65800 comes from the video in bug 1879720 and is vendor-specific.
      static bool isSmpte432Buggy = areSmpte432ColorPrimariesBuggy();
      if (isSmpte432Buggy &&
          (mColorSpace == Some(10) || mColorSpace == Some(65800))) {
        forceBT709ColorSpace = true;
      }

      // On certain devices the av1 decoder intermittently misreports some BT709
      // video frames as having BT601 color primaries. This results in a
      // flickering effect during playback whilst alternating between frames
      // which the GPU believes have different color spaces. To work around this
      // we force BT709 conversion to be used for all frames which the decoder
      // believes are BT601, as long as our demuxer has reported the color
      // primaries as BT709. See bug 1933055.
      static bool isBT709Misreported = areBT709ColorPrimariesMisreported();
      if (isBT709Misreported && mMediaInfoFlag & MediaInfoFlag::VIDEO_AV1 &&
          mConfig.mColorPrimaries == Some(gfx::ColorSpace2::BT709) &&
          // 4 = kColorStandardBT601_525
          mColorSpace == Some(4)) {
        forceBT709ColorSpace = true;
      }

      RefPtr<layers::Image> img = new layers::SurfaceTextureImage(
          mSurfaceHandle, inputInfo.mImageSize, false /* NOT continuous */,
          gl::OriginPos::BottomLeft, mConfig.HasAlpha(), forceBT709ColorSpace,
          /* aTransformOverride */ Nothing());
      img->AsSurfaceTextureImage()->RegisterSetCurrentCallback(
          std::move(releaseSample));

      RefPtr<VideoData> v = VideoData::CreateFromImage(
          inputInfo.mDisplaySize, offset,
          TimeUnit::FromMicroseconds(presentationTimeUs),
          TimeUnit::FromMicroseconds(inputInfo.mDurationUs), img.forget(),
          !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_SYNC_FRAME),
          TimeUnit::FromMicroseconds(presentationTimeUs));

      // Translate the Android color constants into profiler-stage metadata.
      mPerformanceRecorder.Record(presentationTimeUs, [&](DecodeStage& aStage) {
        using Cap = java::sdk::MediaCodecInfo::CodecCapabilities;
        using Fmt = java::sdk::MediaFormat;
        mColorFormat.apply([&](int32_t aFormat) {
          switch (aFormat) {
            case Cap::COLOR_Format32bitABGR8888:
            case Cap::COLOR_Format32bitARGB8888:
            case Cap::COLOR_Format32bitBGRA8888:
            case Cap::COLOR_FormatRGBAFlexible:
              aStage.SetImageFormat(DecodeStage::RGBA32);
              break;
            case Cap::COLOR_Format24bitBGR888:
            case Cap::COLOR_Format24bitRGB888:
            case Cap::COLOR_FormatRGBFlexible:
              aStage.SetImageFormat(DecodeStage::RGB24);
              break;
            case Cap::COLOR_FormatYUV411Planar:
            case Cap::COLOR_FormatYUV411PackedPlanar:
            case Cap::COLOR_FormatYUV420Planar:
            case Cap::COLOR_FormatYUV420PackedPlanar:
            case Cap::COLOR_FormatYUV420Flexible:
              aStage.SetImageFormat(DecodeStage::YUV420P);
              break;
            case Cap::COLOR_FormatYUV420SemiPlanar:
            case Cap::COLOR_FormatYUV420PackedSemiPlanar:
            case Cap::COLOR_QCOM_FormatYUV420SemiPlanar:
            case Cap::COLOR_TI_FormatYUV420PackedSemiPlanar:
              aStage.SetImageFormat(DecodeStage::NV12);
              break;
            case Cap::COLOR_FormatYCbYCr:
            case Cap::COLOR_FormatYCrYCb:
            case Cap::COLOR_FormatCbYCrY:
            case Cap::COLOR_FormatCrYCbY:
            case Cap::COLOR_FormatYUV422Planar:
            case Cap::COLOR_FormatYUV422PackedPlanar:
            case Cap::COLOR_FormatYUV422Flexible:
              aStage.SetImageFormat(DecodeStage::YUV422P);
              break;
            case Cap::COLOR_FormatYUV444Interleaved:
            case Cap::COLOR_FormatYUV444Flexible:
              aStage.SetImageFormat(DecodeStage::YUV444P);
              break;
            case Cap::COLOR_FormatSurface:
              aStage.SetImageFormat(DecodeStage::ANDROID_SURFACE);
              break;
            /* Added in API level 33
            case Cap::COLOR_FormatYUVP010:
              aStage.SetImageFormat(DecodeStage::P010);
              break;
            */
            default:
              NS_WARNING(nsPrintfCString("Unhandled color format %d (0x%08x)",
                                         aFormat, aFormat)
                             .get());
          }
        });
        mColorRange.apply([&](int32_t aRange) {
          switch (aRange) {
            case Fmt::COLOR_RANGE_FULL:
              aStage.SetColorRange(gfx::ColorRange::FULL);
              break;
            case Fmt::COLOR_RANGE_LIMITED:
              aStage.SetColorRange(gfx::ColorRange::LIMITED);
              break;
            default:
              NS_WARNING(nsPrintfCString("Unhandled color range %d (0x%08x)",
                                         aRange, aRange)
                             .get());
          }
        });
        mColorSpace.apply([&](int32_t aSpace) {
          switch (aSpace) {
            case Fmt::COLOR_STANDARD_BT2020:
              aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT2020);
              break;
            case Fmt::COLOR_STANDARD_BT601_NTSC:
            case Fmt::COLOR_STANDARD_BT601_PAL:
              aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT601);
              break;
            case Fmt::COLOR_STANDARD_BT709:
              aStage.SetYUVColorSpace(gfx::YUVColorSpace::BT709);
              break;
            default:
              NS_WARNING(nsPrintfCString("Unhandled color space %d (0x%08x)",
                                         aSpace, aSpace)
                             .get());
          }
        });
        aStage.SetResolution(v->mImage->GetSize().Width(),
                             v->mImage->GetSize().Height());
        aStage.SetStartTimeAndEndTime(v->mTime.ToMicroseconds(),
                                      v->GetEndTime().ToMicroseconds());
      });

      RemoteDataDecoder::UpdateOutputStatus(std::move(v));
    }

    if (isEOS) {
      DrainComplete();
    }
  }

  // Caches the latest output color description reported by the codec; read
  // by ProcessOutput() on mThread. Re-dispatches itself when called from a
  // binder thread.
  void ProcessOutputFormatChange(int32_t aColorFormat,
                                 Maybe<int32_t> aColorRange,
                                 Maybe<int32_t> aColorSpace) {
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv = mThread->Dispatch(
          NewRunnableMethod<int32_t, Maybe<int32_t>, Maybe<int32_t>>(
              "RemoteVideoDecoder::ProcessOutputFormatChange", this,
              &RemoteVideoDecoder::ProcessOutputFormatChange, aColorFormat,
              aColorRange, aColorSpace));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      (void)rv;
      return;
    }

    AssertOnThread();

    mColorFormat = Some(aColorFormat);
    mColorRange = aColorRange;
    mColorSpace = aColorSpace;
  }

  // True once the output surface is gone (e.g. after a GPU process crash);
  // the decoder cannot produce further frames and must be re-created.
  bool NeedsNewDecoder() const override {
    return !mSurface || mSurface->IsReleased();
  }

  const VideoInfo mConfig;
  java::GeckoSurface::GlobalRef mSurface;
  AndroidSurfaceTextureHandle mSurfaceHandle{};
  // Only accessed on reader's task queue.
  bool mIsCodecSupportAdaptivePlayback = false;
  // Can be accessed on any thread, but only written on during init.
  bool mIsHardwareAccelerated = false;
  // Accessed on mThread and reader's thread. SimpleMap however is
  // thread-safe, so it's okay to do so.
  SimpleMap<int64_t, InputInfo, ThreadSafePolicy> mInputInfos;
  // Only accessed on mThread.
  Maybe<TimeUnit> mSeekTarget;
  Maybe<TimeUnit> mLatestOutputTime;
  Maybe<int32_t> mColorFormat;
  Maybe<int32_t> mColorRange;
  Maybe<int32_t> mColorSpace;
  // Only accessed on mThread.
  // Tracking id for the performance recorder.
  const Maybe<TrackingId> mTrackingId;
  // Can be accessed on any thread, but only written during init.
  // Pre-filled decode info used by the performance recorder.
  MediaInfoFlag mMediaInfoFlag = {};
  // Only accessed on mThread.
  // Records decode performance to the profiler.
  PerformanceRecorderMulti<DecodeStage> mPerformanceRecorder;
};
    637 
    638 class RemoteAudioDecoder final : public RemoteDataDecoder {
    639 public:
 // Constructs the audio decoder. If the demuxer supplied codec-specific data
 // but the Java MediaFormat lacks a "csd-0" entry, the blob is injected here
 // so MediaCodec can configure itself. NS_ENSURE_SUCCESS_VOID bails out of
 // the constructor body early on JNI failure (members are already set).
 RemoteAudioDecoder(const AudioInfo& aConfig,
                    java::sdk::MediaFormat::Param aFormat,
                    const nsString& aDrmStubId)
     : RemoteDataDecoder(MediaData::Type::AUDIO_DATA, aConfig.mMimeType,
                         aFormat, aDrmStubId),
       mOutputChannels(AssertedCast<int32_t>(aConfig.mChannels)),
       mOutputSampleRate(AssertedCast<int32_t>(aConfig.mRate)) {
   JNIEnv* const env = jni::GetEnvForThread();

   bool formatHasCSD = false;
   NS_ENSURE_SUCCESS_VOID(aFormat->ContainsKey(u"csd-0"_ns, &formatHasCSD));

   // It would be nice to instead use more specific information here, but
   // we force a byte buffer for now since this handles arbitrary codecs.
   // TODO(bug 1768564): implement further type checking for codec data.
   RefPtr<MediaByteBuffer> audioCodecSpecificBinaryBlob =
       ForceGetAudioCodecSpecificBlob(aConfig.mCodecSpecificConfig);
   if (!formatHasCSD && audioCodecSpecificBinaryBlob->Length() >= 2) {
     jni::ByteBuffer::LocalRef buffer(env);
     buffer = jni::ByteBuffer::New(audioCodecSpecificBinaryBlob->Elements(),
                                   audioCodecSpecificBinaryBlob->Length());
     NS_ENSURE_SUCCESS_VOID(aFormat->SetByteBuffer(u"csd-0"_ns, buffer));
   }
 }
    664 
 // Registers the JNI callbacks and creates the Java codec proxy (no output
 // surface for audio). Must be called on the thread that owns the decoder.
 RefPtr<InitPromise> Init() override {
   mThread = GetCurrentSerialEventTarget();
   java::sdk::MediaCodec::BufferInfo::LocalRef bufferInfo;
   if (NS_FAILED(java::sdk::MediaCodec::BufferInfo::New(&bufferInfo)) ||
       !bufferInfo) {
     return InitPromise::CreateAndReject(NS_ERROR_OUT_OF_MEMORY, __func__);
   }
   mInputBufferInfo = bufferInfo;

   // Register native methods.
   JavaCallbacksSupport::Init();

   mJavaCallbacks = java::CodecProxy::NativeCallbacks::New();
   if (!mJavaCallbacks) {
     return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                         __func__);
   }
   JavaCallbacksSupport::AttachNative(
       mJavaCallbacks, mozilla::MakeUnique<CallbacksSupport>(this));

   // First argument false requests a decoder; audio passes no surface.
   mJavaDecoder = java::CodecProxy::Create(false, mFormat, nullptr,
                                           mJavaCallbacks, mDrmStubId);
   if (mJavaDecoder == nullptr) {
     return InitPromise::CreateAndReject(NS_ERROR_DOM_MEDIA_FATAL_ERR,
                                         __func__);
   }

   return InitPromise::CreateAndResolve(TrackInfo::kAudioTrack, __func__);
 }
    694 
    695  nsCString GetCodecName() const override {
    696    if (mMimeType.EqualsLiteral("audio/mp4a-latm")) {
    697      return "aac"_ns;
    698    }
    699    return "unknown"_ns;
    700  }
    701 
 // Clears seek-related state before flushing the underlying codec. Resetting
 // mFirstDemuxedSampleTime also makes ShouldDiscardSample() drop any output
 // still in flight from before the flush.
 RefPtr<FlushPromise> Flush() override {
   AssertOnThread();
   mFirstDemuxedSampleTime.reset();
   return RemoteDataDecoder::Flush();
 }
    707 
 // Records the first demuxed timestamp after construction/flush (used to
 // trim leading output), then forwards the sample to the base class.
 RefPtr<DecodePromise> Decode(MediaRawData* aSample) override {
   AssertOnThread();
   if (!mFirstDemuxedSampleTime) {
     MOZ_ASSERT(aSample->mTime.IsValid());
     mFirstDemuxedSampleTime.emplace(aSample->mTime);
   }
   return RemoteDataDecoder::Decode(aSample);
 }
    716 
    717 private:
 // Receives Java codec callbacks (binder thread) and forwards them to the
 // owning audio decoder.
 class CallbacksSupport final : public JavaCallbacksSupport {
  public:
   explicit CallbacksSupport(RemoteAudioDecoder* aDecoder)
       : mDecoder(aDecoder) {}

   void HandleInput(int64_t aTimestamp, bool aProcessed) override {
     mDecoder->UpdateInputStatus(aTimestamp, aProcessed);
   }

   void HandleOutput(java::Sample::Param aSample,
                     java::SampleBuffer::Param aBuffer) override {
     MOZ_ASSERT(aBuffer, "Audio sample should have buffer");
     // aSample will be implicitly converted into a GlobalRef.
     mDecoder->ProcessOutput(aSample, aBuffer);
   }

   // Validates the new channel count and forwards channel count/sample rate
   // to the decoder; rejects layouts our AudioConfig cannot represent.
   void HandleOutputFormatChanged(
       java::sdk::MediaFormat::Param aFormat) override {
     int32_t outputChannels = 0;
     aFormat->GetInteger(u"channel-count"_ns, &outputChannels);
     AudioConfig::ChannelLayout layout(outputChannels);
     if (!layout.IsValid()) {
       mDecoder->Error(MediaResult(
           NS_ERROR_DOM_MEDIA_FATAL_ERR,
           RESULT_DETAIL("Invalid channel layout:%d", outputChannels)));
       return;
     }

     int32_t sampleRate = 0;
     aFormat->GetInteger(u"sample-rate"_ns, &sampleRate);
     LOG("Audio output format changed: channels:%d sample rate:%d",
         outputChannels, sampleRate);

     mDecoder->ProcessOutputFormatChange(outputChannels, sampleRate);
   }

   void HandleError(const MediaResult& aError) override {
     mDecoder->Error(aError);
   }

  private:
   RemoteAudioDecoder* mDecoder;
 };
    761 
    762  bool IsSampleTimeSmallerThanFirstDemuxedSampleTime(int64_t aTime) const {
    763    return mFirstDemuxedSampleTime->ToMicroseconds() > aTime;
    764  }
    765 
    766  bool ShouldDiscardSample(int64_t aSession) const {
    767    AssertOnThread();
    768    // HandleOutput() runs on Android binder thread pool and could be preempted
    769    // by RemoteDateDecoder task queue. That means ProcessOutput() could be
    770    // scheduled after Shutdown() or Flush(). We won't need the
    771    // sample which is returned after calling Shutdown() and Flush(). We can
    772    // check mFirstDemuxedSampleTime to know whether the Flush() has been
    773    // called, becasue it would be reset in Flush().
    774    return GetState() == State::SHUTDOWN || !mFirstDemuxedSampleTime ||
    775           mSession != aSession;
    776  }
    777 
  // Param and LocalRef are only valid for the duration of a JNI method call.
  // Use GlobalRef as the parameter type to keep the Java object referenced
  // until running.
  //
  // Converts one decoded Java sample into an AudioData and queues it via
  // UpdateOutputStatus(). May be invoked off the decoder thread; in that case
  // it re-dispatches itself to the decoder thread.
  void ProcessOutput(java::Sample::GlobalRef&& aSample,
                     java::SampleBuffer::GlobalRef&& aBuffer) {
    if (!mThread->IsOnCurrentThread()) {
      nsresult rv =
          mThread->Dispatch(NewRunnableMethod<java::Sample::GlobalRef&&,
                                              java::SampleBuffer::GlobalRef&&>(
              "RemoteAudioDecoder::ProcessOutput", this,
              &RemoteAudioDecoder::ProcessOutput, std::move(aSample),
              std::move(aBuffer)));
      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
      (void)rv;
      return;
    }

    AssertOnThread();

    LOG("ProcessOutput");

    // Samples from an old session, or arriving after Flush()/Shutdown(), are
    // stale; hand them straight back to the codec.
    if (ShouldDiscardSample(aSample->Session()) || !aBuffer->IsValid()) {
      aSample->Dispose();
      LOG("Discarding sample");
      return;
    }

    // Guarantees the output buffer is returned to the Java codec on every
    // exit path below.
    RenderOrReleaseOutput autoRelease(mJavaDecoder, aSample);

    java::sdk::MediaCodec::BufferInfo::LocalRef info = aSample->Info();
    MOZ_ASSERT(info);

    // Read the buffer metadata, accumulating overall success into `ok`.
    int32_t flags = 0;
    bool ok = NS_SUCCEEDED(info->Flags(&flags));
    bool isEOS = !!(flags & java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);

    int32_t offset;
    ok &= NS_SUCCEEDED(info->Offset(&offset));

    int64_t presentationTimeUs;
    ok &= NS_SUCCEEDED(info->PresentationTimeUs(&presentationTimeUs));

    int32_t size;
    ok &= NS_SUCCEEDED(info->Size(&size));

    // A metadata failure, or a non-EOS sample older than the first demuxed
    // sample, is treated as a fatal decode error.
    if (!ok ||
        (IsSampleTimeSmallerThanFirstDemuxedSampleTime(presentationTimeUs) &&
         !isEOS)) {
      LOG("ProcessOutput: decoding error ok[%s], pts[%" PRId64 "], eos[%s]",
          ok ? "true" : "false", presentationTimeUs, isEOS ? "true" : "false");
      Error(MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__));
      return;
    }

    if (size > 0) {
      // The buffer holds 16-bit PCM; copy it out and inflate to float.
      const int32_t sampleSize = sizeof(int16_t);
      const int32_t numSamples = size / sampleSize;

      InflatableShortBuffer audio(numSamples);
      if (!audio) {
        Error(MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__));
        LOG("OOM while allocating temporary output buffer");
        return;
      }
      jni::ByteBuffer::LocalRef dest = jni::ByteBuffer::New(audio.get(), size);
      aBuffer->WriteToByteBuffer(dest, offset, size);
      AlignedFloatBuffer converted = audio.Inflate();

      TimeUnit pts = TimeUnit::FromMicroseconds(presentationTimeUs);

      LOG("Decoded: %u frames of %s audio, pts: %s, %d channels, %" PRId32
          " Hz",
          numSamples / mOutputChannels,
          sampleSize == sizeof(int16_t) ? "int16" : "f32", pts.ToString().get(),
          mOutputChannels, mOutputSampleRate);

      RefPtr<AudioData> data = new AudioData(
          0, pts, std::move(converted), mOutputChannels, mOutputSampleRate);

      UpdateOutputStatus(std::move(data));
    } else {
      LOG("ProcessOutput but size 0");
    }

    if (isEOS) {
      // End of stream: complete any outstanding Drain().
      DrainComplete();
    }
  }
    866 
    867  void ProcessOutputFormatChange(int32_t aChannels, int32_t aSampleRate) {
    868    if (!mThread->IsOnCurrentThread()) {
    869      nsresult rv = mThread->Dispatch(NewRunnableMethod<int32_t, int32_t>(
    870          "RemoteAudioDecoder::ProcessOutputFormatChange", this,
    871          &RemoteAudioDecoder::ProcessOutputFormatChange, aChannels,
    872          aSampleRate));
    873      MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    874      (void)rv;
    875      return;
    876    }
    877 
    878    AssertOnThread();
    879 
    880    mOutputChannels = aChannels;
    881    mOutputSampleRate = aSampleRate;
    882  }
    883 
  // Channel count from the most recent output format change (0 until then).
  int32_t mOutputChannels{};
  // Sample rate in Hz from the most recent output format change.
  int32_t mOutputSampleRate{};
  // Time of the first sample decoded since creation or the last Flush();
  // Nothing() until Decode() runs, and reset again by Flush().
  Maybe<TimeUnit> mFirstDemuxedSampleTime;
    887 };
    888 
    889 already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateAudioDecoder(
    890    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    891    CDMProxy* aProxy) {
    892  const AudioInfo& config = aParams.AudioConfig();
    893  java::sdk::MediaFormat::LocalRef format;
    894  NS_ENSURE_SUCCESS(
    895      java::sdk::MediaFormat::CreateAudioFormat(config.mMimeType, config.mRate,
    896                                                config.mChannels, &format),
    897      nullptr);
    898  // format->SetInteger(java::sdk::MediaFormat::KEY_PCM_ENCODING,
    899  //                    java::sdk::AudioFormat::ENCODING_PCM_FLOAT);
    900 
    901  RefPtr<MediaDataDecoder> decoder =
    902      new RemoteAudioDecoder(config, format, aDrmStubId);
    903  if (aProxy) {
    904    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
    905  }
    906  return decoder.forget();
    907 }
    908 
    909 already_AddRefed<MediaDataDecoder> RemoteDataDecoder::CreateVideoDecoder(
    910    const CreateDecoderParams& aParams, const nsString& aDrmStubId,
    911    CDMProxy* aProxy) {
    912  const VideoInfo& config = aParams.VideoConfig();
    913  java::sdk::MediaFormat::LocalRef format;
    914  NS_ENSURE_SUCCESS(java::sdk::MediaFormat::CreateVideoFormat(
    915                        TranslateMimeType(config.mMimeType),
    916                        config.mImage.width, config.mImage.height, &format),
    917                    nullptr);
    918 
    919  RefPtr<MediaDataDecoder> decoder =
    920      new RemoteVideoDecoder(config, format, aDrmStubId, aParams.mTrackingId);
    921  if (aProxy) {
    922    decoder = new EMEMediaDataDecoderProxy(aParams, decoder.forget(), aProxy);
    923  }
    924  return decoder.forget();
    925 }
    926 
// Base-class constructor: records the track type, MIME type, Java MediaFormat
// and DRM stub id. Session id and pending-input counter start at zero.
RemoteDataDecoder::RemoteDataDecoder(MediaData::Type aType,
                                     const nsACString& aMimeType,
                                     java::sdk::MediaFormat::Param aFormat,
                                     const nsString& aDrmStubId)
    : mType(aType),
      mMimeType(aMimeType),
      mFormat(aFormat),
      mDrmStubId(aDrmStubId),
      mSession(0),
      mNumPendingInputs(0) {}
    937 
// Discard all buffered output and in-flight input, cancel outstanding
// promises, and flush the Java codec.
RefPtr<MediaDataDecoder::FlushPromise> RemoteDataDecoder::Flush() {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // Drop decoded-but-unreturned output and the pending-input count.
  mDecodedData = DecodedData();
  UpdatePendingInputStatus(PendingOp::CLEAR);
  // Outstanding requests are cancelled, not failed.
  mDecodePromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  mDrainPromise.RejectIfExists(NS_ERROR_DOM_MEDIA_CANCELED, __func__);
  SetState(State::DRAINED);
  // Ask the Java codec to drop its queued buffers as well.
  mJavaDecoder->Flush();
  return FlushPromise::CreateAndResolve(true, __func__);
}
    950 
// Request end-of-stream: queue an EOS-flagged empty input buffer and return a
// promise resolved once the remaining output has been produced.
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Drain() {
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    return DecodePromise::CreateAndReject(NS_ERROR_DOM_MEDIA_CANCELED,
                                          __func__);
  }
  RefPtr<DecodePromise> p = mDrainPromise.Ensure(__func__);
  if (GetState() == State::DRAINING || GetState() == State::DRAINED) {
    // Drain operation already in progress or complete.
    // There's no operation to perform other than returning any already
    // decoded data.
    ReturnDecodedData();
    return p;
  }

  SetState(State::DRAINING);
  // An empty buffer flagged END_OF_STREAM tells the codec to drain.
  mInputBufferInfo->Set(0, 0, -1,
                        java::sdk::MediaCodec::BUFFER_FLAG_END_OF_STREAM);
  mSession = mJavaDecoder->Input(nullptr, mInputBufferInfo, nullptr);
  return p;
}
    972 
// Tear down the Java codec and its callbacks; the decoder is unusable after
// this resolves.
RefPtr<ShutdownPromise> RemoteDataDecoder::Shutdown() {
  LOG("Shutdown");
  AssertOnThread();
  SetState(State::SHUTDOWN);
  if (mJavaDecoder) {
    mJavaDecoder->Release();
    mJavaDecoder = nullptr;
  }

  if (mJavaCallbacks) {
    // Stop callback delivery before disposing of the native support object.
    JavaCallbacksSupport::GetNative(mJavaCallbacks)->Cancel();
    JavaCallbacksSupport::DisposeNative(mJavaCallbacks);
    mJavaCallbacks = nullptr;
  }

  mFormat = nullptr;

  return ShutdownPromise::CreateAndResolve(true, __func__);
}
    992 
// Either a CryptoInfo LocalRef (null when the sample is unencrypted) or the
// nsresult error encountered while building it.
using CryptoInfoResult =
    Result<java::sdk::MediaCodec::CryptoInfo::LocalRef, nsresult>;
    995 
    996 static CryptoInfoResult GetCryptoInfoFromSample(const MediaRawData* aSample) {
    997  const auto& cryptoObj = aSample->mCrypto;
    998  java::sdk::MediaCodec::CryptoInfo::LocalRef cryptoInfo;
    999 
   1000  if (!cryptoObj.IsEncrypted()) {
   1001    return CryptoInfoResult(cryptoInfo);
   1002  }
   1003 
   1004  nsresult rv = java::sdk::MediaCodec::CryptoInfo::New(&cryptoInfo);
   1005  NS_ENSURE_SUCCESS(rv, CryptoInfoResult(rv));
   1006 
   1007  uint32_t numSubSamples = std::min<uint32_t>(
   1008      cryptoObj.mPlainSizes.Length(), cryptoObj.mEncryptedSizes.Length());
   1009 
   1010  uint32_t totalSubSamplesSize = 0;
   1011  for (const auto& size : cryptoObj.mPlainSizes) {
   1012    totalSubSamplesSize += size;
   1013  }
   1014  for (const auto& size : cryptoObj.mEncryptedSizes) {
   1015    totalSubSamplesSize += size;
   1016  }
   1017 
   1018  // Deep copy the plain sizes so we can modify them.
   1019  nsTArray<uint32_t> plainSizes = cryptoObj.mPlainSizes.Clone();
   1020  uint32_t codecSpecificDataSize = aSample->Size() - totalSubSamplesSize;
   1021  // Size of codec specific data("CSD") for Android java::sdk::MediaCodec usage
   1022  // should be included in the 1st plain size if it exists.
   1023  if (codecSpecificDataSize > 0 && !plainSizes.IsEmpty()) {
   1024    // This shouldn't overflow as the the plain size should be UINT16_MAX at
   1025    // most, and the CSD should never be that large. Checked int acts like a
   1026    // diagnostic assert here to help catch if we ever have insane inputs.
   1027    CheckedUint32 newLeadingPlainSize{plainSizes[0]};
   1028    newLeadingPlainSize += codecSpecificDataSize;
   1029    plainSizes[0] = newLeadingPlainSize.value();
   1030  }
   1031 
   1032  static const int kExpectedIVLength = 16;
   1033  nsTArray<uint8_t> tempIV(kExpectedIVLength);
   1034  jint mode;
   1035  switch (cryptoObj.mCryptoScheme) {
   1036    case CryptoScheme::None:
   1037      mode = java::sdk::MediaCodec::CRYPTO_MODE_UNENCRYPTED;
   1038      MOZ_ASSERT(cryptoObj.mIV.Length() <= kExpectedIVLength);
   1039      tempIV.AppendElements(cryptoObj.mIV);
   1040      break;
   1041    case CryptoScheme::Cenc:
   1042      mode = java::sdk::MediaCodec::CRYPTO_MODE_AES_CTR;
   1043      MOZ_ASSERT(cryptoObj.mIV.Length() <= kExpectedIVLength);
   1044      tempIV.AppendElements(cryptoObj.mIV);
   1045      break;
   1046    case CryptoScheme::Cbcs:
   1047    case CryptoScheme::Cbcs_1_9:
   1048      mode = java::sdk::MediaCodec::CRYPTO_MODE_AES_CBC;
   1049      MOZ_ASSERT(cryptoObj.mConstantIV.Length() <= kExpectedIVLength);
   1050      tempIV.AppendElements(cryptoObj.mConstantIV);
   1051      break;
   1052  }
   1053  auto tempIVLength = tempIV.Length();
   1054  for (size_t i = tempIVLength; i < kExpectedIVLength; i++) {
   1055    // Padding with 0
   1056    tempIV.AppendElement(0);
   1057  }
   1058 
   1059  MOZ_ASSERT(numSubSamples <= INT32_MAX);
   1060  cryptoInfo->Set(static_cast<int32_t>(numSubSamples),
   1061                  mozilla::jni::IntArray::From(plainSizes),
   1062                  mozilla::jni::IntArray::From(cryptoObj.mEncryptedSizes),
   1063                  mozilla::jni::ByteArray::From(cryptoObj.mKeyId),
   1064                  mozilla::jni::ByteArray::From(tempIV), mode);
   1065  if (mode == java::sdk::MediaCodec::CRYPTO_MODE_AES_CBC) {
   1066    java::CodecProxy::SetCryptoPatternIfNeeded(
   1067        cryptoInfo, cryptoObj.mCryptByteBlock, cryptoObj.mSkipByteBlock);
   1068  }
   1069 
   1070  return CryptoInfoResult(cryptoInfo);
   1071 }
   1072 
// Submit one encoded sample to the Java codec. The returned promise is
// resolved later by ReturnDecodedData() once output (or room for more input)
// becomes available.
RefPtr<MediaDataDecoder::DecodePromise> RemoteDataDecoder::Decode(
    MediaRawData* aSample) {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);
  MOZ_ASSERT(aSample != nullptr);
  // Wrap the sample bytes for JNI without copying them.
  jni::ByteBuffer::LocalRef bytes = jni::ByteBuffer::New(
      const_cast<uint8_t*>(aSample->Data()), aSample->Size(), fallible);
  if (!bytes) {
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
  }

  SetState(State::DRAINABLE);
  MOZ_ASSERT(aSample->Size() <= INT32_MAX);
  mInputBufferInfo->Set(0, static_cast<int32_t>(aSample->Size()),
                        aSample->mTime.ToMicroseconds(), 0);
  CryptoInfoResult crypto = GetCryptoInfoFromSample(aSample);
  if (crypto.isErr()) {
    return DecodePromise::CreateAndReject(
        MediaResult(crypto.unwrapErr(), __func__), __func__);
  }
  int64_t session =
      mJavaDecoder->Input(bytes, mInputBufferInfo, crypto.unwrap());
  if (session == java::CodecProxy::INVALID_SESSION) {
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
  }
  // Outputs carry this session id; mismatched ids are discarded as stale.
  mSession = session;
  return mDecodePromise.Ensure(__func__);
}
   1103 
   1104 void RemoteDataDecoder::UpdatePendingInputStatus(PendingOp aOp) {
   1105  AssertOnThread();
   1106  switch (aOp) {
   1107    case PendingOp::INCREASE:
   1108      mNumPendingInputs++;
   1109      break;
   1110    case PendingOp::DECREASE:
   1111      mNumPendingInputs--;
   1112      break;
   1113    case PendingOp::CLEAR:
   1114      mNumPendingInputs = 0;
   1115      break;
   1116  }
   1117 }
   1118 
// Track an input buffer the Java codec reports as queued (aProcessed false)
// or consumed (aProcessed true). Callable from any thread; bounces to the
// decoder thread.
void RemoteDataDecoder::UpdateInputStatus(int64_t aTimestamp, bool aProcessed) {
  if (!mThread->IsOnCurrentThread()) {
    nsresult rv = mThread->Dispatch(NewRunnableMethod<int64_t, bool>(
        "RemoteDataDecoder::UpdateInputStatus", this,
        &RemoteDataDecoder::UpdateInputStatus, aTimestamp, aProcessed));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    (void)rv;
    return;
  }
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    // Decoder already torn down; nothing to account for.
    return;
  }

  if (!aProcessed) {
    UpdatePendingInputStatus(PendingOp::INCREASE);
  } else if (HasPendingInputs()) {
    UpdatePendingInputStatus(PendingOp::DECREASE);
  }

  if (!HasPendingInputs() ||  // Input has been processed, request the next one.
      !mDecodedData.IsEmpty()) {  // Previous output arrived before Decode().
    ReturnDecodedData();
  }
}
   1144 
   1145 void RemoteDataDecoder::UpdateOutputStatus(RefPtr<MediaData>&& aSample) {
   1146  AssertOnThread();
   1147  if (GetState() == State::SHUTDOWN) {
   1148    LOG("Update output status, but decoder has been shut down, dropping the "
   1149        "decoded results");
   1150    return;
   1151  }
   1152  if (IsUsefulData(aSample)) {
   1153    mDecodedData.AppendElement(std::move(aSample));
   1154  } else {
   1155    LOG("Decoded data, but not considered useful");
   1156  }
   1157  ReturnDecodedData();
   1158 }
   1159 
// Resolve the outstanding Decode() or Drain() promise with any accumulated
// output, or reject it with the first recorded decode error.
void RemoteDataDecoder::ReturnDecodedData() {
  AssertOnThread();
  MOZ_ASSERT(GetState() != State::SHUTDOWN);

  // We only want to clear mDecodedData when we have resolved the promises.
  LOG("have decode promise=%i, have drain promise=%i, state=%i",
      static_cast<int>(!mDecodePromise.IsEmpty()),
      static_cast<int>(!mDrainPromise.IsEmpty()), static_cast<int>(GetState()));
  // At most one of the two promises may be outstanding at a time.
  MOZ_ASSERT(mDecodePromise.IsEmpty() || mDrainPromise.IsEmpty());

  if (!mDecodePromise.IsEmpty()) {
    // Return successfully decoded samples, even if there is an error, which
    // can be returned for a subsequent decode or drain request.
    if (!mDecodedData.IsEmpty() || mDecodeError.isNothing()) {
      mDecodePromise.Resolve(std::move(mDecodedData), __func__);
      MOZ_ASSERT(mDecodedData.IsEmpty());
    } else if (mDecodeError.isSome()) {
      mDecodePromise.Reject(mDecodeError.extract(), __func__);
    }
  } else if (!mDrainPromise.IsEmpty()) {
    // A drain resolves empty-handed only once fully DRAINED and error-free.
    if (!mDecodedData.IsEmpty() ||
        (GetState() == State::DRAINED && mDecodeError.isNothing())) {
      mDrainPromise.Resolve(std::move(mDecodedData), __func__);
      MOZ_ASSERT(mDecodedData.IsEmpty());
    } else if (mDecodeError.isSome()) {
      mDrainPromise.Reject(mDecodeError.extract(), __func__);
    }
  }
}
   1189 
   1190 void RemoteDataDecoder::DrainComplete() {
   1191  if (!mThread->IsOnCurrentThread()) {
   1192    nsresult rv = mThread->Dispatch(
   1193        NewRunnableMethod("RemoteDataDecoder::DrainComplete", this,
   1194                          &RemoteDataDecoder::DrainComplete));
   1195    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
   1196    (void)rv;
   1197    return;
   1198  }
   1199  LOG("EOS");
   1200  AssertOnThread();
   1201  if (GetState() == State::SHUTDOWN) {
   1202    return;
   1203  }
   1204  SetState(State::DRAINED);
   1205  ReturnDecodedData();
   1206 }
   1207 
// Record a decode error and propagate it through the pending promise.
// Callable from any thread; bounces to the decoder thread.
void RemoteDataDecoder::Error(const MediaResult& aError) {
  if (!mThread->IsOnCurrentThread()) {
    nsresult rv = mThread->Dispatch(NewRunnableMethod<MediaResult>(
        "RemoteDataDecoder::Error", this, &RemoteDataDecoder::Error, aError));
    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    (void)rv;
    return;
  }
  AssertOnThread();
  if (GetState() == State::SHUTDOWN) {
    // Errors after shutdown are moot.
    return;
  }

  LOG("ErrorName=%s Message=%s", aError.ErrorName().get(),
      aError.Message().get());
  // If we know we need a new decoder (eg because RemoteVideoDecoder's mSurface
  // has been released due to a GPU process crash) then override the error to
  // request a new decoder.
  if (NeedsNewDecoder()) {
    mDecodeError =
        Some(MediaResult(NS_ERROR_DOM_MEDIA_NEED_NEW_DECODER, __func__));
  } else if (!mDecodeError.isSome()) {
    mDecodeError.emplace(aError);
  }  // else keep the first error to report.

  ReturnDecodedData();
}
   1235 
   1236 void RemoteDataDecoder::SetState(RemoteDataDecoder::State aState) {
   1237  LOG("%i", static_cast<int>(aState));
   1238  AssertOnThread();
   1239  mState = aState;
   1240 }
   1241 
   1242 }  // namespace mozilla
   1243 #undef LOG