tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

DAV1DDecoder.cpp (15794B)


      1 /* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim:set ts=2 sw=2 sts=2 et cindent: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "DAV1DDecoder.h"
      8 
      9 #include "ImageContainer.h"
     10 #include "PerformanceRecorder.h"
     11 #include "VideoUtils.h"
     12 #include "gfxUtils.h"
     13 #include "mozilla/StaticPrefs_media.h"
     14 #include "mozilla/TaskQueue.h"
     15 #include "nsThreadUtils.h"
     16 
     17 #undef LOG
     18 #define LOG(arg, ...)                                                  \
     19  DDMOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, "::%s: " arg, __func__, \
     20            ##__VA_ARGS__)
     21 
     22 namespace mozilla {
     23 using layers::BufferRecycleBin;
     24 
     25 static int GetDecodingThreadCount(uint32_t aCodedHeight) {
     26  /**
     27   * Based on the result we print out from the dav1decoder [1], the
     28   * following information shows the number of tiles for AV1 videos served on
     29   * Youtube. Each Tile can be decoded in parallel, so we would like to make
     30   * sure we at least use enough threads to match the number of tiles.
     31   *
     32   * ----------------------------
     33   * | resolution row col total |
     34   * |    480p      2  1     2  |
     35   * |    720p      2  2     4  |
     36   * |   1080p      4  2     8  |
     37   * |   1440p      4  2     8  |
     38   * |   2160p      8  4    32  |
     39   * ----------------------------
     40   *
     41   * Besides the tile thread count, the frame thread count also needs to be
     42   * considered. As we didn't find anything about what the best number is for
     43   * the count of frame thread, just simply use 2 for parallel jobs, which
     44   * is similar with Chromium's implementation. They uses 3 frame threads for
     45   * 720p+ but less tile threads, so we will still use more total threads. In
     46   * addition, their data is measured on 2019, our data should be closer to the
     47   * current real world situation.
     48   * [1]
     49   * https://searchfox.org/mozilla-central/rev/2f5ed7b7244172d46f538051250b14fb4d8f1a5f/third_party/dav1d/src/decode.c#2940
     50   */
     51  int tileThreads = 2, frameThreads = 2;
     52  if (aCodedHeight >= 2160) {
     53    tileThreads = 32;
     54  } else if (aCodedHeight >= 1080) {
     55    tileThreads = 8;
     56  } else if (aCodedHeight >= 720) {
     57    tileThreads = 4;
     58  }
     59  return tileThreads * frameThreads;
     60 }
     61 
// Captures decoder configuration from CreateDecoderParams. All decode work
// is funneled through a dedicated TaskQueue backed by the platform-decoder
// thread pool.
DAV1DDecoder::DAV1DDecoder(const CreateDecoderParams& aParams)
    : mInfo(aParams.VideoConfig()),
      mTaskQueue(TaskQueue::Create(
          GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER),
          "Dav1dDecoder")),
      mImageContainer(aParams.mImageContainer),
      mImageAllocator(aParams.mKnowsCompositor),
      mTrackingId(aParams.mTrackingId),
      mLowLatency(
          aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency)),
      m8bpcOutput(aParams.mOptions.contains(
          CreateDecoderParams::Option::Output8BitPerChannel)) {
  // The recycle bin is only needed when high-bit-depth output will be
  // down-converted to 8 bits per channel (see ConstructImage).
  if (m8bpcOutput) {
    m8bpcRecycleBin = MakeRefPtr<BufferRecycleBin>();
  }
}
     78 
// Default destructor; the dav1d context is closed in Shutdown(), not here.
DAV1DDecoder::~DAV1DDecoder() = default;
     80 
     81 RefPtr<MediaDataDecoder::InitPromise> DAV1DDecoder::Init() {
     82  AUTO_PROFILER_LABEL("DAV1DDecoder::Init", MEDIA_PLAYBACK);
     83  Dav1dSettings settings;
     84  dav1d_default_settings(&settings);
     85  if (mLowLatency) {
     86    settings.max_frame_delay = 1;
     87  }
     88  size_t decoder_threads = 2;
     89  if (mInfo.mDisplay.width >= 2048) {
     90    decoder_threads = 8;
     91  } else if (mInfo.mDisplay.width >= 1024) {
     92    decoder_threads = 4;
     93  }
     94  if (StaticPrefs::media_av1_new_thread_count_strategy()) {
     95    decoder_threads = GetDecodingThreadCount(mInfo.mImage.Height());
     96  }
     97  // Still need to consider the amount of physical cores in order to achieve
     98  // best performance.
     99  settings.n_threads =
    100      static_cast<int>(std::min(decoder_threads, GetNumberOfProcessors()));
    101  if (int32_t count = StaticPrefs::media_av1_force_thread_count(); count > 0) {
    102    settings.n_threads = count;
    103  }
    104 
    105  int res = dav1d_open(&mContext, &settings);
    106  if (res < 0) {
    107    return DAV1DDecoder::InitPromise::CreateAndReject(
    108        MediaResult(NS_ERROR_DOM_MEDIA_FATAL_ERR,
    109                    RESULT_DETAIL("Couldn't get dAV1d decoder interface.")),
    110        __func__);
    111  }
    112  return DAV1DDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
    113                                                     __func__);
    114 }
    115 
// Hops onto the task queue; all real decoding happens in InvokeDecode().
RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::Decode(
    MediaRawData* aSample) {
  return InvokeAsync<MediaRawData*>(mTaskQueue, this, __func__,
                                    &DAV1DDecoder::InvokeDecode, aSample);
}
    121 
    122 void ReleaseDataBuffer_s(const uint8_t* buf, void* user_data) {
    123  MOZ_ASSERT(user_data);
    124  MOZ_ASSERT(buf);
    125  DAV1DDecoder* d = static_cast<DAV1DDecoder*>(user_data);
    126  d->ReleaseDataBuffer(buf);
    127 }
    128 
    129 void DAV1DDecoder::ReleaseDataBuffer(const uint8_t* buf) {
    130  // The release callback may be called on a different thread defined by the
    131  // third party dav1d execution. In that case post a task into TaskQueue to
    132  // ensure that mDecodingBuffers is only ever accessed on the TaskQueue.
    133  RefPtr<DAV1DDecoder> self = this;
    134  auto releaseBuffer = [self, buf] {
    135    MOZ_ASSERT(self->mTaskQueue->IsCurrentThreadIn());
    136    DebugOnly<bool> found = self->mDecodingBuffers.Remove(buf);
    137    MOZ_ASSERT(found);
    138  };
    139 
    140  if (mTaskQueue->IsCurrentThreadIn()) {
    141    releaseBuffer();
    142  } else {
    143    nsresult rv = mTaskQueue->Dispatch(NS_NewRunnableFunction(
    144        "DAV1DDecoder::ReleaseDataBuffer", std::move(releaseBuffer)));
    145    MOZ_DIAGNOSTIC_ASSERT(NS_SUCCEEDED(rv));
    146    (void)rv;
    147  }
    148 }
    149 
// Decodes one compressed sample on the task queue. May resolve with zero or
// more frames: dav1d sometimes needs several input samples before the first
// picture becomes available.
RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::InvokeDecode(
    MediaRawData* aSample) {
  AUTO_PROFILER_LABEL("DAV1DDecoder::InvokeDecode", MEDIA_PLAYBACK);
  MOZ_ASSERT(mTaskQueue->IsCurrentThreadIn());
  MOZ_ASSERT(aSample);

  // Tag the performance-recorder entry with keyframe/software/AV1 flags so
  // the recorded marker describes what was decoded.
  MediaInfoFlag flag = MediaInfoFlag::None;
  flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
                              : MediaInfoFlag::NonKeyFrame);
  flag |= MediaInfoFlag::SoftwareDecoding;
  flag |= MediaInfoFlag::VIDEO_AV1;
  mTrackingId.apply([&](const auto& aId) {
    mPerformanceRecorder.Start(aSample->mTimecode.ToMicroseconds(),
                               "DAV1DDecoder"_ns, aId, flag);
  });

  // Add the buffer to the hashtable in order to increase the ref counter and
  // keep it alive while dav1d reads from it. When dav1d no longer needs the
  // buffer it calls the release callback, which removes the entry (see
  // ReleaseDataBuffer) and eventually frees it. A hashtable, not an array, is
  // required because the release callbacks do not arrive in the same order
  // the buffers were submitted (threading ordering inside the decoder).
  mDecodingBuffers.InsertOrUpdate(aSample->Data(), RefPtr{aSample});
  Dav1dData data;
  int res = dav1d_data_wrap(&data, aSample->Data(), aSample->Size(),
                            ReleaseDataBuffer_s, this);
  // NOTE(review): data.m is written before res is checked; this assumes
  // dav1d_data_wrap leaves |data| in a writable state even on failure —
  // confirm against the dav1d API.
  data.m.timestamp = aSample->mTimecode.ToMicroseconds();
  data.m.duration = aSample->mDuration.ToMicroseconds();
  data.m.offset = aSample->mOffset;

  if (res < 0) {
    LOG("Create decoder data error.");
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
  }
  DecodedData results;
  do {
    res = dav1d_send_data(mContext, &data);
    if (res < 0 && res != DAV1D_ERR(EAGAIN)) {
      LOG("Decode error: %d", res);
      return DecodePromise::CreateAndReject(
          MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR, __func__), __func__);
    }
    // Always consume the whole buffer on success.
    // At this point only DAV1D_ERR(EAGAIN) is expected otherwise.
    MOZ_ASSERT((res == 0 && !data.sz) ||
               (res == DAV1D_ERR(EAGAIN) && data.sz == aSample->Size()));

    Result<already_AddRefed<VideoData>, MediaResult> r = GetPicture();
    if (r.isOk()) {
      results.AppendElement(r.unwrap());
    } else {
      MediaResult rs = r.unwrapErr();
      if (rs.Code() == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
        // No frames ready to return. This is not an error: in some
        // circumstances the decoder needs to be fed a certain number of
        // samples before it produces a picture.
        continue;
      }
      // Skip if rs is NS_OK, which can happen if picture layout is I400
      // (grayscale — see GetPicture()).
      if (NS_FAILED(rs.Code())) {
        return DecodePromise::CreateAndReject(rs, __func__);
      }
    }
  } while (data.sz > 0);

  return DecodePromise::CreateAndResolve(std::move(results), __func__);
}
    220 
// Pulls one decoded picture out of dav1d and converts it to a VideoData.
// Error contract: NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA means "feed more
// input", an Err carrying NS_OK means "valid picture, intentionally skipped"
// (I400 layout), anything else is a decode failure.
Result<already_AddRefed<VideoData>, MediaResult> DAV1DDecoder::GetPicture() {
  // RAII wrapper: guarantees dav1d_picture_unref() runs when this scope
  // exits, i.e. after ConstructImage() has copied the pixel data out.
  class Dav1dPictureWrapper {
   public:
    // operator& hands dav1d the raw out-param pointer it expects.
    Dav1dPicture* operator&() { return &p; }
    const Dav1dPicture& operator*() const { return p; }
    ~Dav1dPictureWrapper() { dav1d_picture_unref(&p); }

   private:
    Dav1dPicture p = Dav1dPicture();  // value-initialized (zeroed)
  };
  Dav1dPictureWrapper picture;

  int res = dav1d_get_picture(mContext, &picture);
  if (res < 0) {
    // EAGAIN only means no picture is ready yet; anything else is a real
    // decode error.
    auto r = MediaResult(res == DAV1D_ERR(EAGAIN)
                             ? NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA
                             : NS_ERROR_DOM_MEDIA_DECODE_ERR,
                         RESULT_DETAIL("dav1d_get_picture: %d", res));
    LOG("%s", r.Message().get());
    return Err(r);
  }

  if ((*picture).p.layout == DAV1D_PIXEL_LAYOUT_I400) {
    // Use NS_OK to indicate that this picture should be skipped: grayscale
    // frames carry no chroma planes to build an image from.
    auto r = MediaResult(
        NS_OK,
        RESULT_DETAIL("I400 picture: No chroma data to construct an image"));
    LOG("%s", r.Message().get());
    return Err(r);
  }

  Result<already_AddRefed<VideoData>, MediaResult> r = ConstructImage(*picture);
  // Log conversion failures with full geometry context before propagating.
  return r.mapErr([&](const MediaResult& aResult) {
    LOG("ConstructImage (%ux%u display %ux%u picture %ux%u ) error - %s: %s",
        (*picture).p.w, (*picture).p.h, mInfo.mDisplay.width,
        mInfo.mDisplay.height, mInfo.mImage.width, mInfo.mImage.height,
        aResult.ErrorName().get(), aResult.Message().get());
    return aResult;
  });
}
    261 
    262 /* static */
    263 Maybe<gfx::YUVColorSpace> DAV1DDecoder::GetColorSpace(
    264    const Dav1dPicture& aPicture, LazyLogModule& aLogger) {
    265  // When returning Nothing(), the caller chooses the appropriate default.
    266  if (!aPicture.seq_hdr || !aPicture.seq_hdr->color_description_present) {
    267    return Nothing();
    268  }
    269 
    270  return gfxUtils::CicpToColorSpace(
    271      static_cast<gfx::CICP::MatrixCoefficients>(aPicture.seq_hdr->mtrx),
    272      static_cast<gfx::CICP::ColourPrimaries>(aPicture.seq_hdr->pri), aLogger);
    273 }
    274 
    275 /* static */
    276 Maybe<gfx::ColorSpace2> DAV1DDecoder::GetColorPrimaries(
    277    const Dav1dPicture& aPicture, LazyLogModule& aLogger) {
    278  // When returning Nothing(), the caller chooses the appropriate default.
    279  if (!aPicture.seq_hdr || !aPicture.seq_hdr->color_description_present) {
    280    return Nothing();
    281  }
    282 
    283  return gfxUtils::CicpToColorPrimaries(
    284      static_cast<gfx::CICP::ColourPrimaries>(aPicture.seq_hdr->pri), aLogger);
    285 }
    286 
// Converts a borrowed Dav1dPicture into a VideoData. The pixel data is
// copied (VideoData::CreateAndCopyData), so the caller may unref the picture
// immediately after this returns.
Result<already_AddRefed<VideoData>, MediaResult> DAV1DDecoder::ConstructImage(
    const Dav1dPicture& aPicture) {
  VideoData::QuantizableBuffer b;
  b.mColorDepth = gfx::ColorDepthForBitDepth(aPicture.p.bpc);

  // Fall back to resolution-based / BT709 defaults when the bitstream
  // carries no color description.
  b.mYUVColorSpace =
      DAV1DDecoder::GetColorSpace(aPicture, sPDMLog)
          .valueOr(DefaultColorSpace({aPicture.p.w, aPicture.p.h}));
  b.mColorPrimaries = DAV1DDecoder::GetColorPrimaries(aPicture, sPDMLog)
                          .valueOr(gfx::ColorSpace2::BT709);
  // NOTE(review): seq_hdr is dereferenced unguarded here, while
  // GetColorSpace() null-checks it; assumes dav1d always sets seq_hdr on
  // output pictures — confirm against the dav1d API.
  b.mColorRange = aPicture.seq_hdr->color_range ? gfx::ColorRange::FULL
                                                : gfx::ColorRange::LIMITED;

  // Y plane.
  b.mPlanes[0].mData = static_cast<uint8_t*>(aPicture.data[0]);
  b.mPlanes[0].mStride = aPicture.stride[0];
  b.mPlanes[0].mHeight = aPicture.p.h;
  b.mPlanes[0].mWidth = aPicture.p.w;
  b.mPlanes[0].mSkip = 0;

  // U plane.
  b.mPlanes[1].mData = static_cast<uint8_t*>(aPicture.data[1]);
  b.mPlanes[1].mStride = aPicture.stride[1];
  b.mPlanes[1].mSkip = 0;

  // V plane; dav1d uses stride[1] for both chroma planes.
  b.mPlanes[2].mData = static_cast<uint8_t*>(aPicture.data[2]);
  b.mPlanes[2].mStride = b.mPlanes[1].mStride;
  b.mPlanes[2].mSkip = 0;

  // Chroma subsampling factors, mirroring dav1d's own yuv writer:
  // ss_ver: vertically subsampled only for I420; ss_hor: horizontally
  // subsampled for everything except I444.
  // https://code.videolan.org/videolan/dav1d/blob/master/tools/output/yuv.c#L67
  const int ss_ver = aPicture.p.layout == DAV1D_PIXEL_LAYOUT_I420;
  const int ss_hor = aPicture.p.layout != DAV1D_PIXEL_LAYOUT_I444;

  // (x + ss) >> ss halves the luma dimension, rounding odd sizes up.
  b.mPlanes[1].mHeight = b.mPlanes[2].mHeight =
      (aPicture.p.h + ss_ver) >> ss_ver;
  b.mPlanes[1].mWidth = b.mPlanes[2].mWidth = (aPicture.p.w + ss_hor) >> ss_hor;

  if (ss_ver) {
    b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;
  } else if (ss_hor) {
    b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH;
  }

  // Timestamp, duration and offset used here are wrong.
  // We need to take those values from the decoder. Latest
  // dav1d version allows for that.
  media::TimeUnit timecode =
      media::TimeUnit::FromMicroseconds(aPicture.m.timestamp);
  media::TimeUnit duration =
      media::TimeUnit::FromMicroseconds(aPicture.m.duration);
  int64_t offset = aPicture.m.offset;
  bool keyframe = aPicture.frame_hdr->frame_type == DAV1D_FRAME_TYPE_KEY;

  // Close the profiler stage opened in InvokeDecode() with format details.
  mPerformanceRecorder.Record(aPicture.m.timestamp, [&](DecodeStage& aStage) {
    aStage.SetResolution(aPicture.p.w, aPicture.p.h);
    auto format = [&]() -> Maybe<DecodeStage::ImageFormat> {
      switch (aPicture.p.layout) {
        case DAV1D_PIXEL_LAYOUT_I420:
          return Some(DecodeStage::YUV420P);
        case DAV1D_PIXEL_LAYOUT_I422:
          return Some(DecodeStage::YUV422P);
        case DAV1D_PIXEL_LAYOUT_I444:
          return Some(DecodeStage::YUV444P);
        default:
          return Nothing();
      }
    }();
    format.apply([&](auto& aFmt) { aStage.SetImageFormat(aFmt); });
    aStage.SetYUVColorSpace(b.mYUVColorSpace);
    aStage.SetColorRange(b.mColorRange);
    aStage.SetColorDepth(b.mColorDepth);
    aStage.SetStartTimeAndEndTime(aPicture.m.timestamp,
                                  aPicture.m.timestamp + aPicture.m.duration);
  });

  // Optionally down-convert high-bit-depth output to 8 bits per channel,
  // reusing buffers from the recycle bin created in the constructor.
  if (aPicture.p.bpc != 8 && m8bpcOutput) {
    MediaResult rv = b.To8BitPerChannel(m8bpcRecycleBin);
    if (NS_FAILED(rv.Code())) {
      return Result<already_AddRefed<VideoData>, MediaResult>(rv);
    }
  }

  return VideoData::CreateAndCopyData(
      mInfo, mImageContainer, offset, timecode, duration, b, keyframe, timecode,
      mInfo.ScaledImageRect(aPicture.p.w, aPicture.p.h), mImageAllocator);
}
    371 
    372 RefPtr<MediaDataDecoder::DecodePromise> DAV1DDecoder::Drain() {
    373  RefPtr<DAV1DDecoder> self = this;
    374  return InvokeAsync(mTaskQueue, __func__, [self, this] {
    375    AUTO_PROFILER_LABEL("DAV1DDecoder::Drain", MEDIA_PLAYBACK);
    376    DecodedData results;
    377    while (true) {
    378      Result<already_AddRefed<VideoData>, MediaResult> r = GetPicture();
    379      if (r.isOk()) {
    380        results.AppendElement(r.unwrap());
    381      } else {
    382        MediaResult rs = r.unwrapErr();
    383        if (rs.Code() == NS_ERROR_DOM_MEDIA_WAITING_FOR_DATA) {
    384          break;
    385        }
    386        // Skip if rs is NS_OK, which can happen if picture layout is I400.
    387        if (NS_FAILED(rs.Code())) {
    388          return DecodePromise::CreateAndReject(rs, __func__);
    389        }
    390      }
    391    }
    392    return DecodePromise::CreateAndResolve(std::move(results), __func__);
    393  });
    394 }
    395 
    396 RefPtr<MediaDataDecoder::FlushPromise> DAV1DDecoder::Flush() {
    397  RefPtr<DAV1DDecoder> self = this;
    398  return InvokeAsync(mTaskQueue, __func__, [this, self]() {
    399    AUTO_PROFILER_LABEL("DAV1DDecoder::Flush", MEDIA_PLAYBACK);
    400    dav1d_flush(self->mContext);
    401    mPerformanceRecorder.Record(std::numeric_limits<int64_t>::max());
    402    return FlushPromise::CreateAndResolve(true, __func__);
    403  });
    404 }
    405 
    406 RefPtr<ShutdownPromise> DAV1DDecoder::Shutdown() {
    407  RefPtr<DAV1DDecoder> self = this;
    408  return InvokeAsync(mTaskQueue, __func__, [self]() {
    409    AUTO_PROFILER_LABEL("DAV1DDecoder::Shutdown", MEDIA_PLAYBACK);
    410    dav1d_close(&self->mContext);
    411    return self->mTaskQueue->BeginShutdown();
    412  });
    413 }
    414 
    415 }  // namespace mozilla
    416 #undef LOG