tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

MediaData.cpp (25306B)


      1 /* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
      2 /* vim: set ts=8 sts=2 et sw=2 tw=80: */
      3 /* This Source Code Form is subject to the terms of the Mozilla Public
      4 * License, v. 2.0. If a copy of the MPL was not distributed with this
      5 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */
      6 
      7 #include "MediaData.h"
      8 
      9 #include <stdint.h>
     10 
     11 #include <functional>
     12 
     13 #include "ImageContainer.h"
     14 #include "MediaInfo.h"
     15 #include "MediaResult.h"
     16 #include "PerformanceRecorder.h"
     17 #include "VideoUtils.h"
     18 #include "YCbCrUtils.h"
     19 #include "libyuv.h"
     20 #include "mozilla/gfx/gfxVars.h"
     21 #include "mozilla/layers/ImageBridgeChild.h"
     22 #include "mozilla/layers/KnowsCompositor.h"
     23 #include "mozilla/layers/SharedRGBImage.h"
     24 
     25 #ifdef XP_WIN
     26 #  include "mozilla/gfx/DeviceManagerDx.h"
     27 #  include "mozilla/layers/D3D11ShareHandleImage.h"
     28 #  include "mozilla/layers/D3D11YCbCrImage.h"
     29 #elif XP_MACOSX
     30 #  include "MacIOSurfaceImage.h"
     31 #  include "mozilla/gfx/gfxVars.h"
     32 #endif
     33 
     34 namespace mozilla {
     35 
     36 using namespace mozilla::gfx;
     37 using layers::BufferRecycleBin;
     38 using layers::PlanarYCbCrData;
     39 using layers::PlanarYCbCrImage;
     40 using media::TimeUnit;
     41 
// Human-readable type tags, used when logging/printing sample objects.
const char* AudioData::sTypeName = "audio";
const char* VideoData::sTypeName = "video";
     44 
// Builds an audio sample from an interleaved buffer. The frame count is
// derived from the buffer length and channel count, and the duration from
// frame count and sample rate. aChannels and aRate must be non-zero.
AudioData::AudioData(int64_t aOffset, const media::TimeUnit& aTime,
                     AlignedAudioBuffer&& aData, uint32_t aChannels,
                     uint32_t aRate, uint32_t aChannelMap)
    // Passing TimeUnit::Zero() here because we can't pass the result of an
    // arithmetic operation to the CheckedInt ctor. We set the duration in the
    // ctor body below.
    : MediaData(sType, aOffset, aTime, TimeUnit::Zero()),
      mChannels(aChannels),
      mChannelMap(aChannelMap),
      mRate(aRate),
      mOriginalTime(aTime),
      mAudioData(std::move(aData)),
      mFrames(mAudioData.Length() / aChannels) {
  MOZ_RELEASE_ASSERT(aChannels != 0,
                     "Can't create an AudioData with 0 channels.");
  MOZ_RELEASE_ASSERT(aRate != 0,
                     "Can't create an AudioData with a sample-rate of 0.");
  mDuration = TimeUnit(mFrames, aRate);
}
     64 
     65 Span<AudioDataValue> AudioData::Data() const {
     66  return Span{GetAdjustedData(), mFrames * mChannels};
     67 }
     68 
     69 nsCString AudioData::ToString() const {
     70  nsCString rv;
     71  rv.AppendPrintf("AudioData: %s %s %" PRIu32 " frames %" PRIu32 "Hz, %" PRIu32
     72                  "ch",
     73                  mTime.ToString().get(), mDuration.ToString().get(), mFrames,
     74                  mRate, mChannels);
     75  return rv;
     76 }
     77 
// Rebases both the current and original start times to aStartTime. Only
// valid on an untrimmed sample, since trimming makes mTime diverge from
// mOriginalTime.
void AudioData::SetOriginalStartTime(const media::TimeUnit& aStartTime) {
  MOZ_ASSERT(mTime == mOriginalTime,
             "Do not call this if data has been trimmed!");
  mTime = aStartTime;
  mOriginalTime = aStartTime;
}
     84 
     85 bool AudioData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
     86  mOriginalTime -= aStartTime;
     87  mTime -= aStartTime;
     88  if (mTrimWindow) {
     89    *mTrimWindow -= aStartTime;
     90  }
     91  if (mTime.IsNegative()) {
     92    NS_WARNING("Negative audio start time after time-adjustment!");
     93  }
     94  return mTime.IsValid() && mOriginalTime.IsValid();
     95 }
     96 
// Restricts the playable portion of this sample to aTrim without copying:
// the window is applied by adjusting mDataOffset/mFrames/mTime/mDuration.
// Returns false if the buffer was moved out (MoveableData), the interval
// falls outside [mOriginalTime, GetEndTime()], or arithmetic overflowed.
bool AudioData::SetTrimWindow(const media::TimeInterval& aTrim) {
  MOZ_DIAGNOSTIC_ASSERT(aTrim.mStart.IsValid() && aTrim.mEnd.IsValid(),
                        "An overflow occurred on the provided TimeInterval");
  if (!mAudioData) {
    // MoveableData got called. Can no longer work on it.
    return false;
  }
  if (aTrim.mStart < mOriginalTime || aTrim.mEnd > GetEndTime()) {
    return false;
  }

  // Window boundaries expressed relative to the untrimmed start time.
  auto trimBefore = aTrim.mStart - mOriginalTime;
  auto trimAfter = aTrim.mEnd - mOriginalTime;
  if (!trimBefore.IsValid() || !trimAfter.IsValid()) {
    // Overflow.
    return false;
  }
  if (!mTrimWindow && trimBefore.IsZero() && trimAfter == mDuration) {
    // Nothing to change, abort early to prevent rounding errors.
    return true;
  }

  // Convert the leading trim to a frame offset into the interleaved buffer.
  size_t frameOffset = trimBefore.ToTicksAtRate(mRate);
  mTrimWindow = Some(aTrim);
  mDataOffset = frameOffset * mChannels;
  MOZ_DIAGNOSTIC_ASSERT(mDataOffset <= mAudioData.Length(),
                        "Data offset outside original buffer");
  int64_t frameCountAfterTrim = (trimAfter - trimBefore).ToTicksAtRate(mRate);
  if (frameCountAfterTrim >
      AssertedCast<int64_t>(mAudioData.Length() / mChannels)) {
    // Accept rounding error caused by an imprecise time_base in the container,
    // that can cause a mismatch but not other kind of unexpected frame count.
    MOZ_RELEASE_ASSERT(!trimBefore.IsBase(mRate));
    mFrames = 0;
  } else {
    mFrames = frameCountAfterTrim;
  }
  mTime = mOriginalTime + trimBefore;
  mDuration = TimeUnit(mFrames, mRate);

  return true;
}
    139 
    140 AudioDataValue* AudioData::GetAdjustedData() const {
    141  if (!mAudioData) {
    142    return nullptr;
    143  }
    144  return mAudioData.Data() + mDataOffset;
    145 }
    146 
    147 void AudioData::EnsureAudioBuffer() {
    148  if (mAudioBuffer || !mAudioData) {
    149    return;
    150  }
    151  const AudioDataValue* srcData = GetAdjustedData();
    152  CheckedInt<size_t> bufferSize(sizeof(AudioDataValue));
    153  bufferSize *= mFrames;
    154  bufferSize *= mChannels;
    155  mAudioBuffer = SharedBuffer::Create(bufferSize);
    156 
    157  AudioDataValue* destData = static_cast<AudioDataValue*>(mAudioBuffer->Data());
    158  for (uint32_t i = 0; i < mFrames; ++i) {
    159    for (uint32_t j = 0; j < mChannels; ++j) {
    160      destData[j * mFrames + i] = srcData[i * mChannels + j];
    161    }
    162  }
    163 }
    164 
    165 size_t AudioData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
    166  size_t size =
    167      aMallocSizeOf(this) + mAudioData.SizeOfExcludingThis(aMallocSizeOf);
    168  if (mAudioBuffer) {
    169    size += mAudioBuffer->SizeOfIncludingThis(aMallocSizeOf);
    170  }
    171  return size;
    172 }
    173 
// Transfers ownership of the sample buffer to the caller, first applying any
// pending trim window so the returned buffer contains exactly the trimmed
// samples. Leaves this object without data (mFrames == 0, no trim window).
AlignedAudioBuffer AudioData::MoveableData() {
  // Trim buffer according to trimming mask.
  mAudioData.PopFront(mDataOffset);
  mAudioData.SetLength(mFrames * mChannels);
  mDataOffset = 0;
  mFrames = 0;
  mTrimWindow.reset();
  return std::move(mAudioData);
}
    183 
    184 static bool ValidatePlane(const VideoData::YCbCrBuffer::Plane& aPlane) {
    185  return aPlane.mWidth <= PlanarYCbCrImage::MAX_DIMENSION &&
    186         aPlane.mHeight <= PlanarYCbCrImage::MAX_DIMENSION &&
    187         aPlane.mWidth * aPlane.mHeight < MAX_VIDEO_WIDTH * MAX_VIDEO_HEIGHT &&
    188         aPlane.mStride > 0 && aPlane.mWidth <= aPlane.mStride;
    189 }
    190 
// Validates a decoded YCbCr buffer against the picture rect that will be
// extracted from it. Returns NS_OK, or NS_ERROR_INVALID_ARG with a detail
// message describing the first inconsistency found.
static MediaResult ValidateBufferAndPicture(
    const VideoData::YCbCrBuffer& aBuffer, const IntRect& aPicture) {
  // The following situation should never happen unless there is a bug
  // in the decoder
  if (aBuffer.mPlanes[1].mWidth != aBuffer.mPlanes[2].mWidth ||
      aBuffer.mPlanes[1].mHeight != aBuffer.mPlanes[2].mHeight) {
    return MediaResult(NS_ERROR_INVALID_ARG,
                       "Chroma planes with different sizes");
  }

  // The following situations could be triggered by invalid input
  if (aPicture.width <= 0 || aPicture.height <= 0) {
    return MediaResult(NS_ERROR_INVALID_ARG, "Empty picture rect");
  }
  if (!ValidatePlane(aBuffer.mPlanes[0]) ||
      !ValidatePlane(aBuffer.mPlanes[1]) ||
      !ValidatePlane(aBuffer.mPlanes[2])) {
    return MediaResult(NS_ERROR_INVALID_ARG, "Invalid plane size");
  }

  // Ensure the picture size specified in the headers can be extracted out of
  // the frame we've been supplied without indexing out of bounds.
  CheckedUint32 xLimit = aPicture.x + CheckedUint32(aPicture.width);
  CheckedUint32 yLimit = aPicture.y + CheckedUint32(aPicture.height);
  if (!xLimit.isValid() || xLimit.value() > aBuffer.mPlanes[0].mStride ||
      !yLimit.isValid() || yLimit.value() > aBuffer.mPlanes[0].mHeight) {
    // The specified picture dimensions can't be contained inside the video
    // frame, we'll stomp memory if we try to copy it. Fail.
    return MediaResult(NS_ERROR_INVALID_ARG, "Overflowing picture rect");
  }
  return MediaResult(NS_OK);
}
    223 
// Constructs a video sample with the given timing and display metadata.
// The image payload (mImage) is attached separately by the factory methods.
VideoData::VideoData(int64_t aOffset, const TimeUnit& aTime,
                     const TimeUnit& aDuration, bool aKeyframe,
                     const TimeUnit& aTimecode, IntSize aDisplay,
                     layers::ImageContainer::FrameID aFrameID)
    : MediaData(Type::VIDEO_DATA, aOffset, aTime, aDuration),
      mDisplay(aDisplay),
      mFrameID(aFrameID),
      mSentToCompositor(false),
      mNextKeyFrameTime(TimeUnit::Invalid()) {
  MOZ_ASSERT(!mDuration.IsNegative(), "Frame must have non-negative duration.");
  mKeyframe = aKeyframe;
  mTimecode = aTimecode;
}
    237 
    238 VideoData::~VideoData() = default;
    239 
    240 size_t VideoData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
    241  size_t size = aMallocSizeOf(this);
    242 
    243  // Currently only PLANAR_YCBCR has a well defined function for determining
    244  // it's size, so reporting is limited to that type.
    245  if (mImage && mImage->GetFormat() == ImageFormat::PLANAR_YCBCR) {
    246    const mozilla::layers::PlanarYCbCrImage* img =
    247        static_cast<const mozilla::layers::PlanarYCbCrImage*>(mImage.get());
    248    size += img->SizeOfIncludingThis(aMallocSizeOf);
    249  }
    250 
    251  return size;
    252 }
    253 
    254 ColorDepth VideoData::GetColorDepth() const {
    255  if (!mImage) {
    256    return ColorDepth::COLOR_8;
    257  }
    258 
    259  return mImage->GetColorDepth();
    260 }
    261 
// Replaces this frame's duration; aDuration must be non-negative.
void VideoData::UpdateDuration(const TimeUnit& aDuration) {
  MOZ_ASSERT(!aDuration.IsNegative());
  mDuration = aDuration;
}
    266 
    267 void VideoData::UpdateTimestamp(const TimeUnit& aTimestamp) {
    268  MOZ_ASSERT(!aTimestamp.IsNegative());
    269 
    270  auto updatedDuration = GetEndTime() - aTimestamp;
    271  MOZ_ASSERT(!updatedDuration.IsNegative());
    272 
    273  mTime = aTimestamp;
    274  mDuration = updatedDuration;
    275 }
    276 
// Shifts this frame's timestamp earlier by aStartTime. Returns false if the
// adjusted time overflowed; a merely-negative result only warns.
bool VideoData::AdjustForStartTime(const media::TimeUnit& aStartTime) {
  mTime -= aStartTime;
  if (mTime.IsNegative()) {
    NS_WARNING("Negative video start time after time-adjustment!");
  }
  return mTime.IsValid();
}
    284 
// Flattens a VideoInfo + YCbCrBuffer + picture rect into the layers-side
// PlanarYCbCrData descriptor (pointers, strides, skips and color metadata).
// Does not copy pixel data; the returned struct aliases aBuffer's planes.
PlanarYCbCrData ConstructPlanarYCbCrData(const VideoInfo& aInfo,
                                         const VideoData::YCbCrBuffer& aBuffer,
                                         const IntRect& aPicture) {
  const VideoData::YCbCrBuffer::Plane& Y = aBuffer.mPlanes[0];
  const VideoData::YCbCrBuffer::Plane& Cb = aBuffer.mPlanes[1];
  const VideoData::YCbCrBuffer::Plane& Cr = aBuffer.mPlanes[2];

  PlanarYCbCrData data;
  data.mYChannel = Y.mData;
  data.mYStride = AssertedCast<int32_t>(Y.mStride);
  data.mYSkip = AssertedCast<int32_t>(Y.mSkip);
  data.mCbChannel = Cb.mData;
  data.mCrChannel = Cr.mData;
  // Cb and Cr are validated elsewhere to share dimensions, so one stride
  // field serves both chroma channels.
  data.mCbCrStride = AssertedCast<int32_t>(Cb.mStride);
  data.mCbSkip = AssertedCast<int32_t>(Cb.mSkip);
  data.mCrSkip = AssertedCast<int32_t>(Cr.mSkip);
  data.mPictureRect = aPicture;
  data.mStereoMode = aInfo.mStereoMode;
  data.mYUVColorSpace = aBuffer.mYUVColorSpace;
  data.mColorPrimaries = aBuffer.mColorPrimaries;
  data.mColorDepth = aBuffer.mColorDepth;
  if (aInfo.mTransferFunction) {
    data.mTransferFunction = *aInfo.mTransferFunction;
  }
  data.mColorRange = aBuffer.mColorRange;
  data.mChromaSubsampling = aBuffer.mChromaSubsampling;
  return data;
}
    313 
    314 /* static */
    315 MediaResult VideoData::SetVideoDataToImage(PlanarYCbCrImage* aVideoImage,
    316                                           const VideoInfo& aInfo,
    317                                           const YCbCrBuffer& aBuffer,
    318                                           const IntRect& aPicture,
    319                                           bool aCopyData) {
    320  MOZ_ASSERT(aVideoImage);
    321 
    322  PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    323 
    324  if (aCopyData) {
    325    return MediaResult(aVideoImage->CopyData(data),
    326                       RESULT_DETAIL("Failed to copy image data"));
    327  }
    328  return MediaResult(aVideoImage->AdoptData(data),
    329                     RESULT_DETAIL("Failed to adopt image data"));
    330 }
    331 
/* static */
// Creates a VideoData and copies aBuffer's YCbCr planes into an image
// allocated from aContainer. With a null aContainer, returns a dummy sample
// carrying no image. Returns Err() if the buffer/picture fail validation,
// or if no image could be allocated. On macOS with a hardware compositor,
// a MacIOSurfaceImage is preferred over a PlanarYCbCrImage.
Result<already_AddRefed<VideoData>, MediaResult> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, bool aKeyframe, const TimeUnit& aTimecode,
    const IntRect& aPicture, layers::KnowsCompositor* aAllocator) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (MediaResult r = ValidateBufferAndPicture(aBuffer, aPicture);
      NS_FAILED(r)) {
    return Err(r);
  }

  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDecodedVideo,
                                                  aInfo.mImage.height);
  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Currently our decoder only knows how to output to ImageFormat::PLANAR_YCBCR
  // format.
#if XP_MACOSX
  if (aAllocator && aAllocator->GetWebRenderCompositorType() !=
                        layers::WebRenderCompositor::SOFTWARE) {
    RefPtr<layers::MacIOSurfaceImage> ioImage =
        new layers::MacIOSurfaceImage(nullptr);
    PlanarYCbCrData data = ConstructPlanarYCbCrData(aInfo, aBuffer, aPicture);
    if (ioImage->SetData(aContainer, data)) {
      v->mImage = ioImage;
      perfRecorder.Record();
      return v.forget();
    }
    // On SetData failure, fall through to the generic PlanarYCbCr path below.
  }
#endif
  if (!v->mImage) {
    v->mImage = aContainer->CreatePlanarYCbCrImage();
  }

  if (!v->mImage) {
    // TODO: Should other error like NS_ERROR_UNEXPECTED be used here to
    // distinguish this error from the NS_ERROR_OUT_OF_MEMORY below?
    return Err(MediaResult(NS_ERROR_OUT_OF_MEMORY,
                           "Failed to create a PlanarYCbCrImage"));
  }
  NS_ASSERTION(v->mImage->GetFormat() == ImageFormat::PLANAR_YCBCR,
               "Wrong format?");
  PlanarYCbCrImage* videoImage = v->mImage->AsPlanarYCbCrImage();
  MOZ_ASSERT(videoImage);
  videoImage->SetColorDepth(aBuffer.mColorDepth);

  if (MediaResult r = VideoData::SetVideoDataToImage(
          videoImage, aInfo, aBuffer, aPicture, true /* aCopyData */);
      NS_FAILED(r)) {
    return Err(r);
  }

  perfRecorder.Record();
  return v.forget();
}
    396 
/* static */
// Alpha-carrying variant: converts the YUVA input (YCbCr planes plus
// aAlphaPlane) into a BGRA SharedRGBImage on the software side via
// ConvertI420AlphaToARGB. Returns a dummy sample when aContainer is null,
// and nullptr on validation, allocation or conversion failure.
already_AddRefed<VideoData> VideoData::CreateAndCopyData(
    const VideoInfo& aInfo, ImageContainer* aContainer, int64_t aOffset,
    const TimeUnit& aTime, const TimeUnit& aDuration,
    const YCbCrBuffer& aBuffer, const YCbCrBuffer::Plane& aAlphaPlane,
    bool aKeyframe, const TimeUnit& aTimecode, const IntRect& aPicture) {
  if (!aContainer) {
    // Create a dummy VideoData with no image. This gives us something to
    // send to media streams if necessary.
    RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                      aTimecode, aInfo.mDisplay, 0));
    return v.forget();
  }

  if (MediaResult r = ValidateBufferAndPicture(aBuffer, aPicture);
      NS_FAILED(r)) {
    NS_ERROR(r.Message().get());
    return nullptr;
  }

  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
                                    aTimecode, aInfo.mDisplay, 0));

  // Convert from YUVA to BGRA format on the software side.
  RefPtr<layers::SharedRGBImage> videoImage =
      aContainer->CreateSharedRGBImage();
  v->mImage = videoImage;

  if (!v->mImage) {
    return nullptr;
  }
  // The RGB image is sized to the full luma plane, not the picture rect.
  if (!videoImage->Allocate(
          IntSize(aBuffer.mPlanes[0].mWidth, aBuffer.mPlanes[0].mHeight),
          SurfaceFormat::B8G8R8A8)) {
    return nullptr;
  }

  RefPtr<layers::TextureClient> texture =
      videoImage->GetTextureClient(/* aKnowsCompositor */ nullptr);
  if (!texture) {
    NS_WARNING("Failed to allocate TextureClient");
    return nullptr;
  }

  layers::TextureClientAutoLock autoLock(texture,
                                         layers::OpenMode::OPEN_WRITE_ONLY);
  if (!autoLock.Succeeded()) {
    NS_WARNING("Failed to lock TextureClient");
    return nullptr;
  }

  layers::MappedTextureData buffer;
  if (!texture->BorrowMappedData(buffer)) {
    NS_WARNING("Failed to borrow mapped data");
    return nullptr;
  }

  // The naming convention for libyuv and associated utils is word-order.
  // The naming convention in the gfx stack is byte-order.
  nsresult result = ConvertI420AlphaToARGB(
      aBuffer.mPlanes[0].mData, aBuffer.mPlanes[1].mData,
      aBuffer.mPlanes[2].mData, aAlphaPlane.mData,
      AssertedCast<int>(aBuffer.mPlanes[0].mStride),
      AssertedCast<int>(aBuffer.mPlanes[1].mStride), buffer.data, buffer.stride,
      buffer.size.width, buffer.size.height);
  if (NS_FAILED(result)) {
    MOZ_ASSERT_UNREACHABLE("Failed to convert I420 YUVA into RGBA data");
    return nullptr;
  }

  return v.forget();
}
    469 
    470 /* static */
    471 already_AddRefed<VideoData> VideoData::CreateFromImage(
    472    const IntSize& aDisplay, int64_t aOffset, const TimeUnit& aTime,
    473    const TimeUnit& aDuration, const RefPtr<Image>& aImage, bool aKeyframe,
    474    const TimeUnit& aTimecode) {
    475  RefPtr<VideoData> v(new VideoData(aOffset, aTime, aDuration, aKeyframe,
    476                                    aTimecode, aDisplay, 0));
    477  v->mImage = aImage;
    478  return v.forget();
    479 }
    480 
    481 nsCString VideoData::ToString() const {
    482  std::array ImageFormatStrings = {
    483      "PLANAR_YCBCR",
    484      "NV_IMAGE",
    485      "SHARED_RGB",
    486      "MOZ2D_SURFACE",
    487      "MAC_IOSURFACE",
    488      "SURFACE_TEXTURE",
    489      "D3D9_RGB32_TEXTURE",
    490      "OVERLAY_IMAGE",
    491      "D3D11_SHARE_HANDLE_TEXTURE",
    492      "D3D11_TEXTURE_ZERO_COPY",
    493      "TEXTURE_WRAPPER",
    494      "GPU_VIDEO",
    495      "DMABUF",
    496      "DCOMP_SURFACE",
    497  };
    498 
    499  nsCString rv;
    500  rv.AppendPrintf(
    501      "VideoFrame [%s,%s] [%dx%d] format: %s", mTime.ToString().get(),
    502      mDuration.ToString().get(), mDisplay.Width(), mDisplay.Height(),
    503      mImage ? ImageFormatStrings[static_cast<int>(mImage->GetFormat())]
    504             : "null");
    505  return rv;
    506 }
    507 
// Converts this 10- or 12-bit buffer to 8 bits per channel in place, picking
// the libyuv I?10/I?12 -> I420/I422/I444 routine that matches the chroma
// subsampling. Destination planes live in a recyclable buffer taken from
// aRecycleBin (returned to the bin by the destructor). Must be called at
// most once per buffer.
MediaResult VideoData::QuantizableBuffer::To8BitPerChannel(
    BufferRecycleBin* aRecycleBin) {
  MOZ_ASSERT(!mRecycleBin, "Should not be called more than once.");
  mRecycleBin = aRecycleBin;

  MOZ_ASSERT(mColorDepth == ColorDepth::COLOR_10 ||
             mColorDepth == ColorDepth::COLOR_12);
  // Source samples are 16-bit, so half the byte stride gives the per-sample
  // stride, which is also the byte stride of the 8-bit destination planes.
  int yStride = mPlanes[0].mStride / 2;
  int uvStride = mPlanes[1].mStride / 2;
  size_t yLength = yStride * mPlanes[0].mHeight;
  size_t uvLength = uvStride * mPlanes[1].mHeight;

  const uint16_t* srcPlanes[3]{
      reinterpret_cast<const uint16_t*>(mPlanes[0].mData),
      reinterpret_cast<const uint16_t*>(mPlanes[1].mData),
      reinterpret_cast<const uint16_t*>(mPlanes[2].mData)};
  // One allocation holds Y followed by both chroma planes.
  AllocateRecyclableData(yLength + (uvLength * 2));
  if (!m8bpcPlanes) {
    return MediaResult(
        NS_ERROR_OUT_OF_MEMORY,
        RESULT_DETAIL("Cannot allocate %zu bytes for 8-bit conversion",
                      yLength + (uvLength * 2)));
  }
  uint8_t* destPlanes[3]{m8bpcPlanes.get(), m8bpcPlanes.get() + yLength,
                         m8bpcPlanes.get() + yLength + uvLength};
  using Func16To8 =  // libyuv function type.
      std::function<int(const uint16_t*, int, const uint16_t*, int,
                        const uint16_t*, int, uint8_t*, int, uint8_t*, int,
                        uint8_t*, int, int, int)>;
  // Select the converter by (depth, subsampling); empty if unsupported.
  auto convertFunc = [](ColorDepth aDepth,
                        ChromaSubsampling aSubsampling) -> Func16To8 {
    switch (aSubsampling) {
      case ChromaSubsampling::HALF_WIDTH_AND_HEIGHT:  // 420p
        return aDepth == ColorDepth::COLOR_10 ? libyuv::I010ToI420
                                              : libyuv::I012ToI420;
      case ChromaSubsampling::HALF_WIDTH:  // 422p
        return aDepth == ColorDepth::COLOR_10 ? libyuv::I210ToI422
                                              : libyuv::I212ToI422;
      case ChromaSubsampling::FULL:  // 444p
        return aDepth == ColorDepth::COLOR_10 ? libyuv::I410ToI444
                                              : libyuv::I412ToI444;
      default:
        return Func16To8();
    }
  }(mColorDepth, mChromaSubsampling);
  if (!convertFunc) {
    return MediaResult(
        NS_ERROR_DOM_MEDIA_DECODE_ERR,
        RESULT_DETAIL("Source format (color depth=%d, subsampling=%" PRIu8
                      ") not supported",
                      BitDepthForColorDepth(mColorDepth),
                      static_cast<uint8_t>(mChromaSubsampling)));
  }
  int r = convertFunc(srcPlanes[0], yStride, srcPlanes[1], uvStride,
                      srcPlanes[2], uvStride, destPlanes[0], yStride,
                      destPlanes[1], uvStride, destPlanes[2], uvStride,
                      mPlanes[0].mWidth, mPlanes[0].mHeight);
  if (r != 0) {
    return MediaResult(
        NS_ERROR_DOM_MEDIA_DECODE_ERR,
        RESULT_DETAIL("Conversion to 8-bit failed. libyuv error=%d", r));
  }
  // Update buffer info.
  mColorDepth = ColorDepth::COLOR_8;
  mPlanes[0].mData = destPlanes[0];
  mPlanes[0].mStride = yStride;
  mPlanes[1].mData = destPlanes[1];
  mPlanes[2].mData = destPlanes[2];
  mPlanes[1].mStride = mPlanes[2].mStride = uvStride;

  return MediaResult(NS_OK);
}
    580 
// Obtains an aLength-byte buffer from the recycle bin into m8bpcPlanes.
// Callers must check m8bpcPlanes for allocation failure; mAllocatedLength is
// recorded so the buffer can be recycled in the destructor.
void VideoData::QuantizableBuffer::AllocateRecyclableData(size_t aLength) {
  MOZ_ASSERT(!m8bpcPlanes, "Should not allocate more than once.");
  MOZ_ASSERT(aLength > 0, "Zero-length allocation!");

  m8bpcPlanes = mRecycleBin->GetBuffer(aLength);
  mAllocatedLength = aLength;
}
    588 
// Returns the converted-plane buffer (if any) to the recycle bin for reuse.
VideoData::QuantizableBuffer::~QuantizableBuffer() {
  if (m8bpcPlanes) {
    mRecycleBin->RecycleBuffer(std::move(m8bpcPlanes), mAllocatedLength);
  }
}
    594 
// MediaRawData constructors. All of them bind the read-only mCrypto view to
// the writable mCryptoInternal; the overloads differ only in how the sample
// (and optional alpha) payload is provided.

// Empty sample.
MediaRawData::MediaRawData()
    : MediaData(Type::RAW_DATA), mCrypto(mCryptoInternal) {}

// Copies aSize bytes from aData.
MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize) {}

// Copies sample and alpha payloads.
MediaRawData::MediaRawData(const uint8_t* aData, size_t aSize,
                           const uint8_t* aAlphaData, size_t aAlphaSize)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(aData, aSize),
      mAlphaBuffer(aAlphaData, aAlphaSize) {}

// Takes ownership of an existing aligned buffer (no copy).
MediaRawData::MediaRawData(AlignedByteBuffer&& aData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)) {}

// Takes ownership of sample and alpha buffers (no copy).
MediaRawData::MediaRawData(AlignedByteBuffer&& aData,
                           AlignedByteBuffer&& aAlphaData)
    : MediaData(Type::RAW_DATA),
      mCrypto(mCryptoInternal),
      mBuffer(std::move(aData)),
      mAlphaBuffer(std::move(aAlphaData)) {}
    621 
// Deep-copies this sample: all timing/metadata fields plus the sample and
// alpha buffers. Returns nullptr if either buffer copy fails (OOM).
already_AddRefed<MediaRawData> MediaRawData::Clone() const {
  // Height is only used to label the copy in the performance recorder.
  int32_t sampleHeight = 0;
  if (mTrackInfo && mTrackInfo->GetAsVideoInfo()) {
    sampleHeight = mTrackInfo->GetAsVideoInfo()->mImage.height;
  }
  PerformanceRecorder<PlaybackStage> perfRecorder(MediaStage::CopyDemuxedData,
                                                  sampleHeight);
  RefPtr<MediaRawData> s = new MediaRawData;
  s->mTimecode = mTimecode;
  s->mTime = mTime;
  s->mDuration = mDuration;
  s->mOffset = mOffset;
  s->mKeyframe = mKeyframe;
  s->mExtraData = mExtraData;
  s->mCryptoInternal = mCryptoInternal;
  s->mTrackInfo = mTrackInfo;
  s->mEOS = mEOS;
  s->mOriginalPresentationWindow = mOriginalPresentationWindow;
  if (!s->mBuffer.Append(mBuffer.Data(), mBuffer.Length())) {
    return nullptr;
  }
  if (!s->mAlphaBuffer.Append(mAlphaBuffer.Data(), mAlphaBuffer.Length())) {
    return nullptr;
  }
  perfRecorder.Record();
  return s.forget();
}
    649 
    650 MediaRawData::~MediaRawData() = default;
    651 
    652 size_t MediaRawData::SizeOfIncludingThis(MallocSizeOf aMallocSizeOf) const {
    653  size_t size = aMallocSizeOf(this);
    654  size += mBuffer.SizeOfExcludingThis(aMallocSizeOf);
    655  return size;
    656 }
    657 
    658 UniquePtr<MediaRawDataWriter> MediaRawData::CreateWriter() {
    659  UniquePtr<MediaRawDataWriter> p(new MediaRawDataWriter(this));
    660  return p;
    661 }
    662 
// Binds the writer to its target sample and exposes the sample's writable
// crypto metadata through mCrypto.
MediaRawDataWriter::MediaRawDataWriter(MediaRawData* aMediaRawData)
    : mCrypto(aMediaRawData->mCryptoInternal), mTarget(aMediaRawData) {}
    665 
// The writer methods below are thin forwarders onto the target sample's
// buffer; boolean-returning ones report allocation success.

// Resizes the buffer to aSize bytes.
bool MediaRawDataWriter::SetSize(size_t aSize) {
  return mTarget->mBuffer.SetLength(aSize);
}

// Inserts aSize bytes from aData at the front of the buffer.
bool MediaRawDataWriter::Prepend(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Prepend(aData, aSize);
}

// Appends aSize bytes from aData at the end of the buffer.
bool MediaRawDataWriter::Append(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Append(aData, aSize);
}

// Replaces the entire buffer contents with aData/aSize.
bool MediaRawDataWriter::Replace(const uint8_t* aData, size_t aSize) {
  return mTarget->mBuffer.Replace(aData, aSize);
}

// Empties the buffer.
void MediaRawDataWriter::Clear() { mTarget->mBuffer.Clear(); }

// Mutable pointer to the buffer contents.
uint8_t* MediaRawDataWriter::Data() { return mTarget->mBuffer.Data(); }

// Current payload size, as reported by the target sample.
size_t MediaRawDataWriter::Size() { return mTarget->Size(); }

// Drops aSize bytes from the front of the buffer.
void MediaRawDataWriter::PopFront(size_t aSize) {
  mTarget->mBuffer.PopFront(aSize);
}
    691 
    692 nsCString CryptoSchemeSetToString(const CryptoSchemeSet& aSchemes) {
    693  nsAutoCString rv;
    694  if (aSchemes.contains(CryptoScheme::Cenc)) {
    695    rv.AppendLiteral("cenc");
    696  }
    697  if (aSchemes.contains(CryptoScheme::Cbcs)) {
    698    if (!rv.IsEmpty()) {
    699      rv.AppendLiteral("/");
    700    }
    701    rv.AppendLiteral("cbcs");
    702  }
    703  if (aSchemes.contains(CryptoScheme::Cbcs_1_9)) {
    704    if (!rv.IsEmpty()) {
    705      rv.AppendLiteral("/");
    706    }
    707    rv.AppendLiteral("cbcs-1-9");
    708  }
    709  if (rv.IsEmpty()) {
    710    rv.AppendLiteral("none");
    711  }
    712  return std::move(rv);
    713 }
    714 
    715 CryptoScheme StringToCryptoScheme(const nsAString& aString) {
    716  if (aString.EqualsLiteral("cenc")) {
    717    return CryptoScheme::Cenc;
    718  }
    719  if (aString.EqualsLiteral("cbcs")) {
    720    return CryptoScheme::Cbcs;
    721  }
    722  if (aString.EqualsLiteral("cbcs-1-9")) {
    723    return CryptoScheme::Cbcs_1_9;
    724  }
    725  return CryptoScheme::None;
    726 }
    727 
    728 }  // namespace mozilla