VPXDecoder.cpp (22355B)
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim:set ts=2 sw=2 sts=2 et cindent: */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "VPXDecoder.h"

#include <vpx/vpx_image.h>

#include <algorithm>

#include "BitReader.h"
#include "BitWriter.h"
#include "ImageContainer.h"
#include "PerformanceRecorder.h"
#include "TimeUnits.h"
#include "VideoUtils.h"
#include "gfx2DGlue.h"
#include "gfxUtils.h"
#include "mozilla/PodOperations.h"
#include "mozilla/SyncRunnable.h"
#include "mozilla/TaskQueue.h"
#include "nsError.h"
#include "prsystem.h"

#undef LOG
#define LOG(arg, ...)                                                  \
  DDMOZ_LOG(sPDMLog, mozilla::LogLevel::Debug, "::%s: " arg, __func__, \
            ##__VA_ARGS__)

namespace mozilla {

using namespace gfx;
using namespace layers;

// Maps a video MIME type string to the VPX codec it denotes.
// Returns Codec::Unknown for anything that is not VP8/VP9.
static VPXDecoder::Codec MimeTypeToCodec(const nsACString& aMimeType) {
  if (aMimeType.EqualsLiteral("video/vp8")) {
    return VPXDecoder::Codec::VP8;
  }
  if (aMimeType.EqualsLiteral("video/vp9")) {
    return VPXDecoder::Codec::VP9;
  }
#ifdef ANDROID
  // Android-specific legacy MIME aliases for the On2/VPX codecs.
  if (aMimeType.EqualsLiteral("video/x-vnd.on2.vp8")) {
    return VPXDecoder::Codec::VP8;
  }
  if (aMimeType.EqualsLiteral("video/x-vnd.on2.vp9")) {
    return VPXDecoder::Codec::VP9;
  }
#endif
  return VPXDecoder::Codec::Unknown;
}

// Initializes a libvpx decoder context for the given codec.
// Thread count: 2 by default; for VP9 it is raised with display width
// (>=1024 -> 4, >=2048 -> 8), then clamped to the number of processors.
// In low-latency mode a single thread is used (avoids frame-threading delay).
// Returns NS_ERROR_FAILURE if the codec is unknown or libvpx init fails.
static nsresult InitContext(vpx_codec_ctx_t* aCtx, const VideoInfo& aInfo,
                            const VPXDecoder::Codec aCodec, bool aLowLatency) {
  int decode_threads = 2;

  vpx_codec_iface_t* dx = nullptr;
  if (aCodec == VPXDecoder::Codec::VP8) {
    dx = vpx_codec_vp8_dx();
  } else if (aCodec == VPXDecoder::Codec::VP9) {
    dx = vpx_codec_vp9_dx();
    if (aInfo.mDisplay.width >= 2048) {
      decode_threads = 8;
    } else if (aInfo.mDisplay.width >= 1024) {
      decode_threads = 4;
    }
  }
  decode_threads = std::min(decode_threads, PR_GetNumberOfProcessors());

  vpx_codec_dec_cfg_t config;
  config.threads = aLowLatency ? 1 : decode_threads;
  config.w = config.h = 0;  // set after decode

  if (!dx || vpx_codec_dec_init(aCtx, dx, &config, 0)) {
    return NS_ERROR_FAILURE;
  }
  return NS_OK;
}

VPXDecoder::VPXDecoder(const CreateDecoderParams& aParams)
    : mImageContainer(aParams.mImageContainer),
      mImageAllocator(aParams.mKnowsCompositor),
      mTaskQueue(TaskQueue::Create(
          GetMediaThreadPool(MediaThreadType::PLATFORM_DECODER), "VPXDecoder")),
      mInfo(aParams.VideoConfig()),
      mCodec(MimeTypeToCodec(aParams.VideoConfig().mMimeType)),
      mLowLatency(
          aParams.mOptions.contains(CreateDecoderParams::Option::LowLatency)),
      mTrackingId(aParams.mTrackingId) {
  MOZ_COUNT_CTOR(VPXDecoder);
  // Zero both codec contexts; they are initialized lazily in Init().
  PodZero(&mVPX);
  PodZero(&mVPXAlpha);
}

VPXDecoder::~VPXDecoder() { MOZ_COUNT_DTOR(VPXDecoder); }

// Destroys both libvpx contexts on the task queue, then shuts the queue down.
RefPtr<ShutdownPromise> VPXDecoder::Shutdown() {
  RefPtr<VPXDecoder> self = this;
  return InvokeAsync(mTaskQueue, __func__, [self]() {
    AUTO_PROFILER_LABEL("VPXDecoder::Shutdown", MEDIA_PLAYBACK);
    vpx_codec_destroy(&self->mVPX);
    vpx_codec_destroy(&self->mVPXAlpha);
    return self->mTaskQueue->BeginShutdown();
  });
}

// Sets up the main decoder context, and a second independent context for the
// alpha channel when the stream carries one (WebM alpha is a separate VPX
// stream decoded in lockstep with the color stream).
RefPtr<MediaDataDecoder::InitPromise> VPXDecoder::Init() {
  AUTO_PROFILER_LABEL("VPXDecoder::Init", MEDIA_PLAYBACK);
  if (NS_FAILED(InitContext(&mVPX, mInfo, mCodec, mLowLatency))) {
    return VPXDecoder::InitPromise::CreateAndReject(
        NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
  }
  if (mInfo.HasAlpha()) {
    if (NS_FAILED(InitContext(&mVPXAlpha, mInfo, mCodec, mLowLatency))) {
      return VPXDecoder::InitPromise::CreateAndReject(
          NS_ERROR_DOM_MEDIA_FATAL_ERR, __func__);
    }
  }
  return VPXDecoder::InitPromise::CreateAndResolve(TrackInfo::kVideoTrack,
                                                   __func__);
}

// No decoder-side state needs resetting; resolving on the task queue simply
// serializes the flush behind any in-flight decode.
RefPtr<MediaDataDecoder::FlushPromise> VPXDecoder::Flush() {
  return InvokeAsync(mTaskQueue, __func__, []() {
    AUTO_PROFILER_LABEL("VPXDecoder::Flush", MEDIA_PLAYBACK);
    return FlushPromise::CreateAndResolve(true, __func__);
  });
}

// Decodes one compressed sample (task-queue thread only). Feeds the packet to
// libvpx, then converts every produced image into a VideoData, decoding the
// alpha side-channel when present. Rejects with a decode error on libvpx
// failure or unknown image format, NS_ERROR_OUT_OF_MEMORY on allocation
// failure; resolves with the list of decoded frames otherwise.
RefPtr<MediaDataDecoder::DecodePromise> VPXDecoder::ProcessDecode(
    MediaRawData* aSample) {
  AUTO_PROFILER_LABEL("VPXDecoder::ProcessDecode", MEDIA_PLAYBACK);
  MOZ_ASSERT(mTaskQueue->IsOnCurrentThread());

  // Build the flag set describing this decode for performance telemetry.
  MediaInfoFlag flag = MediaInfoFlag::None;
  flag |= (aSample->mKeyframe ? MediaInfoFlag::KeyFrame
                              : MediaInfoFlag::NonKeyFrame);
  flag |= MediaInfoFlag::SoftwareDecoding;
  switch (mCodec) {
    case Codec::VP8:
      flag |= MediaInfoFlag::VIDEO_VP8;
      break;
    case Codec::VP9:
      flag |= MediaInfoFlag::VIDEO_VP9;
      break;
    default:
      break;
  }
  // Only record performance when the caller supplied a tracking id.
  auto rec = mTrackingId.map([&](const auto& aId) {
    return PerformanceRecorder<DecodeStage>("VPXDecoder"_ns, aId, flag);
  });

  if (vpx_codec_err_t r = vpx_codec_decode(&mVPX, aSample->Data(),
                                           aSample->Size(), nullptr, 0)) {
    LOG("VPX Decode error: %s", vpx_codec_err_to_string(r));
    return DecodePromise::CreateAndReject(
        MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                    RESULT_DETAIL("VPX error: %s", vpx_codec_err_to_string(r))),
        __func__);
  }

  vpx_codec_iter_t iter = nullptr;
  vpx_image_t* img;
  vpx_image_t* img_alpha = nullptr;
  bool alpha_decoded = false;
  DecodedData results;

  // One packet may yield multiple frames (VP9 superframes).
  while ((img = vpx_codec_get_frame(&mVPX, &iter))) {
    NS_ASSERTION(img->fmt == VPX_IMG_FMT_I420 || img->fmt == VPX_IMG_FMT_I444,
                 "WebM image format not I420 or I444");
    NS_ASSERTION(!alpha_decoded,
                 "Multiple frames per packet that contains alpha");

    // Decode the alpha side-data at most once per packet; it pairs with the
    // first (and expected only) color frame.
    if (aSample->AlphaSize() > 0) {
      if (!alpha_decoded) {
        MediaResult rv = DecodeAlpha(&img_alpha, aSample);
        if (NS_FAILED(rv)) {
          return DecodePromise::CreateAndReject(rv, __func__);
        }
        alpha_decoded = true;
      }
    }
    // Chroma shifts are rounded down as per the decoding examples in the SDK
    VideoData::YCbCrBuffer b;
    b.mPlanes[0].mData = img->planes[0];
    b.mPlanes[0].mStride = img->stride[0];
    b.mPlanes[0].mHeight = img->d_h;
    b.mPlanes[0].mWidth = img->d_w;
    b.mPlanes[0].mSkip = 0;

    b.mPlanes[1].mData = img->planes[1];
    b.mPlanes[1].mStride = img->stride[1];
    b.mPlanes[1].mSkip = 0;

    b.mPlanes[2].mData = img->planes[2];
    b.mPlanes[2].mStride = img->stride[2];
    b.mPlanes[2].mSkip = 0;

    if (img->fmt == VPX_IMG_FMT_I420) {
      // 4:2:0 — chroma planes are half-size in both dimensions, rounded up.
      b.mChromaSubsampling = gfx::ChromaSubsampling::HALF_WIDTH_AND_HEIGHT;

      b.mPlanes[1].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[1].mWidth = (img->d_w + 1) >> img->x_chroma_shift;

      b.mPlanes[2].mHeight = (img->d_h + 1) >> img->y_chroma_shift;
      b.mPlanes[2].mWidth = (img->d_w + 1) >> img->x_chroma_shift;
    } else if (img->fmt == VPX_IMG_FMT_I444) {
      // 4:4:4 — chroma planes are full resolution.
      b.mPlanes[1].mHeight = img->d_h;
      b.mPlanes[1].mWidth = img->d_w;

      b.mPlanes[2].mHeight = img->d_h;
      b.mPlanes[2].mWidth = img->d_w;
    } else {
      LOG("VPX Unknown image format");
      return DecodePromise::CreateAndReject(
          MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                      RESULT_DETAIL("VPX Unknown image format")),
          __func__);
    }
    // Map libvpx's color-space enum onto gfx; unknown values fall back to a
    // resolution-based default (SD vs HD heuristic in DefaultColorSpace).
    b.mYUVColorSpace = [&]() {
      switch (img->cs) {
        case VPX_CS_BT_601:
        case VPX_CS_SMPTE_170:
        case VPX_CS_SMPTE_240:
          return gfx::YUVColorSpace::BT601;
        case VPX_CS_BT_709:
          return gfx::YUVColorSpace::BT709;
        case VPX_CS_BT_2020:
          return gfx::YUVColorSpace::BT2020;
        default:
          return DefaultColorSpace({img->d_w, img->d_h});
      }
    }();
    b.mColorRange = img->range == VPX_CR_FULL_RANGE ? gfx::ColorRange::FULL
                                                    : gfx::ColorRange::LIMITED;

    RefPtr<VideoData> v;
    if (!img_alpha) {
      Result<already_AddRefed<VideoData>, MediaResult> r =
          VideoData::CreateAndCopyData(
              mInfo, mImageContainer, aSample->mOffset, aSample->mTime,
              aSample->mDuration, b, aSample->mKeyframe, aSample->mTimecode,
              mInfo.ScaledImageRect(img->d_w, img->d_h), mImageAllocator);
      // TODO: Reject DecodePromise below with r's error return.
      v = r.unwrapOr(nullptr);
    } else {
      // Alpha path: pass the decoded alpha Y-plane alongside the YCbCr data.
      VideoData::YCbCrBuffer::Plane alpha_plane;
      alpha_plane.mData = img_alpha->planes[0];
      alpha_plane.mStride = img_alpha->stride[0];
      alpha_plane.mHeight = img_alpha->d_h;
      alpha_plane.mWidth = img_alpha->d_w;
      alpha_plane.mSkip = 0;
      v = VideoData::CreateAndCopyData(
          mInfo, mImageContainer, aSample->mOffset, aSample->mTime,
          aSample->mDuration, b, alpha_plane, aSample->mKeyframe,
          aSample->mTimecode, mInfo.ScaledImageRect(img->d_w, img->d_h));
    }

    if (!v) {
      LOG("Image allocation error source %ux%u display %ux%u picture %ux%u",
          img->d_w, img->d_h, mInfo.mDisplay.width, mInfo.mDisplay.height,
          mInfo.mImage.width, mInfo.mImage.height);
      return DecodePromise::CreateAndReject(
          MediaResult(NS_ERROR_OUT_OF_MEMORY, __func__), __func__);
    }

    // Record decode-stage telemetry for this frame (no-op without tracking).
    rec.apply([&](auto& aRec) {
      return aRec.Record([&](DecodeStage& aStage) {
        aStage.SetResolution(static_cast<int>(img->d_w),
                             static_cast<int>(img->d_h));
        auto format = [&]() -> Maybe<DecodeStage::ImageFormat> {
          switch (img->fmt) {
            case VPX_IMG_FMT_I420:
              return Some(DecodeStage::YUV420P);
            case VPX_IMG_FMT_I444:
              return Some(DecodeStage::YUV444P);
            default:
              return Nothing();
          }
        }();
        format.apply([&](auto& aFmt) { aStage.SetImageFormat(aFmt); });
        aStage.SetYUVColorSpace(b.mYUVColorSpace);
        aStage.SetColorRange(b.mColorRange);
        aStage.SetColorDepth(b.mColorDepth);
        aStage.SetStartTimeAndEndTime(v->mTime.ToMicroseconds(),
                                      v->GetEndTime().ToMicroseconds());
      });
    });

    results.AppendElement(std::move(v));
  }
  return DecodePromise::CreateAndResolve(std::move(results), __func__);
}

// Public entry point: trampolines the sample onto the task queue.
RefPtr<MediaDataDecoder::DecodePromise> VPXDecoder::Decode(
    MediaRawData* aSample) {
  return InvokeAsync<MediaRawData*>(mTaskQueue, this, __func__,
                                    &VPXDecoder::ProcessDecode, aSample);
}

// libvpx emits frames as soon as they are decoded, so there is nothing queued
// to drain; resolve with an empty list once pending work has run.
RefPtr<MediaDataDecoder::DecodePromise> VPXDecoder::Drain() {
  return InvokeAsync(mTaskQueue, __func__, [] {
    AUTO_PROFILER_LABEL("VPXDecoder::Flush", MEDIA_PLAYBACK);
    return DecodePromise::CreateAndResolve(DecodedData(), __func__);
  });
}

// Decodes the sample's alpha side-channel with the dedicated alpha context and
// returns the first decoded image via aImgAlpha.
// NOTE(review): vpx_codec_get_frame may return null (e.g. no frame produced);
// the NS_ASSERTION below dereferences *aImgAlpha unconditionally and the
// caller uses it without a null check — confirm upstream guarantees a frame.
MediaResult VPXDecoder::DecodeAlpha(vpx_image_t** aImgAlpha,
                                    const MediaRawData* aSample) {
  vpx_codec_err_t r = vpx_codec_decode(&mVPXAlpha, aSample->AlphaData(),
                                       aSample->AlphaSize(), nullptr, 0);
  if (r) {
    LOG("VPX decode alpha error: %s", vpx_codec_err_to_string(r));
    return MediaResult(NS_ERROR_DOM_MEDIA_DECODE_ERR,
                       RESULT_DETAIL("VPX decode alpha error: %s",
                                     vpx_codec_err_to_string(r)));
  }

  vpx_codec_iter_t iter = nullptr;

  *aImgAlpha = vpx_codec_get_frame(&mVPXAlpha, &iter);
  NS_ASSERTION((*aImgAlpha)->fmt == VPX_IMG_FMT_I420 ||
                   (*aImgAlpha)->fmt == VPX_IMG_FMT_I444,
               "WebM image format not I420 or I444");

  return NS_OK;
}

nsCString VPXDecoder::GetCodecName() const {
  switch (mCodec) {
    case Codec::VP8:
      return "vp8"_ns;
    case Codec::VP9:
      return "vp9"_ns;
    default:
      return "unknown"_ns;
  }
}

/* static */
bool VPXDecoder::IsVPX(const nsACString& aMimeType, uint8_t aCodecMask) {
  VPXDecoder::Codec codec = MimeTypeToCodec(aMimeType);
  return ((aCodecMask & VPXDecoder::VP8) && codec == VPXDecoder::Codec::VP8) ||
         ((aCodecMask & VPXDecoder::VP9) && codec == VPXDecoder::Codec::VP9);
}

/* static */
bool VPXDecoder::IsVP8(const nsACString& aMimeType) {
  return IsVPX(aMimeType, VPXDecoder::VP8);
}

/* static */
bool VPXDecoder::IsVP9(const nsACString& aMimeType) {
  return IsVPX(aMimeType, VPXDecoder::VP9);
}

/* static */
bool VPXDecoder::IsKeyframe(Span<const uint8_t> aBuffer, Codec aCodec) {
  VPXStreamInfo info;
  return GetStreamInfo(aBuffer, info, aCodec) && info.mKeyFrame;
}

// Returns the coded frame size parsed from the bitstream header, or an empty
// size on parse failure.
/* static */
gfx::IntSize VPXDecoder::GetFrameSize(Span<const uint8_t> aBuffer,
                                      Codec aCodec) {
  VPXStreamInfo info;
  if (!GetStreamInfo(aBuffer, info, aCodec)) {
    return gfx::IntSize();
  }
  return info.mImage;
}

// Returns the display (render) size parsed from the bitstream header, or an
// empty size on parse failure.
/* static */
gfx::IntSize VPXDecoder::GetDisplaySize(Span<const uint8_t> aBuffer,
                                        Codec aCodec) {
  VPXStreamInfo info;
  if (!GetStreamInfo(aBuffer, info, aCodec)) {
    return gfx::IntSize();
  }
  return info.mDisplay;
}

// Returns the VP9 profile (0-3) from the frame header, or -1 on failure.
/* static */
int VPXDecoder::GetVP9Profile(Span<const uint8_t> aBuffer) {
  VPXStreamInfo info;
  if (!GetStreamInfo(aBuffer, info, Codec::VP9)) {
    return -1;
  }
  return info.mProfile;
}

// Parses the uncompressed frame header of a VP8 or VP9 frame into aInfo.
// Returns false when the header is invalid or too short to parse. For VP8
// interframes (and VP9 show_existing_frame / non-intra frames) it returns true
// with only partial information, since the header omits the rest.
/* static */
bool VPXDecoder::GetStreamInfo(Span<const uint8_t> aBuffer,
                               VPXDecoder::VPXStreamInfo& aInfo, Codec aCodec) {
  if (aBuffer.IsEmpty()) {
    // Can't be good.
    return false;
  }

  aInfo = VPXStreamInfo();

  if (aCodec == Codec::VP8) {
    aInfo.mKeyFrame = (aBuffer[0] & 1) ==
                      0;  // frame type (0 for key frames, 1 for interframes)
    if (!aInfo.mKeyFrame) {
      // We can't retrieve the required information from interframes.
      return true;
    }
    // Keyframe header: 3-byte frame tag + 3-byte start code + 4 bytes of size.
    if (aBuffer.Length() < 10) {
      return false;
    }
    uint8_t version = (aBuffer[0] >> 1) & 0x7;
    if (version > 3) {
      return false;
    }
    // Keyframe start code is the fixed byte sequence 0x9d 0x01 0x2a.
    uint8_t start_code_byte_0 = aBuffer[3];
    uint8_t start_code_byte_1 = aBuffer[4];
    uint8_t start_code_byte_2 = aBuffer[5];
    if (start_code_byte_0 != 0x9d || start_code_byte_1 != 0x01 ||
        start_code_byte_2 != 0x2a) {
      return false;
    }
    // 14-bit little-endian width/height (upper 2 bits are scaling hints).
    uint16_t width = (aBuffer[6] | aBuffer[7] << 8) & 0x3fff;
    uint16_t height = (aBuffer[8] | aBuffer[9] << 8) & 0x3fff;

    // aspect ratio isn't found in the VP8 frame header.
    aInfo.mImage = gfx::IntSize(width, height);
    aInfo.mDisplayAndImageDifferent = false;
    aInfo.mDisplay = aInfo.mImage;
    return true;
  }

  // VP9: bit-parse the uncompressed header.
  BitReader br(aBuffer.Elements(), aBuffer.Length() * 8);
  uint32_t frameMarker = br.ReadBits(2);  // frame_marker
  if (frameMarker != 2) {
    // That's not a valid vp9 header.
    return false;
  }
  uint32_t profile = br.ReadBits(1);  // profile_low_bit
  profile |= br.ReadBits(1) << 1;     // profile_high_bit
  if (profile == 3) {
    profile += br.ReadBits(1);  // reserved_zero
    if (profile > 3) {
      // reserved_zero wasn't zero.
      return false;
    }
  }

  aInfo.mProfile = profile;

  bool show_existing_frame = br.ReadBits(1);
  if (show_existing_frame) {
    if (profile == 3 && aBuffer.Length() < 2) {
      return false;
    }
    (void)br.ReadBits(3);  // frame_to_show_map_idx
    return true;
  }

  if (aBuffer.Length() < 10) {
    // Header too small;
    return false;
  }

  aInfo.mKeyFrame = !br.ReadBits(1);
  bool show_frame = br.ReadBits(1);
  bool error_resilient_mode = br.ReadBits(1);

  // frame_sync_code: fixed sequence 0x49 0x83 0x42 per the VP9 spec.
  auto frame_sync_code = [&]() -> bool {
    uint8_t frame_sync_byte_1 = br.ReadBits(8);
    uint8_t frame_sync_byte_2 = br.ReadBits(8);
    uint8_t frame_sync_byte_3 = br.ReadBits(8);
    return frame_sync_byte_1 == 0x49 && frame_sync_byte_2 == 0x83 &&
           frame_sync_byte_3 == 0x42;
  };

  // color_config: bit depth, color space, range and chroma subsampling.
  // Profiles 0/1 are 8-bit; profiles 2/3 signal 10 or 12 bits.
  auto color_config = [&]() -> bool {
    aInfo.mBitDepth = 8;
    if (profile >= 2) {
      bool ten_or_twelve_bit = br.ReadBits(1);
      aInfo.mBitDepth = ten_or_twelve_bit ? 12 : 10;
    }
    aInfo.mColorSpace = br.ReadBits(3);
    if (aInfo.mColorSpace != 7 /* CS_RGB */) {
      aInfo.mFullRange = br.ReadBits(1);
      if (profile == 1 || profile == 3) {
        aInfo.mSubSampling_x = br.ReadBits(1);
        aInfo.mSubSampling_y = br.ReadBits(1);
        if (br.ReadBits(1)) {  // reserved_zero
          return false;
        };
      } else {
        // Profiles 0 and 2 are always 4:2:0.
        aInfo.mSubSampling_x = true;
        aInfo.mSubSampling_y = true;
      }
    } else {
      aInfo.mFullRange = true;
      if (profile == 1 || profile == 3) {
        aInfo.mSubSampling_x = false;
        aInfo.mSubSampling_y = false;
        if (br.ReadBits(1)) {  // reserved_zero
          return false;
        };
      } else {
        // sRGB color space is only available with VP9 profile 1.
        return false;
      }
    }
    return true;
  };

  // frame_size: coded width/height, stored minus one.
  auto frame_size = [&]() {
    int32_t width = static_cast<int32_t>(br.ReadBits(16)) + 1;
    int32_t height = static_cast<int32_t>(br.ReadBits(16)) + 1;
    aInfo.mImage = gfx::IntSize(width, height);
  };

  // render_size: optional display size distinct from the coded size.
  auto render_size = [&]() {
    // render_and_frame_size_different
    aInfo.mDisplayAndImageDifferent = br.ReadBits(1);
    if (aInfo.mDisplayAndImageDifferent) {
      int32_t width = static_cast<int32_t>(br.ReadBits(16)) + 1;
      int32_t height = static_cast<int32_t>(br.ReadBits(16)) + 1;
      aInfo.mDisplay = gfx::IntSize(width, height);
    } else {
      aInfo.mDisplay = aInfo.mImage;
    }
  };

  if (aInfo.mKeyFrame) {
    if (!frame_sync_code()) {
      return false;
    }
    if (!color_config()) {
      return false;
    }
    frame_size();
    render_size();
  } else {
    // Non-keyframes only carry sizes when they are intra-only frames.
    bool intra_only = show_frame ? false : br.ReadBit();
    if (!error_resilient_mode) {
      (void)br.ReadBits(2);  // reset_frame_context
    }
    if (intra_only) {
      if (!frame_sync_code()) {
        return false;
      }
      if (profile > 0) {
        if (!color_config()) {
          return false;
        }
      } else {
        // Profile 0 intra-only frames have an implicit color config.
        aInfo.mColorSpace = 1;  // CS_BT_601
        aInfo.mSubSampling_x = true;
        aInfo.mSubSampling_y = true;
        aInfo.mBitDepth = 8;
      }
      (void)br.ReadBits(8);  // refresh_frame_flags
      frame_size();
      render_size();
    }
  }
  return true;
}

// Ref: "VP Codec ISO Media File Format Binding, v1.0, 2017-03-31"
// <https://www.webmproject.org/vp9/mp4/>
//
// class VPCodecConfigurationBox extends FullBox('vpcC', version = 1, 0)
// {
//   VPCodecConfigurationRecord() vpcConfig;
// }
//
// aligned (8) class VPCodecConfigurationRecord {
//   unsigned int (8)     profile;
//   unsigned int (8)     level;
//   unsigned int (4)     bitDepth;
//   unsigned int (3)     chromaSubsampling;
//   unsigned int (1)     videoFullRangeFlag;
//   unsigned int (8)     colourPrimaries;
//   unsigned int (8)     transferCharacteristics;
//   unsigned int (8)     matrixCoefficients;
//   unsigned int (16)    codecIntializationDataSize;
//   unsigned int (8)[]   codecIntializationData;
// }

// Serializes aInfo into a 'vpcC' VPCodecConfigurationBox payload (see the
// binding spec quoted above) and appends it to aDestBox.
/* static */
void VPXDecoder::GetVPCCBox(MediaByteBuffer* aDestBox,
                            const VPXStreamInfo& aInfo) {
  BitWriter writer(aDestBox);

  // Map the subsampling flag pair onto the box's chromaSubsampling enum.
  int chroma = [&]() {
    if (aInfo.mSubSampling_x && aInfo.mSubSampling_y) {
      return 1;  // 420 Colocated;
    }
    if (aInfo.mSubSampling_x && !aInfo.mSubSampling_y) {
      return 2;  // 422
    }
    if (!aInfo.mSubSampling_x && !aInfo.mSubSampling_y) {
      return 3;  // 444
    }
    // This indicates 4:4:0 subsampling, which is not expressable in the
    // 'vpcC' box. Default to 4:2:0.
    return 1;
  }();

  writer.WriteU8(1);        // version
  writer.WriteBits(0, 24);  // flags

  writer.WriteU8(aInfo.mProfile);  // profile
  writer.WriteU8(10);              // level set it to 1.0

  writer.WriteBits(aInfo.mBitDepth, 4);  // bitdepth
  writer.WriteBits(chroma, 3);           // chroma
  writer.WriteBit(aInfo.mFullRange);     // full/restricted range

  // See VPXDecoder::VPXStreamInfo enums
  writer.WriteU8(aInfo.mColorPrimaries);   // color primaries
  writer.WriteU8(aInfo.mTransferFunction);  // transfer characteristics
  writer.WriteU8(2);                        // matrix coefficients: unspecified

  writer.WriteBits(0,
                   16);  // codecIntializationDataSize (must be 0 for VP8/VP9)
}

// Populates aDestInfo (color info + 'vpcC' extradata) from a VP9 codec string
// such as "vp09.00.10.08". Returns false when the string cannot be parsed.
/* static */
bool VPXDecoder::SetVideoInfo(VideoInfo* aDestInfo, const nsAString& aCodec) {
  VPXDecoder::VPXStreamInfo info;
  uint8_t level = 0;
  uint8_t chroma = 1;
  VideoColorSpace colorSpace;
  if (!ExtractVPXCodecDetails(aCodec, info.mProfile, level, info.mBitDepth,
                              chroma, colorSpace)) {
    return false;
  }

  aDestInfo->mColorPrimaries =
      gfxUtils::CicpToColorPrimaries(colorSpace.mPrimaries, sPDMLog);
  aDestInfo->mTransferFunction =
      gfxUtils::CicpToTransferFunction(colorSpace.mTransfer);
  aDestInfo->mColorDepth = gfx::ColorDepthForBitDepth(info.mBitDepth);
  VPXDecoder::SetChroma(info, chroma);
  info.mFullRange = colorSpace.mRange == ColorRange::FULL;
  RefPtr<MediaByteBuffer> extraData = new MediaByteBuffer();
  VPXDecoder::GetVPCCBox(extraData, info);
  aDestInfo->mExtraData = extraData;
  return true;
}

// Translates a 'vpcC' chromaSubsampling value (0/1=4:2:0, 2=4:2:2, 3=4:4:4)
// into the x/y subsampling flags. Values > 3 leave aDestInfo untouched.
/* static */
void VPXDecoder::SetChroma(VPXStreamInfo& aDestInfo, uint8_t chroma) {
  switch (chroma) {
    case 0:
    case 1:
      aDestInfo.mSubSampling_x = true;
      aDestInfo.mSubSampling_y = true;
      break;
    case 2:
      aDestInfo.mSubSampling_x = true;
      aDestInfo.mSubSampling_y = false;
      break;
    case 3:
      aDestInfo.mSubSampling_x = false;
      aDestInfo.mSubSampling_y = false;
      break;
  }
}

// Parses a 'vpcC' VPCodecConfigurationBox payload into aDestInfo — the
// inverse of GetVPCCBox.
// NOTE(review): the trailing MOZ_ASSERT has a side effect (ReadBits(16));
// in builds where MOZ_ASSERT compiles away the read is skipped. Harmless
// today because the reader is discarded right after, but fragile if more
// fields are ever read past it.
/* static */
void VPXDecoder::ReadVPCCBox(VPXStreamInfo& aDestInfo, MediaByteBuffer* aBox) {
  BitReader reader(aBox);

  reader.ReadBits(8);   // version
  reader.ReadBits(24);  // flags
  aDestInfo.mProfile = reader.ReadBits(8);
  reader.ReadBits(8);  // level

  aDestInfo.mBitDepth = reader.ReadBits(4);
  SetChroma(aDestInfo, reader.ReadBits(3));
  aDestInfo.mFullRange = reader.ReadBit();

  aDestInfo.mColorPrimaries = reader.ReadBits(8);    // color primaries
  aDestInfo.mTransferFunction = reader.ReadBits(8);  // transfer characteristics
  reader.ReadBits(8);                                // matrix coefficients

  MOZ_ASSERT(reader.ReadBits(16) ==
             0);  // codecInitializationDataSize (must be 0 for VP8/VP9)
}

}  // namespace mozilla
#undef LOG