WebrtcGmpVideoCodec.cpp (39425B)
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "WebrtcGmpVideoCodec.h"

#include <utility>
#include <vector>

#include "GMPLog.h"
#include "GMPUtils.h"
#include "MainThreadUtils.h"
#include "VideoConduit.h"
#include "api/video/video_frame_type.h"
#include "common_video/include/video_frame_buffer.h"
#include "gmp-video-frame-encoded.h"
#include "gmp-video-frame-i420.h"
#include "media/base/media_constants.h"
#include "modules/video_coding/include/video_codec_interface.h"
#include "modules/video_coding/svc/create_scalability_structure.h"
#include "mozilla/CheckedInt.h"
#include "nsServiceManagerUtils.h"

namespace mozilla {

using detail::InputImageData;

// QP scaling thresholds. Handed to libwebrtc via GetEncoderInfo()'s
// ScalingSettings so its quality scaler can adapt resolution based on the
// slice QP we parse in Encoded().
static const int kLowH264QpThreshold = 24;
static const int kHighH264QpThreshold = 37;

// Encoder.

// Constructs the encoder shell only. No GMP plugin is acquired here; that
// happens asynchronously via InitEncode() -> InitEncode_g() on the GMP
// thread.
WebrtcGmpVideoEncoder::WebrtcGmpVideoEncoder(
    const webrtc::SdpVideoFormat& aFormat, std::string aPCHandle)
    : mGMP(nullptr),
      mInitting(false),
      mConfiguredBitrateKbps(0),
      mHost(nullptr),
      mMaxPayloadSize(0),
      mNeedKeyframe(true),
      // Start at the maximum so the first non-base-layer frame after a
      // keyframe is treated as a sync point; see the temporal-layer logic in
      // Encoded().
      mSyncLayerCap(webrtc::kMaxTemporalStreams),
      mFormatParams(aFormat.parameters),
      mCallbackMutex("WebrtcGmpVideoEncoder encoded callback mutex"),
      mCallback(nullptr),
      mPCHandle(std::move(aPCHandle)) {
  mCodecParams.mCodecType = kGMPVideoCodecInvalid;
  mCodecParams.mMode = kGMPCodecModeInvalid;
  mCodecParams.mLogLevel = GetGMPLibraryLogLevel();
  MOZ_ASSERT(!mPCHandle.empty());
}

WebrtcGmpVideoEncoder::~WebrtcGmpVideoEncoder() {
  // We should not have been destroyed if we never closed our GMP
  MOZ_ASSERT(!mGMP);
}

// Maps a libwebrtc frame type onto the GMP equivalent. Crashes on values not
// covered by the switch. Always returns WEBRTC_VIDEO_CODEC_OK when it
// returns at all.
static int WebrtcFrameTypeToGmpFrameType(webrtc::VideoFrameType aIn,
                                         GMPVideoFrameType* aOut) {
  MOZ_ASSERT(aOut);
  switch (aIn) {
    case webrtc::VideoFrameType::kVideoFrameKey:
      *aOut = kGMPKeyFrame;
      break;
    case webrtc::VideoFrameType::kVideoFrameDelta:
      *aOut = kGMPDeltaFrame;
      break;
    case webrtc::VideoFrameType::kEmptyFrame:
      *aOut = kGMPSkipFrame;
      break;
    default:
      MOZ_CRASH("Unexpected webrtc::FrameType");
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// Inverse of WebrtcFrameTypeToGmpFrameType. Crashes on unknown GMP frame
// types.
static int GmpFrameTypeToWebrtcFrameType(GMPVideoFrameType aIn,
                                         webrtc::VideoFrameType* aOut) {
  MOZ_ASSERT(aOut);
  switch (aIn) {
    case kGMPKeyFrame:
      *aOut = webrtc::VideoFrameType::kVideoFrameKey;
      break;
    case kGMPDeltaFrame:
      *aOut = webrtc::VideoFrameType::kVideoFrameDelta;
      break;
    case kGMPSkipFrame:
      *aOut = webrtc::VideoFrameType::kEmptyFrame;
      break;
    default:
      MOZ_CRASH("Unexpected GMPVideoFrameType");
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// Translates the configured temporal layer count (1-3) into the matching
// single-spatial-layer scalability mode. Crashes on any other layer count.
static webrtc::ScalabilityMode GmpCodecParamsToScalabilityMode(
    const GMPVideoCodec& aParams) {
  switch (aParams.mTemporalLayerNum) {
    case 1:
      return webrtc::ScalabilityMode::kL1T1;
    case 2:
      return webrtc::ScalabilityMode::kL1T2;
    case 3:
      return webrtc::ScalabilityMode::kL1T3;
    default:
      NS_WARNING(nsPrintfCString("Expected 1-3 temporal layers but got %d.\n",
                                 aParams.mTemporalLayerNum)
                     .get());
      MOZ_CRASH("Unexpected number of temporal layers");
  }
}

// webrtc::VideoEncoder entry point. Validates the settings, snapshots them
// into a GMPVideoCodec, and dispatches the actual (async) plugin
// acquisition to the GMP thread. Returns OK optimistically; init errors are
// reported later through NotifyGmpInitDone.
int32_t WebrtcGmpVideoEncoder::InitEncode(
    const webrtc::VideoCodec* aCodecSettings,
    const webrtc::VideoEncoder::Settings& aSettings) {
  // Latch the calling thread as the encode queue on first use.
  if (!mEncodeQueue) {
    mEncodeQueue.emplace(GetCurrentSerialEventTarget());
  }
  mEncodeQueue->AssertOnCurrentThread();

  if (!mMPS) {
    mMPS = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
  }
  MOZ_ASSERT(mMPS);

  if (!mGMPThread) {
    if (NS_WARN_IF(NS_FAILED(mMPS->GetThread(getter_AddRefs(mGMPThread))))) {
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
  }

  if (aCodecSettings->numberOfSimulcastStreams > 1) {
    // Simulcast not implemented for GMP-H264
    return WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED;
  }

  // Temporal SVC requires explicit plugin support.
  if (aCodecSettings->simulcastStream[0].numberOfTemporalLayers > 1 &&
      !HaveGMPFor("encode-video"_ns, {"moz-h264-temporal-svc"_ns})) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  GMPVideoCodec codecParams{};
  codecParams.mGMPApiVersion = kGMPVersion36;
  codecParams.mLogLevel = GetGMPLibraryLogLevel();
  codecParams.mStartBitrate = aCodecSettings->startBitrate;
  codecParams.mMinBitrate = aCodecSettings->minBitrate;
  codecParams.mMaxBitrate = aCodecSettings->maxBitrate;
  codecParams.mMaxFramerate = aCodecSettings->maxFramerate;
  codecParams.mFrameDroppingOn = aCodecSettings->GetFrameDropEnabled();
  codecParams.mTemporalLayerNum =
      aCodecSettings->simulcastStream[0].GetNumberOfTemporalLayers();
  if (aCodecSettings->mode == webrtc::VideoCodecMode::kScreensharing) {
    codecParams.mMode = kGMPScreensharing;
  } else {
    codecParams.mMode = kGMPRealtimeVideo;
  }
  codecParams.mWidth = aCodecSettings->width;
  codecParams.mHeight = aCodecSettings->height;

  uint32_t maxPayloadSize = aSettings.max_payload_size;
  // Packetization mode 1 fragments NALUs into FU-As, so no payload cap is
  // needed.
  if (mFormatParams.count(webrtc::kH264FmtpPacketizationMode) == 1 &&
      mFormatParams.at(webrtc::kH264FmtpPacketizationMode) == "1") {
    maxPayloadSize = 0;  // No limit, use FUAs
  }

  mConfiguredBitrateKbps = codecParams.mMaxBitrate;

  MOZ_ALWAYS_SUCCEEDS(
      mGMPThread->Dispatch(NewRunnableMethod<GMPVideoCodec, int32_t, uint32_t>(
          __func__, this, &WebrtcGmpVideoEncoder::InitEncode_g, codecParams,
          aSettings.number_of_cores, maxPayloadSize)));

  // Since init of the GMP encoder is a multi-step async dispatch (including
  // dispatches to main), and since this function is invoked on main, there's
  // no safe way to block until this init is done. If an error occurs, we'll
  // handle it later.
  return WEBRTC_VIDEO_CODEC_OK;
}

// GMP-thread half of InitEncode(): builds the SVC controller and kicks off
// the async GetGMPVideoEncoder request; InitDoneCallback completes the init.
void WebrtcGmpVideoEncoder::InitEncode_g(const GMPVideoCodec& aCodecParams,
                                         int32_t aNumberOfCores,
                                         uint32_t aMaxPayloadSize) {
  nsTArray<nsCString> tags;
  tags.AppendElement("h264"_ns);
  UniquePtr<GetGMPVideoEncoderCallback> callback(
      new InitDoneCallback(this, aCodecParams));
  mInitting = true;
  mMaxPayloadSize = aMaxPayloadSize;
  mSyncLayerCap = aCodecParams.mTemporalLayerNum;
  mSvcController = webrtc::CreateScalabilityStructure(
      GmpCodecParamsToScalabilityMode(aCodecParams));
  if (!mSvcController) {
    GMP_LOG_DEBUG(
        "GMP Encode: CreateScalabilityStructure for %d temporal layers failed",
        aCodecParams.mTemporalLayerNum);
    Close_g();
    NotifyGmpInitDone(mPCHandle, WEBRTC_VIDEO_CODEC_ERROR,
                      "GMP Encode: CreateScalabilityStructure failed");
    return;
  }
  nsresult rv =
      mMPS->GetGMPVideoEncoder(nullptr, &tags, ""_ns, std::move(callback));
  if (NS_WARN_IF(NS_FAILED(rv))) {
    GMP_LOG_DEBUG("GMP Encode: GetGMPVideoEncoder failed");
    Close_g();
    NotifyGmpInitDone(mPCHandle, WEBRTC_VIDEO_CODEC_ERROR,
                      "GMP Encode: GetGMPVideoEncoder failed");
  }
}

// Completion of the async plugin acquisition (GMP thread). On success,
// adopts the proxy/host and announces the plugin id. On failure, reports via
// aErrorOut; note the Close() call below may destroy |this|.
int32_t WebrtcGmpVideoEncoder::GmpInitDone_g(GMPVideoEncoderProxy* aGMP,
                                             GMPVideoHost* aHost,
                                             std::string* aErrorOut) {
  if (!mInitting || !aGMP || !aHost) {
    *aErrorOut =
        "GMP Encode: Either init was aborted, "
        "or init failed to supply either a GMP Encoder or GMP host.";
    if (aGMP) {
      // This could destroy us, since aGMP may be the last thing holding a ref
      // Return immediately.
      aGMP->Close();
    }
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  mInitting = false;

  // A re-init raced with an earlier plugin; drop the stale one first.
  if (mGMP && mGMP != aGMP) {
    Close_g();
  }

  mGMP = aGMP;
  mHost = aHost;
  mCachedPluginId = Some(mGMP->GetPluginId());
  mInitPluginEvent.Notify(*mCachedPluginId);
  return WEBRTC_VIDEO_CODEC_OK;
}

// Overload used for the initial InitEncode path: also stores the codec
// params and pushes them to the plugin via InitEncoderForSize.
int32_t WebrtcGmpVideoEncoder::GmpInitDone_g(GMPVideoEncoderProxy* aGMP,
                                             GMPVideoHost* aHost,
                                             const GMPVideoCodec& aCodecParams,
                                             std::string* aErrorOut) {
  int32_t r = GmpInitDone_g(aGMP, aHost, aErrorOut);
  if (r != WEBRTC_VIDEO_CODEC_OK) {
    // We might have been destroyed if GmpInitDone failed.
    // Return immediately.
    return r;
  }
  mCodecParams = aCodecParams;
  return InitEncoderForSize(aCodecParams.mWidth, aCodecParams.mHeight,
                            aErrorOut);
}

// Detaches from the current plugin (GMP thread). Clears all plugin state
// before calling Close(), because Close() may drop the last reference to
// |this|.
void WebrtcGmpVideoEncoder::Close_g() {
  GMPVideoEncoderProxy* gmp(mGMP);
  mGMP = nullptr;
  mHost = nullptr;
  mInitting = false;
  mInputImageMap.Clear();

  if (mCachedPluginId) {
    mReleasePluginEvent.Notify(*mCachedPluginId);
  }
  mCachedPluginId = Nothing();

  if (gmp) {
    // Do this last, since this could cause us to be destroyed
    gmp->Close();
  }
}

// Pushes the current codec params (at the given resolution) into the plugin.
// Runs on the GMP thread with a valid mGMP.
int32_t WebrtcGmpVideoEncoder::InitEncoderForSize(unsigned short aWidth,
                                                  unsigned short aHeight,
                                                  std::string* aErrorOut) {
  mCodecParams.mWidth = aWidth;
  mCodecParams.mHeight = aHeight;
  // Pass dummy codecSpecific data for now...
  nsTArray<uint8_t> codecSpecific;

  GMPErr err =
      mGMP->InitEncode(mCodecParams, codecSpecific, this, 1, mMaxPayloadSize);
  if (err != GMPNoErr) {
    *aErrorOut = "GMP Encode: InitEncode failed";
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// webrtc::VideoEncoder entry point (encode queue). Reports a drop when the
// bitrate is zero (encoding disabled), otherwise forwards the frame to the
// GMP thread. Never blocks.
int32_t WebrtcGmpVideoEncoder::Encode(
    const webrtc::VideoFrame& aInputImage,
    const std::vector<webrtc::VideoFrameType>* aFrameTypes) {
  mEncodeQueue->AssertOnCurrentThread();
  MOZ_ASSERT(aInputImage.width() >= 0 && aInputImage.height() >= 0);
  if (!aFrameTypes) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  if (mConfiguredBitrateKbps == 0) {
    GMP_LOG_VERBOSE("GMP Encode: not enabled");
    MutexAutoLock lock(mCallbackMutex);
    if (mCallback) {
      mCallback->OnDroppedFrame(
          webrtc::EncodedImageCallback::DropReason::kDroppedByEncoder);
    }
    return WEBRTC_VIDEO_CODEC_OK;
  }

  // It is safe to copy aInputImage here because the frame buffer is held by
  // a refptr.
  MOZ_ALWAYS_SUCCEEDS(mGMPThread->Dispatch(
      NewRunnableMethod<webrtc::VideoFrame,
                        std::vector<webrtc::VideoFrameType>>(
          __func__, this, &WebrtcGmpVideoEncoder::Encode_g, aInputImage,
          *aFrameTypes)));

  return WEBRTC_VIDEO_CODEC_OK;
}

// Tears down and re-acquires the plugin at a new resolution (GMP thread).
// Completion is async via InitDoneForResolutionChangeCallback.
void WebrtcGmpVideoEncoder::RegetEncoderForResolutionChange(uint32_t aWidth,
                                                            uint32_t aHeight) {
  Close_g();

  UniquePtr<GetGMPVideoEncoderCallback> callback(
      new InitDoneForResolutionChangeCallback(this, aWidth, aHeight));

  // OpenH264 codec (at least) can't handle dynamic input resolution changes
  // re-init the plugin when the resolution changes
  // XXX allow codec to indicate it doesn't need re-init!
  nsTArray<nsCString> tags;
  tags.AppendElement("h264"_ns);
  mInitting = true;
  if (NS_WARN_IF(NS_FAILED(mMPS->GetGMPVideoEncoder(nullptr, &tags, ""_ns,
                                                    std::move(callback))))) {
    NotifyGmpInitDone(mPCHandle, WEBRTC_VIDEO_CODEC_ERROR,
                      "GMP Encode: GetGMPVideoEncoder failed");
  }
}

// GMP-thread half of Encode(): copies the I420 data into a GMP frame,
// tracks it in mInputImageMap, and hands it to the plugin. Any early return
// before reportDroppedOnExit.release() is reported to the callback as a
// dropped frame.
void WebrtcGmpVideoEncoder::Encode_g(
    const webrtc::VideoFrame& aInputImage,
    std::vector<webrtc::VideoFrameType> aFrameTypes) {
  auto reportDroppedOnExit = MakeScopeExit([&] {
    MutexAutoLock lock(mCallbackMutex);
    if (mCallback) {
      mCallback->OnDroppedFrame(
          webrtc::EncodedImageCallback::DropReason::kDroppedByEncoder);
    }
  });

  if (!mGMP) {
    // destroyed via Terminate(), failed to init, or just not initted yet
    GMP_LOG_DEBUG("GMP Encode: not initted yet");
    return;
  }
  MOZ_ASSERT(mHost);

  if (mInputImageMap.Length() >= kMaxImagesInFlight) {
    GMP_LOG_WARNING(
        "GMP Encode: Max number of frames already in flight. Dropping this "
        "one.");
    return;
  }

  if (static_cast<uint32_t>(aInputImage.width()) != mCodecParams.mWidth ||
      static_cast<uint32_t>(aInputImage.height()) != mCodecParams.mHeight) {
    GMP_LOG_DEBUG("GMP Encode: resolution change from %ux%u to %dx%d",
                  mCodecParams.mWidth, mCodecParams.mHeight,
                  aInputImage.width(), aInputImage.height());

    mNeedKeyframe = true;
    RegetEncoderForResolutionChange(aInputImage.width(), aInputImage.height());
    if (!mGMP) {
      // We needed to go async to re-get the encoder. Bail.
      return;
    }
  }

  GMPVideoFrame* ftmp = nullptr;
  GMPErr err = mHost->CreateFrame(kGMPI420VideoFrame, &ftmp);
  if (err != GMPNoErr) {
    GMP_LOG_DEBUG("GMP Encode: failed to create frame on host");
    return;
  }
  GMPUniquePtr<GMPVideoi420Frame> frame(static_cast<GMPVideoi420Frame*>(ftmp));
  const webrtc::I420BufferInterface* input_image =
      aInputImage.video_frame_buffer()->GetI420();
  // check for overflow of stride * height
  CheckedInt32 ysize =
      CheckedInt32(input_image->StrideY()) * input_image->height();
  MOZ_RELEASE_ASSERT(ysize.isValid());
  // I will assume that if that doesn't overflow, the others case - YUV
  // 4:2:0 has U/V widths <= Y, even with alignment issues.
  err = frame->CreateFrame(
      ysize.value(), input_image->DataY(),
      input_image->StrideU() * ((input_image->height() + 1) / 2),
      input_image->DataU(),
      input_image->StrideV() * ((input_image->height() + 1) / 2),
      input_image->DataV(), input_image->width(), input_image->height(),
      input_image->StrideY(), input_image->StrideU(), input_image->StrideV());
  if (err != GMPNoErr) {
    GMP_LOG_DEBUG("GMP Encode: failed to create frame");
    return;
  }
  // GMP timestamps are microseconds; the NTP time is milliseconds.
  const auto gmpTimestamp =
      AssertedCast<uint64_t>(aInputImage.ntp_time_ms() * 1000);
  frame->SetTimestamp(gmpTimestamp);

  GMPCodecSpecificInfo info{};
  info.mCodecType = kGMPVideoCodecH264;
  nsTArray<uint8_t> codecSpecificInfo;
  codecSpecificInfo.AppendElements((uint8_t*)&info,
                                   sizeof(GMPCodecSpecificInfo));

  nsTArray<GMPVideoFrameType> gmp_frame_types;
  for (const auto& frameType : aFrameTypes) {
    GMPVideoFrameType ft;

    // A pending keyframe request overrides whatever webrtc asked for.
    if (mNeedKeyframe) {
      ft = kGMPKeyFrame;
    } else {
      int32_t ret = WebrtcFrameTypeToGmpFrameType(frameType, &ft);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        GMP_LOG_DEBUG(
            "GMP Encode: failed to map webrtc frame type to gmp frame type");
        return;
      }
    }

    gmp_frame_types.AppendElement(ft);
  }
  mNeedKeyframe = false;

  // NOTE(review): this indexes gmp_frame_types[0] and so assumes aFrameTypes
  // is non-empty — confirm callers always pass at least one frame type.
  auto frameConfigs =
      mSvcController->NextFrameConfig(gmp_frame_types[0] == kGMPKeyFrame);
  MOZ_ASSERT(frameConfigs.size() == 1);

  // The map is ordered by timestamp; enforce monotonicity on insert.
  MOZ_RELEASE_ASSERT(mInputImageMap.IsEmpty() ||
                     mInputImageMap.LastElement().ntp_timestamp_ms <
                         aInputImage.ntp_time_ms());

  GMP_LOG_DEBUG("GMP Encode: %" PRIu64, (frame->Timestamp()));
  err = mGMP->Encode(std::move(frame), codecSpecificInfo, gmp_frame_types);
  if (err != GMPNoErr) {
    GMP_LOG_DEBUG("GMP Encode: failed to encode frame");
    return;
  }

  // Once in mInputImageMap, frame drops are reported by GMP callbacks
  // (Encoded/Dropped).
  reportDroppedOnExit.release();
  mInputImageMap.AppendElement(
      InputImageData{.gmp_timestamp_us = gmpTimestamp,
                     .ntp_timestamp_ms = aInputImage.ntp_time_ms(),
                     .timestamp_us = aInputImage.timestamp_us(),
                     .rtp_timestamp = aInputImage.rtp_timestamp(),
                     .frame_config = frameConfigs[0]});
}

// Registers (or clears, with nullptr) the sink for encoded images and drop
// notifications. Thread-safe via mCallbackMutex.
int32_t WebrtcGmpVideoEncoder::RegisterEncodeCompleteCallback(
    webrtc::EncodedImageCallback* aCallback) {
  MutexAutoLock lock(mCallbackMutex);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

// Releases the encoder: clears the callback immediately and closes the
// plugin asynchronously on the GMP thread.
int32_t WebrtcGmpVideoEncoder::Shutdown() {
  GMP_LOG_DEBUG("GMP Released:");
  RegisterEncodeCompleteCallback(nullptr);

  if (mGMPThread) {
    MOZ_ALWAYS_SUCCEEDS(mGMPThread->Dispatch(
        NewRunnableMethod(__func__, this, &WebrtcGmpVideoEncoder::Close_g)));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// webrtc::VideoEncoder entry point (encode queue). Records the new target
// bitrate and forwards the rate update to the GMP thread.
int32_t WebrtcGmpVideoEncoder::SetRates(
    const webrtc::VideoEncoder::RateControlParameters& aParameters) {
  mEncodeQueue->AssertOnCurrentThread();
  MOZ_ASSERT(mGMPThread);
  MOZ_ASSERT(!aParameters.bitrate.IsSpatialLayerUsed(1),
             "No simulcast support for H264");
  auto old = mConfiguredBitrateKbps;
  mConfiguredBitrateKbps = aParameters.bitrate.GetSpatialLayerSum(0) / 1000;
  MOZ_ALWAYS_SUCCEEDS(
      mGMPThread->Dispatch(NewRunnableMethod<uint32_t, uint32_t, Maybe<double>>(
          __func__, this, &WebrtcGmpVideoEncoder::SetRates_g, old,
          mConfiguredBitrateKbps,
          aParameters.framerate_fps > 0.0 ? Some(aParameters.framerate_fps)
                                          : Nothing())));

  return WEBRTC_VIDEO_CODEC_OK;
}

// Static capability description reported to libwebrtc.
WebrtcVideoEncoder::EncoderInfo WebrtcGmpVideoEncoder::GetEncoderInfo() const {
  WebrtcVideoEncoder::EncoderInfo info;
  info.supports_native_handle = false;
  info.implementation_name = "GMPOpenH264";
  info.scaling_settings = WebrtcVideoEncoder::ScalingSettings(
      kLowH264QpThreshold, kHighH264QpThreshold);
  info.is_hardware_accelerated = false;
  info.supports_simulcast = false;
  return info;
}

// GMP-thread half of SetRates(). Requests a keyframe when coming back from a
// zero (disabled) bitrate, then pushes bitrate/framerate to the plugin.
int32_t WebrtcGmpVideoEncoder::SetRates_g(uint32_t aOldBitRateKbps,
                                          uint32_t aNewBitRateKbps,
                                          Maybe<double> aFrameRate) {
  if (!mGMP) {
    // destroyed via Terminate()
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  GMP_LOG_DEBUG("GMP Encoder %p setting rate %ukbps", this, aNewBitRateKbps);

  mNeedKeyframe |= (aOldBitRateKbps == 0 && aNewBitRateKbps != 0);

  GMPErr err = mGMP->SetRates(
      aNewBitRateKbps, aFrameRate
                           .map([](double aFr) {
                             // Avoid rounding to 0
                             return std::max(1U, static_cast<uint32_t>(aFr));
                           })
                           .valueOr(mCodecParams.mMaxFramerate));
  if (err != GMPNoErr) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// GMPVideoEncoderCallback virtual functions.

// Called by GMP when the plugin dies. Mirrors Close_g() but keeps the cached
// plugin id (no release notification here).
void WebrtcGmpVideoEncoder::Terminated() {
  GMP_LOG_DEBUG("GMP Encoder Terminated: %p", (void*)this);

  GMPVideoEncoderProxy* gmp(mGMP);
  mGMP = nullptr;
  mHost = nullptr;
  mInitting = false;
  mInputImageMap.Clear();

  if (gmp) {
    // Do this last, since this could cause us to be destroyed
    gmp->Close();
  }

  // Could now notify that it's dead
}

// Three-way comparator on gmp_timestamp_us for the sorted mInputImageMap
// (-1 when a < b, 0 when equal, 1 when a > b).
static int32_t GmpTimestampComparator(const InputImageData& aA,
                                      const InputImageData& aB) {
  const auto& a = aA.gmp_timestamp_us;
  const auto& b = aB.gmp_timestamp_us;
  return a < b ? -1 : a != b;
}

// Called by GMP (on the GMP thread) with an encoded frame. Matches it back
// to its input-image record, reports any frames the plugin silently skipped
// as drops, fixes up SVC frame-config state for unsolicited keyframes, and
// forwards the result to the registered EncodedImageCallback.
void WebrtcGmpVideoEncoder::Encoded(
    GMPVideoEncodedFrame* aEncodedFrame,
    const nsTArray<uint8_t>& aCodecSpecificInfo) {
  MOZ_ASSERT(mGMPThread->IsOnCurrentThread());
  Maybe<InputImageData> data;
  MOZ_ASSERT(!mInputImageMap.IsEmpty());
  MOZ_ASSERT(mInputImageMap.Length() <= kMaxImagesInFlight);
  // All entries strictly older than this frame were skipped by the plugin;
  // the entry with an equal timestamp (if present) is this frame's input.
  size_t nextIdx = mInputImageMap.IndexOfFirstElementGt(
      InputImageData{.gmp_timestamp_us = aEncodedFrame->TimeStamp()},
      GmpTimestampComparator);
  const size_t numToRemove = nextIdx;
  size_t numFramesDropped = numToRemove;
  MOZ_ASSERT(nextIdx != 0);
  if (nextIdx != 0 && mInputImageMap.ElementAt(nextIdx - 1).gmp_timestamp_us ==
                          aEncodedFrame->TimeStamp()) {
    --numFramesDropped;
    data = Some(mInputImageMap.ElementAt(nextIdx - 1));
  }
  mInputImageMap.RemoveElementsAt(0, numToRemove);

  webrtc::VideoFrameType frt;
  GmpFrameTypeToWebrtcFrameType(aEncodedFrame->FrameType(), &frt);
  MOZ_ASSERT_IF(mCodecParams.mTemporalLayerNum > 1 &&
                    aEncodedFrame->FrameType() == kGMPKeyFrame,
                aEncodedFrame->GetTemporalLayerId() == 0);
  // NOTE(review): |data| is dereferenced here before the isNothing() check
  // further below; if the plugin ever reported a timestamp with no matching
  // map entry, this would be UB in release builds (asserts compiled out) —
  // confirm the plugin contract guarantees a match.
  if (aEncodedFrame->FrameType() == kGMPKeyFrame &&
      !data->frame_config.IsKeyframe()) {
    GMP_LOG_WARNING("GMP Encoded non-requested keyframe at t=%" PRIu64,
                    aEncodedFrame->TimeStamp());
    // If there could be multiple encode jobs in flight this would be racy.
    auto frameConfigs = mSvcController->NextFrameConfig(/* restart =*/true);
    MOZ_ASSERT(frameConfigs.size() == 1);
    data->frame_config = frameConfigs[0];
  }

  MOZ_ASSERT((aEncodedFrame->FrameType() == kGMPKeyFrame) ==
             data->frame_config.IsKeyframe());
  MOZ_ASSERT_IF(
      mCodecParams.mTemporalLayerNum > 1,
      aEncodedFrame->GetTemporalLayerId() == data->frame_config.TemporalId());

  MutexAutoLock lock(mCallbackMutex);
  if (!mCallback) {
    return;
  }

  for (size_t i = 0; i < numFramesDropped; ++i) {
    mCallback->OnDroppedFrame(
        webrtc::EncodedImageCallback::DropReason::kDroppedByEncoder);
  }

  if (data.isNothing()) {
    MOZ_ASSERT_UNREACHABLE(
        "Unexpectedly didn't find an input image for this encoded frame");
    return;
  }

  webrtc::VideoFrameType ft;
  GmpFrameTypeToWebrtcFrameType(aEncodedFrame->FrameType(), &ft);

  GMP_LOG_DEBUG("GMP Encoded: %" PRIu64 ", type %d, len %d",
                aEncodedFrame->TimeStamp(), aEncodedFrame->BufferType(),
                aEncodedFrame->Size());

  // Libwebrtc's RtpPacketizerH264 expects a 3- or 4-byte NALU start sequence
  // before the start of the NALU payload. {0,0,1} or {0,0,0,1}. We set this
  // in-place. Any other length of the length field we reject.
  if (NS_WARN_IF(!AdjustOpenH264NALUSequence(aEncodedFrame))) {
    return;
  }

  webrtc::EncodedImage unit;
  unit.SetEncodedData(webrtc::EncodedImageBuffer::Create(
      aEncodedFrame->Buffer(), aEncodedFrame->Size()));
  unit._frameType = ft;
  unit.SetRtpTimestamp(data->rtp_timestamp);
  unit.capture_time_ms_ = webrtc::Timestamp::Micros(data->timestamp_us).ms();
  unit.ntp_time_ms_ = data->ntp_timestamp_ms;
  unit._encodedWidth = aEncodedFrame->EncodedWidth();
  unit._encodedHeight = aEncodedFrame->EncodedHeight();

  webrtc::CodecSpecificInfo info;
#ifdef __LP64__
  // Only do these checks on some common builds to avoid build issues on more
  // exotic flavors.
  static_assert(
      sizeof(info.codecSpecific.H264) == 8,
      "webrtc::CodecSpecificInfoH264 has changed. We must handle the changes.");
  static_assert(
      sizeof(info) - sizeof(info.codecSpecific) -
              sizeof(info.generic_frame_info) -
              sizeof(info.template_structure) -
              sizeof(info.frame_instrumentation_data) ==
          24,
      "webrtc::CodecSpecificInfo's generic bits have changed. We must handle "
      "the changes.");
#endif
  info.codecType = webrtc::kVideoCodecH264;
  info.codecSpecific = {};
  info.codecSpecific.H264.packetization_mode =
      mFormatParams.count(webrtc::kH264FmtpPacketizationMode) == 1 &&
              mFormatParams.at(webrtc::kH264FmtpPacketizationMode) == "1"
          ? webrtc::H264PacketizationMode::NonInterleaved
          : webrtc::H264PacketizationMode::SingleNalUnit;
  info.codecSpecific.H264.temporal_idx = webrtc::kNoTemporalIdx;
  info.codecSpecific.H264.base_layer_sync = false;
  info.codecSpecific.H264.idr_frame =
      ft == webrtc::VideoFrameType::kVideoFrameKey;
  info.generic_frame_info = mSvcController->OnEncodeDone(data->frame_config);
  if (info.codecSpecific.H264.idr_frame &&
      info.generic_frame_info.has_value()) {
    info.template_structure = mSvcController->DependencyStructure();
  }

  if (mCodecParams.mTemporalLayerNum > 1) {
    int temporalIdx = std::max(0, aEncodedFrame->GetTemporalLayerId());
    unit.SetTemporalIndex(temporalIdx);
    info.codecSpecific.H264.temporal_idx = temporalIdx;
    info.scalability_mode = GmpCodecParamsToScalabilityMode(mCodecParams);

    if (temporalIdx == 0) {
      // Base layer. Reset the sync layer tracking.
      mSyncLayerCap = mCodecParams.mTemporalLayerNum;
    } else {
      // Decrease the sync layer tracking. base_layer_sync per upstream code
      // shall be true iff the layer in question only depends on layer 0, i.e.
      // the base layer. Note in L1T3 the frame dependencies (and cap) are:
      //       | Temporal | Dependency |       |
      // Frame | Layer    | Frame      | Sync? | Cap
      // ===============================================
      //   0   |    0     |     0      | False | _ -> 3
      //   1   |    2     |     0      | True  | 3 -> 2
      //   2   |    1     |     0      | True  | 2 -> 1
      //   3   |    2     |     1      | False | 1 -> 2
      info.codecSpecific.H264.base_layer_sync = temporalIdx < mSyncLayerCap;
      mSyncLayerCap = temporalIdx;
    }
  }

  // Parse QP.
  mH264BitstreamParser.ParseBitstream(unit);
  unit.qp_ = mH264BitstreamParser.GetLastSliceQp().value_or(-1);

  mCallback->OnEncodedImage(unit, &info);
}

// Called by GMP (on the GMP thread) when the plugin explicitly drops the
// frame at aTimestamp. Also drains and reports all older pending entries.
void WebrtcGmpVideoEncoder::Dropped(uint64_t aTimestamp) {
  MOZ_ASSERT(mGMPThread->IsOnCurrentThread());
  MOZ_ASSERT(!mInputImageMap.IsEmpty());
  MOZ_ASSERT(mInputImageMap.Length() <= kMaxImagesInFlight);

  size_t nextIdx = mInputImageMap.IndexOfFirstElementGt(
      InputImageData{.gmp_timestamp_us = aTimestamp}, GmpTimestampComparator);
  const size_t numDropped = nextIdx;
  MOZ_ASSERT(nextIdx != 0);
  MOZ_ASSERT(mInputImageMap.ElementAt(nextIdx - 1).gmp_timestamp_us ==
             aTimestamp);
  mInputImageMap.RemoveElementsAt(0, numDropped);

  GMP_LOG_DEBUG("GMP Dropped: %" PRIu64
                " dropped by encoder. Reporting %u frames dropped.",
                aTimestamp, static_cast<uint32_t>(numDropped));

  MutexAutoLock lock(mCallbackMutex);
  if (!mCallback) {
    return;
  }

  for (size_t i = 0; i < numDropped; ++i) {
    mCallback->OnDroppedFrame(
        webrtc::EncodedImageCallback::DropReason::kDroppedByEncoder);
  }
}

// Decoder.
// Constructs the decoder shell. No GMP plugin is acquired here; that happens
// asynchronously via Configure() -> Configure_g() on the GMP thread.
WebrtcGmpVideoDecoder::WebrtcGmpVideoDecoder(std::string aPCHandle,
                                             TrackingId aTrackingId)
    : mGMP(nullptr),
      mInitting(false),
      mHost(nullptr),
      mCallbackMutex("WebrtcGmpVideoDecoder decoded callback mutex"),
      mCallback(nullptr),
      mDecoderStatus(GMPNoErr),
      mPCHandle(std::move(aPCHandle)),
      mTrackingId(std::move(aTrackingId)) {
  MOZ_ASSERT(!mPCHandle.empty());
}

WebrtcGmpVideoDecoder::~WebrtcGmpVideoDecoder() {
  // We should not have been destroyed if we never closed our GMP
  MOZ_ASSERT(!mGMP);
}

// webrtc::VideoDecoder entry point. Grabs the plugin service and GMP thread,
// then dispatches the async plugin acquisition. Returns true optimistically;
// init errors are reported later through NotifyGmpInitDone.
bool WebrtcGmpVideoDecoder::Configure(
    const webrtc::VideoDecoder::Settings& settings) {
  if (!mMPS) {
    mMPS = do_GetService("@mozilla.org/gecko-media-plugin-service;1");
  }
  MOZ_ASSERT(mMPS);

  if (!mGMPThread) {
    if (NS_WARN_IF(NS_FAILED(mMPS->GetThread(getter_AddRefs(mGMPThread))))) {
      return false;
    }
  }

  MOZ_ALWAYS_SUCCEEDS(
      mGMPThread->Dispatch(NewRunnableMethod<webrtc::VideoDecoder::Settings>(
          __func__, this, &WebrtcGmpVideoDecoder::Configure_g, settings)));

  return true;
}

// GMP-thread half of Configure(): requests an H264 decoder plugin;
// InitDoneCallback completes the init via GmpInitDone_g.
void WebrtcGmpVideoDecoder::Configure_g(
    const webrtc::VideoDecoder::Settings& settings) {
  nsTArray<nsCString> tags;
  tags.AppendElement("h264"_ns);
  UniquePtr<GetGMPVideoDecoderCallback> callback(new InitDoneCallback(this));
  mInitting = true;
  nsresult rv =
      mMPS->GetGMPVideoDecoder(nullptr, &tags, ""_ns, std::move(callback));
  if (NS_WARN_IF(NS_FAILED(rv))) {
    GMP_LOG_DEBUG("GMP Decode: GetGMPVideoDecoder failed");
    Close_g();
    NotifyGmpInitDone(mPCHandle, WEBRTC_VIDEO_CODEC_ERROR,
                      "GMP Decode: GetGMPVideoDecoder failed.");
  }
}

// Completion of the async plugin acquisition (GMP thread). Adopts the
// proxy/host, initializes the plugin decoder, and replays any frames that
// were queued while init was in flight. Note the Close() call below may
// destroy |this|.
int32_t WebrtcGmpVideoDecoder::GmpInitDone_g(GMPVideoDecoderProxy* aGMP,
                                             GMPVideoHost* aHost,
                                             std::string* aErrorOut) {
  if (!mInitting || !aGMP || !aHost) {
    *aErrorOut =
        "GMP Decode: Either init was aborted, "
        "or init failed to supply either a GMP decoder or GMP host.";
    if (aGMP) {
      // This could destroy us, since aGMP may be the last thing holding a ref
      // Return immediately.
      aGMP->Close();
    }
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  mInitting = false;

  // A re-init raced with an earlier plugin; drop the stale one first.
  if (mGMP && mGMP != aGMP) {
    Close_g();
  }

  mGMP = aGMP;
  mHost = aHost;
  mCachedPluginId = Some(mGMP->GetPluginId());
  mInitPluginEvent.Notify(*mCachedPluginId);

  GMPVideoCodec codec{};
  codec.mGMPApiVersion = kGMPVersion34;
  codec.mLogLevel = GetGMPLibraryLogLevel();

  // XXX this is currently a hack
  // GMPVideoCodecUnion codecSpecific;
  // memset(&codecSpecific, 0, sizeof(codecSpecific));
  nsTArray<uint8_t> codecSpecific;
  nsresult rv = mGMP->InitDecode(codec, codecSpecific, this, 1);
  if (NS_FAILED(rv)) {
    *aErrorOut = "GMP Decode: InitDecode failed";
    mQueuedFrames.Clear();
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  // now release any frames that got queued waiting for InitDone
  if (!mQueuedFrames.IsEmpty()) {
    // So we're safe to call Decode_g(), which asserts it's empty
    nsTArray<UniquePtr<GMPDecodeData>> temp = std::move(mQueuedFrames);
    for (auto& queued : temp) {
      Decode_g(std::move(queued));
    }
  }

  // This is an ugly solution to asynchronous decoding errors
  // from Decode_g() not being returned to the synchronous Decode() method.
  // If we don't return an error code at this point, our caller ultimately won't
  // know to request a PLI and the video stream will remain frozen unless an IDR
  // happens to arrive for other reasons. Bug 1492852 tracks implementing a
  // proper solution.
  if (mDecoderStatus != GMPNoErr) {
    GMP_LOG_ERROR("%s: Decoder status is bad (%u)!", __PRETTY_FUNCTION__,
                  static_cast<unsigned>(mDecoderStatus));
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// Detaches from the current plugin (GMP thread). Clears all plugin state
// before calling Close(), because Close() may drop the last reference to
// |this|.
void WebrtcGmpVideoDecoder::Close_g() {
  GMPVideoDecoderProxy* gmp(mGMP);
  mGMP = nullptr;
  mHost = nullptr;
  mInitting = false;

  if (mCachedPluginId) {
    mReleasePluginEvent.Notify(*mCachedPluginId);
  }
  mCachedPluginId = Nothing();

  if (gmp) {
    // Do this last, since this could cause us to be destroyed
    gmp->Close();
  }
}

// webrtc::VideoDecoder entry point (decode thread). Copies the encoded image
// into a GMPDecodeData and forwards it to the GMP thread. The status check
// at the bottom reports errors from *previous* async Decode_g runs.
int32_t WebrtcGmpVideoDecoder::Decode(const webrtc::EncodedImage& aInputImage,
                                      bool aMissingFrames,
                                      int64_t aRenderTimeMs) {
  MOZ_ASSERT(mGMPThread);
  MOZ_ASSERT(!NS_IsMainThread());
  if (!aInputImage.size()) {
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  MediaInfoFlag flag = MediaInfoFlag::None;
  flag |= (aInputImage._frameType == webrtc::VideoFrameType::kVideoFrameKey
               ? MediaInfoFlag::KeyFrame
               : MediaInfoFlag::NonKeyFrame);
  flag |= MediaInfoFlag::SoftwareDecoding;
  flag |= MediaInfoFlag::VIDEO_H264;
  // RTP timestamps are in 90kHz units; convert to microseconds.
  mPerformanceRecorder.Start((aInputImage.RtpTimestamp() * 1000ll) / 90,
                             "WebrtcGmpVideoDecoder"_ns, mTrackingId, flag);

  // This is an ugly solution to asynchronous decoding errors
  // from Decode_g() not being returned to the synchronous Decode() method.
  // If we don't return an error code at this point, our caller ultimately won't
  // know to request a PLI and the video stream will remain frozen unless an IDR
  // happens to arrive for other reasons. Bug 1492852 tracks implementing a
  // proper solution.
  auto decodeData =
      MakeUnique<GMPDecodeData>(aInputImage, aMissingFrames, aRenderTimeMs);

  MOZ_ALWAYS_SUCCEEDS(
      mGMPThread->Dispatch(NewRunnableMethod<UniquePtr<GMPDecodeData>&&>(
          __func__, this, &WebrtcGmpVideoDecoder::Decode_g,
          std::move(decodeData))));

  if (mDecoderStatus != GMPNoErr) {
    GMP_LOG_ERROR("%s: Decoder status is bad (%u)!", __PRETTY_FUNCTION__,
                  static_cast<unsigned>(mDecoderStatus));
    return WEBRTC_VIDEO_CODEC_ERROR;
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// GMP-thread half of Decode(): copies the bitstream into a GMP encoded frame
// (queueing it instead if init is still in flight) and submits it to the
// plugin. Failures are latched into mDecoderStatus for the next Decode().
void WebrtcGmpVideoDecoder::Decode_g(UniquePtr<GMPDecodeData>&& aDecodeData) {
  if (!mGMP) {
    if (mInitting) {
      // InitDone hasn't been called yet (race)
      mQueuedFrames.AppendElement(std::move(aDecodeData));
      return;
    }
    // destroyed via Terminate(), failed to init, or just not initted yet
    GMP_LOG_DEBUG("GMP Decode: not initted yet");

    mDecoderStatus = GMPDecodeErr;
    return;
  }

  MOZ_ASSERT(mQueuedFrames.IsEmpty());
  MOZ_ASSERT(mHost);

  GMPVideoFrame* ftmp = nullptr;
  GMPErr err = mHost->CreateFrame(kGMPEncodedVideoFrame, &ftmp);
  if (err != GMPNoErr) {
    GMP_LOG_ERROR("%s: CreateFrame failed (%u)!", __PRETTY_FUNCTION__,
                  static_cast<unsigned>(err));
    mDecoderStatus = err;
    return;
  }

  GMPUniquePtr<GMPVideoEncodedFrame> frame(
      static_cast<GMPVideoEncodedFrame*>(ftmp));
  err = frame->CreateEmptyFrame(aDecodeData->mImage.size());
  if (err != GMPNoErr) {
    GMP_LOG_ERROR("%s: CreateEmptyFrame failed (%u)!", __PRETTY_FUNCTION__,
                  static_cast<unsigned>(err));
    mDecoderStatus = err;
    return;
  }

  // XXX At this point, we only will get mode1 data (a single length and a
  // buffer) Session_info.cc/etc code needs to change to support mode 0.
  // Overwrite the first 4 bytes with a 32-bit length prefix
  // (GMP_BufferLength32 framing).
  *(reinterpret_cast<uint32_t*>(frame->Buffer())) = frame->Size();

  // XXX It'd be wonderful not to have to memcpy the encoded data!
  // NOTE(review): assumes the image is at least 4 bytes (single NALU with a
  // 3/4-byte start code) — confirm upstream guarantees this beyond the
  // size()!=0 check in Decode().
  memcpy(frame->Buffer() + 4, aDecodeData->mImage.data() + 4,
         frame->Size() - 4);

  frame->SetEncodedWidth(aDecodeData->mImage._encodedWidth);
  frame->SetEncodedHeight(aDecodeData->mImage._encodedHeight);
  // RTP timestamps are in 90kHz units; GMP wants microseconds.
  frame->SetTimeStamp((aDecodeData->mImage.RtpTimestamp() * 1000ll) /
                      90);  // rounds down
  frame->SetCompleteFrame(
      true);  // upstream no longer deals with incomplete frames
  frame->SetBufferType(GMP_BufferLength32);

  GMPVideoFrameType ft;
  int32_t ret =
      WebrtcFrameTypeToGmpFrameType(aDecodeData->mImage._frameType, &ft);
  if (ret != WEBRTC_VIDEO_CODEC_OK) {
    GMP_LOG_ERROR("%s: WebrtcFrameTypeToGmpFrameType failed (%u)!",
                  __PRETTY_FUNCTION__, static_cast<unsigned>(ret));
    mDecoderStatus = GMPDecodeErr;
    return;
  }

  GMPCodecSpecificInfo info{};
  info.mCodecType = kGMPVideoCodecH264;
  info.mCodecSpecific.mH264.mSimulcastIdx = 0;
  nsTArray<uint8_t> codecSpecificInfo;
  codecSpecificInfo.AppendElements((uint8_t*)&info,
                                   sizeof(GMPCodecSpecificInfo));

  GMP_LOG_DEBUG("GMP Decode: %" PRIu64 ", len %zu%s", frame->TimeStamp(),
                aDecodeData->mImage.size(),
                ft == kGMPKeyFrame ? ", KeyFrame" : "");

  nsresult rv = mGMP->Decode(std::move(frame), aDecodeData->mMissingFrames,
                             codecSpecificInfo, aDecodeData->mRenderTimeMs);
  if (NS_FAILED(rv)) {
    GMP_LOG_ERROR("%s: Decode failed (rv=%u)!", __PRETTY_FUNCTION__,
                  static_cast<unsigned>(rv));
    mDecoderStatus = GMPDecodeErr;
    return;
  }

  mDecoderStatus = GMPNoErr;
}

// Registers (or clears, with nullptr) the sink for decoded frames.
// Thread-safe via mCallbackMutex.
int32_t WebrtcGmpVideoDecoder::RegisterDecodeCompleteCallback(
    webrtc::DecodedImageCallback* aCallback) {
  MutexAutoLock lock(mCallbackMutex);
  mCallback = aCallback;

  return WEBRTC_VIDEO_CODEC_OK;
}

// Releases the decoder: clears the callback immediately and closes the
// plugin asynchronously on the GMP thread.
int32_t WebrtcGmpVideoDecoder::ReleaseGmp() {
  GMP_LOG_DEBUG("GMP Released:");
  RegisterDecodeCompleteCallback(nullptr);

  if (mGMPThread) {
    MOZ_ALWAYS_SUCCEEDS(mGMPThread->Dispatch(
        NewRunnableMethod(__func__, this, &WebrtcGmpVideoDecoder::Close_g)));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

// Called by GMP when the plugin dies. Mirrors Close_g() but keeps the cached
// plugin id (no release notification here).
void WebrtcGmpVideoDecoder::Terminated() {
  GMP_LOG_DEBUG("GMP Decoder Terminated: %p", (void*)this);

  GMPVideoDecoderProxy* gmp(mGMP);
  mGMP = nullptr;
  mHost = nullptr;
  mInitting = false;

  if (gmp) {
    // Do this last, since this could cause us to be destroyed
    gmp->Close();
  }

  // Could now notify that it's dead
}

// Called by GMP (on the GMP thread) with a decoded I420 frame.
// (Body continues beyond this view.)
void WebrtcGmpVideoDecoder::Decoded(GMPVideoi420Frame* aDecodedFrame) {
  // we have two choices here: wrap the frame with a callback that frees
  // the data later (risking running out of shmems), or copy the data out
  // always. Also, we can only Destroy() the frame on the gmp thread, so
  // copying is simplest if expensive.
  // I420 size including rounding...
1038 CheckedInt32 length = 1039 (CheckedInt32(aDecodedFrame->Stride(kGMPYPlane)) * 1040 aDecodedFrame->Height()) + 1041 (aDecodedFrame->Stride(kGMPVPlane) + aDecodedFrame->Stride(kGMPUPlane)) * 1042 ((aDecodedFrame->Height() + 1) / 2); 1043 int32_t size = length.value(); 1044 MOZ_RELEASE_ASSERT(length.isValid() && size > 0); 1045 1046 // Don't use MakeUniqueFallible here, because UniquePtr isn't copyable, and 1047 // the closure below in WrapI420Buffer uses std::function which _is_ copyable. 1048 // We'll alloc the buffer here, so we preserve the "fallible" nature, and 1049 // then hand a shared_ptr, which is copyable, to WrapI420Buffer. 1050 auto* falliblebuffer = new (std::nothrow) uint8_t[size]; 1051 if (falliblebuffer) { 1052 auto buffer = std::shared_ptr<uint8_t>(falliblebuffer); 1053 1054 // This is 3 separate buffers currently anyways, no use in trying to 1055 // see if we can use a single memcpy. 1056 uint8_t* buffer_y = buffer.get(); 1057 memcpy(buffer_y, aDecodedFrame->Buffer(kGMPYPlane), 1058 aDecodedFrame->Stride(kGMPYPlane) * aDecodedFrame->Height()); 1059 // Should this be aligned, making it non-contiguous? Assume no, this is 1060 // already factored into the strides. 1061 uint8_t* buffer_u = 1062 buffer_y + aDecodedFrame->Stride(kGMPYPlane) * aDecodedFrame->Height(); 1063 memcpy(buffer_u, aDecodedFrame->Buffer(kGMPUPlane), 1064 aDecodedFrame->Stride(kGMPUPlane) * 1065 ((aDecodedFrame->Height() + 1) / 2)); 1066 uint8_t* buffer_v = buffer_u + aDecodedFrame->Stride(kGMPUPlane) * 1067 ((aDecodedFrame->Height() + 1) / 2); 1068 memcpy(buffer_v, aDecodedFrame->Buffer(kGMPVPlane), 1069 aDecodedFrame->Stride(kGMPVPlane) * 1070 ((aDecodedFrame->Height() + 1) / 2)); 1071 1072 MutexAutoLock lock(mCallbackMutex); 1073 if (mCallback) { 1074 // Note: the last parameter to WrapI420Buffer is named no_longer_used, 1075 // but is currently called in the destructor of WrappedYuvBuffer when 1076 // the buffer is "no_longer_used". 
1077 webrtc::scoped_refptr<webrtc::I420BufferInterface> video_frame_buffer = 1078 webrtc::WrapI420Buffer( 1079 aDecodedFrame->Width(), aDecodedFrame->Height(), buffer_y, 1080 aDecodedFrame->Stride(kGMPYPlane), buffer_u, 1081 aDecodedFrame->Stride(kGMPUPlane), buffer_v, 1082 aDecodedFrame->Stride(kGMPVPlane), [buffer] {}); 1083 1084 GMP_LOG_DEBUG("GMP Decoded: %" PRIu64, aDecodedFrame->Timestamp()); 1085 auto videoFrame = 1086 webrtc::VideoFrame::Builder() 1087 .set_video_frame_buffer(video_frame_buffer) 1088 .set_timestamp_rtp( 1089 // round up 1090 (aDecodedFrame->UpdatedTimestamp() * 90ll + 999) / 1000) 1091 .build(); 1092 mPerformanceRecorder.Record( 1093 static_cast<int64_t>(aDecodedFrame->Timestamp()), 1094 [&](DecodeStage& aStage) { 1095 aStage.SetImageFormat(DecodeStage::YUV420P); 1096 aStage.SetResolution(aDecodedFrame->Width(), 1097 aDecodedFrame->Height()); 1098 aStage.SetColorDepth(gfx::ColorDepth::COLOR_8); 1099 }); 1100 mCallback->Decoded(videoFrame); 1101 } 1102 } 1103 aDecodedFrame->Destroy(); 1104 } 1105 1106 } // namespace mozilla