commit 2166da6f01a3820fdcf611af4166603e22404ad9
parent 7f080c41c94daf2043ff2acad671050833ac8c8b
Author: Michael Froman <mfroman@mozilla.com>
Date: Wed, 8 Oct 2025 16:53:01 -0500
Bug 1993083 - Vendor libwebrtc from c7bcf5ba9a
Upstream commit: https://webrtc.googlesource.com/src/+/c7bcf5ba9aaa3d8e769a74fdfa86aaa3faeb384e
Add VideoFrameSampler helper class to abstract away buffer formats.
This is a prerequisite for dealing natively with NV12 buffers in
`HaltonFrameSampler` et al.
Bug: webrtc:358039777
Change-Id: I8a79389903b4ac15193665cb8375be4891cfa005
Reviewed-on: https://webrtc-review.googlesource.com/c/src/+/397801
Commit-Queue: Erik Språng <sprang@webrtc.org>
Reviewed-by: Fanny Linderborg <linderborg@webrtc.org>
Cr-Commit-Position: refs/heads/main@{#45024}
Diffstat:
5 files changed, 540 insertions(+), 2 deletions(-)
diff --git a/third_party/libwebrtc/README.mozilla.last-vendor b/third_party/libwebrtc/README.mozilla.last-vendor
@@ -1,4 +1,4 @@
# ./mach python dom/media/webrtc/third_party_build/vendor-libwebrtc.py --from-local /home/mfroman/mozilla/elm/.moz-fast-forward/moz-libwebrtc --commit mozpatches libwebrtc
-libwebrtc updated from /home/mfroman/mozilla/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2025-10-08T21:51:47.922559+00:00.
+libwebrtc updated from /home/mfroman/mozilla/elm/.moz-fast-forward/moz-libwebrtc commit mozpatches on 2025-10-08T21:52:50.800744+00:00.
# base of lastest vendoring
-3589135d09
+c7bcf5ba9a
diff --git a/third_party/libwebrtc/video/corruption_detection/BUILD.gn b/third_party/libwebrtc/video/corruption_detection/BUILD.gn
@@ -101,6 +101,7 @@ rtc_library("halton_frame_sampler") {
]
deps = [
":halton_sequence",
+ ":video_frame_sampler",
"../../api:scoped_refptr",
"../../api/video:video_frame",
"../../rtc_base:checks",
@@ -109,6 +110,18 @@ rtc_library("halton_frame_sampler") {
]
}
+rtc_library("video_frame_sampler") {
+ sources = [
+ "video_frame_sampler.cc",
+ "video_frame_sampler.h",
+ ]
+ deps = [
+ "../../api:scoped_refptr",
+ "../../api/video:video_frame",
+ "../../rtc_base:checks",
+ ]
+}
+
rtc_library("halton_sequence") {
sources = [
"halton_sequence.cc",
@@ -207,6 +220,17 @@ if (rtc_include_tests) {
]
}
+ rtc_library("video_frame_sampler_unittest") {
+ testonly = true
+ sources = [ "video_frame_sampler_unittest.cc" ]
+ deps = [
+ ":video_frame_sampler",
+ "../../api:scoped_refptr",
+ "../../api/video:video_frame",
+ "../../test:test_support",
+ ]
+ }
+
rtc_library("halton_sequence_unittest") {
testonly = true
sources = [ "halton_sequence_unittest.cc" ]
@@ -238,6 +262,7 @@ if (rtc_include_tests) {
":halton_frame_sampler_unittest",
":halton_sequence_unittest",
":utils_unittest",
+ ":video_frame_sampler_unittest",
]
}
}
diff --git a/third_party/libwebrtc/video/corruption_detection/video_frame_sampler.cc b/third_party/libwebrtc/video/corruption_detection/video_frame_sampler.cc
@@ -0,0 +1,143 @@
+/*
+ * Copyright 2025 The WebRTC project authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/corruption_detection/video_frame_sampler.h"
+
+#include <cstdint>
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/video_frame.h"
+#include "api/video/video_frame_buffer.h"
+#include "rtc_base/checks.h"
+
+namespace webrtc {
+
+class I420FrameSampler : public VideoFrameSampler {
+ public:
+ explicit I420FrameSampler(scoped_refptr<const I420BufferInterface> buffer)
+ : buffer_(buffer) {}
+
+ uint8_t GetSampleValue(ChannelType channel, int col, int row) const override {
+ RTC_DCHECK_GE(col, 0);
+ RTC_DCHECK_GE(row, 0);
+ switch (channel) {
+ case ChannelType::Y:
+ RTC_DCHECK_LT(col, width(ChannelType::Y));
+ RTC_DCHECK_LT(row, height(ChannelType::Y));
+ return buffer_->DataY()[row * buffer_->StrideY() + col];
+ case ChannelType::U:
+ RTC_DCHECK_LT(col, width(ChannelType::U));
+ RTC_DCHECK_LT(row, height(ChannelType::U));
+ return buffer_->DataU()[row * buffer_->StrideU() + col];
+ case ChannelType::V:
+ RTC_DCHECK_LT(col, width(ChannelType::V));
+ RTC_DCHECK_LT(row, height(ChannelType::V));
+ return buffer_->DataV()[row * buffer_->StrideV() + col];
+ }
+ }
+
+ int width(ChannelType channel) const override {
+ switch (channel) {
+ case ChannelType::Y:
+ return buffer_->width();
+ case ChannelType::U:
+ case ChannelType::V:
+ return buffer_->ChromaWidth();
+ }
+ }
+
+ int height(ChannelType channel) const override {
+ switch (channel) {
+ case ChannelType::Y:
+ return buffer_->height();
+ case ChannelType::U:
+ case ChannelType::V:
+ return buffer_->ChromaHeight();
+ }
+ }
+
+ private:
+ const scoped_refptr<const I420BufferInterface> buffer_;
+};
+
+class NV12FrameSampler : public VideoFrameSampler {
+ public:
+ explicit NV12FrameSampler(scoped_refptr<const NV12BufferInterface> buffer)
+ : buffer_(buffer) {}
+
+ uint8_t GetSampleValue(ChannelType channel, int col, int row) const override {
+ RTC_DCHECK_GE(col, 0);
+ RTC_DCHECK_GE(row, 0);
+ switch (channel) {
+ case ChannelType::Y:
+ RTC_DCHECK_LT(col, width(ChannelType::Y));
+ RTC_DCHECK_LT(row, height(ChannelType::Y));
+ return buffer_->DataY()[row * buffer_->StrideY() + col];
+ case ChannelType::U:
+ RTC_DCHECK_LT(col, width(ChannelType::U));
+ RTC_DCHECK_LT(row, height(ChannelType::U));
+ return buffer_->DataUV()[row * buffer_->StrideUV() + (col * 2)];
+ case ChannelType::V:
+ RTC_DCHECK_LT(col, width(ChannelType::V));
+ RTC_DCHECK_LT(row, height(ChannelType::V));
+ return buffer_->DataUV()[row * buffer_->StrideUV() + (col * 2) + 1];
+ }
+ }
+
+ int width(ChannelType channel) const override {
+ switch (channel) {
+ case ChannelType::Y:
+ return buffer_->width();
+ case ChannelType::U:
+ case ChannelType::V:
+ return buffer_->ChromaWidth();
+ }
+ }
+
+ int height(ChannelType channel) const override {
+ switch (channel) {
+ case ChannelType::Y:
+ return buffer_->height();
+ case ChannelType::U:
+ case ChannelType::V:
+ return buffer_->ChromaHeight();
+ }
+ }
+
+ private:
+ const scoped_refptr<const NV12BufferInterface> buffer_;
+};
+
+std::unique_ptr<VideoFrameSampler> VideoFrameSampler::Create(
+ const VideoFrame& frame) {
+ if (frame.video_frame_buffer() == nullptr) {
+ return nullptr;
+ }
+ switch (frame.video_frame_buffer()->type()) {
+ case VideoFrameBuffer::Type::kNV12: {
+ return std::make_unique<NV12FrameSampler>(
+ scoped_refptr<const NV12BufferInterface>(
+ frame.video_frame_buffer()->GetNV12()));
+ }
+ case VideoFrameBuffer::Type::kI420:
+ case VideoFrameBuffer::Type::kI420A:
+ // Native I420 and I420A are used directly (Alpha channel ignored).
+ return std::make_unique<I420FrameSampler>(
+ scoped_refptr<const I420BufferInterface>(
+ frame.video_frame_buffer()->GetI420()));
+ default:
+ // Conversion and copy to I420 from some other format.
+ return std::make_unique<I420FrameSampler>(
+ frame.video_frame_buffer()->ToI420());
+ }
+}
+
+} // namespace webrtc
diff --git a/third_party/libwebrtc/video/corruption_detection/video_frame_sampler.h b/third_party/libwebrtc/video/corruption_detection/video_frame_sampler.h
@@ -0,0 +1,36 @@
+/*
+ * Copyright 2025 The WebRTC project authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#ifndef VIDEO_CORRUPTION_DETECTION_VIDEO_FRAME_SAMPLER_H_
+#define VIDEO_CORRUPTION_DETECTION_VIDEO_FRAME_SAMPLER_H_
+
+#include <cstdint>
+#include <memory>
+
+#include "api/video/video_frame.h"
+
+namespace webrtc {
+
+class VideoFrameSampler {
+ public:
+ static std::unique_ptr<VideoFrameSampler> Create(const VideoFrame& frame);
+ virtual ~VideoFrameSampler() = default;
+
+ enum class ChannelType { Y, U, V };
+ virtual uint8_t GetSampleValue(ChannelType channel,
+ int col,
+ int row) const = 0;
+ virtual int width(ChannelType channel) const = 0;
+ virtual int height(ChannelType channel) const = 0;
+};
+
+} // namespace webrtc
+
+#endif // VIDEO_CORRUPTION_DETECTION_VIDEO_FRAME_SAMPLER_H_
diff --git a/third_party/libwebrtc/video/corruption_detection/video_frame_sampler_unittest.cc b/third_party/libwebrtc/video/corruption_detection/video_frame_sampler_unittest.cc
@@ -0,0 +1,334 @@
+/*
+ * Copyright 2025 The WebRTC project authors. All rights reserved.
+ *
+ * Use of this source code is governed by a BSD-style license
+ * that can be found in the LICENSE file in the root of the source
+ * tree. An additional intellectual property rights grant can be found
+ * in the file PATENTS. All contributing project authors may
+ * be found in the AUTHORS file in the root of the source tree.
+ */
+
+#include "video/corruption_detection/video_frame_sampler.h"
+
+#include <cstdint>
+#include <cstring>
+#include <memory>
+
+#include "api/scoped_refptr.h"
+#include "api/video/i420_buffer.h"
+#include "api/video/i444_buffer.h"
+#include "api/video/nv12_buffer.h"
+#include "api/video/video_frame.h"
+#include "test/gmock.h"
+#include "test/gtest.h"
+
+using ::testing::_;
+
+namespace webrtc {
+
+scoped_refptr<I420Buffer> MakeSimpleI420FrameBuffer() {
+ // Create a simple I420 frame of size 4x4 with each sample having a unique
+ // value. An additional stride is used with a "poisoned" value of 99.
+ scoped_refptr<I420Buffer> buffer = I420Buffer::Create(/*width=*/4,
+ /*height=*/4,
+ /*stride_y=*/5,
+ /*stride_u=*/3,
+ /*stride_v=*/3);
+ const uint8_t kYContent[] = {
+ // clang-format off
+ 1, 2, 3, 4, 99,
+ 5, 6, 7, 8, 99,
+ 9, 10, 11, 12, 99,
+ 13, 14, 15, 16, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataY(), kYContent, sizeof(kYContent));
+
+ const uint8_t kUContent[] = {
+ // clang-format off
+ 17, 18, 99,
+ 19, 20, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataU(), kUContent, sizeof(kUContent));
+
+ const uint8_t kVContent[] = {
+ // clang-format off
+ 21, 22, 99,
+ 23, 24, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataV(), kVContent, sizeof(kVContent));
+
+ return buffer;
+}
+
+scoped_refptr<I444Buffer> MakeSimpleI444FrameBuffer() {
+ // Create an I444 frame, with the same contents as `MakeSimpleI420FrameBuffer`
+ // just upscaled with nearest-neighbour.
+ scoped_refptr<I444Buffer> buffer = I444Buffer::Create(/*width=*/4,
+ /*height=*/4,
+ /*stride_y=*/5,
+ /*stride_u=*/5,
+ /*stride_v=*/5);
+ const uint8_t kYContent[] = {
+ // clang-format off
+ 1, 2, 3, 4, 99,
+ 5, 6, 7, 8, 99,
+ 9, 10, 11, 12, 99,
+ 13, 14, 15, 16, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataY(), kYContent, sizeof(kYContent));
+
+ const uint8_t kUContent[] = {
+ // clang-format off
+ 17, 17, 18, 18, 99,
+ 17, 17, 18, 18, 99,
+ 19, 19, 20, 20, 99,
+ 19, 19, 20, 20, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataU(), kUContent, sizeof(kUContent));
+
+ const uint8_t kVContent[] = {
+ // clang-format off
+ 21, 21, 22, 22, 99,
+ 21, 21, 22, 22, 99,
+ 23, 23, 24, 24, 99,
+ 23, 23, 24, 24, 99
+ // clang-format on
+ };
+ memcpy(buffer->MutableDataV(), kVContent, sizeof(kVContent));
+
+ return buffer;
+}
+
+std::unique_ptr<VideoFrameSampler> GetDefaultSampler() {
+ return VideoFrameSampler::Create(
+ VideoFrame::Builder()
+ .set_video_frame_buffer(MakeSimpleI420FrameBuffer())
+ .build());
+}
+
+TEST(VideoFrameSampler, ParsesI420YChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler = GetDefaultSampler();
+ int val = 1;
+ for (int row = 0; row < 4; ++row) {
+ for (int col = 0; col < 4; ++col) {
+ EXPECT_EQ(
+ sampler->GetSampleValue(VideoFrameSampler::ChannelType::Y, col, row),
+ val++);
+ }
+ }
+}
+
+TEST(VideoFrameSampler, ParsesI420UChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler = GetDefaultSampler();
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 0),
+ 17);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 0),
+ 18);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 1),
+ 19);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 1),
+ 20);
+}
+
+TEST(VideoFrameSampler, ParsesI420VChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler = GetDefaultSampler();
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 0),
+ 21);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 0),
+ 22);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 1),
+ 23);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 1),
+ 24);
+}
+
+TEST(VideoFrameSampler, ReportsI420Resolution) {
+ std::unique_ptr<VideoFrameSampler> sampler = GetDefaultSampler();
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::V), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::V), 2);
+}
+
+TEST(VideoFrameSampler, ParsesNV12YChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler =
+ VideoFrameSampler::Create(VideoFrame::Builder()
+ .set_video_frame_buffer(NV12Buffer::Copy(
+ *MakeSimpleI420FrameBuffer()))
+ .build());
+ int val = 1;
+ for (int row = 0; row < 4; ++row) {
+ for (int col = 0; col < 4; ++col) {
+ EXPECT_EQ(
+ sampler->GetSampleValue(VideoFrameSampler::ChannelType::Y, col, row),
+ val++);
+ }
+ }
+}
+
+TEST(VideoFrameSampler, ParsesNV12UChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler =
+ VideoFrameSampler::Create(VideoFrame::Builder()
+ .set_video_frame_buffer(NV12Buffer::Copy(
+ *MakeSimpleI420FrameBuffer()))
+ .build());
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 0),
+ 17);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 0),
+ 18);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 1),
+ 19);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 1),
+ 20);
+}
+
+TEST(VideoFrameSampler, ParsesNV12VChannel) {
+ std::unique_ptr<VideoFrameSampler> sampler =
+ VideoFrameSampler::Create(VideoFrame::Builder()
+ .set_video_frame_buffer(NV12Buffer::Copy(
+ *MakeSimpleI420FrameBuffer()))
+ .build());
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 0),
+ 21);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 0),
+ 22);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 1),
+ 23);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 1),
+ 24);
+}
+
+TEST(VideoFrameSampler, ReportsNV12Resolution) {
+ std::unique_ptr<VideoFrameSampler> sampler =
+ VideoFrameSampler::Create(VideoFrame::Builder()
+ .set_video_frame_buffer(NV12Buffer::Copy(
+ *MakeSimpleI420FrameBuffer()))
+ .build());
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::V), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::V), 2);
+}
+
+TEST(VideoFrameSampler, ParsesI444YChannel) {
+ // I444 will be converted to I420, but the Y channel should remain unchanged.
+ std::unique_ptr<VideoFrameSampler> sampler = VideoFrameSampler::Create(
+ VideoFrame::Builder()
+ .set_video_frame_buffer(MakeSimpleI444FrameBuffer())
+ .build());
+ int val = 1;
+ for (int row = 0; row < 4; ++row) {
+ for (int col = 0; col < 4; ++col) {
+ EXPECT_EQ(
+ sampler->GetSampleValue(VideoFrameSampler::ChannelType::Y, col, row),
+ val++);
+ }
+ }
+}
+
+TEST(VideoFrameSampler, ParsesI444UChannel) {
+ // I444 will be converted to I420, with U/V downscaled by 2x.
+ std::unique_ptr<VideoFrameSampler> sampler = VideoFrameSampler::Create(
+ VideoFrame::Builder()
+ .set_video_frame_buffer(MakeSimpleI444FrameBuffer())
+ .build());
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 0),
+ 17);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 0),
+ 18);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 0, 1),
+ 19);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::U, 1, 1),
+ 20);
+}
+
+TEST(VideoFrameSampler, ParsesI444VChannel) {
+ // I444 will be converted to I420, with U/V downscaled by 2x.
+ std::unique_ptr<VideoFrameSampler> sampler = VideoFrameSampler::Create(
+ VideoFrame::Builder()
+ .set_video_frame_buffer(MakeSimpleI444FrameBuffer())
+ .build());
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 0),
+ 21);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 0),
+ 22);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 0, 1),
+ 23);
+ EXPECT_EQ(sampler->GetSampleValue(VideoFrameSampler::ChannelType::V, 1, 1),
+ 24);
+}
+
+TEST(VideoFrameSampler, ReportsI444Resolution) {
+ // I444 will be converted to I420, with U/V downscaled by 2x.
+ std::unique_ptr<VideoFrameSampler> sampler = VideoFrameSampler::Create(
+ VideoFrame::Builder()
+ .set_video_frame_buffer(MakeSimpleI444FrameBuffer())
+ .build());
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::Y), 4);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::U), 2);
+ EXPECT_EQ(sampler->width(VideoFrameSampler::ChannelType::V), 2);
+ EXPECT_EQ(sampler->height(VideoFrameSampler::ChannelType::V), 2);
+}
+
+#if GTEST_HAS_DEATH_TEST
+TEST(VideoFrameSampler, RejectsNegativeColumn) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::Y, -1, 0),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsNegativeRow) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::Y, 0, -1),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeYColumn) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::Y, 4, 0),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeYRow) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::Y, 0, 4),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeUColumn) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::U, 3, 0),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeURow) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::U, 0, 2),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeVColumn) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::V, 2, 0),
+ _);
+}
+
+TEST(VideoFrameSampler, RejectsTooLargeVRow) {
+ EXPECT_DEATH(GetDefaultSampler()->GetSampleValue(
+ VideoFrameSampler::ChannelType::V, 0, 2),
+ _);
+}
+#endif // GTEST_HAS_DEATH_TEST
+
+} // namespace webrtc