/*
 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include <memory>

#include "api/test/create_frame_generator.h"
#include "api/test/frame_generator_interface.h"
#include "api/test/mock_video_decoder.h"
#include "api/test/mock_video_encoder.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_video/test/utilities.h"
#include "modules/video_coding/codecs/interface/mock_libvpx_interface.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "rtc_base/time_utils.h"
#include "test/field_trial.h"
#include "test/mappable_native_buffer.h"
#include "test/video_codec_settings.h"

namespace webrtc {

using ::testing::_;
using ::testing::AllOf;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::Field;
using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Return;
using EncoderInfo = webrtc::VideoEncoder::EncoderInfo;
using FramerateFractions =
    absl::InlinedVector<uint8_t, webrtc::kMaxTemporalStreams>;

namespace {
constexpr uint32_t kLegacyScreenshareTl0BitrateKbps = 200;
constexpr uint32_t kLegacyScreenshareTl1BitrateKbps = 1000;
constexpr uint32_t kInitialTimestampRtp = 123;
constexpr int64_t kTestNtpTimeMs = 456;
constexpr int64_t kInitialTimestampMs = 789;
constexpr int kNumCores = 1;
constexpr size_t kMaxPayloadSize = 1440;
constexpr int kWidth = 172;
constexpr int kHeight = 144;
constexpr float kFramerateFps = 30;

const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
                                       kNumCores,
                                       kMaxPayloadSize);
}  // namespace

class TestVp8Impl : public VideoCodecUnitTest {
 protected:
  std::unique_ptr<VideoEncoder> CreateEncoder() override {
    return VP8Encoder::Create();
  }

  std::unique_ptr<VideoDecoder> CreateDecoder() override {
    return VP8Decoder::Create();
  }

  void ModifyCodecSettings(VideoCodec* codec_settings) override {
    webrtc::test::CodecSettings(kVideoCodecVP8, codec_settings);
    codec_settings->width = kWidth;
    codec_settings->height = kHeight;
    codec_settings->SetVideoEncoderComplexity(
        VideoCodecComplexity::kComplexityNormal);
  }

  void EncodeAndWaitForFrame(const VideoFrame& input_frame,
                             EncodedImage* encoded_frame,
                             CodecSpecificInfo* codec_specific_info,
                             bool keyframe = false) {
    std::vector<VideoFrameType> frame_types;
    if (keyframe) {
      frame_types.emplace_back(VideoFrameType::kVideoFrameKey);
    } else {
      frame_types.emplace_back(VideoFrameType::kVideoFrameDelta);
    }
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
              encoder_->Encode(input_frame, &frame_types));
    ASSERT_TRUE(WaitForEncodedFrame(encoded_frame, codec_specific_info));
    VerifyQpParser(*encoded_frame);
    EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType);
    EXPECT_EQ(0, encoded_frame->SpatialIndex());
  }

  void EncodeAndExpectFrameWith(const VideoFrame& input_frame,
                                uint8_t temporal_idx,
                                bool keyframe = false) {
    EncodedImage encoded_frame;
    CodecSpecificInfo codec_specific_info;
    EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info,
                          keyframe);
    EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP8.temporalIdx);
  }

  void VerifyQpParser(const EncodedImage& encoded_frame) const {
    int qp;
    EXPECT_GT(encoded_frame.size(), 0u);
    ASSERT_TRUE(vp8::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
    EXPECT_EQ(encoded_frame.qp_, qp) << "Encoder QP != parsed bitstream QP.";
  }
};

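// With a single temporal layer, the encoder should configure libvpx with
// error resilience turned off.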
TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) {
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*vpx,
              codec_enc_init(
                  _, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, 0), _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

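// With temporal layers, the encoder should request the default error
// resilience mode from libvpx.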
TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) {
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_DEFAULT),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

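// The WebRTC-VP8-ForcePartitionResilience field trial should switch the
// temporal-layer configuration to partition-based error resilience.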
TEST_F(TestVp8Impl,
       PartitionErrorResilienceEnabledForTemporalLayersWithFieldTrial) {
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-ForcePartitionResilience/Enabled/");
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_PARTITIONS),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

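// SetRates() should translate the bitrate allocation and framerate into a
// libvpx rate-control configuration via codec_enc_config_set().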
TEST_F(TestVp8Impl, SetRates) {
  codec_settings_.SetFrameDropEnabled(true);
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));

  const uint32_t kBitrateBps = 300000;
  VideoBitrateAllocation bitrate_allocation;
  bitrate_allocation.SetBitrate(0, 0, kBitrateBps);
  EXPECT_CALL(
      *vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 100u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 15u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 1000u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 600u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 30u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(VideoEncoder::RateControlParameters(
      bitrate_allocation, static_cast<double>(codec_settings_.maxFramerate)));
}

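// Encodes one frame, releases the encoder, and verifies that further Encode()
// calls report the encoder as uninitialized.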
TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, EncodeNv12FrameSimulcast) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, EncodeI420FrameAfterNv12Frame) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, Configure) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
  EXPECT_TRUE(decoder_->Configure({}));
}

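// The encoded image should carry the input frame's RTP timestamp and the
// configured resolution.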
TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
  VideoFrame input_frame = NextInputFrame();
  input_frame.set_timestamp(kInitialTimestampRtp);
  input_frame.set_timestamp_us(kInitialTimestampMs *
                               rtc::kNumMicrosecsPerMillisec);
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  EXPECT_EQ(kInitialTimestampRtp, encoded_frame.Timestamp());
  EXPECT_EQ(kWidth, static_cast<int>(encoded_frame._encodedWidth));
  EXPECT_EQ(kHeight, static_cast<int>(encoded_frame._encodedHeight));
}

TEST_F(TestVp8Impl,
       EncoderFillsResolutionInCodecAgnosticSectionOfCodecSpecificInfo) {
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  ASSERT_TRUE(codec_specific_info.template_structure);
  EXPECT_THAT(codec_specific_info.template_structure->resolutions,
              ElementsAre(RenderResolution(kWidth, kHeight)));
}

TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
  VideoFrame input_frame = NextInputFrame();
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // First frame should be a key frame.
  encoded_frame._frameType = VideoFrameType::kVideoFrameKey;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1));
  std::unique_ptr<VideoFrame> decoded_frame;
  absl::optional<uint8_t> decoded_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
  ASSERT_TRUE(decoded_frame);
  ASSERT_TRUE(decoded_qp);
  EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36);
  EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
}

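// InitEncode() should reject simulcast configurations whose resolutions,
// scaling factors or temporal layer counts are inconsistent, and accept
// valid ones.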
TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
  codec_settings_.numberOfSimulcastStreams = 2;
  // Resolutions are not in ascending order, temporal layers do not match.
  codec_settings_.simulcastStream[0] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 2,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2,
                                        .height = kHeight / 2,
                                        .maxFramerate = 30,
                                        .numberOfTemporalLayers = 3,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  codec_settings_.numberOfSimulcastStreams = 3;
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {.width = kWidth / 2,
                                        .height = kHeight / 2,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2 - 1,
                                        .height = kHeight / 2 - 1,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = 30,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth - 1,
                                        .height = kHeight - 1,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Temporal layers do not match.
  codec_settings_.simulcastStream[0] = {.width = kWidth / 4,
                                        .height = kHeight / 4,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2,
                                        .height = kHeight / 2,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 2,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 3,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions do not match codec config.
  codec_settings_.simulcastStream[0] = {.width = kWidth / 4 + 1,
                                        .height = kHeight / 4 + 1,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2 + 2,
                                        .height = kHeight / 2 + 2,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth + 4,
                                        .height = kHeight + 4,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: scaling by 2, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {.width = kWidth / 4,
                                        .height = kHeight / 4,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2,
                                        .height = kHeight / 2,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: custom scaling, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {.width = kWidth / 4,
                                        .height = kHeight / 4,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[1] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  codec_settings_.simulcastStream[2] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
}

#if defined(WEBRTC_ANDROID)
#define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
#else
#define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
#endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
  VideoFrame input_frame = NextInputFrame();
  input_frame.set_timestamp(kInitialTimestampRtp);
  input_frame.set_timestamp_us(kInitialTimestampMs *
                               rtc::kNumMicrosecsPerMillisec);
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // First frame should be a key frame.
  encoded_frame._frameType = VideoFrameType::kVideoFrameKey;
  encoded_frame.ntp_time_ms_ = kTestNtpTimeMs;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1));

  std::unique_ptr<VideoFrame> decoded_frame;
  absl::optional<uint8_t> decoded_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
  ASSERT_TRUE(decoded_frame);
  // Compute PSNR on all planes (faster than SSIM).
  EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36);
  EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp());
}

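// With two temporal layers, encoded frames should alternate between temporal
// index 0 and 1.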
TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Temporal layer 0.
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(0, codec_specific_info.codecSpecific.VP8.temporalIdx);
  // Temporal layer 1.
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
  // Temporal layer 0.
  EncodeAndExpectFrameWith(NextInputFrame(), 0);
  // Temporal layer 1.
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
}

TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {
  codec_settings_.VP8()->automaticResizeOn = false;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoEncoder::ScalingSettings settings =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_FALSE(settings.thresholds.has_value());
}

TEST_F(TestVp8Impl, ScalingEnabledIfAutomaticResizeOn) {
  codec_settings_.SetFrameDropEnabled(true);
  codec_settings_.VP8()->automaticResizeOn = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoEncoder::ScalingSettings settings =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_TRUE(settings.thresholds.has_value());
  EXPECT_EQ(kDefaultMinPixelsPerFrame, settings.min_pixels_per_frame);
}

TEST_F(TestVp8Impl, DontDropKeyframes) {
  // Set very high resolution to trigger overuse more easily.
  const int kScreenWidth = 1920;
  const int kScreenHeight = 1080;

  codec_settings_.width = kScreenWidth;
  codec_settings_.height = kScreenHeight;

  // Screensharing has the internal frame dropper off, and instead per frame
  // asks ScreenshareLayers to decide if it should be dropped or not.
  codec_settings_.SetFrameDropEnabled(false);
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  // ScreenshareLayers triggers on 2 temporal layers and 1000kbps max bitrate.
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  codec_settings_.maxBitrate = 1000;

  // Reset the frame generator with large number of squares, leading to lots of
  // details and high probability of overshoot.
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      codec_settings_.width, codec_settings_.height,
      test::FrameGeneratorInterface::OutputType::kI420,
      /* num_squares = */ absl::optional<int>(300));

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoBitrateAllocation bitrate_allocation;
  // Bitrate only enough for TL0.
  bitrate_allocation.SetBitrate(0, 0, 200000);
  encoder_->SetRates(
      VideoEncoder::RateControlParameters(bitrate_allocation, 5.0));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info,
                        true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
}

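// On an overshoot-induced drop and re-encode, the second call into libvpx
// should reuse the same presentation timestamp.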
TEST_F(TestVp8Impl, KeepsTimestampOnReencode) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  // Settings needed to trigger ScreenshareLayers usage, which is required for
  // overshoot-drop-reencode logic.
  codec_settings_.maxBitrate = 1000;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  codec_settings_.legacy_conference_mode = true;

  EXPECT_CALL(*vpx, img_wrap(_, _, _, _, _, _))
      .WillOnce(Invoke([](vpx_image_t* img, vpx_img_fmt_t fmt, unsigned int d_w,
                          unsigned int d_h, unsigned int stride_align,
                          unsigned char* img_data) {
        img->fmt = fmt;
        img->d_w = d_w;
        img->d_h = d_h;
        img->img_data = img_data;
        return img;
      }));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));
  MockEncodedImageCallback callback;
  encoder.RegisterEncodeCompleteCallback(&callback);

  // Simulate overshoot drop, re-encode: encode function will be called twice
  // with the same parameters. codec_get_cx_data() will by default return no
  // image data and be interpreted as drop.
  EXPECT_CALL(*vpx, codec_encode(_, _, /* pts = */ 0, _, _, _))
      .Times(2)
      .WillRepeatedly(Return(vpx_codec_err_t::VPX_CODEC_OK));

  auto delta_frame =
      std::vector<VideoFrameType>{VideoFrameType::kVideoFrameDelta};
  encoder.Encode(NextInputFrame(), &delta_frame);
}

TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  const auto info = encoder.GetEncoderInfo();

  EXPECT_FALSE(info.supports_native_handle);
  EXPECT_FALSE(info.is_hardware_accelerated);
  EXPECT_TRUE(info.supports_simulcast);
  EXPECT_EQ(info.implementation_name, "libvpx");
  EXPECT_EQ(info.requested_resolution_alignment, 1);
  EXPECT_THAT(info.preferred_pixel_formats,
              testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12,
                                            VideoFrameBuffer::Type::kI420));
}

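// The WebRTC-VP8-GetEncoderInfoOverride field trial should override the
// requested resolution alignment reported in EncoderInfo.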
TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) {
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-GetEncoderInfoOverride/"
      "requested_resolution_alignment:10/");

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10);
  EXPECT_FALSE(
      encoder.GetEncoderInfo().apply_alignment_to_all_simulcast_layers);
  EXPECT_TRUE(encoder.GetEncoderInfo().resolution_bitrate_limits.empty());
}

TEST(LibvpxVp8EncoderTest, ResolutionBitrateLimitsFromFieldTrial) {
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-GetEncoderInfoOverride/"
      "frame_size_pixels:123|456|789,"
      "min_start_bitrate_bps:11000|22000|33000,"
      "min_bitrate_bps:44000|55000|66000,"
      "max_bitrate_bps:77000|88000|99000/");

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  EXPECT_THAT(
      encoder.GetEncoderInfo().resolution_bitrate_limits,
      ::testing::ElementsAre(
          VideoEncoder::ResolutionBitrateLimits{123, 11000, 44000, 77000},
          VideoEncoder::ResolutionBitrateLimits{456, 22000, 55000, 88000},
          VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
}

TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  const auto info = encoder.GetEncoderInfo();

  EXPECT_TRUE(info.resolution_bitrate_limits.empty());
}

TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsResolutionBitrateLimitsAsConfigured) {
  std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits =
      {VideoEncoder::ResolutionBitrateLimits(/*frame_size_pixels=*/640 * 360,
                                             /*min_start_bitrate_bps=*/300,
                                             /*min_bitrate_bps=*/100,
                                             /*max_bitrate_bps=*/1000),
       VideoEncoder::ResolutionBitrateLimits(320 * 180, 100, 30, 500)};
  VP8Encoder::Settings settings;
  settings.resolution_bitrate_limits = resolution_bitrate_limits;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           std::move(settings));

  const auto info = encoder.GetEncoderInfo();

  EXPECT_EQ(info.resolution_bitrate_limits, resolution_bitrate_limits);
}

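// Without temporal layers, the full framerate should be allocated to the
// single layer.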
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationNoLayers) {
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers] = {
      FramerateFractions(1, EncoderInfo::kMaxFramerateFraction)};

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

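// With two temporal layers, the fps allocation should report cumulative
// fractions of half and full framerate for the base and enhancement layer.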
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].targetBitrate = 100;
  codec_settings_.simulcastStream[0].maxBitrate = 100;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].targetBitrate = 100;
  codec_settings_.simulcastStream[0].maxBitrate = 100;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 3;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].minBitrate = 30;
  codec_settings_.simulcastStream[0].targetBitrate =
      kLegacyScreenshareTl0BitrateKbps;
  codec_settings_.simulcastStream[0].maxBitrate =
      kLegacyScreenshareTl1BitrateKbps;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.legacy_conference_mode = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Expect empty vector, since this mode doesn't have a fixed framerate.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Set up three simulcast streams with three temporal layers each.
  codec_settings_.numberOfSimulcastStreams = 3;
  for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) {
    codec_settings_.simulcastStream[i].active = true;
    codec_settings_.simulcastStream[i].minBitrate = 30;
    codec_settings_.simulcastStream[i].targetBitrate = 30;
    codec_settings_.simulcastStream[i].maxBitrate = 30;
    codec_settings_.simulcastStream[i].numberOfTemporalLayers = 3;
    codec_settings_.simulcastStream[i].width =
        codec_settings_.width >>
        (codec_settings_.numberOfSimulcastStreams - i - 1);
    codec_settings_.simulcastStream[i].height =
        codec_settings_.height >>
        (codec_settings_.numberOfSimulcastStreams - i - 1);
  }

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);
  expected_fps_allocation[1] = expected_fps_allocation[0];
  expected_fps_allocation[2] = expected_fps_allocation[0];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));

  // Release encoder and re-init without temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Sanity check fps allocation when not inited.
  FramerateFractions default_fps_fraction[kMaxSpatialLayers];
  default_fps_fraction[0].push_back(EncoderInfo::kMaxFramerateFraction);
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(default_fps_fraction));

  for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) {
    codec_settings_.simulcastStream[i].numberOfTemporalLayers = 1;
  }
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  for (size_t i = 0; i < 3; ++i) {
    expected_fps_allocation[i].clear();
    expected_fps_allocation[i].push_back(EncoderInfo::kMaxFramerateFraction);
  }
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

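// Runs the tests below once per mappable native buffer pixel format
// (I420 and NV12).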
class TestVp8ImplForPixelFormat
    : public TestVp8Impl,
      public ::testing::WithParamInterface<VideoFrameBuffer::Type> {
 public:
  TestVp8ImplForPixelFormat() : TestVp8Impl(), mappable_type_(GetParam()) {}

 protected:
  VideoFrameBuffer::Type mappable_type_;
};

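// Encoding a mappable native frame in simulcast should map the buffer once
// per active layer, at the layer resolution, without converting to I420.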
TEST_P(TestVp8ImplForPixelFormat, EncodeNativeFrameSimulcast) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Configure simulcast.
  codec_settings_.numberOfSimulcastStreams = 3;
  codec_settings_.simulcastStream[0] = {.width = kWidth / 4,
                                        .height = kHeight / 4,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80,
                                        .active = true};
  codec_settings_.simulcastStream[1] = {.width = kWidth / 2,
                                        .height = kHeight / 2,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80,
                                        .active = true};
  codec_settings_.simulcastStream[2] = {.width = kWidth,
                                        .height = kHeight,
                                        .maxFramerate = kFramerateFps,
                                        .numberOfTemporalLayers = 1,
                                        .maxBitrate = 4000,
                                        .targetBitrate = 3000,
                                        .minBitrate = 2000,
                                        .qpMax = 80,
                                        .active = true};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Create a zero-conversion NV12 frame (calling ToI420 on it crashes).
  VideoFrame input_frame =
      test::CreateMappableNativeFrame(1, mappable_type_, kWidth, kHeight);

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // After encoding, we expect one mapping per simulcast layer.
  rtc::scoped_refptr<test::MappableNativeBuffer> mappable_buffer =
      test::GetMappableNativeBufferFromVideoFrame(input_frame);
  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_buffers =
      mappable_buffer->GetMappedFramedBuffers();
  ASSERT_EQ(mapped_buffers.size(), 3u);
  EXPECT_EQ(mapped_buffers[0]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[0]->width(), kWidth);
  EXPECT_EQ(mapped_buffers[0]->height(), kHeight);
  EXPECT_EQ(mapped_buffers[1]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[1]->width(), kWidth / 2);
  EXPECT_EQ(mapped_buffers[1]->height(), kHeight / 2);
  EXPECT_EQ(mapped_buffers[2]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[2]->width(), kWidth / 4);
  EXPECT_EQ(mapped_buffers[2]->height(), kHeight / 4);
  EXPECT_FALSE(mappable_buffer->DidConvertToI420());

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
}

INSTANTIATE_TEST_SUITE_P(All,
                         TestVp8ImplForPixelFormat,
                         ::testing::Values(VideoFrameBuffer::Type::kI420,
                                           VideoFrameBuffer::Type::kNV12));

}  // namespace webrtc