xref: /aosp_15_r20/external/webrtc/modules/video_coding/codecs/multiplex/test/multiplex_adapter_unittest.cc (revision d9f758449e529ab9291ac668be2861e7a55c2422)
1 /*
2  *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 #include <stddef.h>
12 
13 #include <cstdint>
14 #include <memory>
15 #include <utility>
16 #include <vector>
17 
18 #include "absl/types/optional.h"
19 #include "api/scoped_refptr.h"
20 #include "api/test/mock_video_decoder_factory.h"
21 #include "api/test/mock_video_encoder_factory.h"
22 #include "api/video/encoded_image.h"
23 #include "api/video/video_frame.h"
24 #include "api/video/video_frame_buffer.h"
25 #include "api/video/video_rotation.h"
26 #include "api/video_codecs/sdp_video_format.h"
27 #include "api/video_codecs/video_codec.h"
28 #include "api/video_codecs/video_decoder.h"
29 #include "api/video_codecs/video_encoder.h"
30 #include "common_video/include/video_frame_buffer.h"
31 #include "common_video/libyuv/include/webrtc_libyuv.h"
32 #include "media/base/media_constants.h"
33 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
34 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
35 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
36 #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
37 #include "modules/video_coding/codecs/test/video_codec_unittest.h"
38 #include "modules/video_coding/codecs/vp9/include/vp9.h"
39 #include "modules/video_coding/include/video_codec_interface.h"
40 #include "modules/video_coding/include/video_error_codes.h"
41 #include "test/gmock.h"
42 #include "test/gtest.h"
43 #include "test/video_codec_settings.h"
44 
45 using ::testing::_;
46 using ::testing::Return;
47 
48 namespace webrtc {
49 
// The multiplex adapter wraps a real "associated" codec for each component
// stream; these tests associate it with VP9.
constexpr const char* kMultiplexAssociatedCodecName = cricket::kVp9CodecName;
const VideoCodecType kMultiplexAssociatedCodecType =
    PayloadStringToCodecType(kMultiplexAssociatedCodecName);
53 
54 class TestMultiplexAdapter : public VideoCodecUnitTest,
55                              public ::testing::WithParamInterface<
56                                  bool /* supports_augmenting_data */> {
57  public:
TestMultiplexAdapter()58   TestMultiplexAdapter()
59       : decoder_factory_(new webrtc::MockVideoDecoderFactory),
60         encoder_factory_(new webrtc::MockVideoEncoderFactory),
61         supports_augmenting_data_(GetParam()) {}
62 
63  protected:
CreateDecoder()64   std::unique_ptr<VideoDecoder> CreateDecoder() override {
65     return std::make_unique<MultiplexDecoderAdapter>(
66         decoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName),
67         supports_augmenting_data_);
68   }
69 
CreateEncoder()70   std::unique_ptr<VideoEncoder> CreateEncoder() override {
71     return std::make_unique<MultiplexEncoderAdapter>(
72         encoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName),
73         supports_augmenting_data_);
74   }
75 
ModifyCodecSettings(VideoCodec * codec_settings)76   void ModifyCodecSettings(VideoCodec* codec_settings) override {
77     webrtc::test::CodecSettings(kMultiplexAssociatedCodecType, codec_settings);
78     codec_settings->VP9()->numberOfTemporalLayers = 1;
79     codec_settings->VP9()->numberOfSpatialLayers = 1;
80     codec_settings->codecType = webrtc::kVideoCodecMultiplex;
81   }
82 
CreateDataAugmentedInputFrame(VideoFrame * video_frame)83   std::unique_ptr<VideoFrame> CreateDataAugmentedInputFrame(
84       VideoFrame* video_frame) {
85     rtc::scoped_refptr<VideoFrameBuffer> video_buffer =
86         video_frame->video_frame_buffer();
87     std::unique_ptr<uint8_t[]> data =
88         std::unique_ptr<uint8_t[]>(new uint8_t[16]);
89     for (int i = 0; i < 16; i++) {
90       data[i] = i;
91     }
92     auto augmented_video_frame_buffer =
93         rtc::make_ref_counted<AugmentedVideoFrameBuffer>(video_buffer,
94                                                          std::move(data), 16);
95     return std::make_unique<VideoFrame>(
96         VideoFrame::Builder()
97             .set_video_frame_buffer(augmented_video_frame_buffer)
98             .set_timestamp_rtp(video_frame->timestamp())
99             .set_timestamp_ms(video_frame->render_time_ms())
100             .set_rotation(video_frame->rotation())
101             .set_id(video_frame->id())
102             .build());
103   }
104 
CreateI420AInputFrame()105   std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
106     VideoFrame input_frame = NextInputFrame();
107     rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
108         input_frame.video_frame_buffer()->ToI420();
109     rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
110         yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
111         yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
112         yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
113         yuv_buffer->StrideY(),
114         // To keep reference alive.
115         [yuv_buffer] {});
116     return std::make_unique<VideoFrame>(VideoFrame::Builder()
117                                             .set_video_frame_buffer(yuva_buffer)
118                                             .set_timestamp_rtp(123)
119                                             .set_timestamp_ms(345)
120                                             .set_rotation(kVideoRotation_0)
121                                             .build());
122   }
123 
CreateInputFrame(bool contains_alpha)124   std::unique_ptr<VideoFrame> CreateInputFrame(bool contains_alpha) {
125     std::unique_ptr<VideoFrame> video_frame;
126     if (contains_alpha) {
127       video_frame = CreateI420AInputFrame();
128     } else {
129       VideoFrame next_frame = NextInputFrame();
130       video_frame = std::make_unique<VideoFrame>(
131           VideoFrame::Builder()
132               .set_video_frame_buffer(next_frame.video_frame_buffer())
133               .set_timestamp_rtp(next_frame.timestamp())
134               .set_timestamp_ms(next_frame.render_time_ms())
135               .set_rotation(next_frame.rotation())
136               .set_id(next_frame.id())
137               .build());
138     }
139     if (supports_augmenting_data_) {
140       video_frame = CreateDataAugmentedInputFrame(video_frame.get());
141     }
142 
143     return video_frame;
144   }
145 
CheckData(rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer)146   void CheckData(rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer) {
147     if (!supports_augmenting_data_) {
148       return;
149     }
150     AugmentedVideoFrameBuffer* augmented_buffer =
151         static_cast<AugmentedVideoFrameBuffer*>(video_frame_buffer.get());
152     EXPECT_EQ(augmented_buffer->GetAugmentingDataSize(), 16);
153     uint8_t* data = augmented_buffer->GetAugmentingData();
154     for (int i = 0; i < 16; i++) {
155       EXPECT_EQ(data[i], i);
156     }
157   }
158 
ExtractAXXFrame(const VideoFrame & video_frame)159   std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& video_frame) {
160     rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer =
161         video_frame.video_frame_buffer();
162     if (supports_augmenting_data_) {
163       AugmentedVideoFrameBuffer* augmentedBuffer =
164           static_cast<AugmentedVideoFrameBuffer*>(video_frame_buffer.get());
165       video_frame_buffer = augmentedBuffer->GetVideoFrameBuffer();
166     }
167     const I420ABufferInterface* yuva_buffer = video_frame_buffer->GetI420A();
168     rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
169         yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
170         yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
171         yuva_buffer->DataV(), yuva_buffer->StrideV(), [video_frame_buffer] {});
172     return std::make_unique<VideoFrame>(VideoFrame::Builder()
173                                             .set_video_frame_buffer(axx_buffer)
174                                             .set_timestamp_rtp(123)
175                                             .set_timestamp_ms(345)
176                                             .set_rotation(kVideoRotation_0)
177                                             .build());
178   }
179 
180  private:
SetUp()181   void SetUp() override {
182     EXPECT_CALL(*decoder_factory_, Die);
183     // The decoders/encoders will be owned by the caller of
184     // CreateVideoDecoder()/CreateVideoEncoder().
185     EXPECT_CALL(*decoder_factory_, CreateVideoDecoder)
186         .Times(2)
187         .WillRepeatedly([] { return VP9Decoder::Create(); });
188 
189     EXPECT_CALL(*encoder_factory_, Die);
190     EXPECT_CALL(*encoder_factory_, CreateVideoEncoder)
191         .Times(2)
192         .WillRepeatedly([] { return VP9Encoder::Create(); });
193 
194     VideoCodecUnitTest::SetUp();
195   }
196 
197   const std::unique_ptr<webrtc::MockVideoDecoderFactory> decoder_factory_;
198   const std::unique_ptr<webrtc::MockVideoEncoderFactory> encoder_factory_;
199   const bool supports_augmenting_data_;
200 };
201 
202 // TODO(emircan): Currently VideoCodecUnitTest tests do a complete setup
203 // step that goes beyond constructing `decoder_`. Simplify these tests to do
204 // less.
TEST_P(TestMultiplexAdapter, ConstructAndDestructDecoder) {
  // Fixture SetUp() already constructed and initialized `decoder_`; releasing
  // it must succeed without the fixture ever feeding it a frame.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
}
208 
TEST_P(TestMultiplexAdapter, ConstructAndDestructEncoder) {
  // Fixture SetUp() already constructed and initialized `encoder_`; releasing
  // it must succeed without the fixture ever feeding it a frame.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
}
212 
TEST_P(TestMultiplexAdapter, EncodeDecodeI420Frame) {
  // Encode a plain (no-alpha) frame and expect a multiplex bitstream back.
  std::unique_ptr<VideoFrame> input_frame = CreateInputFrame(false);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
  EncodedImage encoded;
  CodecSpecificInfo codec_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &codec_info));
  EXPECT_EQ(kVideoCodecMultiplex, codec_info.codecType);

  // Round-trip through the decoder; the output must be of reasonable quality
  // and carry the augmenting data intact (when enabled).
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded, false, -1));
  std::unique_ptr<VideoFrame> decoded;
  absl::optional<uint8_t> qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
  ASSERT_TRUE(decoded);
  EXPECT_GT(I420PSNR(input_frame.get(), decoded.get()), 36);
  CheckData(decoded->video_frame_buffer());
}
229 
TEST_P(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
  // Encode a frame that carries an alpha plane.
  std::unique_ptr<VideoFrame> yuva_frame = CreateInputFrame(true);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr));
  EncodedImage encoded;
  CodecSpecificInfo codec_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &codec_info));
  EXPECT_EQ(kVideoCodecMultiplex, codec_info.codecType);

  // Decode it back and verify the YUV planes survived with decent quality.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded, false, 0));
  std::unique_ptr<VideoFrame> decoded;
  absl::optional<uint8_t> qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
  ASSERT_TRUE(decoded);
  EXPECT_GT(I420PSNR(yuva_frame.get(), decoded.get()), 36);

  // The alpha plane travels as a second component; compare it separately.
  std::unique_ptr<VideoFrame> input_axx = ExtractAXXFrame(*yuva_frame);
  std::unique_ptr<VideoFrame> output_axx = ExtractAXXFrame(*decoded);
  EXPECT_GT(I420PSNR(input_axx.get(), output_axx.get()), 47);

  CheckData(decoded->video_frame_buffer());
}
253 
TEST_P(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
  // A frame without alpha must produce a one-component multiplex bitstream.
  std::unique_ptr<VideoFrame> input_frame = CreateInputFrame(false);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*input_frame, nullptr));
  EncodedImage encoded;
  CodecSpecificInfo codec_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &codec_info));
  EXPECT_EQ(kVideoCodecMultiplex, codec_info.codecType);
  EXPECT_FALSE(encoded.SpatialIndex());

  // Unpack the container and inspect its single component.
  const MultiplexImage& unpacked =
      MultiplexEncodedImagePacker::Unpack(encoded);
  EXPECT_EQ(0, unpacked.image_index);
  EXPECT_EQ(1, unpacked.component_count);
  const MultiplexImageComponent& component = unpacked.image_components[0];
  EXPECT_EQ(0, component.component_index);
  EXPECT_NE(nullptr, component.encoded_image.data());
  EXPECT_EQ(VideoFrameType::kVideoFrameKey,
            component.encoded_image._frameType);
}
272 
TEST_P(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
  // A frame with alpha must produce a two-component multiplex bitstream.
  std::unique_ptr<VideoFrame> yuva_frame = CreateInputFrame(true);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr));
  EncodedImage encoded;
  CodecSpecificInfo codec_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &codec_info));
  EXPECT_EQ(kVideoCodecMultiplex, codec_info.codecType);
  EXPECT_FALSE(encoded.SpatialIndex());

  const MultiplexImage& unpacked =
      MultiplexEncodedImagePacker::Unpack(encoded);
  EXPECT_EQ(0, unpacked.image_index);
  EXPECT_EQ(2, unpacked.component_count);
  EXPECT_EQ(unpacked.image_components.size(), unpacked.component_count);
  // Both components (YUV and AXX) are key frames with valid payloads.
  for (int idx = 0; idx < unpacked.component_count; ++idx) {
    const MultiplexImageComponent& component = unpacked.image_components[idx];
    EXPECT_EQ(idx, component.component_index);
    EXPECT_NE(nullptr, component.encoded_image.data());
    EXPECT_EQ(VideoFrameType::kVideoFrameKey,
              component.encoded_image._frameType);
  }
}
297 
TEST_P(TestMultiplexAdapter, ImageIndexIncreases) {
  // Encoding the same frame repeatedly must yield monotonically increasing
  // image indices, with only the first frame being a key frame.
  std::unique_ptr<VideoFrame> yuva_frame = CreateInputFrame(true);
  const size_t expected_num_encoded_frames = 3;
  for (size_t frame_idx = 0; frame_idx < expected_num_encoded_frames;
       ++frame_idx) {
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*yuva_frame, nullptr));
    EncodedImage encoded;
    CodecSpecificInfo codec_info;
    ASSERT_TRUE(WaitForEncodedFrame(&encoded, &codec_info));
    const MultiplexImage& unpacked =
        MultiplexEncodedImagePacker::Unpack(encoded);
    EXPECT_EQ(frame_idx, unpacked.image_index);
    EXPECT_EQ(frame_idx == 0 ? VideoFrameType::kVideoFrameKey
                             : VideoFrameType::kVideoFrameDelta,
              encoded._frameType);
  }
}
314 
// Run every test twice: with and without augmenting-data support.
INSTANTIATE_TEST_SUITE_P(TestMultiplexAdapter,
                         TestMultiplexAdapter,
                         ::testing::Bool());
318 
319 }  // namespace webrtc
320