1 /*
2 * Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10 #include "video/video_quality_test.h"
11
12 #include <stdio.h>
13
14 #if defined(WEBRTC_WIN)
15 #include <conio.h>
16 #endif
17
18 #include <algorithm>
19 #include <deque>
20 #include <map>
21 #include <memory>
22 #include <string>
23 #include <vector>
24
25 #include "api/fec_controller_override.h"
26 #include "api/rtc_event_log_output_file.h"
27 #include "api/task_queue/default_task_queue_factory.h"
28 #include "api/task_queue/task_queue_base.h"
29 #include "api/test/create_frame_generator.h"
30 #include "api/video/builtin_video_bitrate_allocator_factory.h"
31 #include "api/video_codecs/video_encoder.h"
32 #include "call/fake_network_pipe.h"
33 #include "call/simulated_network.h"
34 #include "media/base/media_constants.h"
35 #include "media/engine/adm_helpers.h"
36 #include "media/engine/encoder_simulcast_proxy.h"
37 #include "media/engine/fake_video_codec_factory.h"
38 #include "media/engine/internal_encoder_factory.h"
39 #include "media/engine/webrtc_video_engine.h"
40 #include "modules/audio_device/include/audio_device.h"
41 #include "modules/audio_mixer/audio_mixer_impl.h"
42 #include "modules/video_coding/codecs/h264/include/h264.h"
43 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
44 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
45 #include "modules/video_coding/codecs/vp8/include/vp8.h"
46 #include "modules/video_coding/codecs/vp9/include/vp9.h"
47 #include "modules/video_coding/utility/ivf_file_writer.h"
48 #include "rtc_base/strings/string_builder.h"
49 #include "rtc_base/task_queue_for_test.h"
50 #include "test/platform_video_capturer.h"
51 #include "test/testsupport/file_utils.h"
52 #include "test/video_renderer.h"
53 #include "video/frame_dumping_decoder.h"
54 #ifdef WEBRTC_WIN
55 #include "modules/audio_device/include/audio_device_factory.h"
56 #endif
57 #include "video/config/encoder_stream_factory.h"
58
59 namespace webrtc {
60
61 namespace {
62 enum : int { // The first valid value is 1.
63 kAbsSendTimeExtensionId = 1,
64 kGenericFrameDescriptorExtensionId00,
65 kGenericFrameDescriptorExtensionId01,
66 kTransportSequenceNumberExtensionId,
67 kVideoContentTypeExtensionId,
68 kVideoTimingExtensionId,
69 };
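// Since the enum starts at 1 and the remaining enumerators take consecutive
// values, the resulting extension IDs are: kAbsSendTimeExtensionId = 1,
// kGenericFrameDescriptorExtensionId00 = 2,
// kGenericFrameDescriptorExtensionId01 = 3,
// kTransportSequenceNumberExtensionId = 4, kVideoContentTypeExtensionId = 5,
// kVideoTimingExtensionId = 6.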
70
71 constexpr char kSyncGroup[] = "av_sync";
72 constexpr int kOpusMinBitrateBps = 6000;
73 constexpr int kOpusBitrateFbBps = 32000;
74 constexpr int kFramesSentInQuickTest = 1;
75 constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000;
76 constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000;
77
78 constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
79
80 const VideoEncoder::Capabilities kCapabilities(false);
81
82 std::pair<uint32_t, uint32_t> GetMinMaxBitratesBps(const VideoCodec& codec,
83 size_t spatial_idx) {
84 uint32_t min_bitrate = codec.minBitrate;
85 uint32_t max_bitrate = codec.maxBitrate;
86 if (spatial_idx < codec.numberOfSimulcastStreams) {
87 min_bitrate =
88 std::max(min_bitrate, codec.simulcastStream[spatial_idx].minBitrate);
89 max_bitrate =
90 std::min(max_bitrate, codec.simulcastStream[spatial_idx].maxBitrate);
91 }
92 if (codec.codecType == VideoCodecType::kVideoCodecVP9 &&
93 spatial_idx < codec.VP9().numberOfSpatialLayers) {
94 min_bitrate =
95 std::max(min_bitrate, codec.spatialLayers[spatial_idx].minBitrate);
96 max_bitrate =
97 std::min(max_bitrate, codec.spatialLayers[spatial_idx].maxBitrate);
98 }
99 max_bitrate = std::max(max_bitrate, min_bitrate);
100 return {min_bitrate * 1000, max_bitrate * 1000};
101 }
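// Worked example of the clamping above (the numbers are hypothetical, not
// taken from any real configuration): for a codec with minBitrate = 30 and
// maxBitrate = 2000 (kbps), two simulcast streams, and simulcastStream[1] =
// {minBitrate: 150, maxBitrate: 500}, GetMinMaxBitratesBps(codec,
// /*spatial_idx=*/1) clamps to the per-stream limits and returns
// {150000, 500000} (bps).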
102
103 class VideoStreamFactory
104 : public VideoEncoderConfig::VideoStreamFactoryInterface {
105 public:
106 explicit VideoStreamFactory(const std::vector<VideoStream>& streams)
107 : streams_(streams) {}
108
109 private:
110 std::vector<VideoStream> CreateEncoderStreams(
111 int frame_width,
112 int frame_height,
113 const VideoEncoderConfig& encoder_config) override {
114 // The highest layer must match the incoming resolution.
115 std::vector<VideoStream> streams = streams_;
116 streams[streams_.size() - 1].height = frame_height;
117 streams[streams_.size() - 1].width = frame_width;
118
119 streams[0].bitrate_priority = encoder_config.bitrate_priority;
120 return streams;
121 }
122
123 std::vector<VideoStream> streams_;
124 };
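// Minimal usage sketch (commented out; the resolution values are chosen for
// illustration only). This mirrors how SetupThumbnails() below wires the
// factory into a VideoEncoderConfig; the top (here: only) layer gets its
// resolution overwritten by the incoming frame size:
//
//   std::vector<VideoStream> streams{
//       VideoQualityTest::DefaultThumbnailStream()};
//   rtc::scoped_refptr<VideoEncoderConfig::VideoStreamFactoryInterface>
//       factory = rtc::make_ref_counted<VideoStreamFactory>(streams);
//   VideoEncoderConfig config;
//   std::vector<VideoStream> out =
//       factory->CreateEncoderStreams(/*frame_width=*/640,
//                                     /*frame_height=*/360, config);
//   // out[0].width == 640, out[0].height == 360.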
125
126 // This wrapper provides two features needed by the video quality tests:
127 // 1. Invoke VideoAnalyzer callbacks before and after encoding each frame.
128 // 2. Write the encoded frames to file, one file per simulcast layer.
129 class QualityTestVideoEncoder : public VideoEncoder,
130 private EncodedImageCallback {
131 public:
132 QualityTestVideoEncoder(std::unique_ptr<VideoEncoder> encoder,
133 VideoAnalyzer* analyzer,
134 std::vector<FileWrapper> files,
135 double overshoot_factor)
136 : encoder_(std::move(encoder)),
137 overshoot_factor_(overshoot_factor),
138 analyzer_(analyzer) {
139 for (FileWrapper& file : files) {
140 writers_.push_back(
141 IvfFileWriter::Wrap(std::move(file), /* byte_limit= */ 100000000));
142 }
143 }
144
145 // Implement VideoEncoder
146 void SetFecControllerOverride(
147 FecControllerOverride* fec_controller_override) {
148 // Ignored.
149 }
150
151 int32_t InitEncode(const VideoCodec* codec_settings,
152 const Settings& settings) override {
153 codec_settings_ = *codec_settings;
154 return encoder_->InitEncode(codec_settings, settings);
155 }
156
157 int32_t RegisterEncodeCompleteCallback(
158 EncodedImageCallback* callback) override {
159 callback_ = callback;
160 return encoder_->RegisterEncodeCompleteCallback(this);
161 }
162
163 int32_t Release() override { return encoder_->Release(); }
164
165 int32_t Encode(const VideoFrame& frame,
166 const std::vector<VideoFrameType>* frame_types) {
167 if (analyzer_) {
168 analyzer_->PreEncodeOnFrame(frame);
169 }
170 return encoder_->Encode(frame, frame_types);
171 }
172
173 void SetRates(const RateControlParameters& parameters) override {
174 RTC_DCHECK_GT(overshoot_factor_, 0.0);
175 if (overshoot_factor_ == 1.0) {
176 encoder_->SetRates(parameters);
177 return;
178 }
179
180 // Simulate the encoder overshooting the target bitrate by configuring the
181 // actual encoder too high. Take care not to adjust past the limits of the
182 // config, otherwise encoders may crash on a DCHECK.
183 VideoBitrateAllocation overshot_allocation;
184 for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
185 const uint32_t spatial_layer_bitrate_bps =
186 parameters.bitrate.GetSpatialLayerSum(si);
187 if (spatial_layer_bitrate_bps == 0) {
188 continue;
189 }
190
191 uint32_t min_bitrate_bps;
192 uint32_t max_bitrate_bps;
193 std::tie(min_bitrate_bps, max_bitrate_bps) =
194 GetMinMaxBitratesBps(codec_settings_, si);
195 double overshoot_factor = overshoot_factor_;
196 const uint32_t corrected_bitrate = rtc::checked_cast<uint32_t>(
197 overshoot_factor * spatial_layer_bitrate_bps);
198 if (corrected_bitrate < min_bitrate_bps) {
199 overshoot_factor = min_bitrate_bps / spatial_layer_bitrate_bps;
200 } else if (corrected_bitrate > max_bitrate_bps) {
201 overshoot_factor = max_bitrate_bps / spatial_layer_bitrate_bps;
202 }
203
204 for (size_t ti = 0; ti < kMaxTemporalStreams; ++ti) {
205 if (parameters.bitrate.HasBitrate(si, ti)) {
206 overshot_allocation.SetBitrate(
207 si, ti,
208 rtc::checked_cast<uint32_t>(
209 overshoot_factor * parameters.bitrate.GetBitrate(si, ti)));
210 }
211 }
212 }
213
214 return encoder_->SetRates(
215 RateControlParameters(overshot_allocation, parameters.framerate_fps,
216 parameters.bandwidth_allocation));
217 }
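// Worked example of the clamping above (hypothetical numbers): with
// overshoot_factor_ = 3.0 and a spatial layer whose allocation sums to
// 200 kbps but whose max bitrate is 400 kbps, the corrected bitrate
// (600 kbps) exceeds the max, so the factor is reduced to 400 / 200 = 2.0
// and every (si, ti) bitrate in that layer is scaled by 2 instead of 3.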
218
219 void OnPacketLossRateUpdate(float packet_loss_rate) override {
220 encoder_->OnPacketLossRateUpdate(packet_loss_rate);
221 }
222
223 void OnRttUpdate(int64_t rtt_ms) override { encoder_->OnRttUpdate(rtt_ms); }
224
225 void OnLossNotification(const LossNotification& loss_notification) override {
226 encoder_->OnLossNotification(loss_notification);
227 }
228
229 EncoderInfo GetEncoderInfo() const override {
230 EncoderInfo info = encoder_->GetEncoderInfo();
231 if (overshoot_factor_ != 1.0) {
232 // We're simulating a bad encoder; don't forward the trusted setting
233 // from e.g. libvpx.
234 info.has_trusted_rate_controller = false;
235 }
236 return info;
237 }
238
239 private:
240 // Implement EncodedImageCallback
241 Result OnEncodedImage(const EncodedImage& encoded_image,
242 const CodecSpecificInfo* codec_specific_info) override {
243 if (codec_specific_info) {
244 int simulcast_index;
245 if (codec_specific_info->codecType == kVideoCodecVP9) {
246 simulcast_index = 0;
247 } else {
248 simulcast_index = encoded_image.SpatialIndex().value_or(0);
249 }
250 RTC_DCHECK_GE(simulcast_index, 0);
251 if (analyzer_) {
252 analyzer_->PostEncodeOnFrame(simulcast_index,
253 encoded_image.Timestamp());
254 }
255 if (static_cast<size_t>(simulcast_index) < writers_.size()) {
256 writers_[simulcast_index]->WriteFrame(encoded_image,
257 codec_specific_info->codecType);
258 }
259 }
260
261 return callback_->OnEncodedImage(encoded_image, codec_specific_info);
262 }
263
264 void OnDroppedFrame(DropReason reason) override {
265 callback_->OnDroppedFrame(reason);
266 }
267
268 const std::unique_ptr<VideoEncoder> encoder_;
269 const double overshoot_factor_;
270 VideoAnalyzer* const analyzer_;
271 std::vector<std::unique_ptr<IvfFileWriter>> writers_;
272 EncodedImageCallback* callback_ = nullptr;
273 VideoCodec codec_settings_;
274 };
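// Illustrative construction sketch (commented out; the file name, overshoot
// factor, and the raw_encoder/analyzer variables are assumptions for the
// example). CreateVideoEncoder() further down builds the wrapper in
// essentially this way when an analyzer, dump files, or an overshoot factor
// is configured:
//
//   std::vector<FileWrapper> files;
//   files.push_back(FileWrapper::OpenWriteOnly("dump.0.send.1.ivf"));
//   std::unique_ptr<VideoEncoder> wrapped =
//       std::make_unique<QualityTestVideoEncoder>(std::move(raw_encoder),
//                                                 analyzer, std::move(files),
//                                                 /*overshoot_factor=*/1.1);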
275
276 #if defined(WEBRTC_WIN) && !defined(WINUWP)
277 void PressEnterToContinue(TaskQueueBase* task_queue) {
278 puts(">> Press ENTER to continue...");
279
280 while (!_kbhit() || _getch() != '\r') {
281 // Drive the message loop for the thread running the task_queue
282 SendTask(task_queue, [&]() {
283 MSG msg;
284 if (PeekMessage(&msg, NULL, 0, 0, PM_REMOVE)) {
285 TranslateMessage(&msg);
286 DispatchMessage(&msg);
287 }
288 });
289 }
290 }
291 #else
292 void PressEnterToContinue(TaskQueueBase* /*task_queue*/) {
293 puts(">> Press ENTER to continue...");
294 while (getc(stdin) != '\n' && !feof(stdin))
295 ; // NOLINT
296 }
297 #endif
298
299 } // namespace
300
301 std::unique_ptr<VideoDecoder> VideoQualityTest::CreateVideoDecoder(
302 const SdpVideoFormat& format) {
303 std::unique_ptr<VideoDecoder> decoder;
304 if (format.name == "multiplex") {
305 decoder = std::make_unique<MultiplexDecoderAdapter>(
306 decoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName));
307 } else if (format.name == "FakeCodec") {
308 decoder = webrtc::FakeVideoDecoderFactory::CreateVideoDecoder();
309 } else {
310 decoder = decoder_factory_->CreateVideoDecoder(format);
311 }
312 if (!params_.logging.encoded_frame_base_path.empty()) {
313 rtc::StringBuilder str;
314 str << receive_logs_++;
315 std::string path =
316 params_.logging.encoded_frame_base_path + "." + str.str() + ".recv.ivf";
317 decoder = CreateFrameDumpingDecoderWrapper(
318 std::move(decoder), FileWrapper::OpenWriteOnly(path));
319 }
320 return decoder;
321 }
322
323 std::unique_ptr<VideoEncoder> VideoQualityTest::CreateVideoEncoder(
324 const SdpVideoFormat& format,
325 VideoAnalyzer* analyzer) {
326 std::unique_ptr<VideoEncoder> encoder;
327 if (format.name == "VP8") {
328 encoder =
329 std::make_unique<EncoderSimulcastProxy>(encoder_factory_.get(), format);
330 } else if (format.name == "multiplex") {
331 encoder = std::make_unique<MultiplexEncoderAdapter>(
332 encoder_factory_.get(), SdpVideoFormat(cricket::kVp9CodecName));
333 } else if (format.name == "FakeCodec") {
334 encoder = webrtc::FakeVideoEncoderFactory::CreateVideoEncoder();
335 } else {
336 encoder = encoder_factory_->CreateVideoEncoder(format);
337 }
338
339 std::vector<FileWrapper> encoded_frame_dump_files;
340 if (!params_.logging.encoded_frame_base_path.empty()) {
341 char ss_buf[100];
342 rtc::SimpleStringBuilder sb(ss_buf);
343 sb << send_logs_++;
344 std::string prefix =
345 params_.logging.encoded_frame_base_path + "." + sb.str() + ".send.";
346 encoded_frame_dump_files.push_back(
347 FileWrapper::OpenWriteOnly(prefix + "1.ivf"));
348 encoded_frame_dump_files.push_back(
349 FileWrapper::OpenWriteOnly(prefix + "2.ivf"));
350 encoded_frame_dump_files.push_back(
351 FileWrapper::OpenWriteOnly(prefix + "3.ivf"));
352 }
353
354 double overshoot_factor = 1.0;
355 // Match format to either of the streams in dual-stream mode in order to get
356 // the overshoot factor. This is not very robust but we can't know for sure
357 // which stream this encoder is meant for, from within the factory.
358 if (format ==
359 SdpVideoFormat(params_.video[0].codec, params_.video[0].sdp_params)) {
360 overshoot_factor = params_.video[0].encoder_overshoot_factor;
361 } else if (format == SdpVideoFormat(params_.video[1].codec,
362 params_.video[1].sdp_params)) {
363 overshoot_factor = params_.video[1].encoder_overshoot_factor;
364 }
365 if (overshoot_factor == 0.0) {
366 // If params were zero-initialized, set to 1.0 instead.
367 overshoot_factor = 1.0;
368 }
369
370 if (analyzer || !encoded_frame_dump_files.empty() || overshoot_factor > 1.0) {
371 encoder = std::make_unique<QualityTestVideoEncoder>(
372 std::move(encoder), analyzer, std::move(encoded_frame_dump_files),
373 overshoot_factor);
374 }
375
376 return encoder;
377 }
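// Example of the resulting dump file names (assuming
// logging.encoded_frame_base_path is "dump" and this is the first send
// encoder, i.e. send_logs_ was 0): "dump.0.send.1.ivf", "dump.0.send.2.ivf"
// and "dump.0.send.3.ivf", one file per simulcast layer. Note that a
// zero-initialized encoder_overshoot_factor is treated as 1.0, i.e. no
// overshoot simulation.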
378
379 VideoQualityTest::VideoQualityTest(
380 std::unique_ptr<InjectionComponents> injection_components)
381 : clock_(Clock::GetRealTimeClock()),
382 task_queue_factory_(CreateDefaultTaskQueueFactory()),
383 rtc_event_log_factory_(task_queue_factory_.get()),
384 video_decoder_factory_([this](const SdpVideoFormat& format) {
385 return this->CreateVideoDecoder(format);
386 }),
387 video_encoder_factory_([this](const SdpVideoFormat& format) {
388 return this->CreateVideoEncoder(format, nullptr);
389 }),
390 video_encoder_factory_with_analyzer_(
391 [this](const SdpVideoFormat& format) {
392 return this->CreateVideoEncoder(format, analyzer_.get());
393 }),
394 video_bitrate_allocator_factory_(
395 CreateBuiltinVideoBitrateAllocatorFactory()),
396 receive_logs_(0),
397 send_logs_(0),
398 injection_components_(std::move(injection_components)),
399 num_video_streams_(0) {
400 if (injection_components_ == nullptr) {
401 injection_components_ = std::make_unique<InjectionComponents>();
402 }
403 if (injection_components_->video_decoder_factory != nullptr) {
404 decoder_factory_ = std::move(injection_components_->video_decoder_factory);
405 } else {
406 decoder_factory_ = std::make_unique<InternalDecoderFactory>();
407 }
408 if (injection_components_->video_encoder_factory != nullptr) {
409 encoder_factory_ = std::move(injection_components_->video_encoder_factory);
410 } else {
411 encoder_factory_ = std::make_unique<InternalEncoderFactory>();
412 }
413
414 payload_type_map_ = test::CallTest::payload_type_map_;
415 RTC_DCHECK(payload_type_map_.find(kPayloadTypeH264) ==
416 payload_type_map_.end());
417 RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP8) ==
418 payload_type_map_.end());
419 RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP9) ==
420 payload_type_map_.end());
421 RTC_DCHECK(payload_type_map_.find(kPayloadTypeGeneric) ==
422 payload_type_map_.end());
423 payload_type_map_[kPayloadTypeH264] = webrtc::MediaType::VIDEO;
424 payload_type_map_[kPayloadTypeVP8] = webrtc::MediaType::VIDEO;
425 payload_type_map_[kPayloadTypeVP9] = webrtc::MediaType::VIDEO;
426 payload_type_map_[kPayloadTypeGeneric] = webrtc::MediaType::VIDEO;
427
428 fec_controller_factory_ =
429 std::move(injection_components_->fec_controller_factory);
430 network_state_predictor_factory_ =
431 std::move(injection_components_->network_state_predictor_factory);
432 network_controller_factory_ =
433 std::move(injection_components_->network_controller_factory);
434 }
435
436 VideoQualityTest::InjectionComponents::InjectionComponents() = default;
437
438 VideoQualityTest::InjectionComponents::~InjectionComponents() = default;
439
440 void VideoQualityTest::TestBody() {}
441
442 std::string VideoQualityTest::GenerateGraphTitle() const {
443 rtc::StringBuilder ss;
444 ss << params_.video[0].codec;
445 ss << " (" << params_.video[0].target_bitrate_bps / 1000 << "kbps";
446 ss << ", " << params_.video[0].fps << " FPS";
447 if (params_.screenshare[0].scroll_duration)
448 ss << ", " << params_.screenshare[0].scroll_duration << "s scroll";
449 if (params_.ss[0].streams.size() > 1)
450 ss << ", Stream #" << params_.ss[0].selected_stream;
451 if (params_.ss[0].num_spatial_layers > 1)
452 ss << ", Layer #" << params_.ss[0].selected_sl;
453 ss << ")";
454 return ss.Release();
455 }
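// Example output (hypothetical parameters): for codec "VP8",
// target_bitrate_bps = 800000, fps = 30, no scrolling, a single stream and a
// single spatial layer, the generated title is "VP8 (800kbps, 30 FPS)".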
456
457 void VideoQualityTest::CheckParamsAndInjectionComponents() {
458 if (injection_components_ == nullptr) {
459 injection_components_ = std::make_unique<InjectionComponents>();
460 }
461 if (!params_.config && injection_components_->sender_network == nullptr &&
462 injection_components_->receiver_network == nullptr) {
463 params_.config = BuiltInNetworkBehaviorConfig();
464 }
465 RTC_CHECK(
466 (params_.config && injection_components_->sender_network == nullptr &&
467 injection_components_->receiver_network == nullptr) ||
468 (!params_.config && injection_components_->sender_network != nullptr &&
469 injection_components_->receiver_network != nullptr));
470 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
471 // Iterate over primary and secondary video streams.
472 if (!params_.video[video_idx].enabled)
473 return;
474 // Add a default stream if none is specified.
475 if (params_.ss[video_idx].streams.empty())
476 params_.ss[video_idx].streams.push_back(
477 VideoQualityTest::DefaultVideoStream(params_, video_idx));
478 if (params_.ss[video_idx].num_spatial_layers == 0)
479 params_.ss[video_idx].num_spatial_layers = 1;
480
481 if (params_.config) {
482 if (params_.config->loss_percent != 0 ||
483 params_.config->queue_length_packets != 0) {
484 // Since LayerFilteringTransport changes the sequence numbers, we can't
485 // use that feature with packet loss, since the NACK request would end up
486 // retransmitting the wrong packets.
487 RTC_CHECK(params_.ss[video_idx].selected_sl == -1 ||
488 params_.ss[video_idx].selected_sl ==
489 params_.ss[video_idx].num_spatial_layers - 1);
490 RTC_CHECK(params_.video[video_idx].selected_tl == -1 ||
491 params_.video[video_idx].selected_tl ==
492 params_.video[video_idx].num_temporal_layers - 1);
493 }
494 }
495
496 // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as
497 // it does in some parts of the code?
498 RTC_CHECK_GE(params_.video[video_idx].max_bitrate_bps,
499 params_.video[video_idx].target_bitrate_bps);
500 RTC_CHECK_GE(params_.video[video_idx].target_bitrate_bps,
501 params_.video[video_idx].min_bitrate_bps);
502 int selected_stream = params_.ss[video_idx].selected_stream;
503 if (params_.video[video_idx].selected_tl > -1) {
504 RTC_CHECK_LT(selected_stream, params_.ss[video_idx].streams.size())
505 << "Can not use --selected_tl when --selected_stream is all streams";
506 int stream_tl = params_.ss[video_idx]
507 .streams[selected_stream]
508 .num_temporal_layers.value_or(1);
509 RTC_CHECK_LT(params_.video[video_idx].selected_tl, stream_tl);
510 }
511 RTC_CHECK_LE(params_.ss[video_idx].selected_stream,
512 params_.ss[video_idx].streams.size());
513 for (const VideoStream& stream : params_.ss[video_idx].streams) {
514 RTC_CHECK_GE(stream.min_bitrate_bps, 0);
515 RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
516 RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
517 }
518 // TODO(ivica): Should we check if the sum of all streams/layers is equal to
519 // the total bitrate? We have to update them anyway in case the bitrate
520 // estimator changes the total bitrate.
521 RTC_CHECK_GE(params_.ss[video_idx].num_spatial_layers, 1);
522 RTC_CHECK_LE(params_.ss[video_idx].selected_sl,
523 params_.ss[video_idx].num_spatial_layers);
524 RTC_CHECK(
525 params_.ss[video_idx].spatial_layers.empty() ||
526 params_.ss[video_idx].spatial_layers.size() ==
527 static_cast<size_t>(params_.ss[video_idx].num_spatial_layers));
528 if (params_.video[video_idx].codec == "VP8") {
529 RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
530 } else if (params_.video[video_idx].codec == "VP9") {
531 RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 1);
532 }
533 RTC_CHECK_GE(params_.call.num_thumbnails, 0);
534 if (params_.call.num_thumbnails > 0) {
535 RTC_CHECK_EQ(params_.ss[video_idx].num_spatial_layers, 1);
536 RTC_CHECK_EQ(params_.ss[video_idx].streams.size(), 3);
537 RTC_CHECK_EQ(params_.video[video_idx].num_temporal_layers, 3);
538 RTC_CHECK_EQ(params_.video[video_idx].codec, "VP8");
539 }
540 // Dual streams with FEC not supported in tests yet.
541 RTC_CHECK(!params_.video[video_idx].flexfec || num_video_streams_ == 1);
542 RTC_CHECK(!params_.video[video_idx].ulpfec || num_video_streams_ == 1);
543 }
544 }
545
546 // Static.
547 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
548 // Parse comma separated nonnegative integers, where some elements may be
549 // empty. The empty values are replaced with -1.
550 // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40}
551 // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
552 std::vector<int> result;
553 if (str.empty())
554 return result;
555
556 const char* p = str.c_str();
557 int value = -1;
558 int pos;
559 while (*p) {
560 if (*p == ',') {
561 result.push_back(value);
562 value = -1;
563 ++p;
564 continue;
565 }
566 RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
567 << "Unexpected non-number value.";
568 p += pos;
569 }
570 result.push_back(value);
571 return result;
572 }
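// Example (illustrative input): ParseCSV("1280,720,30,,,2500000") returns
// {1280, 720, 30, -1, -1, 2500000}; the two empty fields become -1, which
// FillScalabilitySettings() below interprets as "use the default value".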
573
574 // Static.
575 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params,
576 size_t video_idx) {
577 VideoStream stream;
578 stream.width = params.video[video_idx].width;
579 stream.height = params.video[video_idx].height;
580 stream.max_framerate = params.video[video_idx].fps;
581 stream.min_bitrate_bps = params.video[video_idx].min_bitrate_bps;
582 stream.target_bitrate_bps = params.video[video_idx].target_bitrate_bps;
583 stream.max_bitrate_bps = params.video[video_idx].max_bitrate_bps;
584 stream.max_qp = kDefaultMaxQp;
585 stream.num_temporal_layers = params.video[video_idx].num_temporal_layers;
586 stream.active = true;
587 return stream;
588 }
589
590 // Static.
591 VideoStream VideoQualityTest::DefaultThumbnailStream() {
592 VideoStream stream;
593 stream.width = 320;
594 stream.height = 180;
595 stream.max_framerate = 7;
596 stream.min_bitrate_bps = 7500;
597 stream.target_bitrate_bps = 37500;
598 stream.max_bitrate_bps = 50000;
599 stream.max_qp = kDefaultMaxQp;
600 return stream;
601 }
602
603 // Static.
604 void VideoQualityTest::FillScalabilitySettings(
605 Params* params,
606 size_t video_idx,
607 const std::vector<std::string>& stream_descriptors,
608 int num_streams,
609 size_t selected_stream,
610 int num_spatial_layers,
611 int selected_sl,
612 InterLayerPredMode inter_layer_pred,
613 const std::vector<std::string>& sl_descriptors) {
614 if (params->ss[video_idx].streams.empty() &&
615 params->ss[video_idx].infer_streams) {
616 webrtc::VideoEncoder::EncoderInfo encoder_info;
617 webrtc::VideoEncoderConfig encoder_config;
618 encoder_config.codec_type =
619 PayloadStringToCodecType(params->video[video_idx].codec);
620 encoder_config.content_type =
621 params->screenshare[video_idx].enabled
622 ? webrtc::VideoEncoderConfig::ContentType::kScreen
623 : webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
624 encoder_config.max_bitrate_bps = params->video[video_idx].max_bitrate_bps;
625 encoder_config.min_transmit_bitrate_bps =
626 params->video[video_idx].min_transmit_bps;
627 encoder_config.number_of_streams = num_streams;
628 encoder_config.spatial_layers = params->ss[video_idx].spatial_layers;
629 encoder_config.simulcast_layers = std::vector<VideoStream>(num_streams);
630 encoder_config.video_stream_factory =
631 rtc::make_ref_counted<cricket::EncoderStreamFactory>(
632 params->video[video_idx].codec, kDefaultMaxQp,
633 params->screenshare[video_idx].enabled, true, encoder_info);
634 params->ss[video_idx].streams =
635 encoder_config.video_stream_factory->CreateEncoderStreams(
636 params->video[video_idx].width, params->video[video_idx].height,
637 encoder_config);
638 } else {
639 // Read VideoStream and SpatialLayer elements from a list of comma separated
640 // lists. To use a default value for an element, use -1 or leave empty.
641 // Validity checks performed in CheckParamsAndInjectionComponents.
642 RTC_CHECK(params->ss[video_idx].streams.empty());
643 for (const auto& descriptor : stream_descriptors) {
644 if (descriptor.empty())
645 continue;
646 VideoStream stream =
647 VideoQualityTest::DefaultVideoStream(*params, video_idx);
648 std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
649 if (v[0] != -1)
650 stream.width = static_cast<size_t>(v[0]);
651 if (v[1] != -1)
652 stream.height = static_cast<size_t>(v[1]);
653 if (v[2] != -1)
654 stream.max_framerate = v[2];
655 if (v[3] != -1)
656 stream.min_bitrate_bps = v[3];
657 if (v[4] != -1)
658 stream.target_bitrate_bps = v[4];
659 if (v[5] != -1)
660 stream.max_bitrate_bps = v[5];
661 if (v.size() > 6 && v[6] != -1)
662 stream.max_qp = v[6];
663 if (v.size() > 7 && v[7] != -1) {
664 stream.num_temporal_layers = v[7];
665 } else {
666 // Automatic TL thresholds for more than two layers not supported.
667 RTC_CHECK_LE(params->video[video_idx].num_temporal_layers, 2);
668 }
669 params->ss[video_idx].streams.push_back(stream);
670 }
671 }
672
673 params->ss[video_idx].num_spatial_layers = std::max(1, num_spatial_layers);
674 params->ss[video_idx].selected_stream = selected_stream;
675
676 params->ss[video_idx].selected_sl = selected_sl;
677 params->ss[video_idx].inter_layer_pred = inter_layer_pred;
678 RTC_CHECK(params->ss[video_idx].spatial_layers.empty());
679 for (const auto& descriptor : sl_descriptors) {
680 if (descriptor.empty())
681 continue;
682 std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
683 RTC_CHECK_EQ(v.size(), 8);
684
685 SpatialLayer layer = {0};
686 layer.width = v[0];
687 layer.height = v[1];
688 layer.maxFramerate = v[2];
689 layer.numberOfTemporalLayers = v[3];
690 layer.maxBitrate = v[4];
691 layer.minBitrate = v[5];
692 layer.targetBitrate = v[6];
693 layer.qpMax = v[7];
694 layer.active = true;
695
696 params->ss[video_idx].spatial_layers.push_back(layer);
697 }
698 }
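// Descriptor formats as consumed above (the example values are made up):
// - Stream descriptor:
//   "width,height,max_framerate,min_bitrate_bps,target_bitrate_bps,
//    max_bitrate_bps[,max_qp[,num_temporal_layers]]",
//   e.g. "1280,720,30,800000,1200000,1500000".
// - Spatial layer descriptor (exactly 8 fields; note that maxBitrate comes
//   before minBitrate and targetBitrate):
//   "width,height,maxFramerate,numberOfTemporalLayers,maxBitrate,minBitrate,
//    targetBitrate,qpMax", e.g. "640,360,30,3,500,150,400,56".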
699
700 void VideoQualityTest::SetupVideo(Transport* send_transport,
701 Transport* recv_transport) {
702 size_t total_streams_used = 0;
703 video_receive_configs_.clear();
704 video_send_configs_.clear();
705 video_encoder_configs_.clear();
706 bool decode_all_receive_streams = true;
707 size_t num_video_substreams = params_.ss[0].streams.size();
708 RTC_CHECK(num_video_streams_ > 0);
709 video_encoder_configs_.resize(num_video_streams_);
710 std::string generic_codec_name;
711 webrtc::VideoEncoder::EncoderInfo encoder_info;
712 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
713 video_send_configs_.push_back(VideoSendStream::Config(send_transport));
714 video_encoder_configs_.push_back(VideoEncoderConfig());
715 num_video_substreams = params_.ss[video_idx].streams.size();
716 RTC_CHECK_GT(num_video_substreams, 0);
717 for (size_t i = 0; i < num_video_substreams; ++i)
718 video_send_configs_[video_idx].rtp.ssrcs.push_back(
719 kVideoSendSsrcs[total_streams_used + i]);
720
721 int payload_type;
722 if (params_.video[video_idx].codec == "H264") {
723 payload_type = kPayloadTypeH264;
724 } else if (params_.video[video_idx].codec == "VP8") {
725 payload_type = kPayloadTypeVP8;
726 } else if (params_.video[video_idx].codec == "VP9") {
727 payload_type = kPayloadTypeVP9;
728 } else if (params_.video[video_idx].codec == "multiplex") {
729 payload_type = kPayloadTypeVP9;
730 } else if (params_.video[video_idx].codec == "FakeCodec") {
731 payload_type = kFakeVideoSendPayloadType;
732 } else {
733 RTC_CHECK(generic_codec_name.empty() ||
734 generic_codec_name == params_.video[video_idx].codec)
735 << "Supplying multiple generic codecs is unsupported.";
736 RTC_LOG(LS_INFO) << "Treating codec " << params_.video[video_idx].codec
737 << " as generic.";
738 payload_type = kPayloadTypeGeneric;
739 generic_codec_name = params_.video[video_idx].codec;
740 }
741 video_send_configs_[video_idx].encoder_settings.encoder_factory =
742 (video_idx == 0) ? &video_encoder_factory_with_analyzer_
743 : &video_encoder_factory_;
744 video_send_configs_[video_idx].encoder_settings.bitrate_allocator_factory =
745 video_bitrate_allocator_factory_.get();
746
747 video_send_configs_[video_idx].rtp.payload_name =
748 params_.video[video_idx].codec;
749 video_send_configs_[video_idx].rtp.payload_type = payload_type;
750 video_send_configs_[video_idx].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
751 video_send_configs_[video_idx].rtp.rtx.payload_type = kSendRtxPayloadType;
752 for (size_t i = 0; i < num_video_substreams; ++i) {
753 video_send_configs_[video_idx].rtp.rtx.ssrcs.push_back(
754 kSendRtxSsrcs[i + total_streams_used]);
755 }
756 video_send_configs_[video_idx].rtp.extensions.clear();
757 if (params_.call.send_side_bwe) {
758 video_send_configs_[video_idx].rtp.extensions.emplace_back(
759 RtpExtension::kTransportSequenceNumberUri,
760 kTransportSequenceNumberExtensionId);
761 } else {
762 video_send_configs_[video_idx].rtp.extensions.emplace_back(
763 RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId);
764 }
765
766 if (params_.call.generic_descriptor) {
767 video_send_configs_[video_idx].rtp.extensions.emplace_back(
768 RtpExtension::kGenericFrameDescriptorUri00,
769 kGenericFrameDescriptorExtensionId00);
770 }
771
772 video_send_configs_[video_idx].rtp.extensions.emplace_back(
773 RtpExtension::kVideoContentTypeUri, kVideoContentTypeExtensionId);
774 video_send_configs_[video_idx].rtp.extensions.emplace_back(
775 RtpExtension::kVideoTimingUri, kVideoTimingExtensionId);
776
777 video_encoder_configs_[video_idx].video_format.name =
778 params_.video[video_idx].codec;
779
780 video_encoder_configs_[video_idx].video_format.parameters =
781 params_.video[video_idx].sdp_params;
782
783 video_encoder_configs_[video_idx].codec_type =
784 PayloadStringToCodecType(params_.video[video_idx].codec);
785
786 video_encoder_configs_[video_idx].min_transmit_bitrate_bps =
787 params_.video[video_idx].min_transmit_bps;
788
789 video_send_configs_[video_idx].suspend_below_min_bitrate =
790 params_.video[video_idx].suspend_below_min_bitrate;
791
792 video_encoder_configs_[video_idx].number_of_streams =
793 params_.ss[video_idx].streams.size();
794 video_encoder_configs_[video_idx].max_bitrate_bps = 0;
795 for (size_t i = 0; i < params_.ss[video_idx].streams.size(); ++i) {
796 video_encoder_configs_[video_idx].max_bitrate_bps +=
797 params_.ss[video_idx].streams[i].max_bitrate_bps;
798 }
799 video_encoder_configs_[video_idx].simulcast_layers =
800 std::vector<VideoStream>(params_.ss[video_idx].streams.size());
801 if (!params_.ss[video_idx].infer_streams) {
802 video_encoder_configs_[video_idx].simulcast_layers =
803 params_.ss[video_idx].streams;
804 }
805 video_encoder_configs_[video_idx].video_stream_factory =
806 rtc::make_ref_counted<cricket::EncoderStreamFactory>(
807 params_.video[video_idx].codec,
808 params_.ss[video_idx].streams[0].max_qp,
809 params_.screenshare[video_idx].enabled, true, encoder_info);
810
811 video_encoder_configs_[video_idx].spatial_layers =
812 params_.ss[video_idx].spatial_layers;
813
814 video_encoder_configs_[video_idx].frame_drop_enabled = true;
815
816 decode_all_receive_streams = params_.ss[video_idx].selected_stream ==
817 params_.ss[video_idx].streams.size();
818 absl::optional<int> decode_sub_stream;
819 if (!decode_all_receive_streams)
820 decode_sub_stream = params_.ss[video_idx].selected_stream;
821 CreateMatchingVideoReceiveConfigs(
822 video_send_configs_[video_idx], recv_transport,
823 params_.call.send_side_bwe, &video_decoder_factory_, decode_sub_stream,
824 true, kNackRtpHistoryMs);
825
826 if (params_.screenshare[video_idx].enabled) {
827 // Fill out codec settings.
828 video_encoder_configs_[video_idx].content_type =
829 VideoEncoderConfig::ContentType::kScreen;
830 degradation_preference_ = DegradationPreference::MAINTAIN_RESOLUTION;
831 if (params_.video[video_idx].codec == "VP8") {
832 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
833 vp8_settings.denoisingOn = false;
834 vp8_settings.numberOfTemporalLayers = static_cast<unsigned char>(
835 params_.video[video_idx].num_temporal_layers);
836 video_encoder_configs_[video_idx].encoder_specific_settings =
837 rtc::make_ref_counted<
838 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
839 } else if (params_.video[video_idx].codec == "VP9") {
840 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
841 vp9_settings.denoisingOn = false;
842 vp9_settings.automaticResizeOn = false;
843 vp9_settings.numberOfTemporalLayers = static_cast<unsigned char>(
844 params_.video[video_idx].num_temporal_layers);
845 vp9_settings.numberOfSpatialLayers = static_cast<unsigned char>(
846 params_.ss[video_idx].num_spatial_layers);
847 vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
848 // High FPS vp9 screenshare requires flexible mode.
849 if (params_.ss[video_idx].num_spatial_layers > 1) {
850 vp9_settings.flexibleMode = true;
851 }
852 video_encoder_configs_[video_idx].encoder_specific_settings =
853 rtc::make_ref_counted<
854 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
855 }
856 } else if (params_.ss[video_idx].num_spatial_layers > 1) {
857 // In SVC mode without screenshare, we still need to set codec specifics.
858 RTC_CHECK(params_.video[video_idx].codec == "VP9");
859 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
860 vp9_settings.numberOfTemporalLayers = static_cast<unsigned char>(
861 params_.video[video_idx].num_temporal_layers);
862 vp9_settings.numberOfSpatialLayers =
863 static_cast<unsigned char>(params_.ss[video_idx].num_spatial_layers);
864 vp9_settings.interLayerPred = params_.ss[video_idx].inter_layer_pred;
865 vp9_settings.automaticResizeOn = false;
866 video_encoder_configs_[video_idx].encoder_specific_settings =
867 rtc::make_ref_counted<VideoEncoderConfig::Vp9EncoderSpecificSettings>(
868 vp9_settings);
869 RTC_DCHECK_EQ(video_encoder_configs_[video_idx].simulcast_layers.size(),
870 1);
871 // Min bitrate will be enforced by spatial layer config instead.
872 video_encoder_configs_[video_idx].simulcast_layers[0].min_bitrate_bps = 0;
873 } else if (params_.video[video_idx].automatic_scaling) {
874 if (params_.video[video_idx].codec == "VP8") {
875 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
876 vp8_settings.automaticResizeOn = true;
877 video_encoder_configs_[video_idx].encoder_specific_settings =
878 rtc::make_ref_counted<
879 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
880 } else if (params_.video[video_idx].codec == "VP9") {
881 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
882 // Only enable quality scaler for single spatial layer.
883 vp9_settings.automaticResizeOn =
884 params_.ss[video_idx].num_spatial_layers == 1;
885 video_encoder_configs_[video_idx].encoder_specific_settings =
886 rtc::make_ref_counted<
887 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
888 } else if (params_.video[video_idx].codec == "H264") {
889 // Quality scaling is always on for H.264.
890 } else if (params_.video[video_idx].codec == cricket::kAv1CodecName) {
891 // TODO(bugs.webrtc.org/11404): Propagate the flag to
892 // aom_codec_enc_cfg_t::rc_resize_mode in Av1 encoder wrapper.
893 // Until then do nothing; especially, do not crash.
894 } else {
895 RTC_DCHECK_NOTREACHED()
896 << "Automatic scaling not supported for codec "
897 << params_.video[video_idx].codec << ", stream " << video_idx;
898 }
899 } else {
900 // Default mode: single spatial layer, no automatic_scaling.
901 if (params_.video[video_idx].codec == "VP8") {
902 VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
903 vp8_settings.automaticResizeOn = false;
904 video_encoder_configs_[video_idx].encoder_specific_settings =
905 rtc::make_ref_counted<
906 VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
907 } else if (params_.video[video_idx].codec == "VP9") {
908 VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
909 vp9_settings.automaticResizeOn = false;
910 video_encoder_configs_[video_idx].encoder_specific_settings =
911 rtc::make_ref_counted<
912 VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
913 } else if (params_.video[video_idx].codec == "H264") {
914 video_encoder_configs_[video_idx].encoder_specific_settings = nullptr;
915 }
916 }
917 total_streams_used += num_video_substreams;
918 }
919
920 // FEC is only supported in single video stream mode so far.
921 if (params_.video[0].flexfec) {
922 if (decode_all_receive_streams) {
923 SetSendFecConfig(GetVideoSendConfig()->rtp.ssrcs);
924 } else {
925 SetSendFecConfig({kVideoSendSsrcs[params_.ss[0].selected_stream]});
926 }
927
928 CreateMatchingFecConfig(recv_transport, *GetVideoSendConfig());
929 GetFlexFecConfig()->rtp.transport_cc = params_.call.send_side_bwe;
930 if (params_.call.send_side_bwe) {
931 GetFlexFecConfig()->rtp.extensions.push_back(
932 RtpExtension(RtpExtension::kTransportSequenceNumberUri,
933 kTransportSequenceNumberExtensionId));
934 } else {
935 GetFlexFecConfig()->rtp.extensions.push_back(
936 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
937 }
938 }
939
940 if (params_.video[0].ulpfec) {
941 SetSendUlpFecConfig(GetVideoSendConfig());
942 if (decode_all_receive_streams) {
943 for (auto& receive_config : video_receive_configs_) {
944 SetReceiveUlpFecConfig(&receive_config);
945 }
946 } else {
947 SetReceiveUlpFecConfig(
948 &video_receive_configs_[params_.ss[0].selected_stream]);
949 }
950 }
951 }
952
953 void VideoQualityTest::SetupThumbnails(Transport* send_transport,
954 Transport* recv_transport) {
955 for (int i = 0; i < params_.call.num_thumbnails; ++i) {
956 // Thumbnails will be sent in the other direction: from receiver_call to
957 // sender_call.
958 VideoSendStream::Config thumbnail_send_config(recv_transport);
959 thumbnail_send_config.rtp.ssrcs.push_back(kThumbnailSendSsrcStart + i);
960 thumbnail_send_config.encoder_settings.encoder_factory =
961 &video_encoder_factory_;
962 thumbnail_send_config.encoder_settings.bitrate_allocator_factory =
963 video_bitrate_allocator_factory_.get();
964 thumbnail_send_config.rtp.payload_name = params_.video[0].codec;
965 thumbnail_send_config.rtp.payload_type = kPayloadTypeVP8;
966 thumbnail_send_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
967 thumbnail_send_config.rtp.rtx.payload_type = kSendRtxPayloadType;
968 thumbnail_send_config.rtp.rtx.ssrcs.push_back(kThumbnailRtxSsrcStart + i);
969 thumbnail_send_config.rtp.extensions.clear();
970 if (params_.call.send_side_bwe) {
971 thumbnail_send_config.rtp.extensions.push_back(
972 RtpExtension(RtpExtension::kTransportSequenceNumberUri,
973 kTransportSequenceNumberExtensionId));
974 } else {
975 thumbnail_send_config.rtp.extensions.push_back(
976 RtpExtension(RtpExtension::kAbsSendTimeUri, kAbsSendTimeExtensionId));
977 }
978
979 VideoEncoderConfig thumbnail_encoder_config;
980 thumbnail_encoder_config.codec_type = kVideoCodecVP8;
981 thumbnail_encoder_config.video_format.name = "VP8";
982 thumbnail_encoder_config.min_transmit_bitrate_bps = 7500;
983 thumbnail_send_config.suspend_below_min_bitrate =
984 params_.video[0].suspend_below_min_bitrate;
985 thumbnail_encoder_config.number_of_streams = 1;
986 thumbnail_encoder_config.max_bitrate_bps = 50000;
987 std::vector<VideoStream> streams{params_.ss[0].streams[0]};
988 thumbnail_encoder_config.video_stream_factory =
989 rtc::make_ref_counted<VideoStreamFactory>(streams);
990 thumbnail_encoder_config.spatial_layers = params_.ss[0].spatial_layers;
991
992 thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy());
993 thumbnail_send_configs_.push_back(thumbnail_send_config.Copy());
994
995 AddMatchingVideoReceiveConfigs(
996 &thumbnail_receive_configs_, thumbnail_send_config, send_transport,
997 params_.call.send_side_bwe, &video_decoder_factory_, absl::nullopt,
998 false, kNackRtpHistoryMs);
999 }
1000 for (size_t i = 0; i < thumbnail_send_configs_.size(); ++i) {
1001 thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream(
1002 thumbnail_send_configs_[i].Copy(),
1003 thumbnail_encoder_configs_[i].Copy()));
1004 }
1005 for (size_t i = 0; i < thumbnail_receive_configs_.size(); ++i) {
1006 thumbnail_receive_streams_.push_back(sender_call_->CreateVideoReceiveStream(
1007 thumbnail_receive_configs_[i].Copy()));
1008 }
1009 }
1010
1011 void VideoQualityTest::DestroyThumbnailStreams() {
1012 for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_) {
1013 receiver_call_->DestroyVideoSendStream(thumbnail_send_stream);
1014 }
1015 thumbnail_send_streams_.clear();
1016 for (VideoReceiveStreamInterface* thumbnail_receive_stream :
1017 thumbnail_receive_streams_) {
1018 sender_call_->DestroyVideoReceiveStream(thumbnail_receive_stream);
1019 }
1020 thumbnail_send_streams_.clear();
1021 thumbnail_receive_streams_.clear();
1022 for (std::unique_ptr<rtc::VideoSourceInterface<VideoFrame>>& video_capturer :
1023 thumbnail_capturers_) {
1024 video_capturer.reset();
1025 }
1026 }
1027
1028 void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) {
1029 VideoStream thumbnail = DefaultThumbnailStream();
1030 for (size_t i = 0; i < num_thumbnail_streams; ++i) {
1031 auto frame_generator_capturer =
1032 std::make_unique<test::FrameGeneratorCapturer>(
1033 clock_,
1034 test::CreateSquareFrameGenerator(static_cast<int>(thumbnail.width),
1035 static_cast<int>(thumbnail.height),
1036 absl::nullopt, absl::nullopt),
1037 thumbnail.max_framerate, *task_queue_factory_);
1038 EXPECT_TRUE(frame_generator_capturer->Init());
1039 thumbnail_capturers_.push_back(std::move(frame_generator_capturer));
1040 }
1041 }
1042
1043 std::unique_ptr<test::FrameGeneratorInterface>
1044 VideoQualityTest::CreateFrameGenerator(size_t video_idx) {
1045 // Setup frame generator.
1046 const size_t kWidth = 1850;
1047 const size_t kHeight = 1110;
1048 std::unique_ptr<test::FrameGeneratorInterface> frame_generator;
1049 if (params_.screenshare[video_idx].generate_slides) {
1050 frame_generator = test::CreateSlideFrameGenerator(
1051 kWidth, kHeight,
1052 params_.screenshare[video_idx].slide_change_interval *
1053 params_.video[video_idx].fps);
1054 } else {
1055 std::vector<std::string> slides = params_.screenshare[video_idx].slides;
1056 if (slides.empty()) {
1057 slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
1058 slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
1059 slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
1060 slides.push_back(test::ResourcePath("difficult_photo_1850_1110", "yuv"));
1061 }
1062 if (params_.screenshare[video_idx].scroll_duration == 0) {
1063 // Cycle image every slide_change_interval seconds.
1064 frame_generator = test::CreateFromYuvFileFrameGenerator(
1065 slides, kWidth, kHeight,
1066 params_.screenshare[video_idx].slide_change_interval *
1067 params_.video[video_idx].fps);
1068 } else {
1069 RTC_CHECK_LE(params_.video[video_idx].width, kWidth);
1070 RTC_CHECK_LE(params_.video[video_idx].height, kHeight);
1071 RTC_CHECK_GT(params_.screenshare[video_idx].slide_change_interval, 0);
1072 const int kPauseDurationMs =
1073 (params_.screenshare[video_idx].slide_change_interval -
1074 params_.screenshare[video_idx].scroll_duration) *
1075 1000;
1076 RTC_CHECK_LE(params_.screenshare[video_idx].scroll_duration,
1077 params_.screenshare[video_idx].slide_change_interval);
1078
1079 frame_generator = test::CreateScrollingInputFromYuvFilesFrameGenerator(
1080 clock_, slides, kWidth, kHeight, params_.video[video_idx].width,
1081 params_.video[video_idx].height,
1082 params_.screenshare[video_idx].scroll_duration * 1000,
1083 kPauseDurationMs);
1084 }
1085 }
1086 return frame_generator;
1087 }
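// Worked example of the pause computation above (hypothetical settings):
// with slide_change_interval = 10 s and scroll_duration = 2 s, each slide
// scrolls for 2000 ms and then pauses for
// kPauseDurationMs = (10 - 2) * 1000 = 8000 ms before the next slide.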
1088
1089 void VideoQualityTest::CreateCapturers() {
1090 RTC_DCHECK(video_sources_.empty());
1091 video_sources_.resize(num_video_streams_);
1092 for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
1093 std::unique_ptr<test::FrameGeneratorInterface> frame_generator;
1094 if (params_.screenshare[video_idx].enabled) {
1095 frame_generator = CreateFrameGenerator(video_idx);
1096 } else if (params_.video[video_idx].clip_path == "Generator") {
1097 frame_generator = test::CreateSquareFrameGenerator(
1098 static_cast<int>(params_.video[video_idx].width),
1099 static_cast<int>(params_.video[video_idx].height), absl::nullopt,
1100 absl::nullopt);
1101 } else if (params_.video[video_idx].clip_path == "GeneratorI420A") {
1102 frame_generator = test::CreateSquareFrameGenerator(
1103 static_cast<int>(params_.video[video_idx].width),
1104 static_cast<int>(params_.video[video_idx].height),
1105 test::FrameGeneratorInterface::OutputType::kI420A, absl::nullopt);
1106 } else if (params_.video[video_idx].clip_path == "GeneratorI010") {
1107 frame_generator = test::CreateSquareFrameGenerator(
1108 static_cast<int>(params_.video[video_idx].width),
1109 static_cast<int>(params_.video[video_idx].height),
1110 test::FrameGeneratorInterface::OutputType::kI010, absl::nullopt);
1111 } else if (params_.video[video_idx].clip_path == "GeneratorNV12") {
1112 frame_generator = test::CreateSquareFrameGenerator(
1113 static_cast<int>(params_.video[video_idx].width),
1114 static_cast<int>(params_.video[video_idx].height),
1115 test::FrameGeneratorInterface::OutputType::kNV12, absl::nullopt);
1116 } else if (params_.video[video_idx].clip_path.empty()) {
1117 video_sources_[video_idx] = test::CreateVideoCapturer(
1118 params_.video[video_idx].width, params_.video[video_idx].height,
1119 params_.video[video_idx].fps,
1120 params_.video[video_idx].capture_device_index);
1121 if (video_sources_[video_idx]) {
1122 continue;
1123 } else {
1124 // Failed to get actual camera, use chroma generator as backup.
1125 frame_generator = test::CreateSquareFrameGenerator(
1126 static_cast<int>(params_.video[video_idx].width),
1127 static_cast<int>(params_.video[video_idx].height), absl::nullopt,
1128 absl::nullopt);
1129 }
1130 } else {
1131 frame_generator = test::CreateFromYuvFileFrameGenerator(
1132 {params_.video[video_idx].clip_path}, params_.video[video_idx].width,
1133 params_.video[video_idx].height, 1);
1134 ASSERT_TRUE(frame_generator) << "Could not create capturer for "
1135 << params_.video[video_idx].clip_path
1136 << ".yuv. Is this file present?";
1137 }
1138 ASSERT_TRUE(frame_generator);
1139 auto frame_generator_capturer =
1140 std::make_unique<test::FrameGeneratorCapturer>(
1141 clock_, std::move(frame_generator), params_.video[video_idx].fps,
1142 *task_queue_factory_);
1143 EXPECT_TRUE(frame_generator_capturer->Init());
1144 video_sources_[video_idx] = std::move(frame_generator_capturer);
1145 }
1146 }
1147
1148 void VideoQualityTest::StartAudioStreams() {
1149 audio_send_stream_->Start();
1150 for (AudioReceiveStreamInterface* audio_recv_stream : audio_receive_streams_)
1151 audio_recv_stream->Start();
1152 }
1153
1154 void VideoQualityTest::StartThumbnails() {
1155 for (VideoSendStream* send_stream : thumbnail_send_streams_)
1156 send_stream->Start();
1157 for (VideoReceiveStreamInterface* receive_stream : thumbnail_receive_streams_)
1158 receive_stream->Start();
1159 }
1160
1161 void VideoQualityTest::StopThumbnails() {
1162 for (VideoReceiveStreamInterface* receive_stream : thumbnail_receive_streams_)
1163 receive_stream->Stop();
1164 for (VideoSendStream* send_stream : thumbnail_send_streams_)
1165 send_stream->Stop();
1166 }
1167
1168 std::unique_ptr<test::LayerFilteringTransport>
1169 VideoQualityTest::CreateSendTransport() {
1170 std::unique_ptr<NetworkBehaviorInterface> network_behavior = nullptr;
1171 if (injection_components_->sender_network == nullptr) {
1172 network_behavior = std::make_unique<SimulatedNetwork>(*params_.config);
1173 } else {
1174 network_behavior = std::move(injection_components_->sender_network);
1175 }
1176 return std::make_unique<test::LayerFilteringTransport>(
1177 task_queue(),
1178 std::make_unique<FakeNetworkPipe>(clock_, std::move(network_behavior)),
1179 sender_call_.get(), kPayloadTypeVP8, kPayloadTypeVP9,
1180 params_.video[0].selected_tl, params_.ss[0].selected_sl,
1181 payload_type_map_, kVideoSendSsrcs[0],
1182 static_cast<uint32_t>(kVideoSendSsrcs[0] + params_.ss[0].streams.size() -
1183 1));
1184 }
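// Illustration of the SSRC range passed to LayerFilteringTransport above
// (the SSRC value is hypothetical): if kVideoSendSsrcs[0] were 100 and the
// primary video had three simulcast streams, the last two constructor
// arguments would be 100 and 102, i.e. the transport would apply
// temporal/spatial layer filtering to media SSRCs 100..102.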
1185
1186 std::unique_ptr<test::DirectTransport>
1187 VideoQualityTest::CreateReceiveTransport() {
1188 std::unique_ptr<NetworkBehaviorInterface> network_behavior = nullptr;
1189 if (injection_components_->receiver_network == nullptr) {
1190 network_behavior = std::make_unique<SimulatedNetwork>(*params_.config);
1191 } else {
1192 network_behavior = std::move(injection_components_->receiver_network);
1193 }
1194 return std::make_unique<test::DirectTransport>(
1195 task_queue(),
1196 std::make_unique<FakeNetworkPipe>(clock_, std::move(network_behavior)),
1197 receiver_call_.get(), payload_type_map_);
1198 }
1199
1200 void VideoQualityTest::RunWithAnalyzer(const Params& params) {
1201 num_video_streams_ = params.call.dual_video ? 2 : 1;
1202 std::unique_ptr<test::LayerFilteringTransport> send_transport;
1203 std::unique_ptr<test::DirectTransport> recv_transport;
1204 FILE* graph_data_output_file = nullptr;
1205
1206 params_ = params;
1207 // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
1208 // differentiate between the analyzer and the renderer case.
1209 CheckParamsAndInjectionComponents();
1210
1211 if (!params_.analyzer.graph_data_output_filename.empty()) {
1212 graph_data_output_file =
1213 fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
1214 RTC_CHECK(graph_data_output_file)
1215 << "Can't open the file " << params_.analyzer.graph_data_output_filename
1216 << "!";
1217 }
1218
1219 if (!params.logging.rtc_event_log_name.empty()) {
1220 send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1221 RtcEventLog::EncodingType::Legacy);
1222 recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
1223 RtcEventLog::EncodingType::Legacy);
1224 std::unique_ptr<RtcEventLogOutputFile> send_output(
1225 std::make_unique<RtcEventLogOutputFile>(
1226 params.logging.rtc_event_log_name + "_send",
1227 RtcEventLog::kUnlimitedOutput));
1228 std::unique_ptr<RtcEventLogOutputFile> recv_output(
1229 std::make_unique<RtcEventLogOutputFile>(
1230 params.logging.rtc_event_log_name + "_recv",
1231 RtcEventLog::kUnlimitedOutput));
1232 bool event_log_started =
1233 send_event_log_->StartLogging(std::move(send_output),
1234 RtcEventLog::kImmediateOutput) &&
1235 recv_event_log_->StartLogging(std::move(recv_output),
1236 RtcEventLog::kImmediateOutput);
1237 RTC_DCHECK(event_log_started);
1238 } else {
1239 send_event_log_ = std::make_unique<RtcEventLogNull>();
1240 recv_event_log_ = std::make_unique<RtcEventLogNull>();
1241 }
1242
1243 SendTask(task_queue(), [this, &params, &send_transport, &recv_transport]() {
1244 Call::Config send_call_config(send_event_log_.get());
1245 Call::Config recv_call_config(recv_event_log_.get());
1246 send_call_config.bitrate_config = params.call.call_bitrate_config;
1247 recv_call_config.bitrate_config = params.call.call_bitrate_config;
1248 if (params_.audio.enabled)
1249 InitializeAudioDevice(&send_call_config, &recv_call_config,
1250 params_.audio.use_real_adm);
1251
1252 CreateCalls(send_call_config, recv_call_config);
1253 send_transport = CreateSendTransport();
1254 recv_transport = CreateReceiveTransport();
1255 });
1256
1257 std::string graph_title = params_.analyzer.graph_title;
1258 if (graph_title.empty())
1259 graph_title = VideoQualityTest::GenerateGraphTitle();
1260 bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest");
1261 analyzer_ = std::make_unique<VideoAnalyzer>(
1262 send_transport.get(), params_.analyzer.test_label,
1263 params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold,
1264 is_quick_test_enabled
1265 ? kFramesSentInQuickTest
1266 : params_.analyzer.test_durations_secs * params_.video[0].fps,
1267 is_quick_test_enabled
1268 ? TimeDelta::Millis(1)
1269 : TimeDelta::Seconds(params_.analyzer.test_durations_secs),
1270 graph_data_output_file, graph_title,
1271 kVideoSendSsrcs[params_.ss[0].selected_stream],
1272 kSendRtxSsrcs[params_.ss[0].selected_stream],
1273 static_cast<size_t>(params_.ss[0].selected_stream),
1274 params.ss[0].selected_sl, params_.video[0].selected_tl,
1275 is_quick_test_enabled, clock_, params_.logging.rtp_dump_name,
1276 task_queue());
1277
  SendTask(task_queue(), [&]() {
    analyzer_->SetCall(sender_call_.get());
    analyzer_->SetReceiver(receiver_call_->Receiver());
    send_transport->SetReceiver(analyzer_.get());
    recv_transport->SetReceiver(sender_call_->Receiver());

    SetupVideo(analyzer_.get(), recv_transport.get());
    SetupThumbnails(analyzer_.get(), recv_transport.get());
    video_receive_configs_[params_.ss[0].selected_stream].renderer =
        analyzer_.get();

    CreateFlexfecStreams();
    CreateVideoStreams();
    analyzer_->SetSendStream(video_send_streams_[0]);
    analyzer_->SetReceiveStream(
        video_receive_streams_[params_.ss[0].selected_stream]);

    GetVideoSendStream()->SetSource(analyzer_->OutputInterface(),
                                    degradation_preference_);
    SetupThumbnailCapturers(params_.call.num_thumbnails);
    for (size_t i = 0; i < thumbnail_send_streams_.size(); ++i) {
      thumbnail_send_streams_[i]->SetSource(thumbnail_capturers_[i].get(),
                                            degradation_preference_);
    }

    CreateCapturers();

    analyzer_->SetSource(video_sources_[0].get(), true);

    for (size_t video_idx = 1; video_idx < num_video_streams_; ++video_idx) {
      video_send_streams_[video_idx]->SetSource(video_sources_[video_idx].get(),
                                                degradation_preference_);
    }

    if (params_.audio.enabled) {
      SetupAudio(send_transport.get());
      StartAudioStreams();
      analyzer_->SetAudioReceiveStream(audio_receive_streams_[0]);
    }
    StartVideoStreams();
    StartThumbnails();
    analyzer_->StartMeasuringCpuProcessTime();
  });

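  // Block here until the analyzer has finished its frame comparisons; the
  // streams, transports and Calls are then torn down on the task queue.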
  analyzer_->Wait();

  SendTask(task_queue(), [&]() {
    StopThumbnails();
    Stop();

    DestroyStreams();
    DestroyThumbnailStreams();

    if (graph_data_output_file)
      fclose(graph_data_output_file);

    send_transport.reset();
    recv_transport.reset();

    DestroyCalls();
  });
  analyzer_ = nullptr;
}

rtc::scoped_refptr<AudioDeviceModule> VideoQualityTest::CreateAudioDevice() {
#ifdef WEBRTC_WIN
  RTC_LOG(LS_INFO) << "Using latest version of ADM on Windows";
  // We must initialize the COM library on a thread before calling any of the
  // library functions. All COM functions in the ADM will return
  // CO_E_NOTINITIALIZED otherwise. The legacy ADM for Windows used internal
  // COM initialization, but the new ADM requires COM to be initialized
  // externally.
  com_initializer_ =
      std::make_unique<ScopedCOMInitializer>(ScopedCOMInitializer::kMTA);
  RTC_CHECK(com_initializer_->Succeeded());
  RTC_CHECK(webrtc_win::core_audio_utility::IsSupported());
  RTC_CHECK(webrtc_win::core_audio_utility::IsMMCSSSupported());
  return CreateWindowsCoreAudioAudioDeviceModule(task_queue_factory_.get());
#else
  // Use the legacy factory method on all platforms except Windows.
  return AudioDeviceModule::Create(AudioDeviceModule::kPlatformDefaultAudio,
                                   task_queue_factory_.get());
#endif
}

void VideoQualityTest::InitializeAudioDevice(Call::Config* send_call_config,
                                             Call::Config* recv_call_config,
                                             bool use_real_adm) {
  rtc::scoped_refptr<AudioDeviceModule> audio_device;
  if (use_real_adm) {
    // Run the test with a real ADM (using the default audio devices) if the
    // user has explicitly set the --audio and --use_real_adm command-line
    // flags.
    audio_device = CreateAudioDevice();
  } else {
    // By default, create a test ADM which fakes audio.
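    // The fake device pairs a pulsed-noise capturer with a renderer that
    // discards all played-out audio; the arguments below are presumably the
    // capturer's max amplitude (32000) and the 48 kHz sample rate.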
    audio_device = TestAudioDeviceModule::Create(
        task_queue_factory_.get(),
        TestAudioDeviceModule::CreatePulsedNoiseCapturer(32000, 48000),
        TestAudioDeviceModule::CreateDiscardRenderer(48000), 1.f);
  }
  RTC_CHECK(audio_device);

  AudioState::Config audio_state_config;
  audio_state_config.audio_mixer = AudioMixerImpl::Create();
  audio_state_config.audio_processing = AudioProcessingBuilder().Create();
  audio_state_config.audio_device_module = audio_device;
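  // Both Calls get their own AudioState, but the states share the same ADM,
  // mixer and audio-processing instances created above.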
  send_call_config->audio_state = AudioState::Create(audio_state_config);
  recv_call_config->audio_state = AudioState::Create(audio_state_config);
  if (use_real_adm) {
    // The real ADM requires extra initialization: setting default devices,
    // setting up the number of channels etc. The helper also calls
    // AudioDeviceModule::Init().
    webrtc::adm_helpers::Init(audio_device.get());
  } else {
    audio_device->Init();
  }
  // Always initialize the ADM before injecting a valid audio transport.
  RTC_CHECK(audio_device->RegisterAudioCallback(
                send_call_config->audio_state->audio_transport()) == 0);
}

void VideoQualityTest::SetupAudio(Transport* transport) {
  AudioSendStream::Config audio_send_config(transport);
  audio_send_config.rtp.ssrc = kAudioSendSsrc;

  // Add extension to enable audio send side BWE, and allow audio bit rate
  // adaptation.
  audio_send_config.rtp.extensions.clear();
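  // Send Opus at 48 kHz stereo; DTX is switched on or off according to the
  // test's audio parameters.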
  audio_send_config.send_codec_spec = AudioSendStream::Config::SendCodecSpec(
      kAudioSendPayloadType,
      {"OPUS",
       48000,
       2,
       {{"usedtx", (params_.audio.dtx ? "1" : "0")}, {"stereo", "1"}}});

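  // With send-side BWE enabled the audio stream takes part in bandwidth
  // allocation: it gets the transport-wide sequence number extension,
  // transport-cc feedback and a 6-32 kbps bitrate range (see the constants at
  // the top of this file).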
  if (params_.call.send_side_bwe) {
    audio_send_config.rtp.extensions.push_back(
        webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri,
                             kTransportSequenceNumberExtensionId));
    audio_send_config.min_bitrate_bps = kOpusMinBitrateBps;
    audio_send_config.max_bitrate_bps = kOpusBitrateFbBps;
    audio_send_config.send_codec_spec->transport_cc_enabled = true;
    // Only allow ANA when send-side BWE is enabled.
    audio_send_config.audio_network_adaptor_config = params_.audio.ana_config;
  }
  audio_send_config.encoder_factory = audio_encoder_factory_;
  SetAudioConfig(audio_send_config);

  std::string sync_group;
  if (params_.video[0].enabled && params_.audio.sync_video)
    sync_group = kSyncGroup;

  CreateMatchingAudioConfigs(transport, sync_group);
  CreateAudioStreams();
}

void VideoQualityTest::RunWithRenderers(const Params& params) {
  RTC_LOG(LS_INFO) << __FUNCTION__;
  num_video_streams_ = params.call.dual_video ? 2 : 1;
  std::unique_ptr<test::LayerFilteringTransport> send_transport;
  std::unique_ptr<test::DirectTransport> recv_transport;
  std::unique_ptr<test::VideoRenderer> local_preview;
  std::vector<std::unique_ptr<test::VideoRenderer>> loopback_renderers;

  if (!params.logging.rtc_event_log_name.empty()) {
    send_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
        RtcEventLog::EncodingType::Legacy);
    recv_event_log_ = rtc_event_log_factory_.CreateRtcEventLog(
        RtcEventLog::EncodingType::Legacy);
    std::unique_ptr<RtcEventLogOutputFile> send_output(
        std::make_unique<RtcEventLogOutputFile>(
            params.logging.rtc_event_log_name + "_send",
            RtcEventLog::kUnlimitedOutput));
    std::unique_ptr<RtcEventLogOutputFile> recv_output(
        std::make_unique<RtcEventLogOutputFile>(
            params.logging.rtc_event_log_name + "_recv",
            RtcEventLog::kUnlimitedOutput));
    bool event_log_started =
        send_event_log_->StartLogging(std::move(send_output),
                                      /*output_period_ms=*/5000) &&
        recv_event_log_->StartLogging(std::move(recv_output),
                                      /*output_period_ms=*/5000);
    RTC_DCHECK(event_log_started);
  } else {
    send_event_log_ = std::make_unique<RtcEventLogNull>();
    recv_event_log_ = std::make_unique<RtcEventLogNull>();
  }

  SendTask(task_queue(), [&]() {
    params_ = params;
    CheckParamsAndInjectionComponents();

    // TODO(ivica): Remove bitrate_config and use the default Call::Config(),
    // to match the full stack tests.
    Call::Config send_call_config(send_event_log_.get());
    send_call_config.bitrate_config = params_.call.call_bitrate_config;
    Call::Config recv_call_config(recv_event_log_.get());

    if (params_.audio.enabled)
      InitializeAudioDevice(&send_call_config, &recv_call_config,
                            params_.audio.use_real_adm);

    CreateCalls(send_call_config, recv_call_config);

    // TODO(minyue): consider if this is a good transport even for audio only
    // calls.
    send_transport = CreateSendTransport();

    recv_transport = CreateReceiveTransport();

    // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or
    // at least share as much code as possible. That way this test would also
    // match the full stack tests better.
    send_transport->SetReceiver(receiver_call_->Receiver());
    recv_transport->SetReceiver(sender_call_->Receiver());

    if (params_.video[0].enabled) {
      // Create video renderers.
      SetupVideo(send_transport.get(), recv_transport.get());
      size_t num_streams_processed = 0;
      for (size_t video_idx = 0; video_idx < num_video_streams_; ++video_idx) {
        const size_t selected_stream_id = params_.ss[video_idx].selected_stream;
        const size_t num_streams = params_.ss[video_idx].streams.size();
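        // A selected_stream index equal to the number of configured streams
        // means "show all streams": every simulcast stream gets its own
        // loopback renderer window. Otherwise only the selected stream is
        // rendered.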
        if (selected_stream_id == num_streams) {
          for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) {
            rtc::StringBuilder oss;
            oss << "Loopback Video #" << video_idx << " - Stream #"
                << static_cast<int>(stream_id);
            loopback_renderers.emplace_back(test::VideoRenderer::Create(
                oss.str().c_str(),
                params_.ss[video_idx].streams[stream_id].width,
                params_.ss[video_idx].streams[stream_id].height));
            video_receive_configs_[stream_id + num_streams_processed].renderer =
                loopback_renderers.back().get();
            if (params_.audio.enabled && params_.audio.sync_video)
              video_receive_configs_[stream_id + num_streams_processed]
                  .sync_group = kSyncGroup;
          }
        } else {
          rtc::StringBuilder oss;
          oss << "Loopback Video #" << video_idx;
          loopback_renderers.emplace_back(test::VideoRenderer::Create(
              oss.str().c_str(),
              params_.ss[video_idx].streams[selected_stream_id].width,
              params_.ss[video_idx].streams[selected_stream_id].height));
          video_receive_configs_[selected_stream_id + num_streams_processed]
              .renderer = loopback_renderers.back().get();
          if (params_.audio.enabled && params_.audio.sync_video)
            video_receive_configs_[num_streams_processed + selected_stream_id]
                .sync_group = kSyncGroup;
        }
        num_streams_processed += num_streams;
      }
      CreateFlexfecStreams();
      CreateVideoStreams();

      CreateCapturers();
      if (params_.video[0].enabled) {
        // Create local preview
        local_preview.reset(test::VideoRenderer::Create(
            "Local Preview", params_.video[0].width, params_.video[0].height));

        video_sources_[0]->AddOrUpdateSink(local_preview.get(),
                                           rtc::VideoSinkWants());
      }
      ConnectVideoSourcesToStreams();
    }

    if (params_.audio.enabled) {
      SetupAudio(send_transport.get());
    }

    Start();
  });

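  // The loopback call keeps running until the user presses Enter; everything
  // is then torn down on the task queue.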
  PressEnterToContinue(task_queue());

  SendTask(task_queue(), [&]() {
    Stop();
    DestroyStreams();

    send_transport.reset();
    recv_transport.reset();

    local_preview.reset();
    loopback_renderers.clear();

    DestroyCalls();
  });
}

}  // namespace webrtc