/*
 * Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
 *
 * Use of this source code is governed by a BSD-style license
 * that can be found in the LICENSE file in the root of the source
 * tree. An additional intellectual property rights grant can be found
 * in the file PATENTS. All contributing project authors may
 * be found in the AUTHORS file in the root of the source tree.
 */
#include "video/video_send_stream.h"

#include <utility>

#include "api/array_view.h"
#include "api/task_queue/task_queue_base.h"
#include "api/video/video_stream_encoder_settings.h"
#include "modules/rtp_rtcp/include/rtp_header_extension_map.h"
#include "modules/rtp_rtcp/source/rtp_header_extension_size.h"
#include "modules/rtp_rtcp/source/rtp_sender.h"
#include "rtc_base/checks.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "system_wrappers/include/clock.h"
#include "video/adaptation/overuse_frame_detector.h"
#include "video/frame_cadence_adapter.h"
#include "video/video_stream_encoder.h"

namespace webrtc {

namespace {

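// Estimates the worst-case RTP header size for this stream: the fixed RTP
// header, the configured header extensions, FEC/RED overhead (FlexFEC or
// UlpFEC) and, when RTX is configured, the RTX header.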
size_t CalculateMaxHeaderSize(const RtpConfig& config) {
  size_t header_size = kRtpHeaderSize;
  size_t extensions_size = 0;
  size_t fec_extensions_size = 0;
  if (!config.extensions.empty()) {
    RtpHeaderExtensionMap extensions_map(config.extensions);
    extensions_size = RtpHeaderExtensionSize(RTPSender::VideoExtensionSizes(),
                                             extensions_map);
    fec_extensions_size =
        RtpHeaderExtensionSize(RTPSender::FecExtensionSizes(), extensions_map);
  }
  header_size += extensions_size;
  if (config.flexfec.payload_type >= 0) {
    // All FEC extensions again plus maximum FlexFec overhead.
    header_size += fec_extensions_size + 32;
  } else {
    if (config.ulpfec.ulpfec_payload_type >= 0) {
      // Header with all the FEC extensions will be repeated plus maximum
      // UlpFec overhead.
      header_size += fec_extensions_size + 18;
    }
    if (config.ulpfec.red_payload_type >= 0) {
      header_size += 1;  // RED header.
    }
  }
  // Additional room for Rtx.
  if (config.rtx.payload_type >= 0)
    header_size += kRtxHeaderSize;
  return header_size;
}

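// Selects how the encoder reports bitrate allocations: via the
// video-layers-allocation header extension when it is configured, via
// VideoBitrateAllocation RTCP messages when the field trial is enabled, and
// otherwise only while screensharing.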
VideoStreamEncoder::BitrateAllocationCallbackType
GetBitrateAllocationCallbackType(const VideoSendStream::Config& config,
                                 const FieldTrialsView& field_trials) {
  if (webrtc::RtpExtension::FindHeaderExtensionByUri(
          config.rtp.extensions,
          webrtc::RtpExtension::kVideoLayersAllocationUri,
          config.crypto_options.srtp.enable_encrypted_rtp_header_extensions
              ? RtpExtension::Filter::kPreferEncryptedExtension
              : RtpExtension::Filter::kDiscardEncryptedExtension)) {
    return VideoStreamEncoder::BitrateAllocationCallbackType::
        kVideoLayersAllocation;
  }
  if (field_trials.IsEnabled("WebRTC-Target-Bitrate-Rtcp")) {
    return VideoStreamEncoder::BitrateAllocationCallbackType::
        kVideoBitrateAllocation;
  }
  return VideoStreamEncoder::BitrateAllocationCallbackType::
      kVideoBitrateAllocationWhenScreenSharing;
}

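// Bundles the stream's frame encryptor and crypto options into the
// configuration consumed by the RTP video sender.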
RtpSenderFrameEncryptionConfig CreateFrameEncryptionConfig(
    const VideoSendStream::Config* config) {
  RtpSenderFrameEncryptionConfig frame_encryption_config;
  frame_encryption_config.frame_encryptor = config->frame_encryptor.get();
  frame_encryption_config.crypto_options = config->crypto_options;
  return frame_encryption_config;
}

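// Wires the RTP/RTCP callbacks to their consumers: RTT and report-block
// statistics, intra-frame and loss-notification feedback to the encoder,
// send-side statistics, and send-delay tracking.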
RtpSenderObservers CreateObservers(RtcpRttStats* call_stats,
                                   EncoderRtcpFeedback* encoder_feedback,
                                   SendStatisticsProxy* stats_proxy,
                                   SendDelayStats* send_delay_stats) {
  RtpSenderObservers observers;
  observers.rtcp_rtt_stats = call_stats;
  observers.intra_frame_callback = encoder_feedback;
  observers.rtcp_loss_notification_observer = encoder_feedback;
  observers.report_block_data_observer = stats_proxy;
  observers.rtp_stats = stats_proxy;
  observers.bitrate_observer = stats_proxy;
  observers.frame_count_observer = stats_proxy;
  observers.rtcp_type_observer = stats_proxy;
  observers.send_delay_observer = stats_proxy;
  observers.send_packet_observer = send_delay_stats;
  return observers;
}

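// Creates the VideoStreamEncoder together with its dedicated "EncoderQueue"
// task queue, an overuse frame detector, and a frame cadence adapter bound
// to that queue.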
std::unique_ptr<VideoStreamEncoder> CreateVideoStreamEncoder(
    Clock* clock,
    int num_cpu_cores,
    TaskQueueFactory* task_queue_factory,
    SendStatisticsProxy* stats_proxy,
    const VideoStreamEncoderSettings& encoder_settings,
    VideoStreamEncoder::BitrateAllocationCallbackType
        bitrate_allocation_callback_type,
    const FieldTrialsView& field_trials,
    webrtc::VideoEncoderFactory::EncoderSelectorInterface* encoder_selector) {
  std::unique_ptr<TaskQueueBase, TaskQueueDeleter> encoder_queue =
      task_queue_factory->CreateTaskQueue("EncoderQueue",
                                          TaskQueueFactory::Priority::NORMAL);
  TaskQueueBase* encoder_queue_ptr = encoder_queue.get();
  return std::make_unique<VideoStreamEncoder>(
      clock, num_cpu_cores, stats_proxy, encoder_settings,
      std::make_unique<OveruseFrameDetector>(stats_proxy, field_trials),
      FrameCadenceAdapterInterface::Create(clock, encoder_queue_ptr,
                                           field_trials),
      std::move(encoder_queue), bitrate_allocation_callback_type, field_trials,
      encoder_selector);
}

}  // namespace

namespace internal {

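// The stream owns its stats proxy and encoder; the RtpVideoSender is
// obtained from the transport controller and returned to it in the
// destructor via DestroyRtpVideoSender().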
VideoSendStream::VideoSendStream(
    Clock* clock,
    int num_cpu_cores,
    TaskQueueFactory* task_queue_factory,
    TaskQueueBase* network_queue,
    RtcpRttStats* call_stats,
    RtpTransportControllerSendInterface* transport,
    BitrateAllocatorInterface* bitrate_allocator,
    SendDelayStats* send_delay_stats,
    RtcEventLog* event_log,
    VideoSendStream::Config config,
    VideoEncoderConfig encoder_config,
    const std::map<uint32_t, RtpState>& suspended_ssrcs,
    const std::map<uint32_t, RtpPayloadState>& suspended_payload_states,
    std::unique_ptr<FecController> fec_controller,
    const FieldTrialsView& field_trials)
    : rtp_transport_queue_(transport->GetWorkerQueue()),
      transport_(transport),
      stats_proxy_(clock, config, encoder_config.content_type, field_trials),
      config_(std::move(config)),
      content_type_(encoder_config.content_type),
      video_stream_encoder_(CreateVideoStreamEncoder(
          clock,
          num_cpu_cores,
          task_queue_factory,
          &stats_proxy_,
          config_.encoder_settings,
          GetBitrateAllocationCallbackType(config_, field_trials),
          field_trials,
          config_.encoder_selector)),
      encoder_feedback_(
          clock,
          config_.rtp.ssrcs,
          video_stream_encoder_.get(),
          [this](uint32_t ssrc, const std::vector<uint16_t>& seq_nums) {
            return rtp_video_sender_->GetSentRtpPacketInfos(ssrc, seq_nums);
          }),
      rtp_video_sender_(
          transport->CreateRtpVideoSender(suspended_ssrcs,
                                          suspended_payload_states,
                                          config_.rtp,
                                          config_.rtcp_report_interval_ms,
                                          config_.send_transport,
                                          CreateObservers(call_stats,
                                                          &encoder_feedback_,
                                                          &stats_proxy_,
                                                          send_delay_stats),
                                          event_log,
                                          std::move(fec_controller),
                                          CreateFrameEncryptionConfig(&config_),
                                          config_.frame_transformer)),
      send_stream_(clock,
                   &stats_proxy_,
                   transport,
                   bitrate_allocator,
                   video_stream_encoder_.get(),
                   &config_,
                   encoder_config.max_bitrate_bps,
                   encoder_config.bitrate_priority,
                   encoder_config.content_type,
                   rtp_video_sender_,
                   field_trials) {
  RTC_DCHECK(config_.encoder_settings.encoder_factory);
  RTC_DCHECK(config_.encoder_settings.bitrate_allocator_factory);

  video_stream_encoder_->SetFecControllerOverride(rtp_video_sender_);

  ReconfigureVideoEncoder(std::move(encoder_config));
}

VideoSendStream::~VideoSendStream() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  RTC_DCHECK(!running_);
  transport_->DestroyRtpVideoSender(rtp_video_sender_);
}

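// Starts the stream with every configured RTP stream (simulcast layer)
// active.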
void VideoSendStream::Start() {
  const std::vector<bool> active_layers(config_.rtp.ssrcs.size(), true);
  StartPerRtpStream(active_layers);
}

void VideoSendStream::StartPerRtpStream(
    const std::vector<bool> active_layers) {
  RTC_DCHECK_RUN_ON(&thread_checker_);

  // Keep the expected state of our `running_` flag in sync with the active
  // layers, since `send_stream_` will be implicitly stopped/started depending
  // on the state of the layers.
  bool running = false;

  rtc::StringBuilder active_layers_string;
  active_layers_string << "{";
  for (size_t i = 0; i < active_layers.size(); ++i) {
    if (active_layers[i]) {
      running = true;
      active_layers_string << "1";
    } else {
      active_layers_string << "0";
    }
    if (i < active_layers.size() - 1) {
      active_layers_string << ", ";
    }
  }
  active_layers_string << "}";
  RTC_LOG(LS_INFO) << "StartPerRtpStream: " << active_layers_string.str();

  rtp_transport_queue_->RunOrPost(
      SafeTask(transport_queue_safety_, [this, active_layers] {
        send_stream_.StartPerRtpStream(active_layers);
      }));

  running_ = running;
}

void VideoSendStream::Stop() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  if (!running_)
    return;
  RTC_DLOG(LS_INFO) << "VideoSendStream::Stop";
  running_ = false;
  rtp_transport_queue_->RunOrPost(SafeTask(transport_queue_safety_, [this] {
    // As the stream can get re-used and implicitly restarted via changing
    // the state of the active layers, we do not mark the
    // `transport_queue_safety_` flag with `SetNotAlive()` here. That's only
    // done when we stop permanently via `StopPermanentlyAndGetRtpStates()`.
    send_stream_.Stop();
  }));
}

bool VideoSendStream::started() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  return running_;
}

void VideoSendStream::AddAdaptationResource(
    rtc::scoped_refptr<Resource> resource) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->AddAdaptationResource(resource);
}

std::vector<rtc::scoped_refptr<Resource>>
VideoSendStream::GetAdaptationResources() {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  return video_stream_encoder_->GetAdaptationResources();
}

void VideoSendStream::SetSource(
    rtc::VideoSourceInterface<webrtc::VideoFrame>* source,
    const DegradationPreference& degradation_preference) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->SetSource(source, degradation_preference);
}

void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config) {
  ReconfigureVideoEncoder(std::move(config), nullptr);
}

void VideoSendStream::ReconfigureVideoEncoder(VideoEncoderConfig config,
                                              SetParametersCallback callback) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  RTC_DCHECK_EQ(content_type_, config.content_type);
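  // Leave room in each packet for the worst-case RTP header overhead so the
  // encoded payload fits within the configured max packet size.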
  video_stream_encoder_->ConfigureEncoder(
      std::move(config),
      config_.rtp.max_packet_size - CalculateMaxHeaderSize(config_.rtp),
      std::move(callback));
}

VideoSendStream::Stats VideoSendStream::GetStats() {
  // TODO(perkj, solenberg): Some test cases in EndToEndTest call GetStats from
  // a network thread. See comment in Call::GetStats().
  // RTC_DCHECK_RUN_ON(&thread_checker_);
  return stats_proxy_.GetStats();
}

absl::optional<float> VideoSendStream::GetPacingFactorOverride() const {
  return send_stream_.configured_pacing_factor();
}

void VideoSendStream::StopPermanentlyAndGetRtpStates(
    VideoSendStream::RtpStateMap* rtp_state_map,
    VideoSendStream::RtpPayloadStateMap* payload_state_map) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  video_stream_encoder_->Stop();

  running_ = false;
  // Always run these cleanup steps regardless of whether running_ was set
  // or not. This will unregister callbacks before destruction.
  // See `VideoSendStreamImpl::StopVideoSendStream` for more.
  rtp_transport_queue_->RunSynchronous(
      [this, rtp_state_map, payload_state_map]() {
        transport_queue_safety_->SetNotAlive();
        send_stream_.Stop();
        *rtp_state_map = send_stream_.GetRtpStates();
        *payload_state_map = send_stream_.GetRtpPayloadStates();
      });
}

void VideoSendStream::DeliverRtcp(const uint8_t* packet, size_t length) {
  RTC_DCHECK_RUN_ON(&thread_checker_);
  send_stream_.DeliverRtcp(packet, length);
}

void VideoSendStream::GenerateKeyFrame(const std::vector<std::string>& rids) {
  // Map rids to layers. If rids is empty, generate a keyframe for all layers.
  std::vector<VideoFrameType> next_frames(config_.rtp.ssrcs.size(),
                                          VideoFrameType::kVideoFrameKey);
  if (!config_.rtp.rids.empty() && !rids.empty()) {
    std::fill(next_frames.begin(), next_frames.end(),
              VideoFrameType::kVideoFrameDelta);
    for (const auto& rid : rids) {
      for (size_t i = 0; i < config_.rtp.rids.size(); i++) {
        if (config_.rtp.rids[i] == rid) {
          next_frames[i] = VideoFrameType::kVideoFrameKey;
          break;
        }
      }
    }
  }
  if (video_stream_encoder_) {
    video_stream_encoder_->SendKeyFrame(next_frames);
  }
}

}  // namespace internal
}  // namespace webrtc