/*
 *  Copyright (c) 2016 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS. All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/rtp_rtcp/source/rtp_header_extensions.h"

#include <string.h>

#include <cmath>
#include <cstdint>
#include <limits>

#include "absl/strings/string_view.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
#include "modules/rtp_rtcp/source/byte_io.h"
// TODO(bug:9855) Move kNoSpatialIdx from vp9_globals.h to common_constants
#include "modules/video_coding/codecs/interface/common_constants.h"
#include "modules/video_coding/codecs/vp9/include/vp9_globals.h"
#include "rtc_base/checks.h"

namespace webrtc {
// Absolute send time in RTP streams.
//
// The absolute send time is signaled to the receiver in-band using the
// general mechanism for RTP header extensions [RFC8285]. The payload
// of this extension (the transmitted value) is a 24-bit unsigned integer
// containing the sender's current time in seconds as a fixed-point number
// with an 18-bit fractional part.
//
// The form of the absolute send time extension block:
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=2 |              absolute send time               |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
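//
// Worked example (illustrative, not part of the wire format description): one
// second equals 1 << 18 = 262144 in this 6.18 fixed-point format, and the
// 24-bit field wraps every 2^24 / 2^18 = 64 seconds. A send time of 1.5 s
// past an arbitrary epoch could therefore be written as:
//
//   uint8_t buffer[3];
//   AbsoluteSendTime::Write(buffer, /*time_24bits=*/393216);  // 1.5 * 2^18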
constexpr RTPExtensionType AbsoluteSendTime::kId;
constexpr uint8_t AbsoluteSendTime::kValueSizeBytes;

bool AbsoluteSendTime::Parse(rtc::ArrayView<const uint8_t> data,
                             uint32_t* time_24bits) {
  if (data.size() != 3)
    return false;
  *time_24bits = ByteReader<uint32_t, 3>::ReadBigEndian(data.data());
  return true;
}

bool AbsoluteSendTime::Write(rtc::ArrayView<uint8_t> data,
                             uint32_t time_24bits) {
  RTC_DCHECK_EQ(data.size(), 3);
  RTC_DCHECK_LE(time_24bits, 0x00FFFFFF);
  ByteWriter<uint32_t, 3>::WriteBigEndian(data.data(), time_24bits);
  return true;
}

// Absolute Capture Time
//
// The Absolute Capture Time extension is used to stamp RTP packets with an
// NTP timestamp showing when the first audio or video frame in a packet was
// originally captured. The intent of this extension is to provide a way to
// accomplish audio-to-video synchronization when RTCP-terminating intermediate
// systems (e.g. mixers) are involved.
//
// Data layout of the shortened version of abs-capture-time:
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=7 |     absolute capture timestamp (bit 0-23)     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |             absolute capture timestamp (bit 24-55)            |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ... (56-63)  |
// +-+-+-+-+-+-+-+-+
//
// Data layout of the extended version of abs-capture-time:
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=15|     absolute capture timestamp (bit 0-23)     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |             absolute capture timestamp (bit 24-55)            |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ... (56-63)  |   estimated capture clock offset (bit 0-23)   |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |           estimated capture clock offset (bit 24-55)          |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ... (56-63)  |
// +-+-+-+-+-+-+-+-+
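//
// Worked example (illustrative): both 64-bit fields use Q32.32 fixed point,
// i.e. the upper 32 bits are whole seconds and the lower 32 bits the
// fractional part, so a value of 1.25 s corresponds to 1.25 * 2^32 =
// 5368709120. The capture timestamp is an unsigned NTP time, while the
// estimated capture clock offset is signed and may be negative.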
constexpr RTPExtensionType AbsoluteCaptureTimeExtension::kId;
constexpr uint8_t AbsoluteCaptureTimeExtension::kValueSizeBytes;
constexpr uint8_t AbsoluteCaptureTimeExtension::
    kValueSizeBytesWithoutEstimatedCaptureClockOffset;

bool AbsoluteCaptureTimeExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                         AbsoluteCaptureTime* extension) {
  if (data.size() != kValueSizeBytes &&
      data.size() != kValueSizeBytesWithoutEstimatedCaptureClockOffset) {
    return false;
  }

  extension->absolute_capture_timestamp =
      ByteReader<uint64_t>::ReadBigEndian(data.data());

  if (data.size() != kValueSizeBytesWithoutEstimatedCaptureClockOffset) {
    extension->estimated_capture_clock_offset =
        ByteReader<int64_t>::ReadBigEndian(data.data() + 8);
  }

  return true;
}

size_t AbsoluteCaptureTimeExtension::ValueSize(
    const AbsoluteCaptureTime& extension) {
  if (extension.estimated_capture_clock_offset != absl::nullopt) {
    return kValueSizeBytes;
  } else {
    return kValueSizeBytesWithoutEstimatedCaptureClockOffset;
  }
}

bool AbsoluteCaptureTimeExtension::Write(rtc::ArrayView<uint8_t> data,
                                         const AbsoluteCaptureTime& extension) {
  RTC_DCHECK_EQ(data.size(), ValueSize(extension));

  ByteWriter<uint64_t>::WriteBigEndian(data.data(),
                                       extension.absolute_capture_timestamp);

  if (data.size() != kValueSizeBytesWithoutEstimatedCaptureClockOffset) {
    ByteWriter<int64_t>::WriteBigEndian(
        data.data() + 8, extension.estimated_capture_clock_offset.value());
  }

  return true;
}

// An RTP Header Extension for Client-to-Mixer Audio Level Indication
//
// https://tools.ietf.org/html/rfc6464
//
// The form of the audio level extension block:
//
//  0                   1
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=0 |V|   level     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the One-Byte Header Format
//
//  0                   1                   2
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |      ID       |     len=1     |V|    level    |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the Two-Byte Header Format
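//
// Usage sketch (illustrative): per RFC 6464 the 7-bit level is the magnitude
// of the audio level in -dBov (0 = loudest, 127 = silence), so writing a
// level of 90 with the voice-activity flag set produces the single byte
// 0x80 | 90 = 0xDA:
//
//   uint8_t buffer[1];
//   AudioLevel::Write(buffer, /*voice_activity=*/true, /*audio_level=*/90);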

constexpr RTPExtensionType AudioLevel::kId;
constexpr uint8_t AudioLevel::kValueSizeBytes;

bool AudioLevel::Parse(rtc::ArrayView<const uint8_t> data,
                       bool* voice_activity,
                       uint8_t* audio_level) {
  // One-byte and two-byte format share the same data definition.
  if (data.size() != 1)
    return false;
  *voice_activity = (data[0] & 0x80) != 0;
  *audio_level = data[0] & 0x7F;
  return true;
}

bool AudioLevel::Write(rtc::ArrayView<uint8_t> data,
                       bool voice_activity,
                       uint8_t audio_level) {
  // One-byte and two-byte format share the same data definition.
  RTC_DCHECK_EQ(data.size(), 1);
  RTC_CHECK_LE(audio_level, 0x7f);
  data[0] = (voice_activity ? 0x80 : 0x00) | audio_level;
  return true;
}

// An RTP Header Extension for Mixer-to-Client Audio Level Indication
//
// https://tools.ietf.org/html/rfc6465
//
// The form of the audio level extension block:
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=2 |0|   level 1   |0|   level 2   |0|   level 3   |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the One-Byte Header Format
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |      ID       |     len=3     |0|   level 1   |0|   level 2   |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |0|   level 3   |    0 (pad)    |               ...             |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the Two-Byte Header Format
constexpr RTPExtensionType CsrcAudioLevel::kId;
constexpr uint8_t CsrcAudioLevel::kMaxValueSizeBytes;

bool CsrcAudioLevel::Parse(rtc::ArrayView<const uint8_t> data,
                           std::vector<uint8_t>* csrc_audio_levels) {
  if (data.size() > kRtpCsrcSize) {
    return false;
  }
  csrc_audio_levels->resize(data.size());
  for (size_t i = 0; i < data.size(); i++) {
    (*csrc_audio_levels)[i] = data[i] & 0x7F;
  }
  return true;
}

size_t CsrcAudioLevel::ValueSize(
    rtc::ArrayView<const uint8_t> csrc_audio_levels) {
  return csrc_audio_levels.size();
}

bool CsrcAudioLevel::Write(rtc::ArrayView<uint8_t> data,
                           rtc::ArrayView<const uint8_t> csrc_audio_levels) {
  RTC_CHECK_LE(csrc_audio_levels.size(), kRtpCsrcSize);
  if (csrc_audio_levels.size() != data.size()) {
    return false;
  }
  for (size_t i = 0; i < csrc_audio_levels.size(); i++) {
    data[i] = csrc_audio_levels[i] & 0x7F;
  }
  return true;
}

// From RFC 5450: Transmission Time Offsets in RTP Streams.
//
// The transmission time is signaled to the receiver in-band using the
// general mechanism for RTP header extensions [RFC8285]. The payload
// of this extension (the transmitted value) is a 24-bit signed integer.
// When added to the RTP timestamp of the packet, it represents the
// "effective" RTP transmission time of the packet, on the RTP
// timescale.
//
// The form of the transmission offset extension block:
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=2 |              transmission offset              |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
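//
// Worked example (illustrative): the offset is expressed on the RTP timescale
// of the stream, so for a 90 kHz video clock a transmission offset of 4500
// means the packet was actually sent 50 ms later than the time implied by its
// RTP timestamp.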
constexpr RTPExtensionType TransmissionOffset::kId;
constexpr uint8_t TransmissionOffset::kValueSizeBytes;

bool TransmissionOffset::Parse(rtc::ArrayView<const uint8_t> data,
                               int32_t* rtp_time) {
  if (data.size() != 3)
    return false;
  *rtp_time = ByteReader<int32_t, 3>::ReadBigEndian(data.data());
  return true;
}

bool TransmissionOffset::Write(rtc::ArrayView<uint8_t> data, int32_t rtp_time) {
  RTC_DCHECK_EQ(data.size(), 3);
  RTC_DCHECK_LE(rtp_time, 0x00ffffff);
  ByteWriter<int32_t, 3>::WriteBigEndian(data.data(), rtp_time);
  return true;
}

// TransportSequenceNumber
//
//  0                   1                   2
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | L=1   |transport-wide sequence number |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
constexpr RTPExtensionType TransportSequenceNumber::kId;
constexpr uint8_t TransportSequenceNumber::kValueSizeBytes;

bool TransportSequenceNumber::Parse(rtc::ArrayView<const uint8_t> data,
                                    uint16_t* transport_sequence_number) {
  if (data.size() != kValueSizeBytes)
    return false;
  *transport_sequence_number = ByteReader<uint16_t>::ReadBigEndian(data.data());
  return true;
}

bool TransportSequenceNumber::Write(rtc::ArrayView<uint8_t> data,
                                    uint16_t transport_sequence_number) {
  RTC_DCHECK_EQ(data.size(), ValueSize(transport_sequence_number));
  ByteWriter<uint16_t>::WriteBigEndian(data.data(), transport_sequence_number);
  return true;
}

// TransportSequenceNumberV2
//
// In addition to the format used for TransportSequenceNumber, V2 also supports
// the following packet format, where two extra bytes are used to specify that
// the sender requests immediate feedback.
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | L=3   |transport-wide sequence number |T|  seq count  |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |seq count cont.|
// +-+-+-+-+-+-+-+-+
//
// The bit `T` determines whether the feedback should include timing
// information or not, and `seq_count` determines how many packets the
// feedback packet should cover, including the current packet. If `seq_count`
// is zero, no feedback is requested.
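//
// Worked example (illustrative, assuming kIncludeTimestampsBit is the most
// significant bit of the 16-bit field): requesting timed feedback covering
// the next 10 packets encodes the last two bytes as 0x8000 | 10 = 0x800A.
// A possible call, using the FeedbackRequest struct from the transport API:
//
//   uint8_t buffer[4];
//   TransportSequenceNumberV2::Write(
//       buffer, /*transport_sequence_number=*/321,
//       FeedbackRequest{/*include_timestamps=*/true, /*sequence_count=*/10});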
constexpr RTPExtensionType TransportSequenceNumberV2::kId;
constexpr uint8_t TransportSequenceNumberV2::kValueSizeBytes;
constexpr uint8_t
    TransportSequenceNumberV2::kValueSizeBytesWithoutFeedbackRequest;
constexpr uint16_t TransportSequenceNumberV2::kIncludeTimestampsBit;

bool TransportSequenceNumberV2::Parse(
    rtc::ArrayView<const uint8_t> data,
    uint16_t* transport_sequence_number,
    absl::optional<FeedbackRequest>* feedback_request) {
  if (data.size() != kValueSizeBytes &&
      data.size() != kValueSizeBytesWithoutFeedbackRequest)
    return false;

  *transport_sequence_number = ByteReader<uint16_t>::ReadBigEndian(data.data());

  *feedback_request = absl::nullopt;
  if (data.size() == kValueSizeBytes) {
    uint16_t feedback_request_raw =
        ByteReader<uint16_t>::ReadBigEndian(data.data() + 2);
    bool include_timestamps =
        (feedback_request_raw & kIncludeTimestampsBit) != 0;
    uint16_t sequence_count = feedback_request_raw & ~kIncludeTimestampsBit;

    // If `sequence_count` is zero, no feedback is requested.
    if (sequence_count != 0) {
      *feedback_request = {include_timestamps, sequence_count};
    }
  }
  return true;
}

bool TransportSequenceNumberV2::Write(
    rtc::ArrayView<uint8_t> data,
    uint16_t transport_sequence_number,
    const absl::optional<FeedbackRequest>& feedback_request) {
  RTC_DCHECK_EQ(data.size(),
                ValueSize(transport_sequence_number, feedback_request));

  ByteWriter<uint16_t>::WriteBigEndian(data.data(), transport_sequence_number);

  if (feedback_request) {
    RTC_DCHECK_GE(feedback_request->sequence_count, 0);
    RTC_DCHECK_LT(feedback_request->sequence_count, kIncludeTimestampsBit);
    uint16_t feedback_request_raw =
        feedback_request->sequence_count |
        (feedback_request->include_timestamps ? kIncludeTimestampsBit : 0);
    ByteWriter<uint16_t>::WriteBigEndian(data.data() + 2, feedback_request_raw);
  }
  return true;
}

// Coordination of Video Orientation in RTP streams.
//
// Coordination of Video Orientation consists of signaling the current
// orientation of the image captured on the sender side to the receiver for
// appropriate rendering and display.
//
//  0                   1
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=0 |0 0 0 0 C F R R|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
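//
// Worked example (illustrative, assuming the CVO mapping in rtp_cvo.h): the
// two R bits carry the rotation in 90 degree steps, so a byte of 0b0000'0001
// parses to kVideoRotation_90, while the C (camera) and F (flip) bits have no
// counterpart in VideoRotation and are only visible through the raw uint8_t
// overloads below.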
constexpr RTPExtensionType VideoOrientation::kId;
constexpr uint8_t VideoOrientation::kValueSizeBytes;

bool VideoOrientation::Parse(rtc::ArrayView<const uint8_t> data,
                             VideoRotation* rotation) {
  if (data.size() != 1)
    return false;
  *rotation = ConvertCVOByteToVideoRotation(data[0]);
  return true;
}

bool VideoOrientation::Write(rtc::ArrayView<uint8_t> data,
                             VideoRotation rotation) {
  RTC_DCHECK_EQ(data.size(), 1);
  data[0] = ConvertVideoRotationToCVOByte(rotation);
  return true;
}

bool VideoOrientation::Parse(rtc::ArrayView<const uint8_t> data,
                             uint8_t* value) {
  if (data.size() != 1)
    return false;
  *value = data[0];
  return true;
}

bool VideoOrientation::Write(rtc::ArrayView<uint8_t> data, uint8_t value) {
  RTC_DCHECK_EQ(data.size(), 1);
  data[0] = value;
  return true;
}

//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=2 |       MIN delay       |       MAX delay       |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
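//
// Worked example (illustrative, assuming the 10 ms granularity declared as
// kGranularityMs in the header): min and max delay are 12-bit values in units
// of 10 ms, so a minimum of 20 ms and a maximum of 500 ms are encoded as 2
// and 50 and packed into the 24-bit payload as (2 << 12) | 50 = 0x2032. The
// largest representable delay is 0xFFF * 10 ms = 40950 ms.
//
//   VideoPlayoutDelay delay;
//   delay.min_ms = 20;
//   delay.max_ms = 500;
//   uint8_t buffer[3];
//   PlayoutDelayLimits::Write(buffer, delay);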
constexpr RTPExtensionType PlayoutDelayLimits::kId;
constexpr uint8_t PlayoutDelayLimits::kValueSizeBytes;

bool PlayoutDelayLimits::Parse(rtc::ArrayView<const uint8_t> data,
                               VideoPlayoutDelay* playout_delay) {
  RTC_DCHECK(playout_delay);
  if (data.size() != 3)
    return false;
  uint32_t raw = ByteReader<uint32_t, 3>::ReadBigEndian(data.data());
  uint16_t min_raw = (raw >> 12);
  uint16_t max_raw = (raw & 0xfff);
  if (min_raw > max_raw)
    return false;
  playout_delay->min_ms = min_raw * kGranularityMs;
  playout_delay->max_ms = max_raw * kGranularityMs;
  return true;
}

bool PlayoutDelayLimits::Write(rtc::ArrayView<uint8_t> data,
                               const VideoPlayoutDelay& playout_delay) {
  RTC_DCHECK_EQ(data.size(), 3);
  RTC_DCHECK_LE(0, playout_delay.min_ms);
  RTC_DCHECK_LE(playout_delay.min_ms, playout_delay.max_ms);
  RTC_DCHECK_LE(playout_delay.max_ms, kMaxMs);
  // Convert milliseconds to the value sent on the extension header.
  uint32_t min_delay = playout_delay.min_ms / kGranularityMs;
  uint32_t max_delay = playout_delay.max_ms / kGranularityMs;
  ByteWriter<uint32_t, 3>::WriteBigEndian(data.data(),
                                          (min_delay << 12) | max_delay);
  return true;
}

// Video Content Type.
//
// E.g. default video or screenshare.
//
//  0                   1
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=0 | Content type  |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
constexpr RTPExtensionType VideoContentTypeExtension::kId;
constexpr uint8_t VideoContentTypeExtension::kValueSizeBytes;

bool VideoContentTypeExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                      VideoContentType* content_type) {
  if (data.size() == 1 &&
      videocontenttypehelpers::IsValidContentType(data[0])) {
    *content_type = static_cast<VideoContentType>(data[0]);
    return true;
  }
  return false;
}

bool VideoContentTypeExtension::Write(rtc::ArrayView<uint8_t> data,
                                      VideoContentType content_type) {
  RTC_DCHECK_EQ(data.size(), 1);
  data[0] = static_cast<uint8_t>(content_type);
  return true;
}

// Video Timing.
// Six timestamps in milliseconds, counted from the capture time stored in the
// RTP header: encode start/finish, packetization complete, pacer exit, and
// two timestamps reserved for modification by network nodes. `flags` is a
// bitmask and has the following allowed values:
// 0 = Valid data, but no flags available (backwards compatibility)
// 1 = Frame marked as timing frame due to cyclic timer.
// 2 = Frame marked as timing frame due to size being outside limit.
// 255 = Invalid. The whole timing frame extension should be ignored.
//
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=12|     flags     |     encode start ms delta     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |    encode finish ms delta     |  packetizer finish ms delta   |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |      pacer exit ms delta      |  network timestamp ms delta   |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  network2 timestamp ms delta  |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
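//
// Worked example (illustrative): every delta is an unsigned 16-bit
// millisecond offset relative to the capture time carried in the RTP header,
// so a frame whose encoding started 5 ms and finished 20 ms after capture is
// written with encode_start_delta_ms = 5 and encode_finish_delta_ms = 20; the
// two network timestamp slots are intended to be filled in by network nodes
// along the path.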

constexpr RTPExtensionType VideoTimingExtension::kId;
constexpr uint8_t VideoTimingExtension::kValueSizeBytes;
constexpr uint8_t VideoTimingExtension::kFlagsOffset;
constexpr uint8_t VideoTimingExtension::kEncodeStartDeltaOffset;
constexpr uint8_t VideoTimingExtension::kEncodeFinishDeltaOffset;
constexpr uint8_t VideoTimingExtension::kPacketizationFinishDeltaOffset;
constexpr uint8_t VideoTimingExtension::kPacerExitDeltaOffset;
constexpr uint8_t VideoTimingExtension::kNetworkTimestampDeltaOffset;
constexpr uint8_t VideoTimingExtension::kNetwork2TimestampDeltaOffset;

bool VideoTimingExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                 VideoSendTiming* timing) {
  RTC_DCHECK(timing);
  // TODO(sprang): Deprecate support for old wire format.
  ptrdiff_t off = 0;
  switch (data.size()) {
    case kValueSizeBytes - 1:
      timing->flags = 0;
      off = 1;  // Old wire format without the flags field.
      break;
    case kValueSizeBytes:
      timing->flags = ByteReader<uint8_t>::ReadBigEndian(data.data());
      break;
    default:
      return false;
  }

  timing->encode_start_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kEncodeStartDeltaOffset - off);
  timing->encode_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kEncodeFinishDeltaOffset - off);
  timing->packetization_finish_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kPacketizationFinishDeltaOffset - off);
  timing->pacer_exit_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kPacerExitDeltaOffset - off);
  timing->network_timestamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kNetworkTimestampDeltaOffset - off);
  timing->network2_timestamp_delta_ms = ByteReader<uint16_t>::ReadBigEndian(
      data.data() + kNetwork2TimestampDeltaOffset - off);
  return true;
}

bool VideoTimingExtension::Write(rtc::ArrayView<uint8_t> data,
                                 const VideoSendTiming& timing) {
  RTC_DCHECK_EQ(data.size(), 1 + 2 * 6);
  ByteWriter<uint8_t>::WriteBigEndian(data.data() + kFlagsOffset, timing.flags);
  ByteWriter<uint16_t>::WriteBigEndian(data.data() + kEncodeStartDeltaOffset,
                                       timing.encode_start_delta_ms);
  ByteWriter<uint16_t>::WriteBigEndian(data.data() + kEncodeFinishDeltaOffset,
                                       timing.encode_finish_delta_ms);
  ByteWriter<uint16_t>::WriteBigEndian(
      data.data() + kPacketizationFinishDeltaOffset,
      timing.packetization_finish_delta_ms);
  ByteWriter<uint16_t>::WriteBigEndian(data.data() + kPacerExitDeltaOffset,
                                       timing.pacer_exit_delta_ms);
  ByteWriter<uint16_t>::WriteBigEndian(
      data.data() + kNetworkTimestampDeltaOffset,
      timing.network_timestamp_delta_ms);
  ByteWriter<uint16_t>::WriteBigEndian(
      data.data() + kNetwork2TimestampDeltaOffset,
      timing.network2_timestamp_delta_ms);
  return true;
}

bool VideoTimingExtension::Write(rtc::ArrayView<uint8_t> data,
                                 uint16_t time_delta_ms,
                                 uint8_t offset) {
  RTC_DCHECK_GE(data.size(), offset + 2);
  RTC_DCHECK_LE(offset, kValueSizeBytes - sizeof(uint16_t));
  ByteWriter<uint16_t>::WriteBigEndian(data.data() + offset, time_delta_ms);
  return true;
}

// Color space including HDR metadata as an optional field.
//
// RTP header extension to carry color space information and optionally HDR
// metadata. The float values in the HDR metadata struct are upscaled by a
// static factor and transmitted as unsigned integers.
//
// Data layout of color space with HDR metadata (two-byte RTP header extension)
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |      ID       |   length=28   |   primaries   |   transfer    |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |    matrix     |range+chr.sit. |         luminance_max         |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |         luminance_min         |            mastering_metadata.|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |primary_r.x and .y             |            mastering_metadata.|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |primary_g.x and .y             |            mastering_metadata.|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |primary_b.x and .y             |            mastering_metadata.|
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |white.x and .y                 |    max_content_light_level    |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// | max_frame_average_light_level |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
//
// Data layout of color space w/o HDR metadata (one-byte RTP header extension)
//  0                   1                   2                   3
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | L = 3 |   primaries   |   transfer    |    matrix     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |range+chr.sit. |
// +-+-+-+-+-+-+-+-+
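//
// Worked example (illustrative): each HDR value is transmitted as
// round(value * denominator), using the denominators declared in the header,
// and parsing divides by the same constants to recover the floats. E.g. with
// a chromaticity denominator of 50000, the D65 white point x = 0.3127 would
// be carried on the wire as 15635.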

constexpr RTPExtensionType ColorSpaceExtension::kId;
constexpr uint8_t ColorSpaceExtension::kValueSizeBytes;

bool ColorSpaceExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                ColorSpace* color_space) {
  RTC_DCHECK(color_space);
  if (data.size() != kValueSizeBytes &&
      data.size() != kValueSizeBytesWithoutHdrMetadata)
    return false;

  size_t offset = 0;
  // Read color space information.
  if (!color_space->set_primaries_from_uint8(data[offset++]))
    return false;
  if (!color_space->set_transfer_from_uint8(data[offset++]))
    return false;
  if (!color_space->set_matrix_from_uint8(data[offset++]))
    return false;

  uint8_t range_and_chroma_siting = data[offset++];
  if (!color_space->set_range_from_uint8((range_and_chroma_siting >> 4) & 0x03))
    return false;
  if (!color_space->set_chroma_siting_horizontal_from_uint8(
          (range_and_chroma_siting >> 2) & 0x03))
    return false;
  if (!color_space->set_chroma_siting_vertical_from_uint8(
          range_and_chroma_siting & 0x03))
    return false;

  // Read HDR metadata if it exists, otherwise clear it.
  if (data.size() == kValueSizeBytesWithoutHdrMetadata) {
    color_space->set_hdr_metadata(nullptr);
  } else {
    HdrMetadata hdr_metadata;
    offset += ParseHdrMetadata(data.subview(offset), &hdr_metadata);
    if (!hdr_metadata.Validate())
      return false;
    color_space->set_hdr_metadata(&hdr_metadata);
  }
  RTC_DCHECK_EQ(ValueSize(*color_space), offset);
  return true;
}

bool ColorSpaceExtension::Write(rtc::ArrayView<uint8_t> data,
                                const ColorSpace& color_space) {
  RTC_DCHECK_EQ(data.size(), ValueSize(color_space));
  size_t offset = 0;
  // Write color space information.
  data[offset++] = static_cast<uint8_t>(color_space.primaries());
  data[offset++] = static_cast<uint8_t>(color_space.transfer());
  data[offset++] = static_cast<uint8_t>(color_space.matrix());
  data[offset++] = CombineRangeAndChromaSiting(
      color_space.range(), color_space.chroma_siting_horizontal(),
      color_space.chroma_siting_vertical());

  // Write HDR metadata if it exists.
  if (color_space.hdr_metadata()) {
    offset +=
        WriteHdrMetadata(data.subview(offset), *color_space.hdr_metadata());
  }
  RTC_DCHECK_EQ(ValueSize(color_space), offset);
  return true;
}

// Combines range and chroma siting into one byte with the following bit
// layout:
// bits 0-1 Chroma siting vertical.
//      2-3 Chroma siting horizontal.
//      4-5 Range.
//      6-7 Unused.
uint8_t ColorSpaceExtension::CombineRangeAndChromaSiting(
    ColorSpace::RangeID range,
    ColorSpace::ChromaSiting chroma_siting_horizontal,
    ColorSpace::ChromaSiting chroma_siting_vertical) {
  RTC_DCHECK_LE(static_cast<uint8_t>(range), 3);
  RTC_DCHECK_LE(static_cast<uint8_t>(chroma_siting_horizontal), 3);
  RTC_DCHECK_LE(static_cast<uint8_t>(chroma_siting_vertical), 3);
  return (static_cast<uint8_t>(range) << 4) |
         (static_cast<uint8_t>(chroma_siting_horizontal) << 2) |
         static_cast<uint8_t>(chroma_siting_vertical);
}

size_t ColorSpaceExtension::ParseHdrMetadata(rtc::ArrayView<const uint8_t> data,
                                             HdrMetadata* hdr_metadata) {
  RTC_DCHECK_EQ(data.size(),
                kValueSizeBytes - kValueSizeBytesWithoutHdrMetadata);
  size_t offset = 0;
  offset += ParseLuminance(data.data() + offset,
                           &hdr_metadata->mastering_metadata.luminance_max,
                           kLuminanceMaxDenominator);
  offset += ParseLuminance(data.data() + offset,
                           &hdr_metadata->mastering_metadata.luminance_min,
                           kLuminanceMinDenominator);
  offset += ParseChromaticity(data.data() + offset,
                              &hdr_metadata->mastering_metadata.primary_r);
  offset += ParseChromaticity(data.data() + offset,
                              &hdr_metadata->mastering_metadata.primary_g);
  offset += ParseChromaticity(data.data() + offset,
                              &hdr_metadata->mastering_metadata.primary_b);
  offset += ParseChromaticity(data.data() + offset,
                              &hdr_metadata->mastering_metadata.white_point);
  hdr_metadata->max_content_light_level =
      ByteReader<uint16_t>::ReadBigEndian(data.data() + offset);
  offset += 2;
  hdr_metadata->max_frame_average_light_level =
      ByteReader<uint16_t>::ReadBigEndian(data.data() + offset);
  offset += 2;
  return offset;
}

size_t ColorSpaceExtension::ParseChromaticity(
    const uint8_t* data,
    HdrMasteringMetadata::Chromaticity* p) {
  uint16_t chromaticity_x_scaled = ByteReader<uint16_t>::ReadBigEndian(data);
  uint16_t chromaticity_y_scaled =
      ByteReader<uint16_t>::ReadBigEndian(data + 2);
  p->x = static_cast<float>(chromaticity_x_scaled) / kChromaticityDenominator;
  p->y = static_cast<float>(chromaticity_y_scaled) / kChromaticityDenominator;
  return 4;  // Return the number of bytes read.
}

size_t ColorSpaceExtension::ParseLuminance(const uint8_t* data,
                                           float* f,
                                           int denominator) {
  uint16_t luminance_scaled = ByteReader<uint16_t>::ReadBigEndian(data);
  *f = static_cast<float>(luminance_scaled) / denominator;
  return 2;  // Return the number of bytes read.
}

size_t ColorSpaceExtension::WriteHdrMetadata(rtc::ArrayView<uint8_t> data,
                                             const HdrMetadata& hdr_metadata) {
  RTC_DCHECK_EQ(data.size(),
                kValueSizeBytes - kValueSizeBytesWithoutHdrMetadata);
  RTC_DCHECK(hdr_metadata.Validate());
  size_t offset = 0;
  offset += WriteLuminance(data.data() + offset,
                           hdr_metadata.mastering_metadata.luminance_max,
                           kLuminanceMaxDenominator);
  offset += WriteLuminance(data.data() + offset,
                           hdr_metadata.mastering_metadata.luminance_min,
                           kLuminanceMinDenominator);
  offset += WriteChromaticity(data.data() + offset,
                              hdr_metadata.mastering_metadata.primary_r);
  offset += WriteChromaticity(data.data() + offset,
                              hdr_metadata.mastering_metadata.primary_g);
  offset += WriteChromaticity(data.data() + offset,
                              hdr_metadata.mastering_metadata.primary_b);
  offset += WriteChromaticity(data.data() + offset,
                              hdr_metadata.mastering_metadata.white_point);

  ByteWriter<uint16_t>::WriteBigEndian(data.data() + offset,
                                       hdr_metadata.max_content_light_level);
  offset += 2;
  ByteWriter<uint16_t>::WriteBigEndian(
      data.data() + offset, hdr_metadata.max_frame_average_light_level);
  offset += 2;
  return offset;
}

size_t ColorSpaceExtension::WriteChromaticity(
    uint8_t* data,
    const HdrMasteringMetadata::Chromaticity& p) {
  RTC_DCHECK_GE(p.x, 0.0f);
  RTC_DCHECK_LE(p.x, 1.0f);
  RTC_DCHECK_GE(p.y, 0.0f);
  RTC_DCHECK_LE(p.y, 1.0f);
  ByteWriter<uint16_t>::WriteBigEndian(
      data, std::round(p.x * kChromaticityDenominator));
  ByteWriter<uint16_t>::WriteBigEndian(
      data + 2, std::round(p.y * kChromaticityDenominator));
  return 4;  // Return the number of bytes written.
}

size_t ColorSpaceExtension::WriteLuminance(uint8_t* data,
                                           float f,
                                           int denominator) {
  RTC_DCHECK_GE(f, 0.0f);
  float upscaled_value = f * denominator;
  RTC_DCHECK_LE(upscaled_value, std::numeric_limits<uint16_t>::max());
  ByteWriter<uint16_t>::WriteBigEndian(data, std::round(upscaled_value));
  return 2;  // Return the number of bytes written.
}

bool BaseRtpStringExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                   std::string* str) {
  if (data.empty() || data[0] == 0)  // Valid string extension can't be empty.
    return false;
  const char* cstr = reinterpret_cast<const char*>(data.data());
  // If there is a \0 character in the middle of `data`, treat it as the end
  // of the string. Well-formed string extensions shouldn't contain it.
  str->assign(cstr, strnlen(cstr, data.size()));
  RTC_DCHECK(!str->empty());
  return true;
}

bool BaseRtpStringExtension::Write(rtc::ArrayView<uint8_t> data,
                                   absl::string_view str) {
  if (str.size() > kMaxValueSizeBytes) {
    return false;
  }
  RTC_DCHECK_EQ(data.size(), str.size());
  RTC_DCHECK_GE(str.size(), 1);
  memcpy(data.data(), str.data(), str.size());
  return true;
}
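
// Usage sketch (illustrative): RtpStreamId, RepairedRtpStreamId and RtpMid
// below are all thin wrappers around BaseRtpStringExtension, so writing a
// MID value such as "audio0" only needs a destination buffer of exactly the
// string's length:
//
//   constexpr absl::string_view kMid = "audio0";
//   uint8_t buffer[6];  // Must equal kMid.size().
//   BaseRtpStringExtension::Write(buffer, kMid);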

// Constant declarations for RTP header extension types.
constexpr RTPExtensionType RtpStreamId::kId;
constexpr RTPExtensionType RepairedRtpStreamId::kId;
constexpr RTPExtensionType RtpMid::kId;

// An RTP Header Extension for Inband Comfort Noise
//
// The form of the inband comfort noise extension block:
//
//  0                   1
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | len=0 |N|   level     |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the One-Byte Header Format
//
//  0                   1                   2
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |      ID       |     len=1     |N|    level    |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// Sample Audio Level Encoding Using the Two-Byte Header Format

constexpr RTPExtensionType InbandComfortNoiseExtension::kId;
constexpr uint8_t InbandComfortNoiseExtension::kValueSizeBytes;
constexpr const char InbandComfortNoiseExtension::kUri[];

bool InbandComfortNoiseExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                        absl::optional<uint8_t>* level) {
  if (data.size() != kValueSizeBytes)
    return false;
  *level = (data[0] & 0b1000'0000) != 0
               ? absl::nullopt
               : absl::make_optional(data[0] & 0b0111'1111);
  return true;
}

bool InbandComfortNoiseExtension::Write(rtc::ArrayView<uint8_t> data,
                                        absl::optional<uint8_t> level) {
  RTC_DCHECK_EQ(data.size(), kValueSizeBytes);
  data[0] = 0b0000'0000;
  if (level) {
    if (*level > 127) {
      return false;
    }
    data[0] = 0b1000'0000 | *level;
  }
  return true;
}

// VideoFrameTrackingIdExtension
//
//  0                   1                   2
//  0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+
// |  ID   | L=1   |    video-frame-tracking-id    |
// +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+

constexpr RTPExtensionType VideoFrameTrackingIdExtension::kId;
constexpr uint8_t VideoFrameTrackingIdExtension::kValueSizeBytes;

bool VideoFrameTrackingIdExtension::Parse(rtc::ArrayView<const uint8_t> data,
                                          uint16_t* video_frame_tracking_id) {
  if (data.size() != kValueSizeBytes) {
    return false;
  }
  *video_frame_tracking_id = ByteReader<uint16_t>::ReadBigEndian(data.data());
  return true;
}

bool VideoFrameTrackingIdExtension::Write(rtc::ArrayView<uint8_t> data,
                                          uint16_t video_frame_tracking_id) {
  RTC_DCHECK_EQ(data.size(), kValueSizeBytes);
  ByteWriter<uint16_t>::WriteBigEndian(data.data(), video_frame_tracking_id);
  return true;
}

}  // namespace webrtc