1 /*
2 * Copyright 2017 The WebRTC Project Authors. All rights reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "video/rtp_video_stream_receiver.h"
12
13 #include <memory>
14 #include <utility>
15
16 #include "api/video/video_codec_type.h"
17 #include "api/video/video_frame_type.h"
18 #include "common_video/h264/h264_common.h"
19 #include "media/base/media_constants.h"
20 #include "modules/rtp_rtcp/source/rtp_descriptor_authentication.h"
21 #include "modules/rtp_rtcp/source/rtp_format.h"
22 #include "modules/rtp_rtcp/source/rtp_format_vp9.h"
23 #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
24 #include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
25 #include "modules/rtp_rtcp/source/rtp_header_extensions.h"
26 #include "modules/rtp_rtcp/source/rtp_packet_received.h"
27 #include "modules/rtp_rtcp/source/rtp_packet_to_send.h"
28 #include "modules/utility/include/process_thread.h"
29 #include "modules/video_coding/frame_object.h"
30 #include "modules/video_coding/include/video_coding_defines.h"
31 #include "modules/video_coding/rtp_frame_reference_finder.h"
32 #include "rtc_base/byte_buffer.h"
33 #include "rtc_base/logging.h"
34 #include "system_wrappers/include/clock.h"
35 #include "system_wrappers/include/field_trial.h"
36 #include "test/field_trial.h"
37 #include "test/gmock.h"
38 #include "test/gtest.h"
39 #include "test/mock_frame_transformer.h"
40
41 using ::testing::_;
42 using ::testing::ElementsAre;
43 using ::testing::Invoke;
44 using ::testing::SizeIs;
45 using ::testing::Values;
46
47 namespace webrtc {
48
49 namespace {
50
// Annex B start code that the receiver prepends to each H264 NAL unit; used
// when building expected bitstreams in the H264 tests below.
const uint8_t kH264StartCode[] = {0x00, 0x00, 0x00, 0x01};
52
GetAbsoluteCaptureTimestamps(const EncodedFrame * frame)53 std::vector<uint64_t> GetAbsoluteCaptureTimestamps(const EncodedFrame* frame) {
54 std::vector<uint64_t> result;
55 for (const auto& packet_info : frame->PacketInfos()) {
56 if (packet_info.absolute_capture_time()) {
57 result.push_back(
58 packet_info.absolute_capture_time()->absolute_capture_timestamp);
59 }
60 }
61 return result;
62 }
63
GetGenericVideoHeader(VideoFrameType frame_type)64 RTPVideoHeader GetGenericVideoHeader(VideoFrameType frame_type) {
65 RTPVideoHeader video_header;
66 video_header.is_first_packet_in_frame = true;
67 video_header.is_last_packet_in_frame = true;
68 video_header.codec = kVideoCodecGeneric;
69 video_header.frame_type = frame_type;
70 return video_header;
71 }
72
// Mock of webrtc::Transport; observes RTP/RTCP traffic sent by the receiver
// under test (e.g. RTCP feedback).
class MockTransport : public Transport {
 public:
  MOCK_METHOD(bool,
              SendRtp,
              (const uint8_t*, size_t length, const PacketOptions& options),
              (override));
  MOCK_METHOD(bool, SendRtcp, (const uint8_t*, size_t length), (override));
};
81
// Mock NackSender; lets tests verify which sequence numbers the receiver
// asks to have retransmitted.
class MockNackSender : public NackSender {
 public:
  MOCK_METHOD(void,
              SendNack,
              (const std::vector<uint16_t>& sequence_numbers,
               bool buffering_allowed),
              (override));
};
90
// Mock KeyFrameRequestSender; lets tests verify that the receiver requests a
// key frame (e.g. on a delta frame without a prior key frame).
class MockKeyFrameRequestSender : public KeyFrameRequestSender {
 public:
  MOCK_METHOD(void, RequestKeyFrame, (), (override));
};
95
96 class MockOnCompleteFrameCallback : public OnCompleteFrameCallback {
97 public:
98 MOCK_METHOD(void, DoOnCompleteFrame, (EncodedFrame*), ());
99 MOCK_METHOD(void, DoOnCompleteFrameFailNullptr, (EncodedFrame*), ());
100 MOCK_METHOD(void, DoOnCompleteFrameFailLength, (EncodedFrame*), ());
101 MOCK_METHOD(void, DoOnCompleteFrameFailBitstream, (EncodedFrame*), ());
OnCompleteFrame(std::unique_ptr<EncodedFrame> frame)102 void OnCompleteFrame(std::unique_ptr<EncodedFrame> frame) override {
103 if (!frame) {
104 DoOnCompleteFrameFailNullptr(nullptr);
105 return;
106 }
107 EXPECT_EQ(buffer_.Length(), frame->size());
108 if (buffer_.Length() != frame->size()) {
109 DoOnCompleteFrameFailLength(frame.get());
110 return;
111 }
112 if (frame->size() != buffer_.Length() ||
113 memcmp(buffer_.Data(), frame->data(), buffer_.Length()) != 0) {
114 DoOnCompleteFrameFailBitstream(frame.get());
115 return;
116 }
117 DoOnCompleteFrame(frame.get());
118 }
119
ClearExpectedBitstream()120 void ClearExpectedBitstream() { buffer_.Clear(); }
121
AppendExpectedBitstream(const uint8_t data[],size_t size_in_bytes)122 void AppendExpectedBitstream(const uint8_t data[], size_t size_in_bytes) {
123 // TODO(Johan): Let rtc::ByteBuffer handle uint8_t* instead of char*.
124 buffer_.WriteBytes(reinterpret_cast<const char*>(data), size_in_bytes);
125 }
126 rtc::ByteBufferWriter buffer_;
127 };
128
// Mock secondary RTP packet sink used by the Add/RemoveSecondarySink tests.
class MockRtpPacketSink : public RtpPacketSinkInterface {
 public:
  MOCK_METHOD(void, OnRtpPacket, (const RtpPacketReceived&), (override));
};
133
// Defaults used by CreateRtpPacketReceived() and the generic-codec tests.
constexpr uint32_t kSsrc = 111;
constexpr uint16_t kSequenceNumber = 222;
constexpr int kPayloadType = 100;
// Registered as the RED (RFC 2198 redundancy) payload type in CreateConfig().
constexpr int kRedPayloadType = 125;
138
CreateRtpPacketReceived()139 std::unique_ptr<RtpPacketReceived> CreateRtpPacketReceived() {
140 auto packet = std::make_unique<RtpPacketReceived>();
141 packet->SetSsrc(kSsrc);
142 packet->SetSequenceNumber(kSequenceNumber);
143 packet->SetPayloadType(kPayloadType);
144 return packet;
145 }
146
// gMock matcher: two RtpPacketReceived are considered "the same packet" when
// their SSRC and sequence number match (payload contents are ignored).
MATCHER_P(SamePacketAs, other, "") {
  return arg.Ssrc() == other.Ssrc() &&
         arg.SequenceNumber() == other.SequenceNumber();
}
151
152 } // namespace
153
// Test fixture that owns a real RtpVideoStreamReceiver wired to mock
// transport, NACK/key-frame senders and a frame-completion callback. The
// receiver is built in the constructor (not SetUp()) and is pre-registered
// with a generic codec on kPayloadType.
class RtpVideoStreamReceiverTest : public ::testing::Test {
 public:
  RtpVideoStreamReceiverTest() : RtpVideoStreamReceiverTest("") {}
  explicit RtpVideoStreamReceiverTest(std::string field_trials)
      : override_field_trials_(field_trials),
        config_(CreateConfig()),
        process_thread_(ProcessThread::Create("TestThread")) {
    rtp_receive_statistics_ =
        ReceiveStatistics::Create(Clock::GetRealTimeClock());
    rtp_video_stream_receiver_ = std::make_unique<RtpVideoStreamReceiver>(
        Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_,
        rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(),
        &mock_nack_sender_, &mock_key_frame_request_sender_,
        &mock_on_complete_frame_callback_, nullptr, nullptr);
    VideoCodec codec;
    codec.codecType = kVideoCodecGeneric;
    rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {},
                                                /*raw_payload=*/false);
  }

  // Returns an H264 header with an empty RTPVideoHeaderH264 type-specific
  // part; tests fill in NALU info via AddSps/AddPps/AddIdr below.
  RTPVideoHeader GetDefaultH264VideoHeader() {
    RTPVideoHeader video_header;
    video_header.codec = kVideoCodecH264;
    video_header.video_type_header.emplace<RTPVideoHeaderH264>();
    return video_header;
  }

  // TODO(Johan): refactor h264_sps_pps_tracker_unittests.cc to avoid duplicate
  // code.
  // Appends an SPS NAL (header byte + sps_id) to `data` and records the NALU
  // in `video_header`.
  void AddSps(RTPVideoHeader* video_header,
              uint8_t sps_id,
              rtc::CopyOnWriteBuffer* data) {
    NaluInfo info;
    info.type = H264::NaluType::kSps;
    info.sps_id = sps_id;
    info.pps_id = -1;
    data->AppendData<uint8_t, 2>({H264::NaluType::kSps, sps_id});
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

  // Appends a PPS NAL referencing `sps_id` to `data` and records the NALU in
  // `video_header`.
  void AddPps(RTPVideoHeader* video_header,
              uint8_t sps_id,
              uint8_t pps_id,
              rtc::CopyOnWriteBuffer* data) {
    NaluInfo info;
    info.type = H264::NaluType::kPps;
    info.sps_id = sps_id;
    info.pps_id = pps_id;
    data->AppendData<uint8_t, 2>({H264::NaluType::kPps, pps_id});
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

  // Records an IDR NALU referencing `pps_id` in `video_header`; the IDR
  // payload bytes themselves are supplied by the caller.
  void AddIdr(RTPVideoHeader* video_header, int pps_id) {
    NaluInfo info;
    info.type = H264::NaluType::kIdr;
    info.sps_id = -1;
    info.pps_id = pps_id;
    auto& h264 = absl::get<RTPVideoHeaderH264>(video_header->video_type_header);
    h264.nalus[h264.nalus_length++] = info;
  }

 protected:
  // Minimal receive-stream config: distinct remote/local SSRCs plus the RED
  // payload type used by the RED tests.
  static VideoReceiveStream::Config CreateConfig() {
    VideoReceiveStream::Config config(nullptr);
    config.rtp.remote_ssrc = 1111;
    config.rtp.local_ssrc = 2222;
    config.rtp.red_payload_type = kRedPayloadType;
    return config;
  }

  const webrtc::test::ScopedFieldTrials override_field_trials_;
  VideoReceiveStream::Config config_;
  MockNackSender mock_nack_sender_;
  MockKeyFrameRequestSender mock_key_frame_request_sender_;
  MockTransport mock_transport_;
  MockOnCompleteFrameCallback mock_on_complete_frame_callback_;
  std::unique_ptr<ProcessThread> process_thread_;
  std::unique_ptr<ReceiveStatistics> rtp_receive_statistics_;
  std::unique_ptr<RtpVideoStreamReceiver> rtp_video_stream_receiver_;
};
236
TEST_F(RtpVideoStreamReceiverTest, CacheColorSpaceFromLastPacketOfKeyframe) {
  // Test that color space is cached from the last packet of a key frame and
  // that it's not reset by padding packets without color space.
  constexpr int kVp9PayloadType = 99;
  const ColorSpace kColorSpace(
      ColorSpace::PrimaryID::kFILM, ColorSpace::TransferID::kBT2020_12,
      ColorSpace::MatrixID::kBT2020_NCL, ColorSpace::RangeID::kFull);
  const std::vector<uint8_t> kKeyFramePayload = {0, 1, 2, 3, 4, 5,
                                                 6, 7, 8, 9, 10};
  const std::vector<uint8_t> kDeltaFramePayload = {0, 1, 2, 3, 4};

  // Anonymous helper class that generates received packets.
  class {
   public:
    // Packetizes `payload` as VP9 with small payload limits so a key frame
    // spans two packets.
    void SetPayload(const std::vector<uint8_t>& payload,
                    VideoFrameType video_frame_type) {
      video_frame_type_ = video_frame_type;
      RtpPacketizer::PayloadSizeLimits pay_load_size_limits;
      // Reduce max payload length to make sure the key frame generates two
      // packets.
      pay_load_size_limits.max_payload_len = 8;
      RTPVideoHeaderVP9 rtp_video_header_vp9;
      rtp_video_header_vp9.InitRTPVideoHeaderVP9();
      rtp_video_header_vp9.inter_pic_predicted =
          (video_frame_type == VideoFrameType::kVideoFrameDelta);
      rtp_packetizer_ = std::make_unique<RtpPacketizerVp9>(
          payload, pay_load_size_limits, rtp_video_header_vp9);
    }

    size_t NumPackets() { return rtp_packetizer_->NumPackets(); }
    void SetColorSpace(const ColorSpace& color_space) {
      color_space_ = color_space;
    }

    // Produces the next packet; the color space extension is attached only to
    // the last remaining packet of a key frame.
    RtpPacketReceived NextPacket() {
      RtpHeaderExtensionMap extension_map;
      extension_map.Register<ColorSpaceExtension>(1);
      RtpPacketToSend packet_to_send(&extension_map);
      packet_to_send.SetSequenceNumber(sequence_number_++);
      packet_to_send.SetSsrc(kSsrc);
      packet_to_send.SetPayloadType(kVp9PayloadType);
      bool include_color_space =
          (rtp_packetizer_->NumPackets() == 1u &&
           video_frame_type_ == VideoFrameType::kVideoFrameKey);
      if (include_color_space) {
        EXPECT_TRUE(
            packet_to_send.SetExtension<ColorSpaceExtension>(color_space_));
      }
      rtp_packetizer_->NextPacket(&packet_to_send);

      // Round-trip through serialization to obtain a "received" packet.
      RtpPacketReceived received_packet(&extension_map);
      received_packet.Parse(packet_to_send.data(), packet_to_send.size());
      return received_packet;
    }

   private:
    uint16_t sequence_number_ = 0;
    VideoFrameType video_frame_type_;
    ColorSpace color_space_;
    std::unique_ptr<RtpPacketizer> rtp_packetizer_;
  } received_packet_generator;
  received_packet_generator.SetColorSpace(kColorSpace);

  // Prepare the receiver for VP9.
  VideoCodec codec;
  codec.codecType = kVideoCodecVP9;
  std::map<std::string, std::string> codec_params;
  rtp_video_stream_receiver_->AddReceiveCodec(kVp9PayloadType, codec,
                                              codec_params,
                                              /*raw_payload=*/false);

  // Generate key frame packets.
  received_packet_generator.SetPayload(kKeyFramePayload,
                                       VideoFrameType::kVideoFrameKey);
  EXPECT_EQ(received_packet_generator.NumPackets(), 2u);
  RtpPacketReceived key_frame_packet1 = received_packet_generator.NextPacket();
  RtpPacketReceived key_frame_packet2 = received_packet_generator.NextPacket();

  // Generate delta frame packet.
  received_packet_generator.SetPayload(kDeltaFramePayload,
                                       VideoFrameType::kVideoFrameDelta);
  EXPECT_EQ(received_packet_generator.NumPackets(), 1u);
  RtpPacketReceived delta_frame_packet = received_packet_generator.NextPacket();

  rtp_video_stream_receiver_->StartReceive();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kKeyFramePayload.data(), kKeyFramePayload.size());

  // Send the key frame and expect a callback with color space information.
  EXPECT_FALSE(key_frame_packet1.GetExtension<ColorSpaceExtension>());
  EXPECT_TRUE(key_frame_packet2.GetExtension<ColorSpaceExtension>());
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([kColorSpace](EncodedFrame* frame) {
        ASSERT_TRUE(frame->EncodedImage().ColorSpace());
        EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
      }));
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet2);
  // Resend the first key frame packet to simulate padding for example.
  rtp_video_stream_receiver_->OnRtpPacket(key_frame_packet1);

  mock_on_complete_frame_callback_.ClearExpectedBitstream();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kDeltaFramePayload.data(), kDeltaFramePayload.size());

  // Expect delta frame to have color space set even though color space not
  // included in the RTP packet.
  EXPECT_FALSE(delta_frame_packet.GetExtension<ColorSpaceExtension>());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([kColorSpace](EncodedFrame* frame) {
        ASSERT_TRUE(frame->EncodedImage().ColorSpace());
        EXPECT_EQ(*frame->EncodedImage().ColorSpace(), kColorSpace);
      }));
  rtp_video_stream_receiver_->OnRtpPacket(delta_frame_packet);
}
352
// A single-packet generic-codec key frame is delivered as one complete frame
// whose bitstream equals the received payload.
TEST_F(RtpVideoStreamReceiverTest, GenericKeyFrame) {
  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload.data(),
                                                           payload.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
                                                    video_header);
}
366
// The absolute capture time extension of a received packet must surface in
// the PacketInfos of the resulting frame.
TEST_F(RtpVideoStreamReceiverTest, PacketInfoIsPropagatedIntoVideoFrames) {
  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
  constexpr int kId0 = 1;

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
  RtpPacketReceived rtp_packet(&extension_map);
  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSsrc(kSsrc);
  rtp_packet.SetSequenceNumber(1);
  rtp_packet.SetTimestamp(1);
  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
                          /*estimated_capture_clock_offset=*/absl::nullopt});

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload.data(),
                                                           payload.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce([kAbsoluteCaptureTimestamp](EncodedFrame* frame) {
        EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame),
                    ElementsAre(kAbsoluteCaptureTimestamp));
      });
  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
                                                    video_header);
}
395
// A packet without the absolute capture time extension still yields a frame
// with one capture timestamp, extrapolated from the preceding packet.
TEST_F(RtpVideoStreamReceiverTest,
       MissingAbsoluteCaptureTimeIsFilledWithExtrapolatedValue) {
  constexpr uint64_t kAbsoluteCaptureTimestamp = 12;
  constexpr int kId0 = 1;

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<AbsoluteCaptureTimeExtension>(kId0);
  RtpPacketReceived rtp_packet(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);

  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  uint16_t sequence_number = 1;
  uint32_t rtp_timestamp = 1;
  rtp_packet.SetSequenceNumber(sequence_number);
  rtp_packet.SetTimestamp(rtp_timestamp);
  rtp_packet.SetSsrc(kSsrc);
  rtp_packet.SetExtension<AbsoluteCaptureTimeExtension>(
      AbsoluteCaptureTime{kAbsoluteCaptureTimestamp,
                          /*estimated_capture_clock_offset=*/absl::nullopt});

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  // Rtp packet without absolute capture time.
  rtp_packet = RtpPacketReceived(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(++sequence_number);
  rtp_packet.SetTimestamp(++rtp_timestamp);
  rtp_packet.SetSsrc(kSsrc);

  // There is no absolute capture time in the second packet.
  // Expect rtp video stream receiver to extrapolate it for the resulting video
  // frame using absolute capture time from the previous packet.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([](EncodedFrame* frame) {
        EXPECT_THAT(GetAbsoluteCaptureTimestamps(frame), SizeIs(1));
      }));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}
441
// A RED packet whose encapsulated payload is itself RED must not make the
// receiver recurse indefinitely while unwrapping it.
TEST_F(RtpVideoStreamReceiverTest, NoInfiniteRecursionOnEncapsulatedRedPacket) {
  const std::vector<uint8_t> raw_packet({
      0x80,              // RTP version.
      kRedPayloadType,   // Payload type.
      0, 0, 0, 0, 0, 0,  // Don't care.
      0, 0, 0x4, 0x57,   // SSRC
      kRedPayloadType,   // RED header.
      0, 0, 0, 0, 0      // Don't care.
  });
  RtpPacketReceived packet;
  EXPECT_TRUE(packet.Parse(raw_packet.data(), raw_packet.size()));
  rtp_video_stream_receiver_->StartReceive();
  rtp_video_stream_receiver_->OnRtpPacket(packet);
}
456
// A RED packet with an empty RTP payload must be dropped without reading past
// the end of the buffer (verified by the ASAN bots).
TEST_F(RtpVideoStreamReceiverTest,
       DropsPacketWithRedPayloadTypeAndEmptyPayload) {
  // NOTE(review): shadows the file-scope kRedPayloadType with the same value.
  const uint8_t kRedPayloadType = 125;
  config_.rtp.red_payload_type = kRedPayloadType;
  // NOTE(review): the fixture builds the receiver in its constructor and does
  // not appear to override SetUp(), so this call looks like the default no-op
  // — confirm it actually re-creates the receiver as the comment claims.
  SetUp();  // re-create rtp_video_stream_receiver with red payload type.
  // clang-format off
  const uint8_t data[] = {
      0x80,              // RTP version.
      kRedPayloadType,   // Payload type.
      0, 0, 0, 0, 0, 0,  // Don't care.
      0, 0, 0x4, 0x57,   // SSRC
      // Empty rtp payload.
  };
  // clang-format on
  RtpPacketReceived packet;
  // Manually convert to CopyOnWriteBuffer to be sure capacity == size
  // and asan bot can catch read buffer overflow.
  EXPECT_TRUE(packet.Parse(rtc::CopyOnWriteBuffer(data)));
  rtp_video_stream_receiver_->StartReceive();
  rtp_video_stream_receiver_->OnRtpPacket(packet);
  // Expect asan doesn't find anything.
}
479
// When the delivered frame's bitstream differs from the expectation, the
// callback must report a bitstream failure rather than a complete frame.
TEST_F(RtpVideoStreamReceiverTest, GenericKeyFrameBitstreamError) {
  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  // Deliberately differs from the received payload in the last byte.
  constexpr uint8_t kMismatchingBitstream[] = {1, 2, 3, 0xff};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kMismatchingBitstream, sizeof(kMismatchingBitstream));
  EXPECT_CALL(mock_on_complete_frame_callback_,
              DoOnCompleteFrameFailBitstream(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
                                                    video_header);
}
495
// H264 variant of the fixture, parameterized on a field-trial string that is
// forwarded to the base fixture's constructor.
class RtpVideoStreamReceiverTestH264
    : public RtpVideoStreamReceiverTest,
      public ::testing::WithParamInterface<std::string> {
 protected:
  RtpVideoStreamReceiverTestH264() : RtpVideoStreamReceiverTest(GetParam()) {}
};
502
// Run the H264 tests both without and with the
// WebRTC-SpsPpsIdrIsH264Keyframe field trial enabled.
INSTANTIATE_TEST_SUITE_P(SpsPpsIdrIsKeyframe,
                         RtpVideoStreamReceiverTestH264,
                         Values("", "WebRTC-SpsPpsIdrIsH264Keyframe/Enabled/"));
506
// In-band SPS and PPS (sent as empty frames) must be emitted, each prefixed
// with a start code, before the IDR completes as a key frame.
TEST_P(RtpVideoStreamReceiverTestH264, InBandSpsPps) {
  rtc::CopyOnWriteBuffer sps_data;
  RtpPacketReceived rtp_packet;
  RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
  AddSps(&sps_video_header, 0, &sps_data);
  rtp_packet.SetSequenceNumber(0);
  rtp_packet.SetPayloadType(kPayloadType);
  sps_video_header.is_first_packet_in_frame = true;
  sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
                                                           sps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
                                                    sps_video_header);

  rtc::CopyOnWriteBuffer pps_data;
  RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
  AddPps(&pps_video_header, 0, 1, &pps_data);
  rtp_packet.SetSequenceNumber(1);
  pps_video_header.is_first_packet_in_frame = true;
  pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
                                                           pps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
                                                    pps_video_header);

  // IDR referencing the PPS above; expected to complete as a key frame.
  rtc::CopyOnWriteBuffer idr_data;
  RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
  AddIdr(&idr_video_header, 1);
  rtp_packet.SetSequenceNumber(2);
  idr_video_header.is_first_packet_in_frame = true;
  idr_video_header.is_last_packet_in_frame = true;
  idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
  const uint8_t idr[] = {0x65, 1, 2, 3};
  idr_data.AppendData(idr);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
}
553
// SPS/PPS provided out-of-band via the sprop-parameter-sets fmtp attribute
// must be inserted (with start codes) ahead of the received IDR data.
TEST_P(RtpVideoStreamReceiverTestH264, OutOfBandFmtpSpsPps) {
  constexpr int kPayloadType = 99;
  VideoCodec codec;
  std::map<std::string, std::string> codec_params;
  // Example parameter sets from https://tools.ietf.org/html/rfc3984#section-8.2
  // .
  codec_params.insert(
      {cricket::kH264FmtpSpropParameterSets, "Z0IACpZTBYmI,aMljiA=="});
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params,
                                              /*raw_payload=*/false);
  // Binary form of the first sprop parameter set (SPS) above.
  const uint8_t binary_sps[] = {0x67, 0x42, 0x00, 0x0a, 0x96,
                                0x53, 0x05, 0x89, 0x88};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_sps,
                                                           sizeof(binary_sps));
  // Binary form of the second sprop parameter set (PPS) above.
  const uint8_t binary_pps[] = {0x68, 0xc9, 0x63, 0x88};
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(binary_pps,
                                                           sizeof(binary_pps));

  RtpPacketReceived rtp_packet;
  RTPVideoHeader video_header = GetDefaultH264VideoHeader();
  AddIdr(&video_header, 0);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(2);
  video_header.is_first_packet_in_frame = true;
  video_header.is_last_packet_in_frame = true;
  video_header.codec = kVideoCodecH264;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  rtc::CopyOnWriteBuffer data({1, 2, 3});
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}
594
// With SpsPpsIdrInKeyframe forced (via field trial or codec_params), the IDR
// directly preceded by SPS+PPS completes as a key frame, while a subsequent
// IDR without new parameter sets does not.
TEST_P(RtpVideoStreamReceiverTestH264, ForceSpsPpsIdrIsKeyframe) {
  constexpr int kPayloadType = 99;
  VideoCodec codec;
  std::map<std::string, std::string> codec_params;
  if (GetParam() ==
      "") {  // Forcing can be done either with field trial or codec_params.
    codec_params.insert({cricket::kH264FmtpSpsPpsIdrInKeyframe, ""});
  }
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, codec_params,
                                              /*raw_payload=*/false);
  rtc::CopyOnWriteBuffer sps_data;
  RtpPacketReceived rtp_packet;
  RTPVideoHeader sps_video_header = GetDefaultH264VideoHeader();
  AddSps(&sps_video_header, 0, &sps_data);
  rtp_packet.SetSequenceNumber(0);
  rtp_packet.SetPayloadType(kPayloadType);
  sps_video_header.is_first_packet_in_frame = true;
  sps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(sps_data.data(),
                                                           sps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(sps_data, rtp_packet,
                                                    sps_video_header);

  rtc::CopyOnWriteBuffer pps_data;
  RTPVideoHeader pps_video_header = GetDefaultH264VideoHeader();
  AddPps(&pps_video_header, 0, 1, &pps_data);
  rtp_packet.SetSequenceNumber(1);
  pps_video_header.is_first_packet_in_frame = true;
  pps_video_header.frame_type = VideoFrameType::kEmptyFrame;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(pps_data.data(),
                                                           pps_data.size());
  rtp_video_stream_receiver_->OnReceivedPayloadData(pps_data, rtp_packet,
                                                    pps_video_header);

  rtc::CopyOnWriteBuffer idr_data;
  RTPVideoHeader idr_video_header = GetDefaultH264VideoHeader();
  AddIdr(&idr_video_header, 1);
  rtp_packet.SetSequenceNumber(2);
  idr_video_header.is_first_packet_in_frame = true;
  idr_video_header.is_last_packet_in_frame = true;
  idr_video_header.frame_type = VideoFrameType::kVideoFrameKey;
  const uint8_t idr[] = {0x65, 1, 2, 3};
  idr_data.AppendData(idr);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  // First IDR, preceded by SPS+PPS: reported as a key frame.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(
          [&](EncodedFrame* frame) { EXPECT_TRUE(frame->is_keyframe()); });
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
  mock_on_complete_frame_callback_.ClearExpectedBitstream();
  mock_on_complete_frame_callback_.AppendExpectedBitstream(
      kH264StartCode, sizeof(kH264StartCode));
  mock_on_complete_frame_callback_.AppendExpectedBitstream(idr_data.data(),
                                                           idr_data.size());
  // Second IDR without new SPS/PPS: not reported as a key frame.
  rtp_packet.SetSequenceNumber(3);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(
          [&](EncodedFrame* frame) { EXPECT_FALSE(frame->is_keyframe()); });
  rtp_video_stream_receiver_->OnReceivedPayloadData(idr_data, rtp_packet,
                                                    idr_video_header);
}
663
// Empty payloads (padding) inside the media stream must not stall frame
// assembly, including when the padding packet arrives out of order.
TEST_F(RtpVideoStreamReceiverTest, PaddingInMediaStream) {
  RtpPacketReceived rtp_packet;
  RTPVideoHeader video_header = GetDefaultH264VideoHeader();
  rtc::CopyOnWriteBuffer data({1, 2, 3});
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(2);
  video_header.is_first_packet_in_frame = true;
  video_header.is_last_packet_in_frame = true;
  video_header.codec = kVideoCodecGeneric;
  video_header.frame_type = VideoFrameType::kVideoFrameKey;
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  // Padding packet (empty payload) directly after the key frame.
  rtp_packet.SetSequenceNumber(3);
  rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
                                                    video_header);

  rtp_packet.SetSequenceNumber(4);
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  video_header.frame_type = VideoFrameType::kVideoFrameDelta;
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  // Sequence number 5 is still missing: no complete-frame callback expected
  // for this packet yet.
  rtp_packet.SetSequenceNumber(6);
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);

  // The late padding packet (5) closes the gap; the frame now completes.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_));
  rtp_packet.SetSequenceNumber(5);
  rtp_video_stream_receiver_->OnReceivedPayloadData({}, rtp_packet,
                                                    video_header);
}
700
// Receiving a delta frame before any key frame must trigger a key frame
// request upstream.
TEST_F(RtpVideoStreamReceiverTest, RequestKeyframeIfFirstFrameIsDelta) {
  rtc::CopyOnWriteBuffer payload({1, 2, 3, 4});
  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
  rtp_video_stream_receiver_->OnReceivedPayloadData(payload, rtp_packet,
                                                    video_header);
}
712
// Flooding the receiver with incomplete frames until the packet buffer is
// full must trigger a key frame request.
TEST_F(RtpVideoStreamReceiverTest, RequestKeyframeWhenPacketBufferGetsFull) {
  constexpr int kPacketBufferMaxSize = 2048;

  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameDelta);
  // Incomplete frames so that the packet buffer is filling up.
  video_header.is_last_packet_in_frame = false;
  uint16_t start_sequence_number = 1234;
  rtp_packet.SetSequenceNumber(start_sequence_number);
  // Step the sequence number by 2 to leave gaps between packets.
  while (rtp_packet.SequenceNumber() - start_sequence_number <
         kPacketBufferMaxSize) {
    rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                      video_header);
    rtp_packet.SetSequenceNumber(rtp_packet.SequenceNumber() + 2);
  }

  EXPECT_CALL(mock_key_frame_request_sender_, RequestKeyFrame());
  rtp_video_stream_receiver_->OnReceivedPayloadData(data, rtp_packet,
                                                    video_header);
}
736
// Every registered secondary sink receives each incoming RTP packet.
TEST_F(RtpVideoStreamReceiverTest, SecondarySinksGetRtpNotifications) {
  rtp_video_stream_receiver_->StartReceive();

  MockRtpPacketSink sink_a;
  MockRtpPacketSink sink_b;
  rtp_video_stream_receiver_->AddSecondarySink(&sink_a);
  rtp_video_stream_receiver_->AddSecondarySink(&sink_b);

  std::unique_ptr<RtpPacketReceived> packet = CreateRtpPacketReceived();
  EXPECT_CALL(sink_a, OnRtpPacket(SamePacketAs(*packet)));
  EXPECT_CALL(sink_b, OnRtpPacket(SamePacketAs(*packet)));

  rtp_video_stream_receiver_->OnRtpPacket(*packet);

  // Tear down: stop receiving and detach both sinks.
  rtp_video_stream_receiver_->StopReceive();
  rtp_video_stream_receiver_->RemoveSecondarySink(&sink_a);
  rtp_video_stream_receiver_->RemoveSecondarySink(&sink_b);
}
757
TEST_F(RtpVideoStreamReceiverTest, RemovedSecondarySinksGetNoRtpNotifications) {
  // Once removed, a secondary sink must not be notified about packets.
  rtp_video_stream_receiver_->StartReceive();

  MockRtpPacketSink sink;
  rtp_video_stream_receiver_->AddSecondarySink(&sink);
  rtp_video_stream_receiver_->RemoveSecondarySink(&sink);

  EXPECT_CALL(sink, OnRtpPacket(_)).Times(0);
  auto packet = CreateRtpPacketReceived();
  rtp_video_stream_receiver_->OnRtpPacket(*packet);

  // Tear-down.
  rtp_video_stream_receiver_->StopReceive();
}
775
TEST_F(RtpVideoStreamReceiverTest,
       OnlyRemovedSecondarySinksExcludedFromNotifications) {
  // Removing one sink must not affect delivery to sinks still attached.
  rtp_video_stream_receiver_->StartReceive();

  MockRtpPacketSink remaining_sink;
  MockRtpPacketSink detached_sink;
  rtp_video_stream_receiver_->AddSecondarySink(&remaining_sink);
  rtp_video_stream_receiver_->AddSecondarySink(&detached_sink);
  rtp_video_stream_receiver_->RemoveSecondarySink(&detached_sink);

  auto packet = CreateRtpPacketReceived();
  // Only the still-attached sink should be notified.
  EXPECT_CALL(remaining_sink, OnRtpPacket(SamePacketAs(*packet)));
  rtp_video_stream_receiver_->OnRtpPacket(*packet);

  // Tear-down.
  rtp_video_stream_receiver_->StopReceive();
  rtp_video_stream_receiver_->RemoveSecondarySink(&remaining_sink);
}
796
TEST_F(RtpVideoStreamReceiverTest,
       SecondariesOfNonStartedStreamGetNoNotifications) {
  // Explicitly put the stream in the |stopped| state, regardless of whether
  // streams start out |started| or |stopped|.
  rtp_video_stream_receiver_->StopReceive();

  MockRtpPacketSink sink;
  rtp_video_stream_receiver_->AddSecondarySink(&sink);

  // A non-started stream forwards nothing to secondary sinks.
  EXPECT_CALL(sink, OnRtpPacket(_)).Times(0);
  auto packet = CreateRtpPacketReceived();
  rtp_video_stream_receiver_->OnRtpPacket(*packet);

  // Tear-down.
  rtp_video_stream_receiver_->RemoveSecondarySink(&sink);
}
814
TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorOnePacket) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kSpatialIndex = 1;

  rtp_video_stream_receiver_->StartReceive();

  // Register the generic frame descriptor extension (version 00) on id 5 so
  // the packet can carry it.
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived rtp_packet(&extension_map);
  rtp_packet.SetPayloadType(kPayloadType);

  // Single-packet frame (first == last in sub-frame) with frame id 100, one
  // spatial layer at kSpatialIndex, and two referenced frames at diffs
  // 90 and 80.
  RtpGenericFrameDescriptor generic_descriptor;
  generic_descriptor.SetFirstPacketInSubFrame(true);
  generic_descriptor.SetLastPacketInSubFrame(true);
  generic_descriptor.SetFrameId(100);
  generic_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
  generic_descriptor.AddFrameDependencyDiff(90);
  generic_descriptor.AddFrameDependencyDiff(80);
  ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      generic_descriptor));

  uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
  memcpy(payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  rtp_packet.SetMarker(true);
  rtp_packet.SetPayloadType(kPayloadType);
  rtp_packet.SetSequenceNumber(1);

  // The assembled frame should reflect the descriptor: two references
  // (frame id minus each diff) and the spatial index from the bitmask.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(Invoke([kSpatialIndex](EncodedFrame* frame) {
        EXPECT_EQ(frame->num_references, 2U);
        EXPECT_EQ(frame->references[0], frame->Id() - 90);
        EXPECT_EQ(frame->references[1], frame->Id() - 80);
        EXPECT_EQ(frame->SpatialIndex(), kSpatialIndex);
        EXPECT_THAT(frame->PacketInfos(), SizeIs(1));
      }));

  rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
}
857
TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorTwoPackets) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kSpatialIndex = 1;

  rtp_video_stream_receiver_->StartReceive();

  // Register the generic frame descriptor extension (version 00) on id 5.
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived first_packet(&extension_map);

  // First packet of the frame: carries the frame id, spatial layer bitmask
  // and resolution, but is not the last packet of the sub-frame.
  RtpGenericFrameDescriptor first_packet_descriptor;
  first_packet_descriptor.SetFirstPacketInSubFrame(true);
  first_packet_descriptor.SetLastPacketInSubFrame(false);
  first_packet_descriptor.SetFrameId(100);
  first_packet_descriptor.SetSpatialLayersBitmask(1 << kSpatialIndex);
  first_packet_descriptor.SetResolution(480, 360);
  ASSERT_TRUE(first_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      first_packet_descriptor));

  uint8_t* first_packet_payload = first_packet.SetPayloadSize(data.size());
  memcpy(first_packet_payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  first_packet.SetPayloadType(kPayloadType);
  first_packet.SetSequenceNumber(1);
  rtp_video_stream_receiver_->OnRtpPacket(first_packet);

  // Second (and last) packet of the same frame; the marker bit plus
  // last-in-sub-frame completes the frame.
  RtpPacketReceived second_packet(&extension_map);
  RtpGenericFrameDescriptor second_packet_descriptor;
  second_packet_descriptor.SetFirstPacketInSubFrame(false);
  second_packet_descriptor.SetLastPacketInSubFrame(true);
  ASSERT_TRUE(second_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
      second_packet_descriptor));

  second_packet.SetMarker(true);
  second_packet.SetPayloadType(kPayloadType);
  second_packet.SetSequenceNumber(2);

  uint8_t* second_packet_payload = second_packet.SetPayloadSize(data.size());
  memcpy(second_packet_payload, data.data(), data.size());
  // The first byte is the header, so we ignore the first byte of |data|.
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data() + 1,
                                                           data.size() - 1);

  // The assembled frame should expose the metadata from the first packet's
  // descriptor and contain packet info for both packets.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(Invoke([kSpatialIndex](EncodedFrame* frame) {
        EXPECT_EQ(frame->num_references, 0U);
        EXPECT_EQ(frame->SpatialIndex(), kSpatialIndex);
        EXPECT_EQ(frame->EncodedImage()._encodedWidth, 480u);
        EXPECT_EQ(frame->EncodedImage()._encodedHeight, 360u);
        EXPECT_THAT(frame->PacketInfos(), SizeIs(2));
      }));

  rtp_video_stream_receiver_->OnRtpPacket(second_packet);
}
915
TEST_F(RtpVideoStreamReceiverTest, ParseGenericDescriptorRawPayload) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  const int kRawPayloadType = 123;

  // Register the payload type as raw so no depacketizer strips any header.
  VideoCodec codec;
  rtp_video_stream_receiver_->AddReceiveCodec(kRawPayloadType, codec, {},
                                              /*raw_payload=*/true);
  rtp_video_stream_receiver_->StartReceive();

  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);
  RtpPacketReceived packet(&extension_map);
  packet.SetMarker(true);
  packet.SetPayloadType(kRawPayloadType);
  packet.SetSequenceNumber(1);

  // Single-packet frame.
  RtpGenericFrameDescriptor descriptor;
  descriptor.SetFirstPacketInSubFrame(true);
  descriptor.SetLastPacketInSubFrame(true);
  ASSERT_TRUE(
      packet.SetExtension<RtpGenericFrameDescriptorExtension00>(descriptor));

  // For a raw payload the whole payload is the bitstream; nothing is
  // interpreted as a payload header.
  uint8_t* payload = packet.SetPayloadSize(data.size());
  memcpy(payload, data.data(), data.size());
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());

  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  rtp_video_stream_receiver_->OnRtpPacket(packet);
}
947
TEST_F(RtpVideoStreamReceiverTest, UnwrapsFrameId) {
  const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
  // Shadows the fixture-level payload type on purpose; registered as raw
  // below.
  const int kPayloadType = 123;

  VideoCodec codec;
  rtp_video_stream_receiver_->AddReceiveCodec(kPayloadType, codec, {},
                                              /*raw_payload=*/true);
  rtp_video_stream_receiver_->StartReceive();
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<RtpGenericFrameDescriptorExtension00>(5);

  uint16_t rtp_sequence_number = 1;
  // Builds a single-packet frame whose generic descriptor carries
  // |wrapped_frame_id| (a 16-bit value) and feeds it to the receiver.
  auto inject_packet = [&](uint16_t wrapped_frame_id) {
    RtpPacketReceived rtp_packet(&extension_map);

    RtpGenericFrameDescriptor generic_descriptor;
    generic_descriptor.SetFirstPacketInSubFrame(true);
    generic_descriptor.SetLastPacketInSubFrame(true);
    generic_descriptor.SetFrameId(wrapped_frame_id);
    ASSERT_TRUE(rtp_packet.SetExtension<RtpGenericFrameDescriptorExtension00>(
        generic_descriptor));

    uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
    ASSERT_TRUE(payload);
    memcpy(payload, data.data(), data.size());
    mock_on_complete_frame_callback_.ClearExpectedBitstream();
    mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                             data.size());
    rtp_packet.SetMarker(true);
    rtp_packet.SetPayloadType(kPayloadType);
    rtp_packet.SetSequenceNumber(++rtp_sequence_number);
    rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
  };

  int64_t first_picture_id;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](EncodedFrame* frame) { first_picture_id = frame->Id(); });
  inject_packet(/*wrapped_frame_id=*/0xffff);

  // 0x0002 follows 0xffff across the 16-bit wrap, so the unwrapped ids must
  // differ by exactly 3.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](EncodedFrame* frame) {
        EXPECT_EQ(frame->Id() - first_picture_id, 3);
      });
  inject_packet(/*wrapped_frame_id=*/0x0002);
}
993
// Fixture for tests that exercise parsing of the DependencyDescriptor RTP
// header extension.
class RtpVideoStreamReceiverDependencyDescriptorTest
    : public RtpVideoStreamReceiverTest {
 public:
  RtpVideoStreamReceiverDependencyDescriptorTest() {
    // Register a raw payload type, map the DependencyDescriptor extension to
    // extension id 7, and start receiving.
    VideoCodec codec;
    rtp_video_stream_receiver_->AddReceiveCodec(payload_type_, codec, {},
                                                /*raw_payload=*/true);
    extension_map_.Register<RtpDependencyDescriptorExtension>(7);
    rtp_video_stream_receiver_->StartReceive();
  }

  // Returns some valid structure for the DependencyDescriptors.
  // First template of that structure always fit for a key frame.
  static FrameDependencyStructure CreateStreamStructure() {
    FrameDependencyStructure stream_structure;
    stream_structure.num_decode_targets = 1;
    stream_structure.templates = {
        FrameDependencyTemplate().Dtis("S"),
        FrameDependencyTemplate().Dtis("S").FrameDiffs({1}),
    };
    return stream_structure;
  }

  // Builds a single-packet frame carrying |dependency_descriptor| (encoded
  // against |stream_structure|) and feeds it to the receiver. Also resets
  // the expected bitstream to this packet's payload.
  void InjectPacketWith(const FrameDependencyStructure& stream_structure,
                        const DependencyDescriptor& dependency_descriptor) {
    const std::vector<uint8_t> data = {0, 1, 2, 3, 4};
    RtpPacketReceived rtp_packet(&extension_map_);
    ASSERT_TRUE(rtp_packet.SetExtension<RtpDependencyDescriptorExtension>(
        stream_structure, dependency_descriptor));
    uint8_t* payload = rtp_packet.SetPayloadSize(data.size());
    ASSERT_TRUE(payload);
    memcpy(payload, data.data(), data.size());
    mock_on_complete_frame_callback_.ClearExpectedBitstream();
    mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                             data.size());
    rtp_packet.SetMarker(true);
    rtp_packet.SetPayloadType(payload_type_);
    rtp_packet.SetSequenceNumber(++rtp_sequence_number_);
    rtp_video_stream_receiver_->OnRtpPacket(rtp_packet);
  }

 private:
  const int payload_type_ = 123;
  RtpHeaderExtensionMap extension_map_;
  uint16_t rtp_sequence_number_ = 321;
};
1040
TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest, UnwrapsFrameId) {
  FrameDependencyStructure stream_structure = CreateStreamStructure();

  DependencyDescriptor keyframe_descriptor;
  keyframe_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure);
  keyframe_descriptor.frame_dependencies = stream_structure.templates[0];
  keyframe_descriptor.frame_number = 0xfff0;
  // DependencyDescriptor doesn't support reordering delta frame before
  // keyframe. Thus feed a key frame first, then test reordered delta frames.
  int64_t first_picture_id;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](EncodedFrame* frame) { first_picture_id = frame->Id(); });
  InjectPacketWith(stream_structure, keyframe_descriptor);

  DependencyDescriptor deltaframe1_descriptor;
  deltaframe1_descriptor.frame_dependencies = stream_structure.templates[1];
  deltaframe1_descriptor.frame_number = 0xfffe;

  DependencyDescriptor deltaframe2_descriptor;
  // Bug fix: this previously re-assigned |deltaframe1_descriptor|'s
  // dependencies, leaving |deltaframe2_descriptor| with default-constructed
  // frame_dependencies instead of the delta-frame template.
  deltaframe2_descriptor.frame_dependencies = stream_structure.templates[1];
  deltaframe2_descriptor.frame_number = 0x0002;

  // Parser should unwrap frame ids correctly even if packets were reordered by
  // the network.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce([&](EncodedFrame* frame) {
        // 0x0002 - 0xfff0
        EXPECT_EQ(frame->Id() - first_picture_id, 18);
      })
      .WillOnce([&](EncodedFrame* frame) {
        // 0xfffe - 0xfff0
        EXPECT_EQ(frame->Id() - first_picture_id, 14);
      });
  InjectPacketWith(stream_structure, deltaframe2_descriptor);
  InjectPacketWith(stream_structure, deltaframe1_descriptor);
}
1078
TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest,
       DropsLateDeltaFramePacketWithDependencyDescriptorExtension) {
  FrameDependencyStructure stream_structure1 = CreateStreamStructure();
  FrameDependencyStructure stream_structure2 = CreateStreamStructure();
  // Make sure template ids for these two structures do not collide:
  // adjust structure_id (that is also used as template id offset).
  stream_structure1.structure_id = 13;
  stream_structure2.structure_id =
      stream_structure1.structure_id + stream_structure1.templates.size();

  // First key frame attaches the first structure.
  DependencyDescriptor keyframe1_descriptor;
  keyframe1_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure1);
  keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
  keyframe1_descriptor.frame_number = 1;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  InjectPacketWith(stream_structure1, keyframe1_descriptor);

  // Pass in 2nd key frame with different structure.
  DependencyDescriptor keyframe2_descriptor;
  keyframe2_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure2);
  keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
  keyframe2_descriptor.frame_number = 3;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame);
  InjectPacketWith(stream_structure2, keyframe2_descriptor);

  // Pass in late delta frame that uses structure of the 1st key frame.
  // Its structure has been superseded, so the frame must be dropped.
  DependencyDescriptor deltaframe_descriptor;
  deltaframe_descriptor.frame_dependencies = stream_structure1.templates[0];
  deltaframe_descriptor.frame_number = 2;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame).Times(0);
  InjectPacketWith(stream_structure1, deltaframe_descriptor);
}
1113
TEST_F(RtpVideoStreamReceiverDependencyDescriptorTest,
       DropsLateKeyFramePacketWithDependencyDescriptorExtension) {
  FrameDependencyStructure stream_structure1 = CreateStreamStructure();
  FrameDependencyStructure stream_structure2 = CreateStreamStructure();
  // Make sure template ids for these two structures do not collide:
  // adjust structure_id (that is also used as template id offset).
  stream_structure1.structure_id = 13;
  stream_structure2.structure_id =
      stream_structure1.structure_id + stream_structure1.templates.size();

  DependencyDescriptor keyframe1_descriptor;
  keyframe1_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure1);
  keyframe1_descriptor.frame_dependencies = stream_structure1.templates[0];
  keyframe1_descriptor.frame_number = 1;

  DependencyDescriptor keyframe2_descriptor;
  keyframe2_descriptor.attached_structure =
      std::make_unique<FrameDependencyStructure>(stream_structure2);
  keyframe2_descriptor.frame_dependencies = stream_structure2.templates[0];
  keyframe2_descriptor.frame_number = 3;

  // Deliver key frame 3 first; the late key frame 1 (older structure) must
  // then be discarded rather than surface as a complete frame.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(
          [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 3); });
  InjectPacketWith(stream_structure2, keyframe2_descriptor);
  InjectPacketWith(stream_structure1, keyframe1_descriptor);

  // Pass in delta frame that uses structure of the 2nd key frame. Late key
  // frame shouldn't block it.
  DependencyDescriptor deltaframe_descriptor;
  deltaframe_descriptor.frame_dependencies = stream_structure2.templates[0];
  deltaframe_descriptor.frame_number = 4;
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame)
      .WillOnce(
          [&](EncodedFrame* frame) { EXPECT_EQ(frame->Id() & 0xFFFF, 4); });
  InjectPacketWith(stream_structure2, deltaframe_descriptor);
}
1152
1153 #if RTC_DCHECK_IS_ON && GTEST_HAS_DEATH_TEST && !defined(WEBRTC_ANDROID)
1154 using RtpVideoStreamReceiverDeathTest = RtpVideoStreamReceiverTest;
TEST_F(RtpVideoStreamReceiverDeathTest, RepeatedSecondarySinkDisallowed) {
  // Registering the same secondary sink twice violates a DCHECK.
  MockRtpPacketSink sink;
  rtp_video_stream_receiver_->AddSecondarySink(&sink);
  EXPECT_DEATH(rtp_video_stream_receiver_->AddSecondarySink(&sink), "");

  // Tear-down.
  rtp_video_stream_receiver_->RemoveSecondarySink(&sink);
}
1165 #endif
1166
TEST_F(RtpVideoStreamReceiverTest, TransformFrame) {
  rtc::scoped_refptr<MockFrameTransformer> mock_frame_transformer =
      new rtc::RefCountedObject<testing::NiceMock<MockFrameTransformer>>();
  // The receiver must register itself as a sink on the transformer for the
  // stream's SSRC at construction time.
  EXPECT_CALL(*mock_frame_transformer,
              RegisterTransformedFrameSinkCallback(_, config_.rtp.remote_ssrc));
  auto receiver = std::make_unique<RtpVideoStreamReceiver>(
      Clock::GetRealTimeClock(), &mock_transport_, nullptr, nullptr, &config_,
      rtp_receive_statistics_.get(), nullptr, nullptr, process_thread_.get(),
      &mock_nack_sender_, nullptr, &mock_on_complete_frame_callback_, nullptr,
      mock_frame_transformer);
  VideoCodec video_codec;
  video_codec.codecType = kVideoCodecGeneric;
  receiver->AddReceiveCodec(kPayloadType, video_codec, {},
                            /*raw_payload=*/false);

  RtpPacketReceived rtp_packet;
  rtp_packet.SetPayloadType(kPayloadType);
  rtc::CopyOnWriteBuffer data({1, 2, 3, 4});
  rtp_packet.SetSequenceNumber(1);
  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(data.data(),
                                                           data.size());
  // Each completed frame should be routed through the transformer.
  EXPECT_CALL(*mock_frame_transformer, Transform(_));
  receiver->OnReceivedPayloadData(data, rtp_packet, video_header);

  // Destroying the receiver must unregister the sink callback for the SSRC.
  EXPECT_CALL(*mock_frame_transformer,
              UnregisterTransformedFrameSinkCallback(config_.rtp.remote_ssrc));
  receiver = nullptr;
}
1197
// Test default behavior and when playout delay is overridden by field trial.
// Delay signaled in the PlayoutDelayLimits RTP header extension.
const VideoPlayoutDelay kTransmittedPlayoutDelay = {100, 200};
// Delay forced via the WebRTC-ForcePlayoutDelay field trial.
const VideoPlayoutDelay kForcedPlayoutDelay = {70, 90};
// Pairs a field-trial string with the playout delay the received frame is
// expected to end up with.
struct PlayoutDelayOptions {
  std::string field_trial;
  VideoPlayoutDelay expected_delay;
};
// No field trial: the transmitted delay is used as-is.
const PlayoutDelayOptions kDefaultBehavior = {
    /*field_trial=*/"", /*expected_delay=*/kTransmittedPlayoutDelay};
// Field trial active: the forced delay overrides the transmitted one.
const PlayoutDelayOptions kOverridePlayoutDelay = {
    /*field_trial=*/"WebRTC-ForcePlayoutDelay/min_ms:70,max_ms:90/",
    /*expected_delay=*/kForcedPlayoutDelay};
1210
// Parameterized fixture that constructs the receiver under the field trial
// given by the test parameter.
class RtpVideoStreamReceiverTestPlayoutDelay
    : public RtpVideoStreamReceiverTest,
      public ::testing::WithParamInterface<PlayoutDelayOptions> {
 protected:
  RtpVideoStreamReceiverTestPlayoutDelay()
      : RtpVideoStreamReceiverTest(GetParam().field_trial) {}
};

// Run the PlayoutDelay test both with and without the override field trial.
INSTANTIATE_TEST_SUITE_P(PlayoutDelay,
                         RtpVideoStreamReceiverTestPlayoutDelay,
                         Values(kDefaultBehavior, kOverridePlayoutDelay));
1222
TEST_P(RtpVideoStreamReceiverTestPlayoutDelay, PlayoutDelay) {
  rtc::CopyOnWriteBuffer payload_data({1, 2, 3, 4});
  RtpHeaderExtensionMap extension_map;
  extension_map.Register<PlayoutDelayLimits>(1);
  // Build an outgoing packet carrying the extension, then parse it back as a
  // received packet so the extension is present on the receive side.
  RtpPacketToSend packet_to_send(&extension_map);
  packet_to_send.SetPayloadType(kPayloadType);
  packet_to_send.SetSequenceNumber(1);

  // Set playout delay on outgoing packet.
  EXPECT_TRUE(packet_to_send.SetExtension<PlayoutDelayLimits>(
      kTransmittedPlayoutDelay));
  uint8_t* payload = packet_to_send.AllocatePayload(payload_data.size());
  memcpy(payload, payload_data.data(), payload_data.size());

  RtpPacketReceived received_packet(&extension_map);
  received_packet.Parse(packet_to_send.data(), packet_to_send.size());

  RTPVideoHeader video_header =
      GetGenericVideoHeader(VideoFrameType::kVideoFrameKey);
  mock_on_complete_frame_callback_.AppendExpectedBitstream(payload_data.data(),
                                                           payload_data.size());
  // Expect the playout delay of encoded frame to be the same as the transmitted
  // playout delay unless it was overridden by a field trial.
  EXPECT_CALL(mock_on_complete_frame_callback_, DoOnCompleteFrame(_))
      .WillOnce(Invoke([expected_playout_delay =
                            GetParam().expected_delay](EncodedFrame* frame) {
        EXPECT_EQ(frame->EncodedImage().playout_delay_, expected_playout_delay);
      }));
  rtp_video_stream_receiver_->OnReceivedPayloadData(
      received_packet.PayloadBuffer(), received_packet, video_header);
}
1254
1255 } // namespace webrtc
1256