/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "video/rtp_video_stream_receiver2.h"

#include <algorithm>
#include <limits>
#include <memory>
#include <utility>
#include <vector>

#include "absl/algorithm/container.h"
#include "absl/base/macros.h"
#include "absl/memory/memory.h"
#include "absl/types/optional.h"
#include "media/base/media_constants.h"
#include "modules/pacing/packet_router.h"
#include "modules/remote_bitrate_estimator/include/remote_bitrate_estimator.h"
#include "modules/rtp_rtcp/include/receive_statistics.h"
#include "modules/rtp_rtcp/include/rtp_cvo.h"
#include "modules/rtp_rtcp/include/ulpfec_receiver.h"
#include "modules/rtp_rtcp/source/create_video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/rtp_dependency_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_format.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor.h"
#include "modules/rtp_rtcp/source/rtp_generic_frame_descriptor_extension.h"
#include "modules/rtp_rtcp/source/rtp_header_extensions.h"
#include "modules/rtp_rtcp/source/rtp_packet_received.h"
#include "modules/rtp_rtcp/source/rtp_rtcp_config.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer.h"
#include "modules/rtp_rtcp/source/video_rtp_depacketizer_raw.h"
#include "modules/utility/include/process_thread.h"
#include "modules/video_coding/frame_object.h"
#include "modules/video_coding/h264_sprop_parameter_sets.h"
#include "modules/video_coding/h264_sps_pps_tracker.h"
#include "modules/video_coding/nack_module2.h"
#include "modules/video_coding/packet_buffer.h"
#include "rtc_base/checks.h"
#include "rtc_base/location.h"
#include "rtc_base/logging.h"
#include "rtc_base/strings/string_builder.h"
#include "rtc_base/trace_event.h"
#include "system_wrappers/include/field_trial.h"
#include "system_wrappers/include/metrics.h"
#include "system_wrappers/include/ntp_time.h"
#include "video/receive_statistics_proxy2.h"

namespace webrtc {

namespace {
// TODO(philipel): Change kPacketBufferStartSize back to 32 in M63 see:
//                 crbug.com/752886
constexpr int kPacketBufferStartSize = 512;
constexpr int kPacketBufferMaxSize = 2048;

int PacketBufferMaxSize() {
  // The group here must be a positive power of 2, in which case that is used as
  // size. All other values shall result in the default value being used.
  const std::string group_name =
      webrtc::field_trial::FindFullName("WebRTC-PacketBufferMaxSize");
  int packet_buffer_max_size = kPacketBufferMaxSize;
  if (!group_name.empty() &&
      (sscanf(group_name.c_str(), "%d", &packet_buffer_max_size) != 1 ||
       packet_buffer_max_size <= 0 ||
       // Verify that the number is a positive power of 2.
       (packet_buffer_max_size & (packet_buffer_max_size - 1)) != 0)) {
    RTC_LOG(LS_WARNING) << "Invalid packet buffer max size: " << group_name;
    packet_buffer_max_size = kPacketBufferMaxSize;
  }
  return packet_buffer_max_size;
}

std::unique_ptr<ModuleRtpRtcpImpl2> CreateRtpRtcpModule(
    Clock* clock,
    ReceiveStatistics* receive_statistics,
    Transport* outgoing_transport,
    RtcpRttStats* rtt_stats,
    RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
    RtcpCnameCallback* rtcp_cname_callback,
    uint32_t local_ssrc,
    RtcpEventObserver* rtcp_event_observer) {
  RtpRtcpInterface::Configuration configuration;
  configuration.clock = clock;
  configuration.audio = false;
  configuration.receiver_only = true;
  configuration.receive_statistics = receive_statistics;
  configuration.outgoing_transport = outgoing_transport;
  configuration.rtt_stats = rtt_stats;
  configuration.rtcp_packet_type_counter_observer =
      rtcp_packet_type_counter_observer;
  configuration.rtcp_cname_callback = rtcp_cname_callback;
  configuration.local_media_ssrc = local_ssrc;
  configuration.rtcp_event_observer = rtcp_event_observer;

  std::unique_ptr<ModuleRtpRtcpImpl2> rtp_rtcp =
      ModuleRtpRtcpImpl2::Create(configuration);
  rtp_rtcp->SetRTCPStatus(RtcpMode::kCompound);

  return rtp_rtcp;
}

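// NACK is enabled only when the receive config specifies a nonzero RTP
// history; otherwise no NackModule2 is created and |nack_module_| stays null.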
std::unique_ptr<NackModule2> MaybeConstructNackModule(
    TaskQueueBase* current_queue,
    const VideoReceiveStream::Config& config,
    Clock* clock,
    NackSender* nack_sender,
    KeyFrameRequestSender* keyframe_request_sender) {
  if (config.rtp.nack.rtp_history_ms == 0)
    return nullptr;

  return std::make_unique<NackModule2>(current_queue, clock, nack_sender,
                                       keyframe_request_sender);
}

static const int kPacketLogIntervalMs = 10000;

}  // namespace

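// RtcpFeedbackBuffer collects key frame requests, NACKs and loss
// notifications so that they can be sent together as a single compound RTCP
// message instead of as separate packets.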
RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RtcpFeedbackBuffer(
    KeyFrameRequestSender* key_frame_request_sender,
    NackSender* nack_sender,
    LossNotificationSender* loss_notification_sender)
    : key_frame_request_sender_(key_frame_request_sender),
      nack_sender_(nack_sender),
      loss_notification_sender_(loss_notification_sender),
      request_key_frame_(false) {
  RTC_DCHECK(key_frame_request_sender_);
  RTC_DCHECK(nack_sender_);
  RTC_DCHECK(loss_notification_sender_);
}

void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::RequestKeyFrame() {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  request_key_frame_ = true;
}

void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendNack(
    const std::vector<uint16_t>& sequence_numbers,
    bool buffering_allowed) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  RTC_DCHECK(!sequence_numbers.empty());
  nack_sequence_numbers_.insert(nack_sequence_numbers_.end(),
                                sequence_numbers.cbegin(),
                                sequence_numbers.cend());
  if (!buffering_allowed) {
    // Note that while *buffering* is not allowed, *batching* is, meaning that
    // previously buffered messages may be sent along with the current message.
    SendBufferedRtcpFeedback();
  }
}

void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendLossNotification(
    uint16_t last_decoded_seq_num,
    uint16_t last_received_seq_num,
    bool decodability_flag,
    bool buffering_allowed) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  RTC_DCHECK(buffering_allowed);
  RTC_DCHECK(!lntf_state_)
      << "SendLossNotification() called twice in a row with no call to "
         "SendBufferedRtcpFeedback() in between.";
  lntf_state_ = absl::make_optional<LossNotificationState>(
      last_decoded_seq_num, last_received_seq_num, decodability_flag);
}

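// Flushes buffered feedback. A pending loss notification piggybacks on an
// outgoing key frame request or NACK if one is buffered; a key frame request
// takes precedence over buffered NACKs, which are then not sent separately.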
void RtpVideoStreamReceiver2::RtcpFeedbackBuffer::SendBufferedRtcpFeedback() {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);

  bool request_key_frame = false;
  std::vector<uint16_t> nack_sequence_numbers;
  absl::optional<LossNotificationState> lntf_state;

  std::swap(request_key_frame, request_key_frame_);
  std::swap(nack_sequence_numbers, nack_sequence_numbers_);
  std::swap(lntf_state, lntf_state_);

  if (lntf_state) {
    // If either a NACK or a key frame request is sent, we should buffer
    // the LNTF and wait for them (NACK or key frame request) to trigger
    // the compound feedback message.
    // Otherwise, the LNTF should be sent out immediately.
    const bool buffering_allowed =
        request_key_frame || !nack_sequence_numbers.empty();

    loss_notification_sender_->SendLossNotification(
        lntf_state->last_decoded_seq_num, lntf_state->last_received_seq_num,
        lntf_state->decodability_flag, buffering_allowed);
  }

  if (request_key_frame) {
    key_frame_request_sender_->RequestKeyFrame();
  } else if (!nack_sequence_numbers.empty()) {
    nack_sender_->SendNack(nack_sequence_numbers, true);
  }
}

RtpVideoStreamReceiver2::RtpVideoStreamReceiver2(
    TaskQueueBase* current_queue,
    Clock* clock,
    Transport* transport,
    RtcpRttStats* rtt_stats,
    PacketRouter* packet_router,
    const VideoReceiveStream::Config* config,
    ReceiveStatistics* rtp_receive_statistics,
    RtcpPacketTypeCounterObserver* rtcp_packet_type_counter_observer,
    RtcpCnameCallback* rtcp_cname_callback,
    VCMReceiveStatisticsCallback* vcm_receive_statistics,
    ProcessThread* process_thread,
    NackSender* nack_sender,
    KeyFrameRequestSender* keyframe_request_sender,
    video_coding::OnCompleteFrameCallback* complete_frame_callback,
    rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor,
    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer)
    : clock_(clock),
      config_(*config),
      packet_router_(packet_router),
      process_thread_(process_thread),
      ntp_estimator_(clock),
      rtp_header_extensions_(config_.rtp.extensions),
      forced_playout_delay_max_ms_("max_ms", absl::nullopt),
      forced_playout_delay_min_ms_("min_ms", absl::nullopt),
      rtp_receive_statistics_(rtp_receive_statistics),
      ulpfec_receiver_(UlpfecReceiver::Create(config->rtp.remote_ssrc,
                                              this,
                                              config->rtp.extensions)),
      receiving_(false),
      last_packet_log_ms_(-1),
      rtp_rtcp_(CreateRtpRtcpModule(clock,
                                    rtp_receive_statistics_,
                                    transport,
                                    rtt_stats,
                                    rtcp_packet_type_counter_observer,
                                    rtcp_cname_callback,
                                    config_.rtp.local_ssrc,
                                    config_.rtp.rtcp_event_observer)),
      complete_frame_callback_(complete_frame_callback),
      keyframe_request_sender_(keyframe_request_sender),
      keyframe_request_method_(config_.rtp.keyframe_method),
      // TODO(bugs.webrtc.org/10336): Let |rtcp_feedback_buffer_| communicate
      // directly with |rtp_rtcp_|.
      rtcp_feedback_buffer_(this, nack_sender, this),
      nack_module_(MaybeConstructNackModule(current_queue,
                                            config_,
                                            clock_,
                                            &rtcp_feedback_buffer_,
                                            &rtcp_feedback_buffer_)),
      vcm_receive_statistics_(vcm_receive_statistics),
      packet_buffer_(clock_, kPacketBufferStartSize, PacketBufferMaxSize()),
      has_received_frame_(false),
      frames_decryptable_(false),
      absolute_capture_time_receiver_(clock) {
  if (packet_router_)
    packet_router_->AddReceiveRtpModule(rtp_rtcp_.get(), config_.rtp.remb);

  RTC_DCHECK(config_.rtp.rtcp_mode != RtcpMode::kOff)
      << "A stream should not be configured with RTCP disabled. This value is "
         "reserved for internal usage.";
  // TODO(pbos): What's an appropriate local_ssrc for receive-only streams?
  RTC_DCHECK(config_.rtp.local_ssrc != 0);
  RTC_DCHECK(config_.rtp.remote_ssrc != config_.rtp.local_ssrc);

  rtp_rtcp_->SetRTCPStatus(config_.rtp.rtcp_mode);
  rtp_rtcp_->SetRemoteSSRC(config_.rtp.remote_ssrc);

  static const int kMaxPacketAgeToNack = 450;
  const int max_reordering_threshold = (config_.rtp.nack.rtp_history_ms > 0)
                                           ? kMaxPacketAgeToNack
                                           : kDefaultMaxReorderingThreshold;
  rtp_receive_statistics_->SetMaxReorderingThreshold(config_.rtp.remote_ssrc,
                                                     max_reordering_threshold);
  // TODO(nisse): For historic reasons, we applied the above
  // max_reordering_threshold also for RTX stats, which makes little sense since
  // we don't NACK rtx packets. Consider deleting the below block, and rely on
  // the default threshold.
  if (config_.rtp.rtx_ssrc) {
    rtp_receive_statistics_->SetMaxReorderingThreshold(
        config_.rtp.rtx_ssrc, max_reordering_threshold);
  }
  if (config_.rtp.rtcp_xr.receiver_reference_time_report)
    rtp_rtcp_->SetRtcpXrRrtrStatus(true);

  ParseFieldTrial(
      {&forced_playout_delay_max_ms_, &forced_playout_delay_min_ms_},
      field_trial::FindFullName("WebRTC-ForcePlayoutDelay"));

  process_thread_->RegisterModule(rtp_rtcp_.get(), RTC_FROM_HERE);

  if (config_.rtp.lntf.enabled) {
    loss_notification_controller_ =
        std::make_unique<LossNotificationController>(&rtcp_feedback_buffer_,
                                                     &rtcp_feedback_buffer_);
  }

  reference_finder_ =
      std::make_unique<video_coding::RtpFrameReferenceFinder>(this);

  // Only construct the encrypted receiver if frame encryption is enabled.
  if (config_.crypto_options.sframe.require_frame_encryption) {
    buffered_frame_decryptor_ =
        std::make_unique<BufferedFrameDecryptor>(this, this);
    if (frame_decryptor != nullptr) {
      buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
    }
  }

  if (frame_transformer) {
    frame_transformer_delegate_ = new rtc::RefCountedObject<
        RtpVideoStreamReceiverFrameTransformerDelegate>(
        this, std::move(frame_transformer), rtc::Thread::Current(),
        config_.rtp.remote_ssrc);
    frame_transformer_delegate_->Init();
  }
}

RtpVideoStreamReceiver2::~RtpVideoStreamReceiver2() {
  RTC_DCHECK(secondary_sinks_.empty());

  process_thread_->DeRegisterModule(rtp_rtcp_.get());

  if (packet_router_)
    packet_router_->RemoveReceiveRtpModule(rtp_rtcp_.get());
  UpdateHistograms();
  if (frame_transformer_delegate_)
    frame_transformer_delegate_->Reset();
}

void RtpVideoStreamReceiver2::AddReceiveCodec(
    uint8_t payload_type,
    const VideoCodec& video_codec,
    const std::map<std::string, std::string>& codec_params,
    bool raw_payload) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (codec_params.count(cricket::kH264FmtpSpsPpsIdrInKeyframe) ||
      field_trial::IsEnabled("WebRTC-SpsPpsIdrIsH264Keyframe")) {
    packet_buffer_.ForceSpsPpsIdrIsH264Keyframe();
  }
  payload_type_map_.emplace(
      payload_type, raw_payload
                        ? std::make_unique<VideoRtpDepacketizerRaw>()
                        : CreateVideoRtpDepacketizer(video_codec.codecType));
  pt_codec_params_.emplace(payload_type, codec_params);
}

absl::optional<Syncable::Info> RtpVideoStreamReceiver2::GetSyncInfo() const {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  Syncable::Info info;
  if (rtp_rtcp_->RemoteNTP(&info.capture_time_ntp_secs,
                           &info.capture_time_ntp_frac, nullptr, nullptr,
                           &info.capture_time_source_clock) != 0) {
    return absl::nullopt;
  }

  if (!last_received_rtp_timestamp_ || !last_received_rtp_system_time_ms_) {
    return absl::nullopt;
  }
  info.latest_received_capture_timestamp = *last_received_rtp_timestamp_;
  info.latest_receive_time_ms = *last_received_rtp_system_time_ms_;

  // Leaves info.current_delay_ms uninitialized.
  return info;
}

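// Extracts generic frame dependency information, preferring the dependency
// descriptor header extension and falling back to the legacy generic frame
// descriptor. Returns kDropPacket when a descriptor is present but cannot be
// used, e.g. because the frame dependency structure it refers to is unknown.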
RtpVideoStreamReceiver2::ParseGenericDependenciesResult
RtpVideoStreamReceiver2::ParseGenericDependenciesExtension(
    const RtpPacketReceived& rtp_packet,
    RTPVideoHeader* video_header) {
  if (rtp_packet.HasExtension<RtpDependencyDescriptorExtension>()) {
    webrtc::DependencyDescriptor dependency_descriptor;
    if (!rtp_packet.GetExtension<RtpDependencyDescriptorExtension>(
            video_structure_.get(), &dependency_descriptor)) {
      // Descriptor is there, but failed to parse. Either it is invalid,
      // or too old packet (after relevant video_structure_ changed),
      // or too new packet (before relevant video_structure_ arrived).
      // Drop such packet to be on the safe side.
      // TODO(bugs.webrtc.org/10342): Stash too new packet.
      RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
                          << " Failed to parse dependency descriptor.";
      return kDropPacket;
    }
    if (dependency_descriptor.attached_structure != nullptr &&
        !dependency_descriptor.first_packet_in_frame) {
      RTC_LOG(LS_WARNING) << "ssrc: " << rtp_packet.Ssrc()
                          << "Invalid dependency descriptor: structure "
                             "attached to non first packet of a frame.";
      return kDropPacket;
    }
    video_header->is_first_packet_in_frame =
        dependency_descriptor.first_packet_in_frame;
    video_header->is_last_packet_in_frame =
        dependency_descriptor.last_packet_in_frame;

    int64_t frame_id =
        frame_id_unwrapper_.Unwrap(dependency_descriptor.frame_number);
    auto& generic_descriptor_info = video_header->generic.emplace();
    generic_descriptor_info.frame_id = frame_id;
    generic_descriptor_info.spatial_index =
        dependency_descriptor.frame_dependencies.spatial_id;
    generic_descriptor_info.temporal_index =
        dependency_descriptor.frame_dependencies.temporal_id;
    for (int fdiff : dependency_descriptor.frame_dependencies.frame_diffs) {
      generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
    }
    generic_descriptor_info.decode_target_indications =
        dependency_descriptor.frame_dependencies.decode_target_indications;
    if (dependency_descriptor.resolution) {
      video_header->width = dependency_descriptor.resolution->Width();
      video_header->height = dependency_descriptor.resolution->Height();
    }

    // The FrameDependencyStructure is sent in the dependency descriptor of the
    // first packet of a key frame and is required to parse the dependency
    // descriptor of all following packets until the next key frame.
    // Save it if there is a (potentially) new structure.
    if (dependency_descriptor.attached_structure) {
      RTC_DCHECK(dependency_descriptor.first_packet_in_frame);
      if (video_structure_frame_id_ > frame_id) {
        RTC_LOG(LS_WARNING)
            << "Arrived key frame with id " << frame_id << " and structure id "
            << dependency_descriptor.attached_structure->structure_id
            << " is older than the latest received key frame with id "
            << *video_structure_frame_id_ << " and structure id "
            << video_structure_->structure_id;
        return kDropPacket;
      }
      video_structure_ = std::move(dependency_descriptor.attached_structure);
      video_structure_frame_id_ = frame_id;
      video_header->frame_type = VideoFrameType::kVideoFrameKey;
    } else {
      video_header->frame_type = VideoFrameType::kVideoFrameDelta;
    }
    return kHasGenericDescriptor;
  }

  RtpGenericFrameDescriptor generic_frame_descriptor;
  if (!rtp_packet.GetExtension<RtpGenericFrameDescriptorExtension00>(
          &generic_frame_descriptor)) {
    return kNoGenericDescriptor;
  }

  video_header->is_first_packet_in_frame =
      generic_frame_descriptor.FirstPacketInSubFrame();
  video_header->is_last_packet_in_frame =
      generic_frame_descriptor.LastPacketInSubFrame();

  if (generic_frame_descriptor.FirstPacketInSubFrame()) {
    video_header->frame_type =
        generic_frame_descriptor.FrameDependenciesDiffs().empty()
            ? VideoFrameType::kVideoFrameKey
            : VideoFrameType::kVideoFrameDelta;

    auto& generic_descriptor_info = video_header->generic.emplace();
    int64_t frame_id =
        frame_id_unwrapper_.Unwrap(generic_frame_descriptor.FrameId());
    generic_descriptor_info.frame_id = frame_id;
    generic_descriptor_info.spatial_index =
        generic_frame_descriptor.SpatialLayer();
    generic_descriptor_info.temporal_index =
        generic_frame_descriptor.TemporalLayer();
    for (uint16_t fdiff : generic_frame_descriptor.FrameDependenciesDiffs()) {
      generic_descriptor_info.dependencies.push_back(frame_id - fdiff);
    }
  }
  video_header->width = generic_frame_descriptor.Width();
  video_header->height = generic_frame_descriptor.Height();
  return kHasGenericDescriptor;
}

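// Wraps the depacketized payload in a PacketBuffer packet, fills in the video
// header from RTP header extensions, updates NACK and loss-notification state,
// and inserts the packet into |packet_buffer_|.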
void RtpVideoStreamReceiver2::OnReceivedPayloadData(
    rtc::CopyOnWriteBuffer codec_payload,
    const RtpPacketReceived& rtp_packet,
    const RTPVideoHeader& video) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  auto packet = std::make_unique<video_coding::PacketBuffer::Packet>(
      rtp_packet, video, ntp_estimator_.Estimate(rtp_packet.Timestamp()),
      clock_->TimeInMilliseconds());

  // Try to extrapolate absolute capture time if it is missing.
  packet->packet_info.set_absolute_capture_time(
      absolute_capture_time_receiver_.OnReceivePacket(
          AbsoluteCaptureTimeReceiver::GetSource(packet->packet_info.ssrc(),
                                                 packet->packet_info.csrcs()),
          packet->packet_info.rtp_timestamp(),
          // Assume frequency is the same one for all video frames.
          kVideoPayloadTypeFrequency,
          packet->packet_info.absolute_capture_time()));

  RTPVideoHeader& video_header = packet->video_header;
  video_header.rotation = kVideoRotation_0;
  video_header.content_type = VideoContentType::UNSPECIFIED;
  video_header.video_timing.flags = VideoSendTiming::kInvalid;
  video_header.is_last_packet_in_frame |= rtp_packet.Marker();

  if (const auto* vp9_header =
          absl::get_if<RTPVideoHeaderVP9>(&video_header.video_type_header)) {
    video_header.is_last_packet_in_frame |= vp9_header->end_of_frame;
    video_header.is_first_packet_in_frame |= vp9_header->beginning_of_frame;
  }

  rtp_packet.GetExtension<VideoOrientation>(&video_header.rotation);
  rtp_packet.GetExtension<VideoContentTypeExtension>(
      &video_header.content_type);
  rtp_packet.GetExtension<VideoTimingExtension>(&video_header.video_timing);
  if (forced_playout_delay_max_ms_ && forced_playout_delay_min_ms_) {
    video_header.playout_delay.max_ms = *forced_playout_delay_max_ms_;
    video_header.playout_delay.min_ms = *forced_playout_delay_min_ms_;
  } else {
    rtp_packet.GetExtension<PlayoutDelayLimits>(&video_header.playout_delay);
  }

  ParseGenericDependenciesResult generic_descriptor_state =
      ParseGenericDependenciesExtension(rtp_packet, &video_header);
  if (generic_descriptor_state == kDropPacket)
    return;

  // Color space should only be transmitted in the last packet of a frame,
  // therefore, neglect it otherwise so that last_color_space_ is not reset by
  // mistake.
  if (video_header.is_last_packet_in_frame) {
    video_header.color_space = rtp_packet.GetExtension<ColorSpaceExtension>();
    if (video_header.color_space ||
        video_header.frame_type == VideoFrameType::kVideoFrameKey) {
      // Store color space since it's only transmitted when changed or for key
      // frames. Color space will be cleared if a key frame is transmitted
      // without color space information.
      last_color_space_ = video_header.color_space;
    } else if (last_color_space_) {
      video_header.color_space = last_color_space_;
    }
  }

  if (loss_notification_controller_) {
    if (rtp_packet.recovered()) {
      // TODO(bugs.webrtc.org/10336): Implement support for reordering.
      RTC_LOG(LS_INFO)
          << "LossNotificationController does not support reordering.";
    } else if (generic_descriptor_state == kNoGenericDescriptor) {
      RTC_LOG(LS_WARNING) << "LossNotificationController requires generic "
                             "frame descriptor, but it is missing.";
    } else {
      if (video_header.is_first_packet_in_frame) {
        RTC_DCHECK(video_header.generic);
        LossNotificationController::FrameDetails frame;
        frame.is_keyframe =
            video_header.frame_type == VideoFrameType::kVideoFrameKey;
        frame.frame_id = video_header.generic->frame_id;
        frame.frame_dependencies = video_header.generic->dependencies;
        loss_notification_controller_->OnReceivedPacket(
            rtp_packet.SequenceNumber(), &frame);
      } else {
        loss_notification_controller_->OnReceivedPacket(
            rtp_packet.SequenceNumber(), nullptr);
      }
    }
  }

  if (nack_module_) {
    const bool is_keyframe =
        video_header.is_first_packet_in_frame &&
        video_header.frame_type == VideoFrameType::kVideoFrameKey;

    packet->times_nacked = nack_module_->OnReceivedPacket(
        rtp_packet.SequenceNumber(), is_keyframe, rtp_packet.recovered());
  } else {
    packet->times_nacked = -1;
  }

  if (codec_payload.size() == 0) {
    NotifyReceiverOfEmptyPacket(packet->seq_num);
    rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
    return;
  }

  if (packet->codec() == kVideoCodecH264) {
    // Only when we start to receive packets do we know which payload type
    // will be used. Once it is known, insert the correct SPS/PPS into the
    // tracker.
    if (packet->payload_type != last_payload_type_) {
      last_payload_type_ = packet->payload_type;
      InsertSpsPpsIntoTracker(packet->payload_type);
    }

    video_coding::H264SpsPpsTracker::FixedBitstream fixed =
        tracker_.CopyAndFixBitstream(
            rtc::MakeArrayView(codec_payload.cdata(), codec_payload.size()),
            &packet->video_header);

    switch (fixed.action) {
      case video_coding::H264SpsPpsTracker::kRequestKeyframe:
        rtcp_feedback_buffer_.RequestKeyFrame();
        rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
        ABSL_FALLTHROUGH_INTENDED;
      case video_coding::H264SpsPpsTracker::kDrop:
        return;
      case video_coding::H264SpsPpsTracker::kInsert:
        packet->video_payload = std::move(fixed.bitstream);
        break;
    }

  } else {
    packet->video_payload = std::move(codec_payload);
  }

  rtcp_feedback_buffer_.SendBufferedRtcpFeedback();
  frame_counter_.Add(packet->timestamp);
  OnInsertedPacket(packet_buffer_.InsertPacket(std::move(packet)));
}

void RtpVideoStreamReceiver2::OnRecoveredPacket(const uint8_t* rtp_packet,
                                                size_t rtp_packet_length) {
  RtpPacketReceived packet;
  if (!packet.Parse(rtp_packet, rtp_packet_length))
    return;
  if (packet.PayloadType() == config_.rtp.red_payload_type) {
    RTC_LOG(LS_WARNING) << "Discarding recovered packet with RED encapsulation";
    return;
  }

  packet.IdentifyExtensions(rtp_header_extensions_);
  packet.set_payload_type_frequency(kVideoPayloadTypeFrequency);
  // TODO(nisse): UlpfecReceiverImpl::ProcessReceivedFec passes both
  // original (decapsulated) media packets and recovered packets to
  // this callback. We need a way to distinguish, for setting
  // packet.recovered() correctly. Ideally, move RED decapsulation out
  // of the Ulpfec implementation.

  ReceivePacket(packet);
}

// This method handles both regular RTP packets and packets recovered
// via FlexFEC.
void RtpVideoStreamReceiver2::OnRtpPacket(const RtpPacketReceived& packet) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);

  if (!receiving_) {
    return;
  }

  if (!packet.recovered()) {
    // TODO(nisse): Exclude out-of-order packets?
    int64_t now_ms = clock_->TimeInMilliseconds();

    last_received_rtp_timestamp_ = packet.Timestamp();
    last_received_rtp_system_time_ms_ = now_ms;

    // Periodically log the RTP header of incoming packets.
    if (now_ms - last_packet_log_ms_ > kPacketLogIntervalMs) {
      rtc::StringBuilder ss;
      ss << "Packet received on SSRC: " << packet.Ssrc()
         << " with payload type: " << static_cast<int>(packet.PayloadType())
         << ", timestamp: " << packet.Timestamp()
         << ", sequence number: " << packet.SequenceNumber()
         << ", arrival time: " << packet.arrival_time_ms();
      int32_t time_offset;
      if (packet.GetExtension<TransmissionOffset>(&time_offset)) {
        ss << ", toffset: " << time_offset;
      }
      uint32_t send_time;
      if (packet.GetExtension<AbsoluteSendTime>(&send_time)) {
        ss << ", abs send time: " << send_time;
      }
      RTC_LOG(LS_INFO) << ss.str();
      last_packet_log_ms_ = now_ms;
    }
  }

  ReceivePacket(packet);

  // Update receive statistics after ReceivePacket.
  // Receive statistics will be reset if the payload type changes (make sure
  // that the first packet is included in the stats).
  if (!packet.recovered()) {
    rtp_receive_statistics_->OnRtpPacket(packet);
  }

  for (RtpPacketSinkInterface* secondary_sink : secondary_sinks_) {
    secondary_sink->OnRtpPacket(packet);
  }
}

void RtpVideoStreamReceiver2::RequestKeyFrame() {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  TRACE_EVENT2("webrtc", "RtpVideoStreamReceiver2::RequestKeyFrame",
               "remote_ssrc", config_.rtp.remote_ssrc, "method",
               keyframe_request_sender_ ? "KFRSender"
               : keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp ? "PLI"
               : keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp ? "FIR"
               : keyframe_request_method_ == KeyFrameReqMethod::kNone    ? "None"
                                                                         : "Other");
  // TODO(bugs.webrtc.org/10336): Allow the sender to ignore key frame requests
  // issued by anything other than the LossNotificationController if it (the
  // sender) is relying on LNTF alone.
  if (keyframe_request_sender_) {
    keyframe_request_sender_->RequestKeyFrame();
  } else if (keyframe_request_method_ == KeyFrameReqMethod::kPliRtcp) {
    rtp_rtcp_->SendPictureLossIndication();
  } else if (keyframe_request_method_ == KeyFrameReqMethod::kFirRtcp) {
    rtp_rtcp_->SendFullIntraRequest();
  }
}

void RtpVideoStreamReceiver2::SendLossNotification(
    uint16_t last_decoded_seq_num,
    uint16_t last_received_seq_num,
    bool decodability_flag,
    bool buffering_allowed) {
  RTC_DCHECK(config_.rtp.lntf.enabled);
  rtp_rtcp_->SendLossNotification(last_decoded_seq_num, last_received_seq_num,
                                  decodability_flag, buffering_allowed);
}

bool RtpVideoStreamReceiver2::IsUlpfecEnabled() const {
  return config_.rtp.ulpfec_payload_type != -1;
}

bool RtpVideoStreamReceiver2::IsRetransmissionsEnabled() const {
  return config_.rtp.nack.rtp_history_ms > 0;
}

void RtpVideoStreamReceiver2::RequestPacketRetransmit(
    const std::vector<uint16_t>& sequence_numbers) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  rtp_rtcp_->SendNack(sequence_numbers);
}

bool RtpVideoStreamReceiver2::IsDecryptable() const {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  return frames_decryptable_;
}

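// Handles the packets returned by PacketBuffer::InsertPacket(). Packets are
// grouped into complete frames using the first/last-packet-in-frame flags,
// each frame is reassembled by the matching depacketizer and passed to
// OnAssembledFrame(). A cleared packet buffer triggers a key frame request.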
void RtpVideoStreamReceiver2::OnInsertedPacket(
    video_coding::PacketBuffer::InsertResult result) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  video_coding::PacketBuffer::Packet* first_packet = nullptr;
  int max_nack_count;
  int64_t min_recv_time;
  int64_t max_recv_time;
  std::vector<rtc::ArrayView<const uint8_t>> payloads;
  RtpPacketInfos::vector_type packet_infos;

  bool frame_boundary = true;
  for (auto& packet : result.packets) {
    // PacketBuffer promises frame boundaries are correctly set on each
    // packet. Document that assumption with the DCHECKs.
    RTC_DCHECK_EQ(frame_boundary, packet->is_first_packet_in_frame());
    if (packet->is_first_packet_in_frame()) {
      first_packet = packet.get();
      max_nack_count = packet->times_nacked;
      min_recv_time = packet->packet_info.receive_time_ms();
      max_recv_time = packet->packet_info.receive_time_ms();
      payloads.clear();
      packet_infos.clear();
    } else {
      max_nack_count = std::max(max_nack_count, packet->times_nacked);
      min_recv_time =
          std::min(min_recv_time, packet->packet_info.receive_time_ms());
      max_recv_time =
          std::max(max_recv_time, packet->packet_info.receive_time_ms());
    }
    payloads.emplace_back(packet->video_payload);
    packet_infos.push_back(packet->packet_info);

    frame_boundary = packet->is_last_packet_in_frame();
    if (packet->is_last_packet_in_frame()) {
      auto depacketizer_it = payload_type_map_.find(first_packet->payload_type);
      RTC_CHECK(depacketizer_it != payload_type_map_.end());

      rtc::scoped_refptr<EncodedImageBuffer> bitstream =
          depacketizer_it->second->AssembleFrame(payloads);
      if (!bitstream) {
        // Failed to assemble a frame. Discard and continue.
        continue;
      }

      const video_coding::PacketBuffer::Packet& last_packet = *packet;
      OnAssembledFrame(std::make_unique<video_coding::RtpFrameObject>(
          first_packet->seq_num,                    //
          last_packet.seq_num,                      //
          last_packet.marker_bit,                   //
          max_nack_count,                           //
          min_recv_time,                            //
          max_recv_time,                            //
          first_packet->timestamp,                  //
          first_packet->ntp_time_ms,                //
          last_packet.video_header.video_timing,    //
          first_packet->payload_type,               //
          first_packet->codec(),                    //
          last_packet.video_header.rotation,        //
          last_packet.video_header.content_type,    //
          first_packet->video_header,               //
          last_packet.video_header.color_space,     //
          RtpPacketInfos(std::move(packet_infos)),  //
          std::move(bitstream)));
    }
  }
  RTC_DCHECK(frame_boundary);
  if (result.buffer_cleared) {
    RequestKeyFrame();
  }
}

void RtpVideoStreamReceiver2::OnAssembledFrame(
    std::unique_ptr<video_coding::RtpFrameObject> frame) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  RTC_DCHECK(frame);

  const absl::optional<RTPVideoHeader::GenericDescriptorInfo>& descriptor =
      frame->GetRtpVideoHeader().generic;

  if (loss_notification_controller_ && descriptor) {
    loss_notification_controller_->OnAssembledFrame(
        frame->first_seq_num(), descriptor->frame_id,
        absl::c_linear_search(descriptor->decode_target_indications,
                              DecodeTargetIndication::kDiscardable),
        descriptor->dependencies);
  }

  // If frames arrive before a key frame, they would not be decodable.
  // In that case, request a key frame ASAP.
  if (!has_received_frame_) {
    if (frame->FrameType() != VideoFrameType::kVideoFrameKey) {
      // |loss_notification_controller_|, if present, would have already
      // requested a key frame when the first packet for the non-key frame
      // had arrived, so no need to replicate the request.
      if (!loss_notification_controller_) {
        RequestKeyFrame();
      }
    }
    has_received_frame_ = true;
  }

  // Reset |reference_finder_| if |frame| is new and the codec has changed.
  if (current_codec_) {
    bool frame_is_newer =
        AheadOf(frame->Timestamp(), last_assembled_frame_rtp_timestamp_);

    if (frame->codec_type() != current_codec_) {
      if (frame_is_newer) {
        // When we reset the |reference_finder_| we don't want new picture ids
        // to overlap with old picture ids. To ensure that doesn't happen we
        // start from the |last_completed_picture_id_| and add an offset in case
        // of reordering.
        reference_finder_ =
            std::make_unique<video_coding::RtpFrameReferenceFinder>(
                this, last_completed_picture_id_ +
                          std::numeric_limits<uint16_t>::max());
        current_codec_ = frame->codec_type();
      } else {
        // Old frame from before the codec switch, discard it.
        return;
      }
    }

    if (frame_is_newer) {
      last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
    }
  } else {
    current_codec_ = frame->codec_type();
    last_assembled_frame_rtp_timestamp_ = frame->Timestamp();
  }

  if (buffered_frame_decryptor_ != nullptr) {
    buffered_frame_decryptor_->ManageEncryptedFrame(std::move(frame));
  } else if (frame_transformer_delegate_) {
    frame_transformer_delegate_->TransformFrame(std::move(frame));
  } else {
    reference_finder_->ManageFrame(std::move(frame));
  }
}

void RtpVideoStreamReceiver2::OnCompleteFrame(
    std::unique_ptr<video_coding::EncodedFrame> frame) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  video_coding::RtpFrameObject* rtp_frame =
      static_cast<video_coding::RtpFrameObject*>(frame.get());
  last_seq_num_for_pic_id_[rtp_frame->id.picture_id] =
      rtp_frame->last_seq_num();

  last_completed_picture_id_ =
      std::max(last_completed_picture_id_, frame->id.picture_id);
  complete_frame_callback_->OnCompleteFrame(std::move(frame));
}

void RtpVideoStreamReceiver2::OnDecryptedFrame(
    std::unique_ptr<video_coding::RtpFrameObject> frame) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  reference_finder_->ManageFrame(std::move(frame));
}

void RtpVideoStreamReceiver2::OnDecryptionStatusChange(
    FrameDecryptorInterface::Status status) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  // Called from BufferedFrameDecryptor::DecryptFrame.
  frames_decryptable_ =
      (status == FrameDecryptorInterface::Status::kOk) ||
      (status == FrameDecryptorInterface::Status::kRecoverable);
}

void RtpVideoStreamReceiver2::SetFrameDecryptor(
    rtc::scoped_refptr<FrameDecryptorInterface> frame_decryptor) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (buffered_frame_decryptor_ == nullptr) {
    buffered_frame_decryptor_ =
        std::make_unique<BufferedFrameDecryptor>(this, this);
  }
  buffered_frame_decryptor_->SetFrameDecryptor(std::move(frame_decryptor));
}

void RtpVideoStreamReceiver2::SetDepacketizerToDecoderFrameTransformer(
    rtc::scoped_refptr<FrameTransformerInterface> frame_transformer) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  frame_transformer_delegate_ =
      new rtc::RefCountedObject<RtpVideoStreamReceiverFrameTransformerDelegate>(
          this, std::move(frame_transformer), rtc::Thread::Current(),
          config_.rtp.remote_ssrc);
  frame_transformer_delegate_->Init();
}

void RtpVideoStreamReceiver2::UpdateRtt(int64_t max_rtt_ms) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (nack_module_)
    nack_module_->UpdateRtt(max_rtt_ms);
}

absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedPacketMs() const {
  return packet_buffer_.LastReceivedPacketMs();
}

absl::optional<int64_t> RtpVideoStreamReceiver2::LastReceivedKeyframePacketMs()
    const {
  return packet_buffer_.LastReceivedKeyframePacketMs();
}

void RtpVideoStreamReceiver2::AddSecondarySink(RtpPacketSinkInterface* sink) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  RTC_DCHECK(!absl::c_linear_search(secondary_sinks_, sink));
  secondary_sinks_.push_back(sink);
}

void RtpVideoStreamReceiver2::RemoveSecondarySink(
    const RtpPacketSinkInterface* sink) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  auto it = absl::c_find(secondary_sinks_, sink);
  if (it == secondary_sinks_.end()) {
    // We might be rolling-back a call whose setup failed mid-way. In such a
    // case, it's simpler to remove "everything" rather than remember what
    // has already been added.
    RTC_LOG(LS_WARNING) << "Removal of unknown sink.";
    return;
  }
  secondary_sinks_.erase(it);
}

// Mozilla modification: VideoReceiveStream2 and friends do not surface RTCP
// stats at all, and even on the most recent libwebrtc code there does not
// seem to be any support for these stats right now. So, we hack this in.
void RtpVideoStreamReceiver2::RemoteRTCPSenderInfo(
    uint32_t* packet_count, uint32_t* octet_count,
    int64_t* ntp_timestamp_ms, int64_t* remote_ntp_timestamp_ms) const {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  rtp_rtcp_->RemoteRTCPSenderInfo(packet_count, octet_count, ntp_timestamp_ms,
                                  remote_ntp_timestamp_ms);
}

void RtpVideoStreamReceiver2::ManageFrame(
    std::unique_ptr<video_coding::RtpFrameObject> frame) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  reference_finder_->ManageFrame(std::move(frame));
}

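// Dispatches a received RTP packet: empty payloads are treated as padding,
// RED packets are unwrapped for ULPFEC handling, and media packets are
// depacketized according to their payload type before being handed to
// OnReceivedPayloadData().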
void RtpVideoStreamReceiver2::ReceivePacket(const RtpPacketReceived& packet) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (packet.payload_size() == 0) {
    // Padding or keep-alive packet.
    // TODO(nisse): Could drop empty packets earlier, but need to figure out how
    // they should be counted in stats.
    NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
    return;
  }
  if (packet.PayloadType() == config_.rtp.red_payload_type) {
    ParseAndHandleEncapsulatingHeader(packet);
    return;
  }

  const auto type_it = payload_type_map_.find(packet.PayloadType());
  if (type_it == payload_type_map_.end()) {
    return;
  }
  absl::optional<VideoRtpDepacketizer::ParsedRtpPayload> parsed_payload =
      type_it->second->Parse(packet.PayloadBuffer());
  if (parsed_payload == absl::nullopt) {
    RTC_LOG(LS_WARNING) << "Failed parsing payload.";
    return;
  }

  OnReceivedPayloadData(std::move(parsed_payload->video_payload), packet,
                        parsed_payload->video_header);
}

void RtpVideoStreamReceiver2::ParseAndHandleEncapsulatingHeader(
    const RtpPacketReceived& packet) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (packet.PayloadType() == config_.rtp.red_payload_type &&
      packet.payload_size() > 0) {
    if (packet.payload()[0] == config_.rtp.ulpfec_payload_type) {
      // Notify video_receiver about received FEC packets to avoid NACKing these
      // packets.
      NotifyReceiverOfEmptyPacket(packet.SequenceNumber());
    }
    if (!ulpfec_receiver_->AddReceivedRedPacket(
            packet, config_.rtp.ulpfec_payload_type)) {
      return;
    }
    ulpfec_receiver_->ProcessReceivedFec();
  }
}

// In the case of a video stream without picture ids and no rtx the
// RtpFrameReferenceFinder will need to know about padding to
// correctly calculate frame references.
void RtpVideoStreamReceiver2::NotifyReceiverOfEmptyPacket(uint16_t seq_num) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);

  reference_finder_->PaddingReceived(seq_num);

  OnInsertedPacket(packet_buffer_.InsertPadding(seq_num));
  if (nack_module_) {
    nack_module_->OnReceivedPacket(seq_num, /* is_keyframe = */ false,
                                   /* is_recovered = */ false);
  }
  if (loss_notification_controller_) {
    // TODO(bugs.webrtc.org/10336): Handle empty packets.
    RTC_LOG(LS_WARNING)
        << "LossNotificationController does not expect empty packets.";
  }
}

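// Feeds an incoming RTCP packet to the RTP/RTCP module. When a recent sender
// report and a valid RTT estimate are available, the NTP estimator and the
// remote-to-local clock offset used for absolute capture time are updated.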
bool RtpVideoStreamReceiver2::DeliverRtcp(const uint8_t* rtcp_packet,
                                          size_t rtcp_packet_length) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);

  if (!receiving_) {
    return false;
  }

  rtp_rtcp_->IncomingRtcpPacket(rtcp_packet, rtcp_packet_length);

  int64_t rtt = 0;
  rtp_rtcp_->RTT(config_.rtp.remote_ssrc, &rtt, nullptr, nullptr, nullptr);
  if (rtt == 0) {
    // Waiting for valid rtt.
    return true;
  }
  uint32_t ntp_secs = 0;
  uint32_t ntp_frac = 0;
  uint32_t rtp_timestamp = 0;
  uint32_t received_ntp_secs = 0;
  uint32_t received_ntp_frac = 0;
  if (rtp_rtcp_->RemoteNTP(&ntp_secs, &ntp_frac, &received_ntp_secs,
                           &received_ntp_frac, &rtp_timestamp) != 0) {
    // Waiting for RTCP.
    return true;
  }
  NtpTime received_ntp(received_ntp_secs, received_ntp_frac);
  int64_t time_since_received =
      clock_->CurrentNtpInMilliseconds() - received_ntp.ToMs();
  // Don't use old SRs to estimate time.
  if (time_since_received <= 1) {
    ntp_estimator_.UpdateRtcpTimestamp(rtt, ntp_secs, ntp_frac, rtp_timestamp);
    absl::optional<int64_t> remote_to_local_clock_offset_ms =
        ntp_estimator_.EstimateRemoteToLocalClockOffsetMs();
    if (remote_to_local_clock_offset_ms.has_value()) {
      absolute_capture_time_receiver_.SetRemoteToLocalClockOffset(
          Int64MsToQ32x32(*remote_to_local_clock_offset_ms));
    }
  }

  return true;
}

void RtpVideoStreamReceiver2::FrameContinuous(int64_t picture_id) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  if (!nack_module_)
    return;

  int seq_num = -1;
  auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
  if (seq_num_it != last_seq_num_for_pic_id_.end())
    seq_num = seq_num_it->second;
  if (seq_num != -1)
    nack_module_->ClearUpTo(seq_num);
}

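// Once the frame with |picture_id| has been decoded, packets up to and
// including its last sequence number can be dropped from the packet buffer,
// the reference finder and the picture-id bookkeeping map.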
void RtpVideoStreamReceiver2::FrameDecoded(int64_t picture_id) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  // Running on the decoder thread.
  int seq_num = -1;
  auto seq_num_it = last_seq_num_for_pic_id_.find(picture_id);
  if (seq_num_it != last_seq_num_for_pic_id_.end()) {
    seq_num = seq_num_it->second;
    last_seq_num_for_pic_id_.erase(last_seq_num_for_pic_id_.begin(),
                                   ++seq_num_it);
  }

  if (seq_num != -1) {
    uint32_t num_packets_cleared = packet_buffer_.ClearTo(seq_num);
    if (num_packets_cleared > 0) {
      TRACE_EVENT2("webrtc",
                   "RtpVideoStreamReceiver2::FrameDecoded Cleared Old Packets",
                   "remote_ssrc", config_.rtp.remote_ssrc, "seq_num", seq_num);
      vcm_receive_statistics_->OnDiscardedPackets(num_packets_cleared);
    }
    reference_finder_->ClearTo(seq_num);
  }
}

void RtpVideoStreamReceiver2::SignalNetworkState(NetworkState state) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  rtp_rtcp_->SetRTCPStatus(state == kNetworkUp ? config_.rtp.rtcp_mode
                                               : RtcpMode::kOff);
}

void RtpVideoStreamReceiver2::StartReceive() {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  receiving_ = true;
}

void RtpVideoStreamReceiver2::StopReceive() {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);
  receiving_ = false;
}

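// Reports ULPFEC-related histograms; called from the destructor.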
void RtpVideoStreamReceiver2::UpdateHistograms() {
  FecPacketCounter counter = ulpfec_receiver_->GetPacketCounter();
  if (counter.first_packet_time_ms == -1)
    return;

  int64_t elapsed_sec =
      (clock_->TimeInMilliseconds() - counter.first_packet_time_ms) / 1000;
  if (elapsed_sec < metrics::kMinRunTimeInSeconds)
    return;

  if (counter.num_packets > 0) {
    RTC_HISTOGRAM_PERCENTAGE(
        "WebRTC.Video.ReceivedFecPacketsInPercent",
        static_cast<int>(counter.num_fec_packets * 100 / counter.num_packets));
  }
  if (counter.num_fec_packets > 0) {
    RTC_HISTOGRAM_PERCENTAGE("WebRTC.Video.RecoveredMediaPacketsInPercentOfFec",
                             static_cast<int>(counter.num_recovered_packets *
                                              100 / counter.num_fec_packets));
  }
  if (config_.rtp.ulpfec_payload_type != -1) {
    RTC_HISTOGRAM_COUNTS_10000(
        "WebRTC.Video.FecBitrateReceivedInKbps",
        static_cast<int>(counter.num_bytes * 8 / elapsed_sec / 1000));
  }
}

void RtpVideoStreamReceiver2::InsertSpsPpsIntoTracker(uint8_t payload_type) {
  RTC_DCHECK_RUN_ON(&worker_task_checker_);

  auto codec_params_it = pt_codec_params_.find(payload_type);
  if (codec_params_it == pt_codec_params_.end())
    return;

  RTC_LOG(LS_INFO) << "Found out of band supplied codec parameters for"
                      " payload type: "
                   << static_cast<int>(payload_type);

  H264SpropParameterSets sprop_decoder;
  auto sprop_base64_it =
      codec_params_it->second.find(cricket::kH264FmtpSpropParameterSets);

  if (sprop_base64_it == codec_params_it->second.end())
    return;

  if (!sprop_decoder.DecodeSprop(sprop_base64_it->second.c_str()))
    return;

  tracker_.InsertSpsPpsNalus(sprop_decoder.sps_nalu(),
                             sprop_decoder.pps_nalu());
}

}  // namespace webrtc