1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include "video/video_stream_encoder.h"
12
13 #include <algorithm>
14 #include <array>
15 #include <limits>
16 #include <memory>
17 #include <numeric>
18 #include <utility>
19
20 #include "absl/algorithm/container.h"
21 #include "absl/types/optional.h"
22 #include "api/sequence_checker.h"
23 #include "api/task_queue/queued_task.h"
24 #include "api/task_queue/task_queue_base.h"
25 #include "api/video/encoded_image.h"
26 #include "api/video/i420_buffer.h"
27 #include "api/video/video_adaptation_reason.h"
28 #include "api/video/video_bitrate_allocator_factory.h"
29 #include "api/video/video_codec_constants.h"
30 #include "api/video/video_layers_allocation.h"
31 #include "api/video_codecs/video_encoder.h"
32 #include "call/adaptation/resource_adaptation_processor.h"
33 #include "call/adaptation/video_stream_adapter.h"
34 #include "modules/video_coding/codecs/vp9/svc_rate_allocator.h"
35 #include "modules/video_coding/include/video_codec_initializer.h"
36 #include "rtc_base/arraysize.h"
37 #include "rtc_base/checks.h"
38 #include "rtc_base/constructor_magic.h"
39 #include "rtc_base/event.h"
40 #include "rtc_base/experiments/alr_experiment.h"
41 #include "rtc_base/experiments/encoder_info_settings.h"
42 #include "rtc_base/experiments/rate_control_settings.h"
43 #include "rtc_base/location.h"
44 #include "rtc_base/logging.h"
45 #include "rtc_base/strings/string_builder.h"
46 #include "rtc_base/system/no_unique_address.h"
47 #include "rtc_base/thread_annotations.h"
48 #include "rtc_base/trace_event.h"
49 #include "system_wrappers/include/field_trial.h"
50 #include "video/adaptation/video_stream_encoder_resource_manager.h"
51 #include "video/alignment_adjuster.h"
52
53 namespace webrtc {
54
55 namespace {
56
// Time interval for logging frame counts.
const int64_t kFrameLogIntervalMs = 60000;

// Time to keep a single cached pending frame in paused state.
const int64_t kPendingFrameTimeoutMs = 1000;

// Field trial name for the frame dropper kill switch.
// NOTE(review): consumers of this constant are outside this chunk - confirm.
constexpr char kFrameDropperFieldTrial[] = "WebRTC-FrameDropper";

// Averaging window spanning 90 frames at default 30fps, matching old media
// optimization module defaults.
// NOTE(review): "Averging" is a typo, kept because the identifier is
// referenced elsewhere in this file.
const int64_t kFrameRateAvergingWindowSizeMs = (1000 / 30) * 90;

// Fallback payload size in bytes.
// NOTE(review): usage is outside this chunk - confirm against callers.
const size_t kDefaultPayloadSize = 1440;

// Interval for refreshing periodically-updated encoder parameters.
// NOTE(review): usage is outside this chunk - confirm against callers.
const int64_t kParameterUpdateIntervalMs = 1000;

// Animation is capped to 720p.
constexpr int kMaxAnimationPixels = 1280 * 720;
75
// Returns true if switching from `prev_send_codec` to `new_send_codec`
// requires the encoder to be released and re-initialized, as opposed to
// being reconfigurable in place (e.g. via a rate update).
// `was_encode_called_since_last_initialization` relaxes the start-bitrate
// check once encoding has begun.
bool RequiresEncoderReset(const VideoCodec& prev_send_codec,
                          const VideoCodec& new_send_codec,
                          bool was_encode_called_since_last_initialization) {
  // Does not check max/minBitrate or maxFramerate.
  if (new_send_codec.codecType != prev_send_codec.codecType ||
      new_send_codec.width != prev_send_codec.width ||
      new_send_codec.height != prev_send_codec.height ||
      new_send_codec.qpMax != prev_send_codec.qpMax ||
      new_send_codec.numberOfSimulcastStreams !=
          prev_send_codec.numberOfSimulcastStreams ||
      new_send_codec.mode != prev_send_codec.mode) {
    return true;
  }

  if (!was_encode_called_since_last_initialization &&
      (new_send_codec.startBitrate != prev_send_codec.startBitrate)) {
    // If start bitrate has changed reconfigure encoder only if encoding had not
    // yet started.
    return true;
  }

  // A change in codec-specific settings also forces a reset.
  switch (new_send_codec.codecType) {
    case kVideoCodecVP8:
      if (new_send_codec.VP8() != prev_send_codec.VP8()) {
        return true;
      }
      break;

    case kVideoCodecVP9:
      if (new_send_codec.VP9() != prev_send_codec.VP9()) {
        return true;
      }
      break;

    case kVideoCodecH264:
      if (new_send_codec.H264() != prev_send_codec.H264()) {
        return true;
      }
      break;
#ifndef DISABLE_H265
    case kVideoCodecH265:
      if (new_send_codec.H265() != prev_send_codec.H265()) {
        return true;
      }
      break;
#endif

    default:
      break;
  }

  // Check per-simulcast-stream settings; only active streams can force a
  // reset.
  for (unsigned char i = 0; i < new_send_codec.numberOfSimulcastStreams; ++i) {
    if (!new_send_codec.simulcastStream[i].active) {
      // No need to reset when stream is inactive.
      continue;
    }

    if (!prev_send_codec.simulcastStream[i].active ||
        new_send_codec.simulcastStream[i].width !=
            prev_send_codec.simulcastStream[i].width ||
        new_send_codec.simulcastStream[i].height !=
            prev_send_codec.simulcastStream[i].height ||
        new_send_codec.simulcastStream[i].numberOfTemporalLayers !=
            prev_send_codec.simulcastStream[i].numberOfTemporalLayers ||
        new_send_codec.simulcastStream[i].qpMax !=
            prev_send_codec.simulcastStream[i].qpMax) {
      return true;
    }
  }

  // For VP9 SVC, also compare the configured spatial layers.
  if (new_send_codec.codecType == kVideoCodecVP9) {
    size_t num_spatial_layers = new_send_codec.VP9().numberOfSpatialLayers;
    for (unsigned char i = 0; i < num_spatial_layers; ++i) {
      if (new_send_codec.spatialLayers[i].width !=
              prev_send_codec.spatialLayers[i].width ||
          new_send_codec.spatialLayers[i].height !=
              prev_send_codec.spatialLayers[i].height ||
          new_send_codec.spatialLayers[i].numberOfTemporalLayers !=
              prev_send_codec.spatialLayers[i].numberOfTemporalLayers ||
          new_send_codec.spatialLayers[i].qpMax !=
              prev_send_codec.spatialLayers[i].qpMax) {
        return true;
      }
    }
  }

  // Changing the scalability mode requires re-initialization.
  if (new_send_codec.ScalabilityMode() != prev_send_codec.ScalabilityMode()) {
    return true;
  }

  return false;
}
168
GetExperimentGroups()169 std::array<uint8_t, 2> GetExperimentGroups() {
170 std::array<uint8_t, 2> experiment_groups;
171 absl::optional<AlrExperimentSettings> experiment_settings =
172 AlrExperimentSettings::CreateFromFieldTrial(
173 AlrExperimentSettings::kStrictPacingAndProbingExperimentName);
174 if (experiment_settings) {
175 experiment_groups[0] = experiment_settings->group_id + 1;
176 } else {
177 experiment_groups[0] = 0;
178 }
179 experiment_settings = AlrExperimentSettings::CreateFromFieldTrial(
180 AlrExperimentSettings::kScreenshareProbingBweExperimentName);
181 if (experiment_settings) {
182 experiment_groups[1] = experiment_settings->group_id + 1;
183 } else {
184 experiment_groups[1] = 0;
185 }
186 return experiment_groups;
187 }
188
189 // Limit allocation across TLs in bitrate allocation according to number of TLs
190 // in EncoderInfo.
UpdateAllocationFromEncoderInfo(const VideoBitrateAllocation & allocation,const VideoEncoder::EncoderInfo & encoder_info)191 VideoBitrateAllocation UpdateAllocationFromEncoderInfo(
192 const VideoBitrateAllocation& allocation,
193 const VideoEncoder::EncoderInfo& encoder_info) {
194 if (allocation.get_sum_bps() == 0) {
195 return allocation;
196 }
197 VideoBitrateAllocation new_allocation;
198 for (int si = 0; si < kMaxSpatialLayers; ++si) {
199 if (encoder_info.fps_allocation[si].size() == 1 &&
200 allocation.IsSpatialLayerUsed(si)) {
201 // One TL is signalled to be used by the encoder. Do not distribute
202 // bitrate allocation across TLs (use sum at ti:0).
203 new_allocation.SetBitrate(si, 0, allocation.GetSpatialLayerSum(si));
204 } else {
205 for (int ti = 0; ti < kMaxTemporalStreams; ++ti) {
206 if (allocation.HasBitrate(si, ti))
207 new_allocation.SetBitrate(si, ti, allocation.GetBitrate(si, ti));
208 }
209 }
210 }
211 new_allocation.set_bw_limited(allocation.is_bw_limited());
212 return new_allocation;
213 }
214
// Builds a VideoLayersAllocation from the active rate allocation
// (`current_rate`: bitrate per spatial/temporal layer plus framerate) and the
// encoder structure reported in `encoder_info`. Stream structures can be
// Ksvc, Full SVC, Simulcast etc. Returns an empty allocation when no bitrate
// is allocated at all.
VideoLayersAllocation CreateVideoLayersAllocation(
    const VideoCodec& encoder_config,
    const VideoEncoder::RateControlParameters& current_rate,
    const VideoEncoder::EncoderInfo& encoder_info) {
  const VideoBitrateAllocation& target_bitrate = current_rate.target_bitrate;
  VideoLayersAllocation layers_allocation;
  if (target_bitrate.get_sum_bps() == 0) {
    return layers_allocation;
  }

  if (encoder_config.numberOfSimulcastStreams > 1) {
    // Simulcast: each active stream maps to its own rtp_stream_index with
    // spatial_id 0.
    layers_allocation.resolution_and_frame_rate_is_valid = true;
    for (int si = 0; si < encoder_config.numberOfSimulcastStreams; ++si) {
      // Skip simulcast streams with no allocated bitrate.
      if (!target_bitrate.IsSpatialLayerUsed(si) ||
          target_bitrate.GetSpatialLayerSum(si) == 0) {
        continue;
      }
      layers_allocation.active_spatial_layers.emplace_back();
      VideoLayersAllocation::SpatialLayer& spatial_layer =
          layers_allocation.active_spatial_layers.back();
      spatial_layer.width = encoder_config.simulcastStream[si].width;
      spatial_layer.height = encoder_config.simulcastStream[si].height;
      spatial_layer.rtp_stream_index = si;
      spatial_layer.spatial_id = 0;
      auto frame_rate_fraction =
          VideoEncoder::EncoderInfo::kMaxFramerateFraction;
      if (encoder_info.fps_allocation[si].size() == 1) {
        // One TL is signalled to be used by the encoder. Do not distribute
        // bitrate allocation across TLs (use sum at tl:0).
        spatial_layer.target_bitrate_per_temporal_layer.push_back(
            DataRate::BitsPerSec(target_bitrate.GetSpatialLayerSum(si)));
        frame_rate_fraction = encoder_info.fps_allocation[si][0];
      } else {  // Temporal layers are supported.
        // Entries are cumulative: entry `ti` holds the sum of bitrates for
        // temporal layers 0..ti.
        uint32_t temporal_layer_bitrate_bps = 0;
        for (size_t ti = 0;
             ti < encoder_config.simulcastStream[si].numberOfTemporalLayers;
             ++ti) {
          if (!target_bitrate.HasBitrate(si, ti)) {
            break;
          }
          if (ti < encoder_info.fps_allocation[si].size()) {
            // Use frame rate of the top used temporal layer.
            frame_rate_fraction = encoder_info.fps_allocation[si][ti];
          }
          temporal_layer_bitrate_bps += target_bitrate.GetBitrate(si, ti);
          spatial_layer.target_bitrate_per_temporal_layer.push_back(
              DataRate::BitsPerSec(temporal_layer_bitrate_bps));
        }
      }
      // Encoder may drop frames internally if `maxFramerate` is set.
      spatial_layer.frame_rate_fps = std::min<uint8_t>(
          encoder_config.simulcastStream[si].maxFramerate,
          rtc::saturated_cast<uint8_t>(
              (current_rate.framerate_fps * frame_rate_fraction) /
              VideoEncoder::EncoderInfo::kMaxFramerateFraction));
    }
  } else if (encoder_config.numberOfSimulcastStreams == 1) {
    // Single stream, possibly SVC: all layers share rtp_stream_index 0 and
    // are distinguished by spatial_id.
    // TODO(bugs.webrtc.org/12000): Implement support for AV1 with
    // scalability.
    const bool higher_spatial_depend_on_lower =
        encoder_config.codecType == kVideoCodecVP9 &&
        encoder_config.VP9().interLayerPred == InterLayerPredMode::kOn;
    layers_allocation.resolution_and_frame_rate_is_valid = true;

    // Running per-temporal-layer totals across lower spatial layers; used to
    // report aggregate rates when higher layers depend on lower ones.
    std::vector<DataRate> aggregated_spatial_bitrate(
        webrtc::kMaxTemporalStreams, DataRate::Zero());
    for (int si = 0; si < webrtc::kMaxSpatialLayers; ++si) {
      layers_allocation.resolution_and_frame_rate_is_valid = true;
      if (!target_bitrate.IsSpatialLayerUsed(si) ||
          target_bitrate.GetSpatialLayerSum(si) == 0) {
        break;
      }
      layers_allocation.active_spatial_layers.emplace_back();
      VideoLayersAllocation::SpatialLayer& spatial_layer =
          layers_allocation.active_spatial_layers.back();
      spatial_layer.width = encoder_config.spatialLayers[si].width;
      spatial_layer.height = encoder_config.spatialLayers[si].height;
      spatial_layer.rtp_stream_index = 0;
      spatial_layer.spatial_id = si;
      auto frame_rate_fraction =
          VideoEncoder::EncoderInfo::kMaxFramerateFraction;
      if (encoder_info.fps_allocation[si].size() == 1) {
        // One TL is signalled to be used by the encoder. Do not distribute
        // bitrate allocation across TLs (use sum at tl:0).
        DataRate aggregated_temporal_bitrate =
            DataRate::BitsPerSec(target_bitrate.GetSpatialLayerSum(si));
        aggregated_spatial_bitrate[0] += aggregated_temporal_bitrate;
        if (higher_spatial_depend_on_lower) {
          // Report the cumulative rate including all lower spatial layers.
          spatial_layer.target_bitrate_per_temporal_layer.push_back(
              aggregated_spatial_bitrate[0]);
        } else {
          spatial_layer.target_bitrate_per_temporal_layer.push_back(
              aggregated_temporal_bitrate);
        }
        frame_rate_fraction = encoder_info.fps_allocation[si][0];
      } else {  // Temporal layers are supported.
        DataRate aggregated_temporal_bitrate = DataRate::Zero();
        for (size_t ti = 0;
             ti < encoder_config.spatialLayers[si].numberOfTemporalLayers;
             ++ti) {
          if (!target_bitrate.HasBitrate(si, ti)) {
            break;
          }
          if (ti < encoder_info.fps_allocation[si].size()) {
            // Use frame rate of the top used temporal layer.
            frame_rate_fraction = encoder_info.fps_allocation[si][ti];
          }
          aggregated_temporal_bitrate +=
              DataRate::BitsPerSec(target_bitrate.GetBitrate(si, ti));
          if (higher_spatial_depend_on_lower) {
            // Cumulative over both temporal layers (within this spatial
            // layer) and all lower spatial layers.
            spatial_layer.target_bitrate_per_temporal_layer.push_back(
                aggregated_temporal_bitrate + aggregated_spatial_bitrate[ti]);
            aggregated_spatial_bitrate[ti] += aggregated_temporal_bitrate;
          } else {
            spatial_layer.target_bitrate_per_temporal_layer.push_back(
                aggregated_temporal_bitrate);
          }
        }
      }
      // Encoder may drop frames internally if `maxFramerate` is set.
      spatial_layer.frame_rate_fps = std::min<uint8_t>(
          encoder_config.spatialLayers[si].maxFramerate,
          rtc::saturated_cast<uint8_t>(
              (current_rate.framerate_fps * frame_rate_fraction) /
              VideoEncoder::EncoderInfo::kMaxFramerateFraction));
    }
  }

  return layers_allocation;
}
349
// Returns `info`, possibly augmented with default singlecast resolution
// bitrate limits when none are configured and more than one simulcast layer
// is present (bitrate limits are not used for simulcast).
VideoEncoder::EncoderInfo GetEncoderInfoWithBitrateLimitUpdate(
    const VideoEncoder::EncoderInfo& info,
    const VideoEncoderConfig& encoder_config,
    bool default_limits_allowed) {
  const bool limits_already_set = !info.resolution_bitrate_limits.empty();
  const bool single_layer = encoder_config.simulcast_layers.size() <= 1;
  if (!default_limits_allowed || limits_already_set || single_layer) {
    return info;
  }
  // No limits configured and multiple layers in use: fall back to the
  // default singlecast limits for this codec.
  VideoEncoder::EncoderInfo updated_info = info;
  updated_info.resolution_bitrate_limits =
      EncoderInfoSettings::GetDefaultSinglecastBitrateLimits(
          encoder_config.codec_type);
  return updated_info;
}
366
NumActiveStreams(const std::vector<VideoStream> & streams)367 int NumActiveStreams(const std::vector<VideoStream>& streams) {
368 int num_active = 0;
369 for (const auto& stream : streams) {
370 if (stream.active)
371 ++num_active;
372 }
373 return num_active;
374 }
375
// For singlecast VP9 (configured through simulcast layers but with a single
// active encoding), applies the encoder's resolution-dependent bitrate
// limits to the active spatial layer of `codec`. No-op for other codecs,
// single-layer configs, or true simulcast.
void ApplyVp9BitrateLimits(const VideoEncoder::EncoderInfo& encoder_info,
                           const VideoEncoderConfig& encoder_config,
                           VideoCodec* codec) {
  if (codec->codecType != VideoCodecType::kVideoCodecVP9 ||
      encoder_config.simulcast_layers.size() <= 1 ||
      VideoStreamEncoderResourceManager::IsSimulcast(encoder_config)) {
    // Resolution bitrate limits usage is restricted to singlecast.
    return;
  }

  // Get bitrate limits for active stream.
  absl::optional<uint32_t> pixels =
      VideoStreamAdapter::GetSingleActiveLayerPixels(*codec);
  if (!pixels.has_value()) {
    return;
  }
  absl::optional<VideoEncoder::ResolutionBitrateLimits> bitrate_limits =
      encoder_info.GetEncoderBitrateLimitsForResolution(*pixels);
  if (!bitrate_limits.has_value()) {
    return;
  }

  // Index for the active stream (if several layers are flagged active, the
  // last one wins).
  absl::optional<size_t> index;
  for (size_t i = 0; i < encoder_config.simulcast_layers.size(); ++i) {
    if (encoder_config.simulcast_layers[i].active)
      index = i;
  }
  if (!index.has_value()) {
    return;
  }

  // Intersect the encoder limits with any limits configured on the layer;
  // a layer value <= 0 means "not configured".
  int min_bitrate_bps;
  if (encoder_config.simulcast_layers[*index].min_bitrate_bps <= 0) {
    min_bitrate_bps = bitrate_limits->min_bitrate_bps;
  } else {
    min_bitrate_bps =
        std::max(bitrate_limits->min_bitrate_bps,
                 encoder_config.simulcast_layers[*index].min_bitrate_bps);
  }
  int max_bitrate_bps;
  if (encoder_config.simulcast_layers[*index].max_bitrate_bps <= 0) {
    max_bitrate_bps = bitrate_limits->max_bitrate_bps;
  } else {
    max_bitrate_bps =
        std::min(bitrate_limits->max_bitrate_bps,
                 encoder_config.simulcast_layers[*index].max_bitrate_bps);
  }
  // An empty intersection would be contradictory; keep configured values.
  if (min_bitrate_bps >= max_bitrate_bps) {
    RTC_LOG(LS_WARNING) << "Bitrate limits not used, min_bitrate_bps "
                        << min_bitrate_bps << " >= max_bitrate_bps "
                        << max_bitrate_bps;
    return;
  }

  // Apply to the first active spatial layer only (the single active layer in
  // singlecast). Spatial-layer bitrates are stored in kbps, hence / 1000.
  for (int i = 0; i < codec->VP9()->numberOfSpatialLayers; ++i) {
    if (codec->spatialLayers[i].active) {
      codec->spatialLayers[i].minBitrate = min_bitrate_bps / 1000;
      codec->spatialLayers[i].maxBitrate = max_bitrate_bps / 1000;
      codec->spatialLayers[i].targetBitrate =
          std::min(codec->spatialLayers[i].targetBitrate,
                   codec->spatialLayers[i].maxBitrate);
      break;
    }
  }
}
442
// If simulcast is configured but exactly one (non-lowest) stream is active,
// intersects that stream's min/max bitrate with the encoder's
// resolution-dependent limits and writes the result back into `streams`.
// No-op otherwise.
void ApplyEncoderBitrateLimitsIfSingleActiveStream(
    const VideoEncoder::EncoderInfo& encoder_info,
    const std::vector<VideoStream>& encoder_config_layers,
    std::vector<VideoStream>* streams) {
  // Apply limits if simulcast with one active stream (expect lowest).
  bool single_active_stream =
      streams->size() > 1 && NumActiveStreams(*streams) == 1 &&
      !streams->front().active && NumActiveStreams(encoder_config_layers) == 1;
  if (!single_active_stream) {
    return;
  }

  // Index for the active stream (the guard above ensures exactly one layer
  // is active; the loop keeps the last active index).
  size_t index = 0;
  for (size_t i = 0; i < encoder_config_layers.size(); ++i) {
    if (encoder_config_layers[i].active)
      index = i;
  }
  if (streams->size() < (index + 1) || !(*streams)[index].active) {
    return;
  }

  // Get bitrate limits for active stream.
  absl::optional<VideoEncoder::ResolutionBitrateLimits> encoder_bitrate_limits =
      encoder_info.GetEncoderBitrateLimitsForResolution(
          (*streams)[index].width * (*streams)[index].height);
  if (!encoder_bitrate_limits) {
    return;
  }

  // If bitrate limits are set by RtpEncodingParameters, use intersection.
  // NOTE(review): the <= 0 guards read encoder_config_layers[index] while the
  // max()/min() operands read (*streams)[index] - confirm this asymmetry is
  // intentional (streams presumably mirror the configured layers).
  int min_bitrate_bps;
  if (encoder_config_layers[index].min_bitrate_bps <= 0) {
    min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps;
  } else {
    min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps,
                               (*streams)[index].min_bitrate_bps);
  }
  int max_bitrate_bps;
  if (encoder_config_layers[index].max_bitrate_bps <= 0) {
    max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps;
  } else {
    max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps,
                               (*streams)[index].max_bitrate_bps);
  }
  // An empty intersection means the limits are unusable; keep the stream's
  // configured values.
  if (min_bitrate_bps >= max_bitrate_bps) {
    RTC_LOG(LS_WARNING) << "Encoder bitrate limits"
                        << " (min=" << encoder_bitrate_limits->min_bitrate_bps
                        << ", max=" << encoder_bitrate_limits->max_bitrate_bps
                        << ") do not intersect with stream limits"
                        << " (min=" << (*streams)[index].min_bitrate_bps
                        << ", max=" << (*streams)[index].max_bitrate_bps
                        << "). Encoder bitrate limits not used.";
    return;
  }

  (*streams)[index].min_bitrate_bps = min_bitrate_bps;
  (*streams)[index].max_bitrate_bps = max_bitrate_bps;
  (*streams)[index].target_bitrate_bps =
      std::min((*streams)[index].target_bitrate_bps,
               encoder_bitrate_limits->max_bitrate_bps);
}
505
506 } // namespace
507
// Default-constructs rate settings with zeroed targets and default rate
// control parameters.
VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings()
    : rate_control(),
      encoder_target(DataRate::Zero()),
      stable_encoder_target(DataRate::Zero()) {}
512
// Constructs rate settings from an explicit bitrate allocation, framerate,
// bandwidth allocation and (stable) encoder targets.
VideoStreamEncoder::EncoderRateSettings::EncoderRateSettings(
    const VideoBitrateAllocation& bitrate,
    double framerate_fps,
    DataRate bandwidth_allocation,
    DataRate encoder_target,
    DataRate stable_encoder_target)
    : rate_control(bitrate, framerate_fps, bandwidth_allocation),
      encoder_target(encoder_target),
      stable_encoder_target(stable_encoder_target) {}
522
operator ==(const EncoderRateSettings & rhs) const523 bool VideoStreamEncoder::EncoderRateSettings::operator==(
524 const EncoderRateSettings& rhs) const {
525 return rate_control == rhs.rate_control &&
526 encoder_target == rhs.encoder_target &&
527 stable_encoder_target == rhs.stable_encoder_target;
528 }
529
operator !=(const EncoderRateSettings & rhs) const530 bool VideoStreamEncoder::EncoderRateSettings::operator!=(
531 const EncoderRateSettings& rhs) const {
532 return !(*this == rhs);
533 }
534
535 class VideoStreamEncoder::DegradationPreferenceManager
536 : public DegradationPreferenceProvider {
537 public:
DegradationPreferenceManager(VideoStreamAdapter * video_stream_adapter)538 explicit DegradationPreferenceManager(
539 VideoStreamAdapter* video_stream_adapter)
540 : degradation_preference_(DegradationPreference::DISABLED),
541 is_screenshare_(false),
542 effective_degradation_preference_(DegradationPreference::DISABLED),
543 video_stream_adapter_(video_stream_adapter) {
544 RTC_DCHECK(video_stream_adapter_);
545 sequence_checker_.Detach();
546 }
547
548 ~DegradationPreferenceManager() override = default;
549
degradation_preference() const550 DegradationPreference degradation_preference() const override {
551 RTC_DCHECK_RUN_ON(&sequence_checker_);
552 return effective_degradation_preference_;
553 }
554
SetDegradationPreference(DegradationPreference degradation_preference)555 void SetDegradationPreference(DegradationPreference degradation_preference) {
556 RTC_DCHECK_RUN_ON(&sequence_checker_);
557 degradation_preference_ = degradation_preference;
558 MaybeUpdateEffectiveDegradationPreference();
559 }
560
SetIsScreenshare(bool is_screenshare)561 void SetIsScreenshare(bool is_screenshare) {
562 RTC_DCHECK_RUN_ON(&sequence_checker_);
563 is_screenshare_ = is_screenshare;
564 MaybeUpdateEffectiveDegradationPreference();
565 }
566
567 private:
MaybeUpdateEffectiveDegradationPreference()568 void MaybeUpdateEffectiveDegradationPreference()
569 RTC_RUN_ON(&sequence_checker_) {
570 DegradationPreference effective_degradation_preference =
571 (is_screenshare_ &&
572 degradation_preference_ == DegradationPreference::BALANCED)
573 ? DegradationPreference::MAINTAIN_RESOLUTION
574 : degradation_preference_;
575
576 if (effective_degradation_preference != effective_degradation_preference_) {
577 effective_degradation_preference_ = effective_degradation_preference;
578 video_stream_adapter_->SetDegradationPreference(
579 effective_degradation_preference);
580 }
581 }
582
583 RTC_NO_UNIQUE_ADDRESS SequenceChecker sequence_checker_;
584 DegradationPreference degradation_preference_
585 RTC_GUARDED_BY(&sequence_checker_);
586 bool is_screenshare_ RTC_GUARDED_BY(&sequence_checker_);
587 DegradationPreference effective_degradation_preference_
588 RTC_GUARDED_BY(&sequence_checker_);
589 VideoStreamAdapter* video_stream_adapter_ RTC_GUARDED_BY(&sequence_checker_);
590 };
591
// Constructs the encoder. Members are initialized in declaration order (per
// C++ rules the initializer list must match the header's declaration order).
// The constructor blocks on an event until the adaptation machinery has been
// wired up on the encoder queue, so the object is fully initialized on
// return.
VideoStreamEncoder::VideoStreamEncoder(
    Clock* clock,
    uint32_t number_of_cores,
    VideoStreamEncoderObserver* encoder_stats_observer,
    const VideoStreamEncoderSettings& settings,
    std::unique_ptr<OveruseFrameDetector> overuse_detector,
    TaskQueueFactory* task_queue_factory,
    BitrateAllocationCallbackType allocation_cb_type)
    : main_queue_(TaskQueueBase::Current()),
      number_of_cores_(number_of_cores),
      sink_(nullptr),
      settings_(settings),
      allocation_cb_type_(allocation_cb_type),
      rate_control_settings_(RateControlSettings::ParseFromFieldTrials()),
      encoder_selector_(settings.encoder_factory->GetEncoderSelector()),
      encoder_stats_observer_(encoder_stats_observer),
      encoder_initialized_(false),
      max_framerate_(-1),
      pending_encoder_reconfiguration_(false),
      pending_encoder_creation_(false),
      crop_width_(0),
      crop_height_(0),
      encoder_target_bitrate_bps_(absl::nullopt),
      max_data_payload_length_(0),
      encoder_paused_and_dropped_frame_(false),
      was_encode_called_since_last_initialization_(false),
      encoder_failed_(false),
      clock_(clock),
      posted_frames_waiting_for_encode_(0),
      last_captured_timestamp_(0),
      // Offset between NTP time and the local monotonic clock, captured once
      // at construction.
      delta_ntp_internal_ms_(clock_->CurrentNtpInMilliseconds() -
                             clock_->TimeInMilliseconds()),
      last_frame_log_ms_(clock_->TimeInMilliseconds()),
      captured_frame_count_(0),
      dropped_frame_cwnd_pushback_count_(0),
      dropped_frame_encoder_block_count_(0),
      pending_frame_post_time_us_(0),
      accumulated_update_rect_{0, 0, 0, 0},
      accumulated_update_rect_is_valid_(true),
      animation_start_time_(Timestamp::PlusInfinity()),
      cap_resolution_due_to_video_content_(false),
      expect_resize_state_(ExpectResizeState::kNoResize),
      fec_controller_override_(nullptr),
      force_disable_frame_dropper_(false),
      input_framerate_(kFrameRateAvergingWindowSizeMs, 1000),
      pending_frame_drops_(0),
      cwnd_frame_counter_(0),
      next_frame_types_(1, VideoFrameType::kVideoFrameDelta),
      frame_encode_metadata_writer_(this),
      experiment_groups_(GetExperimentGroups()),
      // NOTE(review): "Automatinc" is a typo, but the name must match the
      // declaration in the header.
      automatic_animation_detection_experiment_(
          ParseAutomatincAnimationDetectionFieldTrial()),
      input_state_provider_(encoder_stats_observer),
      video_stream_adapter_(
          std::make_unique<VideoStreamAdapter>(&input_state_provider_,
                                               encoder_stats_observer)),
      resource_adaptation_processor_(
          std::make_unique<ResourceAdaptationProcessor>(
              video_stream_adapter_.get())),
      degradation_preference_manager_(
          std::make_unique<DegradationPreferenceManager>(
              video_stream_adapter_.get())),
      adaptation_constraints_(),
      stream_resource_manager_(&input_state_provider_,
                               encoder_stats_observer,
                               clock_,
                               settings_.experiment_cpu_load_estimator,
                               std::move(overuse_detector),
                               degradation_preference_manager_.get()),
      video_source_sink_controller_(/*sink=*/this,
                                    /*source=*/nullptr),
      default_limits_allowed_(
          !field_trial::IsEnabled("WebRTC-DefaultBitrateLimitsKillSwitch")),
      qp_parsing_allowed_(
          !field_trial::IsEnabled("WebRTC-QpParsingKillSwitch")),
      encoder_queue_(task_queue_factory->CreateTaskQueue(
          "EncoderQueue",
          TaskQueueFactory::Priority::NORMAL)) {
  RTC_DCHECK(main_queue_);
  RTC_DCHECK(encoder_stats_observer);
  RTC_DCHECK_GE(number_of_cores, 1);

  stream_resource_manager_.Initialize(&encoder_queue_);

  // Finish wiring up the adaptation processor on the encoder queue. Block
  // until done so that `initialize_processor_event` (on the stack) outlives
  // the posted task.
  rtc::Event initialize_processor_event;
  encoder_queue_.PostTask([this, &initialize_processor_event] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    resource_adaptation_processor_->SetTaskQueue(encoder_queue_.Get());
    stream_resource_manager_.SetAdaptationProcessor(
        resource_adaptation_processor_.get(), video_stream_adapter_.get());
    resource_adaptation_processor_->AddResourceLimitationsListener(
        &stream_resource_manager_);
    video_stream_adapter_->AddRestrictionsListener(&stream_resource_manager_);
    video_stream_adapter_->AddRestrictionsListener(this);
    stream_resource_manager_.MaybeInitializePixelLimitResource();

    // Add the stream resource manager's resources to the processor.
    adaptation_constraints_ = stream_resource_manager_.AdaptationConstraints();
    for (auto* constraint : adaptation_constraints_) {
      video_stream_adapter_->AddAdaptationConstraint(constraint);
    }
    initialize_processor_event.Set();
  });
  initialize_processor_event.Wait(rtc::Event::kForever);
}
697
VideoStreamEncoder::~VideoStreamEncoder() {
  RTC_DCHECK_RUN_ON(main_queue_);
  // Stop() detaches the source; reaching the destructor with a source still
  // attached indicates a missing Stop() call.
  RTC_DCHECK(!video_source_sink_controller_.HasSource())
      << "Must call ::Stop() before destruction.";
}
703
// Stops encoding: detaches the video source (so no new frames arrive), then
// synchronously tears down adaptation resources and the encoder on the
// encoder queue. Blocks until teardown is complete.
void VideoStreamEncoder::Stop() {
  RTC_DCHECK_RUN_ON(main_queue_);
  video_source_sink_controller_.SetSource(nullptr);

  rtc::Event shutdown_event;

  encoder_queue_.PostTask([this, &shutdown_event] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    if (resource_adaptation_processor_) {
      stream_resource_manager_.StopManagedResources();
      // Unregister constraints, resources and listeners that were registered
      // during construction / runtime.
      for (auto* constraint : adaptation_constraints_) {
        video_stream_adapter_->RemoveAdaptationConstraint(constraint);
      }
      for (auto& resource : additional_resources_) {
        stream_resource_manager_.RemoveResource(resource);
      }
      additional_resources_.clear();
      video_stream_adapter_->RemoveRestrictionsListener(this);
      video_stream_adapter_->RemoveRestrictionsListener(
          &stream_resource_manager_);
      resource_adaptation_processor_->RemoveResourceLimitationsListener(
          &stream_resource_manager_);
      stream_resource_manager_.SetAdaptationProcessor(nullptr, nullptr);
      resource_adaptation_processor_.reset();
    }
    rate_allocator_ = nullptr;
    ReleaseEncoder();
    encoder_ = nullptr;
    shutdown_event.Set();
  });
  // Block so that `shutdown_event` (on the stack) outlives the posted task.
  shutdown_event.Wait(rtc::Event::kForever);
}
736
// Stores the FEC controller override and forwards it to the encoder, on the
// encoder queue. Expected to be called at most once (DCHECK enforced).
void VideoStreamEncoder::SetFecControllerOverride(
    FecControllerOverride* fec_controller_override) {
  encoder_queue_.PostTask([this, fec_controller_override] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    RTC_DCHECK(!fec_controller_override_);
    fec_controller_override_ = fec_controller_override;
    if (encoder_) {
      encoder_->SetFecControllerOverride(fec_controller_override_);
    }
  });
}
748
// Registers an externally provided adaptation resource. Blocks until the
// resource has been registered on the encoder queue.
void VideoStreamEncoder::AddAdaptationResource(
    rtc::scoped_refptr<Resource> resource) {
  RTC_DCHECK_RUN_ON(main_queue_);
  // Map any externally added resources as kCpu for the sake of stats reporting.
  // TODO(hbos): Make the manager map any unknown resources to kCpu and get rid
  // of this MapResourceToReason() call.
  rtc::Event map_resource_event;
  encoder_queue_.PostTask([this, resource, &map_resource_event] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    additional_resources_.push_back(resource);
    stream_resource_manager_.AddResource(resource, VideoAdaptationReason::kCpu);
    map_resource_event.Set();
  });
  map_resource_event.Wait(rtc::Event::kForever);
}
764
// Returns the currently registered adaptation resources.
std::vector<rtc::scoped_refptr<Resource>>
VideoStreamEncoder::GetAdaptationResources() {
  RTC_DCHECK_RUN_ON(main_queue_);
  // NOTE(review): reads resource_adaptation_processor_ from the main queue
  // while most other accesses happen on the encoder queue - confirm
  // GetResources() is safe to call from here.
  return resource_adaptation_processor_->GetResources();
}
770
// Attaches (or detaches, with nullptr) the video source and applies the
// requested degradation preference. Runs on the main queue; the preference
// update is forwarded to the encoder queue.
void VideoStreamEncoder::SetSource(
    rtc::VideoSourceInterface<VideoFrame>* source,
    const DegradationPreference& degradation_preference) {
  RTC_DCHECK_RUN_ON(main_queue_);
  video_source_sink_controller_.SetSource(source);
  input_state_provider_.OnHasInputChanged(source);

  // This may trigger reconfiguring the QualityScaler on the encoder queue.
  encoder_queue_.PostTask([this, degradation_preference] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    degradation_preference_manager_->SetDegradationPreference(
        degradation_preference);
    stream_resource_manager_.SetDegradationPreferences(degradation_preference);
    if (encoder_) {
      stream_resource_manager_.ConfigureQualityScaler(
          encoder_->GetEncoderInfo());
    }
  });
}
790
SetSink(EncoderSink * sink,bool rotation_applied)791 void VideoStreamEncoder::SetSink(EncoderSink* sink, bool rotation_applied) {
792 RTC_DCHECK_RUN_ON(main_queue_);
793 video_source_sink_controller_.SetRotationApplied(rotation_applied);
794 video_source_sink_controller_.PushSourceSinkSettings();
795
796 encoder_queue_.PostTask([this, sink] {
797 RTC_DCHECK_RUN_ON(&encoder_queue_);
798 sink_ = sink;
799 });
800 }
801
SetStartBitrate(int start_bitrate_bps)802 void VideoStreamEncoder::SetStartBitrate(int start_bitrate_bps) {
803 encoder_queue_.PostTask([this, start_bitrate_bps] {
804 RTC_DCHECK_RUN_ON(&encoder_queue_);
805 RTC_LOG(LS_INFO) << "SetStartBitrate " << start_bitrate_bps;
806 encoder_target_bitrate_bps_ =
807 start_bitrate_bps != 0 ? absl::optional<uint32_t>(start_bitrate_bps)
808 : absl::nullopt;
809 stream_resource_manager_.SetStartBitrate(
810 DataRate::BitsPerSec(start_bitrate_bps));
811 });
812 }
813
ConfigureEncoder(VideoEncoderConfig config,size_t max_data_payload_length)814 void VideoStreamEncoder::ConfigureEncoder(VideoEncoderConfig config,
815 size_t max_data_payload_length) {
816 encoder_queue_.PostTask(
817 [this, config = std::move(config), max_data_payload_length]() mutable {
818 RTC_DCHECK_RUN_ON(&encoder_queue_);
819 RTC_DCHECK(sink_);
820 RTC_LOG(LS_INFO) << "ConfigureEncoder requested.";
821
822 pending_encoder_creation_ =
823 (!encoder_ || encoder_config_.video_format != config.video_format ||
824 max_data_payload_length_ != max_data_payload_length);
825 encoder_config_ = std::move(config);
826 max_data_payload_length_ = max_data_payload_length;
827 pending_encoder_reconfiguration_ = true;
828
829 // Reconfigure the encoder now if the encoder has an internal source or
830 // if the frame resolution is known. Otherwise, the reconfiguration is
831 // deferred until the next frame to minimize the number of
832 // reconfigurations. The codec configuration depends on incoming video
833 // frame size.
834 if (last_frame_info_) {
835 ReconfigureEncoder();
836 } else {
837 codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
838 encoder_config_.video_format);
839 if (HasInternalSource()) {
840 last_frame_info_ = VideoFrameInfo(kDefaultInputPixelsWidth,
841 kDefaultInputPixelsHeight, false);
842 ReconfigureEncoder();
843 }
844 }
845 });
846 }
847
848 // TODO(bugs.webrtc.org/8807): Currently this always does a hard
849 // reconfiguration, but this isn't always necessary. Add in logic to only update
850 // the VideoBitrateAllocator and call OnEncoderConfigurationChanged with a
851 // "soft" reconfiguration.
// Rebuilds the encoder configuration (and, when needed, the encoder instance
// itself) from |encoder_config_| and the last observed input frame size.
// Runs on the encoder queue; |pending_encoder_reconfiguration_| must be set.
void VideoStreamEncoder::ReconfigureEncoder() {
  // Running on the encoder queue.
  RTC_DCHECK(pending_encoder_reconfiguration_);

  bool encoder_reset_required = false;
  if (pending_encoder_creation_) {
    // Destroy existing encoder instance before creating a new one. Otherwise
    // attempt to create another instance will fail if encoder factory
    // supports only single instance of encoder of given type.
    encoder_.reset();

    encoder_ = settings_.encoder_factory->CreateVideoEncoder(
        encoder_config_.video_format);
    // TODO(nisse): What to do if creating the encoder fails? Crash,
    // or just discard incoming frames?
    RTC_CHECK(encoder_);

    if (encoder_selector_) {
      encoder_selector_->OnCurrentEncoder(encoder_config_.video_format);
    }

    encoder_->SetFecControllerOverride(fec_controller_override_);

    codec_info_ = settings_.encoder_factory->QueryVideoEncoder(
        encoder_config_.video_format);

    encoder_reset_required = true;
  }

  // Possibly adjusts scale_resolution_down_by in |encoder_config_| to limit the
  // alignment value.
  AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors(
      encoder_->GetEncoderInfo(), &encoder_config_, absl::nullopt);

  std::vector<VideoStream> streams =
      encoder_config_.video_stream_factory->CreateEncoderStreams(
          last_frame_info_->width, last_frame_info_->height, encoder_config_);

  // Get alignment when actual number of layers are known.
  int alignment = AlignmentAdjuster::GetAlignmentAndMaybeAdjustScaleFactors(
      encoder_->GetEncoderInfo(), &encoder_config_, streams.size());

  // Check that the higher layers do not try to set number of temporal layers
  // to less than 1.
  // TODO(brandtr): Get rid of the wrapping optional as it serves no purpose
  // at this layer.
#if RTC_DCHECK_IS_ON
  for (const auto& stream : streams) {
    RTC_DCHECK_GE(stream.num_temporal_layers.value_or(1), 1);
  }
#endif

  // TODO(ilnik): If configured resolution is significantly less than provided,
  // e.g. because there are not enough SSRCs for all simulcast streams,
  // signal new resolutions via SinkWants to video source.

  // Stream dimensions may be not equal to given because of a simulcast
  // restrictions.
  auto highest_stream = absl::c_max_element(
      streams, [](const webrtc::VideoStream& a, const webrtc::VideoStream& b) {
        return std::tie(a.width, a.height) < std::tie(b.width, b.height);
      });
  int highest_stream_width = static_cast<int>(highest_stream->width);
  int highest_stream_height = static_cast<int>(highest_stream->height);
  // Dimension may be reduced to be, e.g. divisible by 4.
  RTC_CHECK_GE(last_frame_info_->width, highest_stream_width);
  RTC_CHECK_GE(last_frame_info_->height, highest_stream_height);
  crop_width_ = last_frame_info_->width - highest_stream_width;
  crop_height_ = last_frame_info_->height - highest_stream_height;

  // Ask the encoder for resolution-dependent bitrate limits, if it has any
  // for the current input resolution.
  absl::optional<VideoEncoder::ResolutionBitrateLimits> encoder_bitrate_limits =
      encoder_->GetEncoderInfo().GetEncoderBitrateLimitsForResolution(
          last_frame_info_->width * last_frame_info_->height);

  if (encoder_bitrate_limits) {
    if (streams.size() == 1 && encoder_config_.simulcast_layers.size() == 1) {
      // Bitrate limits can be set by app (in SDP or RtpEncodingParameters)
      // or/and can be provided by encoder. In presence of both set of limits,
      // the final set is derived as their intersection.
      int min_bitrate_bps;
      if (encoder_config_.simulcast_layers.empty() ||
          encoder_config_.simulcast_layers[0].min_bitrate_bps <= 0) {
        min_bitrate_bps = encoder_bitrate_limits->min_bitrate_bps;
      } else {
        min_bitrate_bps = std::max(encoder_bitrate_limits->min_bitrate_bps,
                                   streams.back().min_bitrate_bps);
      }

      int max_bitrate_bps;
      // We don't check encoder_config_.simulcast_layers[0].max_bitrate_bps
      // here since encoder_config_.max_bitrate_bps is derived from it (as
      // well as from other inputs).
      if (encoder_config_.max_bitrate_bps <= 0) {
        max_bitrate_bps = encoder_bitrate_limits->max_bitrate_bps;
      } else {
        max_bitrate_bps = std::min(encoder_bitrate_limits->max_bitrate_bps,
                                   streams.back().max_bitrate_bps);
      }

      if (min_bitrate_bps < max_bitrate_bps) {
        streams.back().min_bitrate_bps = min_bitrate_bps;
        streams.back().max_bitrate_bps = max_bitrate_bps;
        streams.back().target_bitrate_bps =
            std::min(streams.back().target_bitrate_bps,
                     encoder_bitrate_limits->max_bitrate_bps);
      } else {
        // Empty intersection: fall back to the app-provided limits.
        RTC_LOG(LS_WARNING)
            << "Bitrate limits provided by encoder"
            << " (min=" << encoder_bitrate_limits->min_bitrate_bps
            << ", max=" << encoder_bitrate_limits->max_bitrate_bps
            << ") do not intersect with limits set by app"
            << " (min=" << streams.back().min_bitrate_bps
            << ", max=" << encoder_config_.max_bitrate_bps
            << "). The app bitrate limits will be used.";
      }
    }
  }

  ApplyEncoderBitrateLimitsIfSingleActiveStream(
      GetEncoderInfoWithBitrateLimitUpdate(
          encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_),
      encoder_config_.simulcast_layers, &streams);

  VideoCodec codec;
  if (!VideoCodecInitializer::SetupCodec(encoder_config_, streams, &codec)) {
    RTC_LOG(LS_ERROR) << "Failed to create encoder configuration.";
  }

  if (encoder_config_.codec_type == kVideoCodecVP9) {
    // Spatial layers configuration might impose some parity restrictions,
    // thus some cropping might be needed.
    crop_width_ = last_frame_info_->width - codec.width;
    crop_height_ = last_frame_info_->height - codec.height;
    ApplyVp9BitrateLimits(GetEncoderInfoWithBitrateLimitUpdate(
                              encoder_->GetEncoderInfo(), encoder_config_,
                              default_limits_allowed_),
                          encoder_config_, &codec);
  }

  // Log the resulting per-stream (and, for VP9, per-spatial-layer) setup.
  char log_stream_buf[4 * 1024];
  rtc::SimpleStringBuilder log_stream(log_stream_buf);
  log_stream << "ReconfigureEncoder:\n";
  log_stream << "Simulcast streams:\n";
  for (size_t i = 0; i < codec.numberOfSimulcastStreams; ++i) {
    log_stream << i << ": " << codec.simulcastStream[i].width << "x"
               << codec.simulcastStream[i].height
               << " fps: " << codec.simulcastStream[i].maxFramerate
               << " min_kbps: " << codec.simulcastStream[i].minBitrate
               << " target_kbps: " << codec.simulcastStream[i].targetBitrate
               << " max_kbps: " << codec.simulcastStream[i].maxBitrate
               << " max_fps: " << codec.simulcastStream[i].maxFramerate
               << " max_qp: " << codec.simulcastStream[i].qpMax
               << " num_tl: " << codec.simulcastStream[i].numberOfTemporalLayers
               << " active: "
               << (codec.simulcastStream[i].active ? "true" : "false") << "\n";
  }
  if (encoder_config_.codec_type == kVideoCodecVP9) {
    size_t num_spatial_layers = codec.VP9()->numberOfSpatialLayers;
    log_stream << "Spatial layers:\n";
    for (size_t i = 0; i < num_spatial_layers; ++i) {
      log_stream << i << ": " << codec.spatialLayers[i].width << "x"
                 << codec.spatialLayers[i].height
                 << " fps: " << codec.spatialLayers[i].maxFramerate
                 << " min_kbps: " << codec.spatialLayers[i].minBitrate
                 << " target_kbps: " << codec.spatialLayers[i].targetBitrate
                 << " max_kbps: " << codec.spatialLayers[i].maxBitrate
                 << " max_qp: " << codec.spatialLayers[i].qpMax
                 << " num_tl: " << codec.spatialLayers[i].numberOfTemporalLayers
                 << " active: "
                 << (codec.spatialLayers[i].active ? "true" : "false") << "\n";
    }
  }
  RTC_LOG(LS_INFO) << log_stream.str();

  // Clamp start bitrate into [minBitrate, maxBitrate] (values are in kbps).
  codec.startBitrate = std::max(encoder_target_bitrate_bps_.value_or(0) / 1000,
                                codec.minBitrate);
  codec.startBitrate = std::min(codec.startBitrate, codec.maxBitrate);
  codec.expect_encode_from_texture = last_frame_info_->is_texture;
  // Make sure the start bit rate is sane...
  RTC_DCHECK_LE(codec.startBitrate, 1000000);
  max_framerate_ = codec.maxFramerate;

  // Inform source about max configured framerate.
  int max_framerate = 0;
  for (const auto& stream : streams) {
    max_framerate = std::max(stream.max_framerate, max_framerate);
  }

  // The resolutions that we're actually encoding with.
  std::vector<rtc::VideoSinkWants::FrameSize> encoder_resolutions;
  // TODO(hbos): For the case of SVC, also make use of |codec.spatialLayers|.
  // For now, SVC layers are handled by the VP9 encoder.
  for (const auto& simulcastStream : codec.simulcastStream) {
    if (!simulcastStream.active)
      continue;
    encoder_resolutions.emplace_back(simulcastStream.width,
                                     simulcastStream.height);
  }
  // Push framerate / alignment / resolution constraints to the source, but
  // only when something actually changed.
  main_queue_->PostTask(ToQueuedTask(
      task_safety_, [this, max_framerate, alignment,
                     encoder_resolutions = std::move(encoder_resolutions)]() {
        RTC_DCHECK_RUN_ON(main_queue_);
        if (max_framerate !=
                video_source_sink_controller_.frame_rate_upper_limit() ||
            alignment != video_source_sink_controller_.resolution_alignment() ||
            encoder_resolutions !=
                video_source_sink_controller_.resolutions()) {
          video_source_sink_controller_.SetFrameRateUpperLimit(max_framerate);
          video_source_sink_controller_.SetResolutionAlignment(alignment);
          video_source_sink_controller_.SetResolutions(
              std::move(encoder_resolutions));
          video_source_sink_controller_.PushSourceSinkSettings();
        }
      }));

  if (codec.maxBitrate == 0) {
    // max is one bit per pixel
    codec.maxBitrate =
        (static_cast<int>(codec.height) * static_cast<int>(codec.width) *
         static_cast<int>(codec.maxFramerate)) /
        1000;
    if (codec.startBitrate > codec.maxBitrate) {
      // But if the user tries to set a higher start bit rate we will
      // increase the max accordingly.
      codec.maxBitrate = codec.startBitrate;
    }
  }

  if (codec.startBitrate > codec.maxBitrate) {
    codec.startBitrate = codec.maxBitrate;
  }

  rate_allocator_ =
      settings_.bitrate_allocator_factory->CreateVideoBitrateAllocator(codec);
  rate_allocator_->SetLegacyConferenceMode(
      encoder_config_.legacy_conference_mode);

  // Reset (release existing encoder) if one exists and anything except
  // start bitrate or max framerate has changed.
  if (!encoder_reset_required) {
    encoder_reset_required = RequiresEncoderReset(
        send_codec_, codec, was_encode_called_since_last_initialization_);
  }
  send_codec_ = codec;

  // Keep the same encoder, as long as the video_format is unchanged.
  // Encoder creation block is split in two since EncoderInfo needed to start
  // CPU adaptation with the correct settings should be polled after
  // encoder_->InitEncode().
  bool success = true;
  if (encoder_reset_required) {
    ReleaseEncoder();
    const size_t max_data_payload_length = max_data_payload_length_ > 0
                                               ? max_data_payload_length_
                                               : kDefaultPayloadSize;
    if (encoder_->InitEncode(
            &send_codec_,
            VideoEncoder::Settings(settings_.capabilities, number_of_cores_,
                                   max_data_payload_length)) != 0) {
      RTC_LOG(LS_ERROR) << "Failed to initialize the encoder associated with "
                           "codec type: "
                        << CodecTypeToPayloadString(send_codec_.codecType)
                        << " (" << send_codec_.codecType << ")";
      ReleaseEncoder();
      success = false;
    } else {
      encoder_initialized_ = true;
      encoder_->RegisterEncodeCompleteCallback(this);
      frame_encode_metadata_writer_.OnEncoderInit(send_codec_,
                                                  HasInternalSource());
      // Request key frames on every simulcast stream after (re)initialization.
      next_frame_types_.clear();
      next_frame_types_.resize(
          std::max(static_cast<int>(codec.numberOfSimulcastStreams), 1),
          VideoFrameType::kVideoFrameKey);
    }

    frame_encode_metadata_writer_.Reset();
    last_encode_info_ms_ = absl::nullopt;
    was_encode_called_since_last_initialization_ = false;
  }

  // Inform dependents of updated encoder settings.
  OnEncoderSettingsChanged();

  if (success) {
    RTC_LOG(LS_VERBOSE) << " max bitrate " << codec.maxBitrate
                        << " start bitrate " << codec.startBitrate
                        << " max frame rate " << codec.maxFramerate
                        << " max payload size " << max_data_payload_length_;
  } else {
    RTC_LOG(LS_ERROR) << "Failed to configure encoder.";
    rate_allocator_ = nullptr;
  }

  if (pending_encoder_creation_) {
    stream_resource_manager_.ConfigureEncodeUsageResource();
    pending_encoder_creation_ = false;
  }

  // Determine the number of temporal layers; used below to force-disable the
  // frame dropper for layered screenshare.
  int num_layers;
  if (codec.codecType == kVideoCodecVP8) {
    num_layers = codec.VP8()->numberOfTemporalLayers;
  } else if (codec.codecType == kVideoCodecVP9) {
    num_layers = codec.VP9()->numberOfTemporalLayers;
  } else if (codec.codecType == kVideoCodecH264) {
    num_layers = codec.H264()->numberOfTemporalLayers;
  } else if (codec.codecType == kVideoCodecGeneric &&
             codec.numberOfSimulcastStreams > 0) {
    // This is mainly for unit testing, disabling frame dropping.
    // TODO(sprang): Add a better way to disable frame dropping.
    num_layers = codec.simulcastStream[0].numberOfTemporalLayers;
  } else {
    num_layers = 1;
  }

  frame_dropper_.Reset();
  frame_dropper_.SetRates(codec.startBitrate, max_framerate_);
  // Force-disable frame dropper if either:
  // * We have screensharing with layers.
  // * "WebRTC-FrameDropper" field trial is "Disabled".
  force_disable_frame_dropper_ =
      field_trial::IsDisabled(kFrameDropperFieldTrial) ||
      (num_layers > 1 && codec.mode == VideoCodecMode::kScreensharing);

  VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo();
  if (rate_control_settings_.UseEncoderBitrateAdjuster()) {
    bitrate_adjuster_ = std::make_unique<EncoderBitrateAdjuster>(codec);
    bitrate_adjuster_->OnEncoderInfo(info);
  }

  if (rate_allocator_ && last_encoder_rate_settings_) {
    // We have a new rate allocator instance and already configured target
    // bitrate. Update the rate allocation and notify observers.
    // We must invalidate the last_encoder_rate_settings_ to ensure
    // the changes get propagated to all listeners.
    EncoderRateSettings rate_settings = *last_encoder_rate_settings_;
    last_encoder_rate_settings_.reset();
    rate_settings.rate_control.framerate_fps = GetInputFramerateFps();

    SetEncoderRates(UpdateBitrateAllocation(rate_settings));
  }

  encoder_stats_observer_->OnEncoderReconfigured(encoder_config_, streams);

  pending_encoder_reconfiguration_ = false;

  bool is_svc = false;
  // Set min_bitrate_bps, max_bitrate_bps, and max padding bit rate for VP9
  // and leave only one stream containing all necessary information.
  if (encoder_config_.codec_type == kVideoCodecVP9) {
    // Lower max bitrate to the level codec actually can produce.
    streams[0].max_bitrate_bps =
        std::min(streams[0].max_bitrate_bps,
                 SvcRateAllocator::GetMaxBitrate(codec).bps<int>());
    streams[0].min_bitrate_bps = codec.spatialLayers[0].minBitrate * 1000;
    // target_bitrate_bps specifies the maximum padding bitrate.
    streams[0].target_bitrate_bps =
        SvcRateAllocator::GetPaddingBitrate(codec).bps<int>();
    streams[0].width = streams.back().width;
    streams[0].height = streams.back().height;
    is_svc = codec.VP9()->numberOfSpatialLayers > 1;
    streams.resize(1);
  }

  sink_->OnEncoderConfigurationChanged(
      std::move(streams), is_svc, encoder_config_.content_type,
      encoder_config_.min_transmit_bitrate_bps);

  stream_resource_manager_.ConfigureQualityScaler(info);
}
1222
OnEncoderSettingsChanged()1223 void VideoStreamEncoder::OnEncoderSettingsChanged() {
1224 EncoderSettings encoder_settings(
1225 GetEncoderInfoWithBitrateLimitUpdate(
1226 encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_),
1227 encoder_config_.Copy(), send_codec_);
1228 stream_resource_manager_.SetEncoderSettings(encoder_settings);
1229 input_state_provider_.OnEncoderSettingsChanged(encoder_settings);
1230 bool is_screenshare = encoder_settings.encoder_config().content_type ==
1231 VideoEncoderConfig::ContentType::kScreen;
1232 degradation_preference_manager_->SetIsScreenshare(is_screenshare);
1233 }
1234
// Entry point for frames delivered by the video source. May run on an
// arbitrary thread, but never concurrently (enforced by the race checker).
// Normalizes capture timestamps, drops frames with non-increasing NTP times,
// then posts the frame to the encoder queue for (possible) encoding.
void VideoStreamEncoder::OnFrame(const VideoFrame& video_frame) {
  RTC_DCHECK_RUNS_SERIALIZED(&incoming_frame_race_checker_);
  // Copy so timestamps can be adjusted without mutating the caller's frame.
  VideoFrame incoming_frame = video_frame;

  // Local time in webrtc time base.
  Timestamp now = clock_->CurrentTime();

  // In some cases, e.g., when the frame from decoder is fed to encoder,
  // the timestamp may be set to the future. As the encoding pipeline assumes
  // capture time to be less than present time, we should reset the capture
  // timestamps here. Otherwise there may be issues with RTP send stream.
  if (incoming_frame.timestamp_us() > now.us())
    incoming_frame.set_timestamp_us(now.us());

  // Capture time may come from clock with an offset and drift from clock_.
  int64_t capture_ntp_time_ms;
  if (video_frame.ntp_time_ms() > 0) {
    capture_ntp_time_ms = video_frame.ntp_time_ms();
  } else if (video_frame.render_time_ms() != 0) {
    capture_ntp_time_ms = video_frame.render_time_ms() + delta_ntp_internal_ms_;
  } else {
    capture_ntp_time_ms = now.ms() + delta_ntp_internal_ms_;
  }
  incoming_frame.set_ntp_time_ms(capture_ntp_time_ms);

  // Convert NTP time, in ms, to RTP timestamp.
  const int kMsToRtpTimestamp = 90;
  incoming_frame.set_timestamp(
      kMsToRtpTimestamp * static_cast<uint32_t>(incoming_frame.ntp_time_ms()));

  if (incoming_frame.ntp_time_ms() <= last_captured_timestamp_) {
    // We don't allow the same capture time for two frames, drop this one.
    RTC_LOG(LS_WARNING) << "Same/old NTP timestamp ("
                        << incoming_frame.ntp_time_ms()
                        << " <= " << last_captured_timestamp_
                        << ") for incoming frame. Dropping.";
    // Still accumulate the dropped frame's update region so a later encoded
    // frame refreshes the correct area.
    encoder_queue_.PostTask([this, incoming_frame]() {
      RTC_DCHECK_RUN_ON(&encoder_queue_);
      accumulated_update_rect_.Union(incoming_frame.update_rect());
      accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect();
    });
    return;
  }

  // Periodically emit frame statistics (interval: kFrameLogIntervalMs).
  bool log_stats = false;
  if (now.ms() - last_frame_log_ms_ > kFrameLogIntervalMs) {
    last_frame_log_ms_ = now.ms();
    log_stats = true;
  }

  last_captured_timestamp_ = incoming_frame.ntp_time_ms();

  int64_t post_time_us = clock_->CurrentTime().us();
  ++posted_frames_waiting_for_encode_;

  encoder_queue_.PostTask(
      [this, incoming_frame, post_time_us, log_stats]() {
        RTC_DCHECK_RUN_ON(&encoder_queue_);
        encoder_stats_observer_->OnIncomingFrame(incoming_frame.width(),
                                                 incoming_frame.height());
        ++captured_frame_count_;
        // fetch_sub() returns the value *before* the decrement, i.e. the
        // number of posted frames including this one.
        const int posted_frames_waiting_for_encode =
            posted_frames_waiting_for_encode_.fetch_sub(1);
        RTC_DCHECK_GT(posted_frames_waiting_for_encode, 0);
        CheckForAnimatedContent(incoming_frame, post_time_us);
        // Congestion window pushback: drop one frame per configured interval.
        bool cwnd_frame_drop =
            cwnd_frame_drop_interval_ &&
            (cwnd_frame_counter_++ % cwnd_frame_drop_interval_.value() == 0);
        if (posted_frames_waiting_for_encode == 1 && !cwnd_frame_drop) {
          MaybeEncodeVideoFrame(incoming_frame, post_time_us);
        } else {
          if (cwnd_frame_drop) {
            // Frame drop by congestion window pushback. Do not encode this
            // frame.
            ++dropped_frame_cwnd_pushback_count_;
            encoder_stats_observer_->OnFrameDropped(
                VideoStreamEncoderObserver::DropReason::kCongestionWindow);
          } else {
            // There is a newer frame in flight. Do not encode this frame.
            RTC_LOG(LS_VERBOSE)
                << "Incoming frame dropped due to that the encoder is blocked.";
            ++dropped_frame_encoder_block_count_;
            encoder_stats_observer_->OnFrameDropped(
                VideoStreamEncoderObserver::DropReason::kEncoderQueue);
          }
          accumulated_update_rect_.Union(incoming_frame.update_rect());
          accumulated_update_rect_is_valid_ &= incoming_frame.has_update_rect();
        }
        if (log_stats) {
          RTC_LOG(LS_INFO) << "Number of frames: captured "
                           << captured_frame_count_
                           << ", dropped (due to congestion window pushback) "
                           << dropped_frame_cwnd_pushback_count_
                           << ", dropped (due to encoder blocked) "
                           << dropped_frame_encoder_block_count_
                           << ", interval_ms " << kFrameLogIntervalMs;
          captured_frame_count_ = 0;
          dropped_frame_cwnd_pushback_count_ = 0;
          dropped_frame_encoder_block_count_ = 0;
        }
      });
}
1337
OnDiscardedFrame()1338 void VideoStreamEncoder::OnDiscardedFrame() {
1339 encoder_stats_observer_->OnFrameDropped(
1340 VideoStreamEncoderObserver::DropReason::kSource);
1341 }
1342
EncoderPaused() const1343 bool VideoStreamEncoder::EncoderPaused() const {
1344 RTC_DCHECK_RUN_ON(&encoder_queue_);
1345 // Pause video if paused by caller or as long as the network is down or the
1346 // pacer queue has grown too large in buffered mode.
1347 // If the pacer queue has grown too large or the network is down,
1348 // |last_encoder_rate_settings_->encoder_target| will be 0.
1349 return !last_encoder_rate_settings_ ||
1350 last_encoder_rate_settings_->encoder_target == DataRate::Zero();
1351 }
1352
TraceFrameDropStart()1353 void VideoStreamEncoder::TraceFrameDropStart() {
1354 RTC_DCHECK_RUN_ON(&encoder_queue_);
1355 // Start trace event only on the first frame after encoder is paused.
1356 if (!encoder_paused_and_dropped_frame_) {
1357 TRACE_EVENT_ASYNC_BEGIN0("webrtc", "EncoderPaused", this);
1358 }
1359 encoder_paused_and_dropped_frame_ = true;
1360 }
1361
TraceFrameDropEnd()1362 void VideoStreamEncoder::TraceFrameDropEnd() {
1363 RTC_DCHECK_RUN_ON(&encoder_queue_);
1364 // End trace event on first frame after encoder resumes, if frame was dropped.
1365 if (encoder_paused_and_dropped_frame_) {
1366 TRACE_EVENT_ASYNC_END0("webrtc", "EncoderPaused", this);
1367 }
1368 encoder_paused_and_dropped_frame_ = false;
1369 }
1370
1371 VideoStreamEncoder::EncoderRateSettings
UpdateBitrateAllocation(const EncoderRateSettings & rate_settings)1372 VideoStreamEncoder::UpdateBitrateAllocation(
1373 const EncoderRateSettings& rate_settings) {
1374 VideoBitrateAllocation new_allocation;
1375 // Only call allocators if bitrate > 0 (ie, not suspended), otherwise they
1376 // might cap the bitrate to the min bitrate configured.
1377 if (rate_allocator_ && rate_settings.encoder_target > DataRate::Zero()) {
1378 new_allocation = rate_allocator_->Allocate(VideoBitrateAllocationParameters(
1379 rate_settings.encoder_target, rate_settings.stable_encoder_target,
1380 rate_settings.rate_control.framerate_fps));
1381 }
1382
1383 EncoderRateSettings new_rate_settings = rate_settings;
1384 new_rate_settings.rate_control.target_bitrate = new_allocation;
1385 new_rate_settings.rate_control.bitrate = new_allocation;
1386 // VideoBitrateAllocator subclasses may allocate a bitrate higher than the
1387 // target in order to sustain the min bitrate of the video codec. In this
1388 // case, make sure the bandwidth allocation is at least equal the allocation
1389 // as that is part of the document contract for that field.
1390 new_rate_settings.rate_control.bandwidth_allocation =
1391 std::max(new_rate_settings.rate_control.bandwidth_allocation,
1392 DataRate::BitsPerSec(
1393 new_rate_settings.rate_control.bitrate.get_sum_bps()));
1394
1395 if (bitrate_adjuster_) {
1396 VideoBitrateAllocation adjusted_allocation =
1397 bitrate_adjuster_->AdjustRateAllocation(new_rate_settings.rate_control);
1398 RTC_LOG(LS_VERBOSE) << "Adjusting allocation, fps = "
1399 << rate_settings.rate_control.framerate_fps << ", from "
1400 << new_allocation.ToString() << ", to "
1401 << adjusted_allocation.ToString();
1402 new_rate_settings.rate_control.bitrate = adjusted_allocation;
1403 }
1404
1405 return new_rate_settings;
1406 }
1407
GetInputFramerateFps()1408 uint32_t VideoStreamEncoder::GetInputFramerateFps() {
1409 const uint32_t default_fps = max_framerate_ != -1 ? max_framerate_ : 30;
1410 absl::optional<uint32_t> input_fps =
1411 input_framerate_.Rate(clock_->TimeInMilliseconds());
1412 if (!input_fps || *input_fps == 0) {
1413 return default_fps;
1414 }
1415 return *input_fps;
1416 }
1417
// Applies |rate_settings| to the encoder and notifies observers. SetRates()
// is only invoked when the rate-control parameters actually changed, and the
// layer-allocation / bitrate-allocation callbacks are gated on
// |allocation_cb_type_|.
void VideoStreamEncoder::SetEncoderRates(
    const EncoderRateSettings& rate_settings) {
  RTC_DCHECK_GT(rate_settings.rate_control.framerate_fps, 0.0);
  bool rate_control_changed =
      (!last_encoder_rate_settings_.has_value() ||
       last_encoder_rate_settings_->rate_control != rate_settings.rate_control);
  // For layer allocation signal we care only about the target bitrate (not the
  // adjusted one) and the target fps.
  bool layer_allocation_changed =
      !last_encoder_rate_settings_.has_value() ||
      last_encoder_rate_settings_->rate_control.target_bitrate !=
          rate_settings.rate_control.target_bitrate ||
      last_encoder_rate_settings_->rate_control.framerate_fps !=
          rate_settings.rate_control.framerate_fps;

  if (last_encoder_rate_settings_ != rate_settings) {
    last_encoder_rate_settings_ = rate_settings;
  }

  // Nothing to apply yet; the cached settings above are used once an encoder
  // is created.
  if (!encoder_) {
    return;
  }

  // |bitrate_allocation| is 0 it means that the network is down or the send
  // pacer is full. We currently only report this if the encoder has an internal
  // source. If the encoder does not have an internal source, higher levels
  // are expected to not call AddVideoFrame. We do this since it is unclear
  // how current encoder implementations behave when given a zero target
  // bitrate.
  // TODO(perkj): Make sure all known encoder implementations handle zero
  // target bitrate and remove this check.
  if (!HasInternalSource() &&
      rate_settings.rate_control.bitrate.get_sum_bps() == 0) {
    return;
  }

  if (rate_control_changed) {
    encoder_->SetRates(rate_settings.rate_control);

    encoder_stats_observer_->OnBitrateAllocationUpdated(
        send_codec_, rate_settings.rate_control.bitrate);
    frame_encode_metadata_writer_.OnSetRates(
        rate_settings.rate_control.bitrate,
        static_cast<uint32_t>(rate_settings.rate_control.framerate_fps + 0.5));
    stream_resource_manager_.SetEncoderRates(rate_settings.rate_control);
    if (layer_allocation_changed &&
        allocation_cb_type_ ==
            BitrateAllocationCallbackType::kVideoLayersAllocation) {
      sink_->OnVideoLayersAllocationUpdated(CreateVideoLayersAllocation(
          send_codec_, rate_settings.rate_control, encoder_->GetEncoderInfo()));
    }
  }
  if ((allocation_cb_type_ ==
       BitrateAllocationCallbackType::kVideoBitrateAllocation) ||
      (encoder_config_.content_type ==
           VideoEncoderConfig::ContentType::kScreen &&
       allocation_cb_type_ == BitrateAllocationCallbackType::
                                  kVideoBitrateAllocationWhenScreenSharing)) {
    sink_->OnBitrateAllocationUpdated(
        // Update allocation according to info from encoder. An encoder may
        // choose to not use all layers due to for example HW.
        UpdateAllocationFromEncoderInfo(
            rate_settings.rate_control.target_bitrate,
            encoder_->GetEncoderInfo()));
  }
}
1484
// Runs on |encoder_queue_|. Gatekeeper between frame capture and the actual
// encode: detects frame-parameter changes (triggering encoder
// reconfiguration), refreshes encoder rates, and applies the various
// frame-drop conditions (too large for target bitrate, encoder paused,
// media-optimization frame dropper) before handing the frame to
// EncodeVideoFrame(). Frames that cannot be encoded right now may be parked
// in |pending_frame_| to be retried later.
void VideoStreamEncoder::MaybeEncodeVideoFrame(const VideoFrame& video_frame,
                                               int64_t time_when_posted_us) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);
  input_state_provider_.OnFrameSizeObserved(video_frame.size());

  // A change in resolution or buffer type (texture vs. memory) requires a
  // full encoder reconfiguration before this frame can be encoded.
  if (!last_frame_info_ || video_frame.width() != last_frame_info_->width ||
      video_frame.height() != last_frame_info_->height ||
      video_frame.is_texture() != last_frame_info_->is_texture) {
    pending_encoder_reconfiguration_ = true;
    last_frame_info_ = VideoFrameInfo(video_frame.width(), video_frame.height(),
                                      video_frame.is_texture());
    RTC_LOG(LS_INFO) << "Video frame parameters changed: dimensions="
                     << last_frame_info_->width << "x"
                     << last_frame_info_->height
                     << ", texture=" << last_frame_info_->is_texture << ".";
    // Force full frame update, since resolution has changed.
    accumulated_update_rect_ =
        VideoFrame::UpdateRect{0, 0, video_frame.width(), video_frame.height()};
  }

  // We have to create the encoder before the frame drop logic,
  // because the latter depends on encoder_->GetScalingSettings.
  // According to the testcase
  // InitialFrameDropOffWhenEncoderDisabledScaling, the return value
  // from GetScalingSettings should enable or disable the frame drop.

  // Update input frame rate before we start using it. If we update it after
  // any potential frame drop we are going to artificially increase frame sizes.
  // Poll the rate before updating, otherwise we risk the rate being estimated
  // a little too high at the start of the call when the window is small.
  uint32_t framerate_fps = GetInputFramerateFps();
  input_framerate_.Update(1u, clock_->TimeInMilliseconds());

  int64_t now_ms = clock_->TimeInMilliseconds();
  if (pending_encoder_reconfiguration_) {
    ReconfigureEncoder();
    last_parameters_update_ms_.emplace(now_ms);
  } else if (!last_parameters_update_ms_ ||
             now_ms - *last_parameters_update_ms_ >=
                 kParameterUpdateIntervalMs) {
    // Periodic rate refresh (at most once per kParameterUpdateIntervalMs):
    // push the current input framerate into the encoder rate settings.
    if (last_encoder_rate_settings_) {
      // Clone rate settings before update, so that SetEncoderRates() will
      // actually detect the change between the input and
      // |last_encoder_rate_settings_|, triggering the call to SetRate() on the
      // encoder.
      EncoderRateSettings new_rate_settings = *last_encoder_rate_settings_;
      new_rate_settings.rate_control.framerate_fps =
          static_cast<double>(framerate_fps);
      SetEncoderRates(UpdateBitrateAllocation(new_rate_settings));
    }
    last_parameters_update_ms_.emplace(now_ms);
  }

  // Because pending frame will be dropped in any case, we need to
  // remember its updated region.
  if (pending_frame_) {
    encoder_stats_observer_->OnFrameDropped(
        VideoStreamEncoderObserver::DropReason::kEncoderQueue);
    accumulated_update_rect_.Union(pending_frame_->update_rect());
    accumulated_update_rect_is_valid_ &= pending_frame_->has_update_rect();
  }

  // Initial frame-drop: the frame's resolution is too large for the current
  // target bitrate. Park the frame (unless it is a native buffer) and bail.
  if (DropDueToSize(video_frame.size())) {
    RTC_LOG(LS_INFO) << "Dropping frame. Too large for target bitrate.";
    stream_resource_manager_.OnFrameDroppedDueToSize();
    // Storing references to a native buffer risks blocking frame capture.
    if (video_frame.video_frame_buffer()->type() !=
        VideoFrameBuffer::Type::kNative) {
      pending_frame_ = video_frame;
      pending_frame_post_time_us_ = time_when_posted_us;
    } else {
      // Ensure that any previously stored frame is dropped.
      pending_frame_.reset();
      accumulated_update_rect_.Union(video_frame.update_rect());
      accumulated_update_rect_is_valid_ &= video_frame.has_update_rect();
    }
    return;
  }
  stream_resource_manager_.OnMaybeEncodeFrame();

  // Encoder paused (e.g. zero target bitrate): park or drop the frame, again
  // avoiding holding on to native buffers.
  if (EncoderPaused()) {
    // Storing references to a native buffer risks blocking frame capture.
    if (video_frame.video_frame_buffer()->type() !=
        VideoFrameBuffer::Type::kNative) {
      if (pending_frame_)
        TraceFrameDropStart();
      pending_frame_ = video_frame;
      pending_frame_post_time_us_ = time_when_posted_us;
    } else {
      // Ensure that any previously stored frame is dropped.
      pending_frame_.reset();
      TraceFrameDropStart();
      accumulated_update_rect_.Union(video_frame.update_rect());
      accumulated_update_rect_is_valid_ &= video_frame.has_update_rect();
    }
    return;
  }

  pending_frame_.reset();

  frame_dropper_.Leak(framerate_fps);
  // Frame dropping is enabled iff frame dropping is not force-disabled, and
  // rate controller is not trusted.
  const bool frame_dropping_enabled =
      !force_disable_frame_dropper_ &&
      !encoder_info_.has_trusted_rate_controller;
  frame_dropper_.Enable(frame_dropping_enabled);
  if (frame_dropping_enabled && frame_dropper_.DropFrame()) {
    RTC_LOG(LS_VERBOSE)
        << "Drop Frame: "
           "target bitrate "
        << (last_encoder_rate_settings_
                ? last_encoder_rate_settings_->encoder_target.bps()
                : 0)
        << ", input frame rate " << framerate_fps;
    OnDroppedFrame(
        EncodedImageCallback::DropReason::kDroppedByMediaOptimizations);
    accumulated_update_rect_.Union(video_frame.update_rect());
    accumulated_update_rect_is_valid_ &= video_frame.has_update_rect();
    return;
  }

  // All drop conditions passed; encode the frame now.
  EncodeVideoFrame(video_frame, time_when_posted_us);
}
1609
// Runs on |encoder_queue_|. Performs the actual encode: refreshes cached
// encoder info, crops/scales the frame to the configured encoder dimensions
// if needed, fixes up the update rect, and calls encoder_->Encode(). On
// encoder failure it may request an encoder switch/fallback.
void VideoStreamEncoder::EncodeVideoFrame(const VideoFrame& video_frame,
                                          int64_t time_when_posted_us) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  // If the encoder fail we can't continue to encode frames. When this happens
  // the WebrtcVideoSender is notified and the whole VideoSendStream is
  // recreated.
  if (encoder_failed_)
    return;

  TraceFrameDropEnd();

  // Encoder metadata needs to be updated before encode complete callback.
  VideoEncoder::EncoderInfo info = encoder_->GetEncoderInfo();
  if (info.implementation_name != encoder_info_.implementation_name) {
    encoder_stats_observer_->OnEncoderImplementationChanged(
        info.implementation_name);
    if (bitrate_adjuster_) {
      // Encoder implementation changed, reset overshoot detector states.
      bitrate_adjuster_->Reset();
    }
  }

  if (encoder_info_ != info) {
    OnEncoderSettingsChanged();
    stream_resource_manager_.ConfigureEncodeUsageResource();
    RTC_LOG(LS_INFO) << "Encoder settings changed from "
                     << encoder_info_.ToString() << " to " << info.ToString();
  }

  // If any spatial layer's fps allocation changed, let the bitrate adjuster
  // know so it can re-derive its per-layer state.
  if (bitrate_adjuster_) {
    for (size_t si = 0; si < kMaxSpatialLayers; ++si) {
      if (info.fps_allocation[si] != encoder_info_.fps_allocation[si]) {
        bitrate_adjuster_->OnEncoderInfo(info);
        break;
      }
    }
  }
  encoder_info_ = info;
  last_encode_info_ms_ = clock_->TimeInMilliseconds();

  VideoFrame out_frame(video_frame);
  // Crop or scale the frame if needed. Dimension may be reduced to fit encoder
  // requirements, e.g. some encoders may require them to be divisible by 4.
  if ((crop_width_ > 0 || crop_height_ > 0) &&
      (out_frame.video_frame_buffer()->type() !=
           VideoFrameBuffer::Type::kNative ||
       !info.supports_native_handle)) {
    int cropped_width = video_frame.width() - crop_width_;
    int cropped_height = video_frame.height() - crop_height_;
    rtc::scoped_refptr<VideoFrameBuffer> cropped_buffer;
    // TODO(ilnik): Remove scaling if cropping is too big, as it should never
    // happen after SinkWants signaled correctly from ReconfigureEncoder.
    VideoFrame::UpdateRect update_rect = video_frame.update_rect();
    if (crop_width_ < 4 && crop_height_ < 4) {
      // The difference is small, crop without scaling.
      cropped_buffer = video_frame.video_frame_buffer()->CropAndScale(
          crop_width_ / 2, crop_height_ / 2, cropped_width, cropped_height,
          cropped_width, cropped_height);
      // Shift the update rect to compensate for the centered crop offset,
      // then clip it to the new frame bounds.
      update_rect.offset_x -= crop_width_ / 2;
      update_rect.offset_y -= crop_height_ / 2;
      update_rect.Intersect(
          VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height});

    } else {
      // The difference is large, scale it.
      cropped_buffer = video_frame.video_frame_buffer()->Scale(cropped_width,
                                                               cropped_height);
      if (!update_rect.IsEmpty()) {
        // Since we can't reason about pixels after scaling, we invalidate whole
        // picture, if anything changed.
        update_rect =
            VideoFrame::UpdateRect{0, 0, cropped_width, cropped_height};
      }
    }
    if (!cropped_buffer) {
      RTC_LOG(LS_ERROR) << "Cropping and scaling frame failed, dropping frame.";
      return;
    }

    out_frame.set_video_frame_buffer(cropped_buffer);
    out_frame.set_update_rect(update_rect);
    out_frame.set_ntp_time_ms(video_frame.ntp_time_ms());
    // Since accumulated_update_rect_ is constructed before cropping,
    // we can't trust it. If any changes were pending, we invalidate whole
    // frame here.
    if (!accumulated_update_rect_.IsEmpty()) {
      accumulated_update_rect_ =
          VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()};
      accumulated_update_rect_is_valid_ = false;
    }
  }

  // Merge update-rect state accumulated from previously dropped frames into
  // the outgoing frame, then reset the accumulator.
  if (!accumulated_update_rect_is_valid_) {
    out_frame.clear_update_rect();
  } else if (!accumulated_update_rect_.IsEmpty() &&
             out_frame.has_update_rect()) {
    accumulated_update_rect_.Union(out_frame.update_rect());
    accumulated_update_rect_.Intersect(
        VideoFrame::UpdateRect{0, 0, out_frame.width(), out_frame.height()});
    out_frame.set_update_rect(accumulated_update_rect_);
    accumulated_update_rect_.MakeEmptyUpdate();
  }
  accumulated_update_rect_is_valid_ = true;

  TRACE_EVENT_ASYNC_STEP0("webrtc", "Video", video_frame.render_time_ms(),
                          "Encode");

  stream_resource_manager_.OnEncodeStarted(out_frame, time_when_posted_us);

  // The encoder should get the size that it expects.
  RTC_DCHECK(send_codec_.width <= out_frame.width() &&
             send_codec_.height <= out_frame.height())
      << "Encoder configured to " << send_codec_.width << "x"
      << send_codec_.height << " received a too small frame "
      << out_frame.width() << "x" << out_frame.height();

  TRACE_EVENT1("webrtc", "VCMGenericEncoder::Encode", "timestamp",
               out_frame.timestamp());

  frame_encode_metadata_writer_.OnEncodeStarted(out_frame);

  const int32_t encode_status = encoder_->Encode(out_frame, &next_frame_types_);
  was_encode_called_since_last_initialization_ = true;

  if (encode_status < 0) {
    if (encode_status == WEBRTC_VIDEO_CODEC_ENCODER_FAILURE) {
      RTC_LOG(LS_ERROR) << "Encoder failed, failing encoder format: "
                        << encoder_config_.video_format.ToString();

      // Hard encoder failure: either switch to an alternative encoder (if an
      // encoder selector is available) or request a generic fallback.
      if (settings_.encoder_switch_request_callback) {
        if (encoder_selector_) {
          if (auto encoder = encoder_selector_->OnEncoderBroken()) {
            QueueRequestEncoderSwitch(*encoder);
          }
        } else {
          encoder_failed_ = true;
          main_queue_->PostTask(ToQueuedTask(task_safety_, [this]() {
            RTC_DCHECK_RUN_ON(main_queue_);
            settings_.encoder_switch_request_callback->RequestEncoderFallback();
          }));
        }
      } else {
        RTC_LOG(LS_ERROR)
            << "Encoder failed but no encoder fallback callback is registered";
      }
    } else {
      RTC_LOG(LS_ERROR) << "Failed to encode frame. Error code: "
                        << encode_status;
    }

    return;
  }

  // Any pending key-frame request has now been serviced; go back to
  // requesting delta frames.
  for (auto& it : next_frame_types_) {
    it = VideoFrameType::kVideoFrameDelta;
  }
}
1768
// Requests a key frame for all streams on the next encode. May be called
// from any thread; re-posts itself onto |encoder_queue_| if needed. For
// encoders with an internal source, triggers an immediate dummy encode since
// no frames flow through MaybeEncodeVideoFrame() in that mode.
void VideoStreamEncoder::SendKeyFrame() {
  if (!encoder_queue_.IsCurrent()) {
    encoder_queue_.PostTask([this] { SendKeyFrame(); });
    return;
  }
  RTC_DCHECK_RUN_ON(&encoder_queue_);
  TRACE_EVENT0("webrtc", "OnKeyFrameRequest");
  RTC_DCHECK(!next_frame_types_.empty());

  // TODO(webrtc:10615): Map keyframe request to spatial layer.
  // Mark every stream as wanting a key frame on the next Encode() call.
  std::fill(next_frame_types_.begin(), next_frame_types_.end(),
            VideoFrameType::kVideoFrameKey);

  if (HasInternalSource()) {
    // Try to request the frame if we have an external encoder with
    // internal source since AddVideoFrame never will be called.

    // TODO(nisse): Used only with internal source. Delete as soon as
    // that feature is removed. The only implementation I've been able
    // to find ignores what's in the frame. With one exception: It seems
    // a few test cases, e.g.,
    // VideoSendStreamTest.VideoSendStreamStopSetEncoderRateToZero, set
    // internal_source to true and use FakeEncoder. And the latter will
    // happily encode this 1x1 frame and pass it on down the pipeline.
    if (encoder_->Encode(VideoFrame::Builder()
                             .set_video_frame_buffer(I420Buffer::Create(1, 1))
                             .set_rotation(kVideoRotation_0)
                             .set_timestamp_us(0)
                             .build(),
                         &next_frame_types_) == WEBRTC_VIDEO_CODEC_OK) {
      // Try to remove just-performed keyframe request, if stream still exists.
      std::fill(next_frame_types_.begin(), next_frame_types_.end(),
                VideoFrameType::kVideoFrameDelta);
    }
  }
}
1805
OnLossNotification(const VideoEncoder::LossNotification & loss_notification)1806 void VideoStreamEncoder::OnLossNotification(
1807 const VideoEncoder::LossNotification& loss_notification) {
1808 if (!encoder_queue_.IsCurrent()) {
1809 encoder_queue_.PostTask(
1810 [this, loss_notification] { OnLossNotification(loss_notification); });
1811 return;
1812 }
1813
1814 RTC_DCHECK_RUN_ON(&encoder_queue_);
1815 if (encoder_) {
1816 encoder_->OnLossNotification(loss_notification);
1817 }
1818 }
1819
// Called by the encoder (possibly on an encoder-internal thread) with each
// encoded frame. Fills in timing metadata, parses QP if missing, tags the
// content type with experiment/simulcast ids, forwards the image to the
// sink, and schedules post-encode bookkeeping on |encoder_queue_|.
EncodedImageCallback::Result VideoStreamEncoder::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  TRACE_EVENT_INSTANT1("webrtc", "VCMEncodedFrameCallback::Encoded",
                       "timestamp", encoded_image.Timestamp());
  const size_t spatial_idx = encoded_image.SpatialIndex().value_or(0);
  // Work on a copy so metadata can be mutated without touching the
  // encoder-owned image.
  EncodedImage image_copy(encoded_image);

  frame_encode_metadata_writer_.FillTimingInfo(spatial_idx, &image_copy);

  frame_encode_metadata_writer_.UpdateBitstream(codec_specific_info,
                                                &image_copy);

  VideoCodecType codec_type = codec_specific_info
                                  ? codec_specific_info->codecType
                                  : VideoCodecType::kVideoCodecGeneric;

  if (image_copy.qp_ < 0 && qp_parsing_allowed_) {
    // Parse encoded frame QP if that was not provided by encoder.
    image_copy.qp_ = qp_parser_
                         .Parse(codec_type, spatial_idx, image_copy.data(),
                                image_copy.size())
                         .value_or(-1);
  }

  // Piggyback ALR experiment group id and simulcast id into the content type.
  const uint8_t experiment_id =
      experiment_groups_[videocontenttypehelpers::IsScreenshare(
          image_copy.content_type_)];

  // TODO(ilnik): This will force content type extension to be present even
  // for realtime video. At the expense of miniscule overhead we will get
  // sliced receive statistics.
  RTC_CHECK(videocontenttypehelpers::SetExperimentId(&image_copy.content_type_,
                                                     experiment_id));
  // We count simulcast streams from 1 on the wire. That's why we set simulcast
  // id in content type to +1 of that is actual simulcast index. This is because
  // value 0 on the wire is reserved for 'no simulcast stream specified'.
  RTC_CHECK(videocontenttypehelpers::SetSimulcastId(
      &image_copy.content_type_, static_cast<uint8_t>(spatial_idx + 1)));

  // Currently internal quality scaler is used for VP9 instead of webrtc qp
  // scaler (in no-svc case or if only a single spatial layer is encoded).
  // It has to be explicitly detected and reported to adaptation metrics.
  // Post a task because |send_codec_| requires |encoder_queue_| lock.
  unsigned int image_width = image_copy._encodedWidth;
  unsigned int image_height = image_copy._encodedHeight;
  encoder_queue_.PostTask([this, codec_type, image_width, image_height] {
    RTC_DCHECK_RUN_ON(&encoder_queue_);
    if (codec_type == VideoCodecType::kVideoCodecVP9 &&
        send_codec_.VP9()->automaticResizeOn) {
      // Find the single active layer's expected dimensions; an encoded size
      // below them means the encoder's internal scaler kicked in.
      unsigned int expected_width = send_codec_.width;
      unsigned int expected_height = send_codec_.height;
      int num_active_layers = 0;
      for (int i = 0; i < send_codec_.VP9()->numberOfSpatialLayers; ++i) {
        if (send_codec_.spatialLayers[i].active) {
          ++num_active_layers;
          expected_width = send_codec_.spatialLayers[i].width;
          expected_height = send_codec_.spatialLayers[i].height;
        }
      }
      RTC_DCHECK_LE(num_active_layers, 1)
          << "VP9 quality scaling is enabled for "
             "SVC with several active layers.";
      encoder_stats_observer_->OnEncoderInternalScalerUpdate(
          image_width < expected_width || image_height < expected_height);
    }
  });

  // Encoded is called on whatever thread the real encoder implementation run
  // on. In the case of hardware encoders, there might be several encoders
  // running in parallel on different threads.
  encoder_stats_observer_->OnSendEncodedImage(image_copy, codec_specific_info);

  // The simulcast id is signaled in the SpatialIndex. This makes it impossible
  // to do simulcast for codecs that actually support spatial layers since we
  // can't distinguish between an actual spatial layer and a simulcast stream.
  // TODO(bugs.webrtc.org/10520): Signal the simulcast id explicitly.
  // NOTE(review): |simulcast_id| is computed here but not read anywhere below
  // in this function — looks like dead code or a lost consumer; confirm
  // against upstream before removing.
  int simulcast_id = 0;
  if (codec_specific_info &&
      (codec_specific_info->codecType == kVideoCodecVP8 ||
       codec_specific_info->codecType == kVideoCodecH264 ||
       codec_specific_info->codecType == kVideoCodecGeneric)) {
    simulcast_id = encoded_image.SpatialIndex().value_or(0);
  }

  EncodedImageCallback::Result result =
      sink_->OnEncodedImage(image_copy, codec_specific_info);

  // We are only interested in propagating the meta-data about the image, not
  // encoded data itself, to the post encode function. Since we cannot be sure
  // the pointer will still be valid when run on the task queue, set it to null.
  DataSize frame_size = DataSize::Bytes(image_copy.size());
  image_copy.ClearEncodedData();

  // Extract the temporal layer index, normalizing "no temporal index" to 0.
  int temporal_index = 0;
  if (codec_specific_info) {
    if (codec_specific_info->codecType == kVideoCodecVP9) {
      temporal_index = codec_specific_info->codecSpecific.VP9.temporal_idx;
    } else if (codec_specific_info->codecType == kVideoCodecVP8) {
      temporal_index = codec_specific_info->codecSpecific.VP8.temporalIdx;
    }
  }
  if (temporal_index == kNoTemporalIdx) {
    temporal_index = 0;
  }

  RunPostEncode(image_copy, clock_->CurrentTime().us(), temporal_index,
                frame_size);

  if (result.error == Result::OK) {
    // In case of an internal encoder running on a separate thread, the
    // decision to drop a frame might be a frame late and signaled via
    // atomic flag. This is because we can't easily wait for the worker thread
    // without risking deadlocks, eg during shutdown when the worker thread
    // might be waiting for the internal encoder threads to stop.
    if (pending_frame_drops_.load() > 0) {
      int pending_drops = pending_frame_drops_.fetch_sub(1);
      RTC_DCHECK_GT(pending_drops, 0);
      result.drop_next_frame = true;
    }
  }

  return result;
}
1945
OnDroppedFrame(DropReason reason)1946 void VideoStreamEncoder::OnDroppedFrame(DropReason reason) {
1947 switch (reason) {
1948 case DropReason::kDroppedByMediaOptimizations:
1949 encoder_stats_observer_->OnFrameDropped(
1950 VideoStreamEncoderObserver::DropReason::kMediaOptimization);
1951 break;
1952 case DropReason::kDroppedByEncoder:
1953 encoder_stats_observer_->OnFrameDropped(
1954 VideoStreamEncoderObserver::DropReason::kEncoder);
1955 break;
1956 }
1957 sink_->OnDroppedFrame(reason);
1958 encoder_queue_.PostTask([this, reason] {
1959 RTC_DCHECK_RUN_ON(&encoder_queue_);
1960 stream_resource_manager_.OnFrameDropped(reason);
1961 });
1962 }
1963
// Applies congestion-window pushback to |target_bitrate|. Returns the
// possibly-reduced bitrate. Side effect: sets |cwnd_frame_drop_interval_|
// (drop 1 frame out of every N) while pushback is active, and resets it
// otherwise.
DataRate VideoStreamEncoder::UpdateTargetBitrate(DataRate target_bitrate,
                                                 double cwnd_reduce_ratio) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);
  DataRate updated_target_bitrate = target_bitrate;

  // Drop frames when congestion window pushback ratio is larger than 1
  // percent and target bitrate is larger than codec min bitrate.
  // When target_bitrate is 0 means codec is paused, skip frame dropping.
  if (cwnd_reduce_ratio > 0.01 && target_bitrate.bps() > 0 &&
      target_bitrate.bps() > send_codec_.minBitrate * 1000) {
    // Reduce by the pushback ratio, but never push below the codec minimum.
    int reduce_bitrate_bps = std::min(
        static_cast<int>(target_bitrate.bps() * cwnd_reduce_ratio),
        static_cast<int>(target_bitrate.bps() - send_codec_.minBitrate * 1000));
    if (reduce_bitrate_bps > 0) {
      // At maximum the congestion window can drop 1/2 frames.
      cwnd_frame_drop_interval_ = std::max(
          2, static_cast<int>(target_bitrate.bps() / reduce_bitrate_bps));
      // Reduce target bitrate accordingly.
      updated_target_bitrate =
          target_bitrate - (target_bitrate / cwnd_frame_drop_interval_.value());
      return updated_target_bitrate;
    }
  }
  // No pushback in effect: clear any previous frame-drop interval.
  cwnd_frame_drop_interval_.reset();
  return updated_target_bitrate;
}
1990
// Receives updated bitrate/loss/RTT estimates from the congestion
// controller. May be called from any thread; marshalled to |encoder_queue_|
// (applying congestion-window pushback on the way). Updates encoder rates,
// suspension state, and possibly encodes a previously parked frame.
void VideoStreamEncoder::OnBitrateUpdated(DataRate target_bitrate,
                                          DataRate stable_target_bitrate,
                                          DataRate link_allocation,
                                          uint8_t fraction_lost,
                                          int64_t round_trip_time_ms,
                                          double cwnd_reduce_ratio) {
  RTC_DCHECK_GE(link_allocation, target_bitrate);
  if (!encoder_queue_.IsCurrent()) {
    // Hop to the encoder queue, folding the cwnd pushback into the target
    // bitrate before re-invoking this method there.
    encoder_queue_.PostTask([this, target_bitrate, stable_target_bitrate,
                             link_allocation, fraction_lost, round_trip_time_ms,
                             cwnd_reduce_ratio] {
      DataRate updated_target_bitrate =
          UpdateTargetBitrate(target_bitrate, cwnd_reduce_ratio);
      OnBitrateUpdated(updated_target_bitrate, stable_target_bitrate,
                       link_allocation, fraction_lost, round_trip_time_ms,
                       cwnd_reduce_ratio);
    });
    return;
  }
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  // A zero target bitrate means the stream is suspended.
  const bool video_is_suspended = target_bitrate == DataRate::Zero();
  const bool video_suspension_changed = video_is_suspended != EncoderPaused();

  if (!video_is_suspended && settings_.encoder_switch_request_callback &&
      encoder_selector_) {
    // Give the encoder selector a chance to switch encoders based on the
    // available link bitrate.
    if (auto encoder = encoder_selector_->OnAvailableBitrate(link_allocation)) {
      QueueRequestEncoderSwitch(*encoder);
    }
  }

  RTC_DCHECK(sink_) << "sink_ must be set before the encoder is active.";

  RTC_LOG(LS_VERBOSE) << "OnBitrateUpdated, bitrate " << target_bitrate.bps()
                      << " stable bitrate = " << stable_target_bitrate.bps()
                      << " link allocation bitrate = " << link_allocation.bps()
                      << " packet loss " << static_cast<int>(fraction_lost)
                      << " rtt " << round_trip_time_ms;

  if (encoder_) {
    // fraction_lost is in Q8 (0..255); convert to a [0,1) float.
    encoder_->OnPacketLossRateUpdate(static_cast<float>(fraction_lost) / 256.f);
    encoder_->OnRttUpdate(round_trip_time_ms);
  }

  uint32_t framerate_fps = GetInputFramerateFps();
  // Frame dropper takes kbps; +500 rounds to the nearest kbps.
  frame_dropper_.SetRates((target_bitrate.bps() + 500) / 1000, framerate_fps);

  EncoderRateSettings new_rate_settings{
      VideoBitrateAllocation(), static_cast<double>(framerate_fps),
      link_allocation, target_bitrate, stable_target_bitrate};
  SetEncoderRates(UpdateBitrateAllocation(new_rate_settings));

  // Keep the last non-zero target so DropDueToSize() has a usable estimate
  // even while suspended.
  if (target_bitrate.bps() != 0)
    encoder_target_bitrate_bps_ = target_bitrate.bps();

  stream_resource_manager_.SetTargetBitrate(target_bitrate);

  if (video_suspension_changed) {
    RTC_LOG(LS_INFO) << "Video suspend state changed to: "
                     << (video_is_suspended ? "suspended" : "not suspended");
    encoder_stats_observer_->OnSuspendChange(video_is_suspended);
  }
  // On resume, encode the parked frame if it is still fresh enough and not
  // too large for the new bitrate.
  if (video_suspension_changed && !video_is_suspended && pending_frame_ &&
      !DropDueToSize(pending_frame_->size())) {
    int64_t pending_time_us =
        clock_->CurrentTime().us() - pending_frame_post_time_us_;
    if (pending_time_us < kPendingFrameTimeoutMs * 1000)
      EncodeVideoFrame(*pending_frame_, pending_frame_post_time_us_);
    pending_frame_.reset();
  }
}
2062
DropDueToSize(uint32_t pixel_count) const2063 bool VideoStreamEncoder::DropDueToSize(uint32_t pixel_count) const {
2064 if (!stream_resource_manager_.DropInitialFrames() ||
2065 !encoder_target_bitrate_bps_.has_value()) {
2066 return false;
2067 }
2068
2069 bool simulcast_or_svc =
2070 (send_codec_.codecType == VideoCodecType::kVideoCodecVP9 &&
2071 send_codec_.VP9().numberOfSpatialLayers > 1) ||
2072 (send_codec_.numberOfSimulcastStreams > 1 ||
2073 encoder_config_.simulcast_layers.size() > 1);
2074
2075 if (simulcast_or_svc) {
2076 if (stream_resource_manager_.SingleActiveStreamPixels()) {
2077 pixel_count = stream_resource_manager_.SingleActiveStreamPixels().value();
2078 } else {
2079 return false;
2080 }
2081 }
2082
2083 uint32_t bitrate_bps =
2084 stream_resource_manager_.UseBandwidthAllocationBps().value_or(
2085 encoder_target_bitrate_bps_.value());
2086
2087 absl::optional<VideoEncoder::ResolutionBitrateLimits> encoder_bitrate_limits =
2088 GetEncoderInfoWithBitrateLimitUpdate(
2089 encoder_->GetEncoderInfo(), encoder_config_, default_limits_allowed_)
2090 .GetEncoderBitrateLimitsForResolution(pixel_count);
2091
2092 if (encoder_bitrate_limits.has_value()) {
2093 // Use bitrate limits provided by encoder.
2094 return bitrate_bps <
2095 static_cast<uint32_t>(encoder_bitrate_limits->min_start_bitrate_bps);
2096 }
2097
2098 if (bitrate_bps < 300000 /* qvga */) {
2099 return pixel_count > 320 * 240;
2100 } else if (bitrate_bps < 500000 /* vga */) {
2101 return pixel_count > 640 * 480;
2102 }
2103 return false;
2104 }
2105
OnVideoSourceRestrictionsUpdated(VideoSourceRestrictions restrictions,const VideoAdaptationCounters & adaptation_counters,rtc::scoped_refptr<Resource> reason,const VideoSourceRestrictions & unfiltered_restrictions)2106 void VideoStreamEncoder::OnVideoSourceRestrictionsUpdated(
2107 VideoSourceRestrictions restrictions,
2108 const VideoAdaptationCounters& adaptation_counters,
2109 rtc::scoped_refptr<Resource> reason,
2110 const VideoSourceRestrictions& unfiltered_restrictions) {
2111 RTC_DCHECK_RUN_ON(&encoder_queue_);
2112 RTC_LOG(INFO) << "Updating sink restrictions from "
2113 << (reason ? reason->Name() : std::string("<null>")) << " to "
2114 << restrictions.ToString();
2115 main_queue_->PostTask(ToQueuedTask(
2116 task_safety_, [this, restrictions = std::move(restrictions)]() {
2117 RTC_DCHECK_RUN_ON(main_queue_);
2118 video_source_sink_controller_.SetRestrictions(std::move(restrictions));
2119 video_source_sink_controller_.PushSourceSinkSettings();
2120 }));
2121 }
2122
// Post-encode bookkeeping for one encoded image: feeds the frame dropper,
// overuse detector, and bitrate adjuster. May be called from the encoder's
// callback thread; marshalled to |encoder_queue_|.
void VideoStreamEncoder::RunPostEncode(const EncodedImage& encoded_image,
                                       int64_t time_sent_us,
                                       int temporal_index,
                                       DataSize frame_size) {
  if (!encoder_queue_.IsCurrent()) {
    encoder_queue_.PostTask([this, encoded_image, time_sent_us, temporal_index,
                             frame_size] {
      RunPostEncode(encoded_image, time_sent_us, temporal_index, frame_size);
    });
    return;
  }

  RTC_DCHECK_RUN_ON(&encoder_queue_);

  // Derive the encode duration from the image's timing info, when present.
  absl::optional<int> encode_duration_us;
  if (encoded_image.timing_.flags != VideoSendTiming::kInvalid) {
    encode_duration_us =
        // TODO(nisse): Maybe use capture_time_ms_ rather than encode_start_ms_?
        TimeDelta::Millis(encoded_image.timing_.encode_finish_ms -
                          encoded_image.timing_.encode_start_ms)
            .us();
  }

  // Run post encode tasks, such as overuse detection and frame rate/drop
  // stats for internal encoders.
  const bool keyframe =
      encoded_image._frameType == VideoFrameType::kVideoFrameKey;

  if (!frame_size.IsZero()) {
    frame_dropper_.Fill(frame_size.bytes(), !keyframe);
  }

  if (HasInternalSource()) {
    // Update frame dropper after the fact for internal sources.
    input_framerate_.Update(1u, clock_->TimeInMilliseconds());
    frame_dropper_.Leak(GetInputFramerateFps());
    // Signal to encoder to drop next frame.
    if (frame_dropper_.DropFrame()) {
      // Consumed by OnEncodedImage(), which sets drop_next_frame on its
      // result.
      pending_frame_drops_.fetch_add(1);
    }
  }

  stream_resource_manager_.OnEncodeCompleted(encoded_image, time_sent_us,
                                             encode_duration_us);
  if (bitrate_adjuster_) {
    bitrate_adjuster_->OnEncodedFrame(
        frame_size, encoded_image.SpatialIndex().value_or(0), temporal_index);
  }
}
2172
HasInternalSource() const2173 bool VideoStreamEncoder::HasInternalSource() const {
2174 // TODO(sprang): Checking both info from encoder and from encoder factory
2175 // until we have deprecated and removed the encoder factory info.
2176 return codec_info_.has_internal_source || encoder_info_.has_internal_source;
2177 }
2178
ReleaseEncoder()2179 void VideoStreamEncoder::ReleaseEncoder() {
2180 if (!encoder_ || !encoder_initialized_) {
2181 return;
2182 }
2183 encoder_->Release();
2184 encoder_initialized_ = false;
2185 TRACE_EVENT0("webrtc", "VCMGenericEncoder::Release");
2186 }
2187
2188 VideoStreamEncoder::AutomaticAnimationDetectionExperiment
ParseAutomatincAnimationDetectionFieldTrial() const2189 VideoStreamEncoder::ParseAutomatincAnimationDetectionFieldTrial() const {
2190 AutomaticAnimationDetectionExperiment result;
2191
2192 result.Parser()->Parse(webrtc::field_trial::FindFullName(
2193 "WebRTC-AutomaticAnimationDetectionScreenshare"));
2194
2195 if (!result.enabled) {
2196 RTC_LOG(LS_INFO) << "Automatic animation detection experiment is disabled.";
2197 return result;
2198 }
2199
2200 RTC_LOG(LS_INFO) << "Automatic animation detection experiment settings:"
2201 " min_duration_ms="
2202 << result.min_duration_ms
2203 << " min_area_ration=" << result.min_area_ratio
2204 << " min_fps=" << result.min_fps;
2205
2206 return result;
2207 }
2208
// Heuristic animation detection for screenshare in BALANCED degradation
// mode: if the same update rect repeats long enough and covers a large
// enough area of the frame at sufficient fps, caps the source resolution
// (and removes the cap once the pattern breaks).
void VideoStreamEncoder::CheckForAnimatedContent(
    const VideoFrame& frame,
    int64_t time_when_posted_in_us) {
  // Only applies to the screenshare + BALANCED configuration with the
  // experiment enabled.
  if (!automatic_animation_detection_experiment_.enabled ||
      encoder_config_.content_type !=
          VideoEncoderConfig::ContentType::kScreen ||
      stream_resource_manager_.degradation_preference() !=
          DegradationPreference::BALANCED) {
    return;
  }

  if (expect_resize_state_ == ExpectResizeState::kResize && last_frame_info_ &&
      last_frame_info_->width != frame.width() &&
      last_frame_info_->height != frame.height()) {
    // On applying resolution cap there will be one frame with no/different
    // update, which should be skipped.
    // It can be delayed by several frames.
    expect_resize_state_ = ExpectResizeState::kFirstFrameAfterResize;
    return;
  }

  if (expect_resize_state_ == ExpectResizeState::kFirstFrameAfterResize) {
    // The first frame after resize should have new, scaled update_rect.
    if (frame.has_update_rect()) {
      last_update_rect_ = frame.update_rect();
    } else {
      last_update_rect_ = absl::nullopt;
    }
    expect_resize_state_ = ExpectResizeState::kNoResize;
  }

  bool should_cap_resolution = false;
  if (!frame.has_update_rect()) {
    // No update info: reset animation tracking.
    last_update_rect_ = absl::nullopt;
    animation_start_time_ = Timestamp::PlusInfinity();
  } else if ((!last_update_rect_ ||
              frame.update_rect() != *last_update_rect_)) {
    // Update rect changed: (re)start the animation timer.
    last_update_rect_ = frame.update_rect();
    animation_start_time_ = Timestamp::Micros(time_when_posted_in_us);
  } else {
    // Same rect repeating: cap resolution once it has lasted long enough,
    // covers a big enough area, and the input fps is high enough.
    TimeDelta animation_duration =
        Timestamp::Micros(time_when_posted_in_us) - animation_start_time_;
    float area_ratio = static_cast<float>(last_update_rect_->width *
                                          last_update_rect_->height) /
                       (frame.width() * frame.height());
    if (animation_duration.ms() >=
            automatic_animation_detection_experiment_.min_duration_ms &&
        area_ratio >=
            automatic_animation_detection_experiment_.min_area_ratio &&
        encoder_stats_observer_->GetInputFrameRate() >=
            automatic_animation_detection_experiment_.min_fps) {
      should_cap_resolution = true;
    }
  }
  // Apply/remove the cap only on state transitions.
  if (cap_resolution_due_to_video_content_ != should_cap_resolution) {
    expect_resize_state_ = should_cap_resolution ? ExpectResizeState::kResize
                                                 : ExpectResizeState::kNoResize;
    cap_resolution_due_to_video_content_ = should_cap_resolution;
    if (should_cap_resolution) {
      RTC_LOG(LS_INFO) << "Applying resolution cap due to animation detection.";
    } else {
      RTC_LOG(LS_INFO) << "Removing resolution cap due to no consistent "
                          "animation detection.";
    }
    // The source/sink controller lives on the main queue.
    main_queue_->PostTask(ToQueuedTask(task_safety_, [this,
                                                      should_cap_resolution]() {
      RTC_DCHECK_RUN_ON(main_queue_);
      video_source_sink_controller_.SetPixelsPerFrameUpperLimit(
          should_cap_resolution ? absl::optional<size_t>(kMaxAnimationPixels)
                                : absl::nullopt);
      video_source_sink_controller_.PushSourceSinkSettings();
    }));
  }
}
2283
2284 // RTC_RUN_ON(&encoder_queue_)
QueueRequestEncoderSwitch(const EncoderSwitchRequestCallback::Config & conf)2285 void VideoStreamEncoder::QueueRequestEncoderSwitch(
2286 const EncoderSwitchRequestCallback::Config& conf) {
2287 main_queue_->PostTask(ToQueuedTask(task_safety_, [this, conf]() {
2288 RTC_DCHECK_RUN_ON(main_queue_);
2289 settings_.encoder_switch_request_callback->RequestEncoderSwitch(conf);
2290 }));
2291 }
2292
2293 // RTC_RUN_ON(&encoder_queue_)
QueueRequestEncoderSwitch(const webrtc::SdpVideoFormat & format)2294 void VideoStreamEncoder::QueueRequestEncoderSwitch(
2295 const webrtc::SdpVideoFormat& format) {
2296 main_queue_->PostTask(ToQueuedTask(task_safety_, [this, format]() {
2297 RTC_DCHECK_RUN_ON(main_queue_);
2298 settings_.encoder_switch_request_callback->RequestEncoderSwitch(format);
2299 }));
2300 }
2301
InjectAdaptationResource(rtc::scoped_refptr<Resource> resource,VideoAdaptationReason reason)2302 void VideoStreamEncoder::InjectAdaptationResource(
2303 rtc::scoped_refptr<Resource> resource,
2304 VideoAdaptationReason reason) {
2305 rtc::Event map_resource_event;
2306 encoder_queue_.PostTask([this, resource, reason, &map_resource_event] {
2307 RTC_DCHECK_RUN_ON(&encoder_queue_);
2308 additional_resources_.push_back(resource);
2309 stream_resource_manager_.AddResource(resource, reason);
2310 map_resource_event.Set();
2311 });
2312 map_resource_event.Wait(rtc::Event::kForever);
2313 }
2314
InjectAdaptationConstraint(AdaptationConstraint * adaptation_constraint)2315 void VideoStreamEncoder::InjectAdaptationConstraint(
2316 AdaptationConstraint* adaptation_constraint) {
2317 rtc::Event event;
2318 encoder_queue_.PostTask([this, adaptation_constraint, &event] {
2319 RTC_DCHECK_RUN_ON(&encoder_queue_);
2320 if (!resource_adaptation_processor_) {
2321 // The VideoStreamEncoder was stopped and the processor destroyed before
2322 // this task had a chance to execute. No action needed.
2323 return;
2324 }
2325 adaptation_constraints_.push_back(adaptation_constraint);
2326 video_stream_adapter_->AddAdaptationConstraint(adaptation_constraint);
2327 event.Set();
2328 });
2329 event.Wait(rtc::Event::kForever);
2330 }
2331
AddRestrictionsListenerForTesting(VideoSourceRestrictionsListener * restrictions_listener)2332 void VideoStreamEncoder::AddRestrictionsListenerForTesting(
2333 VideoSourceRestrictionsListener* restrictions_listener) {
2334 rtc::Event event;
2335 encoder_queue_.PostTask([this, restrictions_listener, &event] {
2336 RTC_DCHECK_RUN_ON(&encoder_queue_);
2337 RTC_DCHECK(resource_adaptation_processor_);
2338 video_stream_adapter_->AddRestrictionsListener(restrictions_listener);
2339 event.Set();
2340 });
2341 event.Wait(rtc::Event::kForever);
2342 }
2343
RemoveRestrictionsListenerForTesting(VideoSourceRestrictionsListener * restrictions_listener)2344 void VideoStreamEncoder::RemoveRestrictionsListenerForTesting(
2345 VideoSourceRestrictionsListener* restrictions_listener) {
2346 rtc::Event event;
2347 encoder_queue_.PostTask([this, restrictions_listener, &event] {
2348 RTC_DCHECK_RUN_ON(&encoder_queue_);
2349 RTC_DCHECK(resource_adaptation_processor_);
2350 video_stream_adapter_->RemoveRestrictionsListener(restrictions_listener);
2351 event.Set();
2352 });
2353 event.Wait(rtc::Event::kForever);
2354 }
2355
2356 } // namespace webrtc
2357