1 /*
2  *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 
11 #include "media/engine/simulcast_encoder_adapter.h"
12 
13 #include <stdio.h>
14 #include <string.h>
15 
16 #include <algorithm>
17 #include <cstdint>
18 #include <string>
19 #include <utility>
20 
21 #include "absl/algorithm/container.h"
22 #include "api/scoped_refptr.h"
23 #include "api/video/i420_buffer.h"
24 #include "api/video/video_codec_constants.h"
25 #include "api/video/video_frame_buffer.h"
26 #include "api/video/video_rotation.h"
27 #include "api/video_codecs/video_encoder.h"
28 #include "api/video_codecs/video_encoder_factory.h"
29 #include "api/video_codecs/video_encoder_software_fallback_wrapper.h"
30 #include "media/base/video_common.h"
31 #include "modules/video_coding/include/video_error_codes.h"
32 #include "modules/video_coding/utility/simulcast_rate_allocator.h"
33 #include "rtc_base/atomic_ops.h"
34 #include "rtc_base/checks.h"
35 #include "rtc_base/experiments/rate_control_settings.h"
36 #include "rtc_base/logging.h"
37 #include "system_wrappers/include/field_trial.h"
38 
39 namespace {
40 
// Quantizer sanity bounds: if the configured qpMax is below kDefaultMinQp,
// InitEncode() resets it to kDefaultMaxQp.
const unsigned int kDefaultMinQp = 2;
const unsigned int kDefaultMaxQp = 56;
// Max qp for lowest spatial resolution when doing simulcast.
const unsigned int kLowestResMaxQp = 45;
45 
GetScreenshareBoostedQpValue()46 absl::optional<unsigned int> GetScreenshareBoostedQpValue() {
47   std::string experiment_group =
48       webrtc::field_trial::FindFullName("WebRTC-BoostedScreenshareQp");
49   unsigned int qp;
50   if (sscanf(experiment_group.c_str(), "%u", &qp) != 1)
51     return absl::nullopt;
52   qp = std::min(qp, 63u);
53   qp = std::max(qp, 1u);
54   return qp;
55 }
56 
SumStreamMaxBitrate(int streams,const webrtc::VideoCodec & codec)57 uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
58   uint32_t bitrate_sum = 0;
59   for (int i = 0; i < streams; ++i) {
60     bitrate_sum += codec.simulcastStream[i].maxBitrate;
61   }
62   return bitrate_sum;
63 }
64 
CountAllStreams(const webrtc::VideoCodec & codec)65 int CountAllStreams(const webrtc::VideoCodec& codec) {
66   int total_streams_count =
67       codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
68   uint32_t simulcast_max_bitrate =
69       SumStreamMaxBitrate(total_streams_count, codec);
70   if (simulcast_max_bitrate == 0) {
71     total_streams_count = 1;
72   }
73   return total_streams_count;
74 }
75 
CountActiveStreams(const webrtc::VideoCodec & codec)76 int CountActiveStreams(const webrtc::VideoCodec& codec) {
77   if (codec.numberOfSimulcastStreams < 1) {
78     return 1;
79   }
80   int total_streams_count = CountAllStreams(codec);
81   int active_streams_count = 0;
82   for (int i = 0; i < total_streams_count; ++i) {
83     if (codec.simulcastStream[i].active) {
84       ++active_streams_count;
85     }
86   }
87   return active_streams_count;
88 }
89 
VerifyCodec(const webrtc::VideoCodec * inst)90 int VerifyCodec(const webrtc::VideoCodec* inst) {
91   if (inst == nullptr) {
92     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
93   }
94   if (inst->maxFramerate < 1) {
95     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
96   }
97   // allow zero to represent an unspecified maxBitRate
98   if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
99     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
100   }
101   if (inst->width <= 1 || inst->height <= 1) {
102     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
103   }
104   if (inst->codecType == webrtc::kVideoCodecVP8 &&
105       inst->VP8().automaticResizeOn && CountActiveStreams(*inst) > 1) {
106     return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
107   }
108   return WEBRTC_VIDEO_CODEC_OK;
109 }
110 
StreamQualityCompare(const webrtc::SpatialLayer & a,const webrtc::SpatialLayer & b)111 bool StreamQualityCompare(const webrtc::SpatialLayer& a,
112                           const webrtc::SpatialLayer& b) {
113   return std::tie(a.height, a.width, a.maxBitrate, a.maxFramerate) <
114          std::tie(b.height, b.width, b.maxBitrate, b.maxFramerate);
115 }
116 
GetLowestAndHighestQualityStreamIndixes(rtc::ArrayView<webrtc::SpatialLayer> streams,int * lowest_quality_stream_idx,int * highest_quality_stream_idx)117 void GetLowestAndHighestQualityStreamIndixes(
118     rtc::ArrayView<webrtc::SpatialLayer> streams,
119     int* lowest_quality_stream_idx,
120     int* highest_quality_stream_idx) {
121   const auto lowest_highest_quality_streams =
122       absl::c_minmax_element(streams, StreamQualityCompare);
123   *lowest_quality_stream_idx =
124       std::distance(streams.begin(), lowest_highest_quality_streams.first);
125   *highest_quality_stream_idx =
126       std::distance(streams.begin(), lowest_highest_quality_streams.second);
127 }
128 
GetStreamStartBitratesKbps(const webrtc::VideoCodec & codec)129 std::vector<uint32_t> GetStreamStartBitratesKbps(
130     const webrtc::VideoCodec& codec) {
131   std::vector<uint32_t> start_bitrates;
132   std::unique_ptr<webrtc::VideoBitrateAllocator> rate_allocator =
133       std::make_unique<webrtc::SimulcastRateAllocator>(codec);
134   webrtc::VideoBitrateAllocation allocation =
135       rate_allocator->Allocate(webrtc::VideoBitrateAllocationParameters(
136           codec.startBitrate * 1000, codec.maxFramerate));
137 
138   int total_streams_count = CountAllStreams(codec);
139   for (int i = 0; i < total_streams_count; ++i) {
140     uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
141     start_bitrates.push_back(stream_bitrate);
142   }
143   return start_bitrates;
144 }
145 
146 }  // namespace
147 
148 namespace webrtc {
149 
// Bundles an encoder instance with the |prefer_temporal_support| flag it was
// created with; the flag determines whether a cached context may be reused
// (see FetchOrCreateEncoderContext).
SimulcastEncoderAdapter::EncoderContext::EncoderContext(
    std::unique_ptr<VideoEncoder> encoder,
    bool prefer_temporal_support)
    : encoder_(std::move(encoder)),
      prefer_temporal_support_(prefer_temporal_support) {}
155 
Release()156 void SimulcastEncoderAdapter::EncoderContext::Release() {
157   if (encoder_) {
158     encoder_->RegisterEncodeCompleteCallback(nullptr);
159     encoder_->Release();
160   }
161 }
162 
// Per-stream state: one encoder instance plus optional framerate limiting.
// A non-null |parent| makes this context intercept the encode-complete
// callback so the stream index can be stamped onto encoded images; a null
// |parent| leaves the callback untouched (bypass mode / lowest stream).
SimulcastEncoderAdapter::StreamContext::StreamContext(
    SimulcastEncoderAdapter* parent,
    std::unique_ptr<EncoderContext> encoder_context,
    std::unique_ptr<FramerateController> framerate_controller,
    int stream_idx,
    uint16_t width,
    uint16_t height,
    bool is_paused)
    : parent_(parent),
      encoder_context_(std::move(encoder_context)),
      framerate_controller_(std::move(framerate_controller)),
      stream_idx_(stream_idx),
      width_(width),
      height_(height),
      is_keyframe_needed_(false),
      is_paused_(is_paused) {
  if (parent_) {
    // Route callbacks through this context so OnEncodedImage can forward
    // them to the parent with the correct stream index.
    encoder_context_->encoder().RegisterEncodeCompleteCallback(this);
  }
}
183 
// Move constructor. Re-registers the encode-complete callback because the
// encoder must point at the new StreamContext object, not the moved-from one.
SimulcastEncoderAdapter::StreamContext::StreamContext(StreamContext&& rhs)
    : parent_(rhs.parent_),
      encoder_context_(std::move(rhs.encoder_context_)),
      framerate_controller_(std::move(rhs.framerate_controller_)),
      stream_idx_(rhs.stream_idx_),
      width_(rhs.width_),
      height_(rhs.height_),
      is_keyframe_needed_(rhs.is_keyframe_needed_),
      is_paused_(rhs.is_paused_) {
  if (parent_) {
    encoder_context_->encoder().RegisterEncodeCompleteCallback(this);
  }
}
197 
// Releases the owned encoder unless ownership was already transferred away
// via ReleaseEncoderContext() (or this object was moved from).
SimulcastEncoderAdapter::StreamContext::~StreamContext() {
  if (encoder_context_) {
    encoder_context_->Release();
  }
}
203 
// Releases the encoder and transfers the context out of this stream context
// (rvalue-qualified: callers must treat the StreamContext as consumed).
std::unique_ptr<SimulcastEncoderAdapter::EncoderContext>
SimulcastEncoderAdapter::StreamContext::ReleaseEncoderContext() && {
  encoder_context_->Release();
  return std::move(encoder_context_);
}
209 
OnKeyframe(Timestamp timestamp)210 void SimulcastEncoderAdapter::StreamContext::OnKeyframe(Timestamp timestamp) {
211   is_keyframe_needed_ = false;
212   if (framerate_controller_) {
213     framerate_controller_->AddFrame(timestamp.ms());
214   }
215 }
216 
ShouldDropFrame(Timestamp timestamp)217 bool SimulcastEncoderAdapter::StreamContext::ShouldDropFrame(
218     Timestamp timestamp) {
219   if (!framerate_controller_) {
220     return false;
221   }
222 
223   if (framerate_controller_->DropFrame(timestamp.ms())) {
224     return true;
225   }
226   framerate_controller_->AddFrame(timestamp.ms());
227   return false;
228 }
229 
// Encode-complete callback for a single stream. Forwards to the parent
// adapter, which stamps this context's stream index onto the image.
EncodedImageCallback::Result
SimulcastEncoderAdapter::StreamContext::OnEncodedImage(
    const EncodedImage& encoded_image,
    const CodecSpecificInfo* codec_specific_info) {
  RTC_CHECK(parent_);  // If null, this method should never be called.
  return parent_->OnEncodedImage(stream_idx_, encoded_image,
                                 codec_specific_info);
}
238 
// Drop notification from the per-stream encoder; forwarded to the parent
// with this context's stream index.
void SimulcastEncoderAdapter::StreamContext::OnDroppedFrame(
    DropReason /*reason*/) {
  RTC_CHECK(parent_);  // If null, this method should never be called.
  parent_->OnDroppedFrame(stream_idx_);
}
244 
// Convenience constructor without a software-fallback factory.
SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory,
                                                 const SdpVideoFormat& format)
    : SimulcastEncoderAdapter(factory, nullptr, format) {}
248 
// |primary_factory| must be non-null; |fallback_factory| may be null, in
// which case encoders are created without a software fallback wrapper.
// Field-trial-dependent settings are snapshotted at construction time.
SimulcastEncoderAdapter::SimulcastEncoderAdapter(
    VideoEncoderFactory* primary_factory,
    VideoEncoderFactory* fallback_factory,
    const SdpVideoFormat& format)
    : inited_(0),
      primary_encoder_factory_(primary_factory),
      fallback_encoder_factory_(fallback_factory),
      video_format_(format),
      total_streams_count_(0),
      bypass_mode_(false),
      encoded_complete_callback_(nullptr),
      experimental_boosted_screenshare_qp_(GetScreenshareBoostedQpValue()),
      boost_base_layer_quality_(RateControlSettings::ParseFromFieldTrials()
                                    .Vp8BoostBaseLayerQuality()),
      prefer_temporal_support_on_base_layer_(field_trial::IsEnabled(
          "WebRTC-Video-PreferTemporalSupportOnBaseLayer")) {
  RTC_DCHECK(primary_factory);

  // The adapter is typically created on the worker thread, but operated on
  // the encoder task queue.
  encoder_queue_.Detach();
}
271 
// Callers must invoke Release() before destruction; any encoders still
// sitting in the reuse cache are destroyed here.
SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
  RTC_DCHECK(!Initialized());
  DestroyStoredEncoders();
}
276 
// FEC control is handled by the underlying per-stream encoders; the
// adapter-level override is intentionally a no-op.
void SimulcastEncoderAdapter::SetFecControllerOverride(
    FecControllerOverride* /*fec_controller_override*/) {
  // Ignored.
}
281 
// Tears down all stream contexts, caching their encoder instances for
// possible reuse by a later InitEncode(). Always succeeds.
int SimulcastEncoderAdapter::Release() {
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  while (!stream_contexts_.empty()) {
    // Move the encoder instances and put it on the |cached_encoder_contexts_|
    // where it may possibly be reused from (ordering does not matter).
    cached_encoder_contexts_.push_front(
        std::move(stream_contexts_.back()).ReleaseEncoderContext());
    stream_contexts_.pop_back();
  }

  bypass_mode_ = false;

  // It's legal to move the encoder to another queue now.
  encoder_queue_.Detach();

  // Release-store pairs with the acquire-load in Initialized().
  rtc::AtomicOps::ReleaseStore(&inited_, 0);

  return WEBRTC_VIDEO_CODEC_OK;
}
302 
// Initializes the adapter for |inst|. Either passes the full codec through
// to a single (possibly simulcast-capable) encoder in "bypass mode", or
// creates one encoder per active simulcast stream. Returns a
// WEBRTC_VIDEO_CODEC_* status code; on failure all encoders are released.
int SimulcastEncoderAdapter::InitEncode(
    const VideoCodec* inst,
    const VideoEncoder::Settings& settings) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  if (settings.number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int ret = VerifyCodec(inst);
  if (ret < 0) {
    return ret;
  }

  // Releasing first moves any existing encoders into the reuse cache.
  Release();

  codec_ = *inst;
  total_streams_count_ = CountAllStreams(*inst);

  // TODO(ronghuawu): Remove once this is handled in LibvpxVp8Encoder.
  if (codec_.qpMax < kDefaultMinQp) {
    codec_.qpMax = kDefaultMaxQp;
  }

  bool is_legacy_singlecast = codec_.numberOfSimulcastStreams == 0;
  int lowest_quality_stream_idx = 0;
  int highest_quality_stream_idx = 0;
  if (!is_legacy_singlecast) {
    GetLowestAndHighestQualityStreamIndixes(
        rtc::ArrayView<SpatialLayer>(codec_.simulcastStream,
                                     total_streams_count_),
        &lowest_quality_stream_idx, &highest_quality_stream_idx);
  }

  std::unique_ptr<EncoderContext> encoder_context = FetchOrCreateEncoderContext(
      /*is_lowest_quality_stream=*/(
          is_legacy_singlecast ||
          codec_.simulcastStream[lowest_quality_stream_idx].active));
  if (encoder_context == nullptr) {
    return WEBRTC_VIDEO_CODEC_MEMORY;
  }

  // Two distinct scenarios:
  // * Singlecast (total_streams_count == 1) or simulcast with simulcast-capable
  //   underlaying encoder implementation. SEA operates in bypass mode: original
  //   settings are passed to the underlaying encoder, frame encode complete
  //   callback is not intercepted.
  // * Multi-encoder simulcast or singlecast if layers are deactivated
  //   (total_streams_count > 1 and active_streams_count >= 1). SEA creates
  //   N=active_streams_count encoders and configures each to produce a single
  //   stream.

  // Singlecast or simulcast with simulcast-capable underlaying encoder.
  if (total_streams_count_ == 1 ||
      encoder_context->encoder().GetEncoderInfo().supports_simulcast) {
    int ret = encoder_context->encoder().InitEncode(&codec_, settings);
    if (ret >= 0) {
      int active_streams_count = CountActiveStreams(*inst);
      stream_contexts_.emplace_back(
          /*parent=*/nullptr, std::move(encoder_context),
          /*framerate_controller=*/nullptr, /*stream_idx=*/0, codec_.width,
          codec_.height, /*is_paused=*/active_streams_count == 0);
      bypass_mode_ = true;

      DestroyStoredEncoders();
      rtc::AtomicOps::ReleaseStore(&inited_, 1);
      return WEBRTC_VIDEO_CODEC_OK;
    }

    // Bypass init failed; fall through to multi-encoder setup unless this
    // really was singlecast, in which case there is nothing to fall back to.
    encoder_context->Release();
    if (total_streams_count_ == 1) {
      // Failed to initialize singlecast encoder.
      return ret;
    }
  }

  // Multi-encoder simulcast or singlecast (deactivated layers).
  std::vector<uint32_t> stream_start_bitrate_kbps =
      GetStreamStartBitratesKbps(codec_);

  for (int stream_idx = 0; stream_idx < total_streams_count_; ++stream_idx) {
    // Inactive layers get no encoder at all.
    if (!is_legacy_singlecast && !codec_.simulcastStream[stream_idx].active) {
      continue;
    }

    // The context from the bypass attempt above is reused for the first
    // stream; later iterations fetch/create a fresh one.
    if (encoder_context == nullptr) {
      encoder_context = FetchOrCreateEncoderContext(
          /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx);
    }
    if (encoder_context == nullptr) {
      Release();
      return WEBRTC_VIDEO_CODEC_MEMORY;
    }

    VideoCodec stream_codec = MakeStreamCodec(
        codec_, stream_idx, stream_start_bitrate_kbps[stream_idx],
        /*is_lowest_quality_stream=*/stream_idx == lowest_quality_stream_idx,
        /*is_highest_quality_stream=*/stream_idx == highest_quality_stream_idx);

    int ret = encoder_context->encoder().InitEncode(&stream_codec, settings);
    if (ret < 0) {
      encoder_context.reset();
      Release();
      return ret;
    }

    // Intercept frame encode complete callback only for upper streams, where
    // we need to set a correct stream index. Set |parent| to nullptr for the
    // lowest stream to bypass the callback.
    SimulcastEncoderAdapter* parent = stream_idx > 0 ? this : nullptr;

    // A zero start bitrate means the rate allocator gave this stream nothing
    // yet; it starts paused until SetRates() funds it.
    bool is_paused = stream_start_bitrate_kbps[stream_idx] == 0;
    stream_contexts_.emplace_back(
        parent, std::move(encoder_context),
        std::make_unique<FramerateController>(stream_codec.maxFramerate),
        stream_idx, stream_codec.width, stream_codec.height, is_paused);
  }

  // To save memory, don't store encoders that we don't use.
  DestroyStoredEncoders();

  rtc::AtomicOps::ReleaseStore(&inited_, 1);
  return WEBRTC_VIDEO_CODEC_OK;
}
427 
// Encodes |input_image| on every unpaused stream, downscaling when a
// stream's resolution differs from the source. Keyframe requests propagate
// to all streams. Returns WEBRTC_VIDEO_CODEC_OK, or the first per-encoder
// error (remaining streams are skipped on error).
int SimulcastEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const std::vector<VideoFrameType>* frame_types) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoded_complete_callback_ == nullptr) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Enforce field-trial-requested resolution alignment on the input frame
  // and, optionally, on every configured layer resolution.
  if (encoder_info_override_.requested_resolution_alignment()) {
    const int alignment =
        *encoder_info_override_.requested_resolution_alignment();
    if (input_image.width() % alignment != 0 ||
        input_image.height() % alignment != 0) {
      RTC_LOG(LS_WARNING) << "Frame " << input_image.width() << "x"
                          << input_image.height() << " not divisible by "
                          << alignment;
      return WEBRTC_VIDEO_CODEC_ERROR;
    }
    if (encoder_info_override_.apply_alignment_to_all_simulcast_layers()) {
      for (const auto& layer : stream_contexts_) {
        if (layer.width() % alignment != 0 || layer.height() % alignment != 0) {
          RTC_LOG(LS_WARNING)
              << "Codec " << layer.width() << "x" << layer.height()
              << " not divisible by " << alignment;
          return WEBRTC_VIDEO_CODEC_ERROR;
        }
      }
    }
  }

  // All active streams should generate a key frame if
  // a key frame is requested by any stream.
  bool is_keyframe_needed = false;
  if (frame_types) {
    for (const auto& frame_type : *frame_types) {
      if (frame_type == VideoFrameType::kVideoFrameKey) {
        is_keyframe_needed = true;
        break;
      }
    }
  }

  // A stream may also have flagged a pending keyframe itself (e.g. after
  // being unpaused in SetRates()).
  if (!is_keyframe_needed) {
    for (const auto& layer : stream_contexts_) {
      if (layer.is_keyframe_needed()) {
        is_keyframe_needed = true;
        break;
      }
    }
  }

  // Temporary that may hold the result of texture to i420 buffer conversion.
  rtc::scoped_refptr<VideoFrameBuffer> src_buffer;
  int src_width = input_image.width();
  int src_height = input_image.height();

  for (auto& layer : stream_contexts_) {
    // Don't encode frames in resolutions that we don't intend to send.
    if (layer.is_paused()) {
      continue;
    }

    // Convert timestamp from RTP 90kHz clock.
    const Timestamp frame_timestamp =
        Timestamp::Micros((1000 * input_image.timestamp()) / 90);

    // If adapter is passed through and only one sw encoder does simulcast,
    // frame types for all streams should be passed to the encoder unchanged.
    // Otherwise a single per-encoder frame type is passed.
    std::vector<VideoFrameType> stream_frame_types(
        bypass_mode_ ? total_streams_count_ : 1);
    if (is_keyframe_needed) {
      std::fill(stream_frame_types.begin(), stream_frame_types.end(),
                VideoFrameType::kVideoFrameKey);
      layer.OnKeyframe(frame_timestamp);
    } else {
      // Delta frames are subject to per-stream framerate limiting.
      if (layer.ShouldDropFrame(frame_timestamp)) {
        continue;
      }
      std::fill(stream_frame_types.begin(), stream_frame_types.end(),
                VideoFrameType::kVideoFrameDelta);
    }

    // If scaling isn't required, because the input resolution
    // matches the destination or the input image is empty (e.g.
    // a keyframe request for encoders with internal camera
    // sources) or the source image has a native handle, pass the image on
    // directly. Otherwise, we'll scale it to match what the encoder expects
    // (below).
    // For texture frames, the underlying encoder is expected to be able to
    // correctly sample/scale the source texture.
    // TODO(perkj): ensure that works going forward, and figure out how this
    // affects webrtc:5683.
    if ((layer.width() == src_width && layer.height() == src_height) ||
        (input_image.video_frame_buffer()->type() ==
             VideoFrameBuffer::Type::kNative &&
         layer.encoder().GetEncoderInfo().supports_native_handle)) {
      int ret = layer.encoder().Encode(input_image, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    } else {
      if (src_buffer == nullptr) {
        src_buffer = input_image.video_frame_buffer();
      }
      rtc::scoped_refptr<VideoFrameBuffer> dst_buffer =
          src_buffer->Scale(layer.width(), layer.height());
      if (!dst_buffer) {
        RTC_LOG(LS_ERROR) << "Failed to scale video frame";
        return WEBRTC_VIDEO_CODEC_ENCODER_FAILURE;
      }

      // UpdateRect is not propagated to lower simulcast layers currently.
      // TODO(ilnik): Consider scaling UpdateRect together with the buffer.
      VideoFrame frame(input_image);
      frame.set_video_frame_buffer(dst_buffer);
      frame.set_rotation(webrtc::kVideoRotation_0);
      frame.set_update_rect(
          VideoFrame::UpdateRect{0, 0, frame.width(), frame.height()});
      int ret = layer.encoder().Encode(frame, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}
560 
RegisterEncodeCompleteCallback(EncodedImageCallback * callback)561 int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
562     EncodedImageCallback* callback) {
563   RTC_DCHECK_RUN_ON(&encoder_queue_);
564   encoded_complete_callback_ = callback;
565   if (!stream_contexts_.empty() && stream_contexts_.front().stream_idx() == 0) {
566     // Bypass frame encode complete callback for the lowest layer since there is
567     // no need to override frame's spatial index.
568     stream_contexts_.front().encoder().RegisterEncodeCompleteCallback(callback);
569   }
570   return WEBRTC_VIDEO_CODEC_OK;
571 }
572 
// Distributes the aggregate rate allocation across the stream encoders.
// In bypass mode the parameters are forwarded unchanged to the single
// encoder; otherwise each stream receives a single-spatial-layer slice of
// the bitrate allocation, a proportional share of the link allocation, and
// a framerate capped by its own target fps.
void SimulcastEncoderAdapter::SetRates(
    const RateControlParameters& parameters) {
  RTC_DCHECK_RUN_ON(&encoder_queue_);

  if (!Initialized()) {
    RTC_LOG(LS_WARNING) << "SetRates while not initialized";
    return;
  }

  if (parameters.framerate_fps < 1.0) {
    RTC_LOG(LS_WARNING) << "Invalid framerate: " << parameters.framerate_fps;
    return;
  }

  // Round to the nearest integer fps.
  codec_.maxFramerate = static_cast<uint32_t>(parameters.framerate_fps + 0.5);

  if (bypass_mode_) {
    stream_contexts_.front().encoder().SetRates(parameters);
    return;
  }

  for (StreamContext& layer_context : stream_contexts_) {
    int stream_idx = layer_context.stream_idx();
    uint32_t stream_bitrate_kbps =
        parameters.bitrate.GetSpatialLayerSum(stream_idx) / 1000;

    // Need a key frame if we have not sent this stream before.
    if (stream_bitrate_kbps > 0 && layer_context.is_paused()) {
      layer_context.set_is_keyframe_needed();
    }
    layer_context.set_is_paused(stream_bitrate_kbps == 0);

    // Slice the temporal layers out of the full allocation and pass it on to
    // the encoder handling the current simulcast stream.
    RateControlParameters stream_parameters = parameters;
    stream_parameters.bitrate = VideoBitrateAllocation();
    for (int i = 0; i < kMaxTemporalStreams; ++i) {
      if (parameters.bitrate.HasBitrate(stream_idx, i)) {
        // Each per-stream encoder sees itself as spatial layer 0.
        stream_parameters.bitrate.SetBitrate(
            0, i, parameters.bitrate.GetBitrate(stream_idx, i));
      }
    }

    // Assign link allocation proportionally to spatial layer allocation.
    if (!parameters.bandwidth_allocation.IsZero() &&
        parameters.bitrate.get_sum_bps() > 0) {
      stream_parameters.bandwidth_allocation =
          DataRate::BitsPerSec((parameters.bandwidth_allocation.bps() *
                                stream_parameters.bitrate.get_sum_bps()) /
                               parameters.bitrate.get_sum_bps());
      // Make sure we don't allocate bandwidth lower than target bitrate.
      if (stream_parameters.bandwidth_allocation.bps() <
          stream_parameters.bitrate.get_sum_bps()) {
        stream_parameters.bandwidth_allocation =
            DataRate::BitsPerSec(stream_parameters.bitrate.get_sum_bps());
      }
    }

    // Cap the framerate at the stream's own configured target, if any.
    stream_parameters.framerate_fps = std::min<double>(
        parameters.framerate_fps,
        layer_context.target_fps().value_or(parameters.framerate_fps));

    layer_context.encoder().SetRates(stream_parameters);
  }
}
638 
OnPacketLossRateUpdate(float packet_loss_rate)639 void SimulcastEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) {
640   for (auto& c : stream_contexts_) {
641     c.encoder().OnPacketLossRateUpdate(packet_loss_rate);
642   }
643 }
644 
OnRttUpdate(int64_t rtt_ms)645 void SimulcastEncoderAdapter::OnRttUpdate(int64_t rtt_ms) {
646   for (auto& c : stream_contexts_) {
647     c.encoder().OnRttUpdate(rtt_ms);
648   }
649 }
650 
OnLossNotification(const LossNotification & loss_notification)651 void SimulcastEncoderAdapter::OnLossNotification(
652     const LossNotification& loss_notification) {
653   for (auto& c : stream_contexts_) {
654     c.encoder().OnLossNotification(loss_notification);
655   }
656 }
657 
// TODO(brandtr): Add task checker to this member function, when all encoder
// callbacks are coming in on the encoder queue.
// Stamps |stream_idx| as the spatial index onto a copy of the encoded image
// and forwards it to the registered encode-complete callback.
EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
    size_t stream_idx,
    const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo) {
  EncodedImage stream_image(encodedImage);
  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;

  stream_image.SetSpatialIndex(stream_idx);

  return encoded_complete_callback_->OnEncodedImage(stream_image,
                                                    &stream_codec_specific);
}
672 
// Per-stream drop notification; currently a no-op at the adapter level.
void SimulcastEncoderAdapter::OnDroppedFrame(size_t stream_idx) {
  // Not yet implemented.
}
676 
// Acquire-load pairs with the release-stores in InitEncode()/Release(),
// making this safe to call from any thread.
bool SimulcastEncoderAdapter::Initialized() const {
  return rtc::AtomicOps::AcquireLoad(&inited_) == 1;
}
680 
DestroyStoredEncoders()681 void SimulcastEncoderAdapter::DestroyStoredEncoders() {
682   while (!cached_encoder_contexts_.empty()) {
683     cached_encoder_contexts_.pop_back();
684   }
685 }
686 
// Returns an encoder context for one stream, preferring a cached instance
// whose |prefer_temporal_support| flag matches; otherwise creates a new
// encoder (wrapped with software fallback when a fallback factory exists).
// The returned context has the current encode-complete callback registered.
std::unique_ptr<SimulcastEncoderAdapter::EncoderContext>
SimulcastEncoderAdapter::FetchOrCreateEncoderContext(
    bool is_lowest_quality_stream) {
  // Temporal-layer support is only preferred on the base layer, and only
  // when a fallback path exists and the field trial enables it.
  bool prefer_temporal_support = fallback_encoder_factory_ != nullptr &&
                                 is_lowest_quality_stream &&
                                 prefer_temporal_support_on_base_layer_;

  // Toggling of |prefer_temporal_support| requires encoder recreation. Find
  // and reuse encoder with desired |prefer_temporal_support|. Otherwise, if
  // there is no such encoder in the cache, create a new instance.
  auto encoder_context_iter =
      std::find_if(cached_encoder_contexts_.begin(),
                   cached_encoder_contexts_.end(), [&](auto& encoder_context) {
                     return encoder_context->prefer_temporal_support() ==
                            prefer_temporal_support;
                   });

  std::unique_ptr<SimulcastEncoderAdapter::EncoderContext> encoder_context;
  if (encoder_context_iter != cached_encoder_contexts_.end()) {
    encoder_context = std::move(*encoder_context_iter);
    cached_encoder_contexts_.erase(encoder_context_iter);
  } else {
    std::unique_ptr<VideoEncoder> encoder =
        primary_encoder_factory_->CreateVideoEncoder(video_format_);
    if (fallback_encoder_factory_ != nullptr) {
      encoder = CreateVideoEncoderSoftwareFallbackWrapper(
          fallback_encoder_factory_->CreateVideoEncoder(video_format_),
          std::move(encoder), prefer_temporal_support);
    }

    encoder_context = std::make_unique<SimulcastEncoderAdapter::EncoderContext>(
        std::move(encoder), prefer_temporal_support);
  }

  encoder_context->encoder().RegisterEncodeCompleteCallback(
      encoded_complete_callback_);
  return encoder_context;
}
725 
// Builds a single-stream codec configuration for simulcast stream
// |stream_idx| of |codec|: copies the layer's resolution/bitrate/framerate
// settings, applies QP boosts for the lowest-quality stream, and tunes
// VP8/H264 per-stream parameters.
webrtc::VideoCodec SimulcastEncoderAdapter::MakeStreamCodec(
    const webrtc::VideoCodec& codec,
    int stream_idx,
    uint32_t start_bitrate_kbps,
    bool is_lowest_quality_stream,
    bool is_highest_quality_stream) {
  webrtc::VideoCodec codec_params = codec;
  const SpatialLayer& stream_params = codec.simulcastStream[stream_idx];

  // Each per-stream encoder is configured as plain singlecast.
  codec_params.numberOfSimulcastStreams = 0;
  codec_params.width = stream_params.width;
  codec_params.height = stream_params.height;
  codec_params.maxBitrate = stream_params.maxBitrate;
  codec_params.minBitrate = stream_params.minBitrate;
  codec_params.maxFramerate = stream_params.maxFramerate;
  codec_params.qpMax = stream_params.qpMax;
  codec_params.active = stream_params.active;
  // Settings that are based on stream/resolution.
  if (is_lowest_quality_stream) {
    // Settings for lowest spatial resolutions.
    if (codec.mode == VideoCodecMode::kScreensharing) {
      if (experimental_boosted_screenshare_qp_) {
        codec_params.qpMax = *experimental_boosted_screenshare_qp_;
      }
    } else if (boost_base_layer_quality_) {
      codec_params.qpMax = kLowestResMaxQp;
    }
  }
  if (codec.codecType == webrtc::kVideoCodecVP8) {
    codec_params.VP8()->numberOfTemporalLayers =
        stream_params.numberOfTemporalLayers;
    if (!is_highest_quality_stream) {
      // For resolutions below CIF, set the codec |complexity| parameter to
      // kComplexityHigher, which maps to cpu_used = -4.
      int pixels_per_frame = codec_params.width * codec_params.height;
      if (pixels_per_frame < 352 * 288) {
        codec_params.VP8()->complexity =
            webrtc::VideoCodecComplexity::kComplexityHigher;
      }
      // Turn off denoising for all streams but the highest resolution.
      codec_params.VP8()->denoisingOn = false;
    }
  } else if (codec.codecType == webrtc::kVideoCodecH264) {
    codec_params.H264()->numberOfTemporalLayers =
        stream_params.numberOfTemporalLayers;
  }

  // Cap start bitrate to the min bitrate in order to avoid strange codec
  // behavior.
  codec_params.startBitrate =
      std::max(stream_params.minBitrate, start_bitrate_kbps);

  // Legacy screenshare mode is only enabled for the first simulcast layer
  codec_params.legacy_conference_mode =
      codec.legacy_conference_mode && stream_idx == 0;

  return codec_params;
}
784 
OverrideFromFieldTrial(VideoEncoder::EncoderInfo * info) const785 void SimulcastEncoderAdapter::OverrideFromFieldTrial(
786     VideoEncoder::EncoderInfo* info) const {
787   if (encoder_info_override_.requested_resolution_alignment()) {
788     info->requested_resolution_alignment =
789         *encoder_info_override_.requested_resolution_alignment();
790     info->apply_alignment_to_all_simulcast_layers =
791         encoder_info_override_.apply_alignment_to_all_simulcast_layers();
792   }
793   if (!encoder_info_override_.resolution_bitrate_limits().empty()) {
794     info->resolution_bitrate_limits =
795         encoder_info_override_.resolution_bitrate_limits();
796   }
797 }
798 
GetEncoderInfo() const799 VideoEncoder::EncoderInfo SimulcastEncoderAdapter::GetEncoderInfo() const {
800   if (stream_contexts_.size() == 1) {
801     // Not using simulcast adapting functionality, just pass through.
802     VideoEncoder::EncoderInfo info =
803         stream_contexts_.front().encoder().GetEncoderInfo();
804     OverrideFromFieldTrial(&info);
805     return info;
806   }
807 
808   VideoEncoder::EncoderInfo encoder_info;
809   encoder_info.implementation_name = "SimulcastEncoderAdapter";
810   encoder_info.requested_resolution_alignment = 1;
811   encoder_info.apply_alignment_to_all_simulcast_layers = false;
812   encoder_info.supports_native_handle = true;
813   encoder_info.scaling_settings.thresholds = absl::nullopt;
814   if (stream_contexts_.empty()) {
815     OverrideFromFieldTrial(&encoder_info);
816     return encoder_info;
817   }
818 
819   encoder_info.scaling_settings = VideoEncoder::ScalingSettings::kOff;
820 
821   for (size_t i = 0; i < stream_contexts_.size(); ++i) {
822     VideoEncoder::EncoderInfo encoder_impl_info =
823         stream_contexts_[i].encoder().GetEncoderInfo();
824 
825     if (i == 0) {
826       // Encoder name indicates names of all sub-encoders.
827       encoder_info.implementation_name += " (";
828       encoder_info.implementation_name += encoder_impl_info.implementation_name;
829 
830       encoder_info.supports_native_handle =
831           encoder_impl_info.supports_native_handle;
832       encoder_info.has_trusted_rate_controller =
833           encoder_impl_info.has_trusted_rate_controller;
834       encoder_info.is_hardware_accelerated =
835           encoder_impl_info.is_hardware_accelerated;
836       encoder_info.has_internal_source = encoder_impl_info.has_internal_source;
837     } else {
838       encoder_info.implementation_name += ", ";
839       encoder_info.implementation_name += encoder_impl_info.implementation_name;
840 
841       // Native handle supported if any encoder supports it.
842       encoder_info.supports_native_handle |=
843           encoder_impl_info.supports_native_handle;
844 
845       // Trusted rate controller only if all encoders have it.
846       encoder_info.has_trusted_rate_controller &=
847           encoder_impl_info.has_trusted_rate_controller;
848 
849       // Uses hardware support if any of the encoders uses it.
850       // For example, if we are having issues with down-scaling due to
851       // pipelining delay in HW encoders we need higher encoder usage
852       // thresholds in CPU adaptation.
853       encoder_info.is_hardware_accelerated |=
854           encoder_impl_info.is_hardware_accelerated;
855 
856       // Has internal source only if all encoders have it.
857       encoder_info.has_internal_source &= encoder_impl_info.has_internal_source;
858     }
859     encoder_info.fps_allocation[i] = encoder_impl_info.fps_allocation[0];
860     encoder_info.requested_resolution_alignment = cricket::LeastCommonMultiple(
861         encoder_info.requested_resolution_alignment,
862         encoder_impl_info.requested_resolution_alignment);
863     if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) {
864       encoder_info.apply_alignment_to_all_simulcast_layers = true;
865     }
866   }
867   encoder_info.implementation_name += ")";
868 
869   OverrideFromFieldTrial(&encoder_info);
870 
871   return encoder_info;
872 }
873 
874 }  // namespace webrtc
875