/*
 *  Copyright (c) 2014 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "media/engine/simulcast_encoder_adapter.h"

#include <algorithm>

// NOTE(ajm): Path provided by gyp.
#include "libyuv/scale.h"  // NOLINT

#include "api/video/i420_buffer.h"
#include "api/video_codecs/video_encoder_factory.h"
#include "media/engine/scopedvideoencoder.h"
#include "modules/video_coding/codecs/vp8/screenshare_layers.h"
#include "modules/video_coding/codecs/vp8/simulcast_rate_allocator.h"
#include "rtc_base/checks.h"
#include "system_wrappers/include/clock.h"

namespace {

const unsigned int kDefaultMinQp = 2;
const unsigned int kDefaultMaxQp = 56;
// Max qp for lowest spatial resolution when doing simulcast.
const unsigned int kLowestResMaxQp = 45;

uint32_t SumStreamMaxBitrate(int streams, const webrtc::VideoCodec& codec) {
  uint32_t bitrate_sum = 0;
  for (int i = 0; i < streams; ++i) {
    bitrate_sum += codec.simulcastStream[i].maxBitrate;
  }
  return bitrate_sum;
}

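// Returns the number of simulcast streams to use. If none of the configured
// streams has a max bitrate set, fall back to a single stream.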
int NumberOfStreams(const webrtc::VideoCodec& codec) {
  int streams =
      codec.numberOfSimulcastStreams < 1 ? 1 : codec.numberOfSimulcastStreams;
  uint32_t simulcast_max_bitrate = SumStreamMaxBitrate(streams, codec);
  if (simulcast_max_bitrate == 0) {
    streams = 1;
  }
  return streams;
}

int VerifyCodec(const webrtc::VideoCodec* inst) {
  if (inst == nullptr) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->maxFramerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  // Allow zero to represent an unspecified maxBitrate.
  if (inst->maxBitrate > 0 && inst->startBitrate > inst->maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  if (inst->width <= 1 || inst->height <= 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
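  // Automatic resize cannot be combined with simulcast, since each simulcast
  // stream has a fixed, configured resolution.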
  if (inst->VP8().automaticResizeOn && inst->numberOfSimulcastStreams > 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

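// Orders simulcast streams by ascending resolution (height, then width), with
// max bitrate as the tie-breaker.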
bool StreamResolutionCompare(const webrtc::SimulcastStream& a,
                             const webrtc::SimulcastStream& b) {
  return std::tie(a.height, a.width, a.maxBitrate) <
         std::tie(b.height, b.width, b.maxBitrate);
}

// An EncodedImageCallback implementation that forwards on calls to a
// SimulcastEncoderAdapter, but with the stream index it's registered with as
// the first parameter to Encoded.
class AdapterEncodedImageCallback : public webrtc::EncodedImageCallback {
 public:
  AdapterEncodedImageCallback(webrtc::SimulcastEncoderAdapter* adapter,
                              size_t stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  EncodedImageCallback::Result OnEncodedImage(
      const webrtc::EncodedImage& encoded_image,
      const webrtc::CodecSpecificInfo* codec_specific_info,
      const webrtc::RTPFragmentationHeader* fragmentation) override {
    return adapter_->OnEncodedImage(stream_idx_, encoded_image,
                                    codec_specific_info, fragmentation);
  }

 private:
  webrtc::SimulcastEncoderAdapter* const adapter_;
  const size_t stream_idx_;
};

// Utility class used to adapt the simulcast id as reported by the temporal
// layers factory, since each sub-encoder will report stream 0.
class TemporalLayersFactoryAdapter : public webrtc::TemporalLayersFactory {
 public:
  TemporalLayersFactoryAdapter(int adapted_simulcast_id,
                               const TemporalLayersFactory& tl_factory)
      : adapted_simulcast_id_(adapted_simulcast_id), tl_factory_(tl_factory) {}
  ~TemporalLayersFactoryAdapter() override {}
  webrtc::TemporalLayers* Create(int simulcast_id, int temporal_layers,
                                 uint8_t initial_tl0_pic_idx) const override {
    return tl_factory_.Create(adapted_simulcast_id_, temporal_layers,
                              initial_tl0_pic_idx);
  }
  std::unique_ptr<webrtc::TemporalLayersChecker> CreateChecker(
      int simulcast_id, int temporal_layers,
      uint8_t initial_tl0_pic_idx) const override {
    return tl_factory_.CreateChecker(adapted_simulcast_id_, temporal_layers,
                                     initial_tl0_pic_idx);
  }

  const int adapted_simulcast_id_;
  const TemporalLayersFactory& tl_factory_;
};

}  // namespace

namespace webrtc {

SimulcastEncoderAdapter::SimulcastEncoderAdapter(VideoEncoderFactory* factory)
    : inited_(0),
      factory_(factory),
      cricket_factory_(nullptr),
      encoded_complete_callback_(nullptr),
      implementation_name_("SimulcastEncoderAdapter") {
  // The adapter is typically created on the worker thread, but operated on
  // the encoder task queue.
  encoder_queue_.Detach();

  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
}

SimulcastEncoderAdapter::SimulcastEncoderAdapter(
    cricket::WebRtcVideoEncoderFactory* factory)
    : inited_(0),
      factory_(nullptr),
      cricket_factory_(factory),
      encoded_complete_callback_(nullptr),
      implementation_name_("SimulcastEncoderAdapter") {
  // The adapter is typically created on the worker thread, but operated on
  // the encoder task queue.
  encoder_queue_.Detach();

  memset(&codec_, 0, sizeof(webrtc::VideoCodec));
}

SimulcastEncoderAdapter::~SimulcastEncoderAdapter() {
  RTC_DCHECK(!Initialized());
  DestroyStoredEncoders();
}

int SimulcastEncoderAdapter::Release() {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  while (!streaminfos_.empty()) {
    std::unique_ptr<VideoEncoder> encoder =
        std::move(streaminfos_.back().encoder);
    // Even though it seems very unlikely, there are no guarantees that the
    // encoder will not call back after being Release()'d. Therefore, we first
    // disable the callbacks here.
    encoder->RegisterEncodeCompleteCallback(nullptr);
    encoder->Release();
    streaminfos_.pop_back();  // Deletes callback adapter.
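    // Keep the released encoder instance around so that a subsequent
    // InitEncode() can reuse it instead of creating a new one.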
    stored_encoders_.push(std::move(encoder));
  }

  // It's legal to move the encoder to another queue now.
  encoder_queue_.Detach();

  rtc::AtomicOps::ReleaseStore(&inited_, 0);

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::InitEncode(const VideoCodec* inst,
                                        int number_of_cores,
                                        size_t max_payload_size) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (number_of_cores < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  int ret = VerifyCodec(inst);
  if (ret < 0) {
    return ret;
  }

  ret = Release();
  if (ret < 0) {
    return ret;
  }

  int number_of_streams = NumberOfStreams(*inst);
  RTC_DCHECK_LE(number_of_streams, kMaxSimulcastStreams);
  const bool doing_simulcast = (number_of_streams > 1);

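  // Use a temporary SimulcastRateAllocator to split the configured start
  // bitrate (bps) across the simulcast streams; the per-stream sums are
  // converted to kbps below.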
  codec_ = *inst;
  SimulcastRateAllocator rate_allocator(codec_, nullptr);
  BitrateAllocation allocation = rate_allocator.GetAllocation(
      codec_.startBitrate * 1000, codec_.maxFramerate);
  std::vector<uint32_t> start_bitrates;
  for (int i = 0; i < kMaxSimulcastStreams; ++i) {
    uint32_t stream_bitrate = allocation.GetSpatialLayerSum(i) / 1000;
    start_bitrates.push_back(stream_bitrate);
  }

  std::string implementation_name;
  // Create |number_of_streams| of encoder instances and init them.

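  // Rank the streams by resolution so that the lowest- and highest-resolution
  // streams can be given resolution-specific settings in PopulateStreamCodec().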
  const auto minmax = std::minmax_element(
      std::begin(codec_.simulcastStream),
      std::begin(codec_.simulcastStream) + number_of_streams,
      StreamResolutionCompare);
  const auto lowest_resolution_stream_index =
      std::distance(std::begin(codec_.simulcastStream), minmax.first);
  const auto highest_resolution_stream_index =
      std::distance(std::begin(codec_.simulcastStream), minmax.second);

  RTC_DCHECK_LT(lowest_resolution_stream_index, number_of_streams);
  RTC_DCHECK_LT(highest_resolution_stream_index, number_of_streams);

  for (int i = 0; i < number_of_streams; ++i) {
    VideoCodec stream_codec;
    uint32_t start_bitrate_kbps = start_bitrates[i];
    if (!doing_simulcast) {
      stream_codec = codec_;
      stream_codec.numberOfSimulcastStreams = 1;
    } else {
      // Raise the start bitrate to at least the stream's min bitrate in order
      // to avoid strange codec behavior. Since sending will be false in that
      // case, this should not matter.
      StreamResolution stream_resolution =
          i == highest_resolution_stream_index
              ? StreamResolution::HIGHEST
              : i == lowest_resolution_stream_index ? StreamResolution::LOWEST
                                                    : StreamResolution::OTHER;

      start_bitrate_kbps =
          std::max(codec_.simulcastStream[i].minBitrate, start_bitrate_kbps);
      PopulateStreamCodec(codec_, i, start_bitrate_kbps, stream_resolution,
                          &stream_codec);
    }
    TemporalLayersFactoryAdapter tl_factory_adapter(i,
                                                    *codec_.VP8()->tl_factory);
    stream_codec.VP8()->tl_factory = &tl_factory_adapter;

    // TODO(ronghuawu): Remove once this is handled in VP8EncoderImpl.
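    // Guard against a qpMax below the default minimum qp (e.g. an
    // unconfigured value of 0) by falling back to the default maximum.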
    if (stream_codec.qpMax < kDefaultMinQp) {
      stream_codec.qpMax = kDefaultMaxQp;
    }

    // If a previously created encoder instance is available, reuse it.
    // TODO(brandtr): Set initial RTP state (e.g., picture_id/tl0_pic_idx) here,
    // when we start storing that state outside the encoder wrappers.
    std::unique_ptr<VideoEncoder> encoder;
    if (!stored_encoders_.empty()) {
      encoder = std::move(stored_encoders_.top());
      stored_encoders_.pop();
    } else {
      encoder = factory_ ? factory_->CreateVideoEncoder(SdpVideoFormat("VP8"))
                         : CreateScopedVideoEncoder(cricket_factory_,
                                                    cricket::VideoCodec("VP8"));
    }

    ret = encoder->InitEncode(&stream_codec, number_of_cores, max_payload_size);
    if (ret < 0) {
      // Explicitly destroy the current encoder; because we haven't registered a
      // StreamInfo for it yet, Release won't do anything about it.
      encoder.reset();
      Release();
      return ret;
    }
    std::unique_ptr<EncodedImageCallback> callback(
        new AdapterEncodedImageCallback(this, i));
    encoder->RegisterEncodeCompleteCallback(callback.get());
    streaminfos_.emplace_back(std::move(encoder), std::move(callback),
                              stream_codec.width, stream_codec.height,
                              start_bitrate_kbps > 0);

    if (i != 0) {
      implementation_name += ", ";
    }
    implementation_name += streaminfos_[i].encoder->ImplementationName();
  }

  if (doing_simulcast) {
    implementation_name_ =
        "SimulcastEncoderAdapter (" + implementation_name + ")";
  } else {
    implementation_name_ = implementation_name;
  }

  // To save memory, don't store encoders that we don't use.
  DestroyStoredEncoders();

  rtc::AtomicOps::ReleaseStore(&inited_, 1);

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::Encode(
    const VideoFrame& input_image, const CodecSpecificInfo* codec_specific_info,
    const std::vector<FrameType>* frame_types) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }
  if (encoded_complete_callback_ == nullptr) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // All active streams should generate a key frame if
  // a key frame is requested by any stream.
  bool send_key_frame = false;
  if (frame_types) {
    for (size_t i = 0; i < frame_types->size(); ++i) {
      if (frame_types->at(i) == kVideoFrameKey) {
        send_key_frame = true;
        break;
      }
    }
  }
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    if (streaminfos_[stream_idx].key_frame_request &&
        streaminfos_[stream_idx].send_stream) {
      send_key_frame = true;
      break;
    }
  }

  int src_width = input_image.width();
  int src_height = input_image.height();
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    // Don't encode frames in resolutions that we don't intend to send.
    if (!streaminfos_[stream_idx].send_stream) {
      continue;
    }

    std::vector<FrameType> stream_frame_types;
    if (send_key_frame) {
      stream_frame_types.push_back(kVideoFrameKey);
      streaminfos_[stream_idx].key_frame_request = false;
    } else {
      stream_frame_types.push_back(kVideoFrameDelta);
    }

    int dst_width = streaminfos_[stream_idx].width;
    int dst_height = streaminfos_[stream_idx].height;
    // If scaling isn't required, because the input resolution
    // matches the destination or the input image is empty (e.g.
    // a keyframe request for encoders with internal camera
    // sources) or the source image has a native handle, pass the image on
    // directly. Otherwise, we'll scale it to match what the encoder expects
    // (below).
    // For texture frames, the underlying encoder is expected to be able to
    // correctly sample/scale the source texture.
    // TODO(perkj): ensure that works going forward, and figure out how this
    // affects webrtc:5683.
    if ((dst_width == src_width && dst_height == src_height) ||
        input_image.video_frame_buffer()->type() ==
            VideoFrameBuffer::Type::kNative) {
      int ret = streaminfos_[stream_idx].encoder->Encode(
          input_image, codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    } else {
      rtc::scoped_refptr<I420Buffer> dst_buffer =
          I420Buffer::Create(dst_width, dst_height);
      rtc::scoped_refptr<I420BufferInterface> src_buffer =
          input_image.video_frame_buffer()->ToI420();
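      // Scale the source frame to this stream's configured resolution using
      // bilinear filtering before encoding.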
      libyuv::I420Scale(src_buffer->DataY(), src_buffer->StrideY(),
                        src_buffer->DataU(), src_buffer->StrideU(),
                        src_buffer->DataV(), src_buffer->StrideV(), src_width,
                        src_height, dst_buffer->MutableDataY(),
                        dst_buffer->StrideY(), dst_buffer->MutableDataU(),
                        dst_buffer->StrideU(), dst_buffer->MutableDataV(),
                        dst_buffer->StrideV(), dst_width, dst_height,
                        libyuv::kFilterBilinear);

      int ret = streaminfos_[stream_idx].encoder->Encode(
          VideoFrame(dst_buffer, input_image.timestamp(),
                     input_image.render_time_ms(), webrtc::kVideoRotation_0),
          codec_specific_info, &stream_frame_types);
      if (ret != WEBRTC_VIDEO_CODEC_OK) {
        return ret;
      }
    }
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::SetChannelParameters(uint32_t packet_loss,
                                                  int64_t rtt) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    streaminfos_[stream_idx].encoder->SetChannelParameters(packet_loss, rtt);
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int SimulcastEncoderAdapter::SetRateAllocation(const BitrateAllocation& bitrate,
                                               uint32_t new_framerate) {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);

  if (!Initialized()) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  if (new_framerate < 1) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  if (codec_.maxBitrate > 0 && bitrate.get_sum_kbps() > codec_.maxBitrate) {
    return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
  }

  if (bitrate.get_sum_bps() > 0) {
    // Make sure the bitrate fits the configured min bitrates. 0 is a special
    // value that means paused, though, so leave it alone.
    if (bitrate.get_sum_kbps() < codec_.minBitrate) {
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }

    if (codec_.numberOfSimulcastStreams > 0 &&
        bitrate.get_sum_kbps() < codec_.simulcastStream[0].minBitrate) {
      return WEBRTC_VIDEO_CODEC_ERR_PARAMETER;
    }
  }

  codec_.maxFramerate = new_framerate;

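  // A stream whose spatial-layer bitrate sum is zero is treated as paused; a
  // stream that transitions from paused to active gets a key frame request on
  // the next Encode() call.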
  for (size_t stream_idx = 0; stream_idx < streaminfos_.size(); ++stream_idx) {
    uint32_t stream_bitrate_kbps =
        bitrate.GetSpatialLayerSum(stream_idx) / 1000;

    // Need a key frame if we have not sent this stream before.
    if (stream_bitrate_kbps > 0 && !streaminfos_[stream_idx].send_stream) {
      streaminfos_[stream_idx].key_frame_request = true;
    }
    streaminfos_[stream_idx].send_stream = stream_bitrate_kbps > 0;

    // Slice the temporal layers out of the full allocation and pass it on to
    // the encoder handling the current simulcast stream.
    BitrateAllocation stream_allocation;
    for (int i = 0; i < kMaxTemporalStreams; ++i) {
      if (bitrate.HasBitrate(stream_idx, i)) {
        stream_allocation.SetBitrate(0, i, bitrate.GetBitrate(stream_idx, i));
      }
    }
    streaminfos_[stream_idx].encoder->SetRateAllocation(stream_allocation,
                                                        new_framerate);
  }

  return WEBRTC_VIDEO_CODEC_OK;
}

// TODO(brandtr): Add task checker to this member function, when all encoder
// callbacks are coming in on the encoder queue.
EncodedImageCallback::Result SimulcastEncoderAdapter::OnEncodedImage(
    size_t stream_idx, const EncodedImage& encodedImage,
    const CodecSpecificInfo* codecSpecificInfo,
    const RTPFragmentationHeader* fragmentation) {
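  // Rewrite the codec-specific info so that the adapter's implementation name
  // is reported and the VP8 simulcastIdx identifies which sub-encoder produced
  // this image.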
  CodecSpecificInfo stream_codec_specific = *codecSpecificInfo;
  stream_codec_specific.codec_name = implementation_name_.c_str();
  CodecSpecificInfoVP8* vp8Info = &(stream_codec_specific.codecSpecific.VP8);
  vp8Info->simulcastIdx = stream_idx;

  return encoded_complete_callback_->OnEncodedImage(
      encodedImage, &stream_codec_specific, fragmentation);
}

void SimulcastEncoderAdapter::PopulateStreamCodec(
    const webrtc::VideoCodec& inst, int stream_index,
    uint32_t start_bitrate_kbps, StreamResolution stream_resolution,
    webrtc::VideoCodec* stream_codec) {
  *stream_codec = inst;

  // Stream specific settings.
  stream_codec->VP8()->numberOfTemporalLayers =
      inst.simulcastStream[stream_index].numberOfTemporalLayers;
  stream_codec->numberOfSimulcastStreams = 0;
  stream_codec->width = inst.simulcastStream[stream_index].width;
  stream_codec->height = inst.simulcastStream[stream_index].height;
  stream_codec->maxBitrate = inst.simulcastStream[stream_index].maxBitrate;
  stream_codec->minBitrate = inst.simulcastStream[stream_index].minBitrate;
  stream_codec->qpMax = inst.simulcastStream[stream_index].qpMax;
  // Settings that are based on stream/resolution.
  if (stream_resolution == StreamResolution::LOWEST) {
    // Settings for lowest spatial resolutions.
    stream_codec->qpMax = kLowestResMaxQp;
  }
  if (stream_resolution != StreamResolution::HIGHEST) {
    // For resolutions below CIF, set the codec |complexity| parameter to
    // kComplexityHigher, which maps to cpu_used = -4.
    int pixels_per_frame = stream_codec->width * stream_codec->height;
    if (pixels_per_frame < 352 * 288) {
      stream_codec->VP8()->complexity = webrtc::kComplexityHigher;
    }
    // Turn off denoising for all streams but the highest resolution.
    stream_codec->VP8()->denoisingOn = false;
  }
  // TODO(ronghuawu): what to do with targetBitrate.

  stream_codec->startBitrate = start_bitrate_kbps;
}

bool SimulcastEncoderAdapter::Initialized() const {
  return rtc::AtomicOps::AcquireLoad(&inited_) == 1;
}

void SimulcastEncoderAdapter::DestroyStoredEncoders() {
  while (!stored_encoders_.empty()) {
    stored_encoders_.pop();
  }
}

bool SimulcastEncoderAdapter::SupportsNativeHandle() const {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  // We should not be calling this method before streaminfos_ are configured.
  RTC_DCHECK(!streaminfos_.empty());
  for (const auto& streaminfo : streaminfos_) {
    if (!streaminfo.encoder->SupportsNativeHandle()) {
      return false;
    }
  }
  return true;
}

VideoEncoder::ScalingSettings SimulcastEncoderAdapter::GetScalingSettings()
    const {
  // TODO(brandtr): Investigate why the sequence checker below fails on mac.
  // RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  // Turn off quality scaling for simulcast.
  if (!Initialized() || NumberOfStreams(codec_) != 1) {
    return VideoEncoder::ScalingSettings(false);
  }
  return streaminfos_[0].encoder->GetScalingSettings();
}

const char* SimulcastEncoderAdapter::ImplementationName() const {
  RTC_DCHECK_CALLED_SEQUENTIALLY(&encoder_queue_);
  return implementation_name_.c_str();
}

}  // namespace webrtc