/*
 *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"

#include <cstring>

#include "api/video/encoded_image.h"
#include "api/video_codecs/video_encoder.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "media/base/video_common.h"
#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"

namespace webrtc {

// Callback wrapper that helps distinguish returned results from |encoders_|
// instances.
28 class MultiplexEncoderAdapter::AdapterEncodedImageCallback
29     : public webrtc::EncodedImageCallback {
30  public:
AdapterEncodedImageCallback(webrtc::MultiplexEncoderAdapter * adapter,AlphaCodecStream stream_idx)31   AdapterEncodedImageCallback(webrtc::MultiplexEncoderAdapter* adapter,
32                               AlphaCodecStream stream_idx)
33       : adapter_(adapter), stream_idx_(stream_idx) {}
34 
OnEncodedImage(const EncodedImage & encoded_image,const CodecSpecificInfo * codec_specific_info)35   EncodedImageCallback::Result OnEncodedImage(
36       const EncodedImage& encoded_image,
37       const CodecSpecificInfo* codec_specific_info) override {
38     if (!adapter_)
39       return Result(Result::OK);
40     return adapter_->OnEncodedImage(stream_idx_, encoded_image,
41                                     codec_specific_info);
42   }
43 
44  private:
45   MultiplexEncoderAdapter* adapter_;
46   const AlphaCodecStream stream_idx_;
47 };
48 
// Constructs the adapter. |factory| is used later, in InitEncode(), to create
// one sub-encoder per multiplexed stream for the codec described by
// |associated_format|. |supports_augmented_data| enables carrying extra
// per-frame data copied out of AugmentedVideoFrameBuffer in Encode().
MultiplexEncoderAdapter::MultiplexEncoderAdapter(
    VideoEncoderFactory* factory,
    const SdpVideoFormat& associated_format,
    bool supports_augmented_data)
    : factory_(factory),
      associated_format_(associated_format),
      encoded_complete_callback_(nullptr),
      key_frame_interval_(0),
      supports_augmented_data_(supports_augmented_data) {}

// Releases both sub-encoders and all stashed per-frame state on destruction.
MultiplexEncoderAdapter::~MultiplexEncoderAdapter() {
  Release();
}

// FEC controller overrides are not supported by the multiplex adapter; the
// call is deliberately a no-op.
void MultiplexEncoderAdapter::SetFecControllerOverride(
    FecControllerOverride* fec_controller_override) {
  // Ignored.
}

// Creates and initializes one sub-encoder per multiplexed stream (YUV and
// AXX) from |factory_|, running the associated codec instead of
// kVideoCodecMultiplex, and aggregates their EncoderInfo into |encoder_info_|.
// Returns WEBRTC_VIDEO_CODEC_OK, or the first failing sub-encoder's error.
int MultiplexEncoderAdapter::InitEncode(
    const VideoCodec* inst,
    const VideoEncoder::Settings& settings) {
  // Dummy U/V planes handed to the alpha sub-encoder in Encode(); sized for a
  // full I420 frame so the real buffer's strides always fit.
  const size_t buffer_size =
      CalcBufferSize(VideoType::kI420, inst->width, inst->height);
  multiplex_dummy_planes_.resize(buffer_size);
  // It is more expensive to encode 0x00, so use 0x80 instead.
  std::fill(multiplex_dummy_planes_.begin(), multiplex_dummy_planes_.end(),
            0x80);

  RTC_DCHECK_EQ(kVideoCodecMultiplex, inst->codecType);
  // Sub-encoders are configured with the wrapped codec's type.
  VideoCodec video_codec = *inst;
  video_codec.codecType = PayloadStringToCodecType(associated_format_.name);

  // Take over the key frame interval at adapter level, because we have to
  // sync the key frames for both sub-encoders.
  switch (video_codec.codecType) {
    case kVideoCodecVP8:
      key_frame_interval_ = video_codec.VP8()->keyFrameInterval;
      video_codec.VP8()->keyFrameInterval = 0;
      break;
    case kVideoCodecVP9:
      key_frame_interval_ = video_codec.VP9()->keyFrameInterval;
      video_codec.VP9()->keyFrameInterval = 0;
      break;
    case kVideoCodecH264:
      key_frame_interval_ = video_codec.H264()->keyFrameInterval;
      video_codec.H264()->keyFrameInterval = 0;
      break;
    default:
      break;
  }

  encoder_info_ = EncoderInfo();
  encoder_info_.implementation_name = "MultiplexEncoderAdapter (";
  encoder_info_.requested_resolution_alignment = 1;
  encoder_info_.apply_alignment_to_all_simulcast_layers = false;
  // This needs to be false so that we can do the split in Encode().
  encoder_info_.supports_native_handle = false;

  for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
    std::unique_ptr<VideoEncoder> encoder =
        factory_->CreateVideoEncoder(associated_format_);
    const int rv = encoder->InitEncode(&video_codec, settings);
    if (rv) {
      // NOTE(review): sub-encoders created in earlier iterations remain in
      // |encoders_|; presumably callers invoke Release() on failure — verify.
      RTC_LOG(LS_ERROR) << "Failed to create multiplex codec index " << i;
      return rv;
    }
    adapter_callbacks_.emplace_back(new AdapterEncodedImageCallback(
        this, static_cast<AlphaCodecStream>(i)));
    encoder->RegisterEncodeCompleteCallback(adapter_callbacks_.back().get());

    // Builds a name like "MultiplexEncoderAdapter (impl1, impl2)".
    const EncoderInfo& encoder_impl_info = encoder->GetEncoderInfo();
    encoder_info_.implementation_name += encoder_impl_info.implementation_name;
    if (i != kAlphaCodecStreams - 1) {
      encoder_info_.implementation_name += ", ";
    }
    // Uses hardware support if any of the encoders uses it.
    // For example, if we are having issues with down-scaling due to
    // pipelining delay in HW encoders we need higher encoder usage
    // thresholds in CPU adaptation.
    if (i == 0) {
      encoder_info_.is_hardware_accelerated =
          encoder_impl_info.is_hardware_accelerated;
    } else {
      encoder_info_.is_hardware_accelerated |=
          encoder_impl_info.is_hardware_accelerated;
    }

    // Both streams share one alignment satisfying every sub-encoder.
    encoder_info_.requested_resolution_alignment = cricket::LeastCommonMultiple(
        encoder_info_.requested_resolution_alignment,
        encoder_impl_info.requested_resolution_alignment);

    if (encoder_impl_info.apply_alignment_to_all_simulcast_layers) {
      encoder_info_.apply_alignment_to_all_simulcast_layers = true;
    }

    encoder_info_.has_internal_source = false;

    encoders_.emplace_back(std::move(encoder));
  }
  encoder_info_.implementation_name += ")";

  return WEBRTC_VIDEO_CODEC_OK;
}

// Encodes |input_image| with the YUV sub-encoder and, when the frame carries
// an alpha plane (kI420A), also with the AXX sub-encoder. Per-frame metadata
// (expected component count, augmenting data) is stashed under the frame's
// RTP timestamp so OnEncodedImage() can pair the components back up.
int MultiplexEncoderAdapter::Encode(
    const VideoFrame& input_image,
    const std::vector<VideoFrameType>* frame_types) {
  if (!encoded_complete_callback_) {
    return WEBRTC_VIDEO_CODEC_UNINITIALIZED;
  }

  // Key frames are forced at adapter level so both sub-encoders key-frame on
  // the same picture; the caller-supplied |frame_types| is not consulted.
  std::vector<VideoFrameType> adjusted_frame_types;
  if (key_frame_interval_ > 0 && picture_index_ % key_frame_interval_ == 0) {
    adjusted_frame_types.push_back(VideoFrameType::kVideoFrameKey);
  } else {
    adjusted_frame_types.push_back(VideoFrameType::kVideoFrameDelta);
  }
  const bool has_alpha = input_image.video_frame_buffer()->type() ==
                         VideoFrameBuffer::Type::kI420A;
  std::unique_ptr<uint8_t[]> augmenting_data = nullptr;
  uint16_t augmenting_data_length = 0;
  AugmentedVideoFrameBuffer* augmented_video_frame_buffer = nullptr;
  if (supports_augmented_data_) {
    // NOTE(review): unchecked cast — assumes every incoming buffer is an
    // AugmentedVideoFrameBuffer when augmented data is enabled; confirm
    // callers guarantee this.
    augmented_video_frame_buffer = static_cast<AugmentedVideoFrameBuffer*>(
        input_image.video_frame_buffer().get());
    augmenting_data_length =
        augmented_video_frame_buffer->GetAugmentingDataSize();
    // Copy the augmenting data so it outlives the input frame buffer.
    augmenting_data =
        std::unique_ptr<uint8_t[]>(new uint8_t[augmenting_data_length]);
    memcpy(augmenting_data.get(),
           augmented_video_frame_buffer->GetAugmentingData(),
           augmenting_data_length);
    augmenting_data_size_ = augmenting_data_length;
  }

  // Stash bookkeeping keyed by RTP timestamp; component count is 1 when
  // there is no alpha channel.
  {
    MutexLock lock(&mutex_);
    stashed_images_.emplace(
        std::piecewise_construct,
        std::forward_as_tuple(input_image.timestamp()),
        std::forward_as_tuple(
            picture_index_, has_alpha ? kAlphaCodecStreams : 1,
            std::move(augmenting_data), augmenting_data_length));
  }

  ++picture_index_;

  // Encode YUV
  int rv = encoders_[kYUVStream]->Encode(input_image, &adjusted_frame_types);

  // If we do not receive an alpha frame, we send a single frame for this
  // |picture_index_|. The receiver will receive |frame_count| as 1 which
  // specifies this case.
  if (rv || !has_alpha)
    return rv;

  // Encode AXX: the alpha plane is wrapped as the Y plane of a synthetic
  // I420 frame whose U/V planes point at |multiplex_dummy_planes_|.
  // KeepRefUntilDone keeps the source buffer (owner of the A plane) alive for
  // the wrapper's lifetime.
  const I420ABufferInterface* yuva_buffer =
      supports_augmented_data_
          ? augmented_video_frame_buffer->GetVideoFrameBuffer()->GetI420A()
          : input_image.video_frame_buffer()->GetI420A();
  rtc::scoped_refptr<I420BufferInterface> alpha_buffer =
      WrapI420Buffer(input_image.width(), input_image.height(),
                     yuva_buffer->DataA(), yuva_buffer->StrideA(),
                     multiplex_dummy_planes_.data(), yuva_buffer->StrideU(),
                     multiplex_dummy_planes_.data(), yuva_buffer->StrideV(),
                     rtc::KeepRefUntilDone(input_image.video_frame_buffer()));
  VideoFrame alpha_image = VideoFrame::Builder()
                               .set_video_frame_buffer(alpha_buffer)
                               .set_timestamp_rtp(input_image.timestamp())
                               .set_timestamp_ms(input_image.render_time_ms())
                               .set_rotation(input_image.rotation())
                               .set_id(input_image.id())
                               .set_packet_infos(input_image.packet_infos())
                               .build();
  rv = encoders_[kAXXStream]->Encode(alpha_image, &adjusted_frame_types);
  return rv;
}

// Stores the callback that will receive packed multiplex images produced in
// OnEncodedImage(). Always succeeds.
int MultiplexEncoderAdapter::RegisterEncodeCompleteCallback(
    EncodedImageCallback* callback) {
  encoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

// Forwards the rate allocation to every sub-encoder, after carving out
// |augmenting_data_size_| bits/s from the base-layer bitrate and the total
// bandwidth to leave room for the augmenting-data payload.
// NOTE(review): both subtractions are unsigned and unclamped; an allocation
// smaller than |augmenting_data_size_| would wrap around — confirm callers
// always allocate more than the augmenting-data rate.
void MultiplexEncoderAdapter::SetRates(
    const RateControlParameters& parameters) {
  VideoBitrateAllocation bitrate_allocation(parameters.bitrate);
  bitrate_allocation.SetBitrate(
      0, 0, parameters.bitrate.GetBitrate(0, 0) - augmenting_data_size_);
  for (auto& encoder : encoders_) {
    // TODO(emircan): |framerate| is used to calculate duration in encoder
    // instances. We report the total frame rate to keep real time for now.
    // Remove this after refactoring duration logic.
    encoder->SetRates(RateControlParameters(
        bitrate_allocation,
        static_cast<uint32_t>(encoders_.size() * parameters.framerate_fps),
        parameters.bandwidth_allocation -
            DataRate::BitsPerSec(augmenting_data_size_)));
  }
}

OnPacketLossRateUpdate(float packet_loss_rate)252 void MultiplexEncoderAdapter::OnPacketLossRateUpdate(float packet_loss_rate) {
253   for (auto& encoder : encoders_) {
254     encoder->OnPacketLossRateUpdate(packet_loss_rate);
255   }
256 }
257 
OnRttUpdate(int64_t rtt_ms)258 void MultiplexEncoderAdapter::OnRttUpdate(int64_t rtt_ms) {
259   for (auto& encoder : encoders_) {
260     encoder->OnRttUpdate(rtt_ms);
261   }
262 }
263 
OnLossNotification(const LossNotification & loss_notification)264 void MultiplexEncoderAdapter::OnLossNotification(
265     const LossNotification& loss_notification) {
266   for (auto& encoder : encoders_) {
267     encoder->OnLossNotification(loss_notification);
268   }
269 }
270 
Release()271 int MultiplexEncoderAdapter::Release() {
272   for (auto& encoder : encoders_) {
273     const int rv = encoder->Release();
274     if (rv)
275       return rv;
276   }
277   encoders_.clear();
278   adapter_callbacks_.clear();
279   MutexLock lock(&mutex_);
280   stashed_images_.clear();
281 
282   return WEBRTC_VIDEO_CODEC_OK;
283 }
284 
// Returns the aggregated encoder info computed in InitEncode().
VideoEncoder::EncoderInfo MultiplexEncoderAdapter::GetEncoderInfo() const {
  return encoder_info_;
}

OnEncodedImage(AlphaCodecStream stream_idx,const EncodedImage & encodedImage,const CodecSpecificInfo * codecSpecificInfo)289 EncodedImageCallback::Result MultiplexEncoderAdapter::OnEncodedImage(
290     AlphaCodecStream stream_idx,
291     const EncodedImage& encodedImage,
292     const CodecSpecificInfo* codecSpecificInfo) {
293   // Save the image
294   MultiplexImageComponent image_component;
295   image_component.component_index = stream_idx;
296   image_component.codec_type =
297       PayloadStringToCodecType(associated_format_.name);
298   image_component.encoded_image = encodedImage;
299 
300   // If we don't already own the buffer, make a copy.
301   image_component.encoded_image.Retain();
302 
303   MutexLock lock(&mutex_);
304   const auto& stashed_image_itr =
305       stashed_images_.find(encodedImage.Timestamp());
306   const auto& stashed_image_next_itr = std::next(stashed_image_itr, 1);
307   RTC_DCHECK(stashed_image_itr != stashed_images_.end());
308   MultiplexImage& stashed_image = stashed_image_itr->second;
309   const uint8_t frame_count = stashed_image.component_count;
310 
311   stashed_image.image_components.push_back(image_component);
312 
313   if (stashed_image.image_components.size() == frame_count) {
314     // Complete case
315     for (auto iter = stashed_images_.begin();
316          iter != stashed_images_.end() && iter != stashed_image_next_itr;
317          iter++) {
318       // No image at all, skip.
319       if (iter->second.image_components.size() == 0)
320         continue;
321 
322       // We have to send out those stashed frames, otherwise the delta frame
323       // dependency chain is broken.
324       combined_image_ =
325           MultiplexEncodedImagePacker::PackAndRelease(iter->second);
326 
327       CodecSpecificInfo codec_info = *codecSpecificInfo;
328       codec_info.codecType = kVideoCodecMultiplex;
329       encoded_complete_callback_->OnEncodedImage(combined_image_, &codec_info);
330     }
331 
332     stashed_images_.erase(stashed_images_.begin(), stashed_image_next_itr);
333   }
334   return EncodedImageCallback::Result(EncodedImageCallback::Result::OK);
335 }
}  // namespace webrtc