/*
 *  Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"

#include "api/video/encoded_image.h"
#include "api/video/i420_buffer.h"
#include "api/video/video_frame_buffer.h"
#include "common_video/include/video_frame_buffer.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
#include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
#include "rtc_base/keep_ref_until_done.h"
#include "rtc_base/logging.h"

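// Bound (together with the two buffer refs) into the release callback passed
// to WrapI420ABuffer() below, so the source Y'UV and alpha buffers stay alive
// for as long as the wrapped I420A buffer is in use.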
namespace {
void KeepBufferRefs(rtc::scoped_refptr<webrtc::VideoFrameBuffer>,
                    rtc::scoped_refptr<webrtc::VideoFrameBuffer>) {}
}  // anonymous namespace

namespace webrtc {

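// Receives decoded frames from one of the wrapped decoders and forwards them
// to the owning MultiplexDecoderAdapter together with the stream index
// (Y'UV or alpha) that produced them.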
class MultiplexDecoderAdapter::AdapterDecodedImageCallback
    : public webrtc::DecodedImageCallback {
 public:
  AdapterDecodedImageCallback(webrtc::MultiplexDecoderAdapter* adapter,
                              AlphaCodecStream stream_idx)
      : adapter_(adapter), stream_idx_(stream_idx) {}

  void Decoded(VideoFrame& decoded_image,
               absl::optional<int32_t> decode_time_ms,
               absl::optional<uint8_t> qp) override {
    if (!adapter_)
      return;
    adapter_->Decoded(stream_idx_, &decoded_image, decode_time_ms, qp);
  }
  int32_t Decoded(VideoFrame& decoded_image) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }
  int32_t Decoded(VideoFrame& decoded_image, int64_t decode_time_ms) override {
    RTC_NOTREACHED();
    return WEBRTC_VIDEO_CODEC_OK;
  }

 private:
  MultiplexDecoderAdapter* adapter_;
  const AlphaCodecStream stream_idx_;
};

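// Holds the decoded frame of one stream while the frame of the other stream
// with the same RTP timestamp is still being decoded. The single-argument
// constructor builds a 1x1 placeholder entry for the alpha stream, used when
// a frame carries no alpha component.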
struct MultiplexDecoderAdapter::DecodedImageData {
  explicit DecodedImageData(AlphaCodecStream stream_idx)
      : stream_idx_(stream_idx),
        decoded_image_(
            VideoFrame::Builder()
                .set_video_frame_buffer(
                    I420Buffer::Create(1 /* width */, 1 /* height */))
                .set_timestamp_rtp(0)
                .set_timestamp_us(0)
                .set_rotation(kVideoRotation_0)
                .build()) {
    RTC_DCHECK_EQ(kAXXStream, stream_idx);
  }
  DecodedImageData(AlphaCodecStream stream_idx,
                   const VideoFrame& decoded_image,
                   const absl::optional<int32_t>& decode_time_ms,
                   const absl::optional<uint8_t>& qp)
      : stream_idx_(stream_idx),
        decoded_image_(decoded_image),
        decode_time_ms_(decode_time_ms),
        qp_(qp) {}

  DecodedImageData() = delete;
  DecodedImageData(const DecodedImageData&) = delete;
  DecodedImageData& operator=(const DecodedImageData&) = delete;

  const AlphaCodecStream stream_idx_;
  VideoFrame decoded_image_;
  const absl::optional<int32_t> decode_time_ms_;
  const absl::optional<uint8_t> qp_;
};

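// Owns the augmenting-data bytes of a frame, stashed per RTP timestamp in
// |decoded_augmenting_data_| until the decoded streams are merged.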
struct MultiplexDecoderAdapter::AugmentingData {
  AugmentingData(std::unique_ptr<uint8_t[]> augmenting_data, uint16_t data_size)
      : data_(std::move(augmenting_data)), size_(data_size) {}
  AugmentingData() = delete;
  AugmentingData(const AugmentingData&) = delete;
  AugmentingData& operator=(const AugmentingData&) = delete;

  std::unique_ptr<uint8_t[]> data_;
  const uint16_t size_;
};

MultiplexDecoderAdapter::MultiplexDecoderAdapter(
    VideoDecoderFactory* factory,
    const SdpVideoFormat& associated_format,
    bool supports_augmenting_data)
    : factory_(factory),
      associated_format_(associated_format),
      supports_augmenting_data_(supports_augmenting_data) {}

MultiplexDecoderAdapter::~MultiplexDecoderAdapter() {
  Release();
}

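// Creates and initializes one underlying decoder per stream (Y'UV and alpha),
// both using the associated codec format, and wires each decoder's decode
// callback back into this adapter.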
int32_t MultiplexDecoderAdapter::InitDecode(const VideoCodec* codec_settings,
                                            int32_t number_of_cores) {
  RTC_DCHECK_EQ(kVideoCodecMultiplex, codec_settings->codecType);
  VideoCodec settings = *codec_settings;
  settings.codecType = PayloadStringToCodecType(associated_format_.name);
  for (size_t i = 0; i < kAlphaCodecStreams; ++i) {
    std::unique_ptr<VideoDecoder> decoder =
        factory_->CreateVideoDecoder(associated_format_);
    const int32_t rv = decoder->InitDecode(&settings, number_of_cores);
    if (rv)
      return rv;
    adapter_callbacks_.emplace_back(
        new MultiplexDecoderAdapter::AdapterDecodedImageCallback(
            this, static_cast<AlphaCodecStream>(i)));
    decoder->RegisterDecodeCompleteCallback(adapter_callbacks_.back().get());
    decoders_.emplace_back(std::move(decoder));
  }
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MultiplexDecoderAdapter::Decode(const EncodedImage& input_image,
                                        bool missing_frames,
                                        int64_t render_time_ms) {
  MultiplexImage image = MultiplexEncodedImagePacker::Unpack(input_image);

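  // Detach the augmenting data from the unpacked image and stash it, keyed by
  // RTP timestamp, so it can be attached to the merged frame later.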
  if (supports_augmenting_data_) {
    RTC_DCHECK(decoded_augmenting_data_.find(input_image.Timestamp()) ==
               decoded_augmenting_data_.end());
    decoded_augmenting_data_.emplace(
        std::piecewise_construct,
        std::forward_as_tuple(input_image.Timestamp()),
        std::forward_as_tuple(std::move(image.augmenting_data),
                              image.augmenting_data_size));
  }

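  // A single-component image carries no alpha stream. Pre-populate the pending
  // data with a placeholder alpha entry so the Y'UV frame is merged and
  // delivered as soon as it is decoded, without waiting for an alpha frame.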
  if (image.component_count == 1) {
    RTC_DCHECK(decoded_data_.find(input_image.Timestamp()) ==
               decoded_data_.end());
    decoded_data_.emplace(std::piecewise_construct,
                          std::forward_as_tuple(input_image.Timestamp()),
                          std::forward_as_tuple(kAXXStream));
  }
  int32_t rv = 0;
  for (size_t i = 0; i < image.image_components.size(); i++) {
    rv = decoders_[image.image_components[i].component_index]->Decode(
        image.image_components[i].encoded_image, missing_frames,
        render_time_ms);
    if (rv != WEBRTC_VIDEO_CODEC_OK)
      return rv;
  }
  return rv;
}

int32_t MultiplexDecoderAdapter::RegisterDecodeCompleteCallback(
    DecodedImageCallback* callback) {
  decoded_complete_callback_ = callback;
  return WEBRTC_VIDEO_CODEC_OK;
}

int32_t MultiplexDecoderAdapter::Release() {
  for (auto& decoder : decoders_) {
    const int32_t rv = decoder->Release();
    if (rv)
      return rv;
  }
  decoders_.clear();
  adapter_callbacks_.clear();
  return WEBRTC_VIDEO_CODEC_OK;
}

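// Called by AdapterDecodedImageCallback whenever one of the wrapped decoders
// produces a frame. If the frame from the other stream with the same RTP
// timestamp has already arrived (or a placeholder was stored for it), the two
// are merged and delivered; otherwise this frame is stored until its
// counterpart shows up.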
void MultiplexDecoderAdapter::Decoded(AlphaCodecStream stream_idx,
                                      VideoFrame* decoded_image,
                                      absl::optional<int32_t> decode_time_ms,
                                      absl::optional<uint8_t> qp) {
  const auto& other_decoded_data_it =
      decoded_data_.find(decoded_image->timestamp());
  const auto& augmenting_data_it =
      decoded_augmenting_data_.find(decoded_image->timestamp());
  const bool has_augmenting_data =
      augmenting_data_it != decoded_augmenting_data_.end();
  if (other_decoded_data_it != decoded_data_.end()) {
    uint16_t augmenting_data_size =
        has_augmenting_data ? augmenting_data_it->second.size_ : 0;
    std::unique_ptr<uint8_t[]> augmenting_data =
        has_augmenting_data ? std::move(augmenting_data_it->second.data_)
                            : nullptr;
    auto& other_image_data = other_decoded_data_it->second;
    if (stream_idx == kYUVStream) {
      RTC_DCHECK_EQ(kAXXStream, other_image_data.stream_idx_);
      MergeAlphaImages(decoded_image, decode_time_ms, qp,
                       &other_image_data.decoded_image_,
                       other_image_data.decode_time_ms_, other_image_data.qp_,
                       std::move(augmenting_data), augmenting_data_size);
    } else {
      RTC_DCHECK_EQ(kYUVStream, other_image_data.stream_idx_);
      RTC_DCHECK_EQ(kAXXStream, stream_idx);
      MergeAlphaImages(&other_image_data.decoded_image_,
                       other_image_data.decode_time_ms_, other_image_data.qp_,
                       decoded_image, decode_time_ms, qp,
                       std::move(augmenting_data), augmenting_data_size);
    }
    decoded_data_.erase(decoded_data_.begin(), other_decoded_data_it);
    if (has_augmenting_data) {
      decoded_augmenting_data_.erase(decoded_augmenting_data_.begin(),
                                     augmenting_data_it);
    }
    return;
  }
  RTC_DCHECK(decoded_data_.find(decoded_image->timestamp()) ==
             decoded_data_.end());
  decoded_data_.emplace(
      std::piecewise_construct,
      std::forward_as_tuple(decoded_image->timestamp()),
      std::forward_as_tuple(stream_idx, *decoded_image, decode_time_ms, qp));
}

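// Combines a decoded Y'UV frame and its decoded alpha frame into a single
// I420A frame (plus any augmenting data) and hands the result to the
// registered decode-complete callback.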
void MultiplexDecoderAdapter::MergeAlphaImages(
    VideoFrame* decoded_image,
    const absl::optional<int32_t>& decode_time_ms,
    const absl::optional<uint8_t>& qp,
    VideoFrame* alpha_decoded_image,
    const absl::optional<int32_t>& alpha_decode_time_ms,
    const absl::optional<uint8_t>& alpha_qp,
    std::unique_ptr<uint8_t[]> augmenting_data,
    uint16_t augmenting_data_length) {
  rtc::scoped_refptr<VideoFrameBuffer> merged_buffer;
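  // An RTP timestamp of 0 marks the 1x1 placeholder created for frames without
  // an alpha component; in that case pass the Y'UV buffer through unchanged.
  // Otherwise wrap the Y'UV planes and the alpha plane (the Y plane of the
  // alpha frame) into an I420A buffer, keeping both source buffers alive via
  // KeepBufferRefs.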
  if (!alpha_decoded_image->timestamp()) {
    merged_buffer = decoded_image->video_frame_buffer();
  } else {
    rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
        decoded_image->video_frame_buffer()->ToI420();
    rtc::scoped_refptr<webrtc::I420BufferInterface> alpha_buffer =
        alpha_decoded_image->video_frame_buffer()->ToI420();
    RTC_DCHECK_EQ(yuv_buffer->width(), alpha_buffer->width());
    RTC_DCHECK_EQ(yuv_buffer->height(), alpha_buffer->height());
    merged_buffer = WrapI420ABuffer(
        yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
        yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
        yuv_buffer->DataV(), yuv_buffer->StrideV(), alpha_buffer->DataY(),
        alpha_buffer->StrideY(),
        rtc::Bind(&KeepBufferRefs, yuv_buffer, alpha_buffer));
  }
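  // When augmenting data is in use, wrap the merged buffer once more so the
  // data travels with the frame to the receiver of the decoded output.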
  if (supports_augmenting_data_) {
    merged_buffer = rtc::scoped_refptr<webrtc::AugmentedVideoFrameBuffer>(
        new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
            merged_buffer, std::move(augmenting_data), augmenting_data_length));
  }

  VideoFrame merged_image = VideoFrame::Builder()
                                .set_video_frame_buffer(merged_buffer)
                                .set_timestamp_rtp(decoded_image->timestamp())
                                .set_timestamp_us(0)
                                .set_rotation(decoded_image->rotation())
                                .set_id(decoded_image->id())
                                .set_packet_infos(decoded_image->packet_infos())
                                .build();
  decoded_complete_callback_->Decoded(merged_image, decode_time_ms, qp);
}

}  // namespace webrtc