1 /*
2 * Copyright (c) 2017 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include <stddef.h>
12
13 #include <cstdint>
14 #include <memory>
15 #include <utility>
16 #include <vector>
17
18 #include "absl/types/optional.h"
19 #include "api/scoped_refptr.h"
20 #include "api/test/mock_video_decoder_factory.h"
21 #include "api/test/mock_video_encoder_factory.h"
22 #include "api/video/encoded_image.h"
23 #include "api/video/video_frame.h"
24 #include "api/video/video_frame_buffer.h"
25 #include "api/video/video_rotation.h"
26 #include "api/video_codecs/sdp_video_format.h"
27 #include "api/video_codecs/video_codec.h"
28 #include "api/video_codecs/video_decoder.h"
29 #include "api/video_codecs/video_encoder.h"
30 #include "common_video/include/video_frame_buffer.h"
31 #include "common_video/libyuv/include/webrtc_libyuv.h"
32 #include "media/base/media_constants.h"
33 #include "modules/video_coding/codecs/multiplex/include/augmented_video_frame_buffer.h"
34 #include "modules/video_coding/codecs/multiplex/include/multiplex_decoder_adapter.h"
35 #include "modules/video_coding/codecs/multiplex/include/multiplex_encoder_adapter.h"
36 #include "modules/video_coding/codecs/multiplex/multiplex_encoded_image_packer.h"
37 #include "modules/video_coding/codecs/test/video_codec_unittest.h"
38 #include "modules/video_coding/codecs/vp9/include/vp9.h"
39 #include "modules/video_coding/include/video_codec_interface.h"
40 #include "modules/video_coding/include/video_error_codes.h"
41 #include "rtc_base/keep_ref_until_done.h"
42 #include "rtc_base/ref_counted_object.h"
43 #include "test/gmock.h"
44 #include "test/gtest.h"
45 #include "test/video_codec_settings.h"
46
47 using ::testing::_;
48 using ::testing::Return;
49
50 namespace webrtc {
51
// The multiplex codec is an adapter wrapped around a regular video codec;
// these tests pair it with VP9 as the associated (wrapped) codec.
constexpr const char* kMultiplexAssociatedCodecName = cricket::kVp9CodecName;
const VideoCodecType kMultiplexAssociatedCodecType =
    PayloadStringToCodecType(kMultiplexAssociatedCodecName);
55
56 class TestMultiplexAdapter : public VideoCodecUnitTest,
57 public ::testing::WithParamInterface<
58 bool /* supports_augmenting_data */> {
59 public:
TestMultiplexAdapter()60 TestMultiplexAdapter()
61 : decoder_factory_(new webrtc::MockVideoDecoderFactory),
62 encoder_factory_(new webrtc::MockVideoEncoderFactory),
63 supports_augmenting_data_(GetParam()) {}
64
65 protected:
CreateDecoder()66 std::unique_ptr<VideoDecoder> CreateDecoder() override {
67 return std::make_unique<MultiplexDecoderAdapter>(
68 decoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName),
69 supports_augmenting_data_);
70 }
71
CreateEncoder()72 std::unique_ptr<VideoEncoder> CreateEncoder() override {
73 return std::make_unique<MultiplexEncoderAdapter>(
74 encoder_factory_.get(), SdpVideoFormat(kMultiplexAssociatedCodecName),
75 supports_augmenting_data_);
76 }
77
ModifyCodecSettings(VideoCodec * codec_settings)78 void ModifyCodecSettings(VideoCodec* codec_settings) override {
79 webrtc::test::CodecSettings(kMultiplexAssociatedCodecType, codec_settings);
80 codec_settings->VP9()->numberOfTemporalLayers = 1;
81 codec_settings->VP9()->numberOfSpatialLayers = 1;
82 codec_settings->codecType = webrtc::kVideoCodecMultiplex;
83 }
84
CreateDataAugmentedInputFrame(VideoFrame * video_frame)85 std::unique_ptr<VideoFrame> CreateDataAugmentedInputFrame(
86 VideoFrame* video_frame) {
87 rtc::scoped_refptr<VideoFrameBuffer> video_buffer =
88 video_frame->video_frame_buffer();
89 std::unique_ptr<uint8_t[]> data =
90 std::unique_ptr<uint8_t[]>(new uint8_t[16]);
91 for (int i = 0; i < 16; i++) {
92 data[i] = i;
93 }
94 rtc::scoped_refptr<AugmentedVideoFrameBuffer> augmented_video_frame_buffer =
95 new rtc::RefCountedObject<AugmentedVideoFrameBuffer>(
96 video_buffer, std::move(data), 16);
97 return std::make_unique<VideoFrame>(
98 VideoFrame::Builder()
99 .set_video_frame_buffer(augmented_video_frame_buffer)
100 .set_timestamp_rtp(video_frame->timestamp())
101 .set_timestamp_ms(video_frame->render_time_ms())
102 .set_rotation(video_frame->rotation())
103 .set_id(video_frame->id())
104 .build());
105 }
106
CreateI420AInputFrame()107 std::unique_ptr<VideoFrame> CreateI420AInputFrame() {
108 VideoFrame input_frame = NextInputFrame();
109 rtc::scoped_refptr<webrtc::I420BufferInterface> yuv_buffer =
110 input_frame.video_frame_buffer()->ToI420();
111 rtc::scoped_refptr<I420ABufferInterface> yuva_buffer = WrapI420ABuffer(
112 yuv_buffer->width(), yuv_buffer->height(), yuv_buffer->DataY(),
113 yuv_buffer->StrideY(), yuv_buffer->DataU(), yuv_buffer->StrideU(),
114 yuv_buffer->DataV(), yuv_buffer->StrideV(), yuv_buffer->DataY(),
115 yuv_buffer->StrideY(), rtc::KeepRefUntilDone(yuv_buffer));
116 return std::make_unique<VideoFrame>(VideoFrame::Builder()
117 .set_video_frame_buffer(yuva_buffer)
118 .set_timestamp_rtp(123)
119 .set_timestamp_ms(345)
120 .set_rotation(kVideoRotation_0)
121 .build());
122 }
123
CreateInputFrame(bool contains_alpha)124 std::unique_ptr<VideoFrame> CreateInputFrame(bool contains_alpha) {
125 std::unique_ptr<VideoFrame> video_frame;
126 if (contains_alpha) {
127 video_frame = CreateI420AInputFrame();
128 } else {
129 VideoFrame next_frame = NextInputFrame();
130 video_frame = std::make_unique<VideoFrame>(
131 VideoFrame::Builder()
132 .set_video_frame_buffer(next_frame.video_frame_buffer())
133 .set_timestamp_rtp(next_frame.timestamp())
134 .set_timestamp_ms(next_frame.render_time_ms())
135 .set_rotation(next_frame.rotation())
136 .set_id(next_frame.id())
137 .build());
138 }
139 if (supports_augmenting_data_) {
140 video_frame = CreateDataAugmentedInputFrame(video_frame.get());
141 }
142
143 return video_frame;
144 }
145
CheckData(rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer)146 void CheckData(rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer) {
147 if (!supports_augmenting_data_) {
148 return;
149 }
150 AugmentedVideoFrameBuffer* augmented_buffer =
151 static_cast<AugmentedVideoFrameBuffer*>(video_frame_buffer.get());
152 EXPECT_EQ(augmented_buffer->GetAugmentingDataSize(), 16);
153 uint8_t* data = augmented_buffer->GetAugmentingData();
154 for (int i = 0; i < 16; i++) {
155 EXPECT_EQ(data[i], i);
156 }
157 }
158
ExtractAXXFrame(const VideoFrame & video_frame)159 std::unique_ptr<VideoFrame> ExtractAXXFrame(const VideoFrame& video_frame) {
160 rtc::scoped_refptr<VideoFrameBuffer> video_frame_buffer =
161 video_frame.video_frame_buffer();
162 if (supports_augmenting_data_) {
163 AugmentedVideoFrameBuffer* augmentedBuffer =
164 static_cast<AugmentedVideoFrameBuffer*>(video_frame_buffer.get());
165 video_frame_buffer = augmentedBuffer->GetVideoFrameBuffer();
166 }
167 const I420ABufferInterface* yuva_buffer = video_frame_buffer->GetI420A();
168 rtc::scoped_refptr<I420BufferInterface> axx_buffer = WrapI420Buffer(
169 yuva_buffer->width(), yuva_buffer->height(), yuva_buffer->DataA(),
170 yuva_buffer->StrideA(), yuva_buffer->DataU(), yuva_buffer->StrideU(),
171 yuva_buffer->DataV(), yuva_buffer->StrideV(),
172 rtc::KeepRefUntilDone(video_frame_buffer));
173 return std::make_unique<VideoFrame>(VideoFrame::Builder()
174 .set_video_frame_buffer(axx_buffer)
175 .set_timestamp_rtp(123)
176 .set_timestamp_ms(345)
177 .set_rotation(kVideoRotation_0)
178 .build());
179 }
180
181 private:
SetUp()182 void SetUp() override {
183 EXPECT_CALL(*decoder_factory_, Die);
184 // The decoders/encoders will be owned by the caller of
185 // CreateVideoDecoder()/CreateVideoEncoder().
186 EXPECT_CALL(*decoder_factory_, CreateVideoDecoder)
187 .Times(2)
188 .WillRepeatedly([] { return VP9Decoder::Create(); });
189
190 EXPECT_CALL(*encoder_factory_, Die);
191 EXPECT_CALL(*encoder_factory_, CreateVideoEncoder)
192 .Times(2)
193 .WillRepeatedly([] { return VP9Encoder::Create(); });
194
195 VideoCodecUnitTest::SetUp();
196 }
197
198 const std::unique_ptr<webrtc::MockVideoDecoderFactory> decoder_factory_;
199 const std::unique_ptr<webrtc::MockVideoEncoderFactory> encoder_factory_;
200 const bool supports_augmenting_data_;
201 };
202
203 // TODO(emircan): Currently VideoCodecUnitTest tests do a complete setup
204 // step that goes beyond constructing |decoder_|. Simplify these tests to do
205 // less.
TEST_P(TestMultiplexAdapter, ConstructAndDestructDecoder) {
  // Releasing the freshly constructed decoder must succeed.
  const int32_t release_result = decoder_->Release();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, release_result);
}
209
TEST_P(TestMultiplexAdapter, ConstructAndDestructEncoder) {
  // Releasing the freshly constructed encoder must succeed.
  const int32_t release_result = encoder_->Release();
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, release_result);
}
213
TEST_P(TestMultiplexAdapter, EncodeDecodeI420Frame) {
  // Encode one plain (no-alpha) frame.
  std::unique_ptr<VideoFrame> frame = CreateInputFrame(false);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*frame, nullptr));

  EncodedImage encoded;
  CodecSpecificInfo specific_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific_info));
  EXPECT_EQ(kVideoCodecMultiplex, specific_info.codecType);

  // Decode it back and check quality plus augmenting data (if enabled).
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded, false, -1));
  std::unique_ptr<VideoFrame> decoded;
  absl::optional<uint8_t> qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
  ASSERT_TRUE(decoded);
  EXPECT_GT(I420PSNR(frame.get(), decoded.get()), 36);
  CheckData(decoded->video_frame_buffer());
}
230
TEST_P(TestMultiplexAdapter, EncodeDecodeI420AFrame) {
  // Encode one frame that carries an alpha plane.
  std::unique_ptr<VideoFrame> alpha_frame = CreateInputFrame(true);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*alpha_frame, nullptr));

  EncodedImage encoded;
  CodecSpecificInfo specific_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific_info));
  EXPECT_EQ(kVideoCodecMultiplex, specific_info.codecType);

  // Decode it back and check the YUV quality.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded, false, 0));
  std::unique_ptr<VideoFrame> decoded;
  absl::optional<uint8_t> qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded, &qp));
  ASSERT_TRUE(decoded);
  EXPECT_GT(I420PSNR(alpha_frame.get(), decoded.get()), 36);

  // Find PSNR for AXX bits.
  std::unique_ptr<VideoFrame> axx_in = ExtractAXXFrame(*alpha_frame);
  std::unique_ptr<VideoFrame> axx_out = ExtractAXXFrame(*decoded);
  EXPECT_GT(I420PSNR(axx_in.get(), axx_out.get()), 47);

  CheckData(decoded->video_frame_buffer());
}
254
TEST_P(TestMultiplexAdapter, CheckSingleFrameEncodedBitstream) {
  // A frame without alpha must produce a single-component multiplex image.
  std::unique_ptr<VideoFrame> frame = CreateInputFrame(false);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*frame, nullptr));

  EncodedImage encoded;
  CodecSpecificInfo specific_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific_info));
  EXPECT_EQ(kVideoCodecMultiplex, specific_info.codecType);
  EXPECT_FALSE(encoded.SpatialIndex());

  const MultiplexImage& unpacked = MultiplexEncodedImagePacker::Unpack(encoded);
  EXPECT_EQ(0, unpacked.image_index);
  EXPECT_EQ(1, unpacked.component_count);

  const MultiplexImageComponent& only_component = unpacked.image_components[0];
  EXPECT_EQ(0, only_component.component_index);
  EXPECT_NE(nullptr, only_component.encoded_image.data());
  EXPECT_EQ(VideoFrameType::kVideoFrameKey,
            only_component.encoded_image._frameType);
}
273
TEST_P(TestMultiplexAdapter, CheckDoubleFramesEncodedBitstream) {
  // A frame with alpha must produce two components (YUV + AXX), both keyframes.
  std::unique_ptr<VideoFrame> alpha_frame = CreateInputFrame(true);
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*alpha_frame, nullptr));

  EncodedImage encoded;
  CodecSpecificInfo specific_info;
  ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific_info));
  EXPECT_EQ(kVideoCodecMultiplex, specific_info.codecType);
  EXPECT_FALSE(encoded.SpatialIndex());

  const MultiplexImage& unpacked = MultiplexEncodedImagePacker::Unpack(encoded);
  EXPECT_EQ(0, unpacked.image_index);
  EXPECT_EQ(2, unpacked.component_count);
  EXPECT_EQ(unpacked.image_components.size(), unpacked.component_count);

  for (int idx = 0; idx < unpacked.component_count; ++idx) {
    const MultiplexImageComponent& part = unpacked.image_components[idx];
    EXPECT_EQ(idx, part.component_index);
    EXPECT_NE(nullptr, part.encoded_image.data());
    EXPECT_EQ(VideoFrameType::kVideoFrameKey, part.encoded_image._frameType);
  }
}
298
TEST_P(TestMultiplexAdapter, ImageIndexIncreases) {
  // Encoding the same frame repeatedly: image_index must count up, and only
  // the first output is a keyframe.
  std::unique_ptr<VideoFrame> alpha_frame = CreateInputFrame(true);
  const size_t kNumFramesToEncode = 3;
  for (size_t idx = 0; idx < kNumFramesToEncode; ++idx) {
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Encode(*alpha_frame, nullptr));
    EncodedImage encoded;
    CodecSpecificInfo specific_info;
    ASSERT_TRUE(WaitForEncodedFrame(&encoded, &specific_info));
    const MultiplexImage& unpacked =
        MultiplexEncodedImagePacker::Unpack(encoded);
    EXPECT_EQ(idx, unpacked.image_index);
    const VideoFrameType expected_type =
        idx == 0 ? VideoFrameType::kVideoFrameKey
                 : VideoFrameType::kVideoFrameDelta;
    EXPECT_EQ(expected_type, encoded._frameType);
  }
}
315
// Run every test above twice: once with augmenting-data support disabled and
// once with it enabled (the bool parameter of TestMultiplexAdapter).
INSTANTIATE_TEST_SUITE_P(TestMultiplexAdapter,
                         TestMultiplexAdapter,
                         ::testing::Bool());
319
320 } // namespace webrtc
321