1 /*
2 * Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
3 *
4 * Use of this source code is governed by a BSD-style license
5 * that can be found in the LICENSE file in the root of the source
6 * tree. An additional intellectual property rights grant can be found
7 * in the file PATENTS. All contributing project authors may
8 * be found in the AUTHORS file in the root of the source tree.
9 */
10
11 #include <stdio.h>
12
13 #include <memory>
14
15 #include "api/test/create_frame_generator.h"
16 #include "api/test/frame_generator_interface.h"
17 #include "api/test/mock_video_decoder.h"
18 #include "api/test/mock_video_encoder.h"
19 #include "api/video_codecs/video_encoder.h"
20 #include "api/video_codecs/vp8_temporal_layers.h"
21 #include "common_video/libyuv/include/webrtc_libyuv.h"
22 #include "common_video/test/utilities.h"
23 #include "modules/video_coding/codecs/test/video_codec_unittest.h"
24 #include "modules/video_coding/codecs/vp8/include/vp8.h"
25 #include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
26 #include "modules/video_coding/codecs/vp8/test/mock_libvpx_interface.h"
27 #include "modules/video_coding/utility/vp8_header_parser.h"
28 #include "rtc_base/time_utils.h"
29 #include "test/field_trial.h"
30 #include "test/video_codec_settings.h"
31
32 namespace webrtc {
33
34 using ::testing::_;
35 using ::testing::AllOf;
36 using ::testing::ElementsAreArray;
37 using ::testing::Field;
38 using ::testing::Invoke;
39 using ::testing::NiceMock;
40 using ::testing::Return;
41 using EncoderInfo = webrtc::VideoEncoder::EncoderInfo;
42 using FramerateFractions =
43 absl::InlinedVector<uint8_t, webrtc::kMaxTemporalStreams>;
44
45 namespace {
46 constexpr uint32_t kLegacyScreenshareTl0BitrateKbps = 200;
47 constexpr uint32_t kLegacyScreenshareTl1BitrateKbps = 1000;
48 constexpr uint32_t kInitialTimestampRtp = 123;
49 constexpr int64_t kTestNtpTimeMs = 456;
50 constexpr int64_t kInitialTimestampMs = 789;
51 constexpr int kNumCores = 1;
52 constexpr size_t kMaxPayloadSize = 1440;
53 constexpr int kWidth = 172;
54 constexpr int kHeight = 144;
55 constexpr float kFramerateFps = 30;
56
57 const VideoEncoder::Capabilities kCapabilities(false);
58 const VideoEncoder::Settings kSettings(kCapabilities,
59 kNumCores,
60 kMaxPayloadSize);
61 } // namespace
62
63 class TestVp8Impl : public VideoCodecUnitTest {
64 protected:
CreateEncoder()65 std::unique_ptr<VideoEncoder> CreateEncoder() override {
66 return VP8Encoder::Create();
67 }
68
CreateDecoder()69 std::unique_ptr<VideoDecoder> CreateDecoder() override {
70 return VP8Decoder::Create();
71 }
72
ModifyCodecSettings(VideoCodec * codec_settings)73 void ModifyCodecSettings(VideoCodec* codec_settings) override {
74 webrtc::test::CodecSettings(kVideoCodecVP8, codec_settings);
75 codec_settings->width = kWidth;
76 codec_settings->height = kHeight;
77 codec_settings->VP8()->denoisingOn = true;
78 codec_settings->VP8()->frameDroppingOn = false;
79 codec_settings->VP8()->automaticResizeOn = false;
80 codec_settings->VP8()->complexity = VideoCodecComplexity::kComplexityNormal;
81 }
82
EncodeAndWaitForFrame(const VideoFrame & input_frame,EncodedImage * encoded_frame,CodecSpecificInfo * codec_specific_info,bool keyframe=false)83 void EncodeAndWaitForFrame(const VideoFrame& input_frame,
84 EncodedImage* encoded_frame,
85 CodecSpecificInfo* codec_specific_info,
86 bool keyframe = false) {
87 std::vector<VideoFrameType> frame_types;
88 if (keyframe) {
89 frame_types.emplace_back(VideoFrameType::kVideoFrameKey);
90 } else {
91 frame_types.emplace_back(VideoFrameType::kVideoFrameDelta);
92 }
93 EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
94 encoder_->Encode(input_frame, &frame_types));
95 ASSERT_TRUE(WaitForEncodedFrame(encoded_frame, codec_specific_info));
96 VerifyQpParser(*encoded_frame);
97 EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType);
98 EXPECT_EQ(0, encoded_frame->SpatialIndex());
99 }
100
EncodeAndExpectFrameWith(const VideoFrame & input_frame,uint8_t temporal_idx,bool keyframe=false)101 void EncodeAndExpectFrameWith(const VideoFrame& input_frame,
102 uint8_t temporal_idx,
103 bool keyframe = false) {
104 EncodedImage encoded_frame;
105 CodecSpecificInfo codec_specific_info;
106 EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info,
107 keyframe);
108 EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP8.temporalIdx);
109 }
110
VerifyQpParser(const EncodedImage & encoded_frame) const111 void VerifyQpParser(const EncodedImage& encoded_frame) const {
112 int qp;
113 EXPECT_GT(encoded_frame.size(), 0u);
114 ASSERT_TRUE(vp8::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
115 EXPECT_EQ(encoded_frame.qp_, qp) << "Encoder QP != parsed bitstream QP.";
116 }
117 };
118
TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) {
  // With a single temporal layer no error resilience should be requested
  // from libvpx.
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1;

  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*mock_vpx,
              codec_enc_init(
                  _, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, 0), _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}
131
TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) {
  // Two temporal layers should turn on default error resilience.
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*mock_vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_DEFAULT),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}
147
TEST_F(TestVp8Impl,
       PartitionErrorResilienceEnabledForTemporalLayersWithFieldTrial) {
  // The field trial forces partition-based resilience instead of the default.
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-ForcePartitionResilience/Enabled/");
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*mock_vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_PARTITIONS),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}
166
TEST_F(TestVp8Impl, SetRates) {
  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));

  constexpr uint32_t kTargetBitrateBps = 300000;
  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, kTargetBitrateBps);
  // The allocation must reach libvpx together with the static rate-controller
  // constants (undershoot/overshoot percentages, buffer sizes, drop
  // threshold).
  EXPECT_CALL(
      *mock_vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kTargetBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 100u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 15u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 1000u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 600u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 30u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(VideoEncoder::RateControlParameters(
      allocation, static_cast<double>(codec_settings_.maxFramerate)));
}
192
TEST_F(TestVp8Impl, DynamicSetRates) {
  // Enable dynamic rate control; libvpx settings should then depend on the
  // amount of bandwidth headroom above the target bitrate.
  test::ScopedFieldTrials field_trials(
      "WebRTC-VideoRateControl/vp8_dynamic_rate:true/");
  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));

  constexpr uint32_t kTargetBitrateBps = 300000;
  VideoEncoder::RateControlParameters params;
  params.bitrate.SetBitrate(0, 0, kTargetBitrateBps);
  params.framerate_fps = static_cast<double>(codec_settings_.maxFramerate);

  // No headroom: tightest rate-controller configuration.
  params.bandwidth_allocation = DataRate::BitsPerSec(kTargetBitrateBps);
  EXPECT_CALL(
      *mock_vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kTargetBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 1000u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 0u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 100u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 30u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 40u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(params);

  // Maximum headroom: configuration relaxes to the static defaults.
  params.bandwidth_allocation = DataRate::BitsPerSec(kTargetBitrateBps * 2);
  EXPECT_CALL(
      *mock_vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kTargetBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 100u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 15u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 1000u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 600u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 5u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(params);

  // Headroom half way: configuration is interpolated between the extremes.
  params.bandwidth_allocation =
      DataRate::BitsPerSec((3 * kTargetBitrateBps) / 2);
  EXPECT_CALL(
      *mock_vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kTargetBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 550u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 8u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 550u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 315u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 23u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(params);
}
254
TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
  // Re-initialize, encode one frame, then verify that encoding after
  // Release() reports the encoder as uninitialized.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(NextInputFrame(), &produced_frame, &produced_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}
268
// Decoder can be released and then re-initialized with the same settings.
TEST_F(TestVp8Impl, InitDecode) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->InitDecode(&codec_settings_, kNumCores));
}
274
TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
  // The encoded image must carry the input RTP timestamp and the configured
  // resolution.
  VideoFrame source_frame = NextInputFrame();
  source_frame.set_timestamp(kInitialTimestampRtp);
  source_frame.set_timestamp_us(kInitialTimestampMs *
                                rtc::kNumMicrosecsPerMillisec);
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(source_frame, &produced_frame, &produced_info);

  EXPECT_EQ(kInitialTimestampRtp, produced_frame.Timestamp());
  EXPECT_EQ(kWidth, static_cast<int>(produced_frame._encodedWidth));
  EXPECT_EQ(kHeight, static_cast<int>(produced_frame._encodedHeight));
}
288
TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
  VideoFrame source_frame = NextInputFrame();
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(source_frame, &produced_frame, &produced_info);

  // The first encoded frame is a keyframe; mark it as such before decoding.
  produced_frame._frameType = VideoFrameType::kVideoFrameKey;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(produced_frame, false, -1));
  std::unique_ptr<VideoFrame> output_frame;
  absl::optional<uint8_t> output_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&output_frame, &output_qp));
  ASSERT_TRUE(output_frame);
  ASSERT_TRUE(output_qp);
  // Sanity-check decode quality, then compare encoder vs decoder QP.
  EXPECT_GT(I420PSNR(&source_frame, output_frame.get()), 36);
  EXPECT_EQ(produced_frame.qp_, *output_qp);
}
306
// Verifies that InitEncode() rejects invalid simulcast configurations
// (non-ascending resolutions, mismatched temporal layer counts, resolutions
// that don't match the codec config) and accepts valid ones.
TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
  codec_settings_.numberOfSimulcastStreams = 2;
  // Resolutions are not in ascending order, temporal layers do not match.
  codec_settings_.simulcastStream[0] = {kWidth, kHeight, kFramerateFps, 2,
                                        4000,   3000,    2000,          80};
  // Consistency fix: use kFramerateFps (== 30) instead of the bare literal,
  // matching every other initializer in this test.
  codec_settings_.simulcastStream[1] = {kWidth / 2, kHeight / 2, kFramerateFps,
                                        3,          4000,        3000,
                                        2000,       80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  codec_settings_.numberOfSimulcastStreams = 3;
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2 - 1, kHeight / 2 - 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[1] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[2] = {
      kWidth - 1, kHeight - 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Temporal layers do not match.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 2, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 3,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions do not match codec config.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4 + 1, kHeight / 4 + 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2 + 2, kHeight / 2 + 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {
      kWidth + 4, kHeight + 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: scaling by 2, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: custom scaling, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
}
374
375 #if defined(WEBRTC_ANDROID)
376 #define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
377 #else
378 #define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
379 #endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
  // Round-trip a frame through encoder and decoder; timestamps must survive
  // and the decoded picture must be close to the source.
  VideoFrame source_frame = NextInputFrame();
  source_frame.set_timestamp(kInitialTimestampRtp);
  source_frame.set_timestamp_us(kInitialTimestampMs *
                                rtc::kNumMicrosecsPerMillisec);
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(source_frame, &produced_frame, &produced_info);

  // First frame should be a key frame.
  produced_frame._frameType = VideoFrameType::kVideoFrameKey;
  produced_frame.ntp_time_ms_ = kTestNtpTimeMs;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(produced_frame, false, -1));

  std::unique_ptr<VideoFrame> output_frame;
  absl::optional<uint8_t> output_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&output_frame, &output_qp));
  ASSERT_TRUE(output_frame);
  // Compute PSNR on all planes (faster than SSIM).
  EXPECT_GT(I420PSNR(&source_frame, output_frame.get()), 36);
  EXPECT_EQ(kInitialTimestampRtp, output_frame->timestamp());
}
402
403 #if defined(WEBRTC_ANDROID)
404 #define MAYBE_DecodeWithACompleteKeyFrame DISABLED_DecodeWithACompleteKeyFrame
405 #else
406 #define MAYBE_DecodeWithACompleteKeyFrame DecodeWithACompleteKeyFrame
407 #endif
TEST_F(TestVp8Impl, MAYBE_DecodeWithACompleteKeyFrame) {
  VideoFrame source_frame = NextInputFrame();
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(source_frame, &produced_frame, &produced_info);

  // An incomplete frame must be rejected by the decoder.
  produced_frame._completeFrame = false;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
            decoder_->Decode(produced_frame, false, -1));
  // A complete frame marked as delta still fails: no keyframe seen yet.
  produced_frame._frameType = VideoFrameType::kVideoFrameDelta;
  produced_frame._completeFrame = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERROR,
            decoder_->Decode(produced_frame, false, -1));
  // A complete keyframe finally decodes successfully.
  produced_frame._frameType = VideoFrameType::kVideoFrameKey;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(produced_frame, false, -1));
  std::unique_ptr<VideoFrame> output_frame;
  absl::optional<uint8_t> output_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&output_frame, &output_qp));
  ASSERT_TRUE(output_frame);
  EXPECT_GT(I420PSNR(&source_frame, output_frame.get()), 36);
}
432
TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
  // With two temporal layers the encoder should alternate TL0/TL1.
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // First frame lands on temporal layer 0.
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(NextInputFrame(), &produced_frame, &produced_info);
  EXPECT_EQ(0, produced_info.codecSpecific.VP8.temporalIdx);

  // Subsequent frames alternate between the layers: 1, 0, 1.
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
  EncodeAndExpectFrameWith(NextInputFrame(), 0);
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
}
451
TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {
  // With automatic resize off, no QP scaling thresholds should be reported.
  codec_settings_.VP8()->frameDroppingOn = true;
  codec_settings_.VP8()->automaticResizeOn = false;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  const VideoEncoder::ScalingSettings scaling =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_FALSE(scaling.thresholds.has_value());
}
462
TEST_F(TestVp8Impl, ScalingEnabledIfAutomaticResizeOn) {
  // With automatic resize on, QP thresholds and the default minimum pixel
  // count should be reported.
  codec_settings_.VP8()->frameDroppingOn = true;
  codec_settings_.VP8()->automaticResizeOn = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  const VideoEncoder::ScalingSettings scaling =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_TRUE(scaling.thresholds.has_value());
  EXPECT_EQ(kDefaultMinPixelsPerFrame, scaling.min_pixels_per_frame);
}
474
TEST_F(TestVp8Impl, DontDropKeyframes) {
  // Use a very high resolution so overshoot (and thus frame dropping
  // pressure) is easy to trigger.
  const int kLargeWidth = 1920;
  const int kLargeHeight = 1080;
  codec_settings_.width = kLargeWidth;
  codec_settings_.height = kLargeHeight;

  // Screensharing has the internal frame dropper off, and instead per frame
  // asks ScreenshareLayers to decide if it should be dropped or not.
  codec_settings_.VP8()->frameDroppingOn = false;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  // ScreenshareLayers triggers on 2 temporal layers and 1000kbps max bitrate.
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  codec_settings_.maxBitrate = 1000;

  // Reset the frame generator with large number of squares, leading to lots of
  // details and high probability of overshoot.
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      codec_settings_.width, codec_settings_.height,
      test::FrameGeneratorInterface::OutputType::kI420,
      /* num_squares = */ absl::optional<int>(300));

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Allocate only enough bitrate for TL0 at a low framerate.
  VideoBitrateAllocation allocation;
  allocation.SetBitrate(0, 0, 200000);
  encoder_->SetRates(VideoEncoder::RateControlParameters(allocation, 5.0));

  // Repeatedly request keyframes; none of them may be dropped despite the
  // bitrate pressure.
  EncodedImage produced_frame;
  CodecSpecificInfo produced_info;
  EncodeAndWaitForFrame(NextInputFrame(), &produced_frame, &produced_info,
                        true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
}
515
// Verifies that when an overshot frame is dropped and re-encoded, the second
// encode uses the same presentation timestamp as the first.
TEST_F(TestVp8Impl, KeepsTimestampOnReencode) {
  auto* const vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  // Settings needed to trigger ScreenshareLayers usage, which is required for
  // overshoot-drop-reencode logic.
  codec_settings_.maxBitrate = 1000;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  // Minimal fake of vpx_img_wrap(): just copy the requested geometry/format
  // into the provided image struct so the encoder can proceed.
  EXPECT_CALL(*vpx, img_wrap(_, _, _, _, _, _))
      .WillOnce(Invoke([](vpx_image_t* img, vpx_img_fmt_t fmt, unsigned int d_w,
                          unsigned int d_h, unsigned int stride_align,
                          unsigned char* img_data) {
        img->fmt = fmt;
        img->d_w = d_w;
        img->d_h = d_h;
        img->img_data = img_data;
        return img;
      }));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));
  MockEncodedImageCallback callback;
  encoder.RegisterEncodeCompleteCallback(&callback);

  // Simulate overshoot drop, re-encode: encode function will be called twice
  // with the same parameters. codec_get_cx_data() will by default return no
  // image data and be interpreted as drop.
  EXPECT_CALL(*vpx, codec_encode(_, _, /* pts = */ 0, _, _, _))
      .Times(2)
      .WillRepeatedly(Return(vpx_codec_err_t::VPX_CODEC_OK));

  auto delta_frame =
      std::vector<VideoFrameType>{VideoFrameType::kVideoFrameDelta};
  encoder.Encode(NextInputFrame(), &delta_frame);
}
554
TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) {
  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());

  // These capabilities are fixed properties of the libvpx implementation and
  // must hold without any initialization.
  const EncoderInfo info = encoder.GetEncoderInfo();
  EXPECT_FALSE(info.supports_native_handle);
  EXPECT_FALSE(info.is_hardware_accelerated);
  EXPECT_FALSE(info.has_internal_source);
  EXPECT_TRUE(info.supports_simulcast);
  EXPECT_EQ(info.implementation_name, "libvpx");
}
568
TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) {
  // Default settings carry no per-resolution bitrate limits.
  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           VP8Encoder::Settings());

  EXPECT_TRUE(encoder.GetEncoderInfo().resolution_bitrate_limits.empty());
}
579
TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsResolutionBitrateLimitsAsConfigured) {
  // Limits passed in through the encoder settings must be echoed verbatim by
  // GetEncoderInfo().
  std::vector<VideoEncoder::ResolutionBitrateLimits> configured_limits = {
      VideoEncoder::ResolutionBitrateLimits(/*frame_size_pixels=*/640 * 360,
                                            /*min_start_bitrate_bps=*/300,
                                            /*min_bitrate_bps=*/100,
                                            /*max_bitrate_bps=*/1000),
      VideoEncoder::ResolutionBitrateLimits(320 * 180, 100, 30, 500)};
  VP8Encoder::Settings settings;
  settings.resolution_bitrate_limits = configured_limits;

  auto* const mock_vpx = new NiceMock<MockLibvpxVp8Interface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(mock_vpx)),
                           std::move(settings));

  EXPECT_EQ(encoder.GetEncoderInfo().resolution_bitrate_limits,
            configured_limits);
}
599
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationNoLayers) {
  // Without layering, a single full-framerate fraction is expected for the
  // first spatial layer and nothing for the rest.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers] = {
      FramerateFractions(1, EncoderInfo::kMaxFramerateFraction)};

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}
607
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) {
  // One active simulcast stream carrying two temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  auto& stream = codec_settings_.simulcastStream[0];
  stream.active = true;
  stream.targetBitrate = 100;
  stream.maxBitrate = 100;
  stream.numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // TL0 runs at half the full rate, TL0+TL1 together at the full rate.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}
625
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) {
  // One active simulcast stream carrying three temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  auto& stream = codec_settings_.simulcastStream[0];
  stream.active = true;
  stream.targetBitrate = 100;
  stream.maxBitrate = 100;
  stream.numberOfTemporalLayers = 3;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Cumulative framerate fractions: 1/4, 1/2, full.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}
644
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) {
  // Legacy screenshare mode with two temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  auto& stream = codec_settings_.simulcastStream[0];
  stream.active = true;
  stream.minBitrate = 30;
  stream.targetBitrate = kLegacyScreenshareTl0BitrateKbps;
  stream.maxBitrate = kLegacyScreenshareTl1BitrateKbps;
  stream.numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Expect empty vector, since this mode doesn't have a fixed framerate.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}
664
TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Three active simulcast streams, three temporal layers each, resolutions
  // scaled by powers of two up to the full codec resolution.
  codec_settings_.numberOfSimulcastStreams = 3;
  for (int sidx = 0; sidx < codec_settings_.numberOfSimulcastStreams; ++sidx) {
    auto& stream = codec_settings_.simulcastStream[sidx];
    stream.active = true;
    stream.minBitrate = 30;
    stream.targetBitrate = 30;
    stream.maxBitrate = 30;
    stream.numberOfTemporalLayers = 3;
    const int downscale_shift =
        codec_settings_.numberOfSimulcastStreams - sidx - 1;
    stream.width = codec_settings_.width >> downscale_shift;
    stream.height = codec_settings_.height >> downscale_shift;
  }

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Every stream reports the cumulative fractions 1/4, 1/2, full.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);
  expected_fps_allocation[1] = expected_fps_allocation[0];
  expected_fps_allocation[2] = expected_fps_allocation[0];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));

  // Release encoder and re-init without temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // While uninitialized, a single full-rate fraction should be reported.
  FramerateFractions default_fps_fraction[kMaxSpatialLayers];
  default_fps_fraction[0].push_back(EncoderInfo::kMaxFramerateFraction);
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(default_fps_fraction));

  for (int sidx = 0; sidx < codec_settings_.numberOfSimulcastStreams; ++sidx) {
    codec_settings_.simulcastStream[sidx].numberOfTemporalLayers = 1;
  }
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // With one temporal layer, each stream gets exactly one full-rate entry.
  for (size_t sidx = 0; sidx < 3; ++sidx) {
    expected_fps_allocation[sidx].clear();
    expected_fps_allocation[sidx].push_back(
        EncoderInfo::kMaxFramerateFraction);
  }
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}
718
719 } // namespace webrtc
720