/*
 *  Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include <memory>

#include "api/test/create_frame_generator.h"
#include "api/test/frame_generator_interface.h"
#include "api/test/mock_video_decoder.h"
#include "api/test/mock_video_encoder.h"
#include "api/video_codecs/video_encoder.h"
#include "api/video_codecs/vp8_temporal_layers.h"
#include "common_video/libyuv/include/webrtc_libyuv.h"
#include "common_video/test/utilities.h"
#include "modules/video_coding/codecs/interface/mock_libvpx_interface.h"
#include "modules/video_coding/codecs/test/video_codec_unittest.h"
#include "modules/video_coding/codecs/vp8/include/vp8.h"
#include "modules/video_coding/codecs/vp8/libvpx_vp8_encoder.h"
#include "modules/video_coding/utility/vp8_header_parser.h"
#include "rtc_base/time_utils.h"
#include "test/field_trial.h"
#include "test/mappable_native_buffer.h"
#include "test/video_codec_settings.h"

namespace webrtc {

using ::testing::_;
using ::testing::AllOf;
using ::testing::ElementsAre;
using ::testing::ElementsAreArray;
using ::testing::Field;
using ::testing::Invoke;
using ::testing::NiceMock;
using ::testing::Return;
using EncoderInfo = webrtc::VideoEncoder::EncoderInfo;
using FramerateFractions =
    absl::InlinedVector<uint8_t, webrtc::kMaxTemporalStreams>;

namespace {
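// Bitrate levels used by the legacy screenshare (conference mode) test below.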
constexpr uint32_t kLegacyScreenshareTl0BitrateKbps = 200;
constexpr uint32_t kLegacyScreenshareTl1BitrateKbps = 1000;
constexpr uint32_t kInitialTimestampRtp = 123;
constexpr int64_t kTestNtpTimeMs = 456;
constexpr int64_t kInitialTimestampMs = 789;
constexpr int kNumCores = 1;
constexpr size_t kMaxPayloadSize = 1440;
constexpr int kWidth = 172;
constexpr int kHeight = 144;
constexpr float kFramerateFps = 30;

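// Shared encoder settings used by most tests: one core and a 1440-byte max
// payload. The Capabilities flag is assumed to be loss_notification, left
// disabled here.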
const VideoEncoder::Capabilities kCapabilities(false);
const VideoEncoder::Settings kSettings(kCapabilities,
                                       kNumCores,
                                       kMaxPayloadSize);
}  // namespace

class TestVp8Impl : public VideoCodecUnitTest {
 protected:
  std::unique_ptr<VideoEncoder> CreateEncoder() override {
    return VP8Encoder::Create();
  }

  std::unique_ptr<VideoDecoder> CreateDecoder() override {
    return VP8Decoder::Create();
  }

  void ModifyCodecSettings(VideoCodec* codec_settings) override {
    webrtc::test::CodecSettings(kVideoCodecVP8, codec_settings);
    codec_settings->width = kWidth;
    codec_settings->height = kHeight;
    codec_settings->VP8()->denoisingOn = true;
    codec_settings->VP8()->frameDroppingOn = false;
    codec_settings->VP8()->automaticResizeOn = false;
    codec_settings->VP8()->complexity = VideoCodecComplexity::kComplexityNormal;
  }

  void EncodeAndWaitForFrame(const VideoFrame& input_frame,
                             EncodedImage* encoded_frame,
                             CodecSpecificInfo* codec_specific_info,
                             bool keyframe = false) {
    std::vector<VideoFrameType> frame_types;
    if (keyframe) {
      frame_types.emplace_back(VideoFrameType::kVideoFrameKey);
    } else {
      frame_types.emplace_back(VideoFrameType::kVideoFrameDelta);
    }
    EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
              encoder_->Encode(input_frame, &frame_types));
    ASSERT_TRUE(WaitForEncodedFrame(encoded_frame, codec_specific_info));
    VerifyQpParser(*encoded_frame);
    EXPECT_EQ(kVideoCodecVP8, codec_specific_info->codecType);
    EXPECT_EQ(0, encoded_frame->SpatialIndex());
  }

  void EncodeAndExpectFrameWith(const VideoFrame& input_frame,
                                uint8_t temporal_idx,
                                bool keyframe = false) {
    EncodedImage encoded_frame;
    CodecSpecificInfo codec_specific_info;
    EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info,
                          keyframe);
    EXPECT_EQ(temporal_idx, codec_specific_info.codecSpecific.VP8.temporalIdx);
  }

  void VerifyQpParser(const EncodedImage& encoded_frame) const {
    int qp;
    EXPECT_GT(encoded_frame.size(), 0u);
    ASSERT_TRUE(vp8::GetQp(encoded_frame.data(), encoded_frame.size(), &qp));
    EXPECT_EQ(encoded_frame.qp_, qp) << "Encoder QP != parsed bitstream QP.";
  }
};

TEST_F(TestVp8Impl, ErrorResilienceDisabledForNoTemporalLayers) {
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 1;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
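  // With a single temporal layer, the encoder is expected to leave libvpx
  // error resilience off (g_error_resilient == 0).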
  EXPECT_CALL(*vpx,
              codec_enc_init(
                  _, _, Field(&vpx_codec_enc_cfg_t::g_error_resilient, 0), _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

TEST_F(TestVp8Impl, DefaultErrorResilienceEnabledForTemporalLayers) {
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
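  // With temporal layers and no field trial override, default error
  // resilience (VPX_ERROR_RESILIENT_DEFAULT) is expected at init.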
  EXPECT_CALL(*vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_DEFAULT),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

TEST_F(TestVp8Impl,
       PartitionErrorResilienceEnabledForTemporalLayersWithFieldTrial) {
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-ForcePartitionResilience/Enabled/");
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_CALL(*vpx,
              codec_enc_init(_, _,
                             Field(&vpx_codec_enc_cfg_t::g_error_resilient,
                                   VPX_ERROR_RESILIENT_PARTITIONS),
                             _));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_, kSettings));
}

TEST_F(TestVp8Impl, SetRates) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));

  const uint32_t kBitrateBps = 300000;
  VideoBitrateAllocation bitrate_allocation;
  bitrate_allocation.SetBitrate(0, 0, kBitrateBps);
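  // rc_target_bitrate is expressed in kbps; the other rate-control fields
  // below are assumed to mirror LibvpxVp8Encoder's fixed defaults.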
  EXPECT_CALL(
      *vpx,
      codec_enc_config_set(
          _, AllOf(Field(&vpx_codec_enc_cfg_t::rc_target_bitrate,
                         kBitrateBps / 1000),
                   Field(&vpx_codec_enc_cfg_t::rc_undershoot_pct, 100u),
                   Field(&vpx_codec_enc_cfg_t::rc_overshoot_pct, 15u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_sz, 1000u),
                   Field(&vpx_codec_enc_cfg_t::rc_buf_optimal_sz, 600u),
                   Field(&vpx_codec_enc_cfg_t::rc_dropframe_thresh, 30u))))
      .WillOnce(Return(VPX_CODEC_OK));
  encoder.SetRates(VideoEncoder::RateControlParameters(
      bitrate_allocation, static_cast<double>(codec_settings_.maxFramerate)));
}

TEST_F(TestVp8Impl, EncodeFrameAndRelease) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, EncodeNv12FrameSimulcast) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, EncodeI420FrameAfterNv12Frame) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kNV12,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      kWidth, kHeight, test::FrameGeneratorInterface::OutputType::kI420,
      absl::nullopt);
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_UNINITIALIZED,
            encoder_->Encode(NextInputFrame(), nullptr));
}

TEST_F(TestVp8Impl, InitDecode) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Release());
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            decoder_->InitDecode(&codec_settings_, kNumCores));
}

TEST_F(TestVp8Impl, OnEncodedImageReportsInfo) {
  VideoFrame input_frame = NextInputFrame();
  input_frame.set_timestamp(kInitialTimestampRtp);
  input_frame.set_timestamp_us(kInitialTimestampMs *
                               rtc::kNumMicrosecsPerMillisec);
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  EXPECT_EQ(kInitialTimestampRtp, encoded_frame.Timestamp());
  EXPECT_EQ(kWidth, static_cast<int>(encoded_frame._encodedWidth));
  EXPECT_EQ(kHeight, static_cast<int>(encoded_frame._encodedHeight));
}

TEST_F(TestVp8Impl,
       EncoderFillsResolutionInCodecAgnosticSectionOfCodecSpecificInfo) {
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  ASSERT_TRUE(codec_specific_info.template_structure);
  EXPECT_THAT(codec_specific_info.template_structure->resolutions,
              ElementsAre(RenderResolution(kWidth, kHeight)));
}

TEST_F(TestVp8Impl, DecodedQpEqualsEncodedQp) {
  VideoFrame input_frame = NextInputFrame();
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // First frame should be a key frame.
  encoded_frame._frameType = VideoFrameType::kVideoFrameKey;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1));
  std::unique_ptr<VideoFrame> decoded_frame;
  absl::optional<uint8_t> decoded_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
  ASSERT_TRUE(decoded_frame);
  ASSERT_TRUE(decoded_qp);
  EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36);
  EXPECT_EQ(encoded_frame.qp_, *decoded_qp);
}

TEST_F(TestVp8Impl, ChecksSimulcastSettings) {
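  // The SimulcastStream aggregate initializers below are assumed to follow
  // {width, height, maxFramerate, numberOfTemporalLayers, maxBitrate,
  //  targetBitrate, minBitrate, qpMax}.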
  codec_settings_.numberOfSimulcastStreams = 2;
  // Resolutions are not in ascending order, temporal layers do not match.
  codec_settings_.simulcastStream[0] = {kWidth, kHeight, kFramerateFps, 2,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[1] = {kWidth / 2, kHeight / 2, 30,   3,
                                        4000,       3000,        2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  codec_settings_.numberOfSimulcastStreams = 3;
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2 - 1, kHeight / 2 - 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, 30,   1,
                                        4000,   3000,    2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions are not in ascending order.
  codec_settings_.simulcastStream[0] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[1] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[2] = {
      kWidth - 1, kHeight - 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Temporal layers do not match.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 2, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 3,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Resolutions do not match codec config.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4 + 1, kHeight / 4 + 1, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2 + 2, kHeight / 2 + 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {
      kWidth + 4, kHeight + 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_ERR_SIMULCAST_PARAMETERS_NOT_SUPPORTED,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: scaling by 2, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
  // Everything fine: custom scaling, top resolution matches video, temporal
  // settings are the same for all layers.
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80};
  codec_settings_.simulcastStream[1] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  codec_settings_.simulcastStream[2] = {kWidth, kHeight, kFramerateFps, 1,
                                        4000,   3000,    2000,          80};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));
}

#if defined(WEBRTC_ANDROID)
#define MAYBE_AlignedStrideEncodeDecode DISABLED_AlignedStrideEncodeDecode
#else
#define MAYBE_AlignedStrideEncodeDecode AlignedStrideEncodeDecode
#endif
TEST_F(TestVp8Impl, MAYBE_AlignedStrideEncodeDecode) {
  VideoFrame input_frame = NextInputFrame();
  input_frame.set_timestamp(kInitialTimestampRtp);
  input_frame.set_timestamp_us(kInitialTimestampMs *
                               rtc::kNumMicrosecsPerMillisec);
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // First frame should be a key frame.
  encoded_frame._frameType = VideoFrameType::kVideoFrameKey;
  encoded_frame.ntp_time_ms_ = kTestNtpTimeMs;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, decoder_->Decode(encoded_frame, false, -1));

  std::unique_ptr<VideoFrame> decoded_frame;
  absl::optional<uint8_t> decoded_qp;
  ASSERT_TRUE(WaitForDecodedFrame(&decoded_frame, &decoded_qp));
  ASSERT_TRUE(decoded_frame);
  // Compute PSNR on all planes (faster than SSIM).
  EXPECT_GT(I420PSNR(&input_frame, decoded_frame.get()), 36);
  EXPECT_EQ(kInitialTimestampRtp, decoded_frame->timestamp());
}

TEST_F(TestVp8Impl, EncoderWith2TemporalLayers) {
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Temporal layer 0.
  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info);

  EXPECT_EQ(0, codec_specific_info.codecSpecific.VP8.temporalIdx);
  // Temporal layer 1.
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
  // Temporal layer 0.
  EncodeAndExpectFrameWith(NextInputFrame(), 0);
  // Temporal layer 1.
  EncodeAndExpectFrameWith(NextInputFrame(), 1);
}

TEST_F(TestVp8Impl, ScalingDisabledIfAutomaticResizeOff) {
  codec_settings_.VP8()->frameDroppingOn = true;
  codec_settings_.VP8()->automaticResizeOn = false;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoEncoder::ScalingSettings settings =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_FALSE(settings.thresholds.has_value());
}

TEST_F(TestVp8Impl, ScalingEnabledIfAutomaticResizeOn) {
  codec_settings_.VP8()->frameDroppingOn = true;
  codec_settings_.VP8()->automaticResizeOn = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoEncoder::ScalingSettings settings =
      encoder_->GetEncoderInfo().scaling_settings;
  EXPECT_TRUE(settings.thresholds.has_value());
  EXPECT_EQ(kDefaultMinPixelsPerFrame, settings.min_pixels_per_frame);
}

TEST_F(TestVp8Impl, DontDropKeyframes) {
  // Set very high resolution to trigger overuse more easily.
  const int kScreenWidth = 1920;
  const int kScreenHeight = 1080;

  codec_settings_.width = kScreenWidth;
  codec_settings_.height = kScreenHeight;

  // Screensharing keeps the internal frame dropper off and instead asks
  // ScreenshareLayers per frame whether it should be dropped.
  codec_settings_.VP8()->frameDroppingOn = false;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  // ScreenshareLayers kicks in at 2 temporal layers and a 1000 kbps max
  // bitrate.
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  codec_settings_.maxBitrate = 1000;

  // Reset the frame generator with a large number of squares, leading to lots
  // of detail and a high probability of overshoot.
  input_frame_generator_ = test::CreateSquareFrameGenerator(
      codec_settings_.width, codec_settings_.height,
      test::FrameGeneratorInterface::OutputType::kI420,
      /* num_squares = */ absl::optional<int>(300));

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  VideoBitrateAllocation bitrate_allocation;
  // Bitrate only enough for TL0.
  bitrate_allocation.SetBitrate(0, 0, 200000);
  encoder_->SetRates(
      VideoEncoder::RateControlParameters(bitrate_allocation, 5.0));

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(NextInputFrame(), &encoded_frame, &codec_specific_info,
                        true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
  EncodeAndExpectFrameWith(NextInputFrame(), 0, true);
}

TEST_F(TestVp8Impl, KeepsTimestampOnReencode) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  // Settings needed to trigger ScreenshareLayers usage, which is required for
  // overshoot-drop-reencode logic.
  codec_settings_.maxBitrate = 1000;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  codec_settings_.VP8()->numberOfTemporalLayers = 2;
  codec_settings_.legacy_conference_mode = true;

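  // img_wrap() is stubbed so the wrapped vpx_image_t simply mirrors the input
  // buffer; no real libvpx allocation happens in this test.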
  EXPECT_CALL(*vpx, img_wrap(_, _, _, _, _, _))
      .WillOnce(Invoke([](vpx_image_t* img, vpx_img_fmt_t fmt, unsigned int d_w,
                          unsigned int d_h, unsigned int stride_align,
                          unsigned char* img_data) {
        img->fmt = fmt;
        img->d_w = d_w;
        img->d_h = d_h;
        img->img_data = img_data;
        return img;
      }));
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder.InitEncode(&codec_settings_,
                               VideoEncoder::Settings(kCapabilities, 1, 1000)));
  MockEncodedImageCallback callback;
  encoder.RegisterEncodeCompleteCallback(&callback);

  // Simulate overshoot drop and re-encode: the encode function will be called
  // twice with the same parameters. codec_get_cx_data() returns no image data
  // by default, which is interpreted as a drop.
  EXPECT_CALL(*vpx, codec_encode(_, _, /* pts = */ 0, _, _, _))
      .Times(2)
      .WillRepeatedly(Return(vpx_codec_err_t::VPX_CODEC_OK));

  auto delta_frame =
      std::vector<VideoFrameType>{VideoFrameType::kVideoFrameDelta};
  encoder.Encode(NextInputFrame(), &delta_frame);
}

TEST(LibvpxVp8EncoderTest, GetEncoderInfoReturnsStaticInformation) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  const auto info = encoder.GetEncoderInfo();

  EXPECT_FALSE(info.supports_native_handle);
  EXPECT_FALSE(info.is_hardware_accelerated);
  EXPECT_FALSE(info.has_internal_source);
  EXPECT_TRUE(info.supports_simulcast);
  EXPECT_EQ(info.implementation_name, "libvpx");
  EXPECT_EQ(info.requested_resolution_alignment, 1);
  EXPECT_THAT(info.preferred_pixel_formats,
              testing::UnorderedElementsAre(VideoFrameBuffer::Type::kNV12,
                                            VideoFrameBuffer::Type::kI420));
}

TEST(LibvpxVp8EncoderTest, RequestedResolutionAlignmentFromFieldTrial) {
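  // The WebRTC-VP8-GetEncoderInfoOverride field trial overrides the reported
  // resolution alignment without touching the other encoder-info defaults.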
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-GetEncoderInfoOverride/"
      "requested_resolution_alignment:10/");

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  EXPECT_EQ(encoder.GetEncoderInfo().requested_resolution_alignment, 10);
  EXPECT_FALSE(
      encoder.GetEncoderInfo().apply_alignment_to_all_simulcast_layers);
  EXPECT_TRUE(encoder.GetEncoderInfo().resolution_bitrate_limits.empty());
}

TEST(LibvpxVp8EncoderTest, ResolutionBitrateLimitsFromFieldTrial) {
  test::ScopedFieldTrials field_trials(
      "WebRTC-VP8-GetEncoderInfoOverride/"
      "frame_size_pixels:123|456|789,"
      "min_start_bitrate_bps:11000|22000|33000,"
      "min_bitrate_bps:44000|55000|66000,"
      "max_bitrate_bps:77000|88000|99000/");
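  // The four lists above are index-aligned: entry i of each list is expected
  // to form one ResolutionBitrateLimits value.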

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  EXPECT_THAT(
      encoder.GetEncoderInfo().resolution_bitrate_limits,
      ::testing::ElementsAre(
          VideoEncoder::ResolutionBitrateLimits{123, 11000, 44000, 77000},
          VideoEncoder::ResolutionBitrateLimits{456, 22000, 55000, 88000},
          VideoEncoder::ResolutionBitrateLimits{789, 33000, 66000, 99000}));
}

TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsEmptyResolutionBitrateLimitsByDefault) {
  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           VP8Encoder::Settings());

  const auto info = encoder.GetEncoderInfo();

  EXPECT_TRUE(info.resolution_bitrate_limits.empty());
}

TEST(LibvpxVp8EncoderTest,
     GetEncoderInfoReturnsResolutionBitrateLimitsAsConfigured) {
  std::vector<VideoEncoder::ResolutionBitrateLimits> resolution_bitrate_limits =
      {VideoEncoder::ResolutionBitrateLimits(/*frame_size_pixels=*/640 * 360,
                                             /*min_start_bitrate_bps=*/300,
                                             /*min_bitrate_bps=*/100,
                                             /*max_bitrate_bps=*/1000),
       VideoEncoder::ResolutionBitrateLimits(320 * 180, 100, 30, 500)};
  VP8Encoder::Settings settings;
  settings.resolution_bitrate_limits = resolution_bitrate_limits;

  auto* const vpx = new NiceMock<MockLibvpxInterface>();
  LibvpxVp8Encoder encoder((std::unique_ptr<LibvpxInterface>(vpx)),
                           std::move(settings));

  const auto info = encoder.GetEncoderInfo();

  EXPECT_EQ(info.resolution_bitrate_limits, resolution_bitrate_limits);
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationNoLayers) {
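  // Without temporal layers configured, only the first stream reports a
  // single full-rate framerate fraction.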
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers] = {
      FramerateFractions(1, EncoderInfo::kMaxFramerateFraction)};

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationTwoTemporalLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].targetBitrate = 100;
  codec_settings_.simulcastStream[0].maxBitrate = 100;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

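  // With two temporal layers, TL0 carries half the frames and TL0+TL1 the
  // full rate, hence fractions of 1/2 and 1.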
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationThreeTemporalLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].targetBitrate = 100;
  codec_settings_.simulcastStream[0].maxBitrate = 100;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 3;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);

  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationScreenshareLayers) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
  codec_settings_.numberOfSimulcastStreams = 1;
  codec_settings_.mode = VideoCodecMode::kScreensharing;
  codec_settings_.simulcastStream[0].active = true;
  codec_settings_.simulcastStream[0].minBitrate = 30;
  codec_settings_.simulcastStream[0].targetBitrate =
      kLegacyScreenshareTl0BitrateKbps;
  codec_settings_.simulcastStream[0].maxBitrate =
      kLegacyScreenshareTl1BitrateKbps;
  codec_settings_.simulcastStream[0].numberOfTemporalLayers = 2;
  codec_settings_.legacy_conference_mode = true;
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Expect empty vector, since this mode doesn't have a fixed framerate.
  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

TEST_F(TestVp8Impl, GetEncoderInfoFpsAllocationSimulcastVideo) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Set up three simulcast streams with three temporal layers each.
  codec_settings_.numberOfSimulcastStreams = 3;
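  // Each lower simulcast stream halves the top resolution (right-shift by the
  // stream's distance from the top layer).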
  for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) {
    codec_settings_.simulcastStream[i].active = true;
    codec_settings_.simulcastStream[i].minBitrate = 30;
    codec_settings_.simulcastStream[i].targetBitrate = 30;
    codec_settings_.simulcastStream[i].maxBitrate = 30;
    codec_settings_.simulcastStream[i].numberOfTemporalLayers = 3;
    codec_settings_.simulcastStream[i].width =
        codec_settings_.width >>
        (codec_settings_.numberOfSimulcastStreams - i - 1);
    codec_settings_.simulcastStream[i].height =
        codec_settings_.height >>
        (codec_settings_.numberOfSimulcastStreams - i - 1);
  }

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  FramerateFractions expected_fps_allocation[kMaxSpatialLayers];
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 4);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction / 2);
  expected_fps_allocation[0].push_back(EncoderInfo::kMaxFramerateFraction);
  expected_fps_allocation[1] = expected_fps_allocation[0];
  expected_fps_allocation[2] = expected_fps_allocation[0];
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));

  // Release encoder and re-init without temporal layers.
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Sanity check fps allocation when not inited.
  FramerateFractions default_fps_fraction[kMaxSpatialLayers];
  default_fps_fraction[0].push_back(EncoderInfo::kMaxFramerateFraction);
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(default_fps_fraction));

  for (int i = 0; i < codec_settings_.numberOfSimulcastStreams; ++i) {
    codec_settings_.simulcastStream[i].numberOfTemporalLayers = 1;
  }
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  for (size_t i = 0; i < 3; ++i) {
    expected_fps_allocation[i].clear();
    expected_fps_allocation[i].push_back(EncoderInfo::kMaxFramerateFraction);
  }
  EXPECT_THAT(encoder_->GetEncoderInfo().fps_allocation,
              ::testing::ElementsAreArray(expected_fps_allocation));
}

class TestVp8ImplForPixelFormat
    : public TestVp8Impl,
      public ::testing::WithParamInterface<VideoFrameBuffer::Type> {
 public:
  TestVp8ImplForPixelFormat() : TestVp8Impl(), mappable_type_(GetParam()) {}

 protected:
  VideoFrameBuffer::Type mappable_type_;
};

TEST_P(TestVp8ImplForPixelFormat, EncodeNativeFrameSimulcast) {
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());

  // Configure simulcast.
  codec_settings_.numberOfSimulcastStreams = 3;
  codec_settings_.simulcastStream[0] = {
      kWidth / 4, kHeight / 4, kFramerateFps, 1, 4000, 3000, 2000, 80, true};
  codec_settings_.simulcastStream[1] = {
      kWidth / 2, kHeight / 2, kFramerateFps, 1, 4000, 3000, 2000, 80, true};
  codec_settings_.simulcastStream[2] = {
      kWidth, kHeight, kFramerateFps, 1, 4000, 3000, 2000, 80, true};
  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK,
            encoder_->InitEncode(&codec_settings_, kSettings));

  // Create a zero-conversion mappable frame of the parameterized pixel format
  // (calling ToI420 on it crashes).
  VideoFrame input_frame =
      test::CreateMappableNativeFrame(1, mappable_type_, kWidth, kHeight);

  EncodedImage encoded_frame;
  CodecSpecificInfo codec_specific_info;
  EncodeAndWaitForFrame(input_frame, &encoded_frame, &codec_specific_info);

  // After encoding, we expect one mapping per simulcast layer.
  rtc::scoped_refptr<test::MappableNativeBuffer> mappable_buffer =
      test::GetMappableNativeBufferFromVideoFrame(input_frame);
  std::vector<rtc::scoped_refptr<VideoFrameBuffer>> mapped_buffers =
      mappable_buffer->GetMappedFramedBuffers();
  ASSERT_EQ(mapped_buffers.size(), 3u);
  EXPECT_EQ(mapped_buffers[0]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[0]->width(), kWidth);
  EXPECT_EQ(mapped_buffers[0]->height(), kHeight);
  EXPECT_EQ(mapped_buffers[1]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[1]->width(), kWidth / 2);
  EXPECT_EQ(mapped_buffers[1]->height(), kHeight / 2);
  EXPECT_EQ(mapped_buffers[2]->type(), mappable_type_);
  EXPECT_EQ(mapped_buffers[2]->width(), kWidth / 4);
  EXPECT_EQ(mapped_buffers[2]->height(), kHeight / 4);
  EXPECT_FALSE(mappable_buffer->DidConvertToI420());

  EXPECT_EQ(WEBRTC_VIDEO_CODEC_OK, encoder_->Release());
}

INSTANTIATE_TEST_SUITE_P(All,
                         TestVp8ImplForPixelFormat,
                         ::testing::Values(VideoFrameBuffer::Type::kI420,
                                           VideoFrameBuffer::Type::kNV12));

}  // namespace webrtc