/*
 *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <stdio.h>

#include "rtc_base/flags.h"
#include "test/field_trial.h"
#include "test/gtest.h"
#include "test/run_test.h"
#include "video/video_quality_test.h"

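// Example invocation (a sketch only; the binary name and the flag values
// below are illustrative assumptions, the flags themselves are defined in
// this file):
//   ./video_loopback --codec=VP9 --width=1280 --height=720 --fps=30 \
//       --duration=60 --output_filename=graph_data.txt
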
namespace webrtc {
namespace flags {

// Flags common with screenshare loopback, with different default values.
DEFINE_int(width, 640, "Video width.");
size_t Width() {
  return static_cast<size_t>(FLAG_width);
}

DEFINE_int(height, 480, "Video height.");
size_t Height() {
  return static_cast<size_t>(FLAG_height);
}

DEFINE_int(fps, 30, "Frames per second.");
int Fps() {
  return static_cast<int>(FLAG_fps);
}

DEFINE_int(capture_device_index, 0, "Capture device to select");
size_t GetCaptureDevice() {
  return static_cast<size_t>(FLAG_capture_device_index);
}

DEFINE_int(min_bitrate, 50, "Call and stream min bitrate in kbps.");
int MinBitrateKbps() {
  return static_cast<int>(FLAG_min_bitrate);
}

DEFINE_int(start_bitrate, 300, "Call start bitrate in kbps.");
int StartBitrateKbps() {
  return static_cast<int>(FLAG_start_bitrate);
}

DEFINE_int(target_bitrate, 800, "Stream target bitrate in kbps.");
int TargetBitrateKbps() {
  return static_cast<int>(FLAG_target_bitrate);
}

DEFINE_int(max_bitrate, 800, "Call and stream max bitrate in kbps.");
int MaxBitrateKbps() {
  return static_cast<int>(FLAG_max_bitrate);
}

DEFINE_bool(suspend_below_min_bitrate,
            false,
            "Suspends video below the configured min bitrate.");

DEFINE_int(num_temporal_layers,
           1,
           "Number of temporal layers. Set to 1-4 to override.");
int NumTemporalLayers() {
  return static_cast<int>(FLAG_num_temporal_layers);
}

// Flags common with screenshare loopback, with equal default values.
DEFINE_string(codec, "VP8", "Video codec to use.");
std::string Codec() {
  return static_cast<std::string>(FLAG_codec);
}

DEFINE_int(selected_tl,
           -1,
           "Temporal layer to show or analyze. -1 to disable filtering.");
int SelectedTL() {
  return static_cast<int>(FLAG_selected_tl);
}

DEFINE_int(
    duration,
    0,
    "Duration of the test in seconds. If 0, rendered output will be shown "
    "instead.");
int DurationSecs() {
  return static_cast<int>(FLAG_duration);
}

DEFINE_string(output_filename, "", "Target graph data filename.");
std::string OutputFilename() {
  return static_cast<std::string>(FLAG_output_filename);
}

DEFINE_string(graph_title,
              "",
              "If empty, title will be generated automatically.");
std::string GraphTitle() {
  return static_cast<std::string>(FLAG_graph_title);
}

DEFINE_int(loss_percent, 0, "Percentage of packets randomly lost.");
int LossPercent() {
  return static_cast<int>(FLAG_loss_percent);
}

DEFINE_int(avg_burst_loss_length, -1, "Average burst length of lost packets.");
int AvgBurstLossLength() {
  return static_cast<int>(FLAG_avg_burst_loss_length);
}

DEFINE_int(link_capacity,
           0,
           "Capacity (kbps) of the fake link. 0 means infinite.");
int LinkCapacityKbps() {
  return static_cast<int>(FLAG_link_capacity);
}

DEFINE_int(queue_size, 0, "Size of the bottleneck link queue in packets.");
int QueueSize() {
  return static_cast<int>(FLAG_queue_size);
}

DEFINE_int(avg_propagation_delay_ms,
           0,
           "Average link propagation delay in ms.");
int AvgPropagationDelayMs() {
  return static_cast<int>(FLAG_avg_propagation_delay_ms);
}

DEFINE_string(rtc_event_log_name, "", "Filename for rtc event log.");
std::string RtcEventLogName() {
  return static_cast<std::string>(FLAG_rtc_event_log_name);
}

DEFINE_string(rtp_dump_name, "", "Filename for dumped received RTP stream.");
std::string RtpDumpName() {
  return static_cast<std::string>(FLAG_rtp_dump_name);
}

DEFINE_int(std_propagation_delay_ms,
           0,
           "Link propagation delay standard deviation in ms.");
int StdPropagationDelayMs() {
  return static_cast<int>(FLAG_std_propagation_delay_ms);
}

DEFINE_int(num_streams, 0, "Number of streams to show or analyze.");
int NumStreams() {
  return static_cast<int>(FLAG_num_streams);
}

DEFINE_int(selected_stream,
           0,
           "ID of the stream to show or analyze. "
           "Set to the number of streams to show them all.");
int SelectedStream() {
  return static_cast<int>(FLAG_selected_stream);
}

DEFINE_int(num_spatial_layers, 1, "Number of spatial layers to use.");
int NumSpatialLayers() {
  return static_cast<int>(FLAG_num_spatial_layers);
}

DEFINE_int(selected_sl,
           -1,
           "Spatial layer to show or analyze. -1 to disable filtering.");
int SelectedSL() {
  return static_cast<int>(FLAG_selected_sl);
}

DEFINE_string(stream0,
              "",
              "Comma separated values describing VideoStream for stream #0.");
std::string Stream0() {
  return static_cast<std::string>(FLAG_stream0);
}

DEFINE_string(stream1,
              "",
              "Comma separated values describing VideoStream for stream #1.");
std::string Stream1() {
  return static_cast<std::string>(FLAG_stream1);
}

DEFINE_string(sl0,
              "",
              "Comma separated values describing SpatialLayer for layer #0.");
std::string SL0() {
  return static_cast<std::string>(FLAG_sl0);
}

DEFINE_string(sl1,
              "",
              "Comma separated values describing SpatialLayer for layer #1.");
std::string SL1() {
  return static_cast<std::string>(FLAG_sl1);
}

DEFINE_string(encoded_frame_path,
              "",
              "The base path for encoded frame logs. Created files will have "
              "the form <encoded_frame_path>.<n>.(recv|send.<m>).ivf");
std::string EncodedFramePath() {
  return static_cast<std::string>(FLAG_encoded_frame_path);
}

DEFINE_bool(logs, false, "print logs to stderr");

DEFINE_bool(send_side_bwe, true, "Use send-side bandwidth estimation");

DEFINE_bool(allow_reordering, false, "Allow packet reordering to occur");

DEFINE_bool(use_ulpfec, false, "Use RED+ULPFEC forward error correction.");

DEFINE_bool(use_flexfec, false, "Use FlexFEC forward error correction.");

DEFINE_bool(audio, false, "Add audio stream");

DEFINE_bool(audio_video_sync, false, "Sync audio and video stream (no effect if"
    " audio is false)");

DEFINE_bool(audio_dtx, false, "Enable audio DTX (no effect if audio is false)");

DEFINE_bool(video, true, "Add video stream");

DEFINE_string(
    force_fieldtrials,
    "",
    "Field trials control experimental feature code which can be forced. "
    "E.g. running with --force_fieldtrials=WebRTC-FooFeature/Enable/"
    " will assign the group Enable to field trial WebRTC-FooFeature. Multiple "
    "trials are separated by \"/\"");

// Video-specific flags.
DEFINE_string(clip,
              "",
              "Name of the clip to show. If empty, a chroma generator is "
              "used.");
std::string Clip() {
  return static_cast<std::string>(FLAG_clip);
}

DEFINE_bool(help, false, "prints this message");

}  // namespace flags

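// Builds a fake network pipe, call bitrate limits and stream parameters from
// the flags above, then runs a VideoQualityTest over them.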
void Loopback() {
  FakeNetworkPipe::Config pipe_config;
  pipe_config.loss_percent = flags::LossPercent();
  pipe_config.avg_burst_loss_length = flags::AvgBurstLossLength();
  pipe_config.link_capacity_kbps = flags::LinkCapacityKbps();
  pipe_config.queue_length_packets = flags::QueueSize();
  pipe_config.queue_delay_ms = flags::AvgPropagationDelayMs();
  pipe_config.delay_standard_deviation_ms = flags::StdPropagationDelayMs();
  pipe_config.allow_reordering = flags::FLAG_allow_reordering;

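  // Flag values are given in kbps; the bitrate config fields expect bps.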
  Call::Config::BitrateConfig call_bitrate_config;
  call_bitrate_config.min_bitrate_bps = flags::MinBitrateKbps() * 1000;
  call_bitrate_config.start_bitrate_bps = flags::StartBitrateKbps() * 1000;
  call_bitrate_config.max_bitrate_bps = flags::MaxBitrateKbps() * 1000;

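  // Assemble the VideoQualityTest parameters from the parsed flags.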
  VideoQualityTest::Params params;
  params.call = {flags::FLAG_send_side_bwe, call_bitrate_config, 0};
  params.video = {flags::FLAG_video,
                  flags::Width(),
                  flags::Height(),
                  flags::Fps(),
                  flags::MinBitrateKbps() * 1000,
                  flags::TargetBitrateKbps() * 1000,
                  flags::MaxBitrateKbps() * 1000,
                  flags::FLAG_suspend_below_min_bitrate,
                  flags::Codec(),
                  flags::NumTemporalLayers(),
                  flags::SelectedTL(),
                  0,  // No min transmit bitrate.
                  flags::FLAG_use_ulpfec,
                  flags::FLAG_use_flexfec,
                  flags::Clip(),
                  flags::GetCaptureDevice()};
  params.audio = {flags::FLAG_audio, flags::FLAG_audio_video_sync,
                  flags::FLAG_audio_dtx};
  params.logging = {flags::FLAG_logs, flags::FLAG_rtc_event_log_name,
                    flags::FLAG_rtp_dump_name, flags::FLAG_encoded_frame_path};
  params.screenshare.enabled = false;
  params.analyzer = {"video", 0.0, 0.0, flags::DurationSecs(),
                     flags::OutputFilename(), flags::GraphTitle()};
  params.pipe = pipe_config;

  if (flags::NumStreams() > 1 && flags::Stream0().empty() &&
      flags::Stream1().empty()) {
    params.ss.infer_streams = true;
  }

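  // Hand the raw per-stream and per-spatial-layer descriptor strings to the
  // test, which parses them into the scalability settings.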
  std::vector<std::string> stream_descriptors;
  stream_descriptors.push_back(flags::Stream0());
  stream_descriptors.push_back(flags::Stream1());
  std::vector<std::string> SL_descriptors;
  SL_descriptors.push_back(flags::SL0());
  SL_descriptors.push_back(flags::SL1());
  VideoQualityTest::FillScalabilitySettings(
      &params, stream_descriptors, flags::NumStreams(), flags::SelectedStream(),
      flags::NumSpatialLayers(), flags::SelectedSL(), SL_descriptors);

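  // A nonzero --duration runs the test with the analyzer; otherwise the
  // received streams are rendered on screen.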
  VideoQualityTest test;
  if (flags::DurationSecs()) {
    test.RunWithAnalyzer(params);
  } else {
    test.RunWithRenderers(params);
  }
}
}  // namespace webrtc

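// Parses the command line, optionally prints the flag help text, installs the
// forced field trials, then runs the loopback inside the test framework.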
int main(int argc, char* argv[]) {
  ::testing::InitGoogleTest(&argc, argv);
  rtc::FlagList::SetFlagsFromCommandLine(&argc, argv, true);
  if (webrtc::flags::FLAG_help) {
    rtc::FlagList::Print(nullptr, false);
    return 0;
  }

  // InitFieldTrialsFromString needs a reference to an std::string instance,
  // with a scope that outlives the test.
  std::string field_trials = webrtc::flags::FLAG_force_fieldtrials;
  webrtc::test::InitFieldTrialsFromString(field_trials);

  webrtc::test::RunTest(webrtc::Loopback);
  return 0;
}