1 /*
2  *  Copyright (c) 2015 The WebRTC project authors. All Rights Reserved.
3  *
4  *  Use of this source code is governed by a BSD-style license
5  *  that can be found in the LICENSE file in the root of the source
6  *  tree. An additional intellectual property rights grant can be found
7  *  in the file PATENTS.  All contributing project authors may
8  *  be found in the AUTHORS file in the root of the source tree.
9  */
10 #include "video/video_quality_test.h"
11 
12 #include <stdio.h>
13 #include <algorithm>
14 #include <deque>
15 #include <map>
16 #include <sstream>
17 #include <string>
18 #include <vector>
19 
20 #include "api/optional.h"
21 #include "call/call.h"
22 #include "common_video/libyuv/include/webrtc_libyuv.h"
23 #include "logging/rtc_event_log/output/rtc_event_log_output_file.h"
24 #include "logging/rtc_event_log/rtc_event_log.h"
25 #include "media/engine/internalencoderfactory.h"
26 #include "media/engine/webrtcvideoengine.h"
27 #include "modules/audio_mixer/audio_mixer_impl.h"
28 #include "modules/rtp_rtcp/include/rtp_header_parser.h"
29 #include "modules/rtp_rtcp/source/rtp_format.h"
30 #include "modules/rtp_rtcp/source/rtp_utility.h"
31 #include "modules/video_coding/codecs/h264/include/h264.h"
32 #include "modules/video_coding/codecs/vp8/include/vp8.h"
33 #include "modules/video_coding/codecs/vp8/include/vp8_common_types.h"
34 #include "modules/video_coding/codecs/vp9/include/vp9.h"
35 #include "rtc_base/checks.h"
36 #include "rtc_base/cpu_time.h"
37 #include "rtc_base/event.h"
38 #include "rtc_base/flags.h"
39 #include "rtc_base/format_macros.h"
40 #include "rtc_base/logging.h"
41 #include "rtc_base/memory_usage.h"
42 #include "rtc_base/pathutils.h"
43 #include "rtc_base/platform_file.h"
44 #include "rtc_base/ptr_util.h"
45 #include "rtc_base/timeutils.h"
46 #include "system_wrappers/include/cpu_info.h"
47 #include "system_wrappers/include/field_trial.h"
48 #include "test/gtest.h"
49 #include "test/layer_filtering_transport.h"
50 #include "test/run_loop.h"
51 #include "test/statistics.h"
52 #include "test/testsupport/fileutils.h"
53 #include "test/testsupport/frame_writer.h"
54 #include "test/testsupport/test_artifacts.h"
55 #include "test/vcm_capturer.h"
56 #include "test/video_renderer.h"
57 #include "voice_engine/include/voe_base.h"
58 
59 #include "test/rtp_file_writer.h"
60 
61 DEFINE_bool(save_worst_frame,
62             false,
63             "Enable saving a frame with the lowest PSNR to a jpeg file in the "
64             "test_artifacts_dir");
65 
66 namespace {
67 
68 constexpr int kSendStatsPollingIntervalMs = 1000;
69 
70 constexpr size_t kMaxComparisons = 10;
71 constexpr char kSyncGroup[] = "av_sync";
72 constexpr int kOpusMinBitrateBps = 6000;
73 constexpr int kOpusBitrateFbBps = 32000;
74 constexpr int kFramesSentInQuickTest = 1;
75 constexpr uint32_t kThumbnailSendSsrcStart = 0xE0000;
76 constexpr uint32_t kThumbnailRtxSsrcStart = 0xF0000;
77 
78 constexpr int kDefaultMaxQp = cricket::WebRtcVideoChannel::kDefaultQpMax;
79 
80 struct VoiceEngineState {
81   VoiceEngineState()
82       : voice_engine(nullptr),
83         base(nullptr),
84         send_channel_id(-1),
85         receive_channel_id(-1) {}
86 
87   webrtc::VoiceEngine* voice_engine;
88   webrtc::VoEBase* base;
89   int send_channel_id;
90   int receive_channel_id;
91 };
92 
93 void CreateVoiceEngine(
94     VoiceEngineState* voe,
95     webrtc::AudioDeviceModule* adm,
96     webrtc::AudioProcessing* apm,
97     rtc::scoped_refptr<webrtc::AudioDecoderFactory> decoder_factory) {
98   voe->voice_engine = webrtc::VoiceEngine::Create();
99   voe->base = webrtc::VoEBase::GetInterface(voe->voice_engine);
100   EXPECT_EQ(0, adm->Init());
101   EXPECT_EQ(0, voe->base->Init(adm, apm, decoder_factory));
102   webrtc::VoEBase::ChannelConfig config;
103   config.enable_voice_pacing = true;
104   voe->send_channel_id = voe->base->CreateChannel(config);
105   EXPECT_GE(voe->send_channel_id, 0);
106   voe->receive_channel_id = voe->base->CreateChannel();
107   EXPECT_GE(voe->receive_channel_id, 0);
108 }
109 
110 void DestroyVoiceEngine(VoiceEngineState* voe) {
111   voe->base->DeleteChannel(voe->send_channel_id);
112   voe->send_channel_id = -1;
113   voe->base->DeleteChannel(voe->receive_channel_id);
114   voe->receive_channel_id = -1;
115   voe->base->Release();
116   voe->base = nullptr;
117 
118   webrtc::VoiceEngine::Delete(voe->voice_engine);
119   voe->voice_engine = nullptr;
120 }
121 
122 class VideoStreamFactory
123     : public webrtc::VideoEncoderConfig::VideoStreamFactoryInterface {
124  public:
125   explicit VideoStreamFactory(const std::vector<webrtc::VideoStream>& streams)
126       : streams_(streams) {}
127 
128  private:
129   std::vector<webrtc::VideoStream> CreateEncoderStreams(
130       int width,
131       int height,
132       const webrtc::VideoEncoderConfig& encoder_config) override {
133     // The highest layer must match the incoming resolution.
134     std::vector<webrtc::VideoStream> streams = streams_;
135     streams[streams_.size() - 1].height = height;
136     streams[streams_.size() - 1].width = width;
137     return streams;
138   }
139 
140   std::vector<webrtc::VideoStream> streams_;
141 };
142 
143 bool IsFlexfec(int payload_type) {
144   return payload_type == webrtc::VideoQualityTest::kFlexfecPayloadType;
145 }
146 
147 }  // namespace
148 
149 namespace webrtc {
150 
151 class VideoAnalyzer : public PacketReceiver,
152                       public Transport,
153                       public rtc::VideoSinkInterface<VideoFrame> {
154  public:
155   VideoAnalyzer(test::LayerFilteringTransport* transport,
156                 const std::string& test_label,
157                 double avg_psnr_threshold,
158                 double avg_ssim_threshold,
159                 int duration_frames,
160                 FILE* graph_data_output_file,
161                 const std::string& graph_title,
162                 uint32_t ssrc_to_analyze,
163                 uint32_t rtx_ssrc_to_analyze,
164                 size_t selected_stream,
165                 int selected_sl,
166                 int selected_tl,
167                 bool is_quick_test_enabled,
168                 Clock* clock,
169                 std::string rtp_dump_name)
170       : transport_(transport),
171         receiver_(nullptr),
172         call_(nullptr),
173         send_stream_(nullptr),
174         receive_stream_(nullptr),
175         captured_frame_forwarder_(this, clock),
176         test_label_(test_label),
177         graph_data_output_file_(graph_data_output_file),
178         graph_title_(graph_title),
179         ssrc_to_analyze_(ssrc_to_analyze),
180         rtx_ssrc_to_analyze_(rtx_ssrc_to_analyze),
181         selected_stream_(selected_stream),
182         selected_sl_(selected_sl),
183         selected_tl_(selected_tl),
184         pre_encode_proxy_(this),
185         encode_timing_proxy_(this),
186         last_fec_bytes_(0),
187         frames_to_process_(duration_frames),
188         frames_recorded_(0),
189         frames_processed_(0),
190         dropped_frames_(0),
191         dropped_frames_before_first_encode_(0),
192         dropped_frames_before_rendering_(0),
193         last_render_time_(0),
194         rtp_timestamp_delta_(0),
195         total_media_bytes_(0),
196         first_sending_time_(0),
197         last_sending_time_(0),
198         cpu_time_(0),
199         wallclock_time_(0),
200         avg_psnr_threshold_(avg_psnr_threshold),
201         avg_ssim_threshold_(avg_ssim_threshold),
202         is_quick_test_enabled_(is_quick_test_enabled),
203         stats_polling_thread_(&PollStatsThread, this, "StatsPoller"),
204         comparison_available_event_(false, false),
205         done_(true, false),
206         clock_(clock),
207         start_ms_(clock->TimeInMilliseconds()) {
208     // Create thread pool for CPU-expensive PSNR/SSIM calculations.
209 
210     // Try to use about as many threads as cores, but leave kMinCoresLeft cores
211     // free, so that we don't accidentally starve "real" worker threads (codec etc).
212     // Also, don't allocate more than kMaxComparisonThreads, even if there are
213     // spare cores.
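    // E.g. (illustrative): a 12-core machine gets min(12 - 4, 8) = 8 comparison
    // threads, while a machine with 4 or fewer cores falls back to a single one.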
214 
215     uint32_t num_cores = CpuInfo::DetectNumberOfCores();
216     RTC_DCHECK_GE(num_cores, 1);
217     static const uint32_t kMinCoresLeft = 4;
218     static const uint32_t kMaxComparisonThreads = 8;
219 
220     if (num_cores <= kMinCoresLeft) {
221       num_cores = 1;
222     } else {
223       num_cores -= kMinCoresLeft;
224       num_cores = std::min(num_cores, kMaxComparisonThreads);
225     }
226 
227     for (uint32_t i = 0; i < num_cores; ++i) {
228       rtc::PlatformThread* thread =
229           new rtc::PlatformThread(&FrameComparisonThread, this, "Analyzer");
230       thread->Start();
231       comparison_thread_pool_.push_back(thread);
232     }
233 
234     if (!rtp_dump_name.empty()) {
235       fprintf(stdout, "Writing rtp dump to %s\n", rtp_dump_name.c_str());
236       rtp_file_writer_.reset(test::RtpFileWriter::Create(
237           test::RtpFileWriter::kRtpDump, rtp_dump_name));
238     }
239   }
240 
241   ~VideoAnalyzer() {
242     for (rtc::PlatformThread* thread : comparison_thread_pool_) {
243       thread->Stop();
244       delete thread;
245     }
246   }
247 
248   virtual void SetReceiver(PacketReceiver* receiver) { receiver_ = receiver; }
249 
250   void SetSource(test::VideoCapturer* video_capturer, bool respect_sink_wants) {
251     if (respect_sink_wants)
252       captured_frame_forwarder_.SetSource(video_capturer);
253     rtc::VideoSinkWants wants;
254     video_capturer->AddOrUpdateSink(InputInterface(), wants);
255   }
256 
257   void SetCall(Call* call) {
258     rtc::CritScope lock(&crit_);
259     RTC_DCHECK(!call_);
260     call_ = call;
261   }
262 
263   void SetSendStream(VideoSendStream* stream) {
264     rtc::CritScope lock(&crit_);
265     RTC_DCHECK(!send_stream_);
266     send_stream_ = stream;
267   }
268 
269   void SetReceiveStream(VideoReceiveStream* stream) {
270     rtc::CritScope lock(&crit_);
271     RTC_DCHECK(!receive_stream_);
272     receive_stream_ = stream;
273   }
274 
275   rtc::VideoSinkInterface<VideoFrame>* InputInterface() {
276     return &captured_frame_forwarder_;
277   }
278   rtc::VideoSourceInterface<VideoFrame>* OutputInterface() {
279     return &captured_frame_forwarder_;
280   }
281 
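  // Forwards every packet to the wrapped receiver. For media (and RTX) packets
  // on the analyzed SSRCs it also records the receive time, keyed by the
  // unwrapped RTP timestamp, so it can later be matched against the
  // corresponding send and render times.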
282   DeliveryStatus DeliverPacket(MediaType media_type,
283                                const uint8_t* packet,
284                                size_t length,
285                                const PacketTime& packet_time) override {
286     // Ignore timestamps of RTCP packets. They're not synchronized with
287     // RTP packet timestamps and so they would confuse wrap_handler_.
288     if (RtpHeaderParser::IsRtcp(packet, length)) {
289       return receiver_->DeliverPacket(media_type, packet, length, packet_time);
290     }
291 
292     if (rtp_file_writer_) {
293       test::RtpPacket p;
294       memcpy(p.data, packet, length);
295       p.length = length;
296       p.original_length = length;
297       p.time_ms = clock_->TimeInMilliseconds() - start_ms_;
298       rtp_file_writer_->WritePacket(&p);
299     }
300 
301     RtpUtility::RtpHeaderParser parser(packet, length);
302     RTPHeader header;
303     parser.Parse(&header);
304     if (!IsFlexfec(header.payloadType) &&
305         (header.ssrc == ssrc_to_analyze_ ||
306          header.ssrc == rtx_ssrc_to_analyze_)) {
307       // Ignore FlexFEC timestamps, to avoid collisions with media timestamps.
308       // (FlexFEC and media are sent on different SSRCs, which have different
309       // timestamp spaces.)
310       // Also ignore packets from wrong SSRC, but include retransmits.
311       rtc::CritScope lock(&crit_);
312       int64_t timestamp =
313           wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_);
314       recv_times_[timestamp] =
315           Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
316     }
317 
318     return receiver_->DeliverPacket(media_type, packet, length, packet_time);
319   }
320 
321   void MeasuredEncodeTiming(int64_t ntp_time_ms, int encode_time_ms) {
322     rtc::CritScope crit(&comparison_lock_);
323     samples_encode_time_ms_[ntp_time_ms] = encode_time_ms;
324   }
325 
326   void PreEncodeOnFrame(const VideoFrame& video_frame) {
327     rtc::CritScope lock(&crit_);
328     if (!first_encoded_timestamp_) {
329       while (frames_.front().timestamp() != video_frame.timestamp()) {
330         ++dropped_frames_before_first_encode_;
331         frames_.pop_front();
332         RTC_CHECK(!frames_.empty());
333       }
334       first_encoded_timestamp_ =
335           rtc::Optional<uint32_t>(video_frame.timestamp());
336     }
337   }
338 
339   void PostEncodeFrameCallback(const EncodedFrame& encoded_frame) {
340     rtc::CritScope lock(&crit_);
341     if (!first_sent_timestamp_ &&
342         encoded_frame.stream_id_ == selected_stream_) {
343       first_sent_timestamp_ = rtc::Optional<uint32_t>(encoded_frame.timestamp_);
344     }
345   }
346 
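  // Called on the send side for every outgoing RTP packet. Establishes the
  // offset between capture timestamps and RTP timestamps from the first packet
  // of the selected stream, and records per-frame send times and media byte
  // counts used for the bitrate and latency statistics.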
347   bool SendRtp(const uint8_t* packet,
348                size_t length,
349                const PacketOptions& options) override {
350     RtpUtility::RtpHeaderParser parser(packet, length);
351     RTPHeader header;
352     parser.Parse(&header);
353 
354     int64_t current_time =
355         Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
356 
357     bool result = transport_->SendRtp(packet, length, options);
358     {
359       rtc::CritScope lock(&crit_);
360       if (rtp_timestamp_delta_ == 0 && header.ssrc == ssrc_to_analyze_) {
361         RTC_CHECK(static_cast<bool>(first_sent_timestamp_));
362         rtp_timestamp_delta_ = header.timestamp - *first_sent_timestamp_;
363       }
364 
365       if (!IsFlexfec(header.payloadType) && header.ssrc == ssrc_to_analyze_) {
366         // Ignore FlexFEC timestamps, to avoid collisions with media timestamps.
367         // (FlexFEC and media are sent on different SSRCs, which have different
368         // timestamp spaces.)
369         // Also ignore packets from wrong SSRC and retransmits.
370         int64_t timestamp =
371             wrap_handler_.Unwrap(header.timestamp - rtp_timestamp_delta_);
372         send_times_[timestamp] = current_time;
373 
374         if (IsInSelectedSpatialAndTemporalLayer(packet, length, header)) {
375           encoded_frame_sizes_[timestamp] +=
376               length - (header.headerLength + header.paddingLength);
377           total_media_bytes_ +=
378               length - (header.headerLength + header.paddingLength);
379         }
380         if (first_sending_time_ == 0)
381           first_sending_time_ = current_time;
382         last_sending_time_ = current_time;
383       }
384     }
385     return result;
386   }
387 
388   bool SendRtcp(const uint8_t* packet, size_t length) override {
389     return transport_->SendRtcp(packet, length);
390   }
391 
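  // Called for every rendered frame. Matches the rendered frame against the
  // queue of captured frames by unwrapped RTP timestamp; captured frames that
  // were never rendered are either counted as dropped or compared against the
  // last rendered frame.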
392   void OnFrame(const VideoFrame& video_frame) override {
393     int64_t render_time_ms =
394         Clock::GetRealTimeClock()->CurrentNtpInMilliseconds();
395 
396     rtc::CritScope lock(&crit_);
397 
398     StartExcludingCpuThreadTime();
399 
400     int64_t send_timestamp =
401         wrap_handler_.Unwrap(video_frame.timestamp() - rtp_timestamp_delta_);
402 
403     while (wrap_handler_.Unwrap(frames_.front().timestamp()) < send_timestamp) {
404       if (!last_rendered_frame_) {
405         // No previous frame rendered, this one was dropped after sending but
406         // before rendering.
407         ++dropped_frames_before_rendering_;
408       } else {
409         AddFrameComparison(frames_.front(), *last_rendered_frame_, true,
410                            render_time_ms);
411       }
412       frames_.pop_front();
413       RTC_DCHECK(!frames_.empty());
414     }
415 
416     VideoFrame reference_frame = frames_.front();
417     frames_.pop_front();
418     int64_t reference_timestamp =
419         wrap_handler_.Unwrap(reference_frame.timestamp());
420     if (send_timestamp == reference_timestamp - 1) {
421       // TODO(ivica): Make this work for > 2 streams.
422       // Look at RTPSender::BuildRTPHeader.
423       ++send_timestamp;
424     }
425     ASSERT_EQ(reference_timestamp, send_timestamp);
426 
427     AddFrameComparison(reference_frame, video_frame, false, render_time_ms);
428 
429     last_rendered_frame_ = rtc::Optional<VideoFrame>(video_frame);
430 
431     StopExcludingCpuThreadTime();
432   }
433 
434   void Wait() {
435     // Frame comparisons can be very expensive. Wait for the test to be done,
436     // but at each time-out check whether frames_processed is still going up.
437     // If so, give it more time; otherwise fail. This should reduce test flakiness.
438 
439     stats_polling_thread_.Start();
440 
441     int last_frames_processed = -1;
442     int iteration = 0;
443     while (!done_.Wait(VideoQualityTest::kDefaultTimeoutMs)) {
444       int frames_processed;
445       {
446         rtc::CritScope crit(&comparison_lock_);
447         frames_processed = frames_processed_;
448       }
449 
450       // Print some output so test infrastructure won't think we've crashed.
451       const char* kKeepAliveMessages[3] = {
452           "Uh, I'm-I'm not quite dead, sir.",
453           "Uh, I-I think uh, I could pull through, sir.",
454           "Actually, I think I'm all right to come with you--"};
455       printf("- %s\n", kKeepAliveMessages[iteration++ % 3]);
456 
457       if (last_frames_processed == -1) {
458         last_frames_processed = frames_processed;
459         continue;
460       }
461       if (frames_processed == last_frames_processed) {
462         EXPECT_GT(frames_processed, last_frames_processed)
463             << "Analyzer stalled while waiting for test to finish.";
464         done_.Set();
465         break;
466       }
467       last_frames_processed = frames_processed;
468     }
469 
470     if (iteration > 0)
471       printf("- Farewell, sweet Concorde!\n");
472 
473     stats_polling_thread_.Stop();
474   }
475 
476   rtc::VideoSinkInterface<VideoFrame>* pre_encode_proxy() {
477     return &pre_encode_proxy_;
478   }
479   EncodedFrameObserver* encode_timing_proxy() { return &encode_timing_proxy_; }
480 
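  // CPU usage is measured as accumulated process CPU time over accumulated
  // wall-clock time between the Start/StopMeasuringCpuProcessTime calls. The
  // Start/StopExcludingCpuThreadTime helpers subtract the CPU time spent in the
  // analyzer's own threads (e.g. PSNR/SSIM comparisons) so that the analyzer
  // itself does not inflate the reported usage.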
481   void StartMeasuringCpuProcessTime() {
482     rtc::CritScope lock(&cpu_measurement_lock_);
483     cpu_time_ -= rtc::GetProcessCpuTimeNanos();
484     wallclock_time_ -= rtc::SystemTimeNanos();
485   }
486 
487   void StopMeasuringCpuProcessTime() {
488     rtc::CritScope lock(&cpu_measurement_lock_);
489     cpu_time_ += rtc::GetProcessCpuTimeNanos();
490     wallclock_time_ += rtc::SystemTimeNanos();
491   }
492 
493   void StartExcludingCpuThreadTime() {
494     rtc::CritScope lock(&cpu_measurement_lock_);
495     cpu_time_ += rtc::GetThreadCpuTimeNanos();
496   }
497 
498   void StopExcludingCpuThreadTime() {
499     rtc::CritScope lock(&cpu_measurement_lock_);
500     cpu_time_ -= rtc::GetThreadCpuTimeNanos();
501   }
502 
503   double GetCpuUsagePercent() {
504     rtc::CritScope lock(&cpu_measurement_lock_);
505     return static_cast<double>(cpu_time_) / wallclock_time_ * 100.0;
506   }
507 
508   test::LayerFilteringTransport* const transport_;
509   PacketReceiver* receiver_;
510 
511  private:
512   struct FrameComparison {
513     FrameComparison()
514         : dropped(false),
515           input_time_ms(0),
516           send_time_ms(0),
517           recv_time_ms(0),
518           render_time_ms(0),
519           encoded_frame_size(0) {}
520 
521     FrameComparison(const VideoFrame& reference,
522                     const VideoFrame& render,
523                     bool dropped,
524                     int64_t input_time_ms,
525                     int64_t send_time_ms,
526                     int64_t recv_time_ms,
527                     int64_t render_time_ms,
528                     size_t encoded_frame_size)
529         : reference(reference),
530           render(render),
531           dropped(dropped),
532           input_time_ms(input_time_ms),
533           send_time_ms(send_time_ms),
534           recv_time_ms(recv_time_ms),
535           render_time_ms(render_time_ms),
536           encoded_frame_size(encoded_frame_size) {}
537 
538     FrameComparison(bool dropped,
539                     int64_t input_time_ms,
540                     int64_t send_time_ms,
541                     int64_t recv_time_ms,
542                     int64_t render_time_ms,
543                     size_t encoded_frame_size)
544         : dropped(dropped),
545           input_time_ms(input_time_ms),
546           send_time_ms(send_time_ms),
547           recv_time_ms(recv_time_ms),
548           render_time_ms(render_time_ms),
549           encoded_frame_size(encoded_frame_size) {}
550 
551     rtc::Optional<VideoFrame> reference;
552     rtc::Optional<VideoFrame> render;
553     bool dropped;
554     int64_t input_time_ms;
555     int64_t send_time_ms;
556     int64_t recv_time_ms;
557     int64_t render_time_ms;
558     size_t encoded_frame_size;
559   };
560 
561   struct Sample {
562     Sample(int dropped,
563            int64_t input_time_ms,
564            int64_t send_time_ms,
565            int64_t recv_time_ms,
566            int64_t render_time_ms,
567            size_t encoded_frame_size,
568            double psnr,
569            double ssim)
570         : dropped(dropped),
571           input_time_ms(input_time_ms),
572           send_time_ms(send_time_ms),
573           recv_time_ms(recv_time_ms),
574           render_time_ms(render_time_ms),
575           encoded_frame_size(encoded_frame_size),
576           psnr(psnr),
577           ssim(ssim) {}
578 
579     int dropped;
580     int64_t input_time_ms;
581     int64_t send_time_ms;
582     int64_t recv_time_ms;
583     int64_t render_time_ms;
584     size_t encoded_frame_size;
585     double psnr;
586     double ssim;
587   };
588 
589   // This class receives the send-side OnEncodeTiming and is provided to not
590   // conflict with the receiver-side pre_decode_callback.
591   class OnEncodeTimingProxy : public EncodedFrameObserver {
592    public:
593     explicit OnEncodeTimingProxy(VideoAnalyzer* parent) : parent_(parent) {}
594 
595     void OnEncodeTiming(int64_t ntp_time_ms, int encode_time_ms) override {
596       parent_->MeasuredEncodeTiming(ntp_time_ms, encode_time_ms);
597     }
598     void EncodedFrameCallback(const EncodedFrame& frame) override {
599       parent_->PostEncodeFrameCallback(frame);
600     }
601 
602    private:
603     VideoAnalyzer* const parent_;
604   };
605 
606   // This class receives the send-side OnFrame callback and is provided to not
607   // conflict with the receiver-side renderer callback.
608   class PreEncodeProxy : public rtc::VideoSinkInterface<VideoFrame> {
609    public:
610     explicit PreEncodeProxy(VideoAnalyzer* parent) : parent_(parent) {}
611 
612     void OnFrame(const VideoFrame& video_frame) override {
613       parent_->PreEncodeOnFrame(video_frame);
614     }
615 
616    private:
617     VideoAnalyzer* const parent_;
618   };
619 
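  // Returns true if the packet belongs to a temporal/spatial layer that is part
  // of the analyzed selection. Non-VP8/VP9 payloads always pass; for VP8 and
  // VP9 the codec-specific payload header is parsed to read the layer indices.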
620   bool IsInSelectedSpatialAndTemporalLayer(const uint8_t* packet,
621                                            size_t length,
622                                            const RTPHeader& header) {
623     if (header.payloadType != test::CallTest::kPayloadTypeVP9 &&
624         header.payloadType != test::CallTest::kPayloadTypeVP8) {
625       return true;
626     } else {
627       // Get VP8 and VP9 specific header to check layers indexes.
628       const uint8_t* payload = packet + header.headerLength;
629       const size_t payload_length = length - header.headerLength;
630       const size_t payload_data_length = payload_length - header.paddingLength;
631       const bool is_vp8 = header.payloadType == test::CallTest::kPayloadTypeVP8;
632       std::unique_ptr<RtpDepacketizer> depacketizer(
633           RtpDepacketizer::Create(is_vp8 ? kRtpVideoVp8 : kRtpVideoVp9));
634       RtpDepacketizer::ParsedPayload parsed_payload;
635       bool result =
636           depacketizer->Parse(&parsed_payload, payload, payload_data_length);
637       RTC_DCHECK(result);
638       const int temporal_idx = static_cast<int>(
639           is_vp8 ? parsed_payload.type.Video.codecHeader.VP8.temporalIdx
640                  : parsed_payload.type.Video.codecHeader.VP9.temporal_idx);
641       const int spatial_idx = static_cast<int>(
642           is_vp8 ? kNoSpatialIdx
643                  : parsed_payload.type.Video.codecHeader.VP9.spatial_idx);
644       return (selected_tl_ < 0 || temporal_idx == kNoTemporalIdx ||
645               temporal_idx <= selected_tl_) &&
646              (selected_sl_ < 0 || spatial_idx == kNoSpatialIdx ||
647               spatial_idx <= selected_sl_);
648     }
649   }
650 
651   void AddFrameComparison(const VideoFrame& reference,
652                           const VideoFrame& render,
653                           bool dropped,
654                           int64_t render_time_ms)
655       RTC_EXCLUSIVE_LOCKS_REQUIRED(crit_) {
656     int64_t reference_timestamp = wrap_handler_.Unwrap(reference.timestamp());
657     int64_t send_time_ms = send_times_[reference_timestamp];
658     send_times_.erase(reference_timestamp);
659     int64_t recv_time_ms = recv_times_[reference_timestamp];
660     recv_times_.erase(reference_timestamp);
661 
662     // TODO(ivica): Make this work for > 2 streams.
663     auto it = encoded_frame_sizes_.find(reference_timestamp);
664     if (it == encoded_frame_sizes_.end())
665       it = encoded_frame_sizes_.find(reference_timestamp - 1);
666     size_t encoded_size = it == encoded_frame_sizes_.end() ? 0 : it->second;
667     if (it != encoded_frame_sizes_.end())
668       encoded_frame_sizes_.erase(it);
669 
670     rtc::CritScope crit(&comparison_lock_);
671     if (comparisons_.size() < kMaxComparisons) {
672       comparisons_.push_back(FrameComparison(reference, render, dropped,
673                                              reference.ntp_time_ms(),
674                                              send_time_ms, recv_time_ms,
675                                              render_time_ms, encoded_size));
676     } else {
677       comparisons_.push_back(FrameComparison(dropped,
678                                              reference.ntp_time_ms(),
679                                              send_time_ms, recv_time_ms,
680                                              render_time_ms, encoded_size));
681     }
682     comparison_available_event_.Set();
683   }
684 
685   static void PollStatsThread(void* obj) {
686     static_cast<VideoAnalyzer*>(obj)->PollStats();
687   }
688 
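  // Runs on stats_polling_thread_; samples call, send-stream and (if present)
  // receive-stream statistics once per kSendStatsPollingIntervalMs until the
  // test is done.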
689   void PollStats() {
690     while (!done_.Wait(kSendStatsPollingIntervalMs)) {
691       rtc::CritScope crit(&comparison_lock_);
692 
693       Call::Stats call_stats = call_->GetStats();
694       send_bandwidth_bps_.AddSample(call_stats.send_bandwidth_bps);
695 
696       VideoSendStream::Stats send_stats = send_stream_->GetStats();
697       // It's not certain that we yet have estimates for any of these stats.
698       // Check that they are positive before mixing them in.
699       if (send_stats.encode_frame_rate > 0)
700         encode_frame_rate_.AddSample(send_stats.encode_frame_rate);
701       if (send_stats.avg_encode_time_ms > 0)
702         encode_time_ms_.AddSample(send_stats.avg_encode_time_ms);
703       if (send_stats.encode_usage_percent > 0)
704         encode_usage_percent_.AddSample(send_stats.encode_usage_percent);
705       if (send_stats.media_bitrate_bps > 0)
706         media_bitrate_bps_.AddSample(send_stats.media_bitrate_bps);
707       size_t fec_bytes = 0;
708       for (auto kv : send_stats.substreams) {
709         fec_bytes += kv.second.rtp_stats.fec.payload_bytes +
710                      kv.second.rtp_stats.fec.padding_bytes;
711       }
712       fec_bitrate_bps_.AddSample((fec_bytes - last_fec_bytes_) * 8);
713       last_fec_bytes_ = fec_bytes;
714 
715       if (receive_stream_ != nullptr) {
716         VideoReceiveStream::Stats receive_stats = receive_stream_->GetStats();
717         if (receive_stats.decode_ms > 0)
718           decode_time_ms_.AddSample(receive_stats.decode_ms);
719         if (receive_stats.max_decode_ms > 0)
720           decode_time_max_ms_.AddSample(receive_stats.max_decode_ms);
721       }
722 
723       memory_usage_.AddSample(rtc::GetProcessResidentSizeBytes());
724     }
725   }
726 
727   static bool FrameComparisonThread(void* obj) {
728     return static_cast<VideoAnalyzer*>(obj)->CompareFrames();
729   }
730 
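  // Worker loop body for the comparison threads: returning true means "call me
  // again", returning false ends that thread's run loop once all frames have
  // been recorded.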
731   bool CompareFrames() {
732     if (AllFramesRecorded())
733       return false;
734 
735     FrameComparison comparison;
736 
737     if (!PopComparison(&comparison)) {
738       // Wait until new comparison task is available, or test is done.
739       // If done, wake up remaining threads waiting.
740       comparison_available_event_.Wait(1000);
741       if (AllFramesRecorded()) {
742         comparison_available_event_.Set();
743         return false;
744       }
745       return true;  // Try again.
746     }
747 
748     StartExcludingCpuThreadTime();
749 
750     PerformFrameComparison(comparison);
751 
752     StopExcludingCpuThreadTime();
753 
754     if (FrameProcessed()) {
755       PrintResults();
756       if (graph_data_output_file_)
757         PrintSamplesToFile();
758       done_.Set();
759       comparison_available_event_.Set();
760       return false;
761     }
762 
763     return true;
764   }
765 
766   bool PopComparison(FrameComparison* comparison) {
767     rtc::CritScope crit(&comparison_lock_);
768     // If AllFramesRecorded() is true, it means we have already popped
769     // frames_to_process_ frames from comparisons_, so there is no more work
770     // for this thread to be done. frames_processed_ might still be lower if
771     // all comparisons are not done, but those frames are currently being
772     // worked on by other threads.
773     if (comparisons_.empty() || AllFramesRecorded())
774       return false;
775 
776     *comparison = comparisons_.front();
777     comparisons_.pop_front();
778 
779     FrameRecorded();
780     return true;
781   }
782 
783   // Increment counter for number of frames received for comparison.
784   void FrameRecorded() {
785     rtc::CritScope crit(&comparison_lock_);
786     ++frames_recorded_;
787   }
788 
789   // Returns true if all frames to be compared have been taken from the queue.
790   bool AllFramesRecorded() {
791     rtc::CritScope crit(&comparison_lock_);
792     assert(frames_recorded_ <= frames_to_process_);
793     return frames_recorded_ == frames_to_process_;
794   }
795 
796   // Increments the count of processed frames. Returns true if this was the
797   // last frame to be processed.
798   bool FrameProcessed() {
799     rtc::CritScope crit(&comparison_lock_);
800     ++frames_processed_;
801     assert(frames_processed_ <= frames_to_process_);
802     return frames_processed_ == frames_to_process_;
803   }
804 
805   void PrintResults() {
806     StopMeasuringCpuProcessTime();
807     rtc::CritScope crit(&comparison_lock_);
808     PrintResult("psnr", psnr_, " dB");
809     PrintResult("ssim", ssim_, " score");
810     PrintResult("sender_time", sender_time_, " ms");
811     PrintResult("receiver_time", receiver_time_, " ms");
812     PrintResult("total_delay_incl_network", end_to_end_, " ms");
813     PrintResult("time_between_rendered_frames", rendered_delta_, " ms");
814     PrintResult("encode_frame_rate", encode_frame_rate_, " fps");
815     PrintResult("encode_time", encode_time_ms_, " ms");
816     PrintResult("media_bitrate", media_bitrate_bps_, " bps");
817     PrintResult("fec_bitrate", fec_bitrate_bps_, " bps");
818     PrintResult("send_bandwidth", send_bandwidth_bps_, " bps");
819 
820     if (worst_frame_) {
821       printf("RESULT min_psnr: %s = %lf dB\n", test_label_.c_str(),
822              worst_frame_->psnr);
823     }
824 
825     if (receive_stream_ != nullptr) {
826       PrintResult("decode_time", decode_time_ms_, " ms");
827     }
828 
829     printf("RESULT dropped_frames: %s = %d frames\n", test_label_.c_str(),
830            dropped_frames_);
831     printf("RESULT cpu_usage: %s = %lf %%\n", test_label_.c_str(),
832            GetCpuUsagePercent());
833 
834 #if defined(WEBRTC_WIN)
835     // On Linux and Mac the Resident Set Size may include unused pages, so this
836     // metric would depend on the order in which tests are run and would be
837     // flaky. It is therefore only reported on Windows.
838     PrintResult("memory_usage", memory_usage_, " bytes");
839 #endif
840 
841     // Save only the worst frame for manual analysis. The intention is only to
842     // detect video corruption, not to track picture quality, which is why jpeg
843     // is used here.
844     if (FLAG_save_worst_frame && worst_frame_) {
845       std::string output_dir;
846       test::GetTestArtifactsDir(&output_dir);
847       std::string output_path =
848           rtc::Pathname(output_dir, test_label_ + ".jpg").pathname();
849       RTC_LOG(LS_INFO) << "Saving worst frame to " << output_path;
850       test::JpegFrameWriter frame_writer(output_path);
851       RTC_CHECK(frame_writer.WriteFrame(worst_frame_->frame,
852                                         100 /*best quality*/));
853     }
854 
855     //  Disable quality check for quick test, as quality checks may fail
856     //  because too few samples were collected.
857     if (!is_quick_test_enabled_) {
858       EXPECT_GT(psnr_.Mean(), avg_psnr_threshold_);
859       EXPECT_GT(ssim_.Mean(), avg_ssim_threshold_);
860     }
861   }
862 
863   void PerformFrameComparison(const FrameComparison& comparison) {
864     // Perform expensive psnr and ssim calculations while not holding lock.
865     double psnr = -1.0;
866     double ssim = -1.0;
867     if (comparison.reference && !comparison.dropped) {
868       psnr = I420PSNR(&*comparison.reference, &*comparison.render);
869       ssim = I420SSIM(&*comparison.reference, &*comparison.render);
870     }
871 
872     rtc::CritScope crit(&comparison_lock_);
873 
874     if (psnr >= 0.0 && (!worst_frame_ || worst_frame_->psnr > psnr)) {
875       worst_frame_.emplace(FrameWithPsnr{psnr, *comparison.render});
876     }
877 
878     if (graph_data_output_file_) {
879       samples_.push_back(Sample(
880           comparison.dropped, comparison.input_time_ms, comparison.send_time_ms,
881           comparison.recv_time_ms, comparison.render_time_ms,
882           comparison.encoded_frame_size, psnr, ssim));
883     }
884     if (psnr >= 0.0)
885       psnr_.AddSample(psnr);
886     if (ssim >= 0.0)
887       ssim_.AddSample(ssim);
888 
889     if (comparison.dropped) {
890       ++dropped_frames_;
891       return;
892     }
893     if (last_render_time_ != 0)
894       rendered_delta_.AddSample(comparison.render_time_ms - last_render_time_);
895     last_render_time_ = comparison.render_time_ms;
896 
897     sender_time_.AddSample(comparison.send_time_ms - comparison.input_time_ms);
898     if (comparison.recv_time_ms > 0) {
899       // If recv_time_ms == 0, this frame consisted of packets which were all
900       // lost in the transport. Since we were able to render the frame, however,
901       // the dropped packets were recovered by FlexFEC. The FlexFEC recovery
902       // happens internally in Call, so here we cannot know which FEC packets
903       // protected the lost media packets. Consequently, we were not able to
904       // record a meaningful recv_time_ms. We therefore skip
905       // this sample.
906       //
907       // The reasoning above does not hold for ULPFEC and RTX, as for those
908       // strategies the timestamp of the received packets is set to the
909       // timestamp of the protected/retransmitted media packet. I.e., then
910       // recv_time_ms != 0, even though the media packets were lost.
911       receiver_time_.AddSample(comparison.render_time_ms -
912                                comparison.recv_time_ms);
913     }
914     end_to_end_.AddSample(comparison.render_time_ms - comparison.input_time_ms);
915     encoded_frame_size_.AddSample(comparison.encoded_frame_size);
916   }
917 
918   void PrintResult(const char* result_type,
919                    test::Statistics stats,
920                    const char* unit) {
921     printf("RESULT %s: %s = {%f, %f}%s\n",
922            result_type,
923            test_label_.c_str(),
924            stats.Mean(),
925            stats.StandardDeviation(),
926            unit);
927   }
928 
929   void PrintSamplesToFile(void) {
930     FILE* out = graph_data_output_file_;
931     rtc::CritScope crit(&comparison_lock_);
932     std::sort(samples_.begin(), samples_.end(),
933               [](const Sample& A, const Sample& B) -> bool {
934                 return A.input_time_ms < B.input_time_ms;
935               });
936 
937     fprintf(out, "%s\n", graph_title_.c_str());
938     fprintf(out, "%" PRIuS "\n", samples_.size());
939     fprintf(out,
940             "dropped "
941             "input_time_ms "
942             "send_time_ms "
943             "recv_time_ms "
944             "render_time_ms "
945             "encoded_frame_size "
946             "psnr "
947             "ssim "
948             "encode_time_ms\n");
949     int missing_encode_time_samples = 0;
950     for (const Sample& sample : samples_) {
951       auto it = samples_encode_time_ms_.find(sample.input_time_ms);
952       int encode_time_ms;
953       if (it != samples_encode_time_ms_.end()) {
954         encode_time_ms = it->second;
955       } else {
956         ++missing_encode_time_samples;
957         encode_time_ms = -1;
958       }
959       fprintf(out, "%d %" PRId64 " %" PRId64 " %" PRId64 " %" PRId64 " %" PRIuS
960                    " %lf %lf %d\n",
961               sample.dropped, sample.input_time_ms, sample.send_time_ms,
962               sample.recv_time_ms, sample.render_time_ms,
963               sample.encoded_frame_size, sample.psnr, sample.ssim,
964               encode_time_ms);
965     }
966     if (missing_encode_time_samples) {
967       fprintf(stderr,
968               "Warning: Missing encode_time_ms samples for %d frame(s).\n",
969               missing_encode_time_samples);
970     }
971   }
972 
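  // Average media bitrate over the sending period, in bits per second:
  // total_media_bytes_ * 8 / (duration in ms) * 1000. E.g. (illustrative)
  // 1,000,000 bytes sent over 10,000 ms is 800,000 bps = 800 kbps.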
973   double GetAverageMediaBitrateBps() {
974     if (last_sending_time_ == first_sending_time_) {
975       return 0;
976     } else {
977       return static_cast<double>(total_media_bytes_) * 8 /
978              (last_sending_time_ - first_sending_time_) *
979              rtc::kNumMillisecsPerSec;
980     }
981   }
982 
983   // Implements VideoSinkInterface to receive captured frames from a
984   // FrameGeneratorCapturer. Implements VideoSourceInterface to be able to act
985   // as a source to VideoSendStream.
986   // It forwards all input frames to the VideoAnalyzer for later comparison and
987   // forwards the captured frames to the VideoSendStream.
988   class CapturedFrameForwarder : public rtc::VideoSinkInterface<VideoFrame>,
989                                  public rtc::VideoSourceInterface<VideoFrame> {
990    public:
991     explicit CapturedFrameForwarder(VideoAnalyzer* analyzer, Clock* clock)
992         : analyzer_(analyzer),
993           send_stream_input_(nullptr),
994           video_capturer_(nullptr),
995           clock_(clock) {}
996 
997     void SetSource(test::VideoCapturer* video_capturer) {
998       video_capturer_ = video_capturer;
999     }
1000 
1001    private:
1002     void OnFrame(const VideoFrame& video_frame) override {
1003       VideoFrame copy = video_frame;
1004       // Frames from the capturer do not have an RTP timestamp.
1005       // Create one so it can be used for comparison.
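      // The RTP video clock is 90 kHz, so an NTP time of N ms maps to an RTP
      // timestamp of N * 90.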
1006       RTC_DCHECK_EQ(0, video_frame.timestamp());
1007       if (video_frame.ntp_time_ms() == 0)
1008         copy.set_ntp_time_ms(clock_->CurrentNtpInMilliseconds());
1009       copy.set_timestamp(copy.ntp_time_ms() * 90);
1010       analyzer_->AddCapturedFrameForComparison(copy);
1011       rtc::CritScope lock(&crit_);
1012       if (send_stream_input_)
1013         send_stream_input_->OnFrame(copy);
1014     }
1015 
1016     // Called when |send_stream_.SetSource()| is called.
1017     void AddOrUpdateSink(rtc::VideoSinkInterface<VideoFrame>* sink,
1018                          const rtc::VideoSinkWants& wants) override {
1019       {
1020         rtc::CritScope lock(&crit_);
1021         RTC_DCHECK(!send_stream_input_ || send_stream_input_ == sink);
1022         send_stream_input_ = sink;
1023       }
1024       if (video_capturer_) {
1025         video_capturer_->AddOrUpdateSink(this, wants);
1026       }
1027     }
1028 
1029     // Called by |send_stream_| when |send_stream_.SetSource()| is called.
1030     void RemoveSink(rtc::VideoSinkInterface<VideoFrame>* sink) override {
1031       rtc::CritScope lock(&crit_);
1032       RTC_DCHECK(sink == send_stream_input_);
1033       send_stream_input_ = nullptr;
1034     }
1035 
1036     VideoAnalyzer* const analyzer_;
1037     rtc::CriticalSection crit_;
1038     rtc::VideoSinkInterface<VideoFrame>* send_stream_input_
1039         RTC_GUARDED_BY(crit_);
1040     test::VideoCapturer* video_capturer_;
1041     Clock* clock_;
1042   };
1043 
1044   void AddCapturedFrameForComparison(const VideoFrame& video_frame) {
1045     rtc::CritScope lock(&crit_);
1046     frames_.push_back(video_frame);
1047   }
1048 
1049   Call* call_;
1050   VideoSendStream* send_stream_;
1051   VideoReceiveStream* receive_stream_;
1052   CapturedFrameForwarder captured_frame_forwarder_;
1053   const std::string test_label_;
1054   FILE* const graph_data_output_file_;
1055   const std::string graph_title_;
1056   const uint32_t ssrc_to_analyze_;
1057   const uint32_t rtx_ssrc_to_analyze_;
1058   const size_t selected_stream_;
1059   const int selected_sl_;
1060   const int selected_tl_;
1061   PreEncodeProxy pre_encode_proxy_;
1062   OnEncodeTimingProxy encode_timing_proxy_;
1063   std::vector<Sample> samples_ RTC_GUARDED_BY(comparison_lock_);
1064   std::map<int64_t, int> samples_encode_time_ms_
1065       RTC_GUARDED_BY(comparison_lock_);
1066   test::Statistics sender_time_ RTC_GUARDED_BY(comparison_lock_);
1067   test::Statistics receiver_time_ RTC_GUARDED_BY(comparison_lock_);
1068   test::Statistics psnr_ RTC_GUARDED_BY(comparison_lock_);
1069   test::Statistics ssim_ RTC_GUARDED_BY(comparison_lock_);
1070   test::Statistics end_to_end_ RTC_GUARDED_BY(comparison_lock_);
1071   test::Statistics rendered_delta_ RTC_GUARDED_BY(comparison_lock_);
1072   test::Statistics encoded_frame_size_ RTC_GUARDED_BY(comparison_lock_);
1073   test::Statistics encode_frame_rate_ RTC_GUARDED_BY(comparison_lock_);
1074   test::Statistics encode_time_ms_ RTC_GUARDED_BY(comparison_lock_);
1075   test::Statistics encode_usage_percent_ RTC_GUARDED_BY(comparison_lock_);
1076   test::Statistics decode_time_ms_ RTC_GUARDED_BY(comparison_lock_);
1077   test::Statistics decode_time_max_ms_ RTC_GUARDED_BY(comparison_lock_);
1078   test::Statistics media_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_);
1079   test::Statistics fec_bitrate_bps_ RTC_GUARDED_BY(comparison_lock_);
1080   test::Statistics send_bandwidth_bps_ RTC_GUARDED_BY(comparison_lock_);
1081   test::Statistics memory_usage_ RTC_GUARDED_BY(comparison_lock_);
1082 
1083   struct FrameWithPsnr {
1084     double psnr;
1085     VideoFrame frame;
1086   };
1087 
1088   // Rendered frame with worst PSNR is saved for further analysis.
1089   rtc::Optional<FrameWithPsnr> worst_frame_ RTC_GUARDED_BY(comparison_lock_);
1090 
1091   size_t last_fec_bytes_;
1092 
1093   const int frames_to_process_;
1094   int frames_recorded_;
1095   int frames_processed_;
1096   int dropped_frames_;
1097   int dropped_frames_before_first_encode_;
1098   int dropped_frames_before_rendering_;
1099   int64_t last_render_time_;
1100   uint32_t rtp_timestamp_delta_;
1101   int64_t total_media_bytes_;
1102   int64_t first_sending_time_;
1103   int64_t last_sending_time_;
1104 
1105   int64_t cpu_time_ RTC_GUARDED_BY(cpu_measurement_lock_);
1106   int64_t wallclock_time_ RTC_GUARDED_BY(cpu_measurement_lock_);
1107   rtc::CriticalSection cpu_measurement_lock_;
1108 
1109   rtc::CriticalSection crit_;
1110   std::deque<VideoFrame> frames_ RTC_GUARDED_BY(crit_);
1111   rtc::Optional<VideoFrame> last_rendered_frame_ RTC_GUARDED_BY(crit_);
1112   rtc::TimestampWrapAroundHandler wrap_handler_ RTC_GUARDED_BY(crit_);
1113   std::map<int64_t, int64_t> send_times_ RTC_GUARDED_BY(crit_);
1114   std::map<int64_t, int64_t> recv_times_ RTC_GUARDED_BY(crit_);
1115   std::map<int64_t, size_t> encoded_frame_sizes_ RTC_GUARDED_BY(crit_);
1116   rtc::Optional<uint32_t> first_encoded_timestamp_ RTC_GUARDED_BY(crit_);
1117   rtc::Optional<uint32_t> first_sent_timestamp_ RTC_GUARDED_BY(crit_);
1118   const double avg_psnr_threshold_;
1119   const double avg_ssim_threshold_;
1120   bool is_quick_test_enabled_;
1121 
1122   rtc::CriticalSection comparison_lock_;
1123   std::vector<rtc::PlatformThread*> comparison_thread_pool_;
1124   rtc::PlatformThread stats_polling_thread_;
1125   rtc::Event comparison_available_event_;
1126   std::deque<FrameComparison> comparisons_ RTC_GUARDED_BY(comparison_lock_);
1127   rtc::Event done_;
1128 
1129   std::unique_ptr<test::RtpFileWriter> rtp_file_writer_;
1130   Clock* const clock_;
1131   const int64_t start_ms_;
1132 };
1133 
1134 VideoQualityTest::VideoQualityTest()
1135     : clock_(Clock::GetRealTimeClock()), receive_logs_(0), send_logs_(0) {
1136   payload_type_map_ = test::CallTest::payload_type_map_;
1137   RTC_DCHECK(payload_type_map_.find(kPayloadTypeH264) ==
1138              payload_type_map_.end());
1139   RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP8) ==
1140              payload_type_map_.end());
1141   RTC_DCHECK(payload_type_map_.find(kPayloadTypeVP9) ==
1142              payload_type_map_.end());
1143   payload_type_map_[kPayloadTypeH264] = webrtc::MediaType::VIDEO;
1144   payload_type_map_[kPayloadTypeVP8] = webrtc::MediaType::VIDEO;
1145   payload_type_map_[kPayloadTypeVP9] = webrtc::MediaType::VIDEO;
1146 }
1147 
1148 VideoQualityTest::Params::Params()
1149     : call({false, Call::Config::BitrateConfig(), 0}),
1150       video({false, 640, 480, 30, 50, 800, 800, false, "VP8", 1, -1, 0, false,
1151              false, ""}),
1152       audio({false, false, false}),
1153       screenshare({false, false, 10, 0}),
1154       analyzer({"", 0.0, 0.0, 0, "", ""}),
1155       pipe(),
1156       ss({std::vector<VideoStream>(), 0, 0, -1, std::vector<SpatialLayer>()}),
1157       logging({false, "", "", ""}) {}
1158 
1159 VideoQualityTest::Params::~Params() = default;
1160 
1161 void VideoQualityTest::TestBody() {}
1162 
1163 std::string VideoQualityTest::GenerateGraphTitle() const {
1164   std::stringstream ss;
1165   ss << params_.video.codec;
1166   ss << " (" << params_.video.target_bitrate_bps / 1000 << "kbps";
1167   ss << ", " << params_.video.fps << " FPS";
1168   if (params_.screenshare.scroll_duration)
1169     ss << ", " << params_.screenshare.scroll_duration << "s scroll";
1170   if (params_.ss.streams.size() > 1)
1171     ss << ", Stream #" << params_.ss.selected_stream;
1172   if (params_.ss.num_spatial_layers > 1)
1173     ss << ", Layer #" << params_.ss.selected_sl;
1174   ss << ")";
1175   return ss.str();
1176 }
1177 
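// Sanity-checks the test parameters: fills in a default stream and spatial
// layer count if none were given, and enforces the bitrate ordering and
// simulcast/SVC constraints that the rest of the test assumes.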
1178 void VideoQualityTest::CheckParams() {
1179   if (!params_.video.enabled)
1180     return;
1181   // Add a default stream if none is specified.
1182   if (params_.ss.streams.empty())
1183     params_.ss.streams.push_back(VideoQualityTest::DefaultVideoStream(params_));
1184   if (params_.ss.num_spatial_layers == 0)
1185     params_.ss.num_spatial_layers = 1;
1186 
1187   if (params_.pipe.loss_percent != 0 ||
1188       params_.pipe.queue_length_packets != 0) {
1189     // Since LayerFilteringTransport changes the sequence numbers, we can't
1190     // use that feature with packet loss, since the NACK request would end up
1191     // retransmitting the wrong packets.
1192     RTC_CHECK(params_.ss.selected_sl == -1 ||
1193               params_.ss.selected_sl == params_.ss.num_spatial_layers - 1);
1194     RTC_CHECK(params_.video.selected_tl == -1 ||
1195               params_.video.selected_tl ==
1196                   params_.video.num_temporal_layers - 1);
1197   }
1198 
1199   // TODO(ivica): Should max_bitrate_bps == -1 represent inf max bitrate, as it
1200   // does in some parts of the code?
1201   RTC_CHECK_GE(params_.video.max_bitrate_bps, params_.video.target_bitrate_bps);
1202   RTC_CHECK_GE(params_.video.target_bitrate_bps, params_.video.min_bitrate_bps);
1203   RTC_CHECK_LT(params_.video.selected_tl, params_.video.num_temporal_layers);
1204   RTC_CHECK_LE(params_.ss.selected_stream, params_.ss.streams.size());
1205   for (const VideoStream& stream : params_.ss.streams) {
1206     RTC_CHECK_GE(stream.min_bitrate_bps, 0);
1207     RTC_CHECK_GE(stream.target_bitrate_bps, stream.min_bitrate_bps);
1208     RTC_CHECK_GE(stream.max_bitrate_bps, stream.target_bitrate_bps);
1209   }
1210   // TODO(ivica): Should we check if the sum of all streams/layers is equal to
1211   // the total bitrate? We anyway have to update them in the case bitrate
1212   // estimator changes the total bitrates.
1213   RTC_CHECK_GE(params_.ss.num_spatial_layers, 1);
1214   RTC_CHECK_LE(params_.ss.selected_sl, params_.ss.num_spatial_layers);
1215   RTC_CHECK(params_.ss.spatial_layers.empty() ||
1216             params_.ss.spatial_layers.size() ==
1217                 static_cast<size_t>(params_.ss.num_spatial_layers));
1218   if (params_.video.codec == "VP8") {
1219     RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
1220   } else if (params_.video.codec == "VP9") {
1221     RTC_CHECK_EQ(params_.ss.streams.size(), 1);
1222   }
1223   RTC_CHECK_GE(params_.call.num_thumbnails, 0);
1224   if (params_.call.num_thumbnails > 0) {
1225     RTC_CHECK_EQ(params_.ss.num_spatial_layers, 1);
1226     RTC_CHECK_EQ(params_.ss.streams.size(), 3);
1227     RTC_CHECK_EQ(params_.video.num_temporal_layers, 3);
1228     RTC_CHECK_EQ(params_.video.codec, "VP8");
1229   }
1230 }
1231 
1232 // Static.
1233 std::vector<int> VideoQualityTest::ParseCSV(const std::string& str) {
1234   // Parse comma separated nonnegative integers, where some elements may be
1235   // empty. The empty values are replaced with -1.
1236   // E.g. "10,20,,30,40" --> {10, 20, -1, 30, 40}
1237   // E.g. ",,10,,20," --> {-1, -1, 10, -1, 20, -1}
1238   std::vector<int> result;
1239   if (str.empty())
1240     return result;
1241 
1242   const char* p = str.c_str();
1243   int value = -1;
1244   int pos;
1245   while (*p) {
1246     if (*p == ',') {
1247       result.push_back(value);
1248       value = -1;
1249       ++p;
1250       continue;
1251     }
1252     RTC_CHECK_EQ(sscanf(p, "%d%n", &value, &pos), 1)
1253         << "Unexpected non-number value.";
1254     p += pos;
1255   }
1256   result.push_back(value);
1257   return result;
1258 }
1259 
1260 // Static.
1261 VideoStream VideoQualityTest::DefaultVideoStream(const Params& params) {
1262   VideoStream stream;
1263   stream.width = params.video.width;
1264   stream.height = params.video.height;
1265   stream.max_framerate = params.video.fps;
1266   stream.min_bitrate_bps = params.video.min_bitrate_bps;
1267   stream.target_bitrate_bps = params.video.target_bitrate_bps;
1268   stream.max_bitrate_bps = params.video.max_bitrate_bps;
1269   stream.max_qp = kDefaultMaxQp;
1270   // TODO(sprang): Can we make this less of a hack?
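  // With two temporal layers a single threshold is placed at the target
  // bitrate; with three layers the thresholds are max_bitrate / 4 and the
  // target bitrate. Other layer counts derive their thresholds from the VP8
  // rate-allocation table (kVp8LayerRateAlloction).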
1271   if (params.video.num_temporal_layers == 2) {
1272     stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
1273   } else if (params.video.num_temporal_layers == 3) {
1274     stream.temporal_layer_thresholds_bps.push_back(stream.max_bitrate_bps / 4);
1275     stream.temporal_layer_thresholds_bps.push_back(stream.target_bitrate_bps);
1276   } else {
1277     RTC_CHECK_LE(params.video.num_temporal_layers, kMaxTemporalStreams);
1278     for (int i = 0; i < params.video.num_temporal_layers - 1; ++i) {
1279       stream.temporal_layer_thresholds_bps.push_back(static_cast<int>(
1280           stream.max_bitrate_bps * kVp8LayerRateAlloction[0][i] + 0.5));
1281     }
1282   }
1283   return stream;
1284 }
1285 
1286 // Static.
1287 VideoStream VideoQualityTest::DefaultThumbnailStream() {
1288   VideoStream stream;
1289   stream.width = 320;
1290   stream.height = 180;
1291   stream.max_framerate = 7;
1292   stream.min_bitrate_bps = 7500;
1293   stream.target_bitrate_bps = 37500;
1294   stream.max_bitrate_bps = 50000;
1295   stream.max_qp = kDefaultMaxQp;
1296   return stream;
1297 }
1298 
1299 // Static.
1300 void VideoQualityTest::FillScalabilitySettings(
1301     Params* params,
1302     const std::vector<std::string>& stream_descriptors,
1303     int num_streams,
1304     size_t selected_stream,
1305     int num_spatial_layers,
1306     int selected_sl,
1307     const std::vector<std::string>& sl_descriptors) {
1308   if (params->ss.streams.empty() && params->ss.infer_streams) {
1309     webrtc::VideoEncoderConfig encoder_config;
1310     encoder_config.content_type =
1311         params->screenshare.enabled
1312             ? webrtc::VideoEncoderConfig::ContentType::kScreen
1313             : webrtc::VideoEncoderConfig::ContentType::kRealtimeVideo;
1314     encoder_config.max_bitrate_bps = params->video.max_bitrate_bps;
1315     encoder_config.min_transmit_bitrate_bps = params->video.min_transmit_bps;
1316     encoder_config.number_of_streams = num_streams;
1317     encoder_config.spatial_layers = params->ss.spatial_layers;
1318     encoder_config.video_stream_factory =
1319         new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
1320             params->video.codec, kDefaultMaxQp, params->video.fps,
1321             params->screenshare.enabled, true);
1322     params->ss.streams =
1323         encoder_config.video_stream_factory->CreateEncoderStreams(
1324             static_cast<int>(params->video.width),
1325             static_cast<int>(params->video.height), encoder_config);
1326   } else {
1327     // Read VideoStream and SpatialLayer elements from a list of
1328     // comma-separated lists. To use a default value for an element, use -1
1329     // or leave it empty. Validity checks are performed in CheckParams.
1330     RTC_CHECK(params->ss.streams.empty());
1331     for (auto descriptor : stream_descriptors) {
1332       if (descriptor.empty())
1333         continue;
1334       VideoStream stream = VideoQualityTest::DefaultVideoStream(*params);
1335       std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
1336       if (v[0] != -1)
1337         stream.width = static_cast<size_t>(v[0]);
1338       if (v[1] != -1)
1339         stream.height = static_cast<size_t>(v[1]);
1340       if (v[2] != -1)
1341         stream.max_framerate = v[2];
1342       if (v[3] != -1)
1343         stream.min_bitrate_bps = v[3];
1344       if (v[4] != -1)
1345         stream.target_bitrate_bps = v[4];
1346       if (v[5] != -1)
1347         stream.max_bitrate_bps = v[5];
1348       if (v.size() > 6 && v[6] != -1)
1349         stream.max_qp = v[6];
1350       if (v.size() > 7) {
1351         stream.temporal_layer_thresholds_bps.clear();
1352         stream.temporal_layer_thresholds_bps.insert(
1353             stream.temporal_layer_thresholds_bps.end(), v.begin() + 7, v.end());
1354       } else {
1355         // Automatic TL thresholds for more than two layers not supported.
1356         RTC_CHECK_LE(params->video.num_temporal_layers, 2);
1357       }
1358       params->ss.streams.push_back(stream);
1359     }
1360   }
1361 
1362   params->ss.num_spatial_layers = std::max(1, num_spatial_layers);
1363   params->ss.selected_stream = selected_stream;
1364 
1365   params->ss.selected_sl = selected_sl;
1366   RTC_CHECK(params->ss.spatial_layers.empty());
1367   for (auto descriptor : sl_descriptors) {
1368     if (descriptor.empty())
1369       continue;
1370     std::vector<int> v = VideoQualityTest::ParseCSV(descriptor);
1371     RTC_CHECK_GT(v[2], 0);
1372 
1373     SpatialLayer layer;
1374     layer.scaling_factor_num = v[0] == -1 ? 1 : v[0];
1375     layer.scaling_factor_den = v[1] == -1 ? 1 : v[1];
1376     layer.target_bitrate_bps = v[2];
1377     params->ss.spatial_layers.push_back(layer);
1378   }
1379 }
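
// Example descriptors (hypothetical values, for illustration only): the stream
// descriptor "320,180,7,7500,37500,50000" produces a 320x180 stream at 7 fps
// with min/target/max bitrates of 7.5/37.5/50 kbps, while "640,360,,,,"
// overrides only the resolution and keeps the DefaultVideoStream() values for
// the rest. A spatial-layer descriptor such as "1,2,150000" gives a layer
// scaled by 1/2 with a 150 kbps target bitrate; the third field must be > 0.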
1380 
1381 void VideoQualityTest::SetupVideo(Transport* send_transport,
1382                                   Transport* recv_transport) {
1383   size_t num_video_streams = params_.ss.streams.size();
1384   size_t num_flexfec_streams = params_.video.flexfec ? 1 : 0;
1385   CreateSendConfig(num_video_streams, 0, num_flexfec_streams, send_transport);
1386 
1387   int payload_type;
1388   if (params_.video.codec == "H264") {
1389     video_encoder_ = H264Encoder::Create(cricket::VideoCodec("H264"));
1390     payload_type = kPayloadTypeH264;
1391   } else if (params_.video.codec == "VP8") {
1392     if (params_.screenshare.enabled && params_.ss.streams.size() > 1) {
1393       // Simulcast screenshare needs a simulcast encoder adapter to work, since
1394       // encoders usually can't natively do simulcast with different frame rates
1395       // for the different layers.
1396       video_encoder_.reset(
1397           new SimulcastEncoderAdapter(new InternalEncoderFactory()));
1398     } else {
1399       video_encoder_ = VP8Encoder::Create();
1400     }
1401     payload_type = kPayloadTypeVP8;
1402   } else if (params_.video.codec == "VP9") {
1403     video_encoder_ = VP9Encoder::Create();
1404     payload_type = kPayloadTypeVP9;
1405   } else {
1406     RTC_NOTREACHED() << "Codec not supported!";
1407     return;
1408   }
1409   video_send_config_.encoder_settings.encoder = video_encoder_.get();
1410   video_send_config_.encoder_settings.payload_name = params_.video.codec;
1411   video_send_config_.encoder_settings.payload_type = payload_type;
1412   video_send_config_.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1413   video_send_config_.rtp.rtx.payload_type = kSendRtxPayloadType;
1414   for (size_t i = 0; i < num_video_streams; ++i)
1415     video_send_config_.rtp.rtx.ssrcs.push_back(kSendRtxSsrcs[i]);
1416 
1417   video_send_config_.rtp.extensions.clear();
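  // With send-side BWE, the transport-wide sequence number extension is added
  // so the receiver can return transport feedback; otherwise abs-send-time is
  // used for receive-side (REMB based) estimation. The receive configs below
  // mirror this choice via transport_cc / remb.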
1418   if (params_.call.send_side_bwe) {
1419     video_send_config_.rtp.extensions.push_back(
1420         RtpExtension(RtpExtension::kTransportSequenceNumberUri,
1421                      test::kTransportSequenceNumberExtensionId));
1422   } else {
1423     video_send_config_.rtp.extensions.push_back(RtpExtension(
1424         RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
1425   }
1426   video_send_config_.rtp.extensions.push_back(RtpExtension(
1427       RtpExtension::kVideoContentTypeUri, test::kVideoContentTypeExtensionId));
1428   video_send_config_.rtp.extensions.push_back(RtpExtension(
1429       RtpExtension::kVideoTimingUri, test::kVideoTimingExtensionId));
1430 
1431   video_encoder_config_.min_transmit_bitrate_bps =
1432       params_.video.min_transmit_bps;
1433 
1434   video_send_config_.suspend_below_min_bitrate =
1435       params_.video.suspend_below_min_bitrate;
1436 
1437   video_encoder_config_.number_of_streams = params_.ss.streams.size();
1438   video_encoder_config_.max_bitrate_bps = 0;
1439   for (size_t i = 0; i < params_.ss.streams.size(); ++i) {
1440     video_encoder_config_.max_bitrate_bps +=
1441         params_.ss.streams[i].max_bitrate_bps;
1442   }
1443   if (params_.ss.infer_streams) {
1444     video_encoder_config_.video_stream_factory =
1445         new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
1446             params_.video.codec, params_.ss.streams[0].max_qp,
1447             params_.video.fps, params_.screenshare.enabled, true);
1448   } else {
1449     video_encoder_config_.video_stream_factory =
1450         new rtc::RefCountedObject<VideoStreamFactory>(params_.ss.streams);
1451   }
1452 
1453   video_encoder_config_.spatial_layers = params_.ss.spatial_layers;
1454 
1455   CreateMatchingReceiveConfigs(recv_transport);
1456 
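  // By convention, a selected_stream index equal to the number of simulcast
  // streams means "decode and render all streams"; see also the renderer
  // setup in RunWithRenderers.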
1457   const bool decode_all_receive_streams =
1458       params_.ss.selected_stream == params_.ss.streams.size();
1459 
1460   for (size_t i = 0; i < num_video_streams; ++i) {
1461     video_receive_configs_[i].rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1462     video_receive_configs_[i].rtp.rtx_ssrc = kSendRtxSsrcs[i];
1463     video_receive_configs_[i]
1464         .rtp.rtx_associated_payload_types[kSendRtxPayloadType] = payload_type;
1465     video_receive_configs_[i].rtp.transport_cc = params_.call.send_side_bwe;
1466     video_receive_configs_[i].rtp.remb = !params_.call.send_side_bwe;
1467     // Enable RTT calculation so NTP time estimator will work.
1468     video_receive_configs_[i].rtp.rtcp_xr.receiver_reference_time_report = true;
1469     // Force fake decoders on non-selected simulcast streams.
1470     if (!decode_all_receive_streams && i != params_.ss.selected_stream) {
1471       VideoReceiveStream::Decoder decoder;
1472       decoder.decoder = new test::FakeDecoder();
1473       decoder.payload_type = video_send_config_.encoder_settings.payload_type;
1474       decoder.payload_name = video_send_config_.encoder_settings.payload_name;
1475       video_receive_configs_[i].decoders.clear();
1476       allocated_decoders_.emplace_back(decoder.decoder);
1477       video_receive_configs_[i].decoders.push_back(decoder);
1478     }
1479   }
1480 
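  // FlexFEC protects every media SSRC when all streams are decoded, otherwise
  // only the SSRC of the selected simulcast stream.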
1481   if (params_.video.flexfec) {
1482     // Override send config constructed by CreateSendConfig.
1483     if (decode_all_receive_streams) {
1484       for (uint32_t media_ssrc : video_send_config_.rtp.ssrcs) {
1485         video_send_config_.rtp.flexfec.protected_media_ssrcs.push_back(
1486             media_ssrc);
1487       }
1488     } else {
1489       video_send_config_.rtp.flexfec.protected_media_ssrcs = {
1490           kVideoSendSsrcs[params_.ss.selected_stream]};
1491     }
1492 
1493     // The matching receive config is _not_ created by
1494     // CreateMatchingReceiveConfigs, since VideoQualityTest is not a BaseTest.
1495     // Set up the receive config manually instead.
1496     FlexfecReceiveStream::Config flexfec_receive_config(recv_transport);
1497     flexfec_receive_config.payload_type =
1498         video_send_config_.rtp.flexfec.payload_type;
1499     flexfec_receive_config.remote_ssrc = video_send_config_.rtp.flexfec.ssrc;
1500     flexfec_receive_config.protected_media_ssrcs =
1501         video_send_config_.rtp.flexfec.protected_media_ssrcs;
1502     flexfec_receive_config.local_ssrc = kReceiverLocalVideoSsrc;
1503     flexfec_receive_config.transport_cc = params_.call.send_side_bwe;
1504     if (params_.call.send_side_bwe) {
1505       flexfec_receive_config.rtp_header_extensions.push_back(
1506           RtpExtension(RtpExtension::kTransportSequenceNumberUri,
1507                        test::kTransportSequenceNumberExtensionId));
1508     } else {
1509       flexfec_receive_config.rtp_header_extensions.push_back(RtpExtension(
1510           RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
1511     }
1512     flexfec_receive_configs_.push_back(flexfec_receive_config);
1513     if (num_video_streams > 0) {
1514       video_receive_configs_[0].rtp.protected_by_flexfec = true;
1515     }
1516   }
1517 
1518   if (params_.video.ulpfec) {
1519     video_send_config_.rtp.ulpfec.red_payload_type = kRedPayloadType;
1520     video_send_config_.rtp.ulpfec.ulpfec_payload_type = kUlpfecPayloadType;
1521     video_send_config_.rtp.ulpfec.red_rtx_payload_type = kRtxRedPayloadType;
1522 
1523     if (decode_all_receive_streams) {
1524       for (auto it = video_receive_configs_.begin();
1525            it != video_receive_configs_.end(); ++it) {
1526         it->rtp.red_payload_type =
1527             video_send_config_.rtp.ulpfec.red_payload_type;
1528         it->rtp.ulpfec_payload_type =
1529             video_send_config_.rtp.ulpfec.ulpfec_payload_type;
1530         it->rtp.rtx_associated_payload_types[video_send_config_.rtp.ulpfec
1531                                                  .red_rtx_payload_type] =
1532             video_send_config_.rtp.ulpfec.red_payload_type;
1533       }
1534     } else {
1535       video_receive_configs_[params_.ss.selected_stream].rtp.red_payload_type =
1536           video_send_config_.rtp.ulpfec.red_payload_type;
1537       video_receive_configs_[params_.ss.selected_stream]
1538           .rtp.ulpfec_payload_type =
1539           video_send_config_.rtp.ulpfec.ulpfec_payload_type;
1540       video_receive_configs_[params_.ss.selected_stream]
1541           .rtp.rtx_associated_payload_types[video_send_config_.rtp.ulpfec
1542                                                 .red_rtx_payload_type] =
1543           video_send_config_.rtp.ulpfec.red_payload_type;
1544     }
1545   }
1546 }
1547 
1548 void VideoQualityTest::SetupThumbnails(Transport* send_transport,
1549                                        Transport* recv_transport) {
1550   for (int i = 0; i < params_.call.num_thumbnails; ++i) {
1551     thumbnail_encoders_.emplace_back(VP8Encoder::Create());
1552 
1553     // Thumbnails are sent in the opposite direction: from receiver_call to
1554     // sender_call.
1555     VideoSendStream::Config thumbnail_send_config(recv_transport);
1556     thumbnail_send_config.rtp.ssrcs.push_back(kThumbnailSendSsrcStart + i);
1557     thumbnail_send_config.encoder_settings.encoder =
1558         thumbnail_encoders_.back().get();
1559     thumbnail_send_config.encoder_settings.payload_name = params_.video.codec;
1560     thumbnail_send_config.encoder_settings.payload_type = kPayloadTypeVP8;
1561     thumbnail_send_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1562     thumbnail_send_config.rtp.rtx.payload_type = kSendRtxPayloadType;
1563     thumbnail_send_config.rtp.rtx.ssrcs.push_back(kThumbnailRtxSsrcStart + i);
1564     thumbnail_send_config.rtp.extensions.clear();
1565     if (params_.call.send_side_bwe) {
1566       thumbnail_send_config.rtp.extensions.push_back(
1567           RtpExtension(RtpExtension::kTransportSequenceNumberUri,
1568                        test::kTransportSequenceNumberExtensionId));
1569     } else {
1570       thumbnail_send_config.rtp.extensions.push_back(RtpExtension(
1571           RtpExtension::kAbsSendTimeUri, test::kAbsSendTimeExtensionId));
1572     }
1573 
1574     VideoEncoderConfig thumbnail_encoder_config;
1575     thumbnail_encoder_config.min_transmit_bitrate_bps = 7500;
1576     thumbnail_send_config.suspend_below_min_bitrate =
1577         params_.video.suspend_below_min_bitrate;
1578     thumbnail_encoder_config.number_of_streams = 1;
1579     thumbnail_encoder_config.max_bitrate_bps = 50000;
1580     if (params_.ss.infer_streams) {
1581       thumbnail_encoder_config.video_stream_factory =
1582           new rtc::RefCountedObject<VideoStreamFactory>(params_.ss.streams);
1583     } else {
1584       thumbnail_encoder_config.video_stream_factory =
1585           new rtc::RefCountedObject<cricket::EncoderStreamFactory>(
1586               params_.video.codec, params_.ss.streams[0].max_qp,
1587               params_.video.fps, params_.screenshare.enabled, true);
1588     }
1589     thumbnail_encoder_config.spatial_layers = params_.ss.spatial_layers;
1590 
1591     VideoReceiveStream::Config thumbnail_receive_config(send_transport);
1592     thumbnail_receive_config.rtp.remb = false;
1593     thumbnail_receive_config.rtp.transport_cc = true;
1594     thumbnail_receive_config.rtp.local_ssrc = kReceiverLocalVideoSsrc;
1595     for (const RtpExtension& extension : thumbnail_send_config.rtp.extensions)
1596       thumbnail_receive_config.rtp.extensions.push_back(extension);
1597     thumbnail_receive_config.renderer = &fake_renderer_;
1598 
1599     VideoReceiveStream::Decoder decoder =
1600         test::CreateMatchingDecoder(thumbnail_send_config.encoder_settings);
1601     allocated_decoders_.push_back(
1602         std::unique_ptr<VideoDecoder>(decoder.decoder));
1603     thumbnail_receive_config.decoders.clear();
1604     thumbnail_receive_config.decoders.push_back(decoder);
1605     thumbnail_receive_config.rtp.remote_ssrc =
1606         thumbnail_send_config.rtp.ssrcs[0];
1607 
1608     thumbnail_receive_config.rtp.nack.rtp_history_ms = kNackRtpHistoryMs;
1609     thumbnail_receive_config.rtp.rtx_ssrc = kThumbnailRtxSsrcStart + i;
1610     thumbnail_receive_config.rtp
1611         .rtx_associated_payload_types[kSendRtxPayloadType] = kPayloadTypeVP8;
1612     thumbnail_receive_config.rtp.transport_cc = params_.call.send_side_bwe;
1613     thumbnail_receive_config.rtp.remb = !params_.call.send_side_bwe;
1614 
1615     thumbnail_encoder_configs_.push_back(thumbnail_encoder_config.Copy());
1616     thumbnail_send_configs_.push_back(thumbnail_send_config.Copy());
1617     thumbnail_receive_configs_.push_back(thumbnail_receive_config.Copy());
1618   }
1619 
1620   for (int i = 0; i < params_.call.num_thumbnails; ++i) {
1621     thumbnail_send_streams_.push_back(receiver_call_->CreateVideoSendStream(
1622         thumbnail_send_configs_[i].Copy(),
1623         thumbnail_encoder_configs_[i].Copy()));
1624     thumbnail_receive_streams_.push_back(sender_call_->CreateVideoReceiveStream(
1625         thumbnail_receive_configs_[i].Copy()));
1626   }
1627 }
1628 
1629 void VideoQualityTest::DestroyThumbnailStreams() {
1630   for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_)
1631     receiver_call_->DestroyVideoSendStream(thumbnail_send_stream);
1632   thumbnail_send_streams_.clear();
1633   for (VideoReceiveStream* thumbnail_receive_stream :
1634        thumbnail_receive_streams_)
1635     sender_call_->DestroyVideoReceiveStream(thumbnail_receive_stream);
1637   thumbnail_receive_streams_.clear();
1638   for (std::unique_ptr<test::VideoCapturer>& video_capturer :
1639        thumbnail_capturers_) {
1640     video_capturer.reset();
1641   }
1642 }
1643 
1644 void VideoQualityTest::SetupScreenshareOrSVC() {
1645   if (params_.screenshare.enabled) {
1646     // Fill out codec settings.
1647     video_encoder_config_.content_type =
1648         VideoEncoderConfig::ContentType::kScreen;
1649     degradation_preference_ =
1650         VideoSendStream::DegradationPreference::kMaintainResolution;
1651     if (params_.video.codec == "VP8") {
1652       VideoCodecVP8 vp8_settings = VideoEncoder::GetDefaultVp8Settings();
1653       vp8_settings.denoisingOn = false;
1654       vp8_settings.frameDroppingOn = false;
1655       vp8_settings.numberOfTemporalLayers =
1656           static_cast<unsigned char>(params_.video.num_temporal_layers);
1657       video_encoder_config_.encoder_specific_settings =
1658           new rtc::RefCountedObject<
1659               VideoEncoderConfig::Vp8EncoderSpecificSettings>(vp8_settings);
1660     } else if (params_.video.codec == "VP9") {
1661       VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
1662       vp9_settings.denoisingOn = false;
1663       vp9_settings.frameDroppingOn = false;
1664       vp9_settings.numberOfTemporalLayers =
1665           static_cast<unsigned char>(params_.video.num_temporal_layers);
1666       vp9_settings.numberOfSpatialLayers =
1667           static_cast<unsigned char>(params_.ss.num_spatial_layers);
1668       video_encoder_config_.encoder_specific_settings =
1669           new rtc::RefCountedObject<
1670               VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
1671     }
1672     // Setup frame generator.
1673     const size_t kWidth = 1850;
1674     const size_t kHeight = 1110;
1675     if (params_.screenshare.generate_slides) {
1676       frame_generator_ = test::FrameGenerator::CreateSlideGenerator(
1677           kWidth, kHeight,
1678           params_.screenshare.slide_change_interval * params_.video.fps);
1679     } else {
1680       std::vector<std::string> slides = params_.screenshare.slides;
1681       if (slides.empty()) {
1682         slides.push_back(test::ResourcePath("web_screenshot_1850_1110", "yuv"));
1683         slides.push_back(test::ResourcePath("presentation_1850_1110", "yuv"));
1684         slides.push_back(test::ResourcePath("photo_1850_1110", "yuv"));
1685         slides.push_back(
1686             test::ResourcePath("difficult_photo_1850_1110", "yuv"));
1687       }
1688       if (params_.screenshare.scroll_duration == 0) {
1689         // Cycle image every slide_change_interval seconds.
1690         frame_generator_ = test::FrameGenerator::CreateFromYuvFile(
1691             slides, kWidth, kHeight,
1692             params_.screenshare.slide_change_interval * params_.video.fps);
1693       } else {
1694         RTC_CHECK_LE(params_.video.width, kWidth);
1695         RTC_CHECK_LE(params_.video.height, kHeight);
1696         RTC_CHECK_GT(params_.screenshare.slide_change_interval, 0);
1697         const int kPauseDurationMs =
1698             (params_.screenshare.slide_change_interval -
1699              params_.screenshare.scroll_duration) *
1700             1000;
1701         RTC_CHECK_LE(params_.screenshare.scroll_duration,
1702                      params_.screenshare.slide_change_interval);
1703 
1704         frame_generator_ =
1705             test::FrameGenerator::CreateScrollingInputFromYuvFiles(
1706                 clock_, slides, kWidth, kHeight, params_.video.width,
1707                 params_.video.height,
1708                 params_.screenshare.scroll_duration * 1000, kPauseDurationMs);
1709       }
1710     }
1711   } else if (params_.ss.num_spatial_layers > 1) {  // For non-screenshare case.
1712     RTC_CHECK(params_.video.codec == "VP9");
1713     VideoCodecVP9 vp9_settings = VideoEncoder::GetDefaultVp9Settings();
1714     vp9_settings.numberOfTemporalLayers =
1715         static_cast<unsigned char>(params_.video.num_temporal_layers);
1716     vp9_settings.numberOfSpatialLayers =
1717         static_cast<unsigned char>(params_.ss.num_spatial_layers);
1718     video_encoder_config_.encoder_specific_settings = new rtc::RefCountedObject<
1719         VideoEncoderConfig::Vp9EncoderSpecificSettings>(vp9_settings);
1720   }
1721 }
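
// Worked example for the scrolling screenshare generator above (illustrative
// parameter values): with slide_change_interval = 10 s and scroll_duration =
// 2 s, each slide scrolls for 2000 ms and then pauses for
// kPauseDurationMs = (10 - 2) * 1000 = 8000 ms before the next slide is shown.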
1722 
1723 void VideoQualityTest::SetupThumbnailCapturers(size_t num_thumbnail_streams) {
1724   VideoStream thumbnail = DefaultThumbnailStream();
1725   for (size_t i = 0; i < num_thumbnail_streams; ++i) {
1726     thumbnail_capturers_.emplace_back(test::FrameGeneratorCapturer::Create(
1727         static_cast<int>(thumbnail.width), static_cast<int>(thumbnail.height),
1728         thumbnail.max_framerate, clock_));
1729     RTC_DCHECK(thumbnail_capturers_.back());
1730   }
1731 }
1732 
1733 void VideoQualityTest::CreateCapturer() {
1734   if (params_.screenshare.enabled) {
1735     test::FrameGeneratorCapturer* frame_generator_capturer =
1736         new test::FrameGeneratorCapturer(clock_, std::move(frame_generator_),
1737                                          params_.video.fps);
1738     EXPECT_TRUE(frame_generator_capturer->Init());
1739     video_capturer_.reset(frame_generator_capturer);
1740   } else {
1741     if (params_.video.clip_name == "Generator") {
1742       video_capturer_.reset(test::FrameGeneratorCapturer::Create(
1743           static_cast<int>(params_.video.width),
1744           static_cast<int>(params_.video.height), params_.video.fps, clock_));
1745     } else if (params_.video.clip_name.empty()) {
1746       video_capturer_.reset(test::VcmCapturer::Create(
1747           params_.video.width, params_.video.height, params_.video.fps,
1748           params_.video.capture_device_index));
1749       if (!video_capturer_) {
1750         // Failed to get actual camera, use chroma generator as backup.
1751         video_capturer_.reset(test::FrameGeneratorCapturer::Create(
1752             static_cast<int>(params_.video.width),
1753             static_cast<int>(params_.video.height), params_.video.fps, clock_));
1754       }
1755     } else {
1756       video_capturer_.reset(test::FrameGeneratorCapturer::CreateFromYuvFile(
1757           test::ResourcePath(params_.video.clip_name, "yuv"),
1758           params_.video.width, params_.video.height, params_.video.fps,
1759           clock_));
1760       ASSERT_TRUE(video_capturer_) << "Could not create capturer for "
1761                                    << params_.video.clip_name
1762                                    << ".yuv. Is this resource file present?";
1763     }
1764   }
1765   RTC_DCHECK(video_capturer_.get());
1766 }
1767 
1768 std::unique_ptr<test::LayerFilteringTransport>
1769 VideoQualityTest::CreateSendTransport() {
1770   return rtc::MakeUnique<test::LayerFilteringTransport>(
1771       &task_queue_, params_.pipe, sender_call_.get(), kPayloadTypeVP8,
1772       kPayloadTypeVP9, params_.video.selected_tl, params_.ss.selected_sl,
1773       payload_type_map_);
1774 }
1775 
1776 std::unique_ptr<test::DirectTransport>
1777 VideoQualityTest::CreateReceiveTransport() {
1778   return rtc::MakeUnique<test::DirectTransport>(
1779       &task_queue_, params_.pipe, receiver_call_.get(), payload_type_map_);
1780 }
1781 
1782 void VideoQualityTest::RunWithAnalyzer(const Params& params) {
1783   std::unique_ptr<test::LayerFilteringTransport> send_transport;
1784   std::unique_ptr<test::DirectTransport> recv_transport;
1785   FILE* graph_data_output_file = nullptr;
1786   std::unique_ptr<VideoAnalyzer> analyzer;
1787 
1788   params_ = params;
1789 
1790   RTC_CHECK(!params_.audio.enabled);
1791   // TODO(ivica): Merge with RunWithRenderer and use a flag / argument to
1792   // differentiate between the analyzer and the renderer case.
1793   CheckParams();
1794 
1795   if (!params_.analyzer.graph_data_output_filename.empty()) {
1796     graph_data_output_file =
1797         fopen(params_.analyzer.graph_data_output_filename.c_str(), "w");
1798     RTC_CHECK(graph_data_output_file)
1799         << "Can't open the file " << params_.analyzer.graph_data_output_filename
1800         << "!";
1801   }
1802 
1803   if (!params.logging.rtc_event_log_name.empty()) {
1804     event_log_ = RtcEventLog::Create(clock_, RtcEventLog::EncodingType::Legacy);
1805     std::unique_ptr<RtcEventLogOutputFile> output(
1806         rtc::MakeUnique<RtcEventLogOutputFile>(
1807             params.logging.rtc_event_log_name, RtcEventLog::kUnlimitedOutput));
1808     bool event_log_started = event_log_->StartLogging(
1809         std::move(output), RtcEventLog::kImmediateOutput);
1810     RTC_DCHECK(event_log_started);
1811   }
1812 
1813   Call::Config call_config(event_log_.get());
1814   call_config.bitrate_config = params.call.call_bitrate_config;
1815 
1816   task_queue_.SendTask(
1817       [this, &call_config, &send_transport, &recv_transport]() {
1818         CreateCalls(call_config, call_config);
1819         send_transport = CreateSendTransport();
1820         recv_transport = CreateReceiveTransport();
1821       });
1822 
1823   std::string graph_title = params_.analyzer.graph_title;
1824   if (graph_title.empty())
1825     graph_title = VideoQualityTest::GenerateGraphTitle();
1826   bool is_quick_test_enabled = field_trial::IsEnabled("WebRTC-QuickPerfTest");
1827   analyzer = rtc::MakeUnique<VideoAnalyzer>(
1828       send_transport.get(), params_.analyzer.test_label,
1829       params_.analyzer.avg_psnr_threshold, params_.analyzer.avg_ssim_threshold,
1830       is_quick_test_enabled
1831           ? kFramesSentInQuickTest
1832           : params_.analyzer.test_durations_secs * params_.video.fps,
1833       graph_data_output_file, graph_title,
1834       kVideoSendSsrcs[params_.ss.selected_stream],
1835       kSendRtxSsrcs[params_.ss.selected_stream],
1836       static_cast<size_t>(params_.ss.selected_stream), params.ss.selected_sl,
1837       params_.video.selected_tl, is_quick_test_enabled, clock_,
1838       params_.logging.rtp_dump_name);
1839 
1840   task_queue_.SendTask([&]() {
1841     analyzer->SetCall(sender_call_.get());
1842     analyzer->SetReceiver(receiver_call_->Receiver());
1843     send_transport->SetReceiver(analyzer.get());
1844     recv_transport->SetReceiver(sender_call_->Receiver());
1845 
1846     SetupVideo(analyzer.get(), recv_transport.get());
1847     SetupThumbnails(analyzer.get(), recv_transport.get());
1848     video_receive_configs_[params_.ss.selected_stream].renderer =
1849         analyzer.get();
1850     video_send_config_.pre_encode_callback = analyzer->pre_encode_proxy();
1851     RTC_DCHECK(!video_send_config_.post_encode_callback);
1852     video_send_config_.post_encode_callback = analyzer->encode_timing_proxy();
1853 
1854     SetupScreenshareOrSVC();
1855 
1856     CreateFlexfecStreams();
1857     CreateVideoStreams();
1858     analyzer->SetSendStream(video_send_stream_);
1859     if (video_receive_streams_.size() == 1)
1860       analyzer->SetReceiveStream(video_receive_streams_[0]);
1861 
1862     video_send_stream_->SetSource(analyzer->OutputInterface(),
1863                                   degradation_preference_);
1864 
1865     SetupThumbnailCapturers(params_.call.num_thumbnails);
1866     for (size_t i = 0; i < thumbnail_send_streams_.size(); ++i) {
1867       thumbnail_send_streams_[i]->SetSource(thumbnail_capturers_[i].get(),
1868                                             degradation_preference_);
1869     }
1870 
1871     CreateCapturer();
1872 
1873     analyzer->SetSource(video_capturer_.get(), params_.ss.infer_streams);
1874 
1875     StartEncodedFrameLogs(video_send_stream_);
1876     StartEncodedFrameLogs(video_receive_streams_[params_.ss.selected_stream]);
1877     video_send_stream_->Start();
1878     for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_)
1879       thumbnail_send_stream->Start();
1880     for (VideoReceiveStream* receive_stream : video_receive_streams_)
1881       receive_stream->Start();
1882     for (VideoReceiveStream* thumbnail_receive_stream :
1883          thumbnail_receive_streams_)
1884       thumbnail_receive_stream->Start();
1885 
1886     analyzer->StartMeasuringCpuProcessTime();
1887 
1888     video_capturer_->Start();
1889     for (std::unique_ptr<test::VideoCapturer>& video_capturer :
1890          thumbnail_capturers_) {
1891       video_capturer->Start();
1892     }
1893   });
1894 
1895   analyzer->Wait();
1896 
1897   event_log_->StopLogging();
1898 
1899   task_queue_.SendTask([&]() {
1900     for (std::unique_ptr<test::VideoCapturer>& video_capturer :
1901          thumbnail_capturers_)
1902       video_capturer->Stop();
1903     video_capturer_->Stop();
1904     for (VideoReceiveStream* thumbnail_receive_stream :
1905          thumbnail_receive_streams_)
1906       thumbnail_receive_stream->Stop();
1907     for (VideoReceiveStream* receive_stream : video_receive_streams_)
1908       receive_stream->Stop();
1909     for (VideoSendStream* thumbnail_send_stream : thumbnail_send_streams_)
1910       thumbnail_send_stream->Stop();
1911     video_send_stream_->Stop();
1912 
1913     DestroyStreams();
1914     DestroyThumbnailStreams();
1915 
1916     if (graph_data_output_file)
1917       fclose(graph_data_output_file);
1918 
1919     video_capturer_.reset();
1920     send_transport.reset();
1921     recv_transport.reset();
1922 
1923     DestroyCalls();
1924   });
1925 }
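
// A minimal sketch of how a test might drive RunWithAnalyzer (the parameter
// values are hypothetical; only fields referenced in this file are set):
//
//   VideoQualityTest::Params params;
//   params.call.send_side_bwe = true;
//   params.video.enabled = true;
//   params.video.codec = "VP8";
//   params.video.width = 640;
//   params.video.height = 480;
//   params.video.fps = 30;
//   params.analyzer.test_label = "example_640x480";
//   params.analyzer.test_durations_secs = 60;
//   VideoQualityTest test;
//   test.RunWithAnalyzer(params);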
1926 
1927 void VideoQualityTest::SetupAudio(int send_channel_id,
1928                                   int receive_channel_id,
1929                                   Transport* transport,
1930                                   AudioReceiveStream** audio_receive_stream) {
1931   audio_send_config_ = AudioSendStream::Config(transport);
1932   audio_send_config_.voe_channel_id = send_channel_id;
1933   audio_send_config_.rtp.ssrc = kAudioSendSsrc;
1934 
1935   // Add an extension to enable audio send-side BWE and allow audio bitrate
1936   // adaptation.
1937   audio_send_config_.rtp.extensions.clear();
1938   if (params_.call.send_side_bwe) {
1939     audio_send_config_.rtp.extensions.push_back(
1940         webrtc::RtpExtension(webrtc::RtpExtension::kTransportSequenceNumberUri,
1941                              test::kTransportSequenceNumberExtensionId));
1942     audio_send_config_.min_bitrate_bps = kOpusMinBitrateBps;
1943     audio_send_config_.max_bitrate_bps = kOpusBitrateFbBps;
1944   }
1945   audio_send_config_.send_codec_spec =
1946       rtc::Optional<AudioSendStream::Config::SendCodecSpec>(
1947           {kAudioSendPayloadType,
1948            {"OPUS", 48000, 2,
1949             {{"usedtx", (params_.audio.dtx ? "1" : "0")},
1950              {"stereo", "1"}}}});
1951   audio_send_config_.encoder_factory = encoder_factory_;
1952   audio_send_stream_ = sender_call_->CreateAudioSendStream(audio_send_config_);
1953 
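  // The receive config mirrors the send-side SSRC and header extensions, and
  // enables transport_cc feedback only when send-side BWE is in use.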
1954   AudioReceiveStream::Config audio_config;
1955   audio_config.rtp.local_ssrc = kReceiverLocalAudioSsrc;
1956   audio_config.rtcp_send_transport = transport;
1957   audio_config.voe_channel_id = receive_channel_id;
1958   audio_config.rtp.remote_ssrc = audio_send_config_.rtp.ssrc;
1959   audio_config.rtp.transport_cc = params_.call.send_side_bwe;
1960   audio_config.rtp.extensions = audio_send_config_.rtp.extensions;
1961   audio_config.decoder_factory = decoder_factory_;
1962   audio_config.decoder_map = {{kAudioSendPayloadType, {"OPUS", 48000, 2}}};
1963   if (params_.video.enabled && params_.audio.sync_video)
1964     audio_config.sync_group = kSyncGroup;
1965 
1966   *audio_receive_stream =
1967       receiver_call_->CreateAudioReceiveStream(audio_config);
1968 }
1969 
1970 void VideoQualityTest::RunWithRenderers(const Params& params) {
1971   std::unique_ptr<test::LayerFilteringTransport> send_transport;
1972   std::unique_ptr<test::DirectTransport> recv_transport;
1973   std::unique_ptr<test::FakeAudioDevice> fake_audio_device;
1974   ::VoiceEngineState voe;
1975   std::unique_ptr<test::VideoRenderer> local_preview;
1976   std::vector<std::unique_ptr<test::VideoRenderer>> loopback_renderers;
1977   AudioReceiveStream* audio_receive_stream = nullptr;
1978 
1979   task_queue_.SendTask([&]() {
1980     params_ = params;
1981     CheckParams();
1982 
1983     // TODO(ivica): Remove bitrate_config and use the default Call::Config(), to
1984     // match the full stack tests.
1985     Call::Config call_config(event_log_.get());
1986     call_config.bitrate_config = params_.call.call_bitrate_config;
1987 
1988     fake_audio_device.reset(new test::FakeAudioDevice(
1989         test::FakeAudioDevice::CreatePulsedNoiseCapturer(32000, 48000),
1990         test::FakeAudioDevice::CreateDiscardRenderer(48000),
1991         1.f));
1992 
1993     rtc::scoped_refptr<webrtc::AudioProcessing> audio_processing(
1994         webrtc::AudioProcessing::Create());
1995 
1996     if (params_.audio.enabled) {
1997       CreateVoiceEngine(&voe, fake_audio_device.get(), audio_processing.get(),
1998                         decoder_factory_);
1999       AudioState::Config audio_state_config;
2000       audio_state_config.voice_engine = voe.voice_engine;
2001       audio_state_config.audio_mixer = AudioMixerImpl::Create();
2002       audio_state_config.audio_processing = audio_processing;
2003       call_config.audio_state = AudioState::Create(audio_state_config);
2004       fake_audio_device->RegisterAudioCallback(
2005           call_config.audio_state->audio_transport());
2006     }
2007 
2008     CreateCalls(call_config, call_config);
2009 
2010     // TODO(minyue): consider if this is a good transport even for audio only
2011     // calls.
2012     send_transport = rtc::MakeUnique<test::LayerFilteringTransport>(
2013         &task_queue_, params.pipe, sender_call_.get(), kPayloadTypeVP8,
2014         kPayloadTypeVP9, params.video.selected_tl, params_.ss.selected_sl,
2015         payload_type_map_);
2016 
2017     recv_transport = rtc::MakeUnique<test::DirectTransport>(
2018         &task_queue_, params_.pipe, receiver_call_.get(), payload_type_map_);
2019 
2020     // TODO(ivica): Use two calls to be able to merge with RunWithAnalyzer or at
2021     // least share as much code as possible. That way this test would also match
2022     // the full stack tests better.
2023     send_transport->SetReceiver(receiver_call_->Receiver());
2024     recv_transport->SetReceiver(sender_call_->Receiver());
2025 
2026     if (params_.video.enabled) {
2027       // Create video renderers.
2028       local_preview.reset(test::VideoRenderer::Create(
2029           "Local Preview", params_.video.width, params_.video.height));
2030 
2031       const size_t selected_stream_id = params_.ss.selected_stream;
2032       const size_t num_streams = params_.ss.streams.size();
2033 
2034       if (selected_stream_id == num_streams) {
2035         for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) {
2036           std::ostringstream oss;
2037           oss << "Loopback Video - Stream #" << static_cast<int>(stream_id);
2038           loopback_renderers.emplace_back(test::VideoRenderer::Create(
2039               oss.str().c_str(), params_.ss.streams[stream_id].width,
2040               params_.ss.streams[stream_id].height));
2041         }
2042       } else {
2043         loopback_renderers.emplace_back(test::VideoRenderer::Create(
2044             "Loopback Video", params_.ss.streams[selected_stream_id].width,
2045             params_.ss.streams[selected_stream_id].height));
2046       }
2047 
2048       SetupVideo(send_transport.get(), recv_transport.get());
2049 
2050       video_send_config_.pre_encode_callback = local_preview.get();
2051       if (selected_stream_id == num_streams) {
2052         for (size_t stream_id = 0; stream_id < num_streams; ++stream_id) {
2053           video_receive_configs_[stream_id].renderer =
2054               loopback_renderers[stream_id].get();
2055           if (params_.audio.enabled && params_.audio.sync_video)
2056             video_receive_configs_[stream_id].sync_group = kSyncGroup;
2057         }
2058       } else {
2059         video_receive_configs_[selected_stream_id].renderer =
2060             loopback_renderers.back().get();
2061         if (params_.audio.enabled && params_.audio.sync_video)
2062           video_receive_configs_[selected_stream_id].sync_group = kSyncGroup;
2063       }
2064 
2065       SetupScreenshareOrSVC();
2066 
2067       CreateFlexfecStreams();
2068       CreateVideoStreams();
2069 
2070       CreateCapturer();
2071       video_send_stream_->SetSource(video_capturer_.get(),
2072                                     degradation_preference_);
2073     }
2074 
2075     if (params_.audio.enabled) {
2076       SetupAudio(voe.send_channel_id, voe.receive_channel_id,
2077                  send_transport.get(), &audio_receive_stream);
2078     }
2079 
2080     for (VideoReceiveStream* receive_stream : video_receive_streams_)
2081       StartEncodedFrameLogs(receive_stream);
2082     StartEncodedFrameLogs(video_send_stream_);
2083 
2084     // Start sending and receiving video.
2085     if (params_.video.enabled) {
2086       for (VideoReceiveStream* video_receive_stream : video_receive_streams_)
2087         video_receive_stream->Start();
2088 
2089       video_send_stream_->Start();
2090       video_capturer_->Start();
2091     }
2092 
2093     if (params_.audio.enabled) {
2094       // Start receiving audio.
2095       audio_receive_stream->Start();
2096       EXPECT_EQ(0, voe.base->StartPlayout(voe.receive_channel_id));
2097 
2098       // Start sending audio.
2099       audio_send_stream_->Start();
2100       EXPECT_EQ(0, voe.base->StartSend(voe.send_channel_id));
2101     }
2102   });
2103 
2104   test::PressEnterToContinue();
2105 
2106   task_queue_.SendTask([&]() {
2107     if (params_.audio.enabled) {
2108       // Stop sending audio.
2109       EXPECT_EQ(0, voe.base->StopSend(voe.send_channel_id));
2110       audio_send_stream_->Stop();
2111 
2112       // Stop receiving audio.
2113       EXPECT_EQ(0, voe.base->StopPlayout(voe.receive_channel_id));
2114       audio_receive_stream->Stop();
2115       sender_call_->DestroyAudioSendStream(audio_send_stream_);
2116       receiver_call_->DestroyAudioReceiveStream(audio_receive_stream);
2117     }
2118 
2119     // Stop receiving and sending video.
2120     if (params_.video.enabled) {
2121       video_capturer_->Stop();
2122       video_send_stream_->Stop();
2123       for (FlexfecReceiveStream* flexfec_receive_stream :
2124            flexfec_receive_streams_) {
2125         for (VideoReceiveStream* video_receive_stream :
2126              video_receive_streams_) {
2127           video_receive_stream->RemoveSecondarySink(flexfec_receive_stream);
2128         }
2129         receiver_call_->DestroyFlexfecReceiveStream(flexfec_receive_stream);
2130       }
2131       for (VideoReceiveStream* receive_stream : video_receive_streams_) {
2132         receive_stream->Stop();
2133         receiver_call_->DestroyVideoReceiveStream(receive_stream);
2134       }
2135       sender_call_->DestroyVideoSendStream(video_send_stream_);
2136     }
2137 
2138     video_capturer_.reset();
2139     send_transport.reset();
2140     recv_transport.reset();
2141 
2142     if (params_.audio.enabled)
2143       DestroyVoiceEngine(&voe);
2144 
2145     local_preview.reset();
2146     loopback_renderers.clear();
2147 
2148     DestroyCalls();
2149   });
2150 }
2151 
2152 void VideoQualityTest::StartEncodedFrameLogs(VideoSendStream* stream) {
2153   if (!params_.logging.encoded_frame_base_path.empty()) {
2154     std::ostringstream str;
2155     str << send_logs_++;
2156     std::string prefix =
2157         params_.logging.encoded_frame_base_path + "." + str.str() + ".send.";
2158     stream->EnableEncodedFrameRecording(
2159         std::vector<rtc::PlatformFile>(
2160             {rtc::CreatePlatformFile(prefix + "1.ivf"),
2161              rtc::CreatePlatformFile(prefix + "2.ivf"),
2162              rtc::CreatePlatformFile(prefix + "3.ivf")}),
2163         100000000);
2164   }
2165 }
2166 
2167 void VideoQualityTest::StartEncodedFrameLogs(VideoReceiveStream* stream) {
2168   if (!params_.logging.encoded_frame_base_path.empty()) {
2169     std::ostringstream str;
2170     str << receive_logs_++;
2171     std::string path =
2172         params_.logging.encoded_frame_base_path + "." + str.str() + ".recv.ivf";
2173     stream->EnableEncodedFrameRecording(rtc::CreatePlatformFile(path),
2174                                         100000000);
2175   }
2176 }
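
// Example of the resulting file names (hypothetical base path, counters
// assumed to start at 0): with encoded_frame_base_path set to "/tmp/quality",
// the first send stream records to /tmp/quality.0.send.1.ivf, .2.ivf and
// .3.ivf (up to one file per simulcast stream), and the first receive stream
// records to /tmp/quality.0.recv.ivf.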
2177 }  // namespace webrtc
2178