1 // Copyright 2014 The Chromium Authors. All rights reserved.
2 // Use of this source code is governed by a BSD-style license that can be
3 // found in the LICENSE file.
4
5 // Simulate end to end streaming.
6 //
7 // Input:
8 // --source=
9 // WebM used as the source of video and audio frames.
10 // --output=
11 //     File path to write out the raw event log of the simulation session.
12 // --sim-id=
13 // Unique simulation ID.
14 // --target-delay-ms=
15 // Target playout delay to configure (integer number of milliseconds).
16 // Optional; default is 400.
17 // --max-frame-rate=
18 // The maximum frame rate allowed at any time during the Cast session.
19 // Optional; default is 30.
20 // --source-frame-rate=
21 // Overrides the playback rate; the source video will play faster/slower.
22 // --run-time=
23 // In seconds, how long the Cast session runs for.
24 // Optional; default is 180.
25 // --metrics-output=
26 // File path to write PSNR and SSIM metrics between source frames and
27 // decoded frames. Assumes all encoded frames are decoded.
28 // --yuv-output=
29 // File path to write YUV decoded frames in YUV4MPEG2 format.
30 // --no-simulation
31 // Do not run network simulation.
32 //
33 // Output:
34 // - Raw event log of the simulation session tagged with the unique test ID,
35 // written out to the specified file path.
36
37 #include <stddef.h>
38 #include <stdint.h>
39
40 #include <memory>
41 #include <utility>
42
43 #include "base/at_exit.h"
44 #include "base/base_paths.h"
45 #include "base/bind.h"
46 #include "base/command_line.h"
47 #include "base/containers/queue.h"
48 #include "base/files/file_path.h"
49 #include "base/files/file_util.h"
50 #include "base/files/memory_mapped_file.h"
51 #include "base/files/scoped_file.h"
52 #include "base/json/json_writer.h"
53 #include "base/logging.h"
54 #include "base/macros.h"
55 #include "base/memory/ptr_util.h"
56 #include "base/path_service.h"
57 #include "base/strings/string_number_conversions.h"
58 #include "base/strings/stringprintf.h"
59 #include "base/test/simple_test_tick_clock.h"
60 #include "base/threading/thread_task_runner_handle.h"
61 #include "base/time/tick_clock.h"
62 #include "base/values.h"
63 #include "media/base/audio_bus.h"
64 #include "media/base/fake_single_thread_task_runner.h"
65 #include "media/base/media.h"
66 #include "media/base/video_frame.h"
67 #include "media/cast/cast_config.h"
68 #include "media/cast/cast_environment.h"
69 #include "media/cast/cast_receiver.h"
70 #include "media/cast/cast_sender.h"
71 #include "media/cast/logging/encoding_event_subscriber.h"
72 #include "media/cast/logging/logging_defines.h"
73 #include "media/cast/logging/proto/raw_events.pb.h"
74 #include "media/cast/logging/raw_event_subscriber_bundle.h"
75 #include "media/cast/logging/simple_event_subscriber.h"
76 #include "media/cast/net/cast_transport.h"
77 #include "media/cast/net/cast_transport_config.h"
78 #include "media/cast/net/cast_transport_defines.h"
79 #include "media/cast/net/cast_transport_impl.h"
80 #include "media/cast/test/fake_media_source.h"
81 #include "media/cast/test/loopback_transport.h"
82 #include "media/cast/test/proto/network_simulation_model.pb.h"
83 #include "media/cast/test/skewed_tick_clock.h"
84 #include "media/cast/test/utility/audio_utility.h"
85 #include "media/cast/test/utility/default_config.h"
86 #include "media/cast/test/utility/test_util.h"
87 #include "media/cast/test/utility/udp_proxy.h"
88 #include "media/cast/test/utility/video_utility.h"
89
90 using media::cast::proto::IPPModel;
91 using media::cast::proto::NetworkSimulationModel;
92 using media::cast::proto::NetworkSimulationModelType;
93
94 namespace media {
95 namespace cast {
96 namespace {
// Command-line switch names. See the usage comment at the top of this file
// for the semantics of most of these switches.
const char kLibDir[] = "lib-dir";  // Directory to locate FFmpeg; see main().
const char kModelPath[] = "model";  // Serialized NetworkSimulationModel file.
const char kMetricsOutputPath[] = "metrics-output";
const char kOutputPath[] = "output";
const char kMaxFrameRate[] = "max-frame-rate";
const char kNoSimulation[] = "no-simulation";
const char kRunTime[] = "run-time";
const char kSimulationId[] = "sim-id";
const char kSourcePath[] = "source";
const char kSourceFrameRate[] = "source-frame-rate";
const char kTargetDelay[] = "target-delay-ms";
const char kYuvOutputPath[] = "yuv-output";
109
GetIntegerSwitchValue(const char * switch_name,int default_value)110 int GetIntegerSwitchValue(const char* switch_name, int default_value) {
111 const std::string as_str =
112 base::CommandLine::ForCurrentProcess()->GetSwitchValueASCII(switch_name);
113 if (as_str.empty())
114 return default_value;
115 int as_int;
116 CHECK(base::StringToInt(as_str, &as_int));
117 CHECK_GT(as_int, 0);
118 return as_int;
119 }
120
LogAudioOperationalStatus(OperationalStatus status)121 void LogAudioOperationalStatus(OperationalStatus status) {
122 LOG(INFO) << "Audio status: " << status;
123 }
124
LogVideoOperationalStatus(OperationalStatus status)125 void LogVideoOperationalStatus(OperationalStatus status) {
126 LOG(INFO) << "Video status: " << status;
127 }
128
129 struct PacketProxy {
PacketProxymedia::cast::__anonb7b2acb10111::PacketProxy130 PacketProxy() : receiver(nullptr) {}
ReceivePacketmedia::cast::__anonb7b2acb10111::PacketProxy131 void ReceivePacket(std::unique_ptr<Packet> packet) {
132 if (receiver)
133 receiver->ReceivePacket(std::move(packet));
134 }
135 CastReceiver* receiver;
136 };
137
138 class TransportClient : public CastTransport::Client {
139 public:
TransportClient(LogEventDispatcher * log_event_dispatcher,PacketProxy * packet_proxy)140 TransportClient(LogEventDispatcher* log_event_dispatcher,
141 PacketProxy* packet_proxy)
142 : log_event_dispatcher_(log_event_dispatcher),
143 packet_proxy_(packet_proxy) {}
144
OnStatusChanged(CastTransportStatus status)145 void OnStatusChanged(CastTransportStatus status) final {
146 LOG(INFO) << "Cast transport status: " << status;
147 }
OnLoggingEventsReceived(std::unique_ptr<std::vector<FrameEvent>> frame_events,std::unique_ptr<std::vector<PacketEvent>> packet_events)148 void OnLoggingEventsReceived(
149 std::unique_ptr<std::vector<FrameEvent>> frame_events,
150 std::unique_ptr<std::vector<PacketEvent>> packet_events) final {
151 DCHECK(log_event_dispatcher_);
152 log_event_dispatcher_->DispatchBatchOfEvents(std::move(frame_events),
153 std::move(packet_events));
154 }
ProcessRtpPacket(std::unique_ptr<Packet> packet)155 void ProcessRtpPacket(std::unique_ptr<Packet> packet) final {
156 if (packet_proxy_)
157 packet_proxy_->ReceivePacket(std::move(packet));
158 }
159
160 private:
161 LogEventDispatcher* const log_event_dispatcher_; // Not owned by this class.
162 PacketProxy* const packet_proxy_; // Not owned by this class.
163
164 DISALLOW_COPY_AND_ASSIGN(TransportClient);
165 };
166
167 // Maintains a queue of encoded video frames.
168 // This works by tracking FRAME_CAPTURE_END and FRAME_ENCODED events.
169 // If a video frame is detected to be encoded it transfers a frame
170 // from FakeMediaSource to its internal queue. Otherwise it drops a
171 // frame from FakeMediaSource.
172 class EncodedVideoFrameTracker : public RawEventSubscriber {
173 public:
EncodedVideoFrameTracker(FakeMediaSource * media_source)174 EncodedVideoFrameTracker(FakeMediaSource* media_source)
175 : media_source_(media_source),
176 last_frame_event_type_(UNKNOWN) {}
~EncodedVideoFrameTracker()177 ~EncodedVideoFrameTracker() final {}
178
179 // RawEventSubscriber implementations.
OnReceiveFrameEvent(const FrameEvent & frame_event)180 void OnReceiveFrameEvent(const FrameEvent& frame_event) final {
181 // This method only cares about video FRAME_CAPTURE_END and
182 // FRAME_ENCODED events.
183 if (frame_event.media_type != VIDEO_EVENT) {
184 return;
185 }
186 if (frame_event.type != FRAME_CAPTURE_END &&
187 frame_event.type != FRAME_ENCODED) {
188 return;
189 }
190 // If there are two consecutive FRAME_CAPTURE_END events that means
191 // a frame is dropped.
192 if (last_frame_event_type_ == FRAME_CAPTURE_END &&
193 frame_event.type == FRAME_CAPTURE_END) {
194 media_source_->PopOldestInsertedVideoFrame();
195 }
196 if (frame_event.type == FRAME_ENCODED) {
197 video_frames_.push(media_source_->PopOldestInsertedVideoFrame());
198 }
199 last_frame_event_type_ = frame_event.type;
200 }
201
OnReceivePacketEvent(const PacketEvent & packet_event)202 void OnReceivePacketEvent(const PacketEvent& packet_event) final {
203 // Don't care.
204 }
205
PopOldestEncodedFrame()206 scoped_refptr<media::VideoFrame> PopOldestEncodedFrame() {
207 CHECK(!video_frames_.empty());
208 scoped_refptr<media::VideoFrame> video_frame = video_frames_.front();
209 video_frames_.pop();
210 return video_frame;
211 }
212
213 private:
214 FakeMediaSource* media_source_;
215 CastLoggingEvent last_frame_event_type_;
216 base::queue<scoped_refptr<media::VideoFrame>> video_frames_;
217
218 DISALLOW_COPY_AND_ASSIGN(EncodedVideoFrameTracker);
219 };
220
221 // Appends a YUV frame in I420 format to the file located at |path|.
AppendYuvToFile(const base::FilePath & path,scoped_refptr<media::VideoFrame> frame)222 void AppendYuvToFile(const base::FilePath& path,
223 scoped_refptr<media::VideoFrame> frame) {
224 // Write YUV420 format to file.
225 std::string header;
226 base::StringAppendF(
227 &header, "FRAME W%d H%d\n",
228 frame->coded_size().width(),
229 frame->coded_size().height());
230 AppendToFile(path, header.data(), header.size());
231 AppendToFile(path,
232 reinterpret_cast<char*>(frame->data(media::VideoFrame::kYPlane)),
233 frame->stride(media::VideoFrame::kYPlane) *
234 frame->rows(media::VideoFrame::kYPlane));
235 AppendToFile(path,
236 reinterpret_cast<char*>(frame->data(media::VideoFrame::kUPlane)),
237 frame->stride(media::VideoFrame::kUPlane) *
238 frame->rows(media::VideoFrame::kUPlane));
239 AppendToFile(path,
240 reinterpret_cast<char*>(frame->data(media::VideoFrame::kVPlane)),
241 frame->stride(media::VideoFrame::kVPlane) *
242 frame->rows(media::VideoFrame::kVPlane));
243 }
244
// A container to save output of GotVideoFrame() for computation based
// on output frames.
struct GotVideoFrameOutput {
  int counter = 0;               // Number of decoded video frames observed.
  std::vector<double> psnr;      // Per-frame PSNR (quality-test mode only).
  std::vector<double> ssim;      // Per-frame SSIM (quality-test mode only).
};
253
GotVideoFrame(GotVideoFrameOutput * metrics_output,const base::FilePath & yuv_output,EncodedVideoFrameTracker * video_frame_tracker,CastReceiver * cast_receiver,scoped_refptr<media::VideoFrame> video_frame,base::TimeTicks render_time,bool continuous)254 void GotVideoFrame(GotVideoFrameOutput* metrics_output,
255 const base::FilePath& yuv_output,
256 EncodedVideoFrameTracker* video_frame_tracker,
257 CastReceiver* cast_receiver,
258 scoped_refptr<media::VideoFrame> video_frame,
259 base::TimeTicks render_time,
260 bool continuous) {
261 ++metrics_output->counter;
262 cast_receiver->RequestDecodedVideoFrame(
263 base::BindRepeating(&GotVideoFrame, metrics_output, yuv_output,
264 video_frame_tracker, cast_receiver));
265
266 // If |video_frame_tracker| is available that means we're computing
267 // quality metrices.
268 if (video_frame_tracker) {
269 scoped_refptr<media::VideoFrame> src_frame =
270 video_frame_tracker->PopOldestEncodedFrame();
271 metrics_output->psnr.push_back(I420PSNR(*src_frame, *video_frame));
272 metrics_output->ssim.push_back(I420SSIM(*src_frame, *video_frame));
273 }
274
275 if (!yuv_output.empty()) {
276 AppendYuvToFile(yuv_output, std::move(video_frame));
277 }
278 }
279
GotAudioFrame(int * counter,CastReceiver * cast_receiver,std::unique_ptr<AudioBus> audio_bus,base::TimeTicks playout_time,bool is_continuous)280 void GotAudioFrame(int* counter,
281 CastReceiver* cast_receiver,
282 std::unique_ptr<AudioBus> audio_bus,
283 base::TimeTicks playout_time,
284 bool is_continuous) {
285 ++*counter;
286 cast_receiver->RequestDecodedAudioFrame(
287 base::BindRepeating(&GotAudioFrame, counter, cast_receiver));
288 }
289
// Run simulation once.
//
// Builds a complete sender/receiver Cast session on a fake clock driven by a
// single fake task runner, optionally routing packets through a simulated
// lossy network, then pumps the session for the configured run time and
// reports statistics.
//
// |source_path| is the WebM source of audio/video frames (may be empty).
// |log_output_path| is the path to write serialized log.
// |metrics_output_path| when non-empty enables the PSNR/SSIM quality test.
// |yuv_output_path| when non-empty receives decoded frames (YUV4MPEG2).
// |extra_data| is extra tagging information to write to log.
// |model| selects the network simulation behavior.
void RunSimulation(const base::FilePath& source_path,
                   const base::FilePath& log_output_path,
                   const base::FilePath& metrics_output_path,
                   const base::FilePath& yuv_output_path,
                   const std::string& extra_data,
                   const NetworkSimulationModel& model) {
  // Fake clock. Make sure start time is non zero.
  base::SimpleTestTickClock testing_clock;
  testing_clock.Advance(base::TimeDelta::FromSeconds(1));

  // Task runner. A single fake runner drives both sender and receiver.
  scoped_refptr<FakeSingleThreadTaskRunner> task_runner =
      new FakeSingleThreadTaskRunner(&testing_clock);
  base::ThreadTaskRunnerHandle task_runner_handle(task_runner);

  // CastEnvironments. Sender and receiver get independently skewable clocks
  // layered on the shared fake clock.
  test::SkewedTickClock sender_clock(&testing_clock);
  scoped_refptr<CastEnvironment> sender_env =
      new CastEnvironment(&sender_clock, task_runner, task_runner, task_runner);
  test::SkewedTickClock receiver_clock(&testing_clock);
  scoped_refptr<CastEnvironment> receiver_env = new CastEnvironment(
      &receiver_clock, task_runner, task_runner, task_runner);

  // Event subscriber. Store at most 1 hour of events
  // (100 audio events/sec and 30 video events/sec).
  EncodingEventSubscriber audio_event_subscriber(AUDIO_EVENT,
                                                 100 * 60 * 60);
  EncodingEventSubscriber video_event_subscriber(VIDEO_EVENT,
                                                 30 * 60 * 60);
  sender_env->logger()->Subscribe(&audio_event_subscriber);
  sender_env->logger()->Subscribe(&video_event_subscriber);

  // Audio sender config. Playout delay is pinned (min == max) to the value
  // of --target-delay-ms (default 400).
  FrameSenderConfig audio_sender_config = GetDefaultAudioSenderConfig();
  audio_sender_config.min_playout_delay =
      audio_sender_config.max_playout_delay = base::TimeDelta::FromMilliseconds(
          GetIntegerSwitchValue(kTargetDelay, 400));

  // Audio receiver config.
  FrameReceiverConfig audio_receiver_config =
      GetDefaultAudioReceiverConfig();
  audio_receiver_config.rtp_max_delay_ms =
      audio_sender_config.max_playout_delay.InMilliseconds();

  // Video sender config. Uses the same pinned playout delay as audio.
  FrameSenderConfig video_sender_config = GetDefaultVideoSenderConfig();
  video_sender_config.max_bitrate = 2500000;
  video_sender_config.min_bitrate = 2000000;
  video_sender_config.start_bitrate = 2000000;
  video_sender_config.min_playout_delay =
      video_sender_config.max_playout_delay =
          audio_sender_config.max_playout_delay;
  video_sender_config.max_frame_rate = GetIntegerSwitchValue(kMaxFrameRate, 30);

  // Video receiver config.
  FrameReceiverConfig video_receiver_config =
      GetDefaultVideoReceiverConfig();
  video_receiver_config.rtp_max_delay_ms =
      video_sender_config.max_playout_delay.InMilliseconds();

  // Loopback transport. Owned by CastTransport.
  LoopBackTransport* receiver_to_sender = new LoopBackTransport(receiver_env);
  LoopBackTransport* sender_to_receiver = new LoopBackTransport(sender_env);

  PacketProxy packet_proxy;

  // Cast receiver.
  std::unique_ptr<CastTransport> transport_receiver(new CastTransportImpl(
      &testing_clock, base::TimeDelta::FromSeconds(1),
      std::make_unique<TransportClient>(receiver_env->logger(), &packet_proxy),
      base::WrapUnique(receiver_to_sender), task_runner));
  std::unique_ptr<CastReceiver> cast_receiver(
      CastReceiver::Create(receiver_env, audio_receiver_config,
                           video_receiver_config, transport_receiver.get()));

  // Now that the receiver exists, route proxied RTP packets to it.
  packet_proxy.receiver = cast_receiver.get();

  // Cast sender and transport sender. The sender-side TransportClient gets a
  // null PacketProxy since it expects no incoming RTP packets.
  std::unique_ptr<CastTransport> transport_sender(new CastTransportImpl(
      &testing_clock, base::TimeDelta::FromSeconds(1),
      std::make_unique<TransportClient>(sender_env->logger(), nullptr),
      base::WrapUnique(sender_to_receiver), task_runner));
  std::unique_ptr<CastSender> cast_sender(
      CastSender::Create(sender_env, transport_sender.get()));

  // Initialize network simulation model. Only the interrupted Poisson
  // process model actually perturbs the packet pipes.
  const bool use_network_simulation =
      model.type() == media::cast::proto::INTERRUPTED_POISSON_PROCESS;
  std::unique_ptr<test::InterruptedPoissonProcess> ipp;
  if (use_network_simulation) {
    LOG(INFO) << "Running Poisson based network simulation.";
    const IPPModel& ipp_model = model.ipp();
    std::vector<double> average_rates(ipp_model.average_rate_size());
    std::copy(ipp_model.average_rate().begin(),
              ipp_model.average_rate().end(),
              average_rates.begin());
    ipp.reset(new test::InterruptedPoissonProcess(
        average_rates,
        ipp_model.coef_burstiness(), ipp_model.coef_variance(), 0));
    // 128 KB buffers on each direction of the loopback link.
    receiver_to_sender->Initialize(ipp->NewBuffer(128 * 1024),
                                   transport_sender->PacketReceiverForTesting(),
                                   task_runner, &testing_clock);
    sender_to_receiver->Initialize(
        ipp->NewBuffer(128 * 1024),
        transport_receiver->PacketReceiverForTesting(), task_runner,
        &testing_clock);
  } else {
    LOG(INFO) << "No network simulation.";
    // Null packet pipes: packets are delivered without loss or delay.
    receiver_to_sender->Initialize(std::unique_ptr<test::PacketPipe>(),
                                   transport_sender->PacketReceiverForTesting(),
                                   task_runner, &testing_clock);
    sender_to_receiver->Initialize(
        std::unique_ptr<test::PacketPipe>(),
        transport_receiver->PacketReceiverForTesting(), task_runner,
        &testing_clock);
  }

  // Initialize a fake media source and a tracker of encoded video frames.
  // The tracker is only needed when quality metrics were requested.
  const bool quality_test = !metrics_output_path.empty();
  FakeMediaSource media_source(task_runner,
                               &testing_clock,
                               audio_sender_config,
                               video_sender_config,
                               quality_test);
  std::unique_ptr<EncodedVideoFrameTracker> video_frame_tracker;
  if (quality_test) {
    video_frame_tracker.reset(new EncodedVideoFrameTracker(&media_source));
    sender_env->logger()->Subscribe(video_frame_tracker.get());
  }

  // Quality metrics computed for each frame decoded.
  GotVideoFrameOutput metrics_output;

  // Start receiver. GotVideoFrame/GotAudioFrame re-arm themselves on every
  // decoded frame.
  int audio_frame_count = 0;
  cast_receiver->RequestDecodedVideoFrame(
      base::BindRepeating(&GotVideoFrame, &metrics_output, yuv_output_path,
                          video_frame_tracker.get(), cast_receiver.get()));
  cast_receiver->RequestDecodedAudioFrame(base::BindRepeating(
      &GotAudioFrame, &audio_frame_count, cast_receiver.get()));

  // Initializing audio and video senders.
  cast_sender->InitializeAudio(audio_sender_config,
                               base::BindOnce(&LogAudioOperationalStatus));
  cast_sender->InitializeVideo(media_source.get_video_config(),
                               base::BindRepeating(&LogVideoOperationalStatus),
                               CreateDefaultVideoEncodeAcceleratorCallback(),
                               CreateDefaultVideoEncodeMemoryCallback());
  task_runner->RunTasks();

  // Truncate YUV files to prepare for writing.
  if (!yuv_output_path.empty()) {
    base::ScopedFILE file(base::OpenFile(yuv_output_path, "wb"));
    if (!file.get()) {
      LOG(ERROR) << "Cannot save YUV output to file.";
      return;
    }
    LOG(INFO) << "Writing YUV output to file: " << yuv_output_path.value();

    // Write YUV4MPEG2 header.
    const std::string header("YUV4MPEG2 W1280 H720 F30000:1001 Ip A1:1 C420\n");
    AppendToFile(yuv_output_path, header.data(), header.size());
  }

  // Start sending.
  if (!source_path.empty()) {
    // 0 means using the FPS from the file.
    media_source.SetSourceFile(source_path,
                               GetIntegerSwitchValue(kSourceFrameRate, 0));
  }
  media_source.Start(cast_sender->audio_frame_input(),
                     cast_sender->video_frame_input());

  // By default runs simulation for 3 minutes or the desired duration
  // by using --run-time= flag.
  base::TimeDelta elapsed_time;
  const base::TimeDelta desired_run_time =
      base::TimeDelta::FromSeconds(GetIntegerSwitchValue(kRunTime, 180));
  while (elapsed_time < desired_run_time) {
    // Each step is 100us.
    base::TimeDelta step = base::TimeDelta::FromMicroseconds(100);
    task_runner->Sleep(step);
    elapsed_time += step;
  }

  // Unsubscribe from logging events.
  sender_env->logger()->Unsubscribe(&audio_event_subscriber);
  sender_env->logger()->Unsubscribe(&video_event_subscriber);
  if (quality_test)
    sender_env->logger()->Unsubscribe(video_frame_tracker.get());

  // Get event logs for audio and video.
  media::cast::proto::LogMetadata audio_metadata, video_metadata;
  media::cast::FrameEventList audio_frame_events, video_frame_events;
  media::cast::PacketEventList audio_packet_events, video_packet_events;
  audio_metadata.set_extra_data(extra_data);
  video_metadata.set_extra_data(extra_data);
  audio_event_subscriber.GetEventsAndReset(
      &audio_metadata, &audio_frame_events, &audio_packet_events);
  video_event_subscriber.GetEventsAndReset(
      &video_metadata, &video_frame_events, &video_packet_events);

  // Print simulation results.

  // Compute and print statistics for video:
  //
  // * Total video frames captured.
  // * Total video frames encoded.
  // * Total video frames dropped.
  // * Total video frames received late.
  // * Average target bitrate.
  // * Average encoded bitrate.
  int total_video_frames = 0;
  int encoded_video_frames = 0;
  int dropped_video_frames = 0;
  int late_video_frames = 0;
  int64_t total_delay_of_late_frames_ms = 0;
  int64_t encoded_size = 0;
  int64_t target_bitrate = 0;
  for (size_t i = 0; i < video_frame_events.size(); ++i) {
    const media::cast::proto::AggregatedFrameEvent& event =
        *video_frame_events[i];
    ++total_video_frames;
    // A frame with no encoded size was dropped before/during encoding.
    if (event.has_encoded_frame_size()) {
      ++encoded_video_frames;
      encoded_size += event.encoded_frame_size();
      target_bitrate += event.target_bitrate();
    } else {
      ++dropped_video_frames;
    }
    // Negative delay means the frame arrived after its playout deadline.
    if (event.has_delay_millis() && event.delay_millis() < 0) {
      ++late_video_frames;
      total_delay_of_late_frames_ms += -event.delay_millis();
    }
  }

  // Subtract fraction of dropped frames from |elapsed_time| before estimating
  // the average encoded bitrate.
  const base::TimeDelta elapsed_time_undropped =
      total_video_frames <= 0
          ? base::TimeDelta()
          : (elapsed_time * (total_video_frames - dropped_video_frames) /
             total_video_frames);
  constexpr double kKilobitsPerByte = 8.0 / 1000;
  const double avg_encoded_bitrate =
      elapsed_time_undropped <= base::TimeDelta()
          ? 0
          : encoded_size * kKilobitsPerByte * elapsed_time_undropped.ToHz();
  // NOTE: integer division; the average target bitrate is truncated to
  // whole kbps before being widened to double.
  double avg_target_bitrate =
      encoded_video_frames ? target_bitrate / encoded_video_frames / 1000 : 0;

  LOG(INFO) << "Configured target playout delay (ms): "
            << video_receiver_config.rtp_max_delay_ms;
  LOG(INFO) << "Audio frame count: " << audio_frame_count;
  LOG(INFO) << "Inserted video frames: " << total_video_frames;
  LOG(INFO) << "Decoded video frames: " << metrics_output.counter;
  LOG(INFO) << "Dropped video frames: " << dropped_video_frames;
  LOG(INFO) << "Late video frames: " << late_video_frames
            << " (average lateness: "
            << (late_video_frames > 0 ?
                    static_cast<double>(total_delay_of_late_frames_ms) /
                        late_video_frames :
                    0)
            << " ms)";
  LOG(INFO) << "Average encoded bitrate (kbps): " << avg_encoded_bitrate;
  LOG(INFO) << "Average target bitrate (kbps): " << avg_target_bitrate;
  LOG(INFO) << "Writing log: " << log_output_path.value();

  // Truncate file and then write serialized log.
  // NOTE(review): this scope only truncates the file; no event serialization
  // is visible here — confirm whether the write happens elsewhere or was
  // removed.
  {
    base::ScopedFILE file(base::OpenFile(log_output_path, "wb"));
    if (!file.get()) {
      LOG(INFO) << "Cannot write to log.";
      return;
    }
  }

  // Write quality metrics, one "PSNR SSIM" pair per line.
  if (quality_test) {
    LOG(INFO) << "Writing quality metrics: " << metrics_output_path.value();
    std::string line;
    for (size_t i = 0; i < metrics_output.psnr.size() &&
                       i < metrics_output.ssim.size(); ++i) {
      base::StringAppendF(&line, "%f %f\n", metrics_output.psnr[i],
                          metrics_output.ssim[i]);
    }
    WriteFile(metrics_output_path, line.data(), line.length());
  }
}
582
DefaultModel()583 NetworkSimulationModel DefaultModel() {
584 NetworkSimulationModel model;
585 model.set_type(cast::proto::INTERRUPTED_POISSON_PROCESS);
586 IPPModel* ipp = model.mutable_ipp();
587 ipp->set_coef_burstiness(0.609);
588 ipp->set_coef_variance(4.1);
589
590 ipp->add_average_rate(0.609);
591 ipp->add_average_rate(0.495);
592 ipp->add_average_rate(0.561);
593 ipp->add_average_rate(0.458);
594 ipp->add_average_rate(0.538);
595 ipp->add_average_rate(0.513);
596 ipp->add_average_rate(0.585);
597 ipp->add_average_rate(0.592);
598 ipp->add_average_rate(0.658);
599 ipp->add_average_rate(0.556);
600 ipp->add_average_rate(0.371);
601 ipp->add_average_rate(0.595);
602 ipp->add_average_rate(0.490);
603 ipp->add_average_rate(0.980);
604 ipp->add_average_rate(0.781);
605 ipp->add_average_rate(0.463);
606
607 return model;
608 }
609
IsModelValid(const NetworkSimulationModel & model)610 bool IsModelValid(const NetworkSimulationModel& model) {
611 if (!model.has_type())
612 return false;
613 NetworkSimulationModelType type = model.type();
614 if (type == media::cast::proto::INTERRUPTED_POISSON_PROCESS) {
615 if (!model.has_ipp())
616 return false;
617 const IPPModel& ipp = model.ipp();
618 if (ipp.coef_burstiness() <= 0.0 || ipp.coef_variance() <= 0.0)
619 return false;
620 if (ipp.average_rate_size() == 0)
621 return false;
622 for (int i = 0; i < ipp.average_rate_size(); i++) {
623 if (ipp.average_rate(i) <= 0.0)
624 return false;
625 }
626 }
627
628 return true;
629 }
630
LoadModel(const base::FilePath & model_path)631 NetworkSimulationModel LoadModel(const base::FilePath& model_path) {
632 if (base::CommandLine::ForCurrentProcess()->HasSwitch(kNoSimulation)) {
633 NetworkSimulationModel model;
634 model.set_type(media::cast::proto::NO_SIMULATION);
635 return model;
636 }
637 if (model_path.empty()) {
638 LOG(ERROR) << "Model path not set; Using default model.";
639 return DefaultModel();
640 }
641 std::string model_str;
642 if (!base::ReadFileToString(model_path, &model_str)) {
643 LOG(ERROR) << "Failed to read model file.";
644 return DefaultModel();
645 }
646
647 NetworkSimulationModel model;
648 if (!model.ParseFromString(model_str)) {
649 LOG(ERROR) << "Failed to parse model.";
650 return DefaultModel();
651 }
652 if (!IsModelValid(model)) {
653 LOG(ERROR) << "Invalid model.";
654 return DefaultModel();
655 }
656
657 return model;
658 }
659
660 } // namespace
661 } // namespace cast
662 } // namespace media
663
main(int argc,char ** argv)664 int main(int argc, char** argv) {
665 base::AtExitManager at_exit;
666 base::CommandLine::Init(argc, argv);
667 InitLogging(logging::LoggingSettings());
668
669 const base::CommandLine* cmd = base::CommandLine::ForCurrentProcess();
670 base::FilePath media_path = cmd->GetSwitchValuePath(media::cast::kLibDir);
671 if (media_path.empty()) {
672 if (!base::PathService::Get(base::DIR_MODULE, &media_path)) {
673 LOG(ERROR) << "Failed to load FFmpeg.";
674 return 1;
675 }
676 }
677
678 media::InitializeMediaLibrary();
679
680 base::FilePath source_path = cmd->GetSwitchValuePath(
681 media::cast::kSourcePath);
682 base::FilePath log_output_path = cmd->GetSwitchValuePath(
683 media::cast::kOutputPath);
684 if (log_output_path.empty()) {
685 base::GetTempDir(&log_output_path);
686 log_output_path = log_output_path.AppendASCII("sim-events.gz");
687 }
688 base::FilePath metrics_output_path = cmd->GetSwitchValuePath(
689 media::cast::kMetricsOutputPath);
690 base::FilePath yuv_output_path = cmd->GetSwitchValuePath(
691 media::cast::kYuvOutputPath);
692 std::string sim_id = cmd->GetSwitchValueASCII(media::cast::kSimulationId);
693
694 NetworkSimulationModel model = media::cast::LoadModel(
695 cmd->GetSwitchValuePath(media::cast::kModelPath));
696
697 base::DictionaryValue values;
698 values.SetBoolean("sim", true);
699 values.SetString("sim-id", sim_id);
700
701 std::string extra_data;
702 base::JSONWriter::Write(values, &extra_data);
703
704 // Run.
705 media::cast::RunSimulation(source_path, log_output_path, metrics_output_path,
706 yuv_output_path, extra_data, model);
707 return 0;
708 }
709