/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

use crate::NotifierEvent;
use crate::WindowWrapper;
use serde_json;
use std::collections::{HashMap, HashSet};
use std::fs::File;
use std::io::{BufRead, BufReader};
use std::io::{Read, Write};
use std::path::{Path, PathBuf};
use std::sync::mpsc::Receiver;
use crate::wrench::{Wrench, WrenchThing};
use crate::yaml_frame_reader::YamlFrameReader;
use webrender::DebugFlags;
use webrender::render_api::DebugCommand;

const COLOR_DEFAULT: &str = "\x1b[0m";
const COLOR_RED: &str = "\x1b[31m";
const COLOR_GREEN: &str = "\x1b[32m";
const COLOR_MAGENTA: &str = "\x1b[95m";

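// Collect at least this many samples per test by default, and drop this many
// of the fastest and slowest samples from each end before computing min/avg/max.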
const MIN_SAMPLE_COUNT: usize = 50;
const SAMPLE_EXCLUDE_COUNT: usize = 10;

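/// A single benchmark: the path to a YAML scene to be rendered by wrench.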
pub struct Benchmark {
    pub test: PathBuf,
}

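/// The flattened list of benchmarks parsed from a manifest file.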
pub struct BenchmarkManifest {
    pub benchmarks: Vec<Benchmark>,
}

impl BenchmarkManifest {
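    /// Parses a manifest file. Each non-empty, non-comment line is either
    /// `include <path>` (another manifest to merge, resolved relative to this
    /// one) or the relative path of a benchmark YAML file; `#` starts a comment.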
    pub fn new(manifest: &Path) -> BenchmarkManifest {
        let dir = manifest.parent().unwrap();
        let f = File::open(manifest)
            .unwrap_or_else(|_| panic!("couldn't open manifest: {}", manifest.display()));
        let file = BufReader::new(&f);

        let mut benchmarks = Vec::new();

        for line in file.lines() {
            let l = line.unwrap();

            // strip the comments
            let s = &l[0 .. l.find('#').unwrap_or(l.len())];
            let s = s.trim();
            if s.is_empty() {
                continue;
            }

            let mut items = s.split_whitespace();

            match items.next() {
                Some("include") => {
                    let include = dir.join(items.next().unwrap());

                    benchmarks.append(&mut BenchmarkManifest::new(include.as_path()).benchmarks);
                }
                Some(name) => {
                    let test = dir.join(name);
                    benchmarks.push(Benchmark { test });
                }
                _ => panic!(),
            };
        }

        BenchmarkManifest {
            benchmarks,
        }
    }
}

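/// Minimum, average and maximum of a sampled timing, in nanoseconds.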
#[derive(Clone, Serialize, Deserialize)]
struct TestProfileRange {
    min: u64,
    avg: u64,
    max: u64,
}

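/// Aggregated timings and draw call count recorded for one benchmark.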
#[derive(Clone, Serialize, Deserialize)]
struct TestProfile {
    name: String,
    backend_time_ns: TestProfileRange,
    composite_time_ns: TestProfileRange,
    paint_time_ns: TestProfileRange,
    draw_calls: usize,
}

impl TestProfile {
    fn csv_header() -> String {
        "name,\
        backend_time_ns min, avg, max,\
        composite_time_ns min, avg, max,\
        paint_time_ns min, avg, max,\
        draw_calls\n".to_string()
    }

    fn convert_to_csv(&self) -> String {
        format!("{},\
                 {},{},{},\
                 {},{},{},\
                 {},{},{},\
                 {}\n",
                self.name,
                self.backend_time_ns.min,   self.backend_time_ns.avg,   self.backend_time_ns.max,
                self.composite_time_ns.min, self.composite_time_ns.avg, self.composite_time_ns.max,
                self.paint_time_ns.min,     self.paint_time_ns.avg,     self.paint_time_ns.max,
                self.draw_calls)
    }
}

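/// A set of test profiles that can be saved to (and reloaded from) disk.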
#[derive(Serialize, Deserialize)]
struct Profile {
    tests: Vec<TestProfile>,
}

impl Profile {
    fn new() -> Profile {
        Profile { tests: Vec::new() }
    }

    fn add(&mut self, profile: TestProfile) {
        self.tests.push(profile);
    }

    fn save(&self, filename: &str, as_csv: bool) {
        let mut file = File::create(filename).unwrap();
        if as_csv {
            file.write_all(&TestProfile::csv_header().into_bytes()).unwrap();
            for test in &self.tests {
                file.write_all(&test.convert_to_csv().into_bytes()).unwrap();
            }
        } else {
            let s = serde_json::to_string_pretty(self).unwrap();
            file.write_all(&s.into_bytes()).unwrap();
            file.write_all(b"\n").unwrap();
        }
    }

    fn load(filename: &str) -> Profile {
        let mut file = File::open(filename).unwrap();
        let mut string = String::new();
        file.read_to_string(&mut string).unwrap();
        serde_json::from_str(&string).expect("Unable to load profile!")
    }

    fn build_set_and_map_of_tests(&self) -> (HashSet<String>, HashMap<String, TestProfile>) {
        let mut hash_set = HashSet::new();
        let mut hash_map = HashMap::new();

        for test in &self.tests {
            hash_set.insert(test.name.clone());
            hash_map.insert(test.name.clone(), test.clone());
        }

        (hash_set, hash_map)
    }
}

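/// Runs a set of benchmarks against a live wrench instance, recording
/// per-frame CPU and GPU profiles for each test.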
pub struct PerfHarness<'a> {
    wrench: &'a mut Wrench,
    window: &'a mut WindowWrapper,
    rx: Receiver<NotifierEvent>,
    warmup_frames: usize,
    sample_count: usize,
}

impl<'a> PerfHarness<'a> {
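    /// Creates a harness. `warmup_frames` defaults to 0 and `sample_count`
    /// defaults to `MIN_SAMPLE_COUNT` when not specified.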
    pub fn new(wrench: &'a mut Wrench,
               window: &'a mut WindowWrapper,
               rx: Receiver<NotifierEvent>,
               warmup_frames: Option<usize>,
               sample_count: Option<usize>) -> Self {
        PerfHarness {
            wrench,
            window,
            rx,
            warmup_frames: warmup_frames.unwrap_or(0usize),
            sample_count: sample_count.unwrap_or(MIN_SAMPLE_COUNT),
        }
    }

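    /// Renders every benchmark listed in the manifest and writes the collected
    /// profiles to `filename`, either as CSV or as pretty-printed JSON.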
    pub fn run(mut self, base_manifest: &Path, filename: &str, as_csv: bool) {
        let manifest = BenchmarkManifest::new(base_manifest);

        let mut profile = Profile::new();

        for t in manifest.benchmarks {
            let stats = self.render_yaml(t.test.as_path());
            profile.add(stats);
        }

        profile.save(filename, as_csv);
    }

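    /// Renders one YAML scene repeatedly until enough CPU and GPU frame
    /// profiles have been collected (ignoring the warmup frames), then reduces
    /// them to a single `TestProfile`.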
    fn render_yaml(&mut self, filename: &Path) -> TestProfile {
        let mut reader = YamlFrameReader::new(filename);

        // Loop until we get a reasonable number of CPU and GPU
        // frame profiles. Then take the mean.
        let mut cpu_frame_profiles = Vec::new();
        let mut gpu_frame_profiles = Vec::new();

        let mut debug_flags = DebugFlags::empty();
        debug_flags.set(DebugFlags::GPU_TIME_QUERIES | DebugFlags::GPU_SAMPLE_QUERIES, true);
        self.wrench.api.send_debug_cmd(DebugCommand::SetFlags(debug_flags));

        let mut frame_count = 0;

        while cpu_frame_profiles.len() < self.sample_count ||
            gpu_frame_profiles.len() < self.sample_count
        {
            reader.do_frame(self.wrench);
            self.rx.recv().unwrap();
            self.wrench.render();
            self.window.swap_buffers();
            let (cpu_profiles, gpu_profiles) = self.wrench.get_frame_profiles();
            if frame_count >= self.warmup_frames {
                cpu_frame_profiles.extend(cpu_profiles);
                gpu_frame_profiles.extend(gpu_profiles);
            }
            frame_count += 1;
        }

        // Ensure the draw calls match in every sample.
        let draw_calls = cpu_frame_profiles[0].draw_calls;
        let draw_calls_same =
            cpu_frame_profiles
                .iter()
                .all(|s| s.draw_calls == draw_calls);

        // This can be normal in cases where some elements are cached (e.g. linear
        // gradients), but print a warning in case it's not (which could make the
        // benchmark produce unexpected results).
        if !draw_calls_same {
            println!("Warning: not every frame has the same number of draw calls");
        }

        let composite_time_ns = extract_sample(&mut cpu_frame_profiles, |a| a.composite_time_ns);
        let paint_time_ns = extract_sample(&mut gpu_frame_profiles, |a| a.paint_time_ns);
        let backend_time_ns = extract_sample(&mut cpu_frame_profiles, |a| a.backend_time_ns);

        TestProfile {
            name: filename.to_str().unwrap().to_string(),
            composite_time_ns,
            paint_time_ns,
            backend_time_ns,
            draw_calls,
        }
    }
}

// Returns min, average, max, after removing the lowest and highest
// SAMPLE_EXCLUDE_COUNT samples (each).
fn extract_sample<F, T>(profiles: &mut [T], f: F) -> TestProfileRange
where
    F: Fn(&T) -> u64,
{
    let mut samples: Vec<u64> = profiles.iter().map(f).collect();
    samples.sort();
    let useful_samples = &samples[SAMPLE_EXCLUDE_COUNT .. samples.len() - SAMPLE_EXCLUDE_COUNT];
    let total_time: u64 = useful_samples.iter().sum();
    TestProfileRange {
        min: useful_samples[0],
        avg: total_time / useful_samples.len() as u64,
        max: useful_samples[useful_samples.len() - 1],
    }
}

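// Picks a highlight color for a compared value: default if it is within 10%
// of the baseline, red if it is more than 10% slower, green if it is more
// than 10% faster.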
fn select_color(base: f32, value: f32) -> &'static str {
    let tolerance = base * 0.1;
    if (value - base).abs() < tolerance {
        COLOR_DEFAULT
    } else if value > base {
        COLOR_RED
    } else {
        COLOR_GREEN
    }
}

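/// Loads two saved profiles and prints a comparison table of draw calls,
/// composite times and paint times. Tests present in only one of the two
/// profiles are listed in magenta without values.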
pub fn compare(first_filename: &str, second_filename: &str) {
    let profile0 = Profile::load(first_filename);
    let profile1 = Profile::load(second_filename);

    let (set0, map0) = profile0.build_set_and_map_of_tests();
    let (set1, map1) = profile1.build_set_and_map_of_tests();

    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");
    print!("|  Test name                                     ");
    println!("| Draw Calls   | Composite (ms)   | Paint (ms)       |");
    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");

    for test_name in set0.symmetric_difference(&set1) {
        println!(
            "| {}{:47}{}|{:14}|{:18}|{:18}|",
            COLOR_MAGENTA,
            test_name,
            COLOR_DEFAULT,
            " -",
            " -",
            " -"
        );
    }

    for test_name in set0.intersection(&set1) {
        let test0 = &map0[test_name];
        let test1 = &map1[test_name];

        let composite_time0 = test0.composite_time_ns.avg as f32 / 1000000.0;
        let composite_time1 = test1.composite_time_ns.avg as f32 / 1000000.0;

        let paint_time0 = test0.paint_time_ns.avg as f32 / 1000000.0;
        let paint_time1 = test1.paint_time_ns.avg as f32 / 1000000.0;

        let draw_calls_color = if test0.draw_calls == test1.draw_calls {
            COLOR_DEFAULT
        } else if test0.draw_calls > test1.draw_calls {
            COLOR_GREEN
        } else {
            COLOR_RED
        };

        let composite_time_color = select_color(composite_time0, composite_time1);
        let paint_time_color = select_color(paint_time0, paint_time1);

        let draw_call_string = format!(" {} -> {}", test0.draw_calls, test1.draw_calls);
        let composite_time_string = format!(" {:.2} -> {:.2}", composite_time0, composite_time1);
        let paint_time_string = format!(" {:.2} -> {:.2}", paint_time0, paint_time1);

        println!(
            "| {:47}|{}{:14}{}|{}{:18}{}|{}{:18}{}|",
            test_name,
            draw_calls_color,
            draw_call_string,
            COLOR_DEFAULT,
            composite_time_color,
            composite_time_string,
            COLOR_DEFAULT,
            paint_time_color,
            paint_time_string,
            COLOR_DEFAULT
        );
    }

    print!("+------------------------------------------------");
    println!("+--------------+------------------+------------------+");
}