1#!/usr/bin/env python
2# Copyright 2016 the V8 project authors. All rights reserved.
3# Use of this source code is governed by a BSD-style license that can be
4# found in the LICENSE file.
5'''
6Usage: callstats.py [-h] <command> ...
7
8Optional arguments:
9  -h, --help  show this help message and exit
10
11Commands:
12  run         run chrome with --runtime-call-stats and generate logs
13  stats       process logs and print statistics
14  json        process logs from several versions and generate JSON
15  help        help information
16
For each command, you can try ./callstats.py help <command>.
18'''
19
20import argparse
21import json
22import os
23import re
24import shutil
25import subprocess
26import sys
27import tempfile
28import operator
29
30import numpy
31import scipy
32import scipy.stats
33from math import sqrt
34
35
36# Run benchmarks.
37
def print_command(cmd_args):
  """Pretty-print a command line so it can be copy-pasted into a shell.

  Args:
    cmd_args: list of command-line argument strings.
  """
  def fix_for_printing(arg):
    # Quote the value of --flag=value when the value contains a space or
    # itself starts with a dash, so the printed line stays unambiguous.
    m = re.match(r'^--([^=]+)=(.*)$', arg)
    if m and (' ' in m.group(2) or m.group(2).startswith('-')):
      arg = "--{}='{}'".format(m.group(1), m.group(2))
    elif ' ' in arg:
      arg = "'{}'".format(arg)
    return arg
  # Single-argument print() behaves identically under Python 2 and 3 and
  # matches the print() calls already used elsewhere in this file.
  print(" ".join(map(fix_for_printing, cmd_args)))
47
48
def start_replay_server(args, sites, discard_output=True):
  """Start the web-page-replay server for the given sites.

  Writes a temporary injection script (removed later by stop_replay_server)
  and launches args.replay_bin on ports derived from args.port_offset.

  Args:
    args: parsed command-line arguments (uses refresh, port_offset,
        replay_bin, replay_wpr).
    sites: list of site dicts passed to generate_injection().
    discard_output: when True, the server's stdout/stderr go to /dev/null.

  Returns:
    dict with 'process' (the server's subprocess.Popen handle) and
    'injection' (path of the temporary injection script).
  """
  with tempfile.NamedTemporaryFile(prefix='callstats-inject-', suffix='.js',
                                   mode='wt', delete=False) as f:
    injection = f.name
    generate_injection(f, sites, args.refresh)
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  cmd_args = [
      args.replay_bin,
      "--port=%s" % http_port,
      "--ssl_port=%s" % https_port,
      "--no-dns_forwarding",
      "--use_closest_match",
      "--no-diff_unknown_requests",
      "--inject_scripts=deterministic.js,{}".format(injection),
      args.replay_wpr,
  ]
  # Single-argument print() is identical under Python 2 and 3, matching the
  # print() calls used elsewhere in this file.
  print("=" * 80)
  print_command(cmd_args)
  if discard_output:
    with open(os.devnull, 'w') as null:
      server = subprocess.Popen(cmd_args, stdout=null, stderr=null)
  else:
    server = subprocess.Popen(cmd_args)
  print("RUNNING REPLAY SERVER: %s with PID=%s" % (args.replay_bin, server.pid))
  print("=" * 80)
  return {'process': server, 'injection': injection}
76
77
def stop_replay_server(server):
  """Terminate the replay server and delete its temporary injection script.

  Args:
    server: dict returned by start_replay_server().
  """
  process = server['process']
  print("SHUTTING DOWN REPLAY SERVER %s" % process.pid)
  process.terminate()
  os.remove(server['injection'])
82
83
def generate_injection(f, sites, refreshes=0):
  """Write the JavaScript injection script for the replay server to f.

  The script schedules a per-site timeout that dumps the runtime call stats
  (via %GetAndResetRuntimeCallStats) and optionally reloads the page while
  the refresh counter stored in sessionStorage is positive.

  Args:
    f: writable text file object.
    sites: list of site dicts; each has 'url' or 'regexp' and optionally
        'timeout' or 'timeline'.
    refreshes: number of page refreshes per iteration.
  """
  template_head = """\
(function() {
  var s = window.sessionStorage.getItem("refreshCounter");
  var refreshTotal = """
  template_mid = """;
  var refreshCounter = s ? parseInt(s) : refreshTotal;
  var refreshId = refreshTotal - refreshCounter;
  if (refreshCounter > 0) {
    window.sessionStorage.setItem("refreshCounter", refreshCounter-1);
  }
  function match(url, item) {
    if ('regexp' in item) { return url.match(item.regexp) !== null };
    var url_wanted = item.url;
    /* Allow automatic redirections from http to https. */
    if (url_wanted.startsWith("http://") && url.startsWith("https://")) {
      url_wanted = "https://" + url_wanted.substr(7);
    }
    return url.startsWith(url_wanted);
  };
  function onLoad(url) {
    for (var item of sites) {
      if (!match(url, item)) continue;
      var timeout = 'timeline' in item ? 2000 * item.timeline
                  : 'timeout'  in item ? 1000 * (item.timeout - 3)
                  : 10000;
      console.log("Setting time out of " + timeout + " for: " + url);
      window.setTimeout(function() {
        console.log("Time is out for: " + url);
        var msg = "STATS: (" + refreshId + ") " + url;
        %GetAndResetRuntimeCallStats(1, msg);
        if (refreshCounter > 0) {
          console.log(
              "Refresh counter is " + refreshCounter + ", refreshing: " + url);
          window.location.reload();
        }
      }, timeout);
      return;
    }
    console.log("Ignoring: " + url);
  };
  var sites =
    """
  template_tail = """;
  onLoad(window.location.href);
})();"""
  # Equivalent of Python 2's `print >> f, a, b, ...`: stringified arguments
  # joined by single spaces and terminated with a newline.  This form also
  # works under Python 3.
  f.write(" ".join([template_head, str(refreshes), template_mid,
                    json.dumps(sites), template_tail]) + "\n")
128
def get_chrome_flags(js_flags, user_data_dir, arg_delimiter=""):
  """Build the base list of chrome flags for a benchmarking run.

  Args:
    js_flags: string of V8 flags passed through --js-flags.
    user_data_dir: profile directory for this run.
    arg_delimiter: optional quote character wrapped around values that are
        meant for copy-pasting into a shell.
  """
  quoted_js_flags = "{}{}{}".format(arg_delimiter, js_flags, arg_delimiter)
  quoted_data_dir = "{}{}{}".format(arg_delimiter, user_data_dir,
                                    arg_delimiter)
  return [
      "--no-default-browser-check",
      "--no-sandbox",
      "--disable-translate",
      "--enable-benchmarking",
      "--enable-stats-table",
      "--js-flags=" + quoted_js_flags,
      "--no-first-run",
      "--user-data-dir=" + quoted_data_dir,
  ]
141
def get_chrome_replay_flags(args, arg_delimiter=""):
  """Build the extra chrome flags that route traffic to the replay server."""
  http_port = 4080 + args.port_offset
  https_port = 4443 + args.port_offset
  # Map all HTTP/HTTPS traffic onto the local replay server ports.
  resolver_rules = ("--host-resolver-rules=%sMAP *:80 localhost:%s, "
                    "MAP *:443 localhost:%s, "
                    "EXCLUDE localhost%s") % (arg_delimiter, http_port,
                                              https_port, arg_delimiter)
  return [
      resolver_rules,
      "--ignore-certificate-errors",
      "--disable-seccomp-sandbox",
      "--disable-web-security",
      "--reduce-security-for-testing",
      "--allow-insecure-localhost",
    ]
157
def run_site(site, domain, args, timeout=None):
  """Run chrome on one site (possibly repeatedly) and write stats log files.

  Results go to "<domain>.txt", or "<domain>#<count>.txt" when --repeat is
  given.  Each repetition is retried (up to args.retries times, forever when
  None) until chrome produced a non-empty stats file or was stopped cleanly.
  """
  print "="*80
  print "RUNNING DOMAIN %s" % domain
  print "="*80
  # One result file per repetition when --repeat is used.
  result_template = "{domain}#{count}.txt" if args.repeat else "{domain}.txt"
  count = 0
  if timeout is None: timeout = args.timeout
  if args.replay_wpr:
    # Each refresh replays the page again, so scale the timeout accordingly.
    timeout *= 1 + args.refresh
    timeout += 1
  retries_since_good_run = 0
  while count == 0 or args.repeat is not None and count < args.repeat:
    count += 1
    result = result_template.format(domain=domain, count=count)
    retries = 0
    while args.retries is None or retries < args.retries:
      retries += 1
      try:
        if args.user_data_dir:
          user_data_dir = args.user_data_dir
        else:
          # Fresh throw-away profile per attempt; removed in the finally.
          user_data_dir = tempfile.mkdtemp(prefix="chr_")
        js_flags = "--runtime-call-stats --noconcurrent-recompilation"
        if args.replay_wpr: js_flags += " --allow-natives-syntax"
        if args.js_flags: js_flags += " " + args.js_flags
        chrome_flags = get_chrome_flags(js_flags, user_data_dir)
        if args.replay_wpr:
          chrome_flags += get_chrome_replay_flags(args)
        else:
          chrome_flags += [ "--single-process", ]
        if args.chrome_flags:
          chrome_flags += args.chrome_flags.split()
        # The `timeout` utility kills chrome after `timeout` seconds.
        cmd_args = [
            "timeout", str(timeout),
            args.with_chrome
        ] + chrome_flags + [ site ]
        print "- " * 40
        print_command(cmd_args)
        print "- " * 40
        with open(result, "wt") as f:
          with open(args.log_stderr or os.devnull, 'at') as err:
            status = subprocess.call(cmd_args, stdout=f, stderr=err)
        # 124 means timeout killed chrome, 0 means the user was bored first!
        # If none of these two happened, then chrome apparently crashed, so
        # it must be called again.
        if status != 124 and status != 0:
          print("CHROME CRASHED, REPEATING RUN");
          continue
        # If the stats file is empty, chrome must be called again.
        if os.path.isfile(result) and os.path.getsize(result) > 0:
          if args.print_url:
            with open(result, "at") as f:
              print >> f
              print >> f, "URL: {}".format(site)
          retries_since_good_run = 0
          break
        # Exponential back-off: grow the timeout after each empty result
        # (capped at +2**5 seconds per retry).
        if retries_since_good_run < 6:
          timeout += 2 ** retries_since_good_run
          retries_since_good_run += 1
        print("EMPTY RESULT, REPEATING RUN ({})".format(
            retries_since_good_run));
      finally:
        if not args.user_data_dir:
          shutil.rmtree(user_data_dir)
222
223
def read_sites_file(args):
  """Load benchmark sites from args.sites_file.

  The file is either a JSON list of site dicts, or (as a fallback) a plain
  text file with one URL per line; blank lines and '#' comment lines are
  skipped.  Exits via args.error when the file cannot be read.
  """
  try:
    sites = []
    try:
      with open(args.sites_file, "rt") as f:
        for entry in json.load(f):
          if 'timeout' not in entry:
            # This is more-or-less arbitrary.
            entry['timeout'] = int(1.5 * entry['timeline'] + 7)
          # Never exceed the global timeout.
          entry['timeout'] = min(entry['timeout'], args.timeout)
          sites.append(entry)
    except ValueError:
      # Not JSON: treat the file as a plain list of URLs.
      with open(args.sites_file, "rt") as f:
        for raw_line in f:
          url = raw_line.strip()
          if not url or url.startswith('#'): continue
          sites.append({'url': url, 'timeout': args.timeout})
    return sites
  except IOError as e:
    args.error("Cannot read from {}. {}.".format(args.sites_file, e.strerror))
    sys.exit(1)
245
246
def read_sites(args):
  """Return the list of site entries ({'url', 'timeout', ...}) to benchmark."""
  if not args.sites_file:
    # Positional URLs on the command line, all with the global timeout.
    return [{'url': url, 'timeout': args.timeout} for url in args.sites]
  return read_sites_file(args)
252
def do_run(args):
  """Entry point for the 'run' command: benchmark all configured sites."""
  sites = read_sites(args)
  replay_server = start_replay_server(args, sites) if args.replay_wpr else None
  # Disambiguate domains, if needed.
  L = []  # entries of [site, domain, disambiguation-count-or-None, timeout]
  domains = {}  # domain -> its first entry, then an int counter once repeated
  for item in sites:
    site = item['url']
    domain = None
    if args.domain:
      domain = args.domain
    elif 'domain' in item:
      domain = item['domain']
    else:
      # Fall back to the host part of the URL.
      m = re.match(r'^(https?://)?([^/]+)(/.*)?$', site)
      if not m:
        args.error("Invalid URL {}.".format(site))
        continue
      domain = m.group(2)
    entry = [site, domain, None, item['timeout']]
    if domain not in domains:
      domains[domain] = entry
    else:
      # Second occurrence of this domain: retroactively number the first
      # entry and switch domains[domain] from the entry to an int counter.
      if not isinstance(domains[domain], int):
        domains[domain][2] = 1
        domains[domain] = 1
      domains[domain] += 1
      entry[2] = domains[domain]
    L.append(entry)
  try:
    # Run them.
    for site, domain, count, timeout in L:
      if count is not None: domain = "{}%{}".format(domain, count)
      # NOTE(review): under Python 2 this prints the tuple repr — confirm
      # whether a formatted line was intended here.
      print(site, domain, timeout)
      run_site(site, domain, args, timeout)
  finally:
    if replay_server:
      stop_replay_server(replay_server)
291
292
def do_run_replay_server(args):
  """Entry point for the 'replay' command: run the replay server by itself."""
  sites = read_sites(args)
  separator = "- " * 40
  print(separator)
  print("Available URLs:")
  for entry in sites:
    print("    " + entry['url'])
  print(separator)
  print("Launch chromium with the following commands for debugging:")
  flags = get_chrome_flags("--runtime-call-stats --allow-natives-syntax",
                           "/var/tmp/`date +%s`", '"')
  flags += get_chrome_replay_flags(args, "'")
  print("    $CHROMIUM_DIR/out/Release/chrome " + (" ".join(flags)) + " <URL>")
  print(separator)
  replay_server = start_replay_server(args, sites, discard_output=False)
  try:
    # Block until the server exits (or the user interrupts).
    replay_server['process'].wait()
  finally:
    stop_replay_server(replay_server)
311
312
313# Calculate statistics.
314
def statistics(data):
  """Compute summary statistics for a list of samples.

  Returns:
    dict with 'samples', 'average', 'median', 'stddev', 'min', 'max' and a
    95% Student-t confidence interval 'ci' ('abs', 'low', 'high', 'perc').
  """
  n = len(data)
  average = numpy.average(data)
  median = numpy.median(data)
  minimum = numpy.min(data)
  maximum = numpy.max(data)
  if n > 1:
    # Sample standard deviation: delta degrees of freedom (ddof) = 1, so the
    # divisor used internally is n - 1.
    stddev = numpy.std(data, ddof=1)
    # Endpoints of the t-distribution range containing 95% of the mass.
    t_bounds = scipy.stats.t.interval(0.95, n - 1)
    # Center the confidence interval on the mean.
    ci = {
        'abs': t_bounds[1] * stddev / sqrt(n),
        'low': average + t_bounds[0] * stddev / sqrt(n),
        'high': average + t_bounds[1] * stddev / sqrt(n)
    }
  else:
    stddev = 0
    ci = {'abs': 0, 'low': average, 'high': average}
  if abs(stddev) > 0.0001 and abs(average) > 0.0001:
    ci['perc'] = t_bounds[1] * stddev / sqrt(n) / average * 100
  else:
    ci['perc'] = 0
  return {'samples': n, 'average': average, 'median': median,
          'stddev': stddev, 'min': minimum, 'max': maximum, 'ci': ci}
343
344
def _group_sum(entries, groups, predicate):
  """Sum time/count of every group whose name satisfies predicate."""
  data = {'time': 0, 'count': 0}
  for group_name, _ in groups:
    if not predicate(group_name): continue
    data['time'] += entries[group_name]['time']
    data['count'] += entries[group_name]['count']
  return data


def read_stats(path, domain, args):
  """Parse one runtime-call-stats log file and accumulate it into `domain`.

  For each entry the summed time and count of this file are appended to the
  'time_list' and 'count_list' of the matching key in `domain`, so repeated
  calls build up one sample per log file.

  Args:
    path: log file produced by run_site().
    domain: dict of per-entry {'time_list': [...], 'count_list': [...]},
        updated in place.
    args: parsed command-line arguments (uses args.aggregate).
  """
  groups = []
  if args.aggregate:
    # Order matters: each entry is counted towards the FIRST matching group
    # only; Group-Runtime is the catch-all.
    groups = [
        ('Group-IC', re.compile(".*IC_.*")),
        ('Group-Optimize',
         re.compile("StackGuard|.*Optimize.*|.*Deoptimize.*|Recompile.*")),
        ('Group-CompileBackground', re.compile("(.*CompileBackground.*)")),
        ('Group-Compile', re.compile("(^Compile.*)|(.*_Compile.*)")),
        ('Group-ParseBackground', re.compile(".*ParseBackground.*")),
        ('Group-Parse', re.compile(".*Parse.*")),
        ('Group-Callback', re.compile(".*Callback.*")),
        ('Group-API', re.compile(".*API.*")),
        ('Group-GC-Custom', re.compile("GC_Custom_.*")),
        ('Group-GC-Background', re.compile(".*GC.*BACKGROUND.*")),
        ('Group-GC', re.compile("GC_.*|AllocateInTargetSpace")),
        ('Group-JavaScript', re.compile("JS_Execution")),
        ('Group-Runtime', re.compile(".*"))]
  with open(path, "rt") as f:
    # Process the whole file and sum repeating entries.
    entries = { 'Sum': {'time': 0, 'count': 0} }
    for group_name, regexp in groups:
      entries[group_name] = { 'time': 0, 'count': 0 }
    for line in f:
      line = line.strip()
      # Discard headers and footers.
      if not line: continue
      if line.startswith("Runtime Function"): continue
      if line.startswith("===="): continue
      if line.startswith("----"): continue
      if line.startswith("URL:"): continue
      if line.startswith("STATS:"): continue
      # We have a regular line: "<name> <time>ms <time%> <count> <count%>".
      fields = line.split()
      key = fields[0]
      time = float(fields[1].replace("ms", ""))
      count = int(fields[3])
      if key not in entries: entries[key] = { 'time': 0, 'count': 0 }
      entries[key]['time'] += time
      entries[key]['count'] += count
      # We calculate the sum, if it's not the "total" line.
      if key != "Total":
        entries['Sum']['time'] += time
        entries['Sum']['count'] += count
        for group_name, regexp in groups:
          if not regexp.match(key): continue
          entries[group_name]['time'] += time
          entries[group_name]['count'] += count
          break
    # Calculate the V8-Total (all groups except Callback).
    entries['Group-Total-V8'] = _group_sum(
        entries, groups, lambda name: name != 'Group-Callback')
    # Calculate the Parse-Total group.
    entries['Group-Parse-Total'] = _group_sum(
        entries, groups, lambda name: name.startswith('Group-Parse'))
    # Calculate the Compile-Total group.
    entries['Group-Compile-Total'] = _group_sum(
        entries, groups, lambda name: name.startswith('Group-Compile'))
    # Append the sums as single entries to domain.
    for key in entries:
      if key not in domain: domain[key] = { 'time_list': [], 'count_list': [] }
      domain[key]['time_list'].append(entries[key]['time'])
      domain[key]['count_list'].append(entries[key]['count'])
420
421
def print_stats(S, args):
  """Print a sorted table of the entries in S with averages and confidence.

  S maps entry names to dicts that already contain 'time_stat' and
  'count_stat' (as produced by statistics()).
  """
  # Sort by ascending/descending time average, then by ascending/descending
  # count average, then by ascending name.
  def sort_asc_func(item):
    return (item[1]['time_stat']['average'],
            item[1]['count_stat']['average'],
            item[0])
  def sort_desc_func(item):
    return (-item[1]['time_stat']['average'],
            -item[1]['count_stat']['average'],
            item[0])
  # Sorting order is in the command-line arguments.
  sort_func = sort_asc_func if args.sort == "asc" else sort_desc_func
  # Possibly limit how many elements to print.
  L = [item for item in sorted(S.items(), key=sort_func)
       if item[0] not in ["Total", "Sum"]]
  N = len(L)
  if args.limit == 0:
    low, high = 0, N
  elif args.sort == "desc":
    low, high = 0, args.limit
  else:
    low, high = N-args.limit, N
  # NOTE(review): when args.limit exceeds N, `low` goes negative and the
  # range below indexes L with negative values, printing some entries twice
  # — confirm whether that is acceptable for small tables.
  # How to print entries.
  def print_entry(key, value):
    def stats(s, units=""):
      conf = "{:0.1f}({:0.2f}%)".format(s['ci']['abs'], s['ci']['perc'])
      return "{:8.1f}{} +/- {:15s}".format(s['average'], units, conf)
    print "{:>50s}  {}  {}".format(
      key,
      stats(value['time_stat'], units="ms"),
      stats(value['count_stat'])
    )
  # Print and calculate partial sums, if necessary.
  for i in range(low, high):
    print_entry(*L[i])
    if args.totals and args.limit != 0 and not args.aggregate:
      # Accumulate element-wise partial sums over the printed rows only.
      if i == low:
        partial = { 'time_list': [0] * len(L[i][1]['time_list']),
                    'count_list': [0] * len(L[i][1]['count_list']) }
      assert len(partial['time_list']) == len(L[i][1]['time_list'])
      assert len(partial['count_list']) == len(L[i][1]['count_list'])
      for j, v in enumerate(L[i][1]['time_list']):
        partial['time_list'][j] += v
      for j, v in enumerate(L[i][1]['count_list']):
        partial['count_list'][j] += v
  # Print totals, if necessary.
  if args.totals:
    print '-' * 80
    if args.limit != 0 and not args.aggregate:
      partial['time_stat'] = statistics(partial['time_list'])
      partial['count_stat'] = statistics(partial['count_list'])
      print_entry("Partial", partial)
    print_entry("Sum", S["Sum"])
    print_entry("Total", S["Total"])
477
478
def do_stats(args):
  """Entry point for the 'stats' command: parse logs and print statistics."""
  domains = {}
  for path in args.logfiles:
    # "<domain>#<count>.txt" repetition files all map onto the same domain.
    filename = os.path.basename(path)
    m = re.match(r'^([^#]+)(#.*)?$', filename)
    domain = m.group(1)
    domains.setdefault(domain, {})
    read_stats(path, domains[domain], args)
  if args.aggregate:
    create_total_page_stats(domains, args)
  for i, domain in enumerate(sorted(domains)):
    if len(domains) > 1:
      # Print a header when more than one domain is shown.
      if i > 0: print("")
      print("{}:".format(domain))
      print('=' * 80)
    domain_stats = domains[domain]
    for key in domain_stats:
      domain_stats[key]['time_stat'] = \
          statistics(domain_stats[key]['time_list'])
      domain_stats[key]['count_stat'] = \
          statistics(domain_stats[key]['count_list'])
    print_stats(domain_stats, args)
501
502
503# Create a Total page with all entries summed up.
def create_total_page_stats(domains, args):
  """Add a synthetic 'Total' domain aggregating all non-excluded domains."""
  total = {}
  def accumulate(target, key, source):
    # Element-wise addition; grow the target list when the source is longer.
    acc = target[key]
    for index, value in enumerate(source[key]):
      if index >= len(acc):
        acc.extend([0] * (index - len(acc) + 1))
      if value is not None:
        acc[index] += value
  # Exclude adwords and speedometer pages from the aggregate total, since
  # adwords dominates execution time and speedometer is measured elsewhere.
  excluded_domains = ['adwords.google.com', 'speedometer-angular',
                      'speedometer-jquery', 'speedometer-backbone',
                      'speedometer-ember', 'speedometer-vanilla']
  # Sum up all the entries/metrics from all non-excluded domains.
  for domain, entries in domains.items():
    if domain in excluded_domains:
      continue
    for key, domain_stats in entries.items():
      if key in total:
        accumulate(total[key], 'time_list', domain_stats)
        accumulate(total[key], 'count_list', domain_stats)
      else:
        total[key] = {'time_list': list(domain_stats['time_list']),
                      'count_list': list(domain_stats['count_list'])}
  # Add a new "Total" page containing the summed up metrics.
  domains['Total'] = total
532
533
534# Generate JSON file.
535
def do_json(args):
  """Entry point for the 'json' command: merge logs from several versions.

  Walks every directory in args.logdirs; each sub-directory name is treated
  as a version and each "*.txt" file in it as one log for a domain.  Prints
  the aggregated result as a compact JSON document to stdout.
  """
  versions = {}
  for path in args.logdirs:
    if os.path.isdir(path):
      for root, dirs, files in os.walk(path):
        version = os.path.basename(root)
        if version not in versions: versions[version] = {}
        for filename in files:
          if filename.endswith(".txt"):
            # "<domain>#<count>.txt" repetitions map onto the same domain.
            m = re.match(r'^([^#]+)(#.*)?\.txt$', filename)
            domain = m.group(1)
            if domain not in versions[version]: versions[version][domain] = {}
            read_stats(os.path.join(root, filename),
                       versions[version][domain], args)
  for version, domains in versions.items():
    if args.aggregate:
      create_total_page_stats(domains, args)
    for domain, entries in domains.items():
      stats = []
      for name, value in entries.items():
        # We don't want the calculated sum in the JSON file.
        if name == "Sum": continue
        entry = [name]
        for x in ['time_list', 'count_list']:
          s = statistics(entries[name][x])
          entry.append(round(s['average'], 1))
          entry.append(round(s['ci']['abs'], 1))
          entry.append(round(s['ci']['perc'], 2))
        stats.append(entry)
      domains[domain] = stats
  # Single-argument print() behaves the same under Python 2 and 3, matching
  # the print() calls used elsewhere in this file.
  print(json.dumps(versions, separators=(',', ':')))
567
568
569# Help.
570
def do_help(parser, subparsers, args):
  """Entry point for the 'help' command: print general or per-command help."""
  command = args.help_cmd
  if not command:
    # No command given: show the top-level help.
    parser.print_help()
    return
  if command in subparsers:
    subparsers[command].print_help()
  else:
    args.error("Unknown command '{}'".format(command))
579
580
581# Main program, parse command line and execute.
582
def coexist(*l):
  """Return True when the given values are either all set or all unset."""
  return not any(l) or all(l)
586
def main():
  """Parse the command line and dispatch to the selected sub-command."""
  parser = argparse.ArgumentParser()
  subparser_adder = parser.add_subparsers(title="commands", dest="command",
                                          metavar="<command>")
  subparsers = {}
  # Command: run.
  subparsers["run"] = subparser_adder.add_parser(
      "run", help="Replay websites and collect runtime stats data.")
  subparsers["run"].set_defaults(
      func=do_run, error=subparsers["run"].error)
  subparsers["run"].add_argument(
      "--chrome-flags", type=str, default="",
      help="specify additional chrome flags")
  subparsers["run"].add_argument(
      "--js-flags", type=str, default="",
      help="specify additional V8 flags")
  subparsers["run"].add_argument(
      "-u", "--user-data-dir", type=str, metavar="<path>",
      help="specify user data dir (default is temporary)")
  subparsers["run"].add_argument(
      "-c", "--with-chrome", type=str, metavar="<path>",
      default="/usr/bin/google-chrome",
      help="specify chrome executable to use")
  subparsers["run"].add_argument(
      "-r", "--retries", type=int, metavar="<num>",
      help="specify retries if website is down (default: forever)")
  subparsers["run"].add_argument(
      "--no-url", dest="print_url", action="store_false", default=True,
      help="do not include url in statistics file")
  subparsers["run"].add_argument(
      "--domain", type=str, default="",
      help="specify the output file domain name")
  subparsers["run"].add_argument(
      "-n", "--repeat", type=int, metavar="<num>",
      help="specify iterations for each website (default: once)")

  # Arguments shared by the 'run' and 'replay' commands.
  def add_replay_args(subparser):
    subparser.add_argument(
        "-k", "--refresh", type=int, metavar="<num>", default=0,
        help="specify refreshes for each iteration (default: 0)")
    subparser.add_argument(
        "--replay-wpr", type=str, metavar="<path>",
        help="use the specified web page replay (.wpr) archive")
    subparser.add_argument(
        "--replay-bin", type=str, metavar="<path>",
        help="specify the replay.py script typically located in " \
             "$CHROMIUM/src/third_party/webpagereplay/replay.py")
    subparser.add_argument(
        "-f", "--sites-file", type=str, metavar="<path>",
        help="specify file containing benchmark websites")
    subparser.add_argument(
        "-t", "--timeout", type=int, metavar="<seconds>", default=60,
        help="specify seconds before chrome is killed")
    subparser.add_argument(
        "-p", "--port-offset", type=int, metavar="<offset>", default=0,
        help="specify the offset for the replay server's default ports")
    subparser.add_argument(
        "-l", "--log-stderr", type=str, metavar="<path>",
        help="specify where chrome's stderr should go (default: /dev/null)")
    subparser.add_argument(
        "sites", type=str, metavar="<URL>", nargs="*",
        help="specify benchmark website")
  add_replay_args(subparsers["run"])

  # Command: replay-server
  subparsers["replay"] = subparser_adder.add_parser(
      "replay", help="Run the replay server for debugging purposes")
  subparsers["replay"].set_defaults(
      func=do_run_replay_server, error=subparsers["replay"].error)
  add_replay_args(subparsers["replay"])

  # Command: stats.
  subparsers["stats"] = subparser_adder.add_parser(
      "stats", help="Analyze the results file created by the 'run' command.")
  subparsers["stats"].set_defaults(
      func=do_stats, error=subparsers["stats"].error)
  subparsers["stats"].add_argument(
      "-l", "--limit", type=int, metavar="<num>", default=0,
      help="limit how many items to print (default: none)")
  subparsers["stats"].add_argument(
      "-s", "--sort", choices=["asc", "desc"], default="asc",
      help="specify sorting order (default: ascending)")
  subparsers["stats"].add_argument(
      "-n", "--no-total", dest="totals", action="store_false", default=True,
      help="do not print totals")
  subparsers["stats"].add_argument(
      "logfiles", type=str, metavar="<logfile>", nargs="*",
      help="specify log files to parse")
  subparsers["stats"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: json.
  subparsers["json"] = subparser_adder.add_parser(
      "json", help="Collect results file created by the 'run' command into "
          "a single json file.")
  subparsers["json"].set_defaults(
      func=do_json, error=subparsers["json"].error)
  subparsers["json"].add_argument(
      "logdirs", type=str, metavar="<logdir>", nargs="*",
      help="specify directories with log files to parse")
  subparsers["json"].add_argument(
      "--aggregate", dest="aggregate", action="store_true", default=False,
      help="Create aggregated entries. Adds Group-* entries at the toplevel. " \
      "Additionally creates a Total page with all entries.")

  # Command: help.
  subparsers["help"] = subparser_adder.add_parser(
      "help", help="help information")
  subparsers["help"].set_defaults(
      func=lambda args: do_help(parser, subparsers, args),
      error=subparsers["help"].error)
  subparsers["help"].add_argument(
      "help_cmd", type=str, metavar="<command>", nargs="?",
      help="command for which to display help")

  # Execute the command.
  args = parser.parse_args()
  setattr(args, 'script_path', os.path.dirname(sys.argv[0]))
  if args.command == "run" and coexist(args.sites_file, args.sites):
    # coexist() is True when both or neither are set; exactly one of
    # --sites-file and positional URLs must be given.
    args.error("use either option --sites-file or site URLs")
    sys.exit(1)
  elif args.command == "run" and not coexist(args.replay_wpr, args.replay_bin):
    args.error("options --replay-wpr and --replay-bin must be used together")
    sys.exit(1)
  else:
    args.func(args)
715
if __name__ == "__main__":
  # main() has no return statement, so this exits with status 0 on success.
  sys.exit(main())
718