# Copyright 2018 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# for py2/py3 compatibility
from __future__ import print_function
from __future__ import absolute_import

import datetime
import json
import os
import platform
import sys
import time

from . import base
from . import util


def print_failure_header(test):
  if test.output_proc.negative:
    negative_marker = '[negative] '
  else:
    negative_marker = ''
  print("=== %(label)s %(negative)s===" % {
    'label': test,
    'negative': negative_marker,
  })


class ResultsTracker(base.TestProcObserver):
32  """Tracks number of results and stops to run tests if max_failures reached."""
  def __init__(self, max_failures):
    super(ResultsTracker, self).__init__()
    self._requirement = base.DROP_OUTPUT

    self.failed = 0
    self.remaining = 0
    self.total = 0
    self.max_failures = max_failures

  def _on_next_test(self, test):
    self.total += 1
    self.remaining += 1

  def _on_result_for(self, test, result):
    self.remaining -= 1
    if result.has_unexpected_output:
      self.failed += 1
      if self.max_failures and self.failed >= self.max_failures:
        print('>>> Too many failures, exiting...')
        self.stop()


class ProgressIndicator(base.TestProcObserver):
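  """Base class for observers that report test progress.

  Subclasses get each result via _on_result_for() and may print a summary in
  finished(); configure() stores the command line options for later use.
  """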

  def __init__(self):
    super(ProgressIndicator, self).__init__()
    self.options = None

  def finished(self):
    pass

  def configure(self, options):
    self.options = options


class SimpleProgressIndicator(ProgressIndicator):
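  """Collects tests with unexpected output and dumps their stdout/stderr
  together with a pass/fail summary once the whole run has finished."""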
  def __init__(self):
    super(SimpleProgressIndicator, self).__init__()
    self._requirement = base.DROP_PASS_OUTPUT

    self._failed = []

  def _on_result_for(self, test, result):
    # TODO(majeski): Support for dummy/grouped results
    if result.has_unexpected_output:
      self._failed.append((test, result))

  def finished(self):
    crashed = 0
    print()
    for test, result in self._failed:
      print_failure_header(test)
      if result.output.stderr:
        print("--- stderr ---")
        print(result.output.stderr.strip())
      if result.output.stdout:
        print("--- stdout ---")
        print(result.output.stdout.strip())
      print("Command: %s" % result.cmd.to_string())
      if result.output.HasCrashed():
        print("exit code: %s" % result.output.exit_code_string)
        print("--- CRASHED ---")
        crashed += 1
      if result.output.HasTimedOut():
        print("--- TIMEOUT ---")
    if len(self._failed) == 0:
      print("===")
      print("=== All tests succeeded")
      print("===")
    else:
      print()
      print("===")
      print("=== %i tests failed" % len(self._failed))
      if crashed > 0:
        print("=== %i tests CRASHED" % crashed)
      print("===")


class StreamProgressIndicator(ProgressIndicator):
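  """Prints one line per finished test with its outcome, e.g. PASS, FAIL,
  CRASH, TIMEOUT or UNEXPECTED PASS."""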
  def __init__(self):
    super(StreamProgressIndicator, self).__init__()
    self._requirement = base.DROP_PASS_OUTPUT

  def _on_result_for(self, test, result):
    if not result.has_unexpected_output:
      self.print('PASS', test)
    elif result.output.HasCrashed():
      self.print("CRASH", test)
    elif result.output.HasTimedOut():
      self.print("TIMEOUT", test)
    else:
      if test.is_fail:
        self.print("UNEXPECTED PASS", test)
      else:
        self.print("FAIL", test)

  def print(self, prefix, test):
    print('%s: %s' % (prefix, test))
    sys.stdout.flush()


class VerboseProgressIndicator(SimpleProgressIndicator):
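  """Prints a line for every single result as it arrives and emits periodic
  heartbeat output so the run is not killed by an output timeout."""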
  def __init__(self):
    super(VerboseProgressIndicator, self).__init__()
    self._last_printed_time = time.time()

  def _print(self, text):
    print(text)
    sys.stdout.flush()
    self._last_printed_time = time.time()

  def _message(self, test, result):
    # TODO(majeski): Support for dummy/grouped results
    if result.has_unexpected_output:
      if result.output.HasCrashed():
        outcome = 'CRASH'
      else:
        outcome = 'FAIL'
    else:
      outcome = 'pass'
    return '%s %s: %s' % (
      test, test.variant or 'default', outcome)

  def _on_result_for(self, test, result):
    super(VerboseProgressIndicator, self)._on_result_for(test, result)
    self._print(self._message(test, result))

  # TODO(machenbach): Remove this platform-specific hack and implement a proper
  # feedback channel from the workers, providing which tests are currently
  # running.
  def _print_processes_linux(self):
    if platform.system() == 'Linux':
      self._print('List of processes:')
      for pid, cmd in util.list_processes_linux():
        # Show command with pid, but other process info cut off.
        self._print('pid: %d cmd: %s' % (pid, cmd))

  def _ensure_delay(self, delay):
    return time.time() - self._last_printed_time > delay

  def _on_heartbeat(self):
    if self._ensure_delay(30):
      # Print something every 30 seconds so we don't get killed by an output
      # timeout.
      self._print('Still working...')
      self._print_processes_linux()

  def _on_event(self, event):
    self._print(event)
    self._print_processes_linux()


class CIProgressIndicator(VerboseProgressIndicator):
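  """Verbose-style indicator for CI: records each result in the file given by
  options.ci_test_completion (when set) and prints a timestamp at least once a
  minute so the run is not killed by an output timeout."""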
  def _on_result_for(self, test, result):
    super(VerboseProgressIndicator, self)._on_result_for(test, result)
    if self.options.ci_test_completion:
      with open(self.options.ci_test_completion, "a") as f:
        f.write(self._message(test, result) + "\n")
    self._output_feedback()

  def _output_feedback(self):
    """Reduced verbosity can lead to the run being killed by an output
    timeout, so we make sure there is output at least once a minute.
    """
    if self._ensure_delay(60):
      dt = time.time()
      st = datetime.datetime.fromtimestamp(dt).strftime('%Y-%m-%d %H:%M:%S')
      self._print(st)


class DotsProgressIndicator(SimpleProgressIndicator):
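  """Prints a single character per test: '.' for a pass, 'F' for a failure,
  'C' for a crash and 'T' for a timeout, wrapping the line after 50 tests."""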
  def __init__(self):
    super(DotsProgressIndicator, self).__init__()
    self._count = 0

  def _on_result_for(self, test, result):
    super(DotsProgressIndicator, self)._on_result_for(test, result)
    # TODO(majeski): Support for dummy/grouped results
    self._count += 1
    if self._count > 1 and self._count % 50 == 1:
      sys.stdout.write('\n')
    if result.has_unexpected_output:
      if result.output.HasCrashed():
        sys.stdout.write('C')
        sys.stdout.flush()
      elif result.output.HasTimedOut():
        sys.stdout.write('T')
        sys.stdout.flush()
      else:
        sys.stdout.write('F')
        sys.stdout.flush()
    else:
      sys.stdout.write('.')
      sys.stdout.flush()


class CompactProgressIndicator(ProgressIndicator):
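  """Base class for indicators that rewrite a single status line in place and
  only dump full output for tests with unexpected results."""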
  def __init__(self, templates):
    super(CompactProgressIndicator, self).__init__()
    self._requirement = base.DROP_PASS_OUTPUT

    self._templates = templates
    self._last_status_length = 0
    self._start_time = time.time()

    # The total is provided via set_test_count(); default to 0 so that
    # _print_progress still works if the count is never set.
    self._total = 0
    self._passed = 0
    self._failed = 0

  def set_test_count(self, test_count):
    self._total = test_count

  def _on_result_for(self, test, result):
    # TODO(majeski): Support for dummy/grouped results
    if result.has_unexpected_output:
      self._failed += 1
    else:
      self._passed += 1

    self._print_progress(str(test))
    if result.has_unexpected_output:
      output = result.output
      stdout = output.stdout.strip()
      stderr = output.stderr.strip()

      self._clear_line(self._last_status_length)
      print_failure_header(test)
      if len(stdout):
        self.printFormatted('stdout', stdout)
      if len(stderr):
        self.printFormatted('stderr', stderr)
      self.printFormatted(
          'command', "Command: %s" % result.cmd.to_string(relative=True))
      if output.HasCrashed():
        self.printFormatted(
            'failure', "exit code: %s" % output.exit_code_string)
        self.printFormatted('failure', "--- CRASHED ---")
      elif output.HasTimedOut():
        self.printFormatted('failure', "--- TIMEOUT ---")
      else:
        if test.is_fail:
          self.printFormatted('failure', "--- UNEXPECTED PASS ---")
          if test.expected_failure_reason is not None:
            self.printFormatted('failure', test.expected_failure_reason)
        else:
          self.printFormatted('failure', "--- FAILED ---")

  def finished(self):
    self._print_progress('Done')
    print()

  def _print_progress(self, name):
    self._clear_line(self._last_status_length)
    elapsed = time.time() - self._start_time
    if self._total:
      progress = (self._passed + self._failed) * 100 // self._total
    else:
      progress = 0
    status = self._templates['status_line'] % {
      'passed': self._passed,
      'progress': progress,
      'failed': self._failed,
      'test': name,
      'mins': int(elapsed) // 60,
      'secs': int(elapsed) % 60
    }
    status = self._truncateStatusLine(status, 78)
    self._last_status_length = len(status)
    print(status, end='')
    sys.stdout.flush()

  def _truncateStatusLine(self, string, length):
    if length and len(string) > (length - 3):
      return string[:(length - 3)] + "..."
    else:
      return string

  def _clear_line(self, last_length):
    raise NotImplementedError()


class ColorProgressIndicator(CompactProgressIndicator):
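  """Compact indicator that uses ANSI escape codes to colorize the status
  line and the output of failing tests."""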
  def __init__(self):
    templates = {
      'status_line': ("[%(mins)02i:%(secs)02i|"
                      "\033[34m%%%(progress) 4d\033[0m|"
                      "\033[32m+%(passed) 4d\033[0m|"
                      "\033[31m-%(failed) 4d\033[0m]: %(test)s"),
      'stdout': "\033[1m%s\033[0m",
      'stderr': "\033[31m%s\033[0m",
      'failure': "\033[1;31m%s\033[0m",
      'command': "\033[33m%s\033[0m",
    }
    super(ColorProgressIndicator, self).__init__(templates)

  def printFormatted(self, format, string):
    print(self._templates[format] % string)

  def _truncateStatusLine(self, string, length):
    # Add some slack for the color control chars
    return super(ColorProgressIndicator, self)._truncateStatusLine(
        string, length + 3*9)

  def _clear_line(self, last_length):
    print("\033[1K\r", end='')


class MonochromeProgressIndicator(CompactProgressIndicator):
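  """Compact indicator that prints the status line without any ANSI color
  codes."""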
  def __init__(self):
    templates = {
      'status_line': ("[%(mins)02i:%(secs)02i|%%%(progress) 4d|"
                      "+%(passed) 4d|-%(failed) 4d]: %(test)s"),
    }
    super(MonochromeProgressIndicator, self).__init__(templates)

  def printFormatted(self, format, string):
    print(string)

  def _clear_line(self, last_length):
    print(("\r" + (" " * last_length) + "\r"), end='')


class JsonTestProgressIndicator(ProgressIndicator):
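  """Collects per-test records and the slowest tests and writes them as JSON
  to options.json_test_results once the run has finished."""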
  def __init__(self, framework_name):
    super(JsonTestProgressIndicator, self).__init__()
    # We want to drop stdout/err for all passed tests on the first try, but we
    # need to get outputs for all runs after the first one. To accommodate
    # that, reruns are set to keep the result no matter what the requirement
    # says, i.e. keep_output is set to True in the RerunProc.
    self._requirement = base.DROP_PASS_STDOUT

    self.framework_name = framework_name
    self.results = []
    self.duration_sum = 0
    self.test_count = 0

  def configure(self, options):
    super(JsonTestProgressIndicator, self).configure(options)
    self.tests = util.FixedSizeTopList(
        self.options.slow_tests_cutoff,
        key=lambda rec: rec['duration'])

  def _on_result_for(self, test, result):
    if result.is_rerun:
      self.process_results(test, result.results)
    else:
      self.process_results(test, [result])

  def process_results(self, test, results):
    for run, result in enumerate(results):
      # TODO(majeski): Support for dummy/grouped results
      output = result.output

      self._buffer_slow_tests(test, result, output, run)

      # Omit tests that run as expected on the first try.
      # Everything that happens after the first run is included in the output
      # even if it flakily passes.
      if not result.has_unexpected_output and run == 0:
        continue

      record = self._test_record(test, result, output, run)
      record.update({
          "result": test.output_proc.get_outcome(output),
          "stdout": output.stdout,
          "stderr": output.stderr,
        })
      self.results.append(record)

  def _buffer_slow_tests(self, test, result, output, run):
    def result_value(test, result, output):
      if not result.has_unexpected_output:
        return ""
      return test.output_proc.get_outcome(output)

    record = self._test_record(test, result, output, run)
    record.update({
        "result": result_value(test, result, output),
        "marked_slow": test.is_slow,
      })
    self.tests.add(record)
    self.duration_sum += record['duration']
    self.test_count += 1

  def _test_record(self, test, result, output, run):
    return {
        "name": str(test),
        "flags": result.cmd.args,
        "command": result.cmd.to_string(relative=True),
        "run": run + 1,
        "exit_code": output.exit_code,
        "expected": test.expected_outcomes,
        "duration": output.duration,
        "random_seed": test.random_seed,
        "target_name": test.get_shell(),
        "variant": test.variant,
        "variant_flags": test.variant_flags,
        "framework_name": self.framework_name,
      }

  def finished(self):
    duration_mean = None
    if self.test_count:
      duration_mean = self.duration_sum / self.test_count

    result = {
      "results": self.results,
      "slowest_tests": self.tests.as_list(),
      "duration_mean": duration_mean,
      "test_total": self.test_count,
    }

    with open(self.options.json_test_results, "w") as f:
      json.dump(result, f)