#!/usr/bin/env python
#
# Copyright 2012 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
#       copyright notice, this list of conditions and the following
#       disclaimer in the documentation and/or other materials provided
#       with the distribution.
#     * Neither the name of Google Inc. nor the names of its
#       contributors may be used to endorse or promote products derived
#       from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.


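"""Presubmit checks for the V8 repository.

Runs checkdeps, cpplint on C/C++ sources, formatting checks for Torque and
JavaScript files, generic source checks (copyright headers, tabs, trailing
whitespace), status-file validation and a set of Python unit tests.
"""
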
# for py2/py3 compatibility
from __future__ import absolute_import
from __future__ import print_function

try:
  import hashlib
  md5er = hashlib.md5
except ImportError as e:
  import md5
  md5er = md5.new


import json
import optparse
import os
from os.path import abspath, join, dirname, basename, exists
import pickle
import re
import sys
import subprocess
import multiprocessing
from subprocess import PIPE

from testrunner.local import statusfile
from testrunner.local import testsuite
from testrunner.local import utils
# Special LINT rules diverging from default, and the reasons for diverging.
# build/header_guard: Our guards have the form "V8_FOO_H_", not "SRC_FOO_H_".
#   We now run our own header guard check in PRESUBMIT.py.
# build/include_what_you_use: Started giving false positives for variables
#   named "string" and "map" assuming that you needed to include STL headers.
# runtime/references: As of May 2020 the C++ style guide suggests using
#   references for out parameters, see
#   https://google.github.io/styleguide/cppguide.html#Inputs_and_Outputs.

LINT_RULES = """
-build/header_guard
-build/include_what_you_use
-readability/fn_size
-readability/multiline_comment
-runtime/references
-whitespace/comments
""".split()

LINT_OUTPUT_PATTERN = re.compile(r'^.+[:(]\d+[:)]|^Done processing')
FLAGS_LINE = re.compile(r"//\s*Flags:.*--([A-z0-9-])+_[A-z0-9].*\n")
ASSERT_OPTIMIZED_PATTERN = re.compile("assertOptimized")
FLAGS_ENABLE_OPT = re.compile(r"//\s*Flags:.*--opt[^-].*\n")
ASSERT_UNOPTIMIZED_PATTERN = re.compile("assertUnoptimized")
FLAGS_NO_ALWAYS_OPT = re.compile(r"//\s*Flags:.*--no-?always-opt.*\n")

TOOLS_PATH = dirname(abspath(__file__))

def CppLintWorker(command):
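  """Runs cpplint on a single file and returns the number of reported errors.

  Returns None if cpplint could not be run.
  """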
  try:
    process = subprocess.Popen(command, stderr=subprocess.PIPE)
    process.wait()
    out_lines = ""
    error_count = -1
    while True:
      out_line = process.stderr.readline()
      if out_line == '' and process.poll() is not None:
        if error_count == -1:
          print("Failed to process %s" % command.pop())
          return 1
        break
      m = LINT_OUTPUT_PATTERN.match(out_line)
      if m:
        out_lines += out_line
        error_count += 1
    sys.stdout.write(out_lines)
    return error_count
  except KeyboardInterrupt:
    process.kill()
  except:
    print('Error running cpplint.py. Please make sure you have depot_tools' +
          ' in your $PATH. Lint check skipped.')
    process.kill()

def TorqueLintWorker(command):
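  """Runs format-torque.py on a single file.

  Returns the number of output lines the formatter produced; a non-zero
  count means the file was unformatted and has been rewritten in place.
  """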
  try:
    process = subprocess.Popen(command, stderr=subprocess.PIPE)
    process.wait()
    out_lines = ""
    error_count = 0
    while True:
      out_line = process.stderr.readline()
      if out_line == '' and process.poll() is not None:
        break
      out_lines += out_line
      error_count += 1
    sys.stdout.write(out_lines)
    if error_count != 0:
      sys.stdout.write(
        "warning: formatting and overwriting unformatted Torque files\n")
    return error_count
  except KeyboardInterrupt:
    process.kill()
  except:
    print('Error running format-torque.py')
    process.kill()

def JSLintWorker(command):
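  """Checks a single JS file with clang-format and reformats it if needed.

  Returns 0 if the file was already formatted, and a non-zero value if it
  needed changes or clang-format failed to run.
  """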
  def format_file(command):
    try:
      file_name = command[-1]
      # Read the file as bytes so the comparison with clang-format's stdout
      # (which is bytes on Python 3) works.
      with open(file_name, "rb") as file_handle:
        contents = file_handle.read()

      process = subprocess.Popen(command, stdout=PIPE, stderr=subprocess.PIPE)
      output, err = process.communicate()
      rc = process.returncode
      if rc != 0:
        sys.stdout.write("error code " + str(rc) + " running clang-format.\n")
        return rc

      if output != contents:
        return 1

      return 0
    except KeyboardInterrupt:
      process.kill()
    except Exception:
      print('Error running clang-format. Please make sure you have depot_tools' +
            ' in your $PATH. Lint check skipped.')
      process.kill()

  rc = format_file(command)
  if rc == 1:
    # There are files that need to be formatted, let's format them in place.
    file_name = command[-1]
    sys.stdout.write("Formatting %s.\n" % (file_name))
    rc = format_file(command[:-1] + ["-i", file_name])
  return rc

class FileContentsCache(object):
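  """Caches md5 sums of file contents to skip checks on unchanged files."""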

  def __init__(self, sums_file_name):
    self.sums = {}
    self.sums_file_name = sums_file_name

  def Load(self):
    try:
      sums_file = None
      try:
        # The cache file holds a binary pickle, so open it in binary mode.
        sums_file = open(self.sums_file_name, 'rb')
        self.sums = pickle.load(sums_file)
      except:
        # Cannot parse pickle for any reason. Not much we can do about it.
        pass
    finally:
      if sums_file:
        sums_file.close()

  def Save(self):
    try:
      sums_file = open(self.sums_file_name, 'wb')
      pickle.dump(self.sums, sums_file)
    except:
      # Failed to write pickle. Try to clean-up behind us.
      if sums_file:
        sums_file.close()
      try:
        os.unlink(self.sums_file_name)
      except:
        pass
    finally:
      sums_file.close()

  def FilterUnchangedFiles(self, files):
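    """Returns the files that are new or changed since the last run and
    records their current checksums in the cache.
    """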
    changed_or_new = []
    for file in files:
      try:
        handle = open(file, "rb")
        file_sum = md5er(handle.read()).digest()
        if file not in self.sums or self.sums[file] != file_sum:
          changed_or_new.append(file)
          self.sums[file] = file_sum
      finally:
        handle.close()
    return changed_or_new

  def RemoveFile(self, file):
    if file in self.sums:
      self.sums.pop(file)


class SourceFileProcessor(object):
  """
  Utility class that can run through a directory structure, find all relevant
  files and invoke a custom check on the files.
  """

  def RunOnPath(self, path):
    """Runs processor on all files under the given path."""

    all_files = []
    for file in self.GetPathsToSearch():
      all_files += self.FindFilesIn(join(path, file))
    return self.ProcessFiles(all_files)

  def RunOnFiles(self, files):
    """Runs processor only on affected files."""

    # Helper for getting directory pieces.
    dirs = lambda f: dirname(f).split(os.sep)

    # Path offsets where to look (to be in sync with RunOnPath).
    # Normalize '.' to check for it with str.startswith.
    search_paths = [('' if p == '.' else p) for p in self.GetPathsToSearch()]

    all_files = [
      f.AbsoluteLocalPath()
      for f in files
      if (not self.IgnoreFile(f.LocalPath()) and
          self.IsRelevant(f.LocalPath()) and
          all(not self.IgnoreDir(d) for d in dirs(f.LocalPath())) and
          any(map(f.LocalPath().startswith, search_paths)))
    ]

    return self.ProcessFiles(all_files)

  def IgnoreDir(self, name):
    return (name.startswith('.') or
            name in ('buildtools', 'data', 'gmock', 'gtest', 'kraken',
                     'octane', 'sunspider', 'traces-arm64'))

  def IgnoreFile(self, name):
    return name.startswith('.')

  def FindFilesIn(self, path):
    result = []
    for (root, dirs, files) in os.walk(path):
      for ignored in [x for x in dirs if self.IgnoreDir(x)]:
        dirs.remove(ignored)
      for file in files:
        if not self.IgnoreFile(file) and self.IsRelevant(file):
          result.append(join(root, file))
    return result


class CacheableSourceFileProcessor(SourceFileProcessor):
  """Utility class that allows caching ProcessFiles() method calls.

  In order to use it, implement the GetProcessorWorker() and
  GetProcessorScript() methods, which supply the formatter used to detect
  the files requiring intervention after processing the source files.
  """

  def __init__(self, use_cache, cache_file_path, file_type):
    self.use_cache = use_cache
    self.cache_file_path = cache_file_path
    self.file_type = file_type

  def GetProcessorWorker(self):
    """Expected to return the worker function to run the formatter."""
    raise NotImplementedError

  def GetProcessorScript(self):
    """Expected to return a tuple
    (path to the format processor script, list of arguments)."""
    raise NotImplementedError

  def GetProcessorCommand(self):
    format_processor, options = self.GetProcessorScript()
    if not format_processor:
      print('Could not find the formatter for %s files' % self.file_type)
      sys.exit(1)

    command = [sys.executable, format_processor]
    command.extend(options)

    return command

  def ProcessFiles(self, files):
    if self.use_cache:
      cache = FileContentsCache(self.cache_file_path)
      cache.Load()
      files = cache.FilterUnchangedFiles(files)

    if len(files) == 0:
      print('No changes in %s files detected. Skipping check' % self.file_type)
      return True

    files_requiring_changes = self.DetectFilesToChange(files)
    print(
      'Total %s files found that require formatting: %d' %
      (self.file_type, len(files_requiring_changes)))
    if self.use_cache:
      for file in files_requiring_changes:
        cache.RemoveFile(file)

      cache.Save()

    return files_requiring_changes == []

  def DetectFilesToChange(self, files):
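    """Runs the formatter over the given files in a multiprocessing pool.

    Returns the subset of files that still require changes.
    """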
    command = self.GetProcessorCommand()
    worker = self.GetProcessorWorker()

    commands = [command + [file] for file in files]
    count = multiprocessing.cpu_count()
    pool = multiprocessing.Pool(count)
    try:
      results = pool.map_async(worker, commands).get(timeout=240)
    except KeyboardInterrupt:
      print("\nCaught KeyboardInterrupt, terminating workers.")
      pool.terminate()
      pool.join()
      sys.exit(1)

    unformatted_files = []
    for index, errors in enumerate(results):
      if errors > 0:
        unformatted_files.append(files[index])

    return unformatted_files


class CppLintProcessor(CacheableSourceFileProcessor):
  """
  Lint files to check that they follow the Google code style.
  """

  def __init__(self, use_cache=True):
    super(CppLintProcessor, self).__init__(
      use_cache=use_cache, cache_file_path='.cpplint-cache', file_type='C/C++')

  def IsRelevant(self, name):
    return name.endswith('.cc') or name.endswith('.h')

  def IgnoreDir(self, name):
    return (super(CppLintProcessor, self).IgnoreDir(name)
            or (name == 'third_party'))

  IGNORE_LINT = [
    'export-template.h',
    'flag-definitions.h',
    'gay-fixed.cc',
    'gay-precision.cc',
    'gay-shortest.cc',
  ]

  def IgnoreFile(self, name):
    return (super(CppLintProcessor, self).IgnoreFile(name)
            or (name in CppLintProcessor.IGNORE_LINT))

  def GetPathsToSearch(self):
    dirs = ['include', 'samples', 'src']
    test_dirs = ['cctest', 'common', 'fuzzer', 'inspector', 'unittests']
    return dirs + [join('test', dir) for dir in test_dirs]

  def GetProcessorWorker(self):
    return CppLintWorker

  def GetProcessorScript(self):
    filters = ','.join(LINT_RULES)
    arguments = ['--filter', filters]
    for path in [TOOLS_PATH] + os.environ["PATH"].split(os.pathsep):
      path = path.strip('"')
      cpplint = os.path.join(path, 'cpplint.py')
      if os.path.isfile(cpplint):
        return cpplint, arguments

    return None, arguments


class TorqueLintProcessor(CacheableSourceFileProcessor):
  """
  Check .tq files to verify they follow the Torque style guide.
  """

  def __init__(self, use_cache=True):
    super(TorqueLintProcessor, self).__init__(
      use_cache=use_cache, cache_file_path='.torquelint-cache',
      file_type='Torque')

  def IsRelevant(self, name):
    return name.endswith('.tq')

  def GetPathsToSearch(self):
    dirs = ['third_party', 'src']
    test_dirs = ['torque']
    return dirs + [join('test', dir) for dir in test_dirs]

  def GetProcessorWorker(self):
    return TorqueLintWorker

  def GetProcessorScript(self):
    torque_tools = os.path.join(TOOLS_PATH, "torque")
    torque_path = os.path.join(torque_tools, "format-torque.py")
    arguments = ["-il"]
    if os.path.isfile(torque_path):
      return torque_path, arguments

    return None, arguments

class JSLintProcessor(CacheableSourceFileProcessor):
  """
  Check .js and .mjs files to verify they follow the JS style guide.
  """
  def __init__(self, use_cache=True):
    super(JSLintProcessor, self).__init__(
      use_cache=use_cache, cache_file_path='.jslint-cache',
      file_type='JavaScript')

  def IsRelevant(self, name):
    return name.endswith('.js') or name.endswith('.mjs')

  def GetPathsToSearch(self):
    return ['tools/system-analyzer']

  def GetProcessorWorker(self):
    return JSLintWorker

  def GetProcessorScript(self):
    for path in [TOOLS_PATH] + os.environ["PATH"].split(os.pathsep):
      path = path.strip('"')
      clang_format = os.path.join(path, 'clang_format.py')
      if os.path.isfile(clang_format):
        return clang_format, []

    return None, []

COPYRIGHT_HEADER_PATTERN = re.compile(
    r'Copyright [\d-]*20[0-2][0-9] the V8 project authors. All rights reserved.')

class SourceProcessor(SourceFileProcessor):
  """
  Check that all files include a copyright notice and contain no trailing
  whitespace.
  """

  RELEVANT_EXTENSIONS = ['.js', '.cc', '.h', '.py', '.c', '.status', '.tq', '.g4']

  def __init__(self):
    self.runtime_function_call_pattern = self.CreateRuntimeFunctionCallMatcher()

  def CreateRuntimeFunctionCallMatcher(self):
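    """Builds a regex matching runtime calls written with a space after '%',
    e.g. '% FunctionName(...)', based on the functions listed in runtime.h.
    """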
    runtime_h_path = join(dirname(TOOLS_PATH), 'src/runtime/runtime.h')
    pattern = re.compile(r'\s+F\(([^,]*),.*\)')
    runtime_functions = []
    with open(runtime_h_path) as f:
      for line in f.readlines():
        m = pattern.match(line)
        if m:
          runtime_functions.append(m.group(1))
    if len(runtime_functions) < 250:
      print("Runtime functions list is suspiciously short. "
            "Consider updating the presubmit script.")
      sys.exit(1)
    pattern_str = r'(\%\s+(' + '|'.join(runtime_functions) + r'))[\s\(]'
    return re.compile(pattern_str)

  # Overriding the implementation in the parent class.
  def FindFilesIn(self, path):
    if os.path.exists(path+'/.git'):
      output = subprocess.Popen('git ls-files --full-name',
                                stdout=PIPE, cwd=path, shell=True)
      result = []
      for file in output.stdout.read().split():
        for dir_part in os.path.dirname(file).replace(os.sep, '/').split('/'):
          if self.IgnoreDir(dir_part):
            break
        else:
          if (self.IsRelevant(file) and os.path.exists(file)
              and not self.IgnoreFile(file)):
            result.append(join(path, file))
      if output.wait() == 0:
        return result
    return super(SourceProcessor, self).FindFilesIn(path)

  def IsRelevant(self, name):
    for ext in SourceProcessor.RELEVANT_EXTENSIONS:
      if name.endswith(ext):
        return True
    return False

  def GetPathsToSearch(self):
    return ['.']

  def IgnoreDir(self, name):
    return (super(SourceProcessor, self).IgnoreDir(name) or
            name in ('third_party', 'out', 'obj', 'DerivedSources'))

  IGNORE_COPYRIGHTS = ['box2d.js',
                       'cpplint.py',
                       'copy.js',
                       'corrections.js',
                       'crypto.js',
                       'daemon.py',
                       'earley-boyer.js',
                       'fannkuch.js',
                       'fasta.js',
                       'injected-script.cc',
                       'injected-script.h',
                       'libraries.cc',
                       'libraries-empty.cc',
                       'lua_binarytrees.js',
                       'meta-123.js',
                       'memops.js',
                       'poppler.js',
                       'primes.js',
                       'raytrace.js',
                       'regexp-pcre.js',
                       'resources-123.js',
                       'sqlite.js',
                       'sqlite-change-heap.js',
                       'sqlite-pointer-masking.js',
                       'sqlite-safe-heap.js',
                       'v8-debugger-script.h',
                       'v8-inspector-impl.cc',
                       'v8-inspector-impl.h',
                       'v8-runtime-agent-impl.cc',
                       'v8-runtime-agent-impl.h',
                       'gnuplot-4.6.3-emscripten.js',
                       'zlib.js']
  IGNORE_TABS = IGNORE_COPYRIGHTS + ['unicode-test.js', 'html-comments.js']

  IGNORE_COPYRIGHTS_DIRECTORIES = [
      "test/test262/local-tests",
      "test/mjsunit/wasm/bulk-memory-spec",
  ]

  def EndOfDeclaration(self, line):
    return line == "}" or line == "};"

  def StartOfDeclaration(self, line):
    return line.find("//") == 0 or \
           line.find("/*") == 0 or \
           line.find(") {") != -1

  def ProcessContents(self, name, contents):
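    """Runs the source checks on the contents of a single file.

    Checks for tabs, a correct copyright header, trailing whitespace, a
    single trailing new line and, for JS tests, consistent flag comments.
    Returns True if the file passes all checks.
    """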
    result = True
    base = basename(name)
    if base not in SourceProcessor.IGNORE_TABS:
      if '\t' in contents:
        print("%s contains tabs" % name)
        result = False
    if base not in SourceProcessor.IGNORE_COPYRIGHTS and \
        not any(ignore_dir in name for ignore_dir
                in SourceProcessor.IGNORE_COPYRIGHTS_DIRECTORIES):
      if not COPYRIGHT_HEADER_PATTERN.search(contents):
        print("%s is missing a correct copyright header." % name)
        result = False
    if ' \n' in contents or contents.endswith(' '):
      line = 0
      lines = []
      parts = contents.split(' \n')
      if not contents.endswith(' '):
        parts.pop()
      for part in parts:
        line += part.count('\n') + 1
        lines.append(str(line))
      linenumbers = ', '.join(lines)
      if len(lines) > 1:
        print("%s has trailing whitespaces in lines %s." % (name, linenumbers))
      else:
        print("%s has trailing whitespaces in line %s." % (name, linenumbers))
      result = False
    if not contents.endswith('\n') or contents.endswith('\n\n'):
      print("%s does not end with a single new line." % name)
      result = False
    # Sanitize flags for fuzzer.
    if (".js" in name or ".mjs" in name) and ("mjsunit" in name or "debugger" in name):
      match = FLAGS_LINE.search(contents)
      if match:
        print("%s Flags should use '-' (not '_')" % name)
        result = False
      if ("mjsunit/mjsunit.js" not in name and
          "mjsunit/mjsunit_numfuzz.js" not in name):
        if ASSERT_OPTIMIZED_PATTERN.search(contents) and \
            not FLAGS_ENABLE_OPT.search(contents):
          print("%s Flag --opt should be set if " \
                "assertOptimized() is used" % name)
          result = False
        if ASSERT_UNOPTIMIZED_PATTERN.search(contents) and \
            not FLAGS_NO_ALWAYS_OPT.search(contents):
          print("%s Flag --no-always-opt should be set if " \
                "assertUnoptimized() is used" % name)
          result = False

      match = self.runtime_function_call_pattern.search(contents)
      if match:
        print("%s has unexpected spaces in a runtime call '%s'" % (name, match.group(1)))
        result = False
    return result

  def ProcessFiles(self, files):
    success = True
    violations = 0
    for file in files:
      try:
        handle = open(file)
        contents = handle.read()
        if len(contents) > 0 and not self.ProcessContents(file, contents):
          success = False
          violations += 1
      finally:
        handle.close()
    print("Total violating files: %s" % violations)
    return success

def _CheckStatusFileForDuplicateKeys(filepath):
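  """Verifies that the given .status file does not define a key twice.

  The file is massaged into JSON and parsed with an object_pairs_hook that
  reports duplicate keys. Returns True if no duplicates were found.
  """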
  comma_space_bracket = re.compile(", *]")
  lines = []
  with open(filepath) as f:
    for line in f.readlines():
      # Skip all-comment lines.
      if line.lstrip().startswith("#"): continue
      # Strip away comments at the end of the line.
      comment_start = line.find("#")
      if comment_start != -1:
        line = line[:comment_start]
      line = line.strip()
      # Strip away trailing commas within the line.
      line = comma_space_bracket.sub("]", line)
      if len(line) > 0:
        lines.append(line)

  # Strip away trailing commas at line ends. Ugh.
  for i in range(len(lines) - 1):
    if (lines[i].endswith(",") and len(lines[i + 1]) > 0 and
        lines[i + 1][0] in ("}", "]")):
      lines[i] = lines[i][:-1]

  contents = "\n".join(lines)
  # JSON wants double-quotes.
  contents = contents.replace("'", '"')
  # Fill in keywords (like PASS, SKIP).
  for key in statusfile.KEYWORDS:
    contents = re.sub(r"\b%s\b" % key, "\"%s\"" % key, contents)

  status = {"success": True}
  def check_pairs(pairs):
    keys = {}
    for key, value in pairs:
      if key in keys:
        print("%s: Error: duplicate key %s" % (filepath, key))
        status["success"] = False
      keys[key] = True

  json.loads(contents, object_pairs_hook=check_pairs)
  return status["success"]


class StatusFilesProcessor(SourceFileProcessor):
  """Checks status files for incorrect syntax and duplicate keys."""

  def IsRelevant(self, name):
    # Several changes to files under the test directories could impact status
    # files.
    return True

  def GetPathsToSearch(self):
    return ['test', 'tools/testrunner']

  def ProcessFiles(self, files):
    success = True
    for status_file_path in sorted(self._GetStatusFiles(files)):
      success &= statusfile.PresubmitCheck(status_file_path)
      success &= _CheckStatusFileForDuplicateKeys(status_file_path)
    return success

  def _GetStatusFiles(self, files):
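    """Returns the existing .status files possibly affected by the given files.

    A change under tools/testrunner affects all status files; otherwise only
    the status files of the touched test suites are included.
    """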
    test_path = join(dirname(TOOLS_PATH), 'test')
    testrunner_path = join(TOOLS_PATH, 'testrunner')
    status_files = set()

    for file_path in files:
      if file_path.startswith(testrunner_path):
        for suitepath in os.listdir(test_path):
          suitename = os.path.basename(suitepath)
          status_file = os.path.join(
              test_path, suitename, suitename + ".status")
          if os.path.exists(status_file):
            status_files.add(status_file)
        return status_files

    for file_path in files:
      if file_path.startswith(test_path):
        # Strip off absolute path prefix pointing to test suites.
        pieces = file_path[len(test_path):].lstrip(os.sep).split(os.sep)
        if pieces:
          # Infer affected status file name. Only care for existing status
          # files. Some directories under "test" don't have any.
          if not os.path.isdir(join(test_path, pieces[0])):
            continue
          status_file = join(test_path, pieces[0], pieces[0] + ".status")
          if not os.path.exists(status_file):
            continue
          status_files.add(status_file)
    return status_files


def CheckDeps(workspace):
  checkdeps_py = join(workspace, 'buildtools', 'checkdeps', 'checkdeps.py')
  return subprocess.call([sys.executable, checkdeps_py, workspace]) == 0


def PyTests(workspace):
  result = True
  for script in [
      join(workspace, 'tools', 'clusterfuzz', 'v8_foozzie_test.py'),
      join(workspace, 'tools', 'release', 'test_scripts.py'),
      join(workspace, 'tools', 'unittests', 'run_tests_test.py'),
      join(workspace, 'tools', 'unittests', 'run_perf_test.py'),
      join(workspace, 'tools', 'testrunner', 'testproc', 'variant_unittest.py'),
    ]:
    print('Running ' + script)
    result &= subprocess.call(
        [sys.executable, script], stdout=subprocess.PIPE) == 0

  return result


def GetOptions():
  result = optparse.OptionParser()
  result.add_option('--no-lint', help="Do not run cpplint", default=False,
                    action="store_true")
  result.add_option('--no-linter-cache', help="Do not cache linter results",
                    default=False, action="store_true")

  return result


def Main():
  workspace = abspath(join(dirname(sys.argv[0]), '..'))
  parser = GetOptions()
  (options, args) = parser.parse_args()
  success = True
  print("Running checkdeps...")
  success &= CheckDeps(workspace)
  use_linter_cache = not options.no_linter_cache
  if not options.no_lint:
    print("Running C++ lint check...")
    success &= CppLintProcessor(use_cache=use_linter_cache).RunOnPath(workspace)

  print("Running Torque formatting check...")
  success &= TorqueLintProcessor(use_cache=use_linter_cache).RunOnPath(
    workspace)
  print("Running JavaScript formatting check...")
  success &= JSLintProcessor(use_cache=use_linter_cache).RunOnPath(
    workspace)
  print("Running copyright header, trailing whitespaces and "
        "single new line at EOF check...")
  success &= SourceProcessor().RunOnPath(workspace)
  print("Running status-files check...")
  success &= StatusFilesProcessor().RunOnPath(workspace)
  print("Running python tests...")
  success &= PyTests(workspace)
  if success:
    return 0
  else:
    return 1


if __name__ == '__main__':
  sys.exit(Main())
