1# Copyright (c) 2012 Google Inc. All rights reserved.
2# Use of this source code is governed by a BSD-style license that can be
3# found in the LICENSE file.
4
5from __future__ import print_function
6
7import ast
8
9import gyp.common
10import gyp.simple_copy
11import multiprocessing
12import optparse
13import os.path
14import re
15import shlex
16import signal
17import subprocess
18import sys
19import threading
20import time
21import traceback
22from gyp.common import GypError
23from gyp.common import OrderedSet
24
25PY3 = bytes != str
26
27# A list of types that are treated as linkable.
28linkable_types = [
29  'executable',
30  'shared_library',
31  'loadable_module',
32  'mac_kernel_extension',
33]
34
35# A list of sections that contain links to other targets.
36dependency_sections = ['dependencies', 'export_dependent_settings']
37
38# base_path_sections is a list of sections defined by GYP that contain
39# pathnames.  The generators can provide more keys, the two lists are merged
40# into path_sections, but you should call IsPathSection instead of using either
41# list directly.
42base_path_sections = [
43  'destination',
44  'files',
45  'include_dirs',
46  'inputs',
47  'libraries',
48  'outputs',
49  'sources',
50]
51path_sections = set()
52
53# These per-process dictionaries are used to cache build file data when loading
54# in parallel mode.
55per_process_data = {}
56per_process_aux_data = {}
57
58def IsPathSection(section):
59  # If section ends in one of the '=+?!' characters, it's applied to a section
60  # without the trailing characters.  '/' is notably absent from this list,
61  # because there's no way for a regular expression to be treated as a path.
62  while section and section[-1:] in '=+?!':
63    section = section[:-1]
64
65  if section in path_sections:
66    return True
67
68  # Sections mathing the regexp '_(dir|file|path)s?$' are also
69  # considered PathSections. Using manual string matching since that
70  # is much faster than the regexp and this can be called hundreds of
71  # thousands of times so micro performance matters.
72  if "_" in section:
73    tail = section[-6:]
74    if tail[-1] == 's':
75      tail = tail[:-1]
76    if tail[-5:] in ('_file', '_path'):
77      return True
78    return tail[-4:] == '_dir'
79
80  return False
81
82# base_non_configuration_keys is a list of key names that belong in the target
83# itself and should not be propagated into its configurations.  It is merged
84# with a list that can come from the generator to
85# create non_configuration_keys.
86base_non_configuration_keys = [
87  # Sections that must exist inside targets and not configurations.
88  'actions',
89  'configurations',
90  'copies',
91  'default_configuration',
92  'dependencies',
93  'dependencies_original',
94  'libraries',
95  'postbuilds',
96  'product_dir',
97  'product_extension',
98  'product_name',
99  'product_prefix',
100  'rules',
101  'run_as',
102  'sources',
103  'standalone_static_library',
104  'suppress_wildcard',
105  'target_name',
106  'toolset',
107  'toolsets',
108  'type',
109
110  # Sections that can be found inside targets or configurations, but that
111  # should not be propagated from targets into their configurations.
112  'variables',
113]
114non_configuration_keys = []
115
116# Keys that do not belong inside a configuration dictionary.
117invalid_configuration_keys = [
118  'actions',
119  'all_dependent_settings',
120  'configurations',
121  'dependencies',
122  'direct_dependent_settings',
123  'libraries',
124  'link_settings',
125  'sources',
126  'standalone_static_library',
127  'target_name',
128  'type',
129]
130
131# Controls whether or not the generator supports multiple toolsets.
132multiple_toolsets = False
133
134# Paths for converting filelist paths to output paths: {
135#   toplevel,
136#   qualified_output_dir,
137# }
138generator_filelist_paths = None
139
140def GetIncludedBuildFiles(build_file_path, aux_data, included=None):
141  """Return a list of all build files included into build_file_path.
142
143  The returned list will contain build_file_path as well as all other files
144  that it included, either directly or indirectly.  Note that the list may
145  contain files that were included into a conditional section that evaluated
146  to false and was not merged into build_file_path's dict.
147
148  aux_data is a dict containing a key for each build file or included build
149  file.  Those keys provide access to dicts whose "included" keys contain
150  lists of all other files included by the build file.
151
152  included should be left at its default None value by external callers.  It
153  is used for recursion.
154
155  The returned list will not contain any duplicate entries.  Each build file
156  in the list will be relative to the current directory.
157  """
158
159  if included is None:
160    included = []
161
162  if build_file_path in included:
163    return included
164
165  included.append(build_file_path)
166
167  for included_build_file in aux_data[build_file_path].get('included', []):
168    GetIncludedBuildFiles(included_build_file, aux_data, included)
169
170  return included
171
172
173def CheckedEval(file_contents):
174  """Return the eval of a gyp file.
175
176  The gyp file is restricted to dictionaries and lists only, and
177  repeated keys are not allowed.
178
179  Note that this is slower than eval() is.
180  """
181
182  syntax_tree = ast.parse(file_contents)
183  assert isinstance(syntax_tree, ast.Module)
184  c1 = syntax_tree.body
185  assert len(c1) == 1
186  c2 = c1[0]
187  assert isinstance(c2, ast.Expr)
188  return CheckNode(c2.value, [])
189
190
191def CheckNode(node, keypath):
192  if isinstance(node, ast.Dict):
193    c = node.getChildren()
194    dict = {}
195    for key, value in zip(node.keys, node.values):
196      assert isinstance(key, ast.Str)
197      key = key.s
198      if key in dict:
199        raise GypError("Key '" + key + "' repeated at level " +
200              repr(len(keypath) + 1) + " with key path '" +
201              '.'.join(keypath) + "'")
202      kp = list(keypath)  # Make a copy of the list for descending this node.
203      kp.append(key)
204      dict[key] = CheckNode(value, kp)
205    return dict
206  elif isinstance(node, ast.List):
207    children = []
208    for index, child in enumerate(node.elts):
209      kp = list(keypath)  # Copy list.
210      kp.append(repr(index))
211      children.append(CheckNode(child, kp))
212    return children
213  elif isinstance(node, ast.Str):
214    return node.s
215  else:
216    raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) +
217         "': " + repr(node))
218
219
220def LoadOneBuildFile(build_file_path, data, aux_data, includes,
221                     is_target, check):
222  if build_file_path in data:
223    return data[build_file_path]
224
225  if os.path.exists(build_file_path):
226    # Open the build file for read ('r') with universal-newlines mode ('U')
227    # to make sure platform specific newlines ('\r\n' or '\r') are converted to '\n'
228    # which otherwise will fail eval()
229    if sys.platform == 'zos':
230      # On z/OS, universal-newlines mode treats the file as an ascii file. But since
231      # node-gyp produces ebcdic files, do not use that mode.
232      build_file_contents = open(build_file_path, 'r').read()
233    else:
234      build_file_contents = open(build_file_path, 'rU').read()
235  else:
236    raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd()))
237
238  build_file_data = None
239  try:
240    if check:
241      build_file_data = CheckedEval(build_file_contents)
242    else:
243      build_file_data = eval(build_file_contents, {'__builtins__': {}},
244                             None)
245  except SyntaxError as e:
246    e.filename = build_file_path
247    raise
248  except Exception as e:
249    gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path)
250    raise
251
252  if type(build_file_data) is not dict:
253    raise GypError("%s does not evaluate to a dictionary." % build_file_path)
254
255  data[build_file_path] = build_file_data
256  aux_data[build_file_path] = {}
257
258  # Scan for includes and merge them in.
259  if ('skip_includes' not in build_file_data or
260      not build_file_data['skip_includes']):
261    try:
262      if is_target:
263        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
264                                      aux_data, includes, check)
265      else:
266        LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data,
267                                      aux_data, None, check)
268    except Exception as e:
269      gyp.common.ExceptionAppend(e,
270                                 'while reading includes of ' + build_file_path)
271      raise
272
273  return build_file_data
274
275
276def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data,
277                                  includes, check):
278  includes_list = []
279  if includes != None:
280    includes_list.extend(includes)
281  if 'includes' in subdict:
282    for include in subdict['includes']:
283      # "include" is specified relative to subdict_path, so compute the real
284      # path to include by appending the provided "include" to the directory
285      # in which subdict_path resides.
286      relative_include = \
287          os.path.normpath(os.path.join(os.path.dirname(subdict_path), include))
288      includes_list.append(relative_include)
289    # Unhook the includes list, it's no longer needed.
290    del subdict['includes']
291
292  # Merge in the included files.
293  for include in includes_list:
294    if not 'included' in aux_data[subdict_path]:
295      aux_data[subdict_path]['included'] = []
296    aux_data[subdict_path]['included'].append(include)
297
298    gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include)
299
300    MergeDicts(subdict,
301               LoadOneBuildFile(include, data, aux_data, None, False, check),
302               subdict_path, include)
303
304  # Recurse into subdictionaries.
305  for k, v in subdict.items():
306    if type(v) is dict:
307      LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data,
308                                    None, check)
309    elif type(v) is list:
310      LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data,
311                                    check)
312
313
314# This recurses into lists so that it can look for dicts.
315def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check):
316  for item in sublist:
317    if type(item) is dict:
318      LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data,
319                                    None, check)
320    elif type(item) is list:
321      LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check)
322
323# Processes toolsets in all the targets. This recurses into condition entries
324# since they can contain toolsets as well.
325def ProcessToolsetsInDict(data):
326  if 'targets' in data:
327    target_list = data['targets']
328    new_target_list = []
329    for target in target_list:
330      # If this target already has an explicit 'toolset', and no 'toolsets'
331      # list, don't modify it further.
332      if 'toolset' in target and 'toolsets' not in target:
333        new_target_list.append(target)
334        continue
335      if multiple_toolsets:
336        toolsets = target.get('toolsets', ['target'])
337      else:
338        toolsets = ['target']
339      # Make sure this 'toolsets' definition is only processed once.
340      if 'toolsets' in target:
341        del target['toolsets']
342      if len(toolsets) > 0:
343        # Optimization: only do copies if more than one toolset is specified.
344        for build in toolsets[1:]:
345          new_target = gyp.simple_copy.deepcopy(target)
346          new_target['toolset'] = build
347          new_target_list.append(new_target)
348        target['toolset'] = toolsets[0]
349        new_target_list.append(target)
350    data['targets'] = new_target_list
351  if 'conditions' in data:
352    for condition in data['conditions']:
353      if type(condition) is list:
354        for condition_dict in condition[1:]:
355          if type(condition_dict) is dict:
356            ProcessToolsetsInDict(condition_dict)
357
358
359# TODO(mark): I don't love this name.  It just means that it's going to load
360# a build file that contains targets and is expected to provide a targets dict
361# that contains the targets...
362def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes,
363                        depth, check, load_dependencies):
364  # If depth is set, predefine the DEPTH variable to be a relative path from
365  # this build file's directory to the directory identified by depth.
366  if depth:
367    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
368    # temporary measure. This should really be addressed by keeping all paths
369    # in POSIX until actual project generation.
370    d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path))
371    if d == '':
372      variables['DEPTH'] = '.'
373    else:
374      variables['DEPTH'] = d.replace('\\', '/')
375
376  # The 'target_build_files' key is only set when loading target build files in
377  # the non-parallel code path, where LoadTargetBuildFile is called
378  # recursively.  In the parallel code path, we don't need to check whether the
379  # |build_file_path| has already been loaded, because the 'scheduled' set in
380  # ParallelState guarantees that we never load the same |build_file_path|
381  # twice.
382  if 'target_build_files' in data:
383    if build_file_path in data['target_build_files']:
384      # Already loaded.
385      return False
386    data['target_build_files'].add(build_file_path)
387
388  gyp.DebugOutput(gyp.DEBUG_INCLUDES,
389                  "Loading Target Build File '%s'", build_file_path)
390
391  build_file_data = LoadOneBuildFile(build_file_path, data, aux_data,
392                                     includes, True, check)
393
394  # Store DEPTH for later use in generators.
395  build_file_data['_DEPTH'] = depth
396
397  # Set up the included_files key indicating which .gyp files contributed to
398  # this target dict.
399  if 'included_files' in build_file_data:
400    raise GypError(build_file_path + ' must not contain included_files key')
401
402  included = GetIncludedBuildFiles(build_file_path, aux_data)
403  build_file_data['included_files'] = []
404  for included_file in included:
405    # included_file is relative to the current directory, but it needs to
406    # be made relative to build_file_path's directory.
407    included_relative = \
408        gyp.common.RelativePath(included_file,
409                                os.path.dirname(build_file_path))
410    build_file_data['included_files'].append(included_relative)
411
412  # Do a first round of toolsets expansion so that conditions can be defined
413  # per toolset.
414  ProcessToolsetsInDict(build_file_data)
415
416  # Apply "pre"/"early" variable expansions and condition evaluations.
417  ProcessVariablesAndConditionsInDict(
418      build_file_data, PHASE_EARLY, variables, build_file_path)
419
420  # Since some toolsets might have been defined conditionally, perform
421  # a second round of toolsets expansion now.
422  ProcessToolsetsInDict(build_file_data)
423
424  # Look at each project's target_defaults dict, and merge settings into
425  # targets.
426  if 'target_defaults' in build_file_data:
427    if 'targets' not in build_file_data:
428      raise GypError("Unable to find targets in build file %s" %
429                     build_file_path)
430
431    index = 0
432    while index < len(build_file_data['targets']):
433      # This procedure needs to give the impression that target_defaults is
434      # used as defaults, and the individual targets inherit from that.
435      # The individual targets need to be merged into the defaults.  Make
436      # a deep copy of the defaults for each target, merge the target dict
437      # as found in the input file into that copy, and then hook up the
438      # copy with the target-specific data merged into it as the replacement
439      # target dict.
440      old_target_dict = build_file_data['targets'][index]
441      new_target_dict = gyp.simple_copy.deepcopy(
442        build_file_data['target_defaults'])
443      MergeDicts(new_target_dict, old_target_dict,
444                 build_file_path, build_file_path)
445      build_file_data['targets'][index] = new_target_dict
446      index += 1
447
448    # No longer needed.
449    del build_file_data['target_defaults']
450
451  # Look for dependencies.  This means that dependency resolution occurs
452  # after "pre" conditionals and variable expansion, but before "post" -
453  # in other words, you can't put a "dependencies" section inside a "post"
454  # conditional within a target.
455
456  dependencies = []
457  if 'targets' in build_file_data:
458    for target_dict in build_file_data['targets']:
459      if 'dependencies' not in target_dict:
460        continue
461      for dependency in target_dict['dependencies']:
462        dependencies.append(
463            gyp.common.ResolveTarget(build_file_path, dependency, None)[0])
464
465  if load_dependencies:
466    for dependency in dependencies:
467      try:
468        LoadTargetBuildFile(dependency, data, aux_data, variables,
469                            includes, depth, check, load_dependencies)
470      except Exception as e:
471        gyp.common.ExceptionAppend(
472          e, 'while loading dependencies of %s' % build_file_path)
473        raise
474  else:
475    return (build_file_path, dependencies)
476
477def CallLoadTargetBuildFile(global_flags,
478                            build_file_path, variables,
479                            includes, depth, check,
480                            generator_input_info):
481  """Wrapper around LoadTargetBuildFile for parallel processing.
482
483     This wrapper is used when LoadTargetBuildFile is executed in
484     a worker process.
485  """
486
487  try:
488    signal.signal(signal.SIGINT, signal.SIG_IGN)
489
490    # Apply globals so that the worker process behaves the same.
491    for key, value in global_flags.items():
492      globals()[key] = value
493
494    SetGeneratorGlobals(generator_input_info)
495    result = LoadTargetBuildFile(build_file_path, per_process_data,
496                                 per_process_aux_data, variables,
497                                 includes, depth, check, False)
498    if not result:
499      return result
500
501    (build_file_path, dependencies) = result
502
503    # We can safely pop the build_file_data from per_process_data because it
504    # will never be referenced by this process again, so we don't need to keep
505    # it in the cache.
506    build_file_data = per_process_data.pop(build_file_path)
507
508    # This gets serialized and sent back to the main process via a pipe.
509    # It's handled in LoadTargetBuildFileCallback.
510    return (build_file_path,
511            build_file_data,
512            dependencies)
513  except GypError as e:
514    sys.stderr.write("gyp: %s\n" % e)
515    return None
516  except Exception as e:
517    print('Exception:', e, file=sys.stderr)
518    print(traceback.format_exc(), file=sys.stderr)
519    return None
520
521
522class ParallelProcessingError(Exception):
523  pass
524
525
526class ParallelState(object):
527  """Class to keep track of state when processing input files in parallel.
528
529  If build files are loaded in parallel, use this to keep track of
530  state during farming out and processing parallel jobs. It's stored
531  in a global so that the callback function can have access to it.
532  """
533
534  def __init__(self):
535    # The multiprocessing pool.
536    self.pool = None
537    # The condition variable used to protect this object and notify
538    # the main loop when there might be more data to process.
539    self.condition = None
540    # The "data" dict that was passed to LoadTargetBuildFileParallel
541    self.data = None
542    # The number of parallel calls outstanding; decremented when a response
543    # was received.
544    self.pending = 0
545    # The set of all build files that have been scheduled, so we don't
546    # schedule the same one twice.
547    self.scheduled = set()
548    # A list of dependency build file paths that haven't been scheduled yet.
549    self.dependencies = []
550    # Flag to indicate if there was an error in a child process.
551    self.error = False
552
553  def LoadTargetBuildFileCallback(self, result):
554    """Handle the results of running LoadTargetBuildFile in another process.
555    """
556    self.condition.acquire()
557    if not result:
558      self.error = True
559      self.condition.notify()
560      self.condition.release()
561      return
562    (build_file_path0, build_file_data0, dependencies0) = result
563    self.data[build_file_path0] = build_file_data0
564    self.data['target_build_files'].add(build_file_path0)
565    for new_dependency in dependencies0:
566      if new_dependency not in self.scheduled:
567        self.scheduled.add(new_dependency)
568        self.dependencies.append(new_dependency)
569    self.pending -= 1
570    self.condition.notify()
571    self.condition.release()
572
573
574def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
575                                 check, generator_input_info):
576  parallel_state = ParallelState()
577  parallel_state.condition = threading.Condition()
578  # Make copies of the build_files argument that we can modify while working.
579  parallel_state.dependencies = list(build_files)
580  parallel_state.scheduled = set(build_files)
581  parallel_state.pending = 0
582  parallel_state.data = data
583
584  try:
585    parallel_state.condition.acquire()
586    while parallel_state.dependencies or parallel_state.pending:
587      if parallel_state.error:
588        break
589      if not parallel_state.dependencies:
590        parallel_state.condition.wait()
591        continue
592
593      dependency = parallel_state.dependencies.pop()
594
595      parallel_state.pending += 1
596      global_flags = {
597        'path_sections': globals()['path_sections'],
598        'non_configuration_keys': globals()['non_configuration_keys'],
599        'multiple_toolsets': globals()['multiple_toolsets']}
600
601      if not parallel_state.pool:
602        parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count())
603      parallel_state.pool.apply_async(
604          CallLoadTargetBuildFile,
605          args = (global_flags, dependency,
606                  variables, includes, depth, check, generator_input_info),
607          callback = parallel_state.LoadTargetBuildFileCallback)
608  except KeyboardInterrupt as e:
609    parallel_state.pool.terminate()
610    raise e
611
612  parallel_state.condition.release()
613
614  parallel_state.pool.close()
615  parallel_state.pool.join()
616  parallel_state.pool = None
617
618  if parallel_state.error:
619    sys.exit(1)
620
621# Look for the bracket that matches the first bracket seen in a
622# string, and return the start and end as a tuple.  For example, if
623# the input is something like "<(foo <(bar)) blah", then it would
624# return (1, 13), indicating the entire string except for the leading
625# "<" and trailing " blah".
626LBRACKETS= set('{[(')
627BRACKETS = {'}': '{', ']': '[', ')': '('}
628def FindEnclosingBracketGroup(input_str):
629  stack = []
630  start = -1
631  for index, char in enumerate(input_str):
632    if char in LBRACKETS:
633      stack.append(char)
634      if start == -1:
635        start = index
636    elif char in BRACKETS:
637      if not stack:
638        return (-1, -1)
639      if stack.pop() != BRACKETS[char]:
640        return (-1, -1)
641      if not stack:
642        return (start, index + 1)
643  return (-1, -1)
644
645
646def IsStrCanonicalInt(string):
647  """Returns True if |string| is in its canonical integer form.
648
649  The canonical form is such that str(int(string)) == string.
650  """
651  if type(string) is str:
652    # This function is called a lot so for maximum performance, avoid
653    # involving regexps which would otherwise make the code much
654    # shorter. Regexps would need twice the time of this function.
655    if string:
656      if string == "0":
657        return True
658      if string[0] == "-":
659        string = string[1:]
660        if not string:
661          return False
662      if '1' <= string[0] <= '9':
663        return string.isdigit()
664
665  return False
666
667
668# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)",
669# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())".
670# In the last case, the inner "<()" is captured in match['content'].
671early_variable_re = re.compile(
672    r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)'
673    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
674    r'\((?P<is_array>\s*\[?)'
675    r'(?P<content>.*?)(\]?)\))')
676
677# This matches the same as early_variable_re, but with '>' instead of '<'.
678late_variable_re = re.compile(
679    r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)'
680    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
681    r'\((?P<is_array>\s*\[?)'
682    r'(?P<content>.*?)(\]?)\))')
683
684# This matches the same as early_variable_re, but with '^' instead of '<'.
685latelate_variable_re = re.compile(
686    r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)'
687    r'(?P<command_string>[-a-zA-Z0-9_.]+)?'
688    r'\((?P<is_array>\s*\[?)'
689    r'(?P<content>.*?)(\]?)\))')
690
691# Global cache of results from running commands so they don't have to be run
692# more then once.
693cached_command_results = {}
694
695
696def FixupPlatformCommand(cmd):
697  if sys.platform == 'win32':
698    if type(cmd) is list:
699      cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:]
700    else:
701      cmd = re.sub('^cat ', 'type ', cmd)
702  return cmd
703
704
705PHASE_EARLY = 0
706PHASE_LATE = 1
707PHASE_LATELATE = 2
708
709
710def ExpandVariables(input, phase, variables, build_file):
711  # Look for the pattern that gets expanded into variables
712  if phase == PHASE_EARLY:
713    variable_re = early_variable_re
714    expansion_symbol = '<'
715  elif phase == PHASE_LATE:
716    variable_re = late_variable_re
717    expansion_symbol = '>'
718  elif phase == PHASE_LATELATE:
719    variable_re = latelate_variable_re
720    expansion_symbol = '^'
721  else:
722    assert False
723
724  input_str = str(input)
725  if IsStrCanonicalInt(input_str):
726    return int(input_str)
727
728  # Do a quick scan to determine if an expensive regex search is warranted.
729  if expansion_symbol not in input_str:
730    return input_str
731
732  # Get the entire list of matches as a list of MatchObject instances.
733  # (using findall here would return strings instead of MatchObjects).
734  matches = list(variable_re.finditer(input_str))
735  if not matches:
736    return input_str
737
738  output = input_str
739  # Reverse the list of matches so that replacements are done right-to-left.
740  # That ensures that earlier replacements won't mess up the string in a
741  # way that causes later calls to find the earlier substituted text instead
742  # of what's intended for replacement.
743  matches.reverse()
744  for match_group in matches:
745    match = match_group.groupdict()
746    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match)
747    # match['replace'] is the substring to look for, match['type']
748    # is the character code for the replacement type (< > <! >! <| >| <@
749    # >@ <!@ >!@), match['is_array'] contains a '[' for command
750    # arrays, and match['content'] is the name of the variable (< >)
751    # or command to run (<! >!). match['command_string'] is an optional
752    # command string. Currently, only 'pymod_do_main' is supported.
753
754    # run_command is true if a ! variant is used.
755    run_command = '!' in match['type']
756    command_string = match['command_string']
757
758    # file_list is true if a | variant is used.
759    file_list = '|' in match['type']
760
761    # Capture these now so we can adjust them later.
762    replace_start = match_group.start('replace')
763    replace_end = match_group.end('replace')
764
765    # Find the ending paren, and re-evaluate the contained string.
766    (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:])
767
768    # Adjust the replacement range to match the entire command
769    # found by FindEnclosingBracketGroup (since the variable_re
770    # probably doesn't match the entire command if it contained
771    # nested variables).
772    replace_end = replace_start + c_end
773
774    # Find the "real" replacement, matching the appropriate closing
775    # paren, and adjust the replacement start and end.
776    replacement = input_str[replace_start:replace_end]
777
778    # Figure out what the contents of the variable parens are.
779    contents_start = replace_start + c_start + 1
780    contents_end = replace_end - 1
781    contents = input_str[contents_start:contents_end]
782
783    # Do filter substitution now for <|().
784    # Admittedly, this is different than the evaluation order in other
785    # contexts. However, since filtration has no chance to run on <|(),
786    # this seems like the only obvious way to give them access to filters.
787    if file_list:
788      processed_variables = gyp.simple_copy.deepcopy(variables)
789      ProcessListFiltersInDict(contents, processed_variables)
790      # Recurse to expand variables in the contents
791      contents = ExpandVariables(contents, phase,
792                                 processed_variables, build_file)
793    else:
794      # Recurse to expand variables in the contents
795      contents = ExpandVariables(contents, phase, variables, build_file)
796
797    # Strip off leading/trailing whitespace so that variable matches are
798    # simpler below (and because they are rarely needed).
799    contents = contents.strip()
800
801    # expand_to_list is true if an @ variant is used.  In that case,
802    # the expansion should result in a list.  Note that the caller
803    # is to be expecting a list in return, and not all callers do
804    # because not all are working in list context.  Also, for list
805    # expansions, there can be no other text besides the variable
806    # expansion in the input string.
807    expand_to_list = '@' in match['type'] and input_str == replacement
808
809    if run_command or file_list:
810      # Find the build file's directory, so commands can be run or file lists
811      # generated relative to it.
812      build_file_dir = os.path.dirname(build_file)
813      if build_file_dir == '' and not file_list:
814        # If build_file is just a leaf filename indicating a file in the
815        # current directory, build_file_dir might be an empty string.  Set
816        # it to None to signal to subprocess.Popen that it should run the
817        # command in the current directory.
818        build_file_dir = None
819
820    # Support <|(listfile.txt ...) which generates a file
821    # containing items from a gyp list, generated at gyp time.
822    # This works around actions/rules which have more inputs than will
823    # fit on the command line.
824    if file_list:
825      if type(contents) is list:
826        contents_list = contents
827      else:
828        contents_list = contents.split(' ')
829      replacement = contents_list[0]
830      if os.path.isabs(replacement):
831        raise GypError('| cannot handle absolute paths, got "%s"' % replacement)
832
833      if not generator_filelist_paths:
834        path = os.path.join(build_file_dir, replacement)
835      else:
836        if os.path.isabs(build_file_dir):
837          toplevel = generator_filelist_paths['toplevel']
838          rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel)
839        else:
840          rel_build_file_dir = build_file_dir
841        qualified_out_dir = generator_filelist_paths['qualified_out_dir']
842        path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement)
843        gyp.common.EnsureDirExists(path)
844
845      replacement = gyp.common.RelativePath(path, build_file_dir)
846      f = gyp.common.WriteOnDiff(path)
847      for i in contents_list[1:]:
848        f.write('%s\n' % i)
849      f.close()
850
851    elif run_command:
852      use_shell = True
853      if match['is_array']:
854        contents = eval(contents)
855        use_shell = False
856
857      # Check for a cached value to avoid executing commands, or generating
858      # file lists more than once. The cache key contains the command to be
859      # run as well as the directory to run it from, to account for commands
860      # that depend on their current directory.
861      # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory,
862      # someone could author a set of GYP files where each time the command
863      # is invoked it produces different output by design. When the need
864      # arises, the syntax should be extended to support no caching off a
865      # command's output so it is run every time.
866      cache_key = (str(contents), build_file_dir)
867      cached_value = cached_command_results.get(cache_key, None)
868      if cached_value is None:
869        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
870                        "Executing command '%s' in directory '%s'",
871                        contents, build_file_dir)
872
873        replacement = ''
874
875        if command_string == 'pymod_do_main':
876          # <!pymod_do_main(modulename param eters) loads |modulename| as a
877          # python module and then calls that module's DoMain() function,
878          # passing ["param", "eters"] as a single list argument. For modules
879          # that don't load quickly, this can be faster than
880          # <!(python modulename param eters). Do this in |build_file_dir|.
881          oldwd = os.getcwd()  # Python doesn't like os.open('.'): no fchdir.
882          if build_file_dir:  # build_file_dir may be None (see above).
883            os.chdir(build_file_dir)
884          try:
885
886            parsed_contents = shlex.split(contents)
887            try:
888              py_module = __import__(parsed_contents[0])
889            except ImportError as e:
890              raise GypError("Error importing pymod_do_main"
891                             "module (%s): %s" % (parsed_contents[0], e))
892            replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip()
893          finally:
894            os.chdir(oldwd)
895          assert replacement != None
896        elif command_string:
897          raise GypError("Unknown command string '%s' in '%s'." %
898                         (command_string, contents))
899        else:
900          # Fix up command with platform specific workarounds.
901          contents = FixupPlatformCommand(contents)
902          try:
903            p = subprocess.Popen(contents, shell=use_shell,
904                                 stdout=subprocess.PIPE,
905                                 stderr=subprocess.PIPE,
906                                 stdin=subprocess.PIPE,
907                                 cwd=build_file_dir)
908          except Exception as e:
909            raise GypError("%s while executing command '%s' in %s" %
910                           (e, contents, build_file))
911
912          p_stdout, p_stderr = p.communicate('')
913          if PY3:
914            p_stdout = p_stdout.decode('utf-8')
915            p_stderr = p_stderr.decode('utf-8')
916
917          if p.wait() != 0 or p_stderr:
918            sys.stderr.write(p_stderr)
919            # Simulate check_call behavior, since check_call only exists
920            # in python 2.5 and later.
921            raise GypError("Call to '%s' returned exit status %d while in %s." %
922                           (contents, p.returncode, build_file))
923          replacement = p_stdout.rstrip()
924
925        cached_command_results[cache_key] = replacement
926      else:
927        gyp.DebugOutput(gyp.DEBUG_VARIABLES,
928                        "Had cache value for command '%s' in directory '%s'",
929                        contents,build_file_dir)
930        replacement = cached_value
931
932    else:
933      if not contents in variables:
934        if contents[-1] in ['!', '/']:
935          # In order to allow cross-compiles (nacl) to happen more naturally,
936          # we will allow references to >(sources/) etc. to resolve to
937          # and empty list if undefined. This allows actions to:
938          # 'action!': [
939          #   '>@(_sources!)',
940          # ],
941          # 'action/': [
942          #   '>@(_sources/)',
943          # ],
944          replacement = []
945        else:
946          raise GypError('Undefined variable ' + contents +
947                         ' in ' + build_file)
948      else:
949        replacement = variables[contents]
950
951    if isinstance(replacement, bytes) and not isinstance(replacement, str):
952          replacement = replacement.decode("utf-8")  # done on Python 3 only
953    if type(replacement) is list:
954      for item in replacement:
955        if isinstance(item, bytes) and not isinstance(item, str):
956          item = item.decode("utf-8")  # done on Python 3 only
957        if not contents[-1] == '/' and type(item) not in (str, int):
958          raise GypError('Variable ' + contents +
959                         ' must expand to a string or list of strings; ' +
960                         'list contains a ' +
961                         item.__class__.__name__)
962      # Run through the list and handle variable expansions in it.  Since
963      # the list is guaranteed not to contain dicts, this won't do anything
964      # with conditions sections.
965      ProcessVariablesAndConditionsInList(replacement, phase, variables,
966                                          build_file)
967    elif type(replacement) not in (str, int):
968          raise GypError('Variable ' + contents +
969                         ' must expand to a string or list of strings; ' +
970                         'found a ' + replacement.__class__.__name__)
971
972    if expand_to_list:
973      # Expanding in list context.  It's guaranteed that there's only one
974      # replacement to do in |input_str| and that it's this replacement.  See
975      # above.
976      if type(replacement) is list:
977        # If it's already a list, make a copy.
978        output = replacement[:]
979      else:
980        # Split it the same way sh would split arguments.
981        output = shlex.split(str(replacement))
982    else:
983      # Expanding in string context.
984      encoded_replacement = ''
985      if type(replacement) is list:
986        # When expanding a list into string context, turn the list items
987        # into a string in a way that will work with a subprocess call.
988        #
989        # TODO(mark): This isn't completely correct.  This should
990        # call a generator-provided function that observes the
991        # proper list-to-argument quoting rules on a specific
992        # platform instead of just calling the POSIX encoding
993        # routine.
994        encoded_replacement = gyp.common.EncodePOSIXShellList(replacement)
995      else:
996        encoded_replacement = replacement
997
998      output = output[:replace_start] + str(encoded_replacement) + \
999               output[replace_end:]
1000    # Prepare for the next match iteration.
1001    input_str = output
1002
1003  if output == input:
1004    gyp.DebugOutput(gyp.DEBUG_VARIABLES,
1005                    "Found only identity matches on %r, avoiding infinite "
1006                    "recursion.",
1007                    output)
1008  else:
1009    # Look for more matches now that we've replaced some, to deal with
1010    # expanding local variables (variables defined in the same
1011    # variables block as this one).
1012    gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output)
1013    if type(output) is list:
1014      if output and type(output[0]) is list:
1015        # Leave output alone if it's a list of lists.
1016        # We don't want such lists to be stringified.
1017        pass
1018      else:
1019        new_output = []
1020        for item in output:
1021          new_output.append(
1022              ExpandVariables(item, phase, variables, build_file))
1023        output = new_output
1024    else:
1025      output = ExpandVariables(output, phase, variables, build_file)
1026
1027  # Convert all strings that are canonically-represented integers into integers.
1028  if type(output) is list:
1029    for index in range(0, len(output)):
1030      if IsStrCanonicalInt(output[index]):
1031        output[index] = int(output[index])
1032  elif IsStrCanonicalInt(output):
1033    output = int(output)
1034
1035  return output
1036
1037# The same condition is often evaluated over and over again so it
1038# makes sense to cache as much as possible between evaluations.
1039cached_conditions_asts = {}
1040
1041def EvalCondition(condition, conditions_key, phase, variables, build_file):
1042  """Returns the dict that should be used or None if the result was
1043  that nothing should be used."""
1044  if type(condition) is not list:
1045    raise GypError(conditions_key + ' must be a list')
1046  if len(condition) < 2:
1047    # It's possible that condition[0] won't work in which case this
1048    # attempt will raise its own IndexError.  That's probably fine.
1049    raise GypError(conditions_key + ' ' + condition[0] +
1050                   ' must be at least length 2, not ' + str(len(condition)))
1051
1052  i = 0
1053  result = None
1054  while i < len(condition):
1055    cond_expr = condition[i]
1056    true_dict = condition[i + 1]
1057    if type(true_dict) is not dict:
1058      raise GypError('{} {} must be followed by a dictionary, not {}'.format(
1059        conditions_key, cond_expr, type(true_dict)))
1060    if len(condition) > i + 2 and type(condition[i + 2]) is dict:
1061      false_dict = condition[i + 2]
1062      i = i + 3
1063      if i != len(condition):
1064        raise GypError('{} {} has {} unexpected trailing items'.format(
1065          conditions_key, cond_expr, len(condition) - i))
1066    else:
1067      false_dict = None
1068      i = i + 2
1069    if result is None:
1070      result = EvalSingleCondition(
1071          cond_expr, true_dict, false_dict, phase, variables, build_file)
1072
1073  return result
1074
1075
1076def EvalSingleCondition(
1077    cond_expr, true_dict, false_dict, phase, variables, build_file):
1078  """Returns true_dict if cond_expr evaluates to true, and false_dict
1079  otherwise."""
1080  # Do expansions on the condition itself.  Since the condition can naturally
1081  # contain variable references without needing to resort to GYP expansion
1082  # syntax, this is of dubious value for variables, but someone might want to
1083  # use a command expansion directly inside a condition.
1084  cond_expr_expanded = ExpandVariables(cond_expr, phase, variables,
1085                                       build_file)
1086  if type(cond_expr_expanded) not in (str, int):
1087    raise ValueError(
1088          'Variable expansion in this context permits str and int ' + \
1089            'only, found ' + cond_expr_expanded.__class__.__name__)
1090
1091  try:
1092    if cond_expr_expanded in cached_conditions_asts:
1093      ast_code = cached_conditions_asts[cond_expr_expanded]
1094    else:
1095      ast_code = compile(cond_expr_expanded, '<string>', 'eval')
1096      cached_conditions_asts[cond_expr_expanded] = ast_code
1097    if eval(ast_code, {'__builtins__': {}}, variables):
1098      return true_dict
1099    return false_dict
1100  except SyntaxError as e:
1101    syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s '
1102                               'at character %d.' %
1103                               (str(e.args[0]), e.text, build_file, e.offset),
1104                               e.filename, e.lineno, e.offset, e.text)
1105    raise syntax_error
1106  except NameError as e:
1107    gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' %
1108                               (cond_expr_expanded, build_file))
1109    raise GypError(e)
1110
1111
1112def ProcessConditionsInDict(the_dict, phase, variables, build_file):
1113  # Process a 'conditions' or 'target_conditions' section in the_dict,
1114  # depending on phase.
1115  # early -> conditions
1116  # late -> target_conditions
1117  # latelate -> no conditions
1118  #
1119  # Each item in a conditions list consists of cond_expr, a string expression
1120  # evaluated as the condition, and true_dict, a dict that will be merged into
1121  # the_dict if cond_expr evaluates to true.  Optionally, a third item,
1122  # false_dict, may be present.  false_dict is merged into the_dict if
1123  # cond_expr evaluates to false.
1124  #
1125  # Any dict merged into the_dict will be recursively processed for nested
1126  # conditionals and other expansions, also according to phase, immediately
1127  # prior to being merged.
1128
1129  if phase == PHASE_EARLY:
1130    conditions_key = 'conditions'
1131  elif phase == PHASE_LATE:
1132    conditions_key = 'target_conditions'
1133  elif phase == PHASE_LATELATE:
1134    return
1135  else:
1136    assert False
1137
1138  if not conditions_key in the_dict:
1139    return
1140
1141  conditions_list = the_dict[conditions_key]
1142  # Unhook the conditions list, it's no longer needed.
1143  del the_dict[conditions_key]
1144
1145  for condition in conditions_list:
1146    merge_dict = EvalCondition(condition, conditions_key, phase, variables,
1147                               build_file)
1148
1149    if merge_dict != None:
1150      # Expand variables and nested conditinals in the merge_dict before
1151      # merging it.
1152      ProcessVariablesAndConditionsInDict(merge_dict, phase,
1153                                          variables, build_file)
1154
1155      MergeDicts(the_dict, merge_dict, build_file, build_file)
1156
1157
1158def LoadAutomaticVariablesFromDict(variables, the_dict):
1159  # Any keys with plain string values in the_dict become automatic variables.
1160  # The variable name is the key name with a "_" character prepended.
1161  for key, value in the_dict.items():
1162    if type(value) in (str, int, list):
1163      variables['_' + key] = value
1164
1165
1166def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key):
1167  # Any keys in the_dict's "variables" dict, if it has one, becomes a
1168  # variable.  The variable name is the key name in the "variables" dict.
1169  # Variables that end with the % character are set only if they are unset in
1170  # the variables dict.  the_dict_key is the name of the key that accesses
1171  # the_dict in the_dict's parent dict.  If the_dict's parent is not a dict
1172  # (it could be a list or it could be parentless because it is a root dict),
1173  # the_dict_key will be None.
1174  for key, value in the_dict.get('variables', {}).items():
1175    if type(value) not in (str, int, list):
1176      continue
1177
1178    if key.endswith('%'):
1179      variable_name = key[:-1]
1180      if variable_name in variables:
1181        # If the variable is already set, don't set it.
1182        continue
1183      if the_dict_key == 'variables' and variable_name in the_dict:
1184        # If the variable is set without a % in the_dict, and the_dict is a
1185        # variables dict (making |variables| a variables sub-dict of a
1186        # variables dict), use the_dict's definition.
1187        value = the_dict[variable_name]
1188    else:
1189      variable_name = key
1190
1191    variables[variable_name] = value
1192
1193
1194def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in,
1195                                        build_file, the_dict_key=None):
1196  """Handle all variable and command expansion and conditional evaluation.
1197
1198  This function is the public entry point for all variable expansions and
1199  conditional evaluations.  The variables_in dictionary will not be modified
1200  by this function.
1201  """
1202
1203  # Make a copy of the variables_in dict that can be modified during the
1204  # loading of automatics and the loading of the variables dict.
1205  variables = variables_in.copy()
1206  LoadAutomaticVariablesFromDict(variables, the_dict)
1207
1208  if 'variables' in the_dict:
1209    # Make sure all the local variables are added to the variables
1210    # list before we process them so that you can reference one
1211    # variable from another.  They will be fully expanded by recursion
1212    # in ExpandVariables.
1213    for key, value in the_dict['variables'].items():
1214      variables[key] = value
1215
1216    # Handle the associated variables dict first, so that any variable
1217    # references within can be resolved prior to using them as variables.
1218    # Pass a copy of the variables dict to avoid having it be tainted.
1219    # Otherwise, it would have extra automatics added for everything that
1220    # should just be an ordinary variable in this scope.
1221    ProcessVariablesAndConditionsInDict(the_dict['variables'], phase,
1222                                        variables, build_file, 'variables')
1223
1224  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1225
1226  for key, value in the_dict.items():
1227    # Skip "variables", which was already processed if present.
1228    if key != 'variables' and type(value) is str:
1229      expanded = ExpandVariables(value, phase, variables, build_file)
1230      if type(expanded) not in (str, int):
1231        raise ValueError(
1232              'Variable expansion in this context permits str and int ' + \
1233              'only, found ' + expanded.__class__.__name__ + ' for ' + key)
1234      the_dict[key] = expanded
1235
1236  # Variable expansion may have resulted in changes to automatics.  Reload.
1237  # TODO(mark): Optimization: only reload if no changes were made.
1238  variables = variables_in.copy()
1239  LoadAutomaticVariablesFromDict(variables, the_dict)
1240  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1241
1242  # Process conditions in this dict.  This is done after variable expansion
1243  # so that conditions may take advantage of expanded variables.  For example,
1244  # if the_dict contains:
1245  #   {'type':       '<(library_type)',
1246  #    'conditions': [['_type=="static_library"', { ... }]]},
1247  # _type, as used in the condition, will only be set to the value of
1248  # library_type if variable expansion is performed before condition
1249  # processing.  However, condition processing should occur prior to recursion
1250  # so that variables (both automatic and "variables" dict type) may be
1251  # adjusted by conditions sections, merged into the_dict, and have the
1252  # intended impact on contained dicts.
1253  #
1254  # This arrangement means that a "conditions" section containing a "variables"
1255  # section will only have those variables effective in subdicts, not in
1256  # the_dict.  The workaround is to put a "conditions" section within a
1257  # "variables" section.  For example:
1258  #   {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]],
1259  #    'defines':    ['<(define)'],
1260  #    'my_subdict': {'defines': ['<(define)']}},
1261  # will not result in "IS_MAC" being appended to the "defines" list in the
1262  # current scope but would result in it being appended to the "defines" list
1263  # within "my_subdict".  By comparison:
1264  #   {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]},
1265  #    'defines':    ['<(define)'],
1266  #    'my_subdict': {'defines': ['<(define)']}},
1267  # will append "IS_MAC" to both "defines" lists.
1268
1269  # Evaluate conditions sections, allowing variable expansions within them
1270  # as well as nested conditionals.  This will process a 'conditions' or
1271  # 'target_conditions' section, perform appropriate merging and recursive
1272  # conditional and variable processing, and then remove the conditions section
1273  # from the_dict if it is present.
1274  ProcessConditionsInDict(the_dict, phase, variables, build_file)
1275
1276  # Conditional processing may have resulted in changes to automatics or the
1277  # variables dict.  Reload.
1278  variables = variables_in.copy()
1279  LoadAutomaticVariablesFromDict(variables, the_dict)
1280  LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key)
1281
1282  # Recurse into child dicts, or process child lists which may result in
1283  # further recursion into descendant dicts.
1284  for key, value in the_dict.items():
1285    # Skip "variables" and string values, which were already processed if
1286    # present.
1287    if key == 'variables' or type(value) is str:
1288      continue
1289    if type(value) is dict:
1290      # Pass a copy of the variables dict so that subdicts can't influence
1291      # parents.
1292      ProcessVariablesAndConditionsInDict(value, phase, variables,
1293                                          build_file, key)
1294    elif type(value) is list:
1295      # The list itself can't influence the variables dict, and
1296      # ProcessVariablesAndConditionsInList will make copies of the variables
1297      # dict if it needs to pass it to something that can influence it.  No
1298      # copy is necessary here.
1299      ProcessVariablesAndConditionsInList(value, phase, variables,
1300                                          build_file)
1301    elif type(value) is not int:
1302      raise TypeError('Unknown type ' + value.__class__.__name__ + \
1303                      ' for ' + key)
1304
1305
1306def ProcessVariablesAndConditionsInList(the_list, phase, variables,
1307                                        build_file):
1308  # Iterate using an index so that new values can be assigned into the_list.
1309  index = 0
1310  while index < len(the_list):
1311    item = the_list[index]
1312    if type(item) is dict:
1313      # Make a copy of the variables dict so that it won't influence anything
1314      # outside of its own scope.
1315      ProcessVariablesAndConditionsInDict(item, phase, variables, build_file)
1316    elif type(item) is list:
1317      ProcessVariablesAndConditionsInList(item, phase, variables, build_file)
1318    elif type(item) is str:
1319      expanded = ExpandVariables(item, phase, variables, build_file)
1320      if type(expanded) in (str, int):
1321        the_list[index] = expanded
1322      elif type(expanded) is list:
1323        the_list[index:index+1] = expanded
1324        index += len(expanded)
1325
1326        # index now identifies the next item to examine.  Continue right now
1327        # without falling into the index increment below.
1328        continue
1329      else:
1330        raise ValueError(
1331              'Variable expansion in this context permits strings and ' + \
1332              'lists only, found ' + expanded.__class__.__name__ + ' at ' + \
1333              index)
1334    elif type(item) is not int:
1335      raise TypeError('Unknown type ' + item.__class__.__name__ + \
1336                      ' at index ' + index)
1337    index = index + 1
1338
1339
1340def BuildTargetsDict(data):
1341  """Builds a dict mapping fully-qualified target names to their target dicts.
1342
1343  |data| is a dict mapping loaded build files by pathname relative to the
1344  current directory.  Values in |data| are build file contents.  For each
1345  |data| value with a "targets" key, the value of the "targets" key is taken
1346  as a list containing target dicts.  Each target's fully-qualified name is
1347  constructed from the pathname of the build file (|data| key) and its
1348  "target_name" property.  These fully-qualified names are used as the keys
1349  in the returned dict.  These keys provide access to the target dicts,
1350  the dicts in the "targets" lists.
1351  """
1352
1353  targets = {}
1354  for build_file in data['target_build_files']:
1355    for target in data[build_file].get('targets', []):
1356      target_name = gyp.common.QualifiedTarget(build_file,
1357                                               target['target_name'],
1358                                               target['toolset'])
1359      if target_name in targets:
1360        raise GypError('Duplicate target definitions for ' + target_name)
1361      targets[target_name] = target
1362
1363  return targets
1364
1365
1366def QualifyDependencies(targets):
1367  """Make dependency links fully-qualified relative to the current directory.
1368
1369  |targets| is a dict mapping fully-qualified target names to their target
1370  dicts.  For each target in this dict, keys known to contain dependency
1371  links are examined, and any dependencies referenced will be rewritten
1372  so that they are fully-qualified and relative to the current directory.
1373  All rewritten dependencies are suitable for use as keys to |targets| or a
1374  similar dict.
1375  """
1376
1377  all_dependency_sections = [dep + op
1378                             for dep in dependency_sections
1379                             for op in ('', '!', '/')]
1380
1381  for target, target_dict in targets.items():
1382    target_build_file = gyp.common.BuildFile(target)
1383    toolset = target_dict['toolset']
1384    for dependency_key in all_dependency_sections:
1385      dependencies = target_dict.get(dependency_key, [])
1386      for index in range(0, len(dependencies)):
1387        dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget(
1388            target_build_file, dependencies[index], toolset)
1389        if not multiple_toolsets:
1390          # Ignore toolset specification in the dependency if it is specified.
1391          dep_toolset = toolset
1392        dependency = gyp.common.QualifiedTarget(dep_file,
1393                                                dep_target,
1394                                                dep_toolset)
1395        dependencies[index] = dependency
1396
1397        # Make sure anything appearing in a list other than "dependencies" also
1398        # appears in the "dependencies" list.
1399        if dependency_key != 'dependencies' and \
1400           dependency not in target_dict['dependencies']:
1401          raise GypError('Found ' + dependency + ' in ' + dependency_key +
1402                         ' of ' + target + ', but not in dependencies')
1403
1404
1405def ExpandWildcardDependencies(targets, data):
1406  """Expands dependencies specified as build_file:*.
1407
1408  For each target in |targets|, examines sections containing links to other
1409  targets.  If any such section contains a link of the form build_file:*, it
1410  is taken as a wildcard link, and is expanded to list each target in
1411  build_file.  The |data| dict provides access to build file dicts.
1412
1413  Any target that does not wish to be included by wildcard can provide an
1414  optional "suppress_wildcard" key in its target dict.  When present and
1415  true, a wildcard dependency link will not include such targets.
1416
1417  All dependency names, including the keys to |targets| and the values in each
1418  dependency list, must be qualified when this function is called.
1419  """
1420
1421  for target, target_dict in targets.items():
1422    toolset = target_dict['toolset']
1423    target_build_file = gyp.common.BuildFile(target)
1424    for dependency_key in dependency_sections:
1425      dependencies = target_dict.get(dependency_key, [])
1426
1427      # Loop this way instead of "for dependency in" or "for index in range"
1428      # because the dependencies list will be modified within the loop body.
1429      index = 0
1430      while index < len(dependencies):
1431        (dependency_build_file, dependency_target, dependency_toolset) = \
1432            gyp.common.ParseQualifiedTarget(dependencies[index])
1433        if dependency_target != '*' and dependency_toolset != '*':
1434          # Not a wildcard.  Keep it moving.
1435          index = index + 1
1436          continue
1437
1438        if dependency_build_file == target_build_file:
1439          # It's an error for a target to depend on all other targets in
1440          # the same file, because a target cannot depend on itself.
1441          raise GypError('Found wildcard in ' + dependency_key + ' of ' +
1442                         target + ' referring to same build file')
1443
1444        # Take the wildcard out and adjust the index so that the next
1445        # dependency in the list will be processed the next time through the
1446        # loop.
1447        del dependencies[index]
1448        index = index - 1
1449
1450        # Loop through the targets in the other build file, adding them to
1451        # this target's list of dependencies in place of the removed
1452        # wildcard.
1453        dependency_target_dicts = data[dependency_build_file]['targets']
1454        for dependency_target_dict in dependency_target_dicts:
1455          if int(dependency_target_dict.get('suppress_wildcard', False)):
1456            continue
1457          dependency_target_name = dependency_target_dict['target_name']
1458          if (dependency_target != '*' and
1459              dependency_target != dependency_target_name):
1460            continue
1461          dependency_target_toolset = dependency_target_dict['toolset']
1462          if (dependency_toolset != '*' and
1463              dependency_toolset != dependency_target_toolset):
1464            continue
1465          dependency = gyp.common.QualifiedTarget(dependency_build_file,
1466                                                  dependency_target_name,
1467                                                  dependency_target_toolset)
1468          index = index + 1
1469          dependencies.insert(index, dependency)
1470
1471        index = index + 1
1472
1473
1474def Unify(l):
1475  """Removes duplicate elements from l, keeping the first element."""
1476  seen = {}
1477  return [seen.setdefault(e, e) for e in l if e not in seen]
1478
1479
1480def RemoveDuplicateDependencies(targets):
1481  """Makes sure every dependency appears only once in all targets's dependency
1482  lists."""
1483  for target_name, target_dict in targets.items():
1484    for dependency_key in dependency_sections:
1485      dependencies = target_dict.get(dependency_key, [])
1486      if dependencies:
1487        target_dict[dependency_key] = Unify(dependencies)
1488
1489
1490def Filter(l, item):
1491  """Removes item from l."""
1492  res = {}
1493  return [res.setdefault(e, e) for e in l if e != item]
1494
1495
1496def RemoveSelfDependencies(targets):
1497  """Remove self dependencies from targets that have the prune_self_dependency
1498  variable set."""
1499  for target_name, target_dict in targets.items():
1500    for dependency_key in dependency_sections:
1501      dependencies = target_dict.get(dependency_key, [])
1502      if dependencies:
1503        for t in dependencies:
1504          if t == target_name:
1505            if targets[t].get('variables', {}).get('prune_self_dependency', 0):
1506              target_dict[dependency_key] = Filter(dependencies, target_name)
1507
1508
1509def RemoveLinkDependenciesFromNoneTargets(targets):
1510  """Remove dependencies having the 'link_dependency' attribute from the 'none'
1511  targets."""
1512  for target_name, target_dict in targets.items():
1513    for dependency_key in dependency_sections:
1514      dependencies = target_dict.get(dependency_key, [])
1515      if dependencies:
1516        for t in dependencies:
1517          if target_dict.get('type', None) == 'none':
1518            if targets[t].get('variables', {}).get('link_dependency', 0):
1519              target_dict[dependency_key] = \
1520                  Filter(target_dict[dependency_key], t)
1521
1522
1523class DependencyGraphNode(object):
1524  """
1525
1526  Attributes:
1527    ref: A reference to an object that this DependencyGraphNode represents.
1528    dependencies: List of DependencyGraphNodes on which this one depends.
1529    dependents: List of DependencyGraphNodes that depend on this one.
1530  """
1531
1532  class CircularException(GypError):
1533    pass
1534
1535  def __init__(self, ref):
1536    self.ref = ref
1537    self.dependencies = []
1538    self.dependents = []
1539
1540  def __repr__(self):
1541    return '<DependencyGraphNode: %r>' % self.ref
1542
1543  def FlattenToList(self):
1544    # flat_list is the sorted list of dependencies - actually, the list items
1545    # are the "ref" attributes of DependencyGraphNodes.  Every target will
1546    # appear in flat_list after all of its dependencies, and before all of its
1547    # dependents.
1548    flat_list = OrderedSet()
1549
1550    # in_degree_zeros is the list of DependencyGraphNodes that have no
1551    # dependencies not in flat_list.  Initially, it is a copy of the children
1552    # of this node, because when the graph was built, nodes with no
1553    # dependencies were made implicit dependents of the root node.
1554    in_degree_zeros = set(self.dependents[:])
1555
1556    while in_degree_zeros:
1557      # Nodes in in_degree_zeros have no dependencies not in flat_list, so they
1558      # can be appended to flat_list.  Take these nodes out of in_degree_zeros
1559      # as work progresses, so that the next node to process from the list can
1560      # always be accessed at a consistent position.
1561      node = in_degree_zeros.pop()
1562      flat_list.add(node.ref)
1563
1564      # Look at dependents of the node just added to flat_list.  Some of them
1565      # may now belong in in_degree_zeros.
1566      for node_dependent in node.dependents:
1567        is_in_degree_zero = True
1568        # TODO: We want to check through the
1569        # node_dependent.dependencies list but if it's long and we
1570        # always start at the beginning, then we get O(n^2) behaviour.
1571        for node_dependent_dependency in node_dependent.dependencies:
1572          if not node_dependent_dependency.ref in flat_list:
1573            # The dependent one or more dependencies not in flat_list.  There
1574            # will be more chances to add it to flat_list when examining
1575            # it again as a dependent of those other dependencies, provided
1576            # that there are no cycles.
1577            is_in_degree_zero = False
1578            break
1579
1580        if is_in_degree_zero:
1581          # All of the dependent's dependencies are already in flat_list.  Add
1582          # it to in_degree_zeros where it will be processed in a future
1583          # iteration of the outer loop.
1584          in_degree_zeros.add(node_dependent)
1585
1586    return list(flat_list)
1587
1588  def FindCycles(self):
1589    """
1590    Returns a list of cycles in the graph, where each cycle is its own list.
1591    """
1592    results = []
1593    visited = set()
1594
1595    def Visit(node, path):
1596      for child in node.dependents:
1597        if child in path:
1598          results.append([child] + path[:path.index(child) + 1])
1599        elif not child in visited:
1600          visited.add(child)
1601          Visit(child, [child] + path)
1602
1603    visited.add(self)
1604    Visit(self, [self])
1605
1606    return results
1607
1608  def DirectDependencies(self, dependencies=None):
1609    """Returns a list of just direct dependencies."""
1610    if dependencies is None:
1611      dependencies = []
1612
1613    for dependency in self.dependencies:
1614      # Check for None, corresponding to the root node.
1615      if dependency.ref != None and dependency.ref not in dependencies:
1616        dependencies.append(dependency.ref)
1617
1618    return dependencies
1619
1620  def _AddImportedDependencies(self, targets, dependencies=None):
1621    """Given a list of direct dependencies, adds indirect dependencies that
1622    other dependencies have declared to export their settings.
1623
1624    This method does not operate on self.  Rather, it operates on the list
1625    of dependencies in the |dependencies| argument.  For each dependency in
1626    that list, if any declares that it exports the settings of one of its
1627    own dependencies, those dependencies whose settings are "passed through"
1628    are added to the list.  As new items are added to the list, they too will
1629    be processed, so it is possible to import settings through multiple levels
1630    of dependencies.
1631
1632    This method is not terribly useful on its own, it depends on being
1633    "primed" with a list of direct dependencies such as one provided by
1634    DirectDependencies.  DirectAndImportedDependencies is intended to be the
1635    public entry point.
1636    """
1637
1638    if dependencies is None:
1639      dependencies = []
1640
1641    index = 0
1642    while index < len(dependencies):
1643      dependency = dependencies[index]
1644      dependency_dict = targets[dependency]
1645      # Add any dependencies whose settings should be imported to the list
1646      # if not already present.  Newly-added items will be checked for
1647      # their own imports when the list iteration reaches them.
1648      # Rather than simply appending new items, insert them after the
1649      # dependency that exported them.  This is done to more closely match
1650      # the depth-first method used by DeepDependencies.
1651      add_index = 1
1652      for imported_dependency in \
1653          dependency_dict.get('export_dependent_settings', []):
1654        if imported_dependency not in dependencies:
1655          dependencies.insert(index + add_index, imported_dependency)
1656          add_index = add_index + 1
1657      index = index + 1
1658
1659    return dependencies
1660
1661  def DirectAndImportedDependencies(self, targets, dependencies=None):
1662    """Returns a list of a target's direct dependencies and all indirect
1663    dependencies that a dependency has advertised settings should be exported
1664    through the dependency for.
1665    """
1666
1667    dependencies = self.DirectDependencies(dependencies)
1668    return self._AddImportedDependencies(targets, dependencies)
1669
1670  def DeepDependencies(self, dependencies=None):
1671    """Returns an OrderedSet of all of a target's dependencies, recursively."""
1672    if dependencies is None:
1673      # Using a list to get ordered output and a set to do fast "is it
1674      # already added" checks.
1675      dependencies = OrderedSet()
1676
1677    for dependency in self.dependencies:
1678      # Check for None, corresponding to the root node.
1679      if dependency.ref is None:
1680        continue
1681      if dependency.ref not in dependencies:
1682        dependency.DeepDependencies(dependencies)
1683        dependencies.add(dependency.ref)
1684
1685    return dependencies
1686
1687  def _LinkDependenciesInternal(self, targets, include_shared_libraries,
1688                                dependencies=None, initial=True):
1689    """Returns an OrderedSet of dependency targets that are linked
1690    into this target.
1691
1692    This function has a split personality, depending on the setting of
1693    |initial|.  Outside callers should always leave |initial| at its default
1694    setting.
1695
1696    When adding a target to the list of dependencies, this function will
1697    recurse into itself with |initial| set to False, to collect dependencies
1698    that are linked into the linkable target for which the list is being built.
1699
1700    If |include_shared_libraries| is False, the resulting dependencies will not
1701    include shared_library targets that are linked into this target.
1702    """
1703    if dependencies is None:
1704      # Using a list to get ordered output and a set to do fast "is it
1705      # already added" checks.
1706      dependencies = OrderedSet()
1707
1708    # Check for None, corresponding to the root node.
1709    if self.ref is None:
1710      return dependencies
1711
1712    # It's kind of sucky that |targets| has to be passed into this function,
1713    # but that's presently the easiest way to access the target dicts so that
1714    # this function can find target types.
1715
1716    if 'target_name' not in targets[self.ref]:
1717      raise GypError("Missing 'target_name' field in target.")
1718
1719    if 'type' not in targets[self.ref]:
1720      raise GypError("Missing 'type' field in target %s" %
1721                     targets[self.ref]['target_name'])
1722
1723    target_type = targets[self.ref]['type']
1724
1725    is_linkable = target_type in linkable_types
1726
1727    if initial and not is_linkable:
1728      # If this is the first target being examined and it's not linkable,
1729      # return an empty list of link dependencies, because the link
1730      # dependencies are intended to apply to the target itself (initial is
1731      # True) and this target won't be linked.
1732      return dependencies
1733
1734    # Don't traverse 'none' targets if explicitly excluded.
1735    if (target_type == 'none' and
1736        not targets[self.ref].get('dependencies_traverse', True)):
1737      dependencies.add(self.ref)
1738      return dependencies
1739
1740    # Executables, mac kernel extensions and loadable modules are already fully
1741    # and finally linked. Nothing else can be a link dependency of them, there
1742    # can only be dependencies in the sense that a dependent target might run
1743    # an executable or load the loadable_module.
1744    if not initial and target_type in ('executable', 'loadable_module',
1745                                       'mac_kernel_extension'):
1746      return dependencies
1747
1748    # Shared libraries are already fully linked.  They should only be included
1749    # in |dependencies| when adjusting static library dependencies (in order to
1750    # link against the shared_library's import lib), but should not be included
1751    # in |dependencies| when propagating link_settings.
1752    # The |include_shared_libraries| flag controls which of these two cases we
1753    # are handling.
1754    if (not initial and target_type == 'shared_library' and
1755        not include_shared_libraries):
1756      return dependencies
1757
1758    # The target is linkable, add it to the list of link dependencies.
1759    if self.ref not in dependencies:
1760      dependencies.add(self.ref)
1761      if initial or not is_linkable:
1762        # If this is a subsequent target and it's linkable, don't look any
1763        # further for linkable dependencies, as they'll already be linked into
1764        # this target linkable.  Always look at dependencies of the initial
1765        # target, and always look at dependencies of non-linkables.
1766        for dependency in self.dependencies:
1767          dependency._LinkDependenciesInternal(targets,
1768                                               include_shared_libraries,
1769                                               dependencies, False)
1770
1771    return dependencies
1772
1773  def DependenciesForLinkSettings(self, targets):
1774    """
1775    Returns a list of dependency targets whose link_settings should be merged
1776    into this target.
1777    """
1778
1779    # TODO(sbaig) Currently, chrome depends on the bug that shared libraries'
1780    # link_settings are propagated.  So for now, we will allow it, unless the
1781    # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to
1782    # False.  Once chrome is fixed, we can remove this flag.
1783    include_shared_libraries = \
1784        targets[self.ref].get('allow_sharedlib_linksettings_propagation', True)
1785    return self._LinkDependenciesInternal(targets, include_shared_libraries)
1786
1787  def DependenciesToLinkAgainst(self, targets):
1788    """
1789    Returns a list of dependency targets that are linked into this target.
1790    """
1791    return self._LinkDependenciesInternal(targets, True)
1792
1793
1794def BuildDependencyList(targets):
1795  # Create a DependencyGraphNode for each target.  Put it into a dict for easy
1796  # access.
1797  dependency_nodes = {}
1798  for target, spec in targets.items():
1799    if target not in dependency_nodes:
1800      dependency_nodes[target] = DependencyGraphNode(target)
1801
1802  # Set up the dependency links.  Targets that have no dependencies are treated
1803  # as dependent on root_node.
1804  root_node = DependencyGraphNode(None)
1805  for target, spec in targets.items():
1806    target_node = dependency_nodes[target]
1807    target_build_file = gyp.common.BuildFile(target)
1808    dependencies = spec.get('dependencies')
1809    if not dependencies:
1810      target_node.dependencies = [root_node]
1811      root_node.dependents.append(target_node)
1812    else:
1813      for dependency in dependencies:
1814        dependency_node = dependency_nodes.get(dependency)
1815        if not dependency_node:
1816          raise GypError("Dependency '%s' not found while "
1817                         "trying to load target %s" % (dependency, target))
1818        target_node.dependencies.append(dependency_node)
1819        dependency_node.dependents.append(target_node)
1820
1821  flat_list = root_node.FlattenToList()
1822
1823  # If there's anything left unvisited, there must be a circular dependency
1824  # (cycle).
1825  if len(flat_list) != len(targets):
1826    if not root_node.dependents:
1827      # If all targets have dependencies, add the first target as a dependent
1828      # of root_node so that the cycle can be discovered from root_node.
1829      target = targets.keys()[0]
1830      target_node = dependency_nodes[target]
1831      target_node.dependencies.append(root_node)
1832      root_node.dependents.append(target_node)
1833
1834    cycles = []
1835    for cycle in root_node.FindCycles():
1836      paths = [node.ref for node in cycle]
1837      cycles.append('Cycle: %s' % ' -> '.join(paths))
1838    raise DependencyGraphNode.CircularException(
1839        'Cycles in dependency graph detected:\n' + '\n'.join(cycles))
1840
1841  return [dependency_nodes, flat_list]
1842
1843
1844def VerifyNoGYPFileCircularDependencies(targets):
1845  # Create a DependencyGraphNode for each gyp file containing a target.  Put
1846  # it into a dict for easy access.
1847  dependency_nodes = {}
1848  for target in targets:
1849    build_file = gyp.common.BuildFile(target)
1850    if not build_file in dependency_nodes:
1851      dependency_nodes[build_file] = DependencyGraphNode(build_file)
1852
1853  # Set up the dependency links.
1854  for target, spec in targets.items():
1855    build_file = gyp.common.BuildFile(target)
1856    build_file_node = dependency_nodes[build_file]
1857    target_dependencies = spec.get('dependencies', [])
1858    for dependency in target_dependencies:
1859      try:
1860        dependency_build_file = gyp.common.BuildFile(dependency)
1861      except GypError as e:
1862        gyp.common.ExceptionAppend(
1863            e, 'while computing dependencies of .gyp file %s' % build_file)
1864        raise
1865
1866      if dependency_build_file == build_file:
1867        # A .gyp file is allowed to refer back to itself.
1868        continue
1869      dependency_node = dependency_nodes.get(dependency_build_file)
1870      if not dependency_node:
1871        raise GypError("Dependency '%s' not found" % dependency_build_file)
1872      if dependency_node not in build_file_node.dependencies:
1873        build_file_node.dependencies.append(dependency_node)
1874        dependency_node.dependents.append(build_file_node)
1875
1876
1877  # Files that have no dependencies are treated as dependent on root_node.
1878  root_node = DependencyGraphNode(None)
1879  for build_file_node in dependency_nodes.values():
1880    if len(build_file_node.dependencies) == 0:
1881      build_file_node.dependencies.append(root_node)
1882      root_node.dependents.append(build_file_node)
1883
1884  flat_list = root_node.FlattenToList()
1885
1886  # If there's anything left unvisited, there must be a circular dependency
1887  # (cycle).
1888  if len(flat_list) != len(dependency_nodes):
1889    if not root_node.dependents:
1890      # If all files have dependencies, add the first file as a dependent
1891      # of root_node so that the cycle can be discovered from root_node.
1892      file_node = dependency_nodes.values()[0]
1893      file_node.dependencies.append(root_node)
1894      root_node.dependents.append(file_node)
1895    cycles = []
1896    for cycle in root_node.FindCycles():
1897      paths = [node.ref for node in cycle]
1898      cycles.append('Cycle: %s' % ' -> '.join(paths))
1899    raise DependencyGraphNode.CircularException(
1900        'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles))
1901
1902
1903def DoDependentSettings(key, flat_list, targets, dependency_nodes):
1904  # key should be one of all_dependent_settings, direct_dependent_settings,
1905  # or link_settings.
1906
1907  for target in flat_list:
1908    target_dict = targets[target]
1909    build_file = gyp.common.BuildFile(target)
1910
1911    if key == 'all_dependent_settings':
1912      dependencies = dependency_nodes[target].DeepDependencies()
1913    elif key == 'direct_dependent_settings':
1914      dependencies = \
1915          dependency_nodes[target].DirectAndImportedDependencies(targets)
1916    elif key == 'link_settings':
1917      dependencies = \
1918          dependency_nodes[target].DependenciesForLinkSettings(targets)
1919    else:
1920      raise GypError("DoDependentSettings doesn't know how to determine "
1921                      'dependencies for ' + key)
1922
1923    for dependency in dependencies:
1924      dependency_dict = targets[dependency]
1925      if not key in dependency_dict:
1926        continue
1927      dependency_build_file = gyp.common.BuildFile(dependency)
1928      MergeDicts(target_dict, dependency_dict[key],
1929                 build_file, dependency_build_file)
1930
1931
1932def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
1933                                    sort_dependencies):
1934  # Recompute target "dependencies" properties.  For each static library
1935  # target, remove "dependencies" entries referring to other static libraries,
1936  # unless the dependency has the "hard_dependency" attribute set.  For each
1937  # linkable target, add a "dependencies" entry referring to all of the
1938  # target's computed list of link dependencies (including static libraries
1939  # if no such entry is already present.
1940  for target in flat_list:
1941    target_dict = targets[target]
1942    target_type = target_dict['type']
1943
1944    if target_type == 'static_library':
1945      if not 'dependencies' in target_dict:
1946        continue
1947
1948      target_dict['dependencies_original'] = target_dict.get(
1949          'dependencies', [])[:]
1950
1951      # A static library should not depend on another static library unless
1952      # the dependency relationship is "hard," which should only be done when
1953      # a dependent relies on some side effect other than just the build
1954      # product, like a rule or action output. Further, if a target has a
1955      # non-hard dependency, but that dependency exports a hard dependency,
1956      # the non-hard dependency can safely be removed, but the exported hard
1957      # dependency must be added to the target to keep the same dependency
1958      # ordering.
1959      dependencies = \
1960          dependency_nodes[target].DirectAndImportedDependencies(targets)
1961      index = 0
1962      while index < len(dependencies):
1963        dependency = dependencies[index]
1964        dependency_dict = targets[dependency]
1965
1966        # Remove every non-hard static library dependency and remove every
1967        # non-static library dependency that isn't a direct dependency.
1968        if (dependency_dict['type'] == 'static_library' and \
1969            not dependency_dict.get('hard_dependency', False)) or \
1970           (dependency_dict['type'] != 'static_library' and \
1971            not dependency in target_dict['dependencies']):
1972          # Take the dependency out of the list, and don't increment index
1973          # because the next dependency to analyze will shift into the index
1974          # formerly occupied by the one being removed.
1975          del dependencies[index]
1976        else:
1977          index = index + 1
1978
1979      # Update the dependencies. If the dependencies list is empty, it's not
1980      # needed, so unhook it.
1981      if len(dependencies) > 0:
1982        target_dict['dependencies'] = dependencies
1983      else:
1984        del target_dict['dependencies']
1985
1986    elif target_type in linkable_types:
1987      # Get a list of dependency targets that should be linked into this
1988      # target.  Add them to the dependencies list if they're not already
1989      # present.
1990
1991      link_dependencies = \
1992          dependency_nodes[target].DependenciesToLinkAgainst(targets)
1993      for dependency in link_dependencies:
1994        if dependency == target:
1995          continue
1996        if not 'dependencies' in target_dict:
1997          target_dict['dependencies'] = []
1998        if not dependency in target_dict['dependencies']:
1999          target_dict['dependencies'].append(dependency)
2000      # Sort the dependencies list in the order from dependents to dependencies.
2001      # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D.
2002      # Note: flat_list is already sorted in the order from dependencies to
2003      # dependents.
2004      if sort_dependencies and 'dependencies' in target_dict:
2005        target_dict['dependencies'] = [dep for dep in reversed(flat_list)
2006                                       if dep in target_dict['dependencies']]
2007
2008
2009# Initialize this here to speed up MakePathRelative.
2010exception_re = re.compile(r'''["']?[-/$<>^]''')
2011
2012
2013def MakePathRelative(to_file, fro_file, item):
2014  # If item is a relative path, it's relative to the build file dict that it's
2015  # coming from.  Fix it up to make it relative to the build file dict that
2016  # it's going into.
2017  # Exception: any |item| that begins with these special characters is
2018  # returned without modification.
2019  #   /   Used when a path is already absolute (shortcut optimization;
2020  #       such paths would be returned as absolute anyway)
2021  #   $   Used for build environment variables
2022  #   -   Used for some build environment flags (such as -lapr-1 in a
2023  #       "libraries" section)
2024  #   <   Used for our own variable and command expansions (see ExpandVariables)
2025  #   >   Used for our own variable and command expansions (see ExpandVariables)
2026  #   ^   Used for our own variable and command expansions (see ExpandVariables)
2027  #
2028  #   "/' Used when a value is quoted.  If these are present, then we
2029  #       check the second character instead.
2030  #
2031  if to_file == fro_file or exception_re.match(item):
2032    return item
2033  else:
2034    # TODO(dglazkov) The backslash/forward-slash replacement at the end is a
2035    # temporary measure. This should really be addressed by keeping all paths
2036    # in POSIX until actual project generation.
2037    ret = os.path.normpath(os.path.join(
2038        gyp.common.RelativePath(os.path.dirname(fro_file),
2039                                os.path.dirname(to_file)),
2040                                item)).replace('\\', '/')
2041    if item[-1:] == '/':
2042      ret += '/'
2043    return ret
2044
2045def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True):
2046  # Python documentation recommends objects which do not support hash
2047  # set this value to None. Python library objects follow this rule.
2048  is_hashable = lambda val: val.__hash__
2049
2050  # If x is hashable, returns whether x is in s. Else returns whether x is in l.
2051  def is_in_set_or_list(x, s, l):
2052    if is_hashable(x):
2053      return x in s
2054    return x in l
2055
2056  prepend_index = 0
2057
2058  # Make membership testing of hashables in |to| (in particular, strings)
2059  # faster.
2060  hashable_to_set = set(x for x in to if is_hashable(x))
2061  for item in fro:
2062    singleton = False
2063    if type(item) in (str, int):
2064      # The cheap and easy case.
2065      if is_paths:
2066        to_item = MakePathRelative(to_file, fro_file, item)
2067      else:
2068        to_item = item
2069
2070      if not (type(item) is str and item.startswith('-')):
2071        # Any string that doesn't begin with a "-" is a singleton - it can
2072        # only appear once in a list, to be enforced by the list merge append
2073        # or prepend.
2074        singleton = True
2075    elif type(item) is dict:
2076      # Make a copy of the dictionary, continuing to look for paths to fix.
2077      # The other intelligent aspects of merge processing won't apply because
2078      # item is being merged into an empty dict.
2079      to_item = {}
2080      MergeDicts(to_item, item, to_file, fro_file)
2081    elif type(item) is list:
2082      # Recurse, making a copy of the list.  If the list contains any
2083      # descendant dicts, path fixing will occur.  Note that here, custom
2084      # values for is_paths and append are dropped; those are only to be
2085      # applied to |to| and |fro|, not sublists of |fro|.  append shouldn't
2086      # matter anyway because the new |to_item| list is empty.
2087      to_item = []
2088      MergeLists(to_item, item, to_file, fro_file)
2089    else:
2090      raise TypeError(
2091          'Attempt to merge list item of unsupported type ' + \
2092          item.__class__.__name__)
2093
2094    if append:
2095      # If appending a singleton that's already in the list, don't append.
2096      # This ensures that the earliest occurrence of the item will stay put.
2097      if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to):
2098        to.append(to_item)
2099        if is_hashable(to_item):
2100          hashable_to_set.add(to_item)
2101    else:
2102      # If prepending a singleton that's already in the list, remove the
2103      # existing instance and proceed with the prepend.  This ensures that the
2104      # item appears at the earliest possible position in the list.
2105      while singleton and to_item in to:
2106        to.remove(to_item)
2107
2108      # Don't just insert everything at index 0.  That would prepend the new
2109      # items to the list in reverse order, which would be an unwelcome
2110      # surprise.
2111      to.insert(prepend_index, to_item)
2112      if is_hashable(to_item):
2113        hashable_to_set.add(to_item)
2114      prepend_index = prepend_index + 1
2115
2116
2117def MergeDicts(to, fro, to_file, fro_file):
2118  # I wanted to name the parameter "from" but it's a Python keyword...
2119  for k, v in fro.items():
2120    # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give
2121    # copy semantics.  Something else may want to merge from the |fro| dict
2122    # later, and having the same dict ref pointed to twice in the tree isn't
2123    # what anyone wants considering that the dicts may subsequently be
2124    # modified.
2125    if k in to:
2126      bad_merge = False
2127      if type(v) in (str, int):
2128        if type(to[k]) not in (str, int):
2129          bad_merge = True
2130      elif type(v) is not type(to[k]):
2131        bad_merge = True
2132
2133      if bad_merge:
2134        raise TypeError(
2135            'Attempt to merge dict value of type ' + v.__class__.__name__ + \
2136            ' into incompatible type ' + to[k].__class__.__name__ + \
2137            ' for key ' + k)
2138    if type(v) in (str, int):
2139      # Overwrite the existing value, if any.  Cheap and easy.
2140      is_path = IsPathSection(k)
2141      if is_path:
2142        to[k] = MakePathRelative(to_file, fro_file, v)
2143      else:
2144        to[k] = v
2145    elif type(v) is dict:
2146      # Recurse, guaranteeing copies will be made of objects that require it.
2147      if not k in to:
2148        to[k] = {}
2149      MergeDicts(to[k], v, to_file, fro_file)
2150    elif type(v) is list:
2151      # Lists in dicts can be merged with different policies, depending on
2152      # how the key in the "from" dict (k, the from-key) is written.
2153      #
2154      # If the from-key has          ...the to-list will have this action
2155      # this character appended:...     applied when receiving the from-list:
2156      #                           =  replace
2157      #                           +  prepend
2158      #                           ?  set, only if to-list does not yet exist
2159      #                      (none)  append
2160      #
2161      # This logic is list-specific, but since it relies on the associated
2162      # dict key, it's checked in this dict-oriented function.
2163      ext = k[-1]
2164      append = True
2165      if ext == '=':
2166        list_base = k[:-1]
2167        lists_incompatible = [list_base, list_base + '?']
2168        to[list_base] = []
2169      elif ext == '+':
2170        list_base = k[:-1]
2171        lists_incompatible = [list_base + '=', list_base + '?']
2172        append = False
2173      elif ext == '?':
2174        list_base = k[:-1]
2175        lists_incompatible = [list_base, list_base + '=', list_base + '+']
2176      else:
2177        list_base = k
2178        lists_incompatible = [list_base + '=', list_base + '?']
2179
2180      # Some combinations of merge policies appearing together are meaningless.
2181      # It's stupid to replace and append simultaneously, for example.  Append
2182      # and prepend are the only policies that can coexist.
2183      for list_incompatible in lists_incompatible:
2184        if list_incompatible in fro:
2185          raise GypError('Incompatible list policies ' + k + ' and ' +
2186                         list_incompatible)
2187
2188      if list_base in to:
2189        if ext == '?':
2190          # If the key ends in "?", the list will only be merged if it doesn't
2191          # already exist.
2192          continue
2193        elif type(to[list_base]) is not list:
2194          # This may not have been checked above if merging in a list with an
2195          # extension character.
2196          raise TypeError(
2197              'Attempt to merge dict value of type ' + v.__class__.__name__ + \
2198              ' into incompatible type ' + to[list_base].__class__.__name__ + \
2199              ' for key ' + list_base + '(' + k + ')')
2200      else:
2201        to[list_base] = []
2202
2203      # Call MergeLists, which will make copies of objects that require it.
2204      # MergeLists can recurse back into MergeDicts, although this will be
2205      # to make copies of dicts (with paths fixed), there will be no
2206      # subsequent dict "merging" once entering a list because lists are
2207      # always replaced, appended to, or prepended to.
2208      is_paths = IsPathSection(list_base)
2209      MergeLists(to[list_base], v, to_file, fro_file, is_paths, append)
2210    else:
2211      raise TypeError(
2212          'Attempt to merge dict value of unsupported type ' + \
2213          v.__class__.__name__ + ' for key ' + k)
2214
2215
2216def MergeConfigWithInheritance(new_configuration_dict, build_file,
2217                               target_dict, configuration, visited):
2218  # Skip if previously visted.
2219  if configuration in visited:
2220    return
2221
2222  # Look at this configuration.
2223  configuration_dict = target_dict['configurations'][configuration]
2224
2225  # Merge in parents.
2226  for parent in configuration_dict.get('inherit_from', []):
2227    MergeConfigWithInheritance(new_configuration_dict, build_file,
2228                               target_dict, parent, visited + [configuration])
2229
2230  # Merge it into the new config.
2231  MergeDicts(new_configuration_dict, configuration_dict,
2232             build_file, build_file)
2233
2234  # Drop abstract.
2235  if 'abstract' in new_configuration_dict:
2236    del new_configuration_dict['abstract']
2237
2238
2239def SetUpConfigurations(target, target_dict):
2240  # key_suffixes is a list of key suffixes that might appear on key names.
2241  # These suffixes are handled in conditional evaluations (for =, +, and ?)
2242  # and rules/exclude processing (for ! and /).  Keys with these suffixes
2243  # should be treated the same as keys without.
2244  key_suffixes = ['=', '+', '?', '!', '/']
2245
2246  build_file = gyp.common.BuildFile(target)
2247
2248  # Provide a single configuration by default if none exists.
2249  # TODO(mark): Signal an error if default_configurations exists but
2250  # configurations does not.
2251  if not 'configurations' in target_dict:
2252    target_dict['configurations'] = {'Default': {}}
2253  if not 'default_configuration' in target_dict:
2254    concrete = [i for (i, config) in target_dict['configurations'].items()
2255                if not config.get('abstract')]
2256    target_dict['default_configuration'] = sorted(concrete)[0]
2257
2258  merged_configurations = {}
2259  configs = target_dict['configurations']
2260  for (configuration, old_configuration_dict) in configs.items():
2261    # Skip abstract configurations (saves work only).
2262    if old_configuration_dict.get('abstract'):
2263      continue
2264    # Configurations inherit (most) settings from the enclosing target scope.
2265    # Get the inheritance relationship right by making a copy of the target
2266    # dict.
2267    new_configuration_dict = {}
2268    for (key, target_val) in target_dict.items():
2269      key_ext = key[-1:]
2270      if key_ext in key_suffixes:
2271        key_base = key[:-1]
2272      else:
2273        key_base = key
2274      if not key_base in non_configuration_keys:
2275        new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val)
2276
2277    # Merge in configuration (with all its parents first).
2278    MergeConfigWithInheritance(new_configuration_dict, build_file,
2279                               target_dict, configuration, [])
2280
2281    merged_configurations[configuration] = new_configuration_dict
2282
2283  # Put the new configurations back into the target dict as a configuration.
2284  for configuration in merged_configurations.keys():
2285    target_dict['configurations'][configuration] = (
2286        merged_configurations[configuration])
2287
2288  # Now drop all the abstract ones.
2289  for configuration in list(target_dict['configurations']):
2290    old_configuration_dict = target_dict['configurations'][configuration]
2291    if old_configuration_dict.get('abstract'):
2292      del target_dict['configurations'][configuration]
2293
2294  # Now that all of the target's configurations have been built, go through
2295  # the target dict's keys and remove everything that's been moved into a
2296  # "configurations" section.
2297  delete_keys = []
2298  for key in target_dict:
2299    key_ext = key[-1:]
2300    if key_ext in key_suffixes:
2301      key_base = key[:-1]
2302    else:
2303      key_base = key
2304    if not key_base in non_configuration_keys:
2305      delete_keys.append(key)
2306  for key in delete_keys:
2307    del target_dict[key]
2308
2309  # Check the configurations to see if they contain invalid keys.
2310  for configuration in target_dict['configurations'].keys():
2311    configuration_dict = target_dict['configurations'][configuration]
2312    for key in configuration_dict.keys():
2313      if key in invalid_configuration_keys:
2314        raise GypError('%s not allowed in the %s configuration, found in '
2315                       'target %s' % (key, configuration, target))
2316
2317
2318
2319def ProcessListFiltersInDict(name, the_dict):
2320  """Process regular expression and exclusion-based filters on lists.
2321
2322  An exclusion list is in a dict key named with a trailing "!", like
2323  "sources!".  Every item in such a list is removed from the associated
2324  main list, which in this example, would be "sources".  Removed items are
2325  placed into a "sources_excluded" list in the dict.
2326
2327  Regular expression (regex) filters are contained in dict keys named with a
2328  trailing "/", such as "sources/" to operate on the "sources" list.  Regex
2329  filters in a dict take the form:
2330    'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'],
2331                  ['include', '_mac\\.cc$'] ],
2332  The first filter says to exclude all files ending in _linux.cc, _mac.cc, and
2333  _win.cc.  The second filter then includes all files ending in _mac.cc that
2334  are now or were once in the "sources" list.  Items matching an "exclude"
2335  filter are subject to the same processing as would occur if they were listed
2336  by name in an exclusion list (ending in "!").  Items matching an "include"
2337  filter are brought back into the main list if previously excluded by an
2338  exclusion list or exclusion regex filter.  Subsequent matching "exclude"
2339  patterns can still cause items to be excluded after matching an "include".
2340  """
2341
2342  # Look through the dictionary for any lists whose keys end in "!" or "/".
2343  # These are lists that will be treated as exclude lists and regular
2344  # expression-based exclude/include lists.  Collect the lists that are
2345  # needed first, looking for the lists that they operate on, and assemble
2346  # then into |lists|.  This is done in a separate loop up front, because
2347  # the _included and _excluded keys need to be added to the_dict, and that
2348  # can't be done while iterating through it.
2349
2350  lists = []
2351  del_lists = []
2352  for key, value in the_dict.items():
2353    operation = key[-1]
2354    if operation != '!' and operation != '/':
2355      continue
2356
2357    if type(value) is not list:
2358      raise ValueError(name + ' key ' + key + ' must be list, not ' + \
2359                       value.__class__.__name__)
2360
2361    list_key = key[:-1]
2362    if list_key not in the_dict:
2363      # This happens when there's a list like "sources!" but no corresponding
2364      # "sources" list.  Since there's nothing for it to operate on, queue up
2365      # the "sources!" list for deletion now.
2366      del_lists.append(key)
2367      continue
2368
2369    if type(the_dict[list_key]) is not list:
2370      value = the_dict[list_key]
2371      raise ValueError(name + ' key ' + list_key + \
2372                       ' must be list, not ' + \
2373                       value.__class__.__name__ + ' when applying ' + \
2374                       {'!': 'exclusion', '/': 'regex'}[operation])
2375
2376    if not list_key in lists:
2377      lists.append(list_key)
2378
2379  # Delete the lists that are known to be unneeded at this point.
2380  for del_list in del_lists:
2381    del the_dict[del_list]
2382
2383  for list_key in lists:
2384    the_list = the_dict[list_key]
2385
2386    # Initialize the list_actions list, which is parallel to the_list.  Each
2387    # item in list_actions identifies whether the corresponding item in
2388    # the_list should be excluded, unconditionally preserved (included), or
2389    # whether no exclusion or inclusion has been applied.  Items for which
2390    # no exclusion or inclusion has been applied (yet) have value -1, items
2391    # excluded have value 0, and items included have value 1.  Includes and
2392    # excludes override previous actions.  All items in list_actions are
2393    # initialized to -1 because no excludes or includes have been processed
2394    # yet.
2395    list_actions = list((-1,) * len(the_list))
2396
2397    exclude_key = list_key + '!'
2398    if exclude_key in the_dict:
2399      for exclude_item in the_dict[exclude_key]:
2400        for index in range(0, len(the_list)):
2401          if exclude_item == the_list[index]:
2402            # This item matches the exclude_item, so set its action to 0
2403            # (exclude).
2404            list_actions[index] = 0
2405
2406      # The "whatever!" list is no longer needed, dump it.
2407      del the_dict[exclude_key]
2408
2409    regex_key = list_key + '/'
2410    if regex_key in the_dict:
2411      for regex_item in the_dict[regex_key]:
2412        [action, pattern] = regex_item
2413        pattern_re = re.compile(pattern)
2414
2415        if action == 'exclude':
2416          # This item matches an exclude regex, so set its value to 0 (exclude).
2417          action_value = 0
2418        elif action == 'include':
2419          # This item matches an include regex, so set its value to 1 (include).
2420          action_value = 1
2421        else:
2422          # This is an action that doesn't make any sense.
2423          raise ValueError('Unrecognized action ' + action + ' in ' + name + \
2424                           ' key ' + regex_key)
2425
2426        for index in range(0, len(the_list)):
2427          list_item = the_list[index]
2428          if list_actions[index] == action_value:
2429            # Even if the regex matches, nothing will change so continue (regex
2430            # searches are expensive).
2431            continue
2432          if pattern_re.search(list_item):
2433            # Regular expression match.
2434            list_actions[index] = action_value
2435
2436      # The "whatever/" list is no longer needed, dump it.
2437      del the_dict[regex_key]
2438
2439    # Add excluded items to the excluded list.
2440    #
2441    # Note that exclude_key ("sources!") is different from excluded_key
2442    # ("sources_excluded").  The exclude_key list is input and it was already
2443    # processed and deleted; the excluded_key list is output and it's about
2444    # to be created.
2445    excluded_key = list_key + '_excluded'
2446    if excluded_key in the_dict:
2447      raise GypError(name + ' key ' + excluded_key +
2448                     ' must not be present prior '
2449                     ' to applying exclusion/regex filters for ' + list_key)
2450
2451    excluded_list = []
2452
2453    # Go backwards through the list_actions list so that as items are deleted,
2454    # the indices of items that haven't been seen yet don't shift.  That means
2455    # that things need to be prepended to excluded_list to maintain them in the
2456    # same order that they existed in the_list.
2457    for index in range(len(list_actions) - 1, -1, -1):
2458      if list_actions[index] == 0:
2459        # Dump anything with action 0 (exclude).  Keep anything with action 1
2460        # (include) or -1 (no include or exclude seen for the item).
2461        excluded_list.insert(0, the_list[index])
2462        del the_list[index]
2463
2464    # If anything was excluded, put the excluded list into the_dict at
2465    # excluded_key.
2466    if len(excluded_list) > 0:
2467      the_dict[excluded_key] = excluded_list
2468
2469  # Now recurse into subdicts and lists that may contain dicts.
2470  for key, value in the_dict.items():
2471    if type(value) is dict:
2472      ProcessListFiltersInDict(key, value)
2473    elif type(value) is list:
2474      ProcessListFiltersInList(key, value)
2475
2476
2477def ProcessListFiltersInList(name, the_list):
2478  for item in the_list:
2479    if type(item) is dict:
2480      ProcessListFiltersInDict(name, item)
2481    elif type(item) is list:
2482      ProcessListFiltersInList(name, item)
2483
2484
2485def ValidateTargetType(target, target_dict):
2486  """Ensures the 'type' field on the target is one of the known types.
2487
2488  Arguments:
2489    target: string, name of target.
2490    target_dict: dict, target spec.
2491
2492  Raises an exception on error.
2493  """
2494  VALID_TARGET_TYPES = ('executable', 'loadable_module',
2495                        'static_library', 'shared_library',
2496                        'mac_kernel_extension', 'none')
2497  target_type = target_dict.get('type', None)
2498  if target_type not in VALID_TARGET_TYPES:
2499    raise GypError("Target %s has an invalid target type '%s'.  "
2500                   "Must be one of %s." %
2501                   (target, target_type, '/'.join(VALID_TARGET_TYPES)))
2502  if (target_dict.get('standalone_static_library', 0) and
2503      not target_type == 'static_library'):
2504    raise GypError('Target %s has type %s but standalone_static_library flag is'
2505                   ' only valid for static_library type.' % (target,
2506                                                             target_type))
2507
2508
2509def ValidateSourcesInTarget(target, target_dict, build_file,
2510                            duplicate_basename_check):
2511  if not duplicate_basename_check:
2512    return
2513  if target_dict.get('type', None) != 'static_library':
2514    return
2515  sources = target_dict.get('sources', [])
2516  basenames = {}
2517  for source in sources:
2518    name, ext = os.path.splitext(source)
2519    is_compiled_file = ext in [
2520        '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S']
2521    if not is_compiled_file:
2522      continue
2523    basename = os.path.basename(name)  # Don't include extension.
2524    basenames.setdefault(basename, []).append(source)
2525
2526  error = ''
2527  for basename, files in basenames.items():
2528    if len(files) > 1:
2529      error += '  %s: %s\n' % (basename, ' '.join(files))
2530
2531  if error:
2532    print('static library %s has several files with the same basename:\n' % target
2533          + error + 'libtool on Mac cannot handle that. Use '
2534          '--no-duplicate-basename-check to disable this validation.')
2535    raise GypError('Duplicate basenames in sources section, see list above')
2536
2537
2538def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules):
2539  """Ensures that the rules sections in target_dict are valid and consistent,
2540  and determines which sources they apply to.
2541
2542  Arguments:
2543    target: string, name of target.
2544    target_dict: dict, target spec containing "rules" and "sources" lists.
2545    extra_sources_for_rules: a list of keys to scan for rule matches in
2546        addition to 'sources'.
2547  """
2548
2549  # Dicts to map between values found in rules' 'rule_name' and 'extension'
2550  # keys and the rule dicts themselves.
2551  rule_names = {}
2552  rule_extensions = {}
2553
2554  rules = target_dict.get('rules', [])
2555  for rule in rules:
2556    # Make sure that there's no conflict among rule names and extensions.
2557    rule_name = rule['rule_name']
2558    if rule_name in rule_names:
2559      raise GypError('rule %s exists in duplicate, target %s' %
2560                     (rule_name, target))
2561    rule_names[rule_name] = rule
2562
2563    rule_extension = rule['extension']
2564    if rule_extension.startswith('.'):
2565      rule_extension = rule_extension[1:]
2566    if rule_extension in rule_extensions:
2567      raise GypError(('extension %s associated with multiple rules, ' +
2568                      'target %s rules %s and %s') %
2569                     (rule_extension, target,
2570                      rule_extensions[rule_extension]['rule_name'],
2571                      rule_name))
2572    rule_extensions[rule_extension] = rule
2573
2574    # Make sure rule_sources isn't already there.  It's going to be
2575    # created below if needed.
2576    if 'rule_sources' in rule:
2577      raise GypError(
2578            'rule_sources must not exist in input, target %s rule %s' %
2579            (target, rule_name))
2580
2581    rule_sources = []
2582    source_keys = ['sources']
2583    source_keys.extend(extra_sources_for_rules)
2584    for source_key in source_keys:
2585      for source in target_dict.get(source_key, []):
2586        (source_root, source_extension) = os.path.splitext(source)
2587        if source_extension.startswith('.'):
2588          source_extension = source_extension[1:]
2589        if source_extension == rule_extension:
2590          rule_sources.append(source)
2591
2592    if len(rule_sources) > 0:
2593      rule['rule_sources'] = rule_sources
2594
2595
2596def ValidateRunAsInTarget(target, target_dict, build_file):
2597  target_name = target_dict.get('target_name')
2598  run_as = target_dict.get('run_as')
2599  if not run_as:
2600    return
2601  if type(run_as) is not dict:
2602    raise GypError("The 'run_as' in target %s from file %s should be a "
2603                   "dictionary." %
2604                   (target_name, build_file))
2605  action = run_as.get('action')
2606  if not action:
2607    raise GypError("The 'run_as' in target %s from file %s must have an "
2608                   "'action' section." %
2609                   (target_name, build_file))
2610  if type(action) is not list:
2611    raise GypError("The 'action' for 'run_as' in target %s from file %s "
2612                   "must be a list." %
2613                   (target_name, build_file))
2614  working_directory = run_as.get('working_directory')
2615  if working_directory and type(working_directory) is not str:
2616    raise GypError("The 'working_directory' for 'run_as' in target %s "
2617                   "in file %s should be a string." %
2618                   (target_name, build_file))
2619  environment = run_as.get('environment')
2620  if environment and type(environment) is not dict:
2621    raise GypError("The 'environment' for 'run_as' in target %s "
2622                   "in file %s should be a dictionary." %
2623                   (target_name, build_file))
2624
2625
2626def ValidateActionsInTarget(target, target_dict, build_file):
2627  '''Validates the inputs to the actions in a target.'''
2628  target_name = target_dict.get('target_name')
2629  actions = target_dict.get('actions', [])
2630  for action in actions:
2631    action_name = action.get('action_name')
2632    if not action_name:
2633      raise GypError("Anonymous action in target %s.  "
2634                     "An action must have an 'action_name' field." %
2635                     target_name)
2636    inputs = action.get('inputs', None)
2637    if inputs is None:
2638      raise GypError('Action in target %s has no inputs.' % target_name)
2639    action_command = action.get('action')
2640    if action_command and not action_command[0]:
2641      raise GypError("Empty action as command in target %s." % target_name)
2642
2643
2644def TurnIntIntoStrInDict(the_dict):
2645  """Given dict the_dict, recursively converts all integers into strings.
2646  """
2647  # Use items instead of items because there's no need to try to look at
2648  # reinserted keys and their associated values.
2649  for k, v in the_dict.items():
2650    if type(v) is int:
2651      v = str(v)
2652      the_dict[k] = v
2653    elif type(v) is dict:
2654      TurnIntIntoStrInDict(v)
2655    elif type(v) is list:
2656      TurnIntIntoStrInList(v)
2657
2658    if type(k) is int:
2659      del the_dict[k]
2660      the_dict[str(k)] = v
2661
2662
2663def TurnIntIntoStrInList(the_list):
2664  """Given list the_list, recursively converts all integers into strings.
2665  """
2666  for index in range(0, len(the_list)):
2667    item = the_list[index]
2668    if type(item) is int:
2669      the_list[index] = str(item)
2670    elif type(item) is dict:
2671      TurnIntIntoStrInDict(item)
2672    elif type(item) is list:
2673      TurnIntIntoStrInList(item)
2674
2675
2676def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets,
2677                         data):
2678  """Return only the targets that are deep dependencies of |root_targets|."""
2679  qualified_root_targets = []
2680  for target in root_targets:
2681    target = target.strip()
2682    qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list)
2683    if not qualified_targets:
2684      raise GypError("Could not find target %s" % target)
2685    qualified_root_targets.extend(qualified_targets)
2686
2687  wanted_targets = {}
2688  for target in qualified_root_targets:
2689    wanted_targets[target] = targets[target]
2690    for dependency in dependency_nodes[target].DeepDependencies():
2691      wanted_targets[dependency] = targets[dependency]
2692
2693  wanted_flat_list = [t for t in flat_list if t in wanted_targets]
2694
2695  # Prune unwanted targets from each build_file's data dict.
2696  for build_file in data['target_build_files']:
2697    if not 'targets' in data[build_file]:
2698      continue
2699    new_targets = []
2700    for target in data[build_file]['targets']:
2701      qualified_name = gyp.common.QualifiedTarget(build_file,
2702                                                  target['target_name'],
2703                                                  target['toolset'])
2704      if qualified_name in wanted_targets:
2705        new_targets.append(target)
2706    data[build_file]['targets'] = new_targets
2707
2708  return wanted_targets, wanted_flat_list
2709
2710
2711def VerifyNoCollidingTargets(targets):
2712  """Verify that no two targets in the same directory share the same name.
2713
2714  Arguments:
2715    targets: A list of targets in the form 'path/to/file.gyp:target_name'.
2716  """
2717  # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'.
2718  used = {}
2719  for target in targets:
2720    # Separate out 'path/to/file.gyp, 'target_name' from
2721    # 'path/to/file.gyp:target_name'.
2722    path, name = target.rsplit(':', 1)
2723    # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'.
2724    subdir, gyp = os.path.split(path)
2725    # Use '.' for the current directory '', so that the error messages make
2726    # more sense.
2727    if not subdir:
2728      subdir = '.'
2729    # Prepare a key like 'path/to:target_name'.
2730    key = subdir + ':' + name
2731    if key in used:
2732      # Complain if this target is already used.
2733      raise GypError('Duplicate target name "%s" in directory "%s" used both '
2734                     'in "%s" and "%s".' % (name, subdir, gyp, used[key]))
2735    used[key] = gyp
2736
2737
2738def SetGeneratorGlobals(generator_input_info):
2739  # Set up path_sections and non_configuration_keys with the default data plus
2740  # the generator-specific data.
2741  global path_sections
2742  path_sections = set(base_path_sections)
2743  path_sections.update(generator_input_info['path_sections'])
2744
2745  global non_configuration_keys
2746  non_configuration_keys = base_non_configuration_keys[:]
2747  non_configuration_keys.extend(generator_input_info['non_configuration_keys'])
2748
2749  global multiple_toolsets
2750  multiple_toolsets = generator_input_info[
2751      'generator_supports_multiple_toolsets']
2752
2753  global generator_filelist_paths
2754  generator_filelist_paths = generator_input_info['generator_filelist_paths']
2755
2756
2757def Load(build_files, variables, includes, depth, generator_input_info, check,
2758         circular_check, duplicate_basename_check, parallel, root_targets):
2759  SetGeneratorGlobals(generator_input_info)
2760  # A generator can have other lists (in addition to sources) be processed
2761  # for rules.
2762  extra_sources_for_rules = generator_input_info['extra_sources_for_rules']
2763
2764  # Load build files.  This loads every target-containing build file into
2765  # the |data| dictionary such that the keys to |data| are build file names,
2766  # and the values are the entire build file contents after "early" or "pre"
2767  # processing has been done and includes have been resolved.
2768  # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as
2769  # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps
2770  # track of the keys corresponding to "target" files.
2771  data = {'target_build_files': set()}
2772  # Normalize paths everywhere.  This is important because paths will be
2773  # used as keys to the data dict and for references between input files.
2774  build_files = set(map(os.path.normpath, build_files))
2775  if parallel:
2776    LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth,
2777                                 check, generator_input_info)
2778  else:
2779    aux_data = {}
2780    for build_file in build_files:
2781      try:
2782        LoadTargetBuildFile(build_file, data, aux_data,
2783                            variables, includes, depth, check, True)
2784      except Exception as e:
2785        gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file)
2786        raise
2787
2788  # Build a dict to access each target's subdict by qualified name.
2789  targets = BuildTargetsDict(data)
2790
2791  # Fully qualify all dependency links.
2792  QualifyDependencies(targets)
2793
2794  # Remove self-dependencies from targets that have 'prune_self_dependencies'
2795  # set to 1.
2796  RemoveSelfDependencies(targets)
2797
2798  # Expand dependencies specified as build_file:*.
2799  ExpandWildcardDependencies(targets, data)
2800
2801  # Remove all dependencies marked as 'link_dependency' from the targets of
2802  # type 'none'.
2803  RemoveLinkDependenciesFromNoneTargets(targets)
2804
2805  # Apply exclude (!) and regex (/) list filters only for dependency_sections.
2806  for target_name, target_dict in targets.items():
2807    tmp_dict = {}
2808    for key_base in dependency_sections:
2809      for op in ('', '!', '/'):
2810        key = key_base + op
2811        if key in target_dict:
2812          tmp_dict[key] = target_dict[key]
2813          del target_dict[key]
2814    ProcessListFiltersInDict(target_name, tmp_dict)
2815    # Write the results back to |target_dict|.
2816    for key in tmp_dict:
2817      target_dict[key] = tmp_dict[key]
2818
2819  # Make sure every dependency appears at most once.
2820  RemoveDuplicateDependencies(targets)
2821
2822  if circular_check:
2823    # Make sure that any targets in a.gyp don't contain dependencies in other
2824    # .gyp files that further depend on a.gyp.
2825    VerifyNoGYPFileCircularDependencies(targets)
2826
2827  [dependency_nodes, flat_list] = BuildDependencyList(targets)
2828
2829  if root_targets:
2830    # Remove, from |targets| and |flat_list|, the targets that are not deep
2831    # dependencies of the targets specified in |root_targets|.
2832    targets, flat_list = PruneUnwantedTargets(
2833        targets, flat_list, dependency_nodes, root_targets, data)
2834
2835  # Check that no two targets in the same directory have the same name.
2836  VerifyNoCollidingTargets(flat_list)
2837
2838  # Handle dependent settings of various types.
2839  for settings_type in ['all_dependent_settings',
2840                        'direct_dependent_settings',
2841                        'link_settings']:
2842    DoDependentSettings(settings_type, flat_list, targets, dependency_nodes)
2843
2844    # Take out the dependent settings now that they've been published to all
2845    # of the targets that require them.
2846    for target in flat_list:
2847      if settings_type in targets[target]:
2848        del targets[target][settings_type]
2849
2850  # Make sure static libraries don't declare dependencies on other static
2851  # libraries, but that linkables depend on all unlinked static libraries
2852  # that they need so that their link steps will be correct.
2853  gii = generator_input_info
2854  if gii['generator_wants_static_library_dependencies_adjusted']:
2855    AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes,
2856                                    gii['generator_wants_sorted_dependencies'])
2857
2858  # Apply "post"/"late"/"target" variable expansions and condition evaluations.
2859  for target in flat_list:
2860    target_dict = targets[target]
2861    build_file = gyp.common.BuildFile(target)
2862    ProcessVariablesAndConditionsInDict(
2863        target_dict, PHASE_LATE, variables, build_file)
2864
2865  # Move everything that can go into a "configurations" section into one.
2866  for target in flat_list:
2867    target_dict = targets[target]
2868    SetUpConfigurations(target, target_dict)
2869
2870  # Apply exclude (!) and regex (/) list filters.
2871  for target in flat_list:
2872    target_dict = targets[target]
2873    ProcessListFiltersInDict(target, target_dict)
2874
2875  # Apply "latelate" variable expansions and condition evaluations.
2876  for target in flat_list:
2877    target_dict = targets[target]
2878    build_file = gyp.common.BuildFile(target)
2879    ProcessVariablesAndConditionsInDict(
2880        target_dict, PHASE_LATELATE, variables, build_file)
2881
2882  # Make sure that the rules make sense, and build up rule_sources lists as
2883  # needed.  Not all generators will need to use the rule_sources lists, but
2884  # some may, and it seems best to build the list in a common spot.
2885  # Also validate actions and run_as elements in targets.
2886  for target in flat_list:
2887    target_dict = targets[target]
2888    build_file = gyp.common.BuildFile(target)
2889    ValidateTargetType(target, target_dict)
2890    ValidateSourcesInTarget(target, target_dict, build_file,
2891                            duplicate_basename_check)
2892    ValidateRulesInTarget(target, target_dict, extra_sources_for_rules)
2893    ValidateRunAsInTarget(target, target_dict, build_file)
2894    ValidateActionsInTarget(target, target_dict, build_file)
2895
2896  # Generators might not expect ints.  Turn them into strs.
2897  TurnIntIntoStrInDict(data)
2898
2899  # TODO(mark): Return |data| for now because the generator needs a list of
2900  # build files that came in.  In the future, maybe it should just accept
2901  # a list, and not the whole data dict.
2902  return [flat_list, targets, data]
2903