1# Copyright (c) 2012 Google Inc. All rights reserved. 2# Use of this source code is governed by a BSD-style license that can be 3# found in the LICENSE file. 4 5from __future__ import print_function 6 7import ast 8import gyp.common 9import gyp.simple_copy 10import multiprocessing 11import optparse 12import os.path 13import re 14import shlex 15import signal 16import subprocess 17import sys 18import threading 19import time 20import traceback 21from gyp.common import GypError 22from gyp.common import OrderedSet 23 24 25# A list of types that are treated as linkable. 26linkable_types = [ 27 'executable', 28 'shared_library', 29 'loadable_module', 30 'mac_kernel_extension', 31 'windows_driver', 32] 33 34# A list of sections that contain links to other targets. 35dependency_sections = ['dependencies', 'export_dependent_settings'] 36 37# base_path_sections is a list of sections defined by GYP that contain 38# pathnames. The generators can provide more keys, the two lists are merged 39# into path_sections, but you should call IsPathSection instead of using either 40# list directly. 41base_path_sections = [ 42 'destination', 43 'files', 44 'include_dirs', 45 'inputs', 46 'libraries', 47 'outputs', 48 'sources', 49] 50path_sections = set() 51 52# These per-process dictionaries are used to cache build file data when loading 53# in parallel mode. 54per_process_data = {} 55per_process_aux_data = {} 56 57try: 58 _str_types = (basestring,) 59# There's no basestring in python3. 60except NameError: 61 _str_types = (str,) 62 63try: 64 _int_types = (int, long) 65# There's no long in python3. 66except NameError: 67 _int_types = (int,) 68 69# Shortcuts as we use these combos a lot. 70_str_int_types = _str_types + _int_types 71_str_int_list_types = _str_int_types + (list,) 72 73 74def IsPathSection(section): 75 # If section ends in one of the '=+?!' characters, it's applied to a section 76 # without the trailing characters. '/' is notably absent from this list, 77 # because there's no way for a regular expression to be treated as a path. 78 while section and section[-1:] in '=+?!': 79 section = section[:-1] 80 81 if section in path_sections: 82 return True 83 84 # Sections mathing the regexp '_(dir|file|path)s?$' are also 85 # considered PathSections. Using manual string matching since that 86 # is much faster than the regexp and this can be called hundreds of 87 # thousands of times so micro performance matters. 88 if "_" in section: 89 tail = section[-6:] 90 if tail[-1] == 's': 91 tail = tail[:-1] 92 if tail[-5:] in ('_file', '_path'): 93 return True 94 return tail[-4:] == '_dir' 95 96 return False 97 98# base_non_configuration_keys is a list of key names that belong in the target 99# itself and should not be propagated into its configurations. It is merged 100# with a list that can come from the generator to 101# create non_configuration_keys. 102base_non_configuration_keys = [ 103 # Sections that must exist inside targets and not configurations. 104 'actions', 105 'all_dependent_settings', 106 'configurations', 107 'copies', 108 'default_configuration', 109 'dependencies', 110 'dependencies_original', 111 'direct_dependent_settings', 112 'libraries', 113 'postbuilds', 114 'product_dir', 115 'product_extension', 116 'product_name', 117 'product_prefix', 118 'rules', 119 'run_as', 120 'sources', 121 'standalone_static_library', 122 'suppress_wildcard', 123 'target_name', 124 'toolset', 125 'toolsets', 126 'type', 127 128 # Sections that can be found inside targets or configurations, but that 129 # should not be propagated from targets into their configurations. 130 'variables', 131] 132non_configuration_keys = [] 133 134# Keys that do not belong inside a configuration dictionary. 135invalid_configuration_keys = [ 136 'actions', 137 'all_dependent_settings', 138 'configurations', 139 'dependencies', 140 'direct_dependent_settings', 141 'libraries', 142 'link_settings', 143 'sources', 144 'standalone_static_library', 145 'target_name', 146 'type', 147] 148 149# Controls whether or not the generator supports multiple toolsets. 150multiple_toolsets = False 151 152# Paths for converting filelist paths to output paths: { 153# toplevel, 154# qualified_output_dir, 155# } 156generator_filelist_paths = None 157 158def GetIncludedBuildFiles(build_file_path, aux_data, included=None): 159 """Return a list of all build files included into build_file_path. 160 161 The returned list will contain build_file_path as well as all other files 162 that it included, either directly or indirectly. Note that the list may 163 contain files that were included into a conditional section that evaluated 164 to false and was not merged into build_file_path's dict. 165 166 aux_data is a dict containing a key for each build file or included build 167 file. Those keys provide access to dicts whose "included" keys contain 168 lists of all other files included by the build file. 169 170 included should be left at its default None value by external callers. It 171 is used for recursion. 172 173 The returned list will not contain any duplicate entries. Each build file 174 in the list will be relative to the current directory. 175 """ 176 177 if included == None: 178 included = [] 179 180 if build_file_path in included: 181 return included 182 183 included.append(build_file_path) 184 185 for included_build_file in aux_data[build_file_path].get('included', []): 186 GetIncludedBuildFiles(included_build_file, aux_data, included) 187 188 return included 189 190 191def CheckedEval(file_contents): 192 """Return the eval of a gyp file. 193 194 The gyp file is restricted to dictionaries and lists only, and 195 repeated keys are not allowed. 196 197 Note that this is slower than eval() is. 198 """ 199 200 syntax_tree = ast.parse(file_contents) 201 assert isinstance(syntax_tree, ast.Module) 202 c1 = syntax_tree.body 203 assert len(c1) == 1 204 c2 = c1[0] 205 assert isinstance(c2, ast.Expr) 206 return CheckNode(c2.value, []) 207 208 209def CheckNode(node, keypath): 210 if isinstance(node, ast.Dict): 211 dict = {} 212 for key, value in zip(node.keys, node.values): 213 assert isinstance(key, ast.Str) 214 key = key.s 215 if key in dict: 216 raise GypError("Key '" + key + "' repeated at level " + 217 repr(len(keypath) + 1) + " with key path '" + 218 '.'.join(keypath) + "'") 219 kp = list(keypath) # Make a copy of the list for descending this node. 220 kp.append(key) 221 dict[key] = CheckNode(value, kp) 222 return dict 223 elif isinstance(node, ast.List): 224 children = [] 225 for index, child in enumerate(node.elts): 226 kp = list(keypath) # Copy list. 227 kp.append(repr(index)) 228 children.append(CheckNode(child, kp)) 229 return children 230 elif isinstance(node, ast.Str): 231 return node.s 232 else: 233 raise TypeError("Unknown AST node at key path '" + '.'.join(keypath) + 234 "': " + repr(node)) 235 236 237def LoadOneBuildFile(build_file_path, data, aux_data, includes, 238 is_target, check): 239 if build_file_path in data: 240 return data[build_file_path] 241 242 if os.path.exists(build_file_path): 243 build_file_contents = open(build_file_path, 'rb').read().decode('utf-8') 244 else: 245 raise GypError("%s not found (cwd: %s)" % (build_file_path, os.getcwd())) 246 247 build_file_data = None 248 try: 249 if check: 250 build_file_data = CheckedEval(build_file_contents) 251 else: 252 build_file_data = eval(build_file_contents, {'__builtins__': None}, 253 None) 254 except SyntaxError as e: 255 e.filename = build_file_path 256 raise 257 except Exception as e: 258 gyp.common.ExceptionAppend(e, 'while reading ' + build_file_path) 259 raise 260 261 if type(build_file_data) is not dict: 262 raise GypError("%s does not evaluate to a dictionary." % build_file_path) 263 264 data[build_file_path] = build_file_data 265 aux_data[build_file_path] = {} 266 267 # Scan for includes and merge them in. 268 if ('skip_includes' not in build_file_data or 269 not build_file_data['skip_includes']): 270 try: 271 if is_target: 272 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, 273 aux_data, includes, check) 274 else: 275 LoadBuildFileIncludesIntoDict(build_file_data, build_file_path, data, 276 aux_data, None, check) 277 except Exception as e: 278 gyp.common.ExceptionAppend(e, 279 'while reading includes of ' + build_file_path) 280 raise 281 282 return build_file_data 283 284 285def LoadBuildFileIncludesIntoDict(subdict, subdict_path, data, aux_data, 286 includes, check): 287 includes_list = [] 288 if includes != None: 289 includes_list.extend(includes) 290 if 'includes' in subdict: 291 for include in subdict['includes']: 292 # "include" is specified relative to subdict_path, so compute the real 293 # path to include by appending the provided "include" to the directory 294 # in which subdict_path resides. 295 relative_include = \ 296 os.path.normpath(os.path.join(os.path.dirname(subdict_path), include)) 297 includes_list.append(relative_include) 298 # Unhook the includes list, it's no longer needed. 299 del subdict['includes'] 300 301 # Merge in the included files. 302 for include in includes_list: 303 if not 'included' in aux_data[subdict_path]: 304 aux_data[subdict_path]['included'] = [] 305 aux_data[subdict_path]['included'].append(include) 306 307 gyp.DebugOutput(gyp.DEBUG_INCLUDES, "Loading Included File: '%s'", include) 308 309 MergeDicts(subdict, 310 LoadOneBuildFile(include, data, aux_data, None, False, check), 311 subdict_path, include) 312 313 # Recurse into subdictionaries. 314 for k, v in subdict.items(): 315 if type(v) is dict: 316 LoadBuildFileIncludesIntoDict(v, subdict_path, data, aux_data, 317 None, check) 318 elif type(v) is list: 319 LoadBuildFileIncludesIntoList(v, subdict_path, data, aux_data, 320 check) 321 322 323# This recurses into lists so that it can look for dicts. 324def LoadBuildFileIncludesIntoList(sublist, sublist_path, data, aux_data, check): 325 for item in sublist: 326 if type(item) is dict: 327 LoadBuildFileIncludesIntoDict(item, sublist_path, data, aux_data, 328 None, check) 329 elif type(item) is list: 330 LoadBuildFileIncludesIntoList(item, sublist_path, data, aux_data, check) 331 332# Processes toolsets in all the targets. This recurses into condition entries 333# since they can contain toolsets as well. 334def ProcessToolsetsInDict(data): 335 if 'targets' in data: 336 target_list = data['targets'] 337 new_target_list = [] 338 for target in target_list: 339 # If this target already has an explicit 'toolset', and no 'toolsets' 340 # list, don't modify it further. 341 if 'toolset' in target and 'toolsets' not in target: 342 new_target_list.append(target) 343 continue 344 if multiple_toolsets: 345 toolsets = target.get('toolsets', ['target']) 346 else: 347 toolsets = ['target'] 348 # Make sure this 'toolsets' definition is only processed once. 349 if 'toolsets' in target: 350 del target['toolsets'] 351 if len(toolsets) > 0: 352 # Optimization: only do copies if more than one toolset is specified. 353 for build in toolsets[1:]: 354 new_target = gyp.simple_copy.deepcopy(target) 355 new_target['toolset'] = build 356 new_target_list.append(new_target) 357 target['toolset'] = toolsets[0] 358 new_target_list.append(target) 359 data['targets'] = new_target_list 360 if 'conditions' in data: 361 for condition in data['conditions']: 362 if type(condition) is list: 363 for condition_dict in condition[1:]: 364 if type(condition_dict) is dict: 365 ProcessToolsetsInDict(condition_dict) 366 367 368# TODO(mark): I don't love this name. It just means that it's going to load 369# a build file that contains targets and is expected to provide a targets dict 370# that contains the targets... 371def LoadTargetBuildFile(build_file_path, data, aux_data, variables, includes, 372 depth, check, load_dependencies): 373 # If depth is set, predefine the DEPTH variable to be a relative path from 374 # this build file's directory to the directory identified by depth. 375 if depth: 376 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a 377 # temporary measure. This should really be addressed by keeping all paths 378 # in POSIX until actual project generation. 379 d = gyp.common.RelativePath(depth, os.path.dirname(build_file_path)) 380 if d == '': 381 variables['DEPTH'] = '.' 382 else: 383 variables['DEPTH'] = d.replace('\\', '/') 384 385 # The 'target_build_files' key is only set when loading target build files in 386 # the non-parallel code path, where LoadTargetBuildFile is called 387 # recursively. In the parallel code path, we don't need to check whether the 388 # |build_file_path| has already been loaded, because the 'scheduled' set in 389 # ParallelState guarantees that we never load the same |build_file_path| 390 # twice. 391 if 'target_build_files' in data: 392 if build_file_path in data['target_build_files']: 393 # Already loaded. 394 return False 395 data['target_build_files'].add(build_file_path) 396 397 gyp.DebugOutput(gyp.DEBUG_INCLUDES, 398 "Loading Target Build File '%s'", build_file_path) 399 400 build_file_data = LoadOneBuildFile(build_file_path, data, aux_data, 401 includes, True, check) 402 403 # Store DEPTH for later use in generators. 404 build_file_data['_DEPTH'] = depth 405 406 # Set up the included_files key indicating which .gyp files contributed to 407 # this target dict. 408 if 'included_files' in build_file_data: 409 raise GypError(build_file_path + ' must not contain included_files key') 410 411 included = GetIncludedBuildFiles(build_file_path, aux_data) 412 build_file_data['included_files'] = [] 413 for included_file in included: 414 # included_file is relative to the current directory, but it needs to 415 # be made relative to build_file_path's directory. 416 included_relative = \ 417 gyp.common.RelativePath(included_file, 418 os.path.dirname(build_file_path)) 419 build_file_data['included_files'].append(included_relative) 420 421 # Do a first round of toolsets expansion so that conditions can be defined 422 # per toolset. 423 ProcessToolsetsInDict(build_file_data) 424 425 # Apply "pre"/"early" variable expansions and condition evaluations. 426 ProcessVariablesAndConditionsInDict( 427 build_file_data, PHASE_EARLY, variables, build_file_path) 428 429 # Since some toolsets might have been defined conditionally, perform 430 # a second round of toolsets expansion now. 431 ProcessToolsetsInDict(build_file_data) 432 433 # Look at each project's target_defaults dict, and merge settings into 434 # targets. 435 if 'target_defaults' in build_file_data: 436 if 'targets' not in build_file_data: 437 raise GypError("Unable to find targets in build file %s" % 438 build_file_path) 439 440 index = 0 441 while index < len(build_file_data['targets']): 442 # This procedure needs to give the impression that target_defaults is 443 # used as defaults, and the individual targets inherit from that. 444 # The individual targets need to be merged into the defaults. Make 445 # a deep copy of the defaults for each target, merge the target dict 446 # as found in the input file into that copy, and then hook up the 447 # copy with the target-specific data merged into it as the replacement 448 # target dict. 449 old_target_dict = build_file_data['targets'][index] 450 new_target_dict = gyp.simple_copy.deepcopy( 451 build_file_data['target_defaults']) 452 MergeDicts(new_target_dict, old_target_dict, 453 build_file_path, build_file_path) 454 build_file_data['targets'][index] = new_target_dict 455 index += 1 456 457 # No longer needed. 458 del build_file_data['target_defaults'] 459 460 # Look for dependencies. This means that dependency resolution occurs 461 # after "pre" conditionals and variable expansion, but before "post" - 462 # in other words, you can't put a "dependencies" section inside a "post" 463 # conditional within a target. 464 465 dependencies = [] 466 if 'targets' in build_file_data: 467 for target_dict in build_file_data['targets']: 468 if 'dependencies' not in target_dict: 469 continue 470 for dependency in target_dict['dependencies']: 471 dependencies.append( 472 gyp.common.ResolveTarget(build_file_path, dependency, None)[0]) 473 474 if load_dependencies: 475 for dependency in dependencies: 476 try: 477 LoadTargetBuildFile(dependency, data, aux_data, variables, 478 includes, depth, check, load_dependencies) 479 except Exception as e: 480 gyp.common.ExceptionAppend( 481 e, 'while loading dependencies of %s' % build_file_path) 482 raise 483 else: 484 return (build_file_path, dependencies) 485 486def CallLoadTargetBuildFile(global_flags, 487 build_file_path, variables, 488 includes, depth, check, 489 generator_input_info): 490 """Wrapper around LoadTargetBuildFile for parallel processing. 491 492 This wrapper is used when LoadTargetBuildFile is executed in 493 a worker process. 494 """ 495 496 try: 497 signal.signal(signal.SIGINT, signal.SIG_IGN) 498 499 # Apply globals so that the worker process behaves the same. 500 for key, value in global_flags.items(): 501 globals()[key] = value 502 503 SetGeneratorGlobals(generator_input_info) 504 result = LoadTargetBuildFile(build_file_path, per_process_data, 505 per_process_aux_data, variables, 506 includes, depth, check, False) 507 if not result: 508 return result 509 510 (build_file_path, dependencies) = result 511 512 # We can safely pop the build_file_data from per_process_data because it 513 # will never be referenced by this process again, so we don't need to keep 514 # it in the cache. 515 build_file_data = per_process_data.pop(build_file_path) 516 517 # This gets serialized and sent back to the main process via a pipe. 518 # It's handled in LoadTargetBuildFileCallback. 519 return (build_file_path, 520 build_file_data, 521 dependencies) 522 except GypError as e: 523 sys.stderr.write("gyp: %s\n" % e) 524 return None 525 except Exception as e: 526 print('Exception:', e, file=sys.stderr) 527 print(traceback.format_exc(), file=sys.stderr) 528 return None 529 530 531class ParallelProcessingError(Exception): 532 pass 533 534 535class ParallelState(object): 536 """Class to keep track of state when processing input files in parallel. 537 538 If build files are loaded in parallel, use this to keep track of 539 state during farming out and processing parallel jobs. It's stored 540 in a global so that the callback function can have access to it. 541 """ 542 543 def __init__(self): 544 # The multiprocessing pool. 545 self.pool = None 546 # The condition variable used to protect this object and notify 547 # the main loop when there might be more data to process. 548 self.condition = None 549 # The "data" dict that was passed to LoadTargetBuildFileParallel 550 self.data = None 551 # The number of parallel calls outstanding; decremented when a response 552 # was received. 553 self.pending = 0 554 # The set of all build files that have been scheduled, so we don't 555 # schedule the same one twice. 556 self.scheduled = set() 557 # A list of dependency build file paths that haven't been scheduled yet. 558 self.dependencies = [] 559 # Flag to indicate if there was an error in a child process. 560 self.error = False 561 562 def LoadTargetBuildFileCallback(self, result): 563 """Handle the results of running LoadTargetBuildFile in another process. 564 """ 565 self.condition.acquire() 566 if not result: 567 self.error = True 568 self.condition.notify() 569 self.condition.release() 570 return 571 (build_file_path0, build_file_data0, dependencies0) = result 572 self.data[build_file_path0] = build_file_data0 573 self.data['target_build_files'].add(build_file_path0) 574 for new_dependency in dependencies0: 575 if new_dependency not in self.scheduled: 576 self.scheduled.add(new_dependency) 577 self.dependencies.append(new_dependency) 578 self.pending -= 1 579 self.condition.notify() 580 self.condition.release() 581 582 583def LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, 584 check, generator_input_info): 585 parallel_state = ParallelState() 586 parallel_state.condition = threading.Condition() 587 # Make copies of the build_files argument that we can modify while working. 588 parallel_state.dependencies = list(build_files) 589 parallel_state.scheduled = set(build_files) 590 parallel_state.pending = 0 591 parallel_state.data = data 592 593 try: 594 parallel_state.condition.acquire() 595 while parallel_state.dependencies or parallel_state.pending: 596 if parallel_state.error: 597 break 598 if not parallel_state.dependencies: 599 parallel_state.condition.wait() 600 continue 601 602 dependency = parallel_state.dependencies.pop() 603 604 parallel_state.pending += 1 605 global_flags = { 606 'path_sections': globals()['path_sections'], 607 'non_configuration_keys': globals()['non_configuration_keys'], 608 'multiple_toolsets': globals()['multiple_toolsets']} 609 610 if not parallel_state.pool: 611 parallel_state.pool = multiprocessing.Pool(multiprocessing.cpu_count()) 612 parallel_state.pool.apply_async( 613 CallLoadTargetBuildFile, 614 args = (global_flags, dependency, 615 variables, includes, depth, check, generator_input_info), 616 callback = parallel_state.LoadTargetBuildFileCallback) 617 except KeyboardInterrupt as e: 618 parallel_state.pool.terminate() 619 raise e 620 621 parallel_state.condition.release() 622 623 parallel_state.pool.close() 624 parallel_state.pool.join() 625 parallel_state.pool = None 626 627 if parallel_state.error: 628 sys.exit(1) 629 630# Look for the bracket that matches the first bracket seen in a 631# string, and return the start and end as a tuple. For example, if 632# the input is something like "<(foo <(bar)) blah", then it would 633# return (1, 13), indicating the entire string except for the leading 634# "<" and trailing " blah". 635LBRACKETS= set('{[(') 636BRACKETS = {'}': '{', ']': '[', ')': '('} 637def FindEnclosingBracketGroup(input_str): 638 stack = [] 639 start = -1 640 for index, char in enumerate(input_str): 641 if char in LBRACKETS: 642 stack.append(char) 643 if start == -1: 644 start = index 645 elif char in BRACKETS: 646 if not stack: 647 return (-1, -1) 648 if stack.pop() != BRACKETS[char]: 649 return (-1, -1) 650 if not stack: 651 return (start, index + 1) 652 return (-1, -1) 653 654 655def IsStrCanonicalInt(string): 656 """Returns True if |string| is in its canonical integer form. 657 658 The canonical form is such that str(int(string)) == string. 659 """ 660 if isinstance(string, _str_types): 661 # This function is called a lot so for maximum performance, avoid 662 # involving regexps which would otherwise make the code much 663 # shorter. Regexps would need twice the time of this function. 664 if string: 665 if string == "0": 666 return True 667 if string[0] == "-": 668 string = string[1:] 669 if not string: 670 return False 671 if '1' <= string[0] <= '9': 672 return string.isdigit() 673 674 return False 675 676 677# This matches things like "<(asdf)", "<!(cmd)", "<!@(cmd)", "<|(list)", 678# "<!interpreter(arguments)", "<([list])", and even "<([)" and "<(<())". 679# In the last case, the inner "<()" is captured in match['content']. 680early_variable_re = re.compile( 681 r'(?P<replace>(?P<type><(?:(?:!?@?)|\|)?)' 682 r'(?P<command_string>[-a-zA-Z0-9_.]+)?' 683 r'\((?P<is_array>\s*\[?)' 684 r'(?P<content>.*?)(\]?)\))') 685 686# This matches the same as early_variable_re, but with '>' instead of '<'. 687late_variable_re = re.compile( 688 r'(?P<replace>(?P<type>>(?:(?:!?@?)|\|)?)' 689 r'(?P<command_string>[-a-zA-Z0-9_.]+)?' 690 r'\((?P<is_array>\s*\[?)' 691 r'(?P<content>.*?)(\]?)\))') 692 693# This matches the same as early_variable_re, but with '^' instead of '<'. 694latelate_variable_re = re.compile( 695 r'(?P<replace>(?P<type>[\^](?:(?:!?@?)|\|)?)' 696 r'(?P<command_string>[-a-zA-Z0-9_.]+)?' 697 r'\((?P<is_array>\s*\[?)' 698 r'(?P<content>.*?)(\]?)\))') 699 700# Global cache of results from running commands so they don't have to be run 701# more then once. 702cached_command_results = {} 703 704 705def FixupPlatformCommand(cmd): 706 if sys.platform == 'win32': 707 if type(cmd) is list: 708 cmd = [re.sub('^cat ', 'type ', cmd[0])] + cmd[1:] 709 else: 710 cmd = re.sub('^cat ', 'type ', cmd) 711 return cmd 712 713 714PHASE_EARLY = 0 715PHASE_LATE = 1 716PHASE_LATELATE = 2 717 718 719def ExpandVariables(input, phase, variables, build_file): 720 # Look for the pattern that gets expanded into variables 721 if phase == PHASE_EARLY: 722 variable_re = early_variable_re 723 expansion_symbol = '<' 724 elif phase == PHASE_LATE: 725 variable_re = late_variable_re 726 expansion_symbol = '>' 727 elif phase == PHASE_LATELATE: 728 variable_re = latelate_variable_re 729 expansion_symbol = '^' 730 else: 731 assert False 732 733 input_str = str(input) 734 if IsStrCanonicalInt(input_str): 735 return int(input_str) 736 737 # Do a quick scan to determine if an expensive regex search is warranted. 738 if expansion_symbol not in input_str: 739 return input_str 740 741 # Get the entire list of matches as a list of MatchObject instances. 742 # (using findall here would return strings instead of MatchObjects). 743 matches = list(variable_re.finditer(input_str)) 744 if not matches: 745 return input_str 746 747 output = input_str 748 # Reverse the list of matches so that replacements are done right-to-left. 749 # That ensures that earlier replacements won't mess up the string in a 750 # way that causes later calls to find the earlier substituted text instead 751 # of what's intended for replacement. 752 matches.reverse() 753 for match_group in matches: 754 match = match_group.groupdict() 755 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Matches: %r", match) 756 # match['replace'] is the substring to look for, match['type'] 757 # is the character code for the replacement type (< > <! >! <| >| <@ 758 # >@ <!@ >!@), match['is_array'] contains a '[' for command 759 # arrays, and match['content'] is the name of the variable (< >) 760 # or command to run (<! >!). match['command_string'] is an optional 761 # command string. Currently, only 'pymod_do_main' is supported. 762 763 # run_command is true if a ! variant is used. 764 run_command = '!' in match['type'] 765 command_string = match['command_string'] 766 767 # file_list is true if a | variant is used. 768 file_list = '|' in match['type'] 769 770 # Capture these now so we can adjust them later. 771 replace_start = match_group.start('replace') 772 replace_end = match_group.end('replace') 773 774 # Find the ending paren, and re-evaluate the contained string. 775 (c_start, c_end) = FindEnclosingBracketGroup(input_str[replace_start:]) 776 777 # Adjust the replacement range to match the entire command 778 # found by FindEnclosingBracketGroup (since the variable_re 779 # probably doesn't match the entire command if it contained 780 # nested variables). 781 replace_end = replace_start + c_end 782 783 # Find the "real" replacement, matching the appropriate closing 784 # paren, and adjust the replacement start and end. 785 replacement = input_str[replace_start:replace_end] 786 787 # Figure out what the contents of the variable parens are. 788 contents_start = replace_start + c_start + 1 789 contents_end = replace_end - 1 790 contents = input_str[contents_start:contents_end] 791 792 # Do filter substitution now for <|(). 793 # Admittedly, this is different than the evaluation order in other 794 # contexts. However, since filtration has no chance to run on <|(), 795 # this seems like the only obvious way to give them access to filters. 796 if file_list: 797 processed_variables = gyp.simple_copy.deepcopy(variables) 798 ProcessListFiltersInDict(contents, processed_variables) 799 # Recurse to expand variables in the contents 800 contents = ExpandVariables(contents, phase, 801 processed_variables, build_file) 802 else: 803 # Recurse to expand variables in the contents 804 contents = ExpandVariables(contents, phase, variables, build_file) 805 806 # Strip off leading/trailing whitespace so that variable matches are 807 # simpler below (and because they are rarely needed). 808 contents = contents.strip() 809 810 # expand_to_list is true if an @ variant is used. In that case, 811 # the expansion should result in a list. Note that the caller 812 # is to be expecting a list in return, and not all callers do 813 # because not all are working in list context. Also, for list 814 # expansions, there can be no other text besides the variable 815 # expansion in the input string. 816 expand_to_list = '@' in match['type'] and input_str == replacement 817 818 if run_command or file_list: 819 # Find the build file's directory, so commands can be run or file lists 820 # generated relative to it. 821 build_file_dir = os.path.dirname(build_file) 822 if build_file_dir == '' and not file_list: 823 # If build_file is just a leaf filename indicating a file in the 824 # current directory, build_file_dir might be an empty string. Set 825 # it to None to signal to subprocess.Popen that it should run the 826 # command in the current directory. 827 build_file_dir = None 828 829 # Support <|(listfile.txt ...) which generates a file 830 # containing items from a gyp list, generated at gyp time. 831 # This works around actions/rules which have more inputs than will 832 # fit on the command line. 833 if file_list: 834 if type(contents) is list: 835 contents_list = contents 836 else: 837 contents_list = contents.split(' ') 838 replacement = contents_list[0] 839 if os.path.isabs(replacement): 840 raise GypError('| cannot handle absolute paths, got "%s"' % replacement) 841 842 if not generator_filelist_paths: 843 path = os.path.join(build_file_dir, replacement) 844 else: 845 if os.path.isabs(build_file_dir): 846 toplevel = generator_filelist_paths['toplevel'] 847 rel_build_file_dir = gyp.common.RelativePath(build_file_dir, toplevel) 848 else: 849 rel_build_file_dir = build_file_dir 850 qualified_out_dir = generator_filelist_paths['qualified_out_dir'] 851 path = os.path.join(qualified_out_dir, rel_build_file_dir, replacement) 852 gyp.common.EnsureDirExists(path) 853 854 replacement = gyp.common.RelativePath(path, build_file_dir) 855 f = gyp.common.WriteOnDiff(path) 856 for i in contents_list[1:]: 857 f.write('%s\n' % i) 858 f.close() 859 860 elif run_command: 861 use_shell = True 862 if match['is_array']: 863 contents = eval(contents) 864 use_shell = False 865 866 # Check for a cached value to avoid executing commands, or generating 867 # file lists more than once. The cache key contains the command to be 868 # run as well as the directory to run it from, to account for commands 869 # that depend on their current directory. 870 # TODO(http://code.google.com/p/gyp/issues/detail?id=111): In theory, 871 # someone could author a set of GYP files where each time the command 872 # is invoked it produces different output by design. When the need 873 # arises, the syntax should be extended to support no caching off a 874 # command's output so it is run every time. 875 cache_key = (str(contents), build_file_dir) 876 cached_value = cached_command_results.get(cache_key, None) 877 if cached_value is None: 878 gyp.DebugOutput(gyp.DEBUG_VARIABLES, 879 "Executing command '%s' in directory '%s'", 880 contents, build_file_dir) 881 882 replacement = '' 883 884 if command_string == 'pymod_do_main': 885 # <!pymod_do_main(modulename param eters) loads |modulename| as a 886 # python module and then calls that module's DoMain() function, 887 # passing ["param", "eters"] as a single list argument. For modules 888 # that don't load quickly, this can be faster than 889 # <!(python modulename param eters). Do this in |build_file_dir|. 890 oldwd = os.getcwd() # Python doesn't like os.open('.'): no fchdir. 891 if build_file_dir: # build_file_dir may be None (see above). 892 os.chdir(build_file_dir) 893 try: 894 895 parsed_contents = shlex.split(contents) 896 try: 897 py_module = __import__(parsed_contents[0]) 898 except ImportError as e: 899 raise GypError("Error importing pymod_do_main" 900 "module (%s): %s" % (parsed_contents[0], e)) 901 replacement = str(py_module.DoMain(parsed_contents[1:])).rstrip() 902 finally: 903 os.chdir(oldwd) 904 assert replacement != None 905 elif command_string: 906 raise GypError("Unknown command string '%s' in '%s'." % 907 (command_string, contents)) 908 else: 909 # Fix up command with platform specific workarounds. 910 contents = FixupPlatformCommand(contents) 911 try: 912 p = subprocess.Popen(contents, shell=use_shell, 913 stdout=subprocess.PIPE, 914 stderr=subprocess.PIPE, 915 stdin=subprocess.PIPE, 916 cwd=build_file_dir) 917 except Exception as e: 918 raise GypError("%s while executing command '%s' in %s" % 919 (e, contents, build_file)) 920 921 p_stdout, p_stderr = p.communicate('') 922 923 if p.wait() != 0 or p_stderr: 924 p_stderr_decoded = p_stderr.decode('utf-8') 925 sys.stderr.write(p_stderr_decoded) 926 # Simulate check_call behavior, since check_call only exists 927 # in python 2.5 and later. 928 raise GypError("Call to '%s' returned exit status %d while in %s." % 929 (contents, p.returncode, build_file)) 930 replacement = p_stdout.decode('utf-8').rstrip() 931 932 cached_command_results[cache_key] = replacement 933 else: 934 gyp.DebugOutput(gyp.DEBUG_VARIABLES, 935 "Had cache value for command '%s' in directory '%s'", 936 contents,build_file_dir) 937 replacement = cached_value 938 939 else: 940 if not contents in variables: 941 if contents[-1] in ['!', '/']: 942 # In order to allow cross-compiles (nacl) to happen more naturally, 943 # we will allow references to >(sources/) etc. to resolve to 944 # and empty list if undefined. This allows actions to: 945 # 'action!': [ 946 # '>@(_sources!)', 947 # ], 948 # 'action/': [ 949 # '>@(_sources/)', 950 # ], 951 replacement = [] 952 else: 953 raise GypError('Undefined variable ' + contents + 954 ' in ' + build_file) 955 else: 956 replacement = variables[contents] 957 958 if type(replacement) is list: 959 for item in replacement: 960 if not contents[-1] == '/' and not isinstance(item, _str_int_types): 961 raise GypError('Variable ' + contents + 962 ' must expand to a string or list of strings; ' + 963 'list contains a ' + 964 item.__class__.__name__) 965 # Run through the list and handle variable expansions in it. Since 966 # the list is guaranteed not to contain dicts, this won't do anything 967 # with conditions sections. 968 ProcessVariablesAndConditionsInList(replacement, phase, variables, 969 build_file) 970 elif not isinstance(replacement, _str_int_types): 971 raise GypError('Variable ' + str(contents) + 972 ' must expand to a string or list of strings; ' + 973 'found a ' + replacement.__class__.__name__) 974 975 if expand_to_list: 976 # Expanding in list context. It's guaranteed that there's only one 977 # replacement to do in |input_str| and that it's this replacement. See 978 # above. 979 if type(replacement) is list: 980 # If it's already a list, make a copy. 981 output = replacement[:] 982 else: 983 # Split it the same way sh would split arguments. 984 output = shlex.split(str(replacement)) 985 else: 986 # Expanding in string context. 987 encoded_replacement = '' 988 if type(replacement) is list: 989 # When expanding a list into string context, turn the list items 990 # into a string in a way that will work with a subprocess call. 991 # 992 # TODO(mark): This isn't completely correct. This should 993 # call a generator-provided function that observes the 994 # proper list-to-argument quoting rules on a specific 995 # platform instead of just calling the POSIX encoding 996 # routine. 997 encoded_replacement = gyp.common.EncodePOSIXShellList(replacement) 998 else: 999 encoded_replacement = replacement 1000 1001 output = output[:replace_start] + str(encoded_replacement) + \ 1002 output[replace_end:] 1003 # Prepare for the next match iteration. 1004 input_str = output 1005 1006 if output == input: 1007 gyp.DebugOutput(gyp.DEBUG_VARIABLES, 1008 "Found only identity matches on %r, avoiding infinite " 1009 "recursion.", 1010 output) 1011 else: 1012 # Look for more matches now that we've replaced some, to deal with 1013 # expanding local variables (variables defined in the same 1014 # variables block as this one). 1015 gyp.DebugOutput(gyp.DEBUG_VARIABLES, "Found output %r, recursing.", output) 1016 if type(output) is list: 1017 if output and type(output[0]) is list: 1018 # Leave output alone if it's a list of lists. 1019 # We don't want such lists to be stringified. 1020 pass 1021 else: 1022 new_output = [] 1023 for item in output: 1024 new_output.append( 1025 ExpandVariables(item, phase, variables, build_file)) 1026 output = new_output 1027 else: 1028 output = ExpandVariables(output, phase, variables, build_file) 1029 1030 # Convert all strings that are canonically-represented integers into integers. 1031 if type(output) is list: 1032 for index, outstr in enumerate(output): 1033 if IsStrCanonicalInt(outstr): 1034 output[index] = int(outstr) 1035 elif IsStrCanonicalInt(output): 1036 output = int(output) 1037 1038 return output 1039 1040# The same condition is often evaluated over and over again so it 1041# makes sense to cache as much as possible between evaluations. 1042cached_conditions_asts = {} 1043 1044def EvalCondition(condition, conditions_key, phase, variables, build_file): 1045 """Returns the dict that should be used or None if the result was 1046 that nothing should be used.""" 1047 if type(condition) is not list: 1048 raise GypError(conditions_key + ' must be a list') 1049 if len(condition) < 2: 1050 # It's possible that condition[0] won't work in which case this 1051 # attempt will raise its own IndexError. That's probably fine. 1052 raise GypError(conditions_key + ' ' + condition[0] + 1053 ' must be at least length 2, not ' + str(len(condition))) 1054 1055 i = 0 1056 result = None 1057 while i < len(condition): 1058 cond_expr = condition[i] 1059 true_dict = condition[i + 1] 1060 if type(true_dict) is not dict: 1061 raise GypError('{} {} must be followed by a dictionary, not {}'.format( 1062 conditions_key, cond_expr, type(true_dict))) 1063 if len(condition) > i + 2 and type(condition[i + 2]) is dict: 1064 false_dict = condition[i + 2] 1065 i = i + 3 1066 if i != len(condition): 1067 raise GypError('{} {} has {} unexpected trailing items'.format( 1068 conditions_key, cond_expr, len(condition) - i)) 1069 else: 1070 false_dict = None 1071 i = i + 2 1072 if result == None: 1073 result = EvalSingleCondition( 1074 cond_expr, true_dict, false_dict, phase, variables, build_file) 1075 1076 return result 1077 1078 1079def EvalSingleCondition( 1080 cond_expr, true_dict, false_dict, phase, variables, build_file): 1081 """Returns true_dict if cond_expr evaluates to true, and false_dict 1082 otherwise.""" 1083 # Do expansions on the condition itself. Since the conditon can naturally 1084 # contain variable references without needing to resort to GYP expansion 1085 # syntax, this is of dubious value for variables, but someone might want to 1086 # use a command expansion directly inside a condition. 1087 cond_expr_expanded = ExpandVariables(cond_expr, phase, variables, 1088 build_file) 1089 if not isinstance(cond_expr_expanded, _str_int_types): 1090 raise ValueError( 1091 'Variable expansion in this context permits str and int ' + \ 1092 'only, found ' + cond_expr_expanded.__class__.__name__) 1093 1094 try: 1095 if cond_expr_expanded in cached_conditions_asts: 1096 ast_code = cached_conditions_asts[cond_expr_expanded] 1097 else: 1098 ast_code = compile(cond_expr_expanded, '<string>', 'eval') 1099 cached_conditions_asts[cond_expr_expanded] = ast_code 1100 if eval(ast_code, {'__builtins__': None}, variables): 1101 return true_dict 1102 return false_dict 1103 except SyntaxError as e: 1104 syntax_error = SyntaxError('%s while evaluating condition \'%s\' in %s ' 1105 'at character %d.' % 1106 (str(e.args[0]), e.text, build_file, e.offset), 1107 e.filename, e.lineno, e.offset, e.text) 1108 raise syntax_error 1109 except NameError as e: 1110 gyp.common.ExceptionAppend(e, 'while evaluating condition \'%s\' in %s' % 1111 (cond_expr_expanded, build_file)) 1112 raise GypError(e) 1113 1114 1115def ProcessConditionsInDict(the_dict, phase, variables, build_file): 1116 # Process a 'conditions' or 'target_conditions' section in the_dict, 1117 # depending on phase. 1118 # early -> conditions 1119 # late -> target_conditions 1120 # latelate -> no conditions 1121 # 1122 # Each item in a conditions list consists of cond_expr, a string expression 1123 # evaluated as the condition, and true_dict, a dict that will be merged into 1124 # the_dict if cond_expr evaluates to true. Optionally, a third item, 1125 # false_dict, may be present. false_dict is merged into the_dict if 1126 # cond_expr evaluates to false. 1127 # 1128 # Any dict merged into the_dict will be recursively processed for nested 1129 # conditionals and other expansions, also according to phase, immediately 1130 # prior to being merged. 1131 1132 if phase == PHASE_EARLY: 1133 conditions_key = 'conditions' 1134 elif phase == PHASE_LATE: 1135 conditions_key = 'target_conditions' 1136 elif phase == PHASE_LATELATE: 1137 return 1138 else: 1139 assert False 1140 1141 if not conditions_key in the_dict: 1142 return 1143 1144 conditions_list = the_dict[conditions_key] 1145 # Unhook the conditions list, it's no longer needed. 1146 del the_dict[conditions_key] 1147 1148 for condition in conditions_list: 1149 merge_dict = EvalCondition(condition, conditions_key, phase, variables, 1150 build_file) 1151 1152 if merge_dict != None: 1153 # Expand variables and nested conditinals in the merge_dict before 1154 # merging it. 1155 ProcessVariablesAndConditionsInDict(merge_dict, phase, 1156 variables, build_file) 1157 1158 MergeDicts(the_dict, merge_dict, build_file, build_file) 1159 1160 1161def LoadAutomaticVariablesFromDict(variables, the_dict): 1162 # Any keys with plain string values in the_dict become automatic variables. 1163 # The variable name is the key name with a "_" character prepended. 1164 for key, value in the_dict.items(): 1165 if isinstance(value, _str_int_list_types): 1166 variables['_' + key] = value 1167 1168 1169def LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key): 1170 # Any keys in the_dict's "variables" dict, if it has one, becomes a 1171 # variable. The variable name is the key name in the "variables" dict. 1172 # Variables that end with the % character are set only if they are unset in 1173 # the variables dict. the_dict_key is the name of the key that accesses 1174 # the_dict in the_dict's parent dict. If the_dict's parent is not a dict 1175 # (it could be a list or it could be parentless because it is a root dict), 1176 # the_dict_key will be None. 1177 for key, value in the_dict.get('variables', {}).items(): 1178 if not isinstance(value, _str_int_list_types): 1179 continue 1180 1181 if key.endswith('%'): 1182 variable_name = key[:-1] 1183 if variable_name in variables: 1184 # If the variable is already set, don't set it. 1185 continue 1186 if the_dict_key == 'variables' and variable_name in the_dict: 1187 # If the variable is set without a % in the_dict, and the_dict is a 1188 # variables dict (making |variables| a varaibles sub-dict of a 1189 # variables dict), use the_dict's definition. 1190 value = the_dict[variable_name] 1191 else: 1192 variable_name = key 1193 1194 variables[variable_name] = value 1195 1196 1197def ProcessVariablesAndConditionsInDict(the_dict, phase, variables_in, 1198 build_file, the_dict_key=None): 1199 """Handle all variable and command expansion and conditional evaluation. 1200 1201 This function is the public entry point for all variable expansions and 1202 conditional evaluations. The variables_in dictionary will not be modified 1203 by this function. 1204 """ 1205 1206 # Make a copy of the variables_in dict that can be modified during the 1207 # loading of automatics and the loading of the variables dict. 1208 variables = variables_in.copy() 1209 LoadAutomaticVariablesFromDict(variables, the_dict) 1210 1211 if 'variables' in the_dict: 1212 # Make sure all the local variables are added to the variables 1213 # list before we process them so that you can reference one 1214 # variable from another. They will be fully expanded by recursion 1215 # in ExpandVariables. 1216 for key, value in the_dict['variables'].items(): 1217 variables[key] = value 1218 1219 # Handle the associated variables dict first, so that any variable 1220 # references within can be resolved prior to using them as variables. 1221 # Pass a copy of the variables dict to avoid having it be tainted. 1222 # Otherwise, it would have extra automatics added for everything that 1223 # should just be an ordinary variable in this scope. 1224 ProcessVariablesAndConditionsInDict(the_dict['variables'], phase, 1225 variables, build_file, 'variables') 1226 1227 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) 1228 1229 for key, value in the_dict.items(): 1230 # Skip "variables", which was already processed if present. 1231 if key != 'variables' and isinstance(value, _str_types): 1232 expanded = ExpandVariables(value, phase, variables, build_file) 1233 if not isinstance(expanded, _str_int_types): 1234 raise ValueError( 1235 'Variable expansion in this context permits str and int ' + \ 1236 'only, found ' + expanded.__class__.__name__ + ' for ' + key) 1237 the_dict[key] = expanded 1238 1239 # Variable expansion may have resulted in changes to automatics. Reload. 1240 # TODO(mark): Optimization: only reload if no changes were made. 1241 variables = variables_in.copy() 1242 LoadAutomaticVariablesFromDict(variables, the_dict) 1243 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) 1244 1245 # Process conditions in this dict. This is done after variable expansion 1246 # so that conditions may take advantage of expanded variables. For example, 1247 # if the_dict contains: 1248 # {'type': '<(library_type)', 1249 # 'conditions': [['_type=="static_library"', { ... }]]}, 1250 # _type, as used in the condition, will only be set to the value of 1251 # library_type if variable expansion is performed before condition 1252 # processing. However, condition processing should occur prior to recursion 1253 # so that variables (both automatic and "variables" dict type) may be 1254 # adjusted by conditions sections, merged into the_dict, and have the 1255 # intended impact on contained dicts. 1256 # 1257 # This arrangement means that a "conditions" section containing a "variables" 1258 # section will only have those variables effective in subdicts, not in 1259 # the_dict. The workaround is to put a "conditions" section within a 1260 # "variables" section. For example: 1261 # {'conditions': [['os=="mac"', {'variables': {'define': 'IS_MAC'}}]], 1262 # 'defines': ['<(define)'], 1263 # 'my_subdict': {'defines': ['<(define)']}}, 1264 # will not result in "IS_MAC" being appended to the "defines" list in the 1265 # current scope but would result in it being appended to the "defines" list 1266 # within "my_subdict". By comparison: 1267 # {'variables': {'conditions': [['os=="mac"', {'define': 'IS_MAC'}]]}, 1268 # 'defines': ['<(define)'], 1269 # 'my_subdict': {'defines': ['<(define)']}}, 1270 # will append "IS_MAC" to both "defines" lists. 1271 1272 # Evaluate conditions sections, allowing variable expansions within them 1273 # as well as nested conditionals. This will process a 'conditions' or 1274 # 'target_conditions' section, perform appropriate merging and recursive 1275 # conditional and variable processing, and then remove the conditions section 1276 # from the_dict if it is present. 1277 ProcessConditionsInDict(the_dict, phase, variables, build_file) 1278 1279 # Conditional processing may have resulted in changes to automatics or the 1280 # variables dict. Reload. 1281 variables = variables_in.copy() 1282 LoadAutomaticVariablesFromDict(variables, the_dict) 1283 LoadVariablesFromVariablesDict(variables, the_dict, the_dict_key) 1284 1285 # Recurse into child dicts, or process child lists which may result in 1286 # further recursion into descendant dicts. 1287 for key, value in the_dict.items(): 1288 # Skip "variables" and string values, which were already processed if 1289 # present. 1290 if key == 'variables' or isinstance(value, _str_types): 1291 continue 1292 if type(value) is dict: 1293 # Pass a copy of the variables dict so that subdicts can't influence 1294 # parents. 1295 ProcessVariablesAndConditionsInDict(value, phase, variables, 1296 build_file, key) 1297 elif type(value) is list: 1298 # The list itself can't influence the variables dict, and 1299 # ProcessVariablesAndConditionsInList will make copies of the variables 1300 # dict if it needs to pass it to something that can influence it. No 1301 # copy is necessary here. 1302 ProcessVariablesAndConditionsInList(value, phase, variables, 1303 build_file) 1304 elif not isinstance(value, _int_types): 1305 raise TypeError('Unknown type ' + value.__class__.__name__ + \ 1306 ' for ' + key) 1307 1308 1309def ProcessVariablesAndConditionsInList(the_list, phase, variables, 1310 build_file): 1311 # Iterate using an index so that new values can be assigned into the_list. 1312 index = 0 1313 while index < len(the_list): 1314 item = the_list[index] 1315 if type(item) is dict: 1316 # Make a copy of the variables dict so that it won't influence anything 1317 # outside of its own scope. 1318 ProcessVariablesAndConditionsInDict(item, phase, variables, build_file) 1319 elif type(item) is list: 1320 ProcessVariablesAndConditionsInList(item, phase, variables, build_file) 1321 elif isinstance(item, _str_types): 1322 expanded = ExpandVariables(item, phase, variables, build_file) 1323 if isinstance(expanded, _str_int_types): 1324 the_list[index] = expanded 1325 elif type(expanded) is list: 1326 the_list[index:index+1] = expanded 1327 index += len(expanded) 1328 1329 # index now identifies the next item to examine. Continue right now 1330 # without falling into the index increment below. 1331 continue 1332 else: 1333 raise ValueError( 1334 'Variable expansion in this context permits strings and ' + \ 1335 'lists only, found ' + expanded.__class__.__name__ + ' at ' + \ 1336 index) 1337 elif not isinstance(item, _int_types): 1338 raise TypeError('Unknown type ' + item.__class__.__name__ + \ 1339 ' at index ' + index) 1340 index = index + 1 1341 1342 1343def BuildTargetsDict(data): 1344 """Builds a dict mapping fully-qualified target names to their target dicts. 1345 1346 |data| is a dict mapping loaded build files by pathname relative to the 1347 current directory. Values in |data| are build file contents. For each 1348 |data| value with a "targets" key, the value of the "targets" key is taken 1349 as a list containing target dicts. Each target's fully-qualified name is 1350 constructed from the pathname of the build file (|data| key) and its 1351 "target_name" property. These fully-qualified names are used as the keys 1352 in the returned dict. These keys provide access to the target dicts, 1353 the dicts in the "targets" lists. 1354 """ 1355 1356 targets = {} 1357 for build_file in data['target_build_files']: 1358 for target in data[build_file].get('targets', []): 1359 target_name = gyp.common.QualifiedTarget(build_file, 1360 target['target_name'], 1361 target['toolset']) 1362 if target_name in targets: 1363 raise GypError('Duplicate target definitions for ' + target_name) 1364 targets[target_name] = target 1365 1366 return targets 1367 1368 1369def QualifyDependencies(targets): 1370 """Make dependency links fully-qualified relative to the current directory. 1371 1372 |targets| is a dict mapping fully-qualified target names to their target 1373 dicts. For each target in this dict, keys known to contain dependency 1374 links are examined, and any dependencies referenced will be rewritten 1375 so that they are fully-qualified and relative to the current directory. 1376 All rewritten dependencies are suitable for use as keys to |targets| or a 1377 similar dict. 1378 """ 1379 1380 all_dependency_sections = [dep + op 1381 for dep in dependency_sections 1382 for op in ('', '!', '/')] 1383 1384 for target, target_dict in targets.items(): 1385 target_build_file = gyp.common.BuildFile(target) 1386 toolset = target_dict['toolset'] 1387 for dependency_key in all_dependency_sections: 1388 dependencies = target_dict.get(dependency_key, []) 1389 for index, dep in enumerate(dependencies): 1390 dep_file, dep_target, dep_toolset = gyp.common.ResolveTarget( 1391 target_build_file, dep, toolset) 1392 if not multiple_toolsets: 1393 # Ignore toolset specification in the dependency if it is specified. 1394 dep_toolset = toolset 1395 dependency = gyp.common.QualifiedTarget(dep_file, 1396 dep_target, 1397 dep_toolset) 1398 dependencies[index] = dependency 1399 1400 # Make sure anything appearing in a list other than "dependencies" also 1401 # appears in the "dependencies" list. 1402 if dependency_key != 'dependencies' and \ 1403 dependency not in target_dict['dependencies']: 1404 raise GypError('Found ' + dependency + ' in ' + dependency_key + 1405 ' of ' + target + ', but not in dependencies') 1406 1407 1408def ExpandWildcardDependencies(targets, data): 1409 """Expands dependencies specified as build_file:*. 1410 1411 For each target in |targets|, examines sections containing links to other 1412 targets. If any such section contains a link of the form build_file:*, it 1413 is taken as a wildcard link, and is expanded to list each target in 1414 build_file. The |data| dict provides access to build file dicts. 1415 1416 Any target that does not wish to be included by wildcard can provide an 1417 optional "suppress_wildcard" key in its target dict. When present and 1418 true, a wildcard dependency link will not include such targets. 1419 1420 All dependency names, including the keys to |targets| and the values in each 1421 dependency list, must be qualified when this function is called. 1422 """ 1423 1424 for target, target_dict in targets.items(): 1425 toolset = target_dict['toolset'] 1426 target_build_file = gyp.common.BuildFile(target) 1427 for dependency_key in dependency_sections: 1428 dependencies = target_dict.get(dependency_key, []) 1429 1430 # Loop this way instead of "for dependency in" or "for index in xrange" 1431 # because the dependencies list will be modified within the loop body. 1432 index = 0 1433 while index < len(dependencies): 1434 (dependency_build_file, dependency_target, dependency_toolset) = \ 1435 gyp.common.ParseQualifiedTarget(dependencies[index]) 1436 if dependency_target != '*' and dependency_toolset != '*': 1437 # Not a wildcard. Keep it moving. 1438 index = index + 1 1439 continue 1440 1441 if dependency_build_file == target_build_file: 1442 # It's an error for a target to depend on all other targets in 1443 # the same file, because a target cannot depend on itself. 1444 raise GypError('Found wildcard in ' + dependency_key + ' of ' + 1445 target + ' referring to same build file') 1446 1447 # Take the wildcard out and adjust the index so that the next 1448 # dependency in the list will be processed the next time through the 1449 # loop. 1450 del dependencies[index] 1451 index = index - 1 1452 1453 # Loop through the targets in the other build file, adding them to 1454 # this target's list of dependencies in place of the removed 1455 # wildcard. 1456 dependency_target_dicts = data[dependency_build_file]['targets'] 1457 for dependency_target_dict in dependency_target_dicts: 1458 if int(dependency_target_dict.get('suppress_wildcard', False)): 1459 continue 1460 dependency_target_name = dependency_target_dict['target_name'] 1461 if (dependency_target != '*' and 1462 dependency_target != dependency_target_name): 1463 continue 1464 dependency_target_toolset = dependency_target_dict['toolset'] 1465 if (dependency_toolset != '*' and 1466 dependency_toolset != dependency_target_toolset): 1467 continue 1468 dependency = gyp.common.QualifiedTarget(dependency_build_file, 1469 dependency_target_name, 1470 dependency_target_toolset) 1471 index = index + 1 1472 dependencies.insert(index, dependency) 1473 1474 index = index + 1 1475 1476 1477def Unify(l): 1478 """Removes duplicate elements from l, keeping the first element.""" 1479 seen = {} 1480 return [seen.setdefault(e, e) for e in l if e not in seen] 1481 1482 1483def RemoveDuplicateDependencies(targets): 1484 """Makes sure every dependency appears only once in all targets's dependency 1485 lists.""" 1486 for target_name, target_dict in targets.items(): 1487 for dependency_key in dependency_sections: 1488 dependencies = target_dict.get(dependency_key, []) 1489 if dependencies: 1490 target_dict[dependency_key] = Unify(dependencies) 1491 1492 1493def Filter(l, item): 1494 """Removes item from l.""" 1495 res = {} 1496 return [res.setdefault(e, e) for e in l if e != item] 1497 1498 1499def RemoveSelfDependencies(targets): 1500 """Remove self dependencies from targets that have the prune_self_dependency 1501 variable set.""" 1502 for target_name, target_dict in targets.items(): 1503 for dependency_key in dependency_sections: 1504 dependencies = target_dict.get(dependency_key, []) 1505 if dependencies: 1506 for t in dependencies: 1507 if t == target_name: 1508 if targets[t].get('variables', {}).get('prune_self_dependency', 0): 1509 target_dict[dependency_key] = Filter(dependencies, target_name) 1510 1511 1512def RemoveLinkDependenciesFromNoneTargets(targets): 1513 """Remove dependencies having the 'link_dependency' attribute from the 'none' 1514 targets.""" 1515 for target_name, target_dict in targets.items(): 1516 for dependency_key in dependency_sections: 1517 dependencies = target_dict.get(dependency_key, []) 1518 if dependencies: 1519 for t in dependencies: 1520 if target_dict.get('type', None) == 'none': 1521 if targets[t].get('variables', {}).get('link_dependency', 0): 1522 target_dict[dependency_key] = \ 1523 Filter(target_dict[dependency_key], t) 1524 1525 1526class DependencyGraphNode(object): 1527 """ 1528 1529 Attributes: 1530 ref: A reference to an object that this DependencyGraphNode represents. 1531 dependencies: List of DependencyGraphNodes on which this one depends. 1532 dependents: List of DependencyGraphNodes that depend on this one. 1533 """ 1534 1535 class CircularException(GypError): 1536 pass 1537 1538 def __init__(self, ref): 1539 self.ref = ref 1540 self.dependencies = [] 1541 self.dependents = [] 1542 1543 def __repr__(self): 1544 return '<DependencyGraphNode: %r>' % self.ref 1545 1546 def FlattenToList(self): 1547 # flat_list is the sorted list of dependencies - actually, the list items 1548 # are the "ref" attributes of DependencyGraphNodes. Every target will 1549 # appear in flat_list after all of its dependencies, and before all of its 1550 # dependents. 1551 flat_list = OrderedSet() 1552 1553 def ExtractNodeRef(node): 1554 """Extracts the object that the node represents from the given node.""" 1555 return node.ref 1556 1557 # in_degree_zeros is the list of DependencyGraphNodes that have no 1558 # dependencies not in flat_list. Initially, it is a copy of the children 1559 # of this node, because when the graph was built, nodes with no 1560 # dependencies were made implicit dependents of the root node. 1561 in_degree_zeros = sorted(self.dependents[:], key=ExtractNodeRef) 1562 1563 while in_degree_zeros: 1564 # Nodes in in_degree_zeros have no dependencies not in flat_list, so they 1565 # can be appended to flat_list. Take these nodes out of in_degree_zeros 1566 # as work progresses, so that the next node to process from the list can 1567 # always be accessed at a consistent position. 1568 node = in_degree_zeros.pop() 1569 flat_list.add(node.ref) 1570 1571 # Look at dependents of the node just added to flat_list. Some of them 1572 # may now belong in in_degree_zeros. 1573 for node_dependent in sorted(node.dependents, key=ExtractNodeRef): 1574 is_in_degree_zero = True 1575 # TODO: We want to check through the 1576 # node_dependent.dependencies list but if it's long and we 1577 # always start at the beginning, then we get O(n^2) behaviour. 1578 for node_dependent_dependency in (sorted(node_dependent.dependencies, 1579 key=ExtractNodeRef)): 1580 if not node_dependent_dependency.ref in flat_list: 1581 # The dependent one or more dependencies not in flat_list. There 1582 # will be more chances to add it to flat_list when examining 1583 # it again as a dependent of those other dependencies, provided 1584 # that there are no cycles. 1585 is_in_degree_zero = False 1586 break 1587 1588 if is_in_degree_zero: 1589 # All of the dependent's dependencies are already in flat_list. Add 1590 # it to in_degree_zeros where it will be processed in a future 1591 # iteration of the outer loop. 1592 in_degree_zeros += [node_dependent] 1593 1594 return list(flat_list) 1595 1596 def FindCycles(self): 1597 """ 1598 Returns a list of cycles in the graph, where each cycle is its own list. 1599 """ 1600 results = [] 1601 visited = set() 1602 1603 def Visit(node, path): 1604 for child in node.dependents: 1605 if child in path: 1606 results.append([child] + path[:path.index(child) + 1]) 1607 elif not child in visited: 1608 visited.add(child) 1609 Visit(child, [child] + path) 1610 1611 visited.add(self) 1612 Visit(self, [self]) 1613 1614 return results 1615 1616 def DirectDependencies(self, dependencies=None): 1617 """Returns a list of just direct dependencies.""" 1618 if dependencies == None: 1619 dependencies = [] 1620 1621 for dependency in self.dependencies: 1622 # Check for None, corresponding to the root node. 1623 if dependency.ref != None and dependency.ref not in dependencies: 1624 dependencies.append(dependency.ref) 1625 1626 return dependencies 1627 1628 def _AddImportedDependencies(self, targets, dependencies=None): 1629 """Given a list of direct dependencies, adds indirect dependencies that 1630 other dependencies have declared to export their settings. 1631 1632 This method does not operate on self. Rather, it operates on the list 1633 of dependencies in the |dependencies| argument. For each dependency in 1634 that list, if any declares that it exports the settings of one of its 1635 own dependencies, those dependencies whose settings are "passed through" 1636 are added to the list. As new items are added to the list, they too will 1637 be processed, so it is possible to import settings through multiple levels 1638 of dependencies. 1639 1640 This method is not terribly useful on its own, it depends on being 1641 "primed" with a list of direct dependencies such as one provided by 1642 DirectDependencies. DirectAndImportedDependencies is intended to be the 1643 public entry point. 1644 """ 1645 1646 if dependencies == None: 1647 dependencies = [] 1648 1649 index = 0 1650 while index < len(dependencies): 1651 dependency = dependencies[index] 1652 dependency_dict = targets[dependency] 1653 # Add any dependencies whose settings should be imported to the list 1654 # if not already present. Newly-added items will be checked for 1655 # their own imports when the list iteration reaches them. 1656 # Rather than simply appending new items, insert them after the 1657 # dependency that exported them. This is done to more closely match 1658 # the depth-first method used by DeepDependencies. 1659 add_index = 1 1660 for imported_dependency in \ 1661 dependency_dict.get('export_dependent_settings', []): 1662 if imported_dependency not in dependencies: 1663 dependencies.insert(index + add_index, imported_dependency) 1664 add_index = add_index + 1 1665 index = index + 1 1666 1667 return dependencies 1668 1669 def DirectAndImportedDependencies(self, targets, dependencies=None): 1670 """Returns a list of a target's direct dependencies and all indirect 1671 dependencies that a dependency has advertised settings should be exported 1672 through the dependency for. 1673 """ 1674 1675 dependencies = self.DirectDependencies(dependencies) 1676 return self._AddImportedDependencies(targets, dependencies) 1677 1678 def DeepDependencies(self, dependencies=None): 1679 """Returns an OrderedSet of all of a target's dependencies, recursively.""" 1680 if dependencies is None: 1681 # Using a list to get ordered output and a set to do fast "is it 1682 # already added" checks. 1683 dependencies = OrderedSet() 1684 1685 for dependency in self.dependencies: 1686 # Check for None, corresponding to the root node. 1687 if dependency.ref is None: 1688 continue 1689 if dependency.ref not in dependencies: 1690 dependency.DeepDependencies(dependencies) 1691 dependencies.add(dependency.ref) 1692 1693 return dependencies 1694 1695 def _LinkDependenciesInternal(self, targets, include_shared_libraries, 1696 dependencies=None, initial=True): 1697 """Returns an OrderedSet of dependency targets that are linked 1698 into this target. 1699 1700 This function has a split personality, depending on the setting of 1701 |initial|. Outside callers should always leave |initial| at its default 1702 setting. 1703 1704 When adding a target to the list of dependencies, this function will 1705 recurse into itself with |initial| set to False, to collect dependencies 1706 that are linked into the linkable target for which the list is being built. 1707 1708 If |include_shared_libraries| is False, the resulting dependencies will not 1709 include shared_library targets that are linked into this target. 1710 """ 1711 if dependencies is None: 1712 # Using a list to get ordered output and a set to do fast "is it 1713 # already added" checks. 1714 dependencies = OrderedSet() 1715 1716 # Check for None, corresponding to the root node. 1717 if self.ref is None: 1718 return dependencies 1719 1720 # It's kind of sucky that |targets| has to be passed into this function, 1721 # but that's presently the easiest way to access the target dicts so that 1722 # this function can find target types. 1723 1724 if 'target_name' not in targets[self.ref]: 1725 raise GypError("Missing 'target_name' field in target.") 1726 1727 if 'type' not in targets[self.ref]: 1728 raise GypError("Missing 'type' field in target %s" % 1729 targets[self.ref]['target_name']) 1730 1731 target_type = targets[self.ref]['type'] 1732 1733 is_linkable = target_type in linkable_types 1734 1735 if initial and not is_linkable: 1736 # If this is the first target being examined and it's not linkable, 1737 # return an empty list of link dependencies, because the link 1738 # dependencies are intended to apply to the target itself (initial is 1739 # True) and this target won't be linked. 1740 return dependencies 1741 1742 # Don't traverse 'none' targets if explicitly excluded. 1743 if (target_type == 'none' and 1744 not targets[self.ref].get('dependencies_traverse', True)): 1745 dependencies.add(self.ref) 1746 return dependencies 1747 1748 # Executables, mac kernel extensions, windows drivers and loadable modules 1749 # are already fully and finally linked. Nothing else can be a link 1750 # dependency of them, there can only be dependencies in the sense that a 1751 # dependent target might run an executable or load the loadable_module. 1752 if not initial and target_type in ('executable', 'loadable_module', 1753 'mac_kernel_extension', 1754 'windows_driver'): 1755 return dependencies 1756 1757 # Shared libraries are already fully linked. They should only be included 1758 # in |dependencies| when adjusting static library dependencies (in order to 1759 # link against the shared_library's import lib), but should not be included 1760 # in |dependencies| when propagating link_settings. 1761 # The |include_shared_libraries| flag controls which of these two cases we 1762 # are handling. 1763 if (not initial and target_type == 'shared_library' and 1764 not include_shared_libraries): 1765 return dependencies 1766 1767 # The target is linkable, add it to the list of link dependencies. 1768 if self.ref not in dependencies: 1769 dependencies.add(self.ref) 1770 if initial or not is_linkable: 1771 # If this is a subsequent target and it's linkable, don't look any 1772 # further for linkable dependencies, as they'll already be linked into 1773 # this target linkable. Always look at dependencies of the initial 1774 # target, and always look at dependencies of non-linkables. 1775 for dependency in self.dependencies: 1776 dependency._LinkDependenciesInternal(targets, 1777 include_shared_libraries, 1778 dependencies, False) 1779 1780 return dependencies 1781 1782 def DependenciesForLinkSettings(self, targets): 1783 """ 1784 Returns a list of dependency targets whose link_settings should be merged 1785 into this target. 1786 """ 1787 1788 # TODO(sbaig) Currently, chrome depends on the bug that shared libraries' 1789 # link_settings are propagated. So for now, we will allow it, unless the 1790 # 'allow_sharedlib_linksettings_propagation' flag is explicitly set to 1791 # False. Once chrome is fixed, we can remove this flag. 1792 include_shared_libraries = \ 1793 targets[self.ref].get('allow_sharedlib_linksettings_propagation', True) 1794 return self._LinkDependenciesInternal(targets, include_shared_libraries) 1795 1796 def DependenciesToLinkAgainst(self, targets): 1797 """ 1798 Returns a list of dependency targets that are linked into this target. 1799 """ 1800 return self._LinkDependenciesInternal(targets, True) 1801 1802 1803def BuildDependencyList(targets): 1804 # Create a DependencyGraphNode for each target. Put it into a dict for easy 1805 # access. 1806 dependency_nodes = {} 1807 for target, spec in targets.items(): 1808 if target not in dependency_nodes: 1809 dependency_nodes[target] = DependencyGraphNode(target) 1810 1811 # Set up the dependency links. Targets that have no dependencies are treated 1812 # as dependent on root_node. 1813 root_node = DependencyGraphNode(None) 1814 for target, spec in targets.items(): 1815 target_node = dependency_nodes[target] 1816 target_build_file = gyp.common.BuildFile(target) 1817 dependencies = spec.get('dependencies') 1818 if not dependencies: 1819 target_node.dependencies = [root_node] 1820 root_node.dependents.append(target_node) 1821 else: 1822 for dependency in dependencies: 1823 dependency_node = dependency_nodes.get(dependency) 1824 if not dependency_node: 1825 raise GypError("Dependency '%s' not found while " 1826 "trying to load target %s" % (dependency, target)) 1827 target_node.dependencies.append(dependency_node) 1828 dependency_node.dependents.append(target_node) 1829 1830 flat_list = root_node.FlattenToList() 1831 1832 # If there's anything left unvisited, there must be a circular dependency 1833 # (cycle). 1834 if len(flat_list) != len(targets): 1835 if not root_node.dependents: 1836 # If all targets have dependencies, add the first target as a dependent 1837 # of root_node so that the cycle can be discovered from root_node. 1838 target = next(iter(targets)) 1839 target_node = dependency_nodes[target] 1840 target_node.dependencies.append(root_node) 1841 root_node.dependents.append(target_node) 1842 1843 cycles = [] 1844 for cycle in root_node.FindCycles(): 1845 paths = [node.ref for node in cycle] 1846 cycles.append('Cycle: %s' % ' -> '.join(paths)) 1847 raise DependencyGraphNode.CircularException( 1848 'Cycles in dependency graph detected:\n' + '\n'.join(cycles)) 1849 1850 return [dependency_nodes, flat_list] 1851 1852 1853def VerifyNoGYPFileCircularDependencies(targets): 1854 # Create a DependencyGraphNode for each gyp file containing a target. Put 1855 # it into a dict for easy access. 1856 dependency_nodes = {} 1857 for target in targets.keys(): 1858 build_file = gyp.common.BuildFile(target) 1859 if not build_file in dependency_nodes: 1860 dependency_nodes[build_file] = DependencyGraphNode(build_file) 1861 1862 # Set up the dependency links. 1863 for target, spec in targets.items(): 1864 build_file = gyp.common.BuildFile(target) 1865 build_file_node = dependency_nodes[build_file] 1866 target_dependencies = spec.get('dependencies', []) 1867 for dependency in target_dependencies: 1868 try: 1869 dependency_build_file = gyp.common.BuildFile(dependency) 1870 except GypError as e: 1871 gyp.common.ExceptionAppend( 1872 e, 'while computing dependencies of .gyp file %s' % build_file) 1873 raise 1874 1875 if dependency_build_file == build_file: 1876 # A .gyp file is allowed to refer back to itself. 1877 continue 1878 dependency_node = dependency_nodes.get(dependency_build_file) 1879 if not dependency_node: 1880 raise GypError("Dependancy '%s' not found" % dependency_build_file) 1881 if dependency_node not in build_file_node.dependencies: 1882 build_file_node.dependencies.append(dependency_node) 1883 dependency_node.dependents.append(build_file_node) 1884 1885 1886 # Files that have no dependencies are treated as dependent on root_node. 1887 root_node = DependencyGraphNode(None) 1888 for build_file_node in dependency_nodes.values(): 1889 if len(build_file_node.dependencies) == 0: 1890 build_file_node.dependencies.append(root_node) 1891 root_node.dependents.append(build_file_node) 1892 1893 flat_list = root_node.FlattenToList() 1894 1895 # If there's anything left unvisited, there must be a circular dependency 1896 # (cycle). 1897 if len(flat_list) != len(dependency_nodes): 1898 if not root_node.dependents: 1899 # If all files have dependencies, add the first file as a dependent 1900 # of root_node so that the cycle can be discovered from root_node. 1901 file_node = next(iter(dependency_nodes.values())) 1902 file_node.dependencies.append(root_node) 1903 root_node.dependents.append(file_node) 1904 cycles = [] 1905 for cycle in root_node.FindCycles(): 1906 paths = [node.ref for node in cycle] 1907 cycles.append('Cycle: %s' % ' -> '.join(paths)) 1908 raise DependencyGraphNode.CircularException( 1909 'Cycles in .gyp file dependency graph detected:\n' + '\n'.join(cycles)) 1910 1911 1912def DoDependentSettings(key, flat_list, targets, dependency_nodes): 1913 # key should be one of all_dependent_settings, direct_dependent_settings, 1914 # or link_settings. 1915 1916 for target in flat_list: 1917 target_dict = targets[target] 1918 build_file = gyp.common.BuildFile(target) 1919 1920 if key == 'all_dependent_settings': 1921 dependencies = dependency_nodes[target].DeepDependencies() 1922 elif key == 'direct_dependent_settings': 1923 dependencies = \ 1924 dependency_nodes[target].DirectAndImportedDependencies(targets) 1925 elif key == 'link_settings': 1926 dependencies = \ 1927 dependency_nodes[target].DependenciesForLinkSettings(targets) 1928 else: 1929 raise GypError("DoDependentSettings doesn't know how to determine " 1930 'dependencies for ' + key) 1931 1932 for dependency in dependencies: 1933 dependency_dict = targets[dependency] 1934 if not key in dependency_dict: 1935 continue 1936 dependency_build_file = gyp.common.BuildFile(dependency) 1937 MergeDicts(target_dict, dependency_dict[key], 1938 build_file, dependency_build_file) 1939 1940 1941def AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, 1942 sort_dependencies): 1943 # Recompute target "dependencies" properties. For each static library 1944 # target, remove "dependencies" entries referring to other static libraries, 1945 # unless the dependency has the "hard_dependency" attribute set. For each 1946 # linkable target, add a "dependencies" entry referring to all of the 1947 # target's computed list of link dependencies (including static libraries 1948 # if no such entry is already present. 1949 for target in flat_list: 1950 target_dict = targets[target] 1951 target_type = target_dict['type'] 1952 1953 if target_type == 'static_library': 1954 if not 'dependencies' in target_dict: 1955 continue 1956 1957 target_dict['dependencies_original'] = target_dict.get( 1958 'dependencies', [])[:] 1959 1960 # A static library should not depend on another static library unless 1961 # the dependency relationship is "hard," which should only be done when 1962 # a dependent relies on some side effect other than just the build 1963 # product, like a rule or action output. Further, if a target has a 1964 # non-hard dependency, but that dependency exports a hard dependency, 1965 # the non-hard dependency can safely be removed, but the exported hard 1966 # dependency must be added to the target to keep the same dependency 1967 # ordering. 1968 dependencies = \ 1969 dependency_nodes[target].DirectAndImportedDependencies(targets) 1970 index = 0 1971 while index < len(dependencies): 1972 dependency = dependencies[index] 1973 dependency_dict = targets[dependency] 1974 1975 # Remove every non-hard static library dependency and remove every 1976 # non-static library dependency that isn't a direct dependency. 1977 if (dependency_dict['type'] == 'static_library' and \ 1978 not dependency_dict.get('hard_dependency', False)) or \ 1979 (dependency_dict['type'] != 'static_library' and \ 1980 not dependency in target_dict['dependencies']): 1981 # Take the dependency out of the list, and don't increment index 1982 # because the next dependency to analyze will shift into the index 1983 # formerly occupied by the one being removed. 1984 del dependencies[index] 1985 else: 1986 index = index + 1 1987 1988 # Update the dependencies. If the dependencies list is empty, it's not 1989 # needed, so unhook it. 1990 if len(dependencies) > 0: 1991 target_dict['dependencies'] = dependencies 1992 else: 1993 del target_dict['dependencies'] 1994 1995 elif target_type in linkable_types: 1996 # Get a list of dependency targets that should be linked into this 1997 # target. Add them to the dependencies list if they're not already 1998 # present. 1999 2000 link_dependencies = \ 2001 dependency_nodes[target].DependenciesToLinkAgainst(targets) 2002 for dependency in link_dependencies: 2003 if dependency == target: 2004 continue 2005 if not 'dependencies' in target_dict: 2006 target_dict['dependencies'] = [] 2007 if not dependency in target_dict['dependencies']: 2008 target_dict['dependencies'].append(dependency) 2009 # Sort the dependencies list in the order from dependents to dependencies. 2010 # e.g. If A and B depend on C and C depends on D, sort them in A, B, C, D. 2011 # Note: flat_list is already sorted in the order from dependencies to 2012 # dependents. 2013 if sort_dependencies and 'dependencies' in target_dict: 2014 target_dict['dependencies'] = [dep for dep in reversed(flat_list) 2015 if dep in target_dict['dependencies']] 2016 2017 2018# Initialize this here to speed up MakePathRelative. 2019exception_re = re.compile(r'''["']?[-/$<>^]''') 2020 2021 2022def MakePathRelative(to_file, fro_file, item): 2023 # If item is a relative path, it's relative to the build file dict that it's 2024 # coming from. Fix it up to make it relative to the build file dict that 2025 # it's going into. 2026 # Exception: any |item| that begins with these special characters is 2027 # returned without modification. 2028 # / Used when a path is already absolute (shortcut optimization; 2029 # such paths would be returned as absolute anyway) 2030 # $ Used for build environment variables 2031 # - Used for some build environment flags (such as -lapr-1 in a 2032 # "libraries" section) 2033 # < Used for our own variable and command expansions (see ExpandVariables) 2034 # > Used for our own variable and command expansions (see ExpandVariables) 2035 # ^ Used for our own variable and command expansions (see ExpandVariables) 2036 # 2037 # "/' Used when a value is quoted. If these are present, then we 2038 # check the second character instead. 2039 # 2040 if to_file == fro_file or exception_re.match(item): 2041 return item 2042 else: 2043 # TODO(dglazkov) The backslash/forward-slash replacement at the end is a 2044 # temporary measure. This should really be addressed by keeping all paths 2045 # in POSIX until actual project generation. 2046 ret = os.path.normpath(os.path.join( 2047 gyp.common.RelativePath(os.path.dirname(fro_file), 2048 os.path.dirname(to_file)), 2049 item)).replace('\\', '/') 2050 if item[-1] == '/': 2051 ret += '/' 2052 return ret 2053 2054def MergeLists(to, fro, to_file, fro_file, is_paths=False, append=True): 2055 # Python documentation recommends objects which do not support hash 2056 # set this value to None. Python library objects follow this rule. 2057 is_hashable = lambda val: val.__hash__ 2058 2059 # If x is hashable, returns whether x is in s. Else returns whether x is in l. 2060 def is_in_set_or_list(x, s, l): 2061 if is_hashable(x): 2062 return x in s 2063 return x in l 2064 2065 prepend_index = 0 2066 2067 # Make membership testing of hashables in |to| (in particular, strings) 2068 # faster. 2069 hashable_to_set = set(x for x in to if is_hashable(x)) 2070 for item in fro: 2071 singleton = False 2072 if isinstance(item, _str_int_types): 2073 # The cheap and easy case. 2074 if is_paths: 2075 to_item = MakePathRelative(to_file, fro_file, item) 2076 else: 2077 to_item = item 2078 2079 if not (isinstance(item, _str_types) and item.startswith('-')): 2080 # Any string that doesn't begin with a "-" is a singleton - it can 2081 # only appear once in a list, to be enforced by the list merge append 2082 # or prepend. 2083 singleton = True 2084 elif type(item) is dict: 2085 # Make a copy of the dictionary, continuing to look for paths to fix. 2086 # The other intelligent aspects of merge processing won't apply because 2087 # item is being merged into an empty dict. 2088 to_item = {} 2089 MergeDicts(to_item, item, to_file, fro_file) 2090 elif type(item) is list: 2091 # Recurse, making a copy of the list. If the list contains any 2092 # descendant dicts, path fixing will occur. Note that here, custom 2093 # values for is_paths and append are dropped; those are only to be 2094 # applied to |to| and |fro|, not sublists of |fro|. append shouldn't 2095 # matter anyway because the new |to_item| list is empty. 2096 to_item = [] 2097 MergeLists(to_item, item, to_file, fro_file) 2098 else: 2099 raise TypeError( 2100 'Attempt to merge list item of unsupported type ' + \ 2101 item.__class__.__name__) 2102 2103 if append: 2104 # If appending a singleton that's already in the list, don't append. 2105 # This ensures that the earliest occurrence of the item will stay put. 2106 if not singleton or not is_in_set_or_list(to_item, hashable_to_set, to): 2107 to.append(to_item) 2108 if is_hashable(to_item): 2109 hashable_to_set.add(to_item) 2110 else: 2111 # If prepending a singleton that's already in the list, remove the 2112 # existing instance and proceed with the prepend. This ensures that the 2113 # item appears at the earliest possible position in the list. 2114 while singleton and to_item in to: 2115 to.remove(to_item) 2116 2117 # Don't just insert everything at index 0. That would prepend the new 2118 # items to the list in reverse order, which would be an unwelcome 2119 # surprise. 2120 to.insert(prepend_index, to_item) 2121 if is_hashable(to_item): 2122 hashable_to_set.add(to_item) 2123 prepend_index = prepend_index + 1 2124 2125 2126def MergeDicts(to, fro, to_file, fro_file): 2127 # I wanted to name the parameter "from" but it's a Python keyword... 2128 for k, v in fro.items(): 2129 # It would be nice to do "if not k in to: to[k] = v" but that wouldn't give 2130 # copy semantics. Something else may want to merge from the |fro| dict 2131 # later, and having the same dict ref pointed to twice in the tree isn't 2132 # what anyone wants considering that the dicts may subsequently be 2133 # modified. 2134 if k in to: 2135 bad_merge = False 2136 if isinstance(v, _str_int_types): 2137 if not isinstance(to[k], _str_int_types): 2138 bad_merge = True 2139 elif type(v) is not type(to[k]): 2140 bad_merge = True 2141 2142 if bad_merge: 2143 raise TypeError( 2144 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ 2145 ' into incompatible type ' + to[k].__class__.__name__ + \ 2146 ' for key ' + k) 2147 if isinstance(v, _str_int_types): 2148 # Overwrite the existing value, if any. Cheap and easy. 2149 is_path = IsPathSection(k) 2150 if is_path: 2151 to[k] = MakePathRelative(to_file, fro_file, v) 2152 else: 2153 to[k] = v 2154 elif type(v) is dict: 2155 # Recurse, guaranteeing copies will be made of objects that require it. 2156 if not k in to: 2157 to[k] = {} 2158 MergeDicts(to[k], v, to_file, fro_file) 2159 elif type(v) is list: 2160 # Lists in dicts can be merged with different policies, depending on 2161 # how the key in the "from" dict (k, the from-key) is written. 2162 # 2163 # If the from-key has ...the to-list will have this action 2164 # this character appended:... applied when receiving the from-list: 2165 # = replace 2166 # + prepend 2167 # ? set, only if to-list does not yet exist 2168 # (none) append 2169 # 2170 # This logic is list-specific, but since it relies on the associated 2171 # dict key, it's checked in this dict-oriented function. 2172 ext = k[-1] 2173 append = True 2174 if ext == '=': 2175 list_base = k[:-1] 2176 lists_incompatible = [list_base, list_base + '?'] 2177 to[list_base] = [] 2178 elif ext == '+': 2179 list_base = k[:-1] 2180 lists_incompatible = [list_base + '=', list_base + '?'] 2181 append = False 2182 elif ext == '?': 2183 list_base = k[:-1] 2184 lists_incompatible = [list_base, list_base + '=', list_base + '+'] 2185 else: 2186 list_base = k 2187 lists_incompatible = [list_base + '=', list_base + '?'] 2188 2189 # Some combinations of merge policies appearing together are meaningless. 2190 # It's stupid to replace and append simultaneously, for example. Append 2191 # and prepend are the only policies that can coexist. 2192 for list_incompatible in lists_incompatible: 2193 if list_incompatible in fro: 2194 raise GypError('Incompatible list policies ' + k + ' and ' + 2195 list_incompatible) 2196 2197 if list_base in to: 2198 if ext == '?': 2199 # If the key ends in "?", the list will only be merged if it doesn't 2200 # already exist. 2201 continue 2202 elif type(to[list_base]) is not list: 2203 # This may not have been checked above if merging in a list with an 2204 # extension character. 2205 raise TypeError( 2206 'Attempt to merge dict value of type ' + v.__class__.__name__ + \ 2207 ' into incompatible type ' + to[list_base].__class__.__name__ + \ 2208 ' for key ' + list_base + '(' + k + ')') 2209 else: 2210 to[list_base] = [] 2211 2212 # Call MergeLists, which will make copies of objects that require it. 2213 # MergeLists can recurse back into MergeDicts, although this will be 2214 # to make copies of dicts (with paths fixed), there will be no 2215 # subsequent dict "merging" once entering a list because lists are 2216 # always replaced, appended to, or prepended to. 2217 is_paths = IsPathSection(list_base) 2218 MergeLists(to[list_base], v, to_file, fro_file, is_paths, append) 2219 else: 2220 raise TypeError( 2221 'Attempt to merge dict value of unsupported type ' + \ 2222 v.__class__.__name__ + ' for key ' + k) 2223 2224 2225def MergeConfigWithInheritance(new_configuration_dict, build_file, 2226 target_dict, configuration, visited): 2227 # Skip if previously visted. 2228 if configuration in visited: 2229 return 2230 2231 # Look at this configuration. 2232 configuration_dict = target_dict['configurations'][configuration] 2233 2234 # Merge in parents. 2235 for parent in configuration_dict.get('inherit_from', []): 2236 MergeConfigWithInheritance(new_configuration_dict, build_file, 2237 target_dict, parent, visited + [configuration]) 2238 2239 # Merge it into the new config. 2240 MergeDicts(new_configuration_dict, configuration_dict, 2241 build_file, build_file) 2242 2243 # Drop abstract. 2244 if 'abstract' in new_configuration_dict: 2245 del new_configuration_dict['abstract'] 2246 2247 2248def SetUpConfigurations(target, target_dict): 2249 # key_suffixes is a list of key suffixes that might appear on key names. 2250 # These suffixes are handled in conditional evaluations (for =, +, and ?) 2251 # and rules/exclude processing (for ! and /). Keys with these suffixes 2252 # should be treated the same as keys without. 2253 key_suffixes = ['=', '+', '?', '!', '/'] 2254 2255 build_file = gyp.common.BuildFile(target) 2256 2257 # Provide a single configuration by default if none exists. 2258 # TODO(mark): Signal an error if default_configurations exists but 2259 # configurations does not. 2260 if not 'configurations' in target_dict: 2261 target_dict['configurations'] = {'Default': {}} 2262 if not 'default_configuration' in target_dict: 2263 concrete = [i for (i, config) in target_dict['configurations'].items() 2264 if not config.get('abstract')] 2265 target_dict['default_configuration'] = sorted(concrete)[0] 2266 2267 merged_configurations = {} 2268 configs = target_dict['configurations'] 2269 for (configuration, old_configuration_dict) in configs.items(): 2270 # Skip abstract configurations (saves work only). 2271 if old_configuration_dict.get('abstract'): 2272 continue 2273 # Configurations inherit (most) settings from the enclosing target scope. 2274 # Get the inheritance relationship right by making a copy of the target 2275 # dict. 2276 new_configuration_dict = {} 2277 for (key, target_val) in target_dict.items(): 2278 key_ext = key[-1:] 2279 if key_ext in key_suffixes: 2280 key_base = key[:-1] 2281 else: 2282 key_base = key 2283 if not key_base in non_configuration_keys: 2284 new_configuration_dict[key] = gyp.simple_copy.deepcopy(target_val) 2285 2286 # Merge in configuration (with all its parents first). 2287 MergeConfigWithInheritance(new_configuration_dict, build_file, 2288 target_dict, configuration, []) 2289 2290 merged_configurations[configuration] = new_configuration_dict 2291 2292 # Put the new configurations back into the target dict as a configuration. 2293 for configuration in merged_configurations.keys(): 2294 target_dict['configurations'][configuration] = ( 2295 merged_configurations[configuration]) 2296 2297 # Now drop all the abstract ones. 2298 configs = target_dict['configurations'] 2299 target_dict['configurations'] = \ 2300 {k: v for k, v in configs.items() if not v.get('abstract')} 2301 2302 # Now that all of the target's configurations have been built, go through 2303 # the target dict's keys and remove everything that's been moved into a 2304 # "configurations" section. 2305 delete_keys = [] 2306 for key in target_dict: 2307 key_ext = key[-1:] 2308 if key_ext in key_suffixes: 2309 key_base = key[:-1] 2310 else: 2311 key_base = key 2312 if not key_base in non_configuration_keys: 2313 delete_keys.append(key) 2314 for key in delete_keys: 2315 del target_dict[key] 2316 2317 # Check the configurations to see if they contain invalid keys. 2318 for configuration in target_dict['configurations'].keys(): 2319 configuration_dict = target_dict['configurations'][configuration] 2320 for key in configuration_dict.keys(): 2321 if key in invalid_configuration_keys: 2322 raise GypError('%s not allowed in the %s configuration, found in ' 2323 'target %s' % (key, configuration, target)) 2324 2325 2326 2327def ProcessListFiltersInDict(name, the_dict): 2328 """Process regular expression and exclusion-based filters on lists. 2329 2330 An exclusion list is in a dict key named with a trailing "!", like 2331 "sources!". Every item in such a list is removed from the associated 2332 main list, which in this example, would be "sources". Removed items are 2333 placed into a "sources_excluded" list in the dict. 2334 2335 Regular expression (regex) filters are contained in dict keys named with a 2336 trailing "/", such as "sources/" to operate on the "sources" list. Regex 2337 filters in a dict take the form: 2338 'sources/': [ ['exclude', '_(linux|mac|win)\\.cc$'], 2339 ['include', '_mac\\.cc$'] ], 2340 The first filter says to exclude all files ending in _linux.cc, _mac.cc, and 2341 _win.cc. The second filter then includes all files ending in _mac.cc that 2342 are now or were once in the "sources" list. Items matching an "exclude" 2343 filter are subject to the same processing as would occur if they were listed 2344 by name in an exclusion list (ending in "!"). Items matching an "include" 2345 filter are brought back into the main list if previously excluded by an 2346 exclusion list or exclusion regex filter. Subsequent matching "exclude" 2347 patterns can still cause items to be excluded after matching an "include". 2348 """ 2349 2350 # Look through the dictionary for any lists whose keys end in "!" or "/". 2351 # These are lists that will be treated as exclude lists and regular 2352 # expression-based exclude/include lists. Collect the lists that are 2353 # needed first, looking for the lists that they operate on, and assemble 2354 # then into |lists|. This is done in a separate loop up front, because 2355 # the _included and _excluded keys need to be added to the_dict, and that 2356 # can't be done while iterating through it. 2357 2358 lists = [] 2359 del_lists = [] 2360 for key, value in the_dict.items(): 2361 operation = key[-1] 2362 if operation != '!' and operation != '/': 2363 continue 2364 2365 if type(value) is not list: 2366 raise ValueError(name + ' key ' + key + ' must be list, not ' + \ 2367 value.__class__.__name__) 2368 2369 list_key = key[:-1] 2370 if list_key not in the_dict: 2371 # This happens when there's a list like "sources!" but no corresponding 2372 # "sources" list. Since there's nothing for it to operate on, queue up 2373 # the "sources!" list for deletion now. 2374 del_lists.append(key) 2375 continue 2376 2377 if type(the_dict[list_key]) is not list: 2378 value = the_dict[list_key] 2379 raise ValueError(name + ' key ' + list_key + \ 2380 ' must be list, not ' + \ 2381 value.__class__.__name__ + ' when applying ' + \ 2382 {'!': 'exclusion', '/': 'regex'}[operation]) 2383 2384 if not list_key in lists: 2385 lists.append(list_key) 2386 2387 # Delete the lists that are known to be unneeded at this point. 2388 for del_list in del_lists: 2389 del the_dict[del_list] 2390 2391 for list_key in lists: 2392 the_list = the_dict[list_key] 2393 2394 # Initialize the list_actions list, which is parallel to the_list. Each 2395 # item in list_actions identifies whether the corresponding item in 2396 # the_list should be excluded, unconditionally preserved (included), or 2397 # whether no exclusion or inclusion has been applied. Items for which 2398 # no exclusion or inclusion has been applied (yet) have value -1, items 2399 # excluded have value 0, and items included have value 1. Includes and 2400 # excludes override previous actions. All items in list_actions are 2401 # initialized to -1 because no excludes or includes have been processed 2402 # yet. 2403 list_actions = list((-1,) * len(the_list)) 2404 2405 exclude_key = list_key + '!' 2406 if exclude_key in the_dict: 2407 for exclude_item in the_dict[exclude_key]: 2408 for index, list_item in enumerate(the_list): 2409 if exclude_item == list_item: 2410 # This item matches the exclude_item, so set its action to 0 2411 # (exclude). 2412 list_actions[index] = 0 2413 2414 # The "whatever!" list is no longer needed, dump it. 2415 del the_dict[exclude_key] 2416 2417 regex_key = list_key + '/' 2418 if regex_key in the_dict: 2419 for regex_item in the_dict[regex_key]: 2420 [action, pattern] = regex_item 2421 pattern_re = re.compile(pattern) 2422 2423 if action == 'exclude': 2424 # This item matches an exclude regex, so set its value to 0 (exclude). 2425 action_value = 0 2426 elif action == 'include': 2427 # This item matches an include regex, so set its value to 1 (include). 2428 action_value = 1 2429 else: 2430 # This is an action that doesn't make any sense. 2431 raise ValueError('Unrecognized action ' + action + ' in ' + name + \ 2432 ' key ' + regex_key) 2433 2434 for index, list_item in enumerate(the_list): 2435 if list_actions[index] == action_value: 2436 # Even if the regex matches, nothing will change so continue (regex 2437 # searches are expensive). 2438 continue 2439 if pattern_re.search(list_item): 2440 # Regular expression match. 2441 list_actions[index] = action_value 2442 2443 # The "whatever/" list is no longer needed, dump it. 2444 del the_dict[regex_key] 2445 2446 # Add excluded items to the excluded list. 2447 # 2448 # Note that exclude_key ("sources!") is different from excluded_key 2449 # ("sources_excluded"). The exclude_key list is input and it was already 2450 # processed and deleted; the excluded_key list is output and it's about 2451 # to be created. 2452 excluded_key = list_key + '_excluded' 2453 if excluded_key in the_dict: 2454 raise GypError(name + ' key ' + excluded_key + 2455 ' must not be present prior ' 2456 ' to applying exclusion/regex filters for ' + list_key) 2457 2458 excluded_list = [] 2459 2460 # Go backwards through the list_actions list so that as items are deleted, 2461 # the indices of items that haven't been seen yet don't shift. That means 2462 # that things need to be prepended to excluded_list to maintain them in the 2463 # same order that they existed in the_list. 2464 for index in range(len(list_actions) - 1, -1, -1): 2465 if list_actions[index] == 0: 2466 # Dump anything with action 0 (exclude). Keep anything with action 1 2467 # (include) or -1 (no include or exclude seen for the item). 2468 excluded_list.insert(0, the_list[index]) 2469 del the_list[index] 2470 2471 # If anything was excluded, put the excluded list into the_dict at 2472 # excluded_key. 2473 if len(excluded_list) > 0: 2474 the_dict[excluded_key] = excluded_list 2475 2476 # Now recurse into subdicts and lists that may contain dicts. 2477 for key, value in the_dict.items(): 2478 if type(value) is dict: 2479 ProcessListFiltersInDict(key, value) 2480 elif type(value) is list: 2481 ProcessListFiltersInList(key, value) 2482 2483 2484def ProcessListFiltersInList(name, the_list): 2485 for item in the_list: 2486 if type(item) is dict: 2487 ProcessListFiltersInDict(name, item) 2488 elif type(item) is list: 2489 ProcessListFiltersInList(name, item) 2490 2491 2492def ValidateTargetType(target, target_dict): 2493 """Ensures the 'type' field on the target is one of the known types. 2494 2495 Arguments: 2496 target: string, name of target. 2497 target_dict: dict, target spec. 2498 2499 Raises an exception on error. 2500 """ 2501 VALID_TARGET_TYPES = ('executable', 'loadable_module', 2502 'static_library', 'shared_library', 2503 'mac_kernel_extension', 'none', 'windows_driver') 2504 target_type = target_dict.get('type', None) 2505 if target_type not in VALID_TARGET_TYPES: 2506 raise GypError("Target %s has an invalid target type '%s'. " 2507 "Must be one of %s." % 2508 (target, target_type, '/'.join(VALID_TARGET_TYPES))) 2509 if (target_dict.get('standalone_static_library', 0) and 2510 not target_type == 'static_library'): 2511 raise GypError('Target %s has type %s but standalone_static_library flag is' 2512 ' only valid for static_library type.' % (target, 2513 target_type)) 2514 2515 2516def ValidateSourcesInTarget(target, target_dict, build_file, 2517 duplicate_basename_check): 2518 if not duplicate_basename_check: 2519 return 2520 if target_dict.get('type', None) != 'static_library': 2521 return 2522 sources = target_dict.get('sources', []) 2523 basenames = {} 2524 for source in sources: 2525 name, ext = os.path.splitext(source) 2526 is_compiled_file = ext in [ 2527 '.c', '.cc', '.cpp', '.cxx', '.m', '.mm', '.s', '.S'] 2528 if not is_compiled_file: 2529 continue 2530 basename = os.path.basename(name) # Don't include extension. 2531 basenames.setdefault(basename, []).append(source) 2532 2533 error = '' 2534 for basename, files in basenames.items(): 2535 if len(files) > 1: 2536 error += ' %s: %s\n' % (basename, ' '.join(files)) 2537 2538 if error: 2539 print('static library %s has several files with the same basename:\n' % 2540 target + error + 'libtool on Mac cannot handle that. Use ' 2541 '--no-duplicate-basename-check to disable this validation.') 2542 raise GypError('Duplicate basenames in sources section, see list above') 2543 2544 2545def ValidateRulesInTarget(target, target_dict, extra_sources_for_rules): 2546 """Ensures that the rules sections in target_dict are valid and consistent, 2547 and determines which sources they apply to. 2548 2549 Arguments: 2550 target: string, name of target. 2551 target_dict: dict, target spec containing "rules" and "sources" lists. 2552 extra_sources_for_rules: a list of keys to scan for rule matches in 2553 addition to 'sources'. 2554 """ 2555 2556 # Dicts to map between values found in rules' 'rule_name' and 'extension' 2557 # keys and the rule dicts themselves. 2558 rule_names = {} 2559 rule_extensions = {} 2560 2561 rules = target_dict.get('rules', []) 2562 for rule in rules: 2563 # Make sure that there's no conflict among rule names and extensions. 2564 rule_name = rule['rule_name'] 2565 if rule_name in rule_names: 2566 raise GypError('rule %s exists in duplicate, target %s' % 2567 (rule_name, target)) 2568 rule_names[rule_name] = rule 2569 2570 rule_extension = rule['extension'] 2571 if rule_extension.startswith('.'): 2572 rule_extension = rule_extension[1:] 2573 if rule_extension in rule_extensions: 2574 raise GypError(('extension %s associated with multiple rules, ' + 2575 'target %s rules %s and %s') % 2576 (rule_extension, target, 2577 rule_extensions[rule_extension]['rule_name'], 2578 rule_name)) 2579 rule_extensions[rule_extension] = rule 2580 2581 # Make sure rule_sources isn't already there. It's going to be 2582 # created below if needed. 2583 if 'rule_sources' in rule: 2584 raise GypError( 2585 'rule_sources must not exist in input, target %s rule %s' % 2586 (target, rule_name)) 2587 2588 rule_sources = [] 2589 source_keys = ['sources'] 2590 source_keys.extend(extra_sources_for_rules) 2591 for source_key in source_keys: 2592 for source in target_dict.get(source_key, []): 2593 (source_root, source_extension) = os.path.splitext(source) 2594 if source_extension.startswith('.'): 2595 source_extension = source_extension[1:] 2596 if source_extension == rule_extension: 2597 rule_sources.append(source) 2598 2599 if len(rule_sources) > 0: 2600 rule['rule_sources'] = rule_sources 2601 2602 2603def ValidateRunAsInTarget(target, target_dict, build_file): 2604 target_name = target_dict.get('target_name') 2605 run_as = target_dict.get('run_as') 2606 if not run_as: 2607 return 2608 if type(run_as) is not dict: 2609 raise GypError("The 'run_as' in target %s from file %s should be a " 2610 "dictionary." % 2611 (target_name, build_file)) 2612 action = run_as.get('action') 2613 if not action: 2614 raise GypError("The 'run_as' in target %s from file %s must have an " 2615 "'action' section." % 2616 (target_name, build_file)) 2617 if type(action) is not list: 2618 raise GypError("The 'action' for 'run_as' in target %s from file %s " 2619 "must be a list." % 2620 (target_name, build_file)) 2621 working_directory = run_as.get('working_directory') 2622 if working_directory and not isinstance(working_directory, _str_types): 2623 raise GypError("The 'working_directory' for 'run_as' in target %s " 2624 "in file %s should be a string." % 2625 (target_name, build_file)) 2626 environment = run_as.get('environment') 2627 if environment and type(environment) is not dict: 2628 raise GypError("The 'environment' for 'run_as' in target %s " 2629 "in file %s should be a dictionary." % 2630 (target_name, build_file)) 2631 2632 2633def ValidateActionsInTarget(target, target_dict, build_file): 2634 '''Validates the inputs to the actions in a target.''' 2635 target_name = target_dict.get('target_name') 2636 actions = target_dict.get('actions', []) 2637 for action in actions: 2638 action_name = action.get('action_name') 2639 if not action_name: 2640 raise GypError("Anonymous action in target %s. " 2641 "An action must have an 'action_name' field." % 2642 target_name) 2643 inputs = action.get('inputs', None) 2644 if inputs is None: 2645 raise GypError('Action in target %s has no inputs.' % target_name) 2646 action_command = action.get('action') 2647 if action_command and not action_command[0]: 2648 raise GypError("Empty action as command in target %s." % target_name) 2649 2650 2651def TurnIntIntoStrInDict(the_dict): 2652 """Given dict the_dict, recursively converts all integers into strings. 2653 """ 2654 # Use items instead of iteritems because there's no need to try to look at 2655 # reinserted keys and their associated values. 2656 for k, v in the_dict.items(): 2657 if isinstance(v, _int_types): 2658 v = str(v) 2659 the_dict[k] = v 2660 elif type(v) is dict: 2661 TurnIntIntoStrInDict(v) 2662 elif type(v) is list: 2663 TurnIntIntoStrInList(v) 2664 2665 if isinstance(k, _int_types): 2666 del the_dict[k] 2667 the_dict[str(k)] = v 2668 2669 2670def TurnIntIntoStrInList(the_list): 2671 """Given list the_list, recursively converts all integers into strings. 2672 """ 2673 for index, item in enumerate(the_list): 2674 if isinstance(item, _int_types): 2675 the_list[index] = str(item) 2676 elif type(item) is dict: 2677 TurnIntIntoStrInDict(item) 2678 elif type(item) is list: 2679 TurnIntIntoStrInList(item) 2680 2681 2682def PruneUnwantedTargets(targets, flat_list, dependency_nodes, root_targets, 2683 data): 2684 """Return only the targets that are deep dependencies of |root_targets|.""" 2685 qualified_root_targets = [] 2686 for target in root_targets: 2687 target = target.strip() 2688 qualified_targets = gyp.common.FindQualifiedTargets(target, flat_list) 2689 if not qualified_targets: 2690 raise GypError("Could not find target %s" % target) 2691 qualified_root_targets.extend(qualified_targets) 2692 2693 wanted_targets = {} 2694 for target in qualified_root_targets: 2695 wanted_targets[target] = targets[target] 2696 for dependency in dependency_nodes[target].DeepDependencies(): 2697 wanted_targets[dependency] = targets[dependency] 2698 2699 wanted_flat_list = [t for t in flat_list if t in wanted_targets] 2700 2701 # Prune unwanted targets from each build_file's data dict. 2702 for build_file in data['target_build_files']: 2703 if not 'targets' in data[build_file]: 2704 continue 2705 new_targets = [] 2706 for target in data[build_file]['targets']: 2707 qualified_name = gyp.common.QualifiedTarget(build_file, 2708 target['target_name'], 2709 target['toolset']) 2710 if qualified_name in wanted_targets: 2711 new_targets.append(target) 2712 data[build_file]['targets'] = new_targets 2713 2714 return wanted_targets, wanted_flat_list 2715 2716 2717def VerifyNoCollidingTargets(targets): 2718 """Verify that no two targets in the same directory share the same name. 2719 2720 Arguments: 2721 targets: A list of targets in the form 'path/to/file.gyp:target_name'. 2722 """ 2723 # Keep a dict going from 'subdirectory:target_name' to 'foo.gyp'. 2724 used = {} 2725 for target in targets: 2726 # Separate out 'path/to/file.gyp, 'target_name' from 2727 # 'path/to/file.gyp:target_name'. 2728 path, name = target.rsplit(':', 1) 2729 # Separate out 'path/to', 'file.gyp' from 'path/to/file.gyp'. 2730 subdir, gyp = os.path.split(path) 2731 # Use '.' for the current directory '', so that the error messages make 2732 # more sense. 2733 if not subdir: 2734 subdir = '.' 2735 # Prepare a key like 'path/to:target_name'. 2736 key = subdir + ':' + name 2737 if key in used: 2738 # Complain if this target is already used. 2739 raise GypError('Duplicate target name "%s" in directory "%s" used both ' 2740 'in "%s" and "%s".' % (name, subdir, gyp, used[key])) 2741 used[key] = gyp 2742 2743 2744def SetGeneratorGlobals(generator_input_info): 2745 # Set up path_sections and non_configuration_keys with the default data plus 2746 # the generator-specific data. 2747 global path_sections 2748 path_sections = set(base_path_sections) 2749 path_sections.update(generator_input_info['path_sections']) 2750 2751 global non_configuration_keys 2752 non_configuration_keys = base_non_configuration_keys[:] 2753 non_configuration_keys.extend(generator_input_info['non_configuration_keys']) 2754 2755 global multiple_toolsets 2756 multiple_toolsets = generator_input_info[ 2757 'generator_supports_multiple_toolsets'] 2758 2759 global generator_filelist_paths 2760 generator_filelist_paths = generator_input_info['generator_filelist_paths'] 2761 2762 2763def Load(build_files, variables, includes, depth, generator_input_info, check, 2764 circular_check, duplicate_basename_check, parallel, root_targets): 2765 SetGeneratorGlobals(generator_input_info) 2766 # A generator can have other lists (in addition to sources) be processed 2767 # for rules. 2768 extra_sources_for_rules = generator_input_info['extra_sources_for_rules'] 2769 2770 # Load build files. This loads every target-containing build file into 2771 # the |data| dictionary such that the keys to |data| are build file names, 2772 # and the values are the entire build file contents after "early" or "pre" 2773 # processing has been done and includes have been resolved. 2774 # NOTE: data contains both "target" files (.gyp) and "includes" (.gypi), as 2775 # well as meta-data (e.g. 'included_files' key). 'target_build_files' keeps 2776 # track of the keys corresponding to "target" files. 2777 data = {'target_build_files': set()} 2778 # Normalize paths everywhere. This is important because paths will be 2779 # used as keys to the data dict and for references between input files. 2780 build_files = set(map(os.path.normpath, build_files)) 2781 if parallel: 2782 LoadTargetBuildFilesParallel(build_files, data, variables, includes, depth, 2783 check, generator_input_info) 2784 else: 2785 aux_data = {} 2786 for build_file in build_files: 2787 try: 2788 LoadTargetBuildFile(build_file, data, aux_data, 2789 variables, includes, depth, check, True) 2790 except Exception as e: 2791 gyp.common.ExceptionAppend(e, 'while trying to load %s' % build_file) 2792 raise 2793 2794 # Build a dict to access each target's subdict by qualified name. 2795 targets = BuildTargetsDict(data) 2796 2797 # Fully qualify all dependency links. 2798 QualifyDependencies(targets) 2799 2800 # Remove self-dependencies from targets that have 'prune_self_dependencies' 2801 # set to 1. 2802 RemoveSelfDependencies(targets) 2803 2804 # Expand dependencies specified as build_file:*. 2805 ExpandWildcardDependencies(targets, data) 2806 2807 # Remove all dependencies marked as 'link_dependency' from the targets of 2808 # type 'none'. 2809 RemoveLinkDependenciesFromNoneTargets(targets) 2810 2811 # Apply exclude (!) and regex (/) list filters only for dependency_sections. 2812 for target_name, target_dict in targets.items(): 2813 tmp_dict = {} 2814 for key_base in dependency_sections: 2815 for op in ('', '!', '/'): 2816 key = key_base + op 2817 if key in target_dict: 2818 tmp_dict[key] = target_dict[key] 2819 del target_dict[key] 2820 ProcessListFiltersInDict(target_name, tmp_dict) 2821 # Write the results back to |target_dict|. 2822 for key in tmp_dict: 2823 target_dict[key] = tmp_dict[key] 2824 2825 # Make sure every dependency appears at most once. 2826 RemoveDuplicateDependencies(targets) 2827 2828 if circular_check: 2829 # Make sure that any targets in a.gyp don't contain dependencies in other 2830 # .gyp files that further depend on a.gyp. 2831 VerifyNoGYPFileCircularDependencies(targets) 2832 2833 [dependency_nodes, flat_list] = BuildDependencyList(targets) 2834 2835 if root_targets: 2836 # Remove, from |targets| and |flat_list|, the targets that are not deep 2837 # dependencies of the targets specified in |root_targets|. 2838 targets, flat_list = PruneUnwantedTargets( 2839 targets, flat_list, dependency_nodes, root_targets, data) 2840 2841 # Check that no two targets in the same directory have the same name. 2842 VerifyNoCollidingTargets(flat_list) 2843 2844 # Handle dependent settings of various types. 2845 for settings_type in ['all_dependent_settings', 2846 'direct_dependent_settings', 2847 'link_settings']: 2848 DoDependentSettings(settings_type, flat_list, targets, dependency_nodes) 2849 2850 # Take out the dependent settings now that they've been published to all 2851 # of the targets that require them. 2852 for target in flat_list: 2853 if settings_type in targets[target]: 2854 del targets[target][settings_type] 2855 2856 # Make sure static libraries don't declare dependencies on other static 2857 # libraries, but that linkables depend on all unlinked static libraries 2858 # that they need so that their link steps will be correct. 2859 gii = generator_input_info 2860 if gii['generator_wants_static_library_dependencies_adjusted']: 2861 AdjustStaticLibraryDependencies(flat_list, targets, dependency_nodes, 2862 gii['generator_wants_sorted_dependencies']) 2863 2864 # Apply "post"/"late"/"target" variable expansions and condition evaluations. 2865 for target in flat_list: 2866 target_dict = targets[target] 2867 build_file = gyp.common.BuildFile(target) 2868 ProcessVariablesAndConditionsInDict( 2869 target_dict, PHASE_LATE, variables, build_file) 2870 2871 # Move everything that can go into a "configurations" section into one. 2872 for target in flat_list: 2873 target_dict = targets[target] 2874 SetUpConfigurations(target, target_dict) 2875 2876 # Apply exclude (!) and regex (/) list filters. 2877 for target in flat_list: 2878 target_dict = targets[target] 2879 ProcessListFiltersInDict(target, target_dict) 2880 2881 # Apply "latelate" variable expansions and condition evaluations. 2882 for target in flat_list: 2883 target_dict = targets[target] 2884 build_file = gyp.common.BuildFile(target) 2885 ProcessVariablesAndConditionsInDict( 2886 target_dict, PHASE_LATELATE, variables, build_file) 2887 2888 # Make sure that the rules make sense, and build up rule_sources lists as 2889 # needed. Not all generators will need to use the rule_sources lists, but 2890 # some may, and it seems best to build the list in a common spot. 2891 # Also validate actions and run_as elements in targets. 2892 for target in flat_list: 2893 target_dict = targets[target] 2894 build_file = gyp.common.BuildFile(target) 2895 ValidateTargetType(target, target_dict) 2896 ValidateSourcesInTarget(target, target_dict, build_file, 2897 duplicate_basename_check) 2898 ValidateRulesInTarget(target, target_dict, extra_sources_for_rules) 2899 ValidateRunAsInTarget(target, target_dict, build_file) 2900 ValidateActionsInTarget(target, target_dict, build_file) 2901 2902 # Generators might not expect ints. Turn them into strs. 2903 TurnIntIntoStrInDict(data) 2904 2905 # TODO(mark): Return |data| for now because the generator needs a list of 2906 # build files that came in. In the future, maybe it should just accept 2907 # a list, and not the whole data dict. 2908 return [flat_list, targets, data] 2909