# Status: being ported by Vladimir Prus
# Base revision: 48649
# TODO: replace the logging with dout

# Copyright Vladimir Prus 2002.
# Copyright Rene Rivera 2006.
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)

# Manages 'generators' --- objects which can do transformation between different
# target types and contain algorithm for finding transformation from sources
# to targets.
#
# The main entry point to this module is generators.construct rule. It is given
# a list of source targets, desired target type and a set of properties.
# It starts by selecting 'viable generators', which have any chances of producing
# the desired target type with the required properties. Generators are ranked and
# a set of most specific ones is selected.
#
# The most specific generators have their 'run' methods called, with the properties
# and list of sources. Each one selects targets which can be directly consumed, and
# tries to convert the remaining ones to the types it can consume. This is done
# by recursively calling 'construct' with all consumable types.
#
# If the generator has collected all the targets it needs, it creates targets
# corresponding to result, and returns it. When all generators have been run,
# results of one of them are selected and returned as result.
#
# It's quite possible that 'construct' returns more targets than it was asked for.
# For example, it was asked for a target of type EXE, but the only found generator
# produces both EXE and TDS (file with debug information). The extra target will
# be returned.
#
# Likewise, when a generator tries to convert sources to consumable types, it can
# get more targets than it was asked for. The question is what to do with extra
# targets. Boost.Build attempts to convert them to requested types, and attempts
# that as early as possible. Specifically, this is done after invoking each
# generator. (Later I'll document the rationale for trying extra target
# conversion at that point).
#
# That early conversion is not always desirable. Suppose a generator got a source
# of type Y and must consume one target of type X_1 and one target of type X_2.
# When converting Y to X_1 an extra target of type Y_2 is created. We should not
# try to convert it to type X_1, because if we do so, the generator will get two
# targets of type X_1, and will be at a loss as to which one to use. Because of
# that, the 'construct' rule has a parameter, telling if multiple targets can be
# returned. If the parameter is false, conversion of extra targets is not
# performed.


import re
import cStringIO
import os.path

from virtual_target import Subvariant
import virtual_target, type, property_set, property
from b2.util.logger import *
from b2.util.utility import *
from b2.util import set
from b2.util.sequence import unique
import b2.util.sequence as sequence
from b2.manager import get_manager
import b2.build.type

def reset ():
    """ Clear the module state. This is mainly for testing purposes.
    """
    global __generators, __type_to_generators, __generators_for_toolset, __construct_stack
    global __overrides, __active_generators
    global __viable_generators_cache, __viable_source_types_cache
    global __vstg_cached_generators, __vst_cached_types

    # All registered generators, keyed by generator id.
    __generators = {}
    # Maps target type -> list of generators producing that type.
    __type_to_generators = {}
    # Maps toolset name (first id component) -> list of its generators.
    __generators_for_toolset = {}
    # Maps overrider generator id -> list of overridden generator ids.
    __overrides = {}

    # TODO: can these be global?
    __construct_stack = []
    __viable_generators_cache = {}
    __viable_source_types_cache = {}
    __active_generators = []

    # Keys currently present in __viable_source_types_cache, tracked
    # separately so the cache can be selectively invalidated later.
    __vstg_cached_generators = []
    __vst_cached_types = []

reset ()

_re_separate_types_prefix_and_postfix = re.compile ('([^\\(]*)(\\((.*)%(.*)\\))?')
_re_match_type = re.compile('([^\\(]*)(\\(.*\\))?')


__debug = None
__indent = ""

def debug():
    # Lazily determine (and cache) whether --debug-generators was passed
    # on the command line.
    # NOTE(review): 'bjam' is not imported in this module; presumably it is
    # injected by the build core at runtime -- confirm.
    global __debug
    if __debug is None:
        __debug = "--debug-generators" in bjam.variable("ARGV")
    return __debug

def increase_indent():
    # Widen the prefix used by dout() for nested debug output.
    global __indent
    __indent += "    "

def decrease_indent():
    # Undo one increase_indent() step.
    global __indent
    __indent = __indent[0:-4]


# Updated cached viable source target type information as needed after a new
# derived target type gets added. This is needed because if a target type is a
# viable source target type for some generator then all of the target type's
# derived target types are automatically viable as source target types for the
# same generator. Does nothing if a non-derived target type is passed to it.
#
def update_cached_information_with_a_new_type(type):

    base_type = b2.build.type.base(type)

    if base_type:
        # The new type inherits viability from its base type in both the
        # per-generator and the per-target-type caches.
        for g in __vstg_cached_generators:
            if base_type in __viable_source_types_cache.get(g, []):
                __viable_source_types_cache[g].append(type)

        for t in __vst_cached_types:
            if base_type in __viable_source_types_cache.get(t, []):
                __viable_source_types_cache[t].append(type)

# Clears cached viable source target type information except for target types
# and generators with all source types listed as viable. Should be called when
# something invalidates those cached values by possibly causing some new source
# types to become viable.
133# 134def invalidate_extendable_viable_source_target_type_cache(): 135 136 global __vstg_cached_generators 137 generators_with_cached_source_types = __vstg_cached_generators 138 __vstg_cached_generators = [] 139 140 for g in generators_with_cached_source_types: 141 if __viable_source_types_cache.has_key(g): 142 if __viable_source_types_cache[g] == ["*"]: 143 __vstg_cached_generators.append(g) 144 else: 145 del __viable_source_types_cache[g] 146 147 global __vst_cached_types 148 types_with_cached_sources_types = __vst_cached_types 149 __vst_cached_types = [] 150 for t in types_with_cached_sources_types: 151 if __viable_source_types_cache.has_key(t): 152 if __viable_source_types_cache[t] == ["*"]: 153 __vst_cached_types.append(t) 154 else: 155 del __viable_source_types_cache[t] 156 157def dout(message): 158 if debug(): 159 print __indent + message 160 161class Generator: 162 """ Creates a generator. 163 manager: the build manager. 164 id: identifies the generator 165 166 rule: the rule which sets up build actions. 167 168 composing: whether generator processes each source target in 169 turn, converting it to required types. 170 Ordinary generators pass all sources together to 171 recusrive generators.construct_types call. 172 173 source_types (optional): types that this generator can handle 174 175 target_types_and_names: types the generator will create and, optionally, names for 176 created targets. Each element should have the form 177 type["(" name-pattern ")"] 178 for example, obj(%_x). Name of generated target will be found 179 by replacing % with the name of source, provided explicit name 180 was not specified. 181 182 requirements (optional) 183 184 NOTE: all subclasses must have a similar signature for clone to work! 
185 """ 186 def __init__ (self, id, composing, source_types, target_types_and_names, requirements = []): 187 assert(not isinstance(source_types, str)) 188 assert(not isinstance(target_types_and_names, str)) 189 self.id_ = id 190 self.composing_ = composing 191 self.source_types_ = source_types 192 self.target_types_and_names_ = target_types_and_names 193 self.requirements_ = requirements 194 195 self.target_types_ = [] 196 self.name_prefix_ = [] 197 self.name_postfix_ = [] 198 199 for e in target_types_and_names: 200 # Create three parallel lists: one with the list of target types, 201 # and two other with prefixes and postfixes to be added to target 202 # name. We use parallel lists for prefix and postfix (as opposed 203 # to mapping), because given target type might occur several times, 204 # for example "H H(%_symbols)". 205 m = _re_separate_types_prefix_and_postfix.match (e) 206 207 if not m: 208 raise BaseException ("Invalid type and name '%s' in declaration of type '%s'" % (e, id)) 209 210 target_type = m.group (1) 211 if not target_type: target_type = '' 212 prefix = m.group (3) 213 if not prefix: prefix = '' 214 postfix = m.group (4) 215 if not postfix: postfix = '' 216 217 self.target_types_.append (target_type) 218 self.name_prefix_.append (prefix) 219 self.name_postfix_.append (postfix) 220 221 for x in self.source_types_: 222 type.validate (x) 223 224 for x in self.target_types_: 225 type.validate (x) 226 227 def clone (self, new_id, new_toolset_properties): 228 """ Returns another generator which differers from $(self) in 229 - id 230 - value to <toolset> feature in properties 231 """ 232 return self.__class__ (new_id, 233 self.composing_, 234 self.source_types_, 235 self.target_types_and_names_, 236 # Note: this does not remove any subfeatures of <toolset> 237 # which might cause problems 238 property.change (self.requirements_, '<toolset>') + new_toolset_properties) 239 240 def clone_and_change_target_type(self, base, type): 241 """Creates another 
generator that is the same as $(self), except that 242 if 'base' is in target types of $(self), 'type' will in target types 243 of the new generator.""" 244 target_types = [] 245 for t in self.target_types_and_names_: 246 m = _re_match_type.match(t) 247 assert m 248 249 if m.group(1) == base: 250 if m.group(2): 251 target_types.append(type + m.group(2)) 252 else: 253 target_types.append(type) 254 else: 255 target_types.append(t) 256 257 return self.__class__(self.id_, self.composing_, 258 self.source_types_, 259 target_types, 260 self.requirements_) 261 262 263 def id(self): 264 return self.id_ 265 266 def source_types (self): 267 """ Returns the list of target type the generator accepts. 268 """ 269 return self.source_types_ 270 271 def target_types (self): 272 """ Returns the list of target types that this generator produces. 273 It is assumed to be always the same -- i.e. it cannot change depending 274 list of sources. 275 """ 276 return self.target_types_ 277 278 def requirements (self): 279 """ Returns the required properties for this generator. Properties 280 in returned set must be present in build properties if this 281 generator is to be used. If result has grist-only element, 282 that build properties must include some value of that feature. 283 """ 284 return self.requirements_ 285 286 def match_rank (self, ps): 287 """ Returns true if the generator can be run with the specified 288 properties. 289 """ 290 # See if generator's requirements are satisfied by 291 # 'properties'. Treat a feature name in requirements 292 # (i.e. grist-only element), as matching any value of the 293 # feature. 294 all_requirements = self.requirements () 295 296 property_requirements = [] 297 feature_requirements = [] 298 # This uses strings because genenator requirements allow 299 # the '<feature>' syntax without value and regular validation 300 # is not happy about that. 
301 for r in all_requirements: 302 if get_value (r): 303 property_requirements.append (r) 304 305 else: 306 feature_requirements.append (r) 307 308 return all(ps.get(get_grist(s)) == [get_value(s)] for s in property_requirements) \ 309 and all(ps.get(get_grist(s)) for s in feature_requirements) 310 311 def run (self, project, name, prop_set, sources): 312 """ Tries to invoke this generator on the given sources. Returns a 313 list of generated targets (instances of 'virtual-target'). 314 315 project: Project for which the targets are generated. 316 317 name: Determines the name of 'name' attribute for 318 all generated targets. See 'generated_targets' method. 319 320 prop_set: Desired properties for generated targets. 321 322 sources: Source targets. 323 """ 324 325 if project.manager ().logger ().on (): 326 project.manager ().logger ().log (__name__, " generator '%s'" % self.id_) 327 project.manager ().logger ().log (__name__, " composing: '%s'" % self.composing_) 328 329 if not self.composing_ and len (sources) > 1 and len (self.source_types_) > 1: 330 raise BaseException ("Unsupported source/source_type combination") 331 332 # We don't run composing generators if no name is specified. The reason 333 # is that composing generator combines several targets, which can have 334 # different names, and it cannot decide which name to give for produced 335 # target. Therefore, the name must be passed. 336 # 337 # This in effect, means that composing generators are runnable only 338 # at top-level of transofrmation graph, or if name is passed explicitly. 339 # Thus, we dissallow composing generators in the middle. 
For example, the 340 # transofrmation CPP -> OBJ -> STATIC_LIB -> RSP -> EXE won't be allowed 341 # (the OBJ -> STATIC_LIB generator is composing) 342 if not self.composing_ or name: 343 return self.run_really (project, name, prop_set, sources) 344 else: 345 return [] 346 347 def run_really (self, project, name, prop_set, sources): 348 349 # consumed: Targets that this generator will consume directly. 350 # bypassed: Targets that can't be consumed and will be returned as-is. 351 352 if self.composing_: 353 (consumed, bypassed) = self.convert_multiple_sources_to_consumable_types (project, prop_set, sources) 354 else: 355 (consumed, bypassed) = self.convert_to_consumable_types (project, name, prop_set, sources) 356 357 result = [] 358 if consumed: 359 result = self.construct_result (consumed, project, name, prop_set) 360 result.extend (bypassed) 361 362 if result: 363 if project.manager ().logger ().on (): 364 project.manager ().logger ().log (__name__, " SUCCESS: ", result) 365 366 else: 367 project.manager ().logger ().log (__name__, " FAILURE") 368 369 return result 370 371 def construct_result (self, consumed, project, name, prop_set): 372 """ Constructs the dependency graph that will be returned by this 373 generator. 374 consumed: Already prepared list of consumable targets 375 If generator requires several source files will contain 376 exactly len $(self.source_types_) targets with matching types 377 Otherwise, might contain several targets with the type of 378 self.source_types_ [0] 379 project: 380 name: 381 prop_set: Properties to be used for all actions create here 382 """ 383 result = [] 384 # If this is 1->1 transformation, apply it to all consumed targets in order. 
385 if len (self.source_types_) < 2 and not self.composing_: 386 387 for r in consumed: 388 result.extend (self.generated_targets ([r], prop_set, project, name)) 389 390 else: 391 392 if consumed: 393 result.extend (self.generated_targets (consumed, prop_set, project, name)) 394 395 return result 396 397 def determine_target_name(self, fullname): 398 # Determine target name from fullname (maybe including path components) 399 # Place optional prefix and postfix around basename 400 401 dir = os.path.dirname(fullname) 402 name = os.path.basename(fullname) 403 idx = name.find(".") 404 if idx != -1: 405 name = name[:idx] 406 407 if dir and not ".." in dir and not os.path.isabs(dir): 408 # Relative path is always relative to the source 409 # directory. Retain it, so that users can have files 410 # with the same in two different subdirectories. 411 name = dir + "/" + name 412 413 return name 414 415 def determine_output_name(self, sources): 416 """Determine the name of the produced target from the 417 names of the sources.""" 418 419 # The simple case if when a name 420 # of source has single dot. Then, we take the part before 421 # dot. Several dots can be caused by: 422 # - Using source file like a.host.cpp 423 # - A type which suffix has a dot. Say, we can 424 # type 'host_cpp' with extension 'host.cpp'. 425 # In the first case, we want to take the part till the last 426 # dot. In the second case -- no sure, but for now take 427 # the part till the last dot too. 428 name = os.path.splitext(sources[0].name())[0] 429 430 for s in sources[1:]: 431 n2 = os.path.splitext(s.name()) 432 if n2 != name: 433 get_manager().errors()( 434 "%s: source targets have different names: cannot determine target name" 435 % (self.id_)) 436 437 # Names of sources might include directory. We should strip it. 
438 return self.determine_target_name(sources[0].name()) 439 440 441 def generated_targets (self, sources, prop_set, project, name): 442 """ Constructs targets that are created after consuming 'sources'. 443 The result will be the list of virtual-target, which the same length 444 as 'target_types' attribute and with corresponding types. 445 446 When 'name' is empty, all source targets must have the same value of 447 the 'name' attribute, which will be used instead of the 'name' argument. 448 449 The value of 'name' attribute for each generated target will be equal to 450 the 'name' parameter if there's no name pattern for this type. Otherwise, 451 the '%' symbol in the name pattern will be replaced with the 'name' parameter 452 to obtain the 'name' attribute. 453 454 For example, if targets types are T1 and T2(with name pattern "%_x"), suffixes 455 for T1 and T2 are .t1 and t2, and source if foo.z, then created files would 456 be "foo.t1" and "foo_x.t2". The 'name' attribute actually determined the 457 basename of a file. 458 459 Note that this pattern mechanism has nothing to do with implicit patterns 460 in make. It's a way to produce target which name is different for name of 461 source. 462 """ 463 if not name: 464 name = self.determine_output_name(sources) 465 466 # Assign an action for each target 467 action = self.action_class() 468 a = action(project.manager(), sources, self.id_, prop_set) 469 470 # Create generated target for each target type. 
471 targets = [] 472 pre = self.name_prefix_ 473 post = self.name_postfix_ 474 for t in self.target_types_: 475 basename = os.path.basename(name) 476 generated_name = pre[0] + basename + post[0] 477 generated_name = os.path.join(os.path.dirname(name), generated_name) 478 pre = pre[1:] 479 post = post[1:] 480 481 targets.append(virtual_target.FileTarget(generated_name, t, project, a)) 482 483 return [ project.manager().virtual_targets().register(t) for t in targets ] 484 485 def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False): 486 """ Attempts to convert 'source' to the types that this generator can 487 handle. The intention is to produce the set of targets can should be 488 used when generator is run. 489 only_one: convert 'source' to only one of source types 490 if there's more that one possibility, report an 491 error. 492 493 Returns a pair: 494 consumed: all targets that can be consumed. 495 bypassed: all targets that cannot be consumed. 496 """ 497 consumed = [] 498 bypassed = [] 499 missing_types = [] 500 501 if len (sources) > 1: 502 # Don't know how to handle several sources yet. Just try 503 # to pass the request to other generator 504 missing_types = self.source_types_ 505 506 else: 507 (c, m) = self.consume_directly (sources [0]) 508 consumed += c 509 missing_types += m 510 511 # No need to search for transformation if 512 # some source type has consumed source and 513 # no more source types are needed. 514 if only_one and consumed: 515 missing_types = [] 516 517 #TODO: we should check that only one source type 518 #if create of 'only_one' is true. 519 # TODO: consider if consuned/bypassed separation should 520 # be done by 'construct_types'. 521 522 if missing_types: 523 transformed = construct_types (project, name, missing_types, prop_set, sources) 524 525 # Add targets of right type to 'consumed'. Add others to 526 # 'bypassed'. 
    def convert_to_consumable_types (self, project, name, prop_set, sources, only_one=False):
        """ Attempts to convert 'sources' to the types that this generator can
            handle. The intention is to produce the set of targets that should
            be used when the generator is run.
            only_one:   convert 'sources' to only one of the source types; if
                        there's more than one possibility, report an error.

            Returns a pair:
                consumed: all targets that can be consumed.
                bypassed: all targets that cannot be consumed.
        """
        consumed = []
        bypassed = []
        missing_types = []

        if len (sources) > 1:
            # Don't know how to handle several sources yet. Just try
            # to pass the request to other generator
            missing_types = self.source_types_

        else:
            (c, m) = self.consume_directly (sources [0])
            consumed += c
            missing_types += m

        # No need to search for transformation if
        # some source type has consumed source and
        # no more source types are needed.
        if only_one and consumed:
            missing_types = []

        # TODO: we should check that only one source type
        # is consumed if value of 'only_one' is true.
        # TODO: consider if consumed/bypassed separation should
        # be done by 'construct_types'.

        if missing_types:
            # Recursively construct the missing types from the sources.
            transformed = construct_types (project, name, missing_types, prop_set, sources)

            # Add targets of right type to 'consumed'. Add others to
            # 'bypassed'. The 'generators.construct' rule has done
            # its best to convert everything to the required type.
            # There's no need to rerun it on targets of different types.

            # NOTE: ignoring usage requirements
            for t in transformed[1]:
                if t.type() in missing_types:
                    consumed.append(t)

                else:
                    bypassed.append(t)

        consumed = unique(consumed)
        bypassed = unique(bypassed)

        # remove elements of 'bypassed' that are in 'consumed'

        # Suppose the target type of current generator, X is produced from
        # X_1 and X_2, which are produced from Y by one generator.
        # When creating X_1 from Y, X_2 will be added to 'bypassed'
        # Likewise, when creating X_2 from Y, X_1 will be added to 'bypassed'
        # But they are also in 'consumed'. We have to remove them from
        # bypassed, so that generators up the call stack don't try to convert
        # them.

        # In this particular case, X_1 instance in 'consumed' and X_1 instance
        # in 'bypassed' will be the same: because they have the same source and
        # action name, and 'virtual-target.register' won't allow two different
        # instances. Therefore, it's OK to use 'set.difference'.

        bypassed = set.difference(bypassed, consumed)

        return (consumed, bypassed)


    def convert_multiple_sources_to_consumable_types (self, project, prop_set, sources):
        """ Converts several files to consumable types.
        """
        consumed = []
        bypassed = []

        # We process each source one-by-one, trying to convert it to
        # a usable type.
        for s in sources:
            # TODO: need to check for failure on each source.
            # 'only_one=True' -- each source must convert to a single type.
            (c, b) = self.convert_to_consumable_types (project, None, prop_set, [s], True)
            if not c:
                project.manager ().logger ().log (__name__, " failed to convert ", s)

            consumed.extend (c)
            bypassed.extend (b)

        return (consumed, bypassed)

    def consume_directly (self, source):
        # Partition this generator's source types into those directly
        # satisfied by 'source' (same or derived type) and those still
        # missing. Returns (consumed, missing_types).
        real_source_type = source.type ()

        # If there are no source types, we can consume anything
        source_types = self.source_types()
        if not source_types:
            source_types = [real_source_type]

        consumed = []
        missing_types = []
        for st in source_types:
            # The 'source' is of the right type already.
            if real_source_type == st or type.is_derived (real_source_type, st):
                consumed.append (source)

            else:
                missing_types.append (st)

        return (consumed, missing_types)

    def action_class (self):
        """ Returns the class to be used for actions. Default implementation
            returns "action".
        """
        return virtual_target.Action


def find (id):
    """ Finds the generator with id. Returns None if not found.
    """
    return __generators.get (id, None)
def register (g):
    """ Registers new generator instance 'g'.
    """
    id = g.id()

    __generators [id] = g

    # A generator can produce several targets of the
    # same type. We want unique occurence of that generator
    # in .generators.$(t) in that case, otherwise, it will
    # be tried twice and we'll get false ambiguity.
    for t in sequence.unique(g.target_types()):
        __type_to_generators.setdefault(t, []).append(g)

    # Update the set of generators for toolset

    # TODO: should we check that generator with this id
    # is not already registered. For example, the fop.jam
    # module intentionally declared two generators with the
    # same id, so such check will break it.

    # Some generators have multiple periods in their name, so the
    # normal $(id:S=) won't generate the right toolset name.
    # e.g. if id = gcc.compile.c++, then
    # .generators-for-toolset.$(id:S=) will append to
    # .generators-for-toolset.gcc.compile, which is a separate
    # value from .generators-for-toolset.gcc. Correcting this
    # makes generator inheritance work properly.
    # See also inherit-generators in module toolset
    base = id.split ('.', 100) [0]

    __generators_for_toolset.setdefault(base, []).append(g)

    # After adding a new generator that can construct new target types, we need
    # to clear the related cached viable source target type information for
    # constructing a specific target type or using a specific generator. Cached
    # viable source target type lists affected by this are those containing any
    # of the target types constructed by the new generator or any of their base
    # target types.
    #
    # A more advanced alternative to clearing that cached viable source target
    # type information would be to expand it with additional source types or
    # even better - mark it as needing to be expanded on next use.
    #
    # For now we just clear all the cached viable source target type information
    # that does not simply state 'all types' and may implement a more detailed
    # algorithm later on if it becomes needed.

    invalidate_extendable_viable_source_target_type_cache()


def register_standard (id, source_types, target_types, requirements = []):
    """ Creates new instance of the 'generator' class and registers it.
        Returns the created instance.
        Rationale: the instance is returned so that it's possible to first register
        a generator and then call 'run' method on that generator, bypassing all
        generator selection.
    """
    g = Generator (id, False, source_types, target_types, requirements)
    register (g)
    return g

def register_composing (id, source_types, target_types, requirements = []):
    # Same as register_standard, but creates a composing generator (one that
    # processes each source in turn rather than all sources together).
    g = Generator (id, True, source_types, target_types, requirements)
    register (g)
    return g

def generators_for_toolset (toolset):
    """ Returns all generators which belong to 'toolset'.
    """
    return __generators_for_toolset.get(toolset, [])

def override (overrider_id, overridee_id):
    """Make generator 'overrider-id' be preferred to
    'overridee-id'. If, when searching for generators
    that could produce a target of certain type,
    both those generators are among viable generators,
    the overridden generator is immediately discarded.

    The overridden generators are discarded immediately
    after computing the list of viable generators, before
    running any of them."""

    __overrides.setdefault(overrider_id, []).append(overridee_id)

def __viable_source_types_real (target_type):
    """ Returns a list of source types which can possibly be converted
        to 'target_type' by some chain of generator invocation.

        More formally, takes all generators for 'target_type' and
        returns a union of source types for those generators and the result
        of calling itself recursively on those source types.

        May also return the single string "*", meaning any source type is
        viable (some generator accepts everything).
    """
    generators = []

    # 't0' is the initial list of target types we need to process to get a list
    # of their viable source target types. New target types will not be added to
    # this list.
    t0 = type.all_bases (target_type)


    # 't' is the list of target types which have not yet been processed to get a
    # list of their viable source target types. This list will get expanded as
    # we locate more target types to process.
    t = t0

    result = []
    while t:
        # Find all generators for current type.
        # Unlike 'find_viable_generators' we don't care about prop_set.
        generators = __type_to_generators.get (t [0], [])
        t = t[1:]

        for g in generators:
            if not g.source_types():
                # Empty source types -- everything can be accepted
                result = "*"
                # This will terminate outer loop.
                t = None
                break

            for source_type in g.source_types ():
                if not source_type in result:
                    # If generator accepts 'source_type' it
                    # will happily accept any type derived from it
                    all = type.all_derived (source_type)
                    for n in all:
                        if not n in result:

                            # Here there is no point in adding target types to
                            # the list of types to process in case they are or
                            # have already been on that list. We optimize this
                            # check by realizing that we only need to avoid the
                            # original target type's base types. Other target
                            # types that are or have been on the list of target
                            # types to process have been added to the 'result'
                            # list as well and have thus already been eliminated
                            # by the previous if.
                            if not n in t0:
                                t.append (n)
                            result.append (n)

    return result


def viable_source_types (target_type):
    """ Helper rule, caches the result of '__viable_source_types_real'.
    """
    if not __viable_source_types_cache.has_key(target_type):
        __vst_cached_types.append(target_type)
        __viable_source_types_cache [target_type] = __viable_source_types_real (target_type)
    return __viable_source_types_cache [target_type]
def viable_source_types_for_generator_real (generator):
    """ Returns the list of source types, which, when passed to 'run'
        method of 'generator', have some chance of being eventually used
        (probably after conversion by other generators).
    """
    source_types = generator.source_types ()

    if not source_types:
        # If generator does not specify any source types,
        # it might be special generator like builtin.lib-generator
        # which just relays to other generators. Return '*' to
        # indicate that any source type is possibly OK, since we don't
        # know for sure.
        return ['*']

    else:
        result = []
        for s in source_types:
            viable_sources = viable_source_types(s)
            if viable_sources == "*":
                # A wildcard from viable_source_types short-circuits the
                # whole computation.
                result = ["*"]
                break
            else:
                result.extend(type.all_derived(s) + viable_sources)
        return unique(result)

def viable_source_types_for_generator (generator):
    """ Caches the result of 'viable_source_types_for_generator_real'.
    """
    if not __viable_source_types_cache.has_key(generator):
        __vstg_cached_generators.append(generator)
        __viable_source_types_cache[generator] = viable_source_types_for_generator_real (generator)

    return __viable_source_types_cache[generator]

def try_one_generator_really (project, name, generator, target_type, properties, sources):
    """ Returns usage requirements + list of created targets,
        or None if the generator produced nothing.
    """
    targets = generator.run (project, name, properties, sources)

    usage_requirements = []
    success = False

    dout("returned " + str(targets))

    if targets:
        success = True;

        # A generator may return an explicit usage-requirements property
        # set as the first element of its result.
        if isinstance (targets[0], property_set.PropertySet):
            usage_requirements = targets [0]
            targets = targets [1]

        else:
            usage_requirements = property_set.empty ()

    dout( " generator" + generator.id() + " spawned ")
    # generators.dout [ indent ] " " $(targets) ;
#    if $(usage-requirements)
#    {
#        generators.dout [ indent ] " with usage requirements:" $(x) ;
#    }

    if success:
        return (usage_requirements, targets)
    else:
        return None
836 """ 837 source_types = [] 838 839 for s in sources: 840 source_types.append (s.type ()) 841 842 viable_source_types = viable_source_types_for_generator (generator) 843 844 if source_types and viable_source_types != ['*'] and\ 845 not set.intersection (source_types, viable_source_types): 846 if project.manager ().logger ().on (): 847 id = generator.id () 848 project.manager ().logger ().log (__name__, "generator '%s' pruned" % id) 849 project.manager ().logger ().log (__name__, "source_types" '%s' % source_types) 850 project.manager ().logger ().log (__name__, "viable_source_types '%s'" % viable_source_types) 851 852 return [] 853 854 else: 855 return try_one_generator_really (project, name, generator, target_type, properties, sources) 856 857 858def construct_types (project, name, target_types, prop_set, sources): 859 860 result = [] 861 usage_requirements = property_set.empty() 862 863 for t in target_types: 864 r = construct (project, name, t, prop_set, sources) 865 866 if r: 867 (ur, targets) = r 868 usage_requirements = usage_requirements.add(ur) 869 result.extend(targets) 870 871 # TODO: have to introduce parameter controlling if 872 # several types can be matched and add appropriate 873 # checks 874 875 # TODO: need to review the documentation for 876 # 'construct' to see if it should return $(source) even 877 # if nothing can be done with it. Currents docs seem to 878 # imply that, contrary to the behaviour. 879 if result: 880 return (usage_requirements, result) 881 882 else: 883 return (usage_requirements, sources) 884 885def __ensure_type (targets): 886 """ Ensures all 'targets' have types. If this is not so, exists with 887 error. 888 """ 889 for t in targets: 890 if not t.type (): 891 get_manager().errors()("target '%s' has no type" % str (t)) 892 893def find_viable_generators_aux (target_type, prop_set): 894 """ Returns generators which can be used to construct target of specified type 895 with specified properties. 
        Uses the following algorithm:
        - iterates over requested target_type and all its bases (in the order returned by
          type.all_bases).
        - for each type find all generators that generate that type and which requirements
          are satisfied by properties.
        - if the set of generators is not empty, returns that set.

        Note: this algorithm explicitly ignores generators for base classes if there's
        at least one generator for requested target_type.
    """
    # Select generators that can create the required target type.
    viable_generators = []
    initial_generators = []

    import type

    # Try all-type generators first. Assume they have
    # quite specific requirements.
    all_bases = type.all_bases(target_type)

    for t in all_bases:

        initial_generators = __type_to_generators.get(t, [])

        if initial_generators:
            dout("there are generators for this type")
            if t != target_type:
                # We're here, when no generators for target-type are found,
                # but there are some generators for a base type.
                # We'll try to use them, but they will produce targets of
                # base type, not of 'target-type'. So, we clone the generators
                # and modify the list of target types.
                generators2 = []
                for g in initial_generators[:]:
                    # generators.register adds generator to the list of generators
                    # for toolsets, which is a bit strange, but should work.
                    # That list is only used when inheriting toolset, which
                    # should have being done before generators are run.
                    ng = g.clone_and_change_target_type(t, target_type)
                    generators2.append(ng)
                    register(ng)

                initial_generators = generators2
            # Stop at the most derived type that has any generators.
            break

    # Keep only the generators whose requirements match 'prop_set'.
    for g in initial_generators:
        dout("trying generator " + g.id()
             + "(" + str(g.source_types()) + "->" + str(g.target_types()) + ")")

        m = g.match_rank(prop_set)
        if m:
            dout(" is viable")
            viable_generators.append(g)

    return viable_generators

def find_viable_generators (target_type, prop_set):
    """ Caching wrapper around 'find_viable_generators_aux' which also
        filters out generators that are currently active (to avoid
        recursion) and generators that are overridden by others.
    """
    key = target_type + '.' + str (prop_set)

    # NOTE(review): an empty result is stored but, being falsy, is
    # recomputed on every subsequent call for the same key.
    l = __viable_generators_cache.get (key, None)
    if not l:
        l = []

    if not l:
        l = find_viable_generators_aux (target_type, prop_set)

        __viable_generators_cache [key] = l

    viable_generators = []
    for g in l:
        # Avoid trying the same generator twice on different levels.
        # TODO: is this really used?
        if not g in __active_generators:
            viable_generators.append (g)
        else:
            dout(" generator %s is active, discarding" % g.id())

    # Generators which override 'all'.
    all_overrides = []

    # Generators which are overriden
    overriden_ids = []

    for g in viable_generators:
        id = g.id ()

        this_overrides = __overrides.get (id, [])

        if this_overrides:
            overriden_ids.extend (this_overrides)
            if 'all' in this_overrides:
                all_overrides.append (g)

    # A generator that overrides 'all' displaces every other candidate.
    if all_overrides:
        viable_generators = all_overrides

    return [g for g in viable_generators if not g.id() in overriden_ids]

def __construct_really (project, name, target_type, prop_set, sources):
    """ Attempts to construct target by finding viable generators, running them
        and selecting the dependency graph.
996 """ 997 viable_generators = find_viable_generators (target_type, prop_set) 998 999 result = [] 1000 1001 dout(" *** %d viable generators" % len (viable_generators)) 1002 1003 generators_that_succeeded = [] 1004 1005 for g in viable_generators: 1006 __active_generators.append(g) 1007 r = try_one_generator (project, name, g, target_type, prop_set, sources) 1008 del __active_generators[-1] 1009 1010 if r: 1011 generators_that_succeeded.append(g) 1012 if result: 1013 output = cStringIO.StringIO() 1014 print >>output, "ambiguity found when searching for best transformation" 1015 print >>output, "Trying to produce type '%s' from: " % (target_type) 1016 for s in sources: 1017 print >>output, " - " + s.str() 1018 print >>output, "Generators that succeeded:" 1019 for g in generators_that_succeeded: 1020 print >>output, " - " + g.id() 1021 print >>output, "First generator produced: " 1022 for t in result[1:]: 1023 print >>output, " - " + str(t) 1024 print >>output, "Second generator produced:" 1025 for t in r[1:]: 1026 print >>output, " - " + str(t) 1027 get_manager().errors()(output.getvalue()) 1028 else: 1029 result = r; 1030 1031 return result; 1032 1033 1034def construct (project, name, target_type, prop_set, sources, top_level=False): 1035 """ Attempts to create target of 'target-type' with 'properties' 1036 from 'sources'. The 'sources' are treated as a collection of 1037 *possible* ingridients -- i.e. it is not required to consume 1038 them all. If 'multiple' is true, the rule is allowed to return 1039 several targets of 'target-type'. 1040 1041 Returns a list of target. When this invocation is first instance of 1042 'construct' in stack, returns only targets of requested 'target-type', 1043 otherwise, returns also unused sources and additionally generated 1044 targets. 1045 1046 If 'top-level' is set, does not suppress generators that are already 1047 used in the stack. 
This may be useful in cases where a generator 1048 has to build a metatarget -- for example a target corresponding to 1049 built tool. 1050 """ 1051 1052 global __active_generators 1053 if top_level: 1054 saved_active = __active_generators 1055 __active_generators = [] 1056 1057 global __construct_stack 1058 if not __construct_stack: 1059 __ensure_type (sources) 1060 1061 __construct_stack.append (1) 1062 1063 increase_indent () 1064 1065 if project.manager().logger().on(): 1066 dout( "*** construct " + target_type) 1067 1068 for s in sources: 1069 dout(" from " + str(s)) 1070 1071 project.manager().logger().log (__name__, " properties: ", prop_set.raw ()) 1072 1073 result = __construct_really(project, name, target_type, prop_set, sources) 1074 1075 decrease_indent() 1076 1077 __construct_stack = __construct_stack [1:] 1078 1079 if top_level: 1080 __active_generators = saved_active 1081 1082 return result 1083 1084def add_usage_requirements (result, raw_properties): 1085 if result: 1086 if isinstance (result[0], property_set.PropertySet): 1087 return (result[0].add_raw(raw_properties), result[1]) 1088 else: 1089 return (propery_set.create(raw-properties), result) 1090 #if [ class.is-a $(result[1]) : property-set ] 1091 #{ 1092 # return [ $(result[1]).add-raw $(raw-properties) ] $(result[2-]) ; 1093 #} 1094 #else 1095 #{ 1096 # return [ property-set.create $(raw-properties) ] $(result) ; 1097 #} 1098