"""scons.Node.FS

File system nodes.

These Nodes represent the canonical external objects that people think
of when they think of building software: files and directories.

This holds a "default_fs" variable that should be initialized with an FS
that can be used by scripts or modules looking for the canonical default.

"""

#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#

__revision__ = "src/engine/SCons/Node/FS.py 4369 2009/09/19 15:58:29 scons"

# NOTE(review): itertools.izip and cStringIO are Python-2-only, while
# code further down this file uses the Python-3-only sys.intern() --
# the module currently cannot run unmodified on either major version;
# confirm which interpreter is targeted.
from itertools import izip
import cStringIO
import fnmatch
import os
import os.path
import re
import shutil
import stat
import string
import sys
import time

# Compatibility shims for very old Python versions: make sure the
# codecs module (if present at all) exposes the BOM constants and a
# uniform my_decode(contents, encoding) helper.
try:
    import codecs
except ImportError:
    pass
else:
    # TODO(2.2): Remove when 2.3 becomes the minimal supported version.
    try:
        codecs.BOM_UTF8
    except AttributeError:
        codecs.BOM_UTF8 = '\xef\xbb\xbf'
    try:
        codecs.BOM_UTF16_LE
        codecs.BOM_UTF16_BE
    except AttributeError:
        codecs.BOM_UTF16_LE = '\xff\xfe'
        codecs.BOM_UTF16_BE = '\xfe\xff'

    # Provide a wrapper function to handle decoding differences in
    # different versions of Python. Normally, we'd try to do this in the
    # compat layer (and maybe it still makes sense to move there?) but
    # that doesn't provide a way to supply the string class used in
    # pre-2.3 Python versions with a .decode() method that all strings
    # naturally have. Plus, the 2.[01] encodings behave differently
    # enough that we have to settle for a lowest-common-denominator
    # wrapper approach.
    #
    # Note that the 2.[012] implementations below may be inefficient
    # because they perform an explicit look up of the encoding for every
    # decode, but they're old enough (and we want to stop supporting
    # them soon enough) that it's not worth complicating the interface.
    # Think of it as additional incentive for people to upgrade...
    try:
        ''.decode
    except AttributeError:
        # 2.0 through 2.2: strings have no .decode() method
        try:
            codecs.lookup('ascii').decode
        except AttributeError:
            # 2.0 and 2.1: encodings are a tuple of functions, and the
            # decode() function returns a (result, length) tuple.
            def my_decode(contents, encoding):
                return codecs.lookup(encoding)[1](contents)[0]
        else:
            # 2.2: encodings are an object with methods, and the
            # .decode() method returns just the decoded bytes.
            def my_decode(contents, encoding):
                return codecs.lookup(encoding).decode(contents)
    else:
        # 2.3 or later: use the .decode() string method
        def my_decode(contents, encoding):
            return contents.decode(encoding)

import SCons.Action
from SCons.Debug import logInstanceCreation
import SCons.Errors
import SCons.Memoize
import SCons.Node
import SCons.Node.Alias
import SCons.Subst
import SCons.Util
import SCons.Warnings

from SCons.Debug import Trace

do_store_info = True


class EntryProxyAttributeError(AttributeError):
    """
    An AttributeError subclass for recording and displaying the name
    of the underlying Entry involved in an AttributeError exception.
    """
    def __init__(self, entry_proxy, attribute):
        # entry_proxy: the EntryProxy on which the lookup failed.
        # attribute: the name that could not be resolved.
        AttributeError.__init__(self)
        self.entry_proxy = entry_proxy
        self.attribute = attribute
    def __str__(self):
        entry = self.entry_proxy.get()
        fmt = "%s instance %s has no attribute %s"
        return fmt % (entry.__class__.__name__,
                      repr(entry.name),
                      repr(self.attribute))

# The max_drift value: by default, use a cached signature value for
# any file that's been untouched for more than two days.
default_max_drift = 2*24*60*60

#
# We stringify these file system Nodes a lot. Turning a file system Node
# into a string is non-trivial, because the final string representation
# can depend on a lot of factors: whether it's a derived target or not,
# whether it's linked to a repository or source directory, and whether
# there's duplication going on. The normal technique for optimizing
# calculations like this is to memoize (cache) the string value, so you
# only have to do the calculation once.
#
# A number of the above factors, however, can be set after we've already
# been asked to return a string for a Node, because a Repository() or
# VariantDir() call or the like may not occur until later in SConscript
# files. So this variable controls whether we bother trying to save
# string values for Nodes. The wrapper interface can set this whenever
# they're done mucking with Repository and VariantDir and the other stuff,
# to let this module know it can start returning saved string values
# for Nodes.
#
Save_Strings = None

def save_strings(val):
    """Set the global Save_Strings flag (see the comment above)."""
    global Save_Strings
    Save_Strings = val

#
# Avoid unnecessary function calls by recording a Boolean value that
# tells us whether or not os.path.splitdrive() actually does anything
# on this system, and therefore whether we need to bother calling it
# when looking up path names in various methods below.
#

do_splitdrive = None

def initialize_do_splitdrive():
    """Record whether os.path.splitdrive() does anything on this
    platform by probing it with a drive-letter path."""
    global do_splitdrive
    drive, path = os.path.splitdrive('X:/foo')
    do_splitdrive = not not drive

initialize_do_splitdrive()

#

needs_normpath_check = None

def initialize_normpath_check():
    """
    Initialize the normpath_check regular expression.

    This function is used by the unit tests to re-initialize the pattern
    when testing for behavior with different values of os.sep.
    """
    global needs_normpath_check
    if os.sep == '/':
        pattern = r'.*/|\.$|\.\.$'
    else:
        pattern = r'.*[/%s]|\.$|\.\.$' % re.escape(os.sep)
    needs_normpath_check = re.compile(pattern)

initialize_normpath_check()

#
# SCons.Action objects for interacting with the outside world.
#
# The Node.FS methods in this module should use these actions to
# create and/or remove files and directories; they should *not* use
# os.{link,symlink,unlink,mkdir}(), etc., directly.
203# 204# Using these SCons.Action objects ensures that descriptions of these 205# external activities are properly displayed, that the displays are 206# suppressed when the -s (silent) option is used, and (most importantly) 207# the actions are disabled when the the -n option is used, in which case 208# there should be *no* changes to the external file system(s)... 209# 210 211if hasattr(os, 'link'): 212 def _hardlink_func(fs, src, dst): 213 # If the source is a symlink, we can't just hard-link to it 214 # because a relative symlink may point somewhere completely 215 # different. We must disambiguate the symlink and then 216 # hard-link the final destination file. 217 while fs.islink(src): 218 link = fs.readlink(src) 219 if not os.path.isabs(link): 220 src = link 221 else: 222 src = os.path.join(os.path.dirname(src), link) 223 fs.link(src, dst) 224else: 225 _hardlink_func = None 226 227if hasattr(os, 'symlink'): 228 def _softlink_func(fs, src, dst): 229 fs.symlink(src, dst) 230else: 231 _softlink_func = None 232 233def _copy_func(fs, src, dest): 234 shutil.copy2(src, dest) 235 st = fs.stat(src) 236 fs.chmod(dest, stat.S_IMODE(st[stat.ST_MODE]) | stat.S_IWRITE) 237 238 239Valid_Duplicates = ['hard-soft-copy', 'soft-hard-copy', 240 'hard-copy', 'soft-copy', 'copy'] 241 242Link_Funcs = [] # contains the callables of the specified duplication style 243 244def set_duplicate(duplicate): 245 # Fill in the Link_Funcs list according to the argument 246 # (discarding those not available on the platform). 247 248 # Set up the dictionary that maps the argument names to the 249 # underlying implementations. We do this inside this function, 250 # not in the top-level module code, so that we can remap os.link 251 # and os.symlink for testing purposes. 
252 link_dict = { 253 'hard' : _hardlink_func, 254 'soft' : _softlink_func, 255 'copy' : _copy_func 256 } 257 258 if not duplicate in Valid_Duplicates: 259 raise SCons.Errors.InternalError("The argument of set_duplicate " 260 "should be in Valid_Duplicates") 261 global Link_Funcs 262 Link_Funcs = [] 263 for func in string.split(duplicate,'-'): 264 if link_dict[func]: 265 Link_Funcs.append(link_dict[func]) 266 267def LinkFunc(target, source, env): 268 # Relative paths cause problems with symbolic links, so 269 # we use absolute paths, which may be a problem for people 270 # who want to move their soft-linked src-trees around. Those 271 # people should use the 'hard-copy' mode, softlinks cannot be 272 # used for that; at least I have no idea how ... 273 src = source[0].abspath 274 dest = target[0].abspath 275 dir, file = os.path.split(dest) 276 if dir and not target[0].fs.isdir(dir): 277 os.makedirs(dir) 278 if not Link_Funcs: 279 # Set a default order of link functions. 280 set_duplicate('hard-soft-copy') 281 fs = source[0].fs 282 # Now link the files with the previously specified order. 283 for func in Link_Funcs: 284 try: 285 func(fs, src, dest) 286 break 287 except (IOError, OSError): 288 # An OSError indicates something happened like a permissions 289 # problem or an attempt to symlink across file-system 290 # boundaries. An IOError indicates something like the file 291 # not existing. In either case, keeping trying additional 292 # functions in the list and only raise an error if the last 293 # one failed. 
            if func == Link_Funcs[-1]:
                # exception of the last link method (copy) are fatal
                raise
    return 0

Link = SCons.Action.Action(LinkFunc, None)
def LocalString(target, source, env):
    # Display string for the LocalCopy action below.
    return 'Local copy of %s from %s' % (target[0], source[0])

LocalCopy = SCons.Action.Action(LinkFunc, LocalString)

def UnlinkFunc(target, source, env):
    # Action function: remove the target file from the file system.
    t = target[0]
    t.fs.unlink(t.abspath)
    return 0

Unlink = SCons.Action.Action(UnlinkFunc, None)

def MkdirFunc(target, source, env):
    # Action function: create the target directory if it does not
    # already exist.
    t = target[0]
    if not t.exists():
        t.fs.mkdir(t.abspath)
    return 0

Mkdir = SCons.Action.Action(MkdirFunc, None, presub=None)

MkdirBuilder = None

def get_MkdirBuilder():
    """Return the singleton Builder for directory Nodes, creating it
    lazily so importing this module doesn't pull in SCons.Builder and
    SCons.Defaults."""
    global MkdirBuilder
    if MkdirBuilder is None:
        import SCons.Builder
        import SCons.Defaults
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        MkdirBuilder = SCons.Builder.Builder(action = Mkdir,
                                             env = None,
                                             explain = None,
                                             is_explicit = None,
                                             target_scanner = SCons.Defaults.DirEntryScanner,
                                             name = "MkdirBuilder")
    return MkdirBuilder

class _Null:
    # Sentinel type: _null below is a unique "no value supplied" marker.
    pass

_null = _Null()

DefaultSCCSBuilder = None
DefaultRCSBuilder = None

def get_DefaultSCCSBuilder():
    """Return the singleton Builder for SCCS checkouts, creating it
    lazily on first use."""
    global DefaultSCCSBuilder
    if DefaultSCCSBuilder is None:
        import SCons.Builder
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$SCCSCOM', '$SCCSCOMSTR')
        DefaultSCCSBuilder = SCons.Builder.Builder(action = act,
                                                   env = None,
                                                   name = "DefaultSCCSBuilder")
    return DefaultSCCSBuilder

def get_DefaultRCSBuilder():
    """Return the singleton Builder for RCS checkouts, creating it
    lazily on first use."""
    global DefaultRCSBuilder
    if DefaultRCSBuilder is None:
        import SCons.Builder
        # "env" will get filled in by Executor.get_build_env()
        # calling SCons.Defaults.DefaultEnvironment() when necessary.
        act = SCons.Action.Action('$RCS_COCOM', '$RCS_COCOMSTR')
        DefaultRCSBuilder = SCons.Builder.Builder(action = act,
                                                  env = None,
                                                  name = "DefaultRCSBuilder")
    return DefaultRCSBuilder

# Cygwin's os.path.normcase pretends it's on a case-sensitive filesystem.
_is_cygwin = sys.platform == "cygwin"
# NOTE(review): this compares normcase() with normPATH(); it works as an
# "is normcase a no-op?" probe only because normpath("TeSt") == "TeSt",
# so confirm intent before "fixing" the apparent typo.
if os.path.normcase("TeSt") == os.path.normpath("TeSt") and not _is_cygwin:
    def _my_normcase(x):
        # Case-sensitive filesystem: identity.
        return x
else:
    def _my_normcase(x):
        # Case-insensitive filesystem: fold to a canonical case.
        return string.upper(x)



class DiskChecker:
    """A switchable disk-check callable.

    Calling the instance runs either the real check (do) or the no-op
    variant (ignore), depending on set()/set_do()/set_ignore().

    NOTE(review): storing self.__call__ on the *instance* relies on
    old-style (Python 2) class semantics; Python 3 looks special
    methods up on the type, so this dispatch would need rework there.
    """
    def __init__(self, type, do, ignore):
        # type: the name used to select this checker (e.g. 'match').
        # do: the callable that performs the real check.
        # ignore: the callable that skips the check.
        self.type = type
        self.do = do
        self.ignore = ignore
        self.set_do()
    def set_do(self):
        # Enable the check.
        self.__call__ = self.do
    def set_ignore(self):
        # Disable the check.
        self.__call__ = self.ignore
    def set(self, list):
        # Enable this checker iff its type name appears in list.
        if self.type in list:
            self.set_do()
        else:
            self.set_ignore()

def do_diskcheck_match(node, predicate, errorfmt):
    # Check whether the on-disk entry conflicts with the Node's
    # expected type; predicate() is e.g. node.isdir or node.isfile.
    result = predicate()
    try:
        # If calling the predicate() cached a None value from stat(),
        # remove it so it doesn't interfere with later attempts to
        # build this Node as we walk the DAG. (This isn't a great way
        # to do this, we're reaching into an interface that doesn't
        # really belong to us, but it's all about performance, so
        # for now we'll just document the dependency...)
        if node._memo['stat'] is None:
            del node._memo['stat']
    except (AttributeError, KeyError):
        pass
    if result:
        raise TypeError(errorfmt % node.abspath)

def ignore_diskcheck_match(node, predicate, errorfmt):
    # Disabled variant of do_diskcheck_match(): never complains.
    pass

def do_diskcheck_rcs(node, name):
    """Return whether an RCS control file (name + ',v') exists for
    name under node's RCS subdirectory.  The RCS directory lookup is
    cached on the node as node.rcs_dir (None when absent)."""
    try:
        rcs_dir = node.rcs_dir
    except AttributeError:
        if node.entry_exists_on_disk('RCS'):
            rcs_dir = node.Dir('RCS')
        else:
            rcs_dir = None
        node.rcs_dir = rcs_dir
    if rcs_dir:
        return rcs_dir.entry_exists_on_disk(name+',v')
    return None

def ignore_diskcheck_rcs(node, name):
    # Disabled variant of do_diskcheck_rcs(): report "not found".
    return None

def do_diskcheck_sccs(node, name):
    """Return whether an SCCS control file ('s.' + name) exists for
    name under node's SCCS subdirectory.  The SCCS directory lookup is
    cached on the node as node.sccs_dir (None when absent)."""
    try:
        sccs_dir = node.sccs_dir
    except AttributeError:
        if node.entry_exists_on_disk('SCCS'):
            sccs_dir = node.Dir('SCCS')
        else:
            sccs_dir = None
        node.sccs_dir = sccs_dir
    if sccs_dir:
        return sccs_dir.entry_exists_on_disk('s.'+name)
    return None

def ignore_diskcheck_sccs(node, name):
    # Disabled variant of do_diskcheck_sccs(): report "not found".
    return None

diskcheck_match = DiskChecker('match', do_diskcheck_match, ignore_diskcheck_match)
diskcheck_rcs = DiskChecker('rcs', do_diskcheck_rcs, ignore_diskcheck_rcs)
diskcheck_sccs = DiskChecker('sccs', do_diskcheck_sccs, ignore_diskcheck_sccs)

diskcheckers = [
    diskcheck_match,
    diskcheck_rcs,
    diskcheck_sccs,
]

def set_diskcheck(list):
    # Enable exactly the checkers whose type names appear in list.
    for dc in diskcheckers:
        dc.set(list)

def diskcheck_types():
    # The checker names accepted by set_diskcheck().
    return map(lambda dc: dc.type, diskcheckers)



class EntryProxy(SCons.Util.Proxy):
    """Proxy around a Node.FS entry that provides the "special"
    attributes (abspath, posix, filebase, srcdir, ...) used during
    command-line substitution; see dictSpecialAttrs below."""

    def __get_abspath(self):
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(entry.get_abspath(),
                                              entry.name + "_abspath")

    def __get_filebase(self):
        # File name with its suffix stripped.
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[0],
                                              name + "_filebase")

    def __get_suffix(self):
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(name)[1],
                                              name + "_suffix")

    def __get_file(self):
        # Bare file name, directory stripped.
        name = self.get().name
        return SCons.Subst.SpecialAttrWrapper(name, name + "_file")

    def __get_base_path(self):
        """Return the file's directory and file name, with the
        suffix stripped."""
        entry = self.get()
        return SCons.Subst.SpecialAttrWrapper(SCons.Util.splitext(entry.get_path())[0],
                                              entry.name + "_base")

    def __get_posix_path(self):
        """Return the path with / as the path separator,
        regardless of platform."""
        if os.sep == '/':
            return self
        else:
            entry = self.get()
            r = string.replace(entry.get_path(), os.sep, '/')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_posix")

    def __get_windows_path(self):
        """Return the path with \ as the path separator,
        regardless of platform."""
        if os.sep == '\\':
            return self
        else:
            entry = self.get()
            r = string.replace(entry.get_path(), os.sep, '\\')
            return SCons.Subst.SpecialAttrWrapper(r, entry.name + "_windows")

    def __get_srcnode(self):
        return EntryProxy(self.get().srcnode())

    def __get_srcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().dir)

    def __get_rsrcnode(self):
        # Repository-resolved source node.
        return EntryProxy(self.get().srcnode().rfile())

    def __get_rsrcdir(self):
        """Returns the directory containing the source node linked to this
        node via VariantDir(), or the directory of this node if not linked."""
        return EntryProxy(self.get().srcnode().rfile().dir)

    def __get_dir(self):
        return EntryProxy(self.get().dir)

    # Maps special attribute names to the accessor implementing them;
    # consulted by __getattr__ below.
    dictSpecialAttrs = { "base"     : __get_base_path,
                         "posix"    : __get_posix_path,
                         "windows"  : __get_windows_path,
                         "win32"    : __get_windows_path,
                         "srcpath"  : __get_srcnode,
                         "srcdir"   : __get_srcdir,
                         "dir"      : __get_dir,
                         "abspath"  : __get_abspath,
                         "filebase" :
                         __get_filebase,
                         "suffix"   : __get_suffix,
                         "file"     : __get_file,
                         "rsrcpath" : __get_rsrcnode,
                         "rsrcdir"  : __get_rsrcdir,
                        }

    def __getattr__(self, name):
        # This is how we implement the "special" attributes
        # such as base, posix, srcdir, etc.
        try:
            attr_function = self.dictSpecialAttrs[name]
        except KeyError:
            # Not a special attribute: fall back to the proxied entry.
            try:
                attr = SCons.Util.Proxy.__getattr__(self, name)
            except AttributeError as e:
                # Raise our own AttributeError subclass with an
                # overridden __str__() method that identifies the
                # name of the entry that caused the exception.
                raise EntryProxyAttributeError(self, name)
            return attr
        else:
            return attr_function(self)

class Base(SCons.Node.Node):
    """A generic class for file system entries.  This class is for
    when we don't know yet whether the entry being looked up is a file
    or a directory.  Instances of this class can morph into either
    Dir or File objects by a later, more precise lookup.

    Note: this class does not define __cmp__ and __hash__ for
    efficiency reasons.  SCons does a lot of comparing of
    Node.FS.{Base,Entry,File,Dir} objects, so those operations must be
    as fast as possible, which means we want to use Python's built-in
    object identity comparisons.
    """

    memoizer_counters = []

    def __init__(self, name, directory, fs):
        """Initialize a generic Node.FS.Base object.

        Call the superclass initialization, take care of setting up
        our relative and absolute paths, identify our parent
        directory, and indicate that this node should use
        signatures."""
        if __debug__: logInstanceCreation(self, 'Node.FS.Base')
        SCons.Node.Node.__init__(self)

        # Filenames and paths are probably reused and are intern'ed to
        # save some memory.
        self.name = SCons.Util.silent_intern(name)
        self.suffix = SCons.Util.silent_intern(SCons.Util.splitext(name)[1])
        self.fs = fs

        assert directory, "A directory must be provided"

        self.abspath = SCons.Util.silent_intern(directory.entry_abspath(name))
        self.labspath = SCons.Util.silent_intern(directory.entry_labspath(name))
        # Avoid a leading './' on paths relative to the top directory.
        if directory.path == '.':
            self.path = SCons.Util.silent_intern(name)
        else:
            self.path = SCons.Util.silent_intern(directory.entry_path(name))
        if directory.tpath == '.':
            self.tpath = SCons.Util.silent_intern(name)
        else:
            self.tpath = SCons.Util.silent_intern(directory.entry_tpath(name))
        self.path_elements = directory.path_elements + [self]

        self.dir = directory
        self.cwd = None # will hold the SConscript directory for target nodes
        self.duplicate = directory.duplicate

    def str_for_display(self):
        # Quoted form for user-facing messages.
        return '"' + self.__str__() + '"'

    def must_be_same(self, klass):
        """
        This node, which already existed, is being looked up as the
        specified klass.  Raise an exception if it isn't.
        """
        if isinstance(self, klass) or klass is Entry:
            return
        raise TypeError("Tried to lookup %s '%s' as a %s." %\
              (self.__class__.__name__, self.path, klass.__name__))

    def get_dir(self):
        # Parent directory Node.
        return self.dir

    def get_suffix(self):
        return self.suffix

    def rfile(self):
        # Repository lookup; the generic Base is its own rfile.
        return self

    def __str__(self):
        """A Node.FS.Base object's string representation is its path
        name."""
        global Save_Strings
        if Save_Strings:
            return self._save_str()
        return self._get_str()

    memoizer_counters.append(SCons.Memoize.CountValue('_save_str'))

    def _save_str(self):
        # Memoized (and interned) version of _get_str().
        try:
            return self._memo['_save_str']
        except KeyError:
            pass
        result = sys.intern(self._get_str())
        self._memo['_save_str'] = result
        return result

    def _get_str(self):
        # Compute the string representation: prefer our own path when
        # duplicating or derived, otherwise show the source node's
        # path (unless only we exist on disk).
        global Save_Strings
        if self.duplicate or self.is_derived():
            return self.get_path()
        srcnode = self.srcnode()
        if srcnode.stat() is None and self.stat() is not None:
            result = self.get_path()
        else:
            result = srcnode.get_path()
        if not Save_Strings:
            # We're not at the point where we're saving the string string
            # representations of FS Nodes (because we haven't finished
            # reading the SConscript files and need to have str() return
            # things relative to them).  That also means we can't yet
            # cache values returned (or not returned) by stat(), since
            # Python code in the SConscript files might still create
            # or otherwise affect the on-disk file.  So get rid of the
            # values that the underlying stat() method saved.
            try: del self._memo['stat']
            except KeyError: pass
            if self is not srcnode:
                try: del srcnode._memo['stat']
                except KeyError: pass
        return result

    rstr = __str__

    memoizer_counters.append(SCons.Memoize.CountValue('stat'))

    def stat(self):
        # Memoized os.stat() of our absolute path; None if it fails.
        try: return self._memo['stat']
        except KeyError: pass
        try: result = self.fs.stat(self.abspath)
        except os.error: result = None
        self._memo['stat'] = result
        return result

    def exists(self):
        return self.stat() is not None

    def rexists(self):
        # Existence check through the repository-resolved file.
        return self.rfile().exists()

    def getmtime(self):
        st = self.stat()
        if st: return st[stat.ST_MTIME]
        else: return None

    def getsize(self):
        st = self.stat()
        if st: return st[stat.ST_SIZE]
        else: return None

    def isdir(self):
        st = self.stat()
        return st is not None and stat.S_ISDIR(st[stat.ST_MODE])

    def isfile(self):
        st = self.stat()
        return st is not None and stat.S_ISREG(st[stat.ST_MODE])

    if hasattr(os, 'symlink'):
        def islink(self):
            # lstat (not stat) so the link itself is examined.
            try: st = self.fs.lstat(self.abspath)
            except os.error: return 0
            return stat.S_ISLNK(st[stat.ST_MODE])
    else:
        def islink(self):
            return 0 # no symlinks

    def is_under(self, dir):
        # True if dir is this node or any ancestor directory.
        if self is dir:
            return 1
        else:
            return self.dir.is_under(dir)

    def set_local(self):
        # Mark this node to be built locally (not fetched from a cache).
        self._local = 1

    def srcnode(self):
        """If this node is in a build path, return the node
        corresponding to its source file.  Otherwise, return
        ourself.
        """
        srcdir_list = self.dir.srcdir_list()
        if srcdir_list:
            srcnode = srcdir_list[0].Entry(self.name)
            srcnode.must_be_same(self.__class__)
            return srcnode
        return self

    def get_path(self, dir=None):
        """Return path relative to the current working directory of the
        Node.FS.Base object that owns us."""
        if not dir:
            dir = self.fs.getcwd()
        if self == dir:
            return '.'
        path_elems = self.path_elements
        # If dir is one of our ancestors, drop everything up to and
        # including it so the result is relative to dir.
        try: i = path_elems.index(dir)
        except ValueError: pass
        else: path_elems = path_elems[i+1:]
        path_elems = map(lambda n: n.name, path_elems)
        return string.join(path_elems, os.sep)

    def set_src_builder(self, builder):
        """Set the source code builder for this node."""
        self.sbuilder = builder
        if not self.has_builder():
            self.builder_set(builder)

    def src_builder(self):
        """Fetch the source code builder for this node.

        If there isn't one, we cache the source code builder specified
        for the directory (which in turn will cache the value from its
        parent directory, and so on up to the file system root).
        """
        try:
            scb = self.sbuilder
        except AttributeError:
            scb = self.dir.src_builder()
            self.sbuilder = scb
        return scb

    def get_abspath(self):
        """Get the absolute path of the file."""
        return self.abspath

    def for_signature(self):
        # Return just our name.  Even an absolute path would not work,
        # because that can change thanks to symlinks or remapped network
        # paths.
        return self.name

    def get_subst_proxy(self):
        # Lazily-created, cached EntryProxy for substitution.
        try:
            return self._proxy
        except AttributeError:
            ret = EntryProxy(self)
            self._proxy = ret
            return ret

    def target_from_source(self, prefix, suffix, splitext=SCons.Util.splitext):
        """

        Generates a target entry that corresponds to this entry (usually
        a source file) with the specified prefix and suffix.

        Note that this method can be overridden dynamically for generated
        files that need different behavior.  See Tool/swig.py for
        an example.
        """
        return self.dir.Entry(prefix + splitext(self.name)[0] + suffix)

    def _Rfindalldirs_key(self, pathlist):
        # Memoization key for Rfindalldirs(): the pathlist itself.
        return pathlist

    memoizer_counters.append(SCons.Memoize.CountDict('Rfindalldirs', _Rfindalldirs_key))

    def Rfindalldirs(self, pathlist):
        """
        Return all of the directories for a given path list, including
        corresponding "backing" directories in any repositories.

        The Node lookups are relative to this Node (typically a
        directory), so memoizing result saves cycles from looking
        up the same path for each target in a given directory.
        """
        try:
            memo_dict = self._memo['Rfindalldirs']
        except KeyError:
            memo_dict = {}
            self._memo['Rfindalldirs'] = memo_dict
        else:
            try:
                return memo_dict[pathlist]
            except KeyError:
                pass

        create_dir_relative_to_self = self.Dir
        result = []
        for path in pathlist:
            if isinstance(path, SCons.Node.Node):
                # Already a Node: use it as-is.
                result.append(path)
            else:
                dir = create_dir_relative_to_self(path)
                result.extend(dir.get_all_rdirs())

        memo_dict[pathlist] = result

        return result

    def RDirs(self, pathlist):
        """Search for a list of directories in the Repository list."""
        cwd = self.cwd or self.fs._cwd
        return cwd.Rfindalldirs(pathlist)

    memoizer_counters.append(SCons.Memoize.CountValue('rentry'))

    def rentry(self):
        # Memoized repository lookup: if we don't exist locally, return
        # the first repository directory entry with our name, else self.
        try:
            return self._memo['rentry']
        except KeyError:
            pass
        result = self
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try:
                    node = dir.entries[norm_name]
                except KeyError:
                    if dir.entry_exists_on_disk(self.name):
                        result = dir.Entry(self.name)
                        break
        self._memo['rentry'] = result
        return result

    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        # Generic entries never match a glob; Dir/File override this.
        return []

class Entry(Base):
    """This is the class for generic Node.FS entries--that is, things
    that could be a File or a Dir, but we're just not sure yet.
    Consequently, the methods in this class really exist just to
    transform their associated object into the right class when the
    time comes, and then call the same-named method in the transformed
    class."""

    def diskcheck_match(self):
        pass

    def disambiguate(self, must_exist=None):
        """Morph this generic Entry into a Dir or File, based on what
        (if anything) exists on disk at this path, and return self.
        If must_exist is true and nothing exists, raise UserError.
        """
        if self.isdir():
            self.__class__ = Dir
            self._morph()
        elif self.isfile():
            self.__class__ = File
            self._morph()
            self.clear()
        else:
            # There was nothing on-disk at this location, so look in
            # the src directory.
            #
            # We can't just use self.srcnode() straight away because
            # that would create an actual Node for this file in the src
            # directory, and there might not be one.  Instead, use the
            # dir_on_disk() method to see if there's something on-disk
            # with that name, in which case we can go ahead and call
            # self.srcnode() to create the right type of entry.
            srcdir = self.dir.srcnode()
            if srcdir != self.dir and \
               srcdir.entry_exists_on_disk(self.name) and \
               self.srcnode().isdir():
                self.__class__ = Dir
                self._morph()
            elif must_exist:
                msg = "No such file or directory: '%s'" % self.abspath
                raise SCons.Errors.UserError(msg)
            else:
                # Default assumption: treat an absent entry as a File.
                self.__class__ = File
                self._morph()
                self.clear()
        return self

    def rfile(self):
        """We're a generic Entry, but the caller is actually looking for
        a File at this point, so morph into one."""
        self.__class__ = File
        self._morph()
        self.clear()
        return File.rfile(self)

    def scanner_key(self):
        return self.get_suffix()

    def get_contents(self):
        """Fetch the contents of the entry.  Returns the exact binary
        contents of the file."""
        try:
            self = self.disambiguate(must_exist=1)
        except SCons.Errors.UserError:
            # There was nothing on disk with which to disambiguate
            # this entry.  Leave it as an Entry, but return a null
            # string so calls to get_contents() in emitters and the
            # like (e.g. in qt.py) don't have to disambiguate by hand
            # or catch the exception.
            return ''
        else:
            # After disambiguate() our class is Dir or File, so this
            # re-dispatches to that class's get_contents(), not ours.
            return self.get_contents()

    def get_text_contents(self):
        """Fetch the decoded text contents of a Unicode encoded Entry.

        Since this should return the text contents from the file
        system, we check to see into what sort of subclass we should
        morph this Entry."""
        try:
            self = self.disambiguate(must_exist=1)
        except SCons.Errors.UserError:
            # There was nothing on disk with which to disambiguate
            # this entry.  Leave it as an Entry, but return a null
            # string so calls to get_text_contents() in emitters and
            # the like (e.g. in qt.py) don't have to disambiguate by
            # hand or catch the exception.
            return ''
        else:
            # Re-dispatches to the morphed class's get_text_contents().
            return self.get_text_contents()

    def must_be_same(self, klass):
        """Called to make sure a Node is a Dir.  Since we're an
        Entry, we can morph into one."""
        if self.__class__ is not klass:
            self.__class__ = klass
            self._morph()
            self.clear()

    # The following methods can get called before the Taskmaster has
    # had a chance to call disambiguate() directly to see if this Entry
    # should really be a Dir or a File.  We therefore use these to call
    # disambiguate() transparently (from our caller's point of view).
    #
    # Right now, this minimal set of methods has been derived by just
    # looking at some of the methods that will obviously be called early
    # in any of the various Taskmasters' calling sequences, and then
    # empirically figuring out which additional methods are necessary
    # to make various tests pass.

    def exists(self):
        """Return if the Entry exists.  Check the file system to see
        what we should turn into first.
Assume a file if there's no 987 directory.""" 988 return self.disambiguate().exists() 989 990 def rel_path(self, other): 991 d = self.disambiguate() 992 if d.__class__ is Entry: 993 raise Exception("rel_path() could not disambiguate File/Dir") 994 return d.rel_path(other) 995 996 def new_ninfo(self): 997 return self.disambiguate().new_ninfo() 998 999 def changed_since_last_build(self, target, prev_ni): 1000 return self.disambiguate().changed_since_last_build(target, prev_ni) 1001 1002 def _glob1(self, pattern, ondisk=True, source=False, strings=False): 1003 return self.disambiguate()._glob1(pattern, ondisk, source, strings) 1004 1005 def get_subst_proxy(self): 1006 return self.disambiguate().get_subst_proxy() 1007 1008# This is for later so we can differentiate between Entry the class and Entry 1009# the method of the FS class. 1010_classEntry = Entry 1011 1012 1013class LocalFS: 1014 1015 if SCons.Memoize.use_memoizer: 1016 __metaclass__ = SCons.Memoize.Memoized_Metaclass 1017 1018 # This class implements an abstraction layer for operations involving 1019 # a local file system. Essentially, this wraps any function in 1020 # the os, os.path or shutil modules that we use to actually go do 1021 # anything with or to the local file system. 1022 # 1023 # Note that there's a very good chance we'll refactor this part of 1024 # the architecture in some way as we really implement the interface(s) 1025 # for remote file system Nodes. For example, the right architecture 1026 # might be to have this be a subclass instead of a base class. 1027 # Nevertheless, we're using this as a first step in that direction. 1028 # 1029 # We're not using chdir() yet because the calling subclass method 1030 # needs to use os.chdir() directly to avoid recursion. Will we 1031 # really need this one? 
    #def chdir(self, path):
    #    return os.chdir(path)
    def chmod(self, path, mode):
        return os.chmod(path, mode)
    def copy(self, src, dst):
        return shutil.copy(src, dst)
    def copy2(self, src, dst):
        # copy2() also preserves file metadata (mtime etc.).
        return shutil.copy2(src, dst)
    def exists(self, path):
        return os.path.exists(path)
    def getmtime(self, path):
        return os.path.getmtime(path)
    def getsize(self, path):
        return os.path.getsize(path)
    def isdir(self, path):
        return os.path.isdir(path)
    def isfile(self, path):
        return os.path.isfile(path)
    def link(self, src, dst):
        return os.link(src, dst)
    def lstat(self, path):
        return os.lstat(path)
    def listdir(self, path):
        return os.listdir(path)
    def makedirs(self, path):
        return os.makedirs(path)
    def mkdir(self, path):
        return os.mkdir(path)
    def rename(self, old, new):
        return os.rename(old, new)
    def stat(self, path):
        return os.stat(path)
    def symlink(self, src, dst):
        return os.symlink(src, dst)
    def open(self, path):
        # Default (text, read-only) open of a local file.
        return open(path)
    def unlink(self, path):
        return os.unlink(path)

    # Platforms without os.symlink (e.g. Windows on old Pythons) report
    # that nothing is a link.
    if hasattr(os, 'symlink'):
        def islink(self, path):
            return os.path.islink(path)
    else:
        def islink(self, path):
            return 0                    # no symlinks

    if hasattr(os, 'readlink'):
        def readlink(self, file):
            return os.readlink(file)
    else:
        def readlink(self, file):
            return ''


#class RemoteFS:
#    # Skeleton for the obvious methods we might need from the
#    # abstraction layer for a remote filesystem.
#    def upload(self, local_src, remote_dst):
#        pass
#    def download(self, remote_src, local_dst):
#        pass


class FS(LocalFS):

    memoizer_counters = []

    def __init__(self, path = None):
        """Initialize the Node.FS subsystem.

        The supplied path is the top of the source tree, where we
        expect to find the top-level build file.  If no path is
        supplied, the current directory is the default.

        The path argument must be a valid absolute path.
        """
        if __debug__: logInstanceCreation(self, 'Node.FS')

        self._memo = {}

        self.Root = {}
        self.SConstruct_dir = None
        self.max_drift = default_max_drift

        self.Top = None
        if path is None:
            self.pathTop = os.getcwd()
        else:
            self.pathTop = path
        # Remember the (case-normalized) drive of the top directory so
        # get_root() can alias the empty drive to it.
        self.defaultDrive = _my_normcase(os.path.splitdrive(self.pathTop)[0])

        self.Top = self.Dir(self.pathTop)
        self.Top.path = '.'
        self.Top.tpath = '.'
        self._cwd = self.Top

        DirNodeInfo.fs = self
        FileNodeInfo.fs = self

    def set_SConstruct_dir(self, dir):
        self.SConstruct_dir = dir

    def get_max_drift(self):
        return self.max_drift

    def set_max_drift(self, max_drift):
        self.max_drift = max_drift

    def getcwd(self):
        return self._cwd

    def chdir(self, dir, change_os_dir=0):
        """Change the current working directory for lookups.
        If change_os_dir is true, we will also change the "real" cwd
        to match.
        """
        curr=self._cwd
        try:
            if dir is not None:
                self._cwd = dir
                if change_os_dir:
                    os.chdir(dir.abspath)
        except OSError:
            # Restore the previous lookup cwd if the real chdir failed.
            self._cwd = curr
            raise

    def get_root(self, drive):
        """
        Returns the root directory for the specified drive, creating
        it if necessary.
        """
        # Drive letters are case-normalized; the empty drive and the
        # default drive are aliases for the same RootDir.
        drive = _my_normcase(drive)
        try:
            return self.Root[drive]
        except KeyError:
            root = RootDir(drive, self)
            self.Root[drive] = root
            if not drive:
                self.Root[self.defaultDrive] = root
            elif drive == self.defaultDrive:
                self.Root[''] = root
            return root

    def _lookup(self, p, directory, fsclass, create=1):
        """
        The generic entry point for Node lookup with user-supplied data.

        This translates arbitrary input into a canonical Node.FS object
        of the specified fsclass.  The general approach for strings is
        to turn it into a fully normalized absolute path and then call
        the root directory's lookup_abs() method for the heavy lifting.

        If the path name begins with '#', it is unconditionally
        interpreted relative to the top-level directory of this FS.  '#'
        is treated as a synonym for the top-level SConstruct directory,
        much like '~' is treated as a synonym for the user's home
        directory in a UNIX shell.  So both '#foo' and '#/foo' refer
        to the 'foo' subdirectory underneath the top-level SConstruct
        directory.

        If the path name is relative, then the path is looked up relative
        to the specified directory, or the current directory (self._cwd,
        typically the SConscript directory) if the specified directory
        is None.
        """
        if isinstance(p, Base):
            # It's already a Node.FS object.  Make sure it's the right
            # class and return.
            p.must_be_same(fsclass)
            return p
        # str(p) in case it's something like a proxy object
        p = str(p)

        initial_hash = (p[0:1] == '#')
        if initial_hash:
            # There was an initial '#', so we strip it and override
            # whatever directory they may have specified with the
            # top-level SConstruct directory.
            p = p[1:]
            directory = self.Top

        if directory and not isinstance(directory, Dir):
            directory = self.Dir(directory)

        if do_splitdrive:
            drive, p = os.path.splitdrive(p)
        else:
            drive = ''
        if drive and not p:
            # This causes a naked drive letter to be treated as a synonym
            # for the root directory on that drive.
            p = os.sep
        absolute = os.path.isabs(p)

        needs_normpath = needs_normpath_check.match(p)

        if initial_hash or not absolute:
            # This is a relative lookup, either to the top-level
            # SConstruct directory (because of the initial '#') or to
            # the current directory (the path name is not absolute).
            # Add the string to the appropriate directory lookup path,
            # after which the whole thing gets normalized.
            if not directory:
                directory = self._cwd
            if p:
                # labspath always uses '/' separators internally.
                p = directory.labspath + '/' + p
            else:
                p = directory.labspath

        if needs_normpath:
            p = os.path.normpath(p)

        if drive or absolute:
            root = self.get_root(drive)
        else:
            if not directory:
                directory = self._cwd
            root = directory.root

        if os.sep != '/':
            # Convert OS-specific separators to the internal '/' form
            # before the canonical lookup.
            p = string.replace(p, os.sep, '/')
        return root._lookup_abs(p, fsclass, create)

    def Entry(self, name, directory = None, create = 1):
        """Look up or create a generic Entry node with the specified name.
        If the name is a relative path (begins with ./, ../, or a file
        name), then it is looked up relative to the supplied directory
        node, or to the top level directory of the FS (supplied at
        construction time) if no directory is supplied.
        """
        return self._lookup(name, directory, Entry, create)

    def File(self, name, directory = None, create = 1):
        """Look up or create a File node with the specified name.  If
        the name is a relative path (begins with ./, ../, or a file name),
        then it is looked up relative to the supplied directory node,
        or to the top level directory of the FS (supplied at construction
        time) if no directory is supplied.

        This method will raise TypeError if a directory is found at the
        specified path.
        """
        return self._lookup(name, directory, File, create)

    def Dir(self, name, directory = None, create = True):
        """Look up or create a Dir node with the specified name.  If
        the name is a relative path (begins with ./, ../, or a file name),
        then it is looked up relative to the supplied directory node,
        or to the top level directory of the FS (supplied at construction
        time) if no directory is supplied.

        This method will raise TypeError if a normal file is found at the
        specified path.
        """
        return self._lookup(name, directory, Dir, create)

    def VariantDir(self, variant_dir, src_dir, duplicate=1):
        """Link the supplied variant directory to the source directory
        for purposes of building files."""

        if not isinstance(src_dir, SCons.Node.Node):
            src_dir = self.Dir(src_dir)
        if not isinstance(variant_dir, SCons.Node.Node):
            variant_dir = self.Dir(variant_dir)
        if src_dir.is_under(variant_dir):
            raise SCons.Errors.UserError("Source directory cannot be under variant directory.")
        if variant_dir.srcdir:
            if variant_dir.srcdir == src_dir:
                return # We already did this.
1301 raise SCons.Errors.UserError("'%s' already has a source directory: '%s'."%(variant_dir, variant_dir.srcdir)) 1302 variant_dir.link(src_dir, duplicate) 1303 1304 def Repository(self, *dirs): 1305 """Specify Repository directories to search.""" 1306 for d in dirs: 1307 if not isinstance(d, SCons.Node.Node): 1308 d = self.Dir(d) 1309 self.Top.addRepository(d) 1310 1311 def variant_dir_target_climb(self, orig, dir, tail): 1312 """Create targets in corresponding variant directories 1313 1314 Climb the directory tree, and look up path names 1315 relative to any linked variant directories we find. 1316 1317 Even though this loops and walks up the tree, we don't memoize 1318 the return value because this is really only used to process 1319 the command-line targets. 1320 """ 1321 targets = [] 1322 message = None 1323 fmt = "building associated VariantDir targets: %s" 1324 start_dir = dir 1325 while dir: 1326 for bd in dir.variant_dirs: 1327 if start_dir.is_under(bd): 1328 # If already in the build-dir location, don't reflect 1329 return [orig], fmt % str(orig) 1330 p = os.path.join(*[bd.path] + tail) 1331 targets.append(self.Entry(p)) 1332 tail = [dir.name] + tail 1333 dir = dir.up() 1334 if targets: 1335 message = fmt % string.join(map(str, targets)) 1336 return targets, message 1337 1338 def Glob(self, pathname, ondisk=True, source=True, strings=False, cwd=None): 1339 """ 1340 Globs 1341 1342 This is mainly a shim layer 1343 """ 1344 if cwd is None: 1345 cwd = self.getcwd() 1346 return cwd.glob(pathname, ondisk, source, strings) 1347 1348class DirNodeInfo(SCons.Node.NodeInfoBase): 1349 # This should get reset by the FS initialization. 
1350 current_version_id = 1 1351 1352 fs = None 1353 1354 def str_to_node(self, s): 1355 top = self.fs.Top 1356 root = top.root 1357 if do_splitdrive: 1358 drive, s = os.path.splitdrive(s) 1359 if drive: 1360 root = self.fs.get_root(drive) 1361 if not os.path.isabs(s): 1362 s = top.labspath + '/' + s 1363 return root._lookup_abs(s, Entry) 1364 1365class DirBuildInfo(SCons.Node.BuildInfoBase): 1366 current_version_id = 1 1367 1368glob_magic_check = re.compile('[*?[]') 1369 1370def has_glob_magic(s): 1371 return glob_magic_check.search(s) is not None 1372 1373class Dir(Base): 1374 """A class for directories in a file system. 1375 """ 1376 1377 memoizer_counters = [] 1378 1379 NodeInfo = DirNodeInfo 1380 BuildInfo = DirBuildInfo 1381 1382 def __init__(self, name, directory, fs): 1383 if __debug__: logInstanceCreation(self, 'Node.FS.Dir') 1384 Base.__init__(self, name, directory, fs) 1385 self._morph() 1386 1387 def _morph(self): 1388 """Turn a file system Node (either a freshly initialized directory 1389 object or a separate Entry object) into a proper directory object. 1390 1391 Set up this directory's entries and hook it into the file 1392 system tree. Specify that directories (this Node) don't use 1393 signatures for calculating whether they're current. 1394 """ 1395 1396 self.repositories = [] 1397 self.srcdir = None 1398 1399 self.entries = {} 1400 self.entries['.'] = self 1401 self.entries['..'] = self.dir 1402 self.cwd = self 1403 self.searched = 0 1404 self._sconsign = None 1405 self.variant_dirs = [] 1406 self.root = self.dir.root 1407 1408 # Don't just reset the executor, replace its action list, 1409 # because it might have some pre-or post-actions that need to 1410 # be preserved. 
        self.builder = get_MkdirBuilder()
        self.get_executor().set_action_list(self.builder.action)

    def diskcheck_match(self):
        diskcheck_match(self, self.isfile,
                        "File %s found where directory expected.")

    def __clearRepositoryCache(self, duplicate=None):
        """Called when we change the repository(ies) for a directory.
        This clears any cached information that is invalidated by changing
        the repository."""

        for node in self.entries.values():
            if node != self.dir:
                if node != self and isinstance(node, Dir):
                    # Recurse into subdirectories.
                    node.__clearRepositoryCache(duplicate)
                else:
                    node.clear()
                    try:
                        del node._srcreps
                    except AttributeError:
                        pass
                    if duplicate is not None:
                        node.duplicate=duplicate

    def __resetDuplicate(self, node):
        if node != self:
            node.duplicate = node.get_dir().duplicate

    def Entry(self, name):
        """
        Looks up or creates an entry node named 'name' relative to
        this directory.
        """
        return self.fs.Entry(name, self)

    def Dir(self, name, create=True):
        """
        Looks up or creates a directory node named 'name' relative to
        this directory.
        """
        return self.fs.Dir(name, self, create)

    def File(self, name):
        """
        Looks up or creates a file node named 'name' relative to
        this directory.
        """
        return self.fs.File(name, self)

    def _lookup_rel(self, name, klass, create=1):
        """
        Looks up a *normalized* relative path name, relative to this
        directory.

        This method is intended for use by internal lookups with
        already-normalized path data.  For general-purpose lookups,
        use the Entry(), Dir() and File() methods above.

        This method does *no* input checking and will die or give
        incorrect results if it's passed a non-normalized path name (e.g.,
        a path containing '..'), an absolute path name, a top-relative
        ('#foo') path name, or any kind of object.
        """
        name = self.entry_labspath(name)
        return self.root._lookup_abs(name, klass, create)

    def link(self, srcdir, duplicate):
        """Set this directory as the variant directory for the
        supplied source directory."""
        self.srcdir = srcdir
        self.duplicate = duplicate
        self.__clearRepositoryCache(duplicate)
        srcdir.variant_dirs.append(self)

    def getRepositories(self):
        """Returns a list of repositories for this directory.
        """
        if self.srcdir and not self.duplicate:
            return self.srcdir.get_all_rdirs() + self.repositories
        return self.repositories

    memoizer_counters.append(SCons.Memoize.CountValue('get_all_rdirs'))

    def get_all_rdirs(self):
        # Memoized; a fresh list copy is returned each call so callers
        # can't corrupt the cached value.
        try:
            return list(self._memo['get_all_rdirs'])
        except KeyError:
            pass

        result = [self]
        fname = '.'
        dir = self
        while dir:
            for rep in dir.getRepositories():
                result.append(rep.Dir(fname))
            if fname == '.':
                fname = dir.name
            else:
                fname = dir.name + os.sep + fname
            dir = dir.up()

        self._memo['get_all_rdirs'] = list(result)

        return result

    def addRepository(self, dir):
        if dir != self and not dir in self.repositories:
            self.repositories.append(dir)
            dir.tpath = '.'
            self.__clearRepositoryCache()

    def up(self):
        return self.entries['..']

    def _rel_path_key(self, other):
        return str(other)

    memoizer_counters.append(SCons.Memoize.CountDict('rel_path', _rel_path_key))

    def rel_path(self, other):
        """Return a path to "other" relative to this directory.
        """

        # This complicated and expensive method, which constructs relative
        # paths between arbitrary Node.FS objects, is no longer used
        # by SCons itself.  It was introduced to store dependency paths
        # in .sconsign files relative to the target, but that ended up
        # being significantly inefficient.
        #
        # We're continuing to support the method because some SConstruct
        # files out there started using it when it was available, and
        # we're all about backwards compatibility..

        try:
            memo_dict = self._memo['rel_path']
        except KeyError:
            memo_dict = {}
            self._memo['rel_path'] = memo_dict
        else:
            try:
                return memo_dict[other]
            except KeyError:
                pass

        if self is other:
            result = '.'

        elif not other in self.path_elements:
            # 'other' is not on our path to the root; build the result
            # recursively from its parent directory.
            try:
                other_dir = other.get_dir()
            except AttributeError:
                result = str(other)
            else:
                if other_dir is None:
                    result = other.name
                else:
                    dir_rel_path = self.rel_path(other_dir)
                    if dir_rel_path == '.':
                        result = other.name
                    else:
                        result = dir_rel_path + os.sep + other.name
        else:
            # 'other' is an ancestor: climb up with '..' components.
            i = self.path_elements.index(other) + 1

            path_elems = ['..'] * (len(self.path_elements) - i) \
                         + map(lambda n: n.name, other.path_elements[i:])

            result = string.join(path_elems, os.sep)

        memo_dict[other] = result

        return result

    def get_env_scanner(self, env, kw={}):
        import SCons.Defaults
        return SCons.Defaults.DirEntryScanner

    def get_target_scanner(self):
        import SCons.Defaults
        return SCons.Defaults.DirEntryScanner

    def get_found_includes(self, env, scanner, path):
        """Return this directory's implicit dependencies.

        We don't bother caching the results because the scan typically
        shouldn't be requested more than once (as opposed to scanning
        .h file contents, which can be requested as many times as the
        files is #included by other files).
        """
        if not scanner:
            return []
        # Clear cached info for this Dir.  If we already visited this
        # directory on our walk down the tree (because we didn't know at
        # that point it was being used as the source for another Node)
        # then we may have calculated build signature before realizing
        # we had to scan the disk.  Now that we have to, though, we need
        # to invalidate the old calculated signature so that any node
        # dependent on our directory structure gets one that includes
        # info about everything on disk.
        self.clear()
        return scanner(self, env, path)

    #
    # Taskmaster interface subsystem
    #

    def prepare(self):
        pass

    def build(self, **kw):
        """A null "builder" for directories."""
        global MkdirBuilder
        if self.builder is not MkdirBuilder:
            SCons.Node.Node.build(*[self,], **kw)

    #
    #
    #

    def _create(self):
        """Create this directory, silently and without worrying about
        whether the builder is the default or not."""
        # Collect the chain of missing ancestor directories, then create
        # them top-down.
        listDirs = []
        parent = self
        while parent:
            if parent.exists():
                break
            listDirs.append(parent)
            p = parent.up()
            if p is None:
                # Don't use while: - else: for this condition because
                # if so, then parent is None and has no .path attribute.
                raise SCons.Errors.StopError(parent.path)
            parent = p
        listDirs.reverse()
        for dirnode in listDirs:
            try:
                # Don't call dirnode.build(), call the base Node method
                # directly because we definitely *must* create this
                # directory.  The dirnode.build() method will suppress
                # the build if it's the default builder.
                SCons.Node.Node.build(dirnode)
                dirnode.get_executor().nullify()
                # The build() action may or may not have actually
                # created the directory, depending on whether the -n
                # option was used or not.  Delete the _exists and
                # _rexists attributes so they can be reevaluated.
                dirnode.clear()
            except OSError:
                pass

    def multiple_side_effect_has_builder(self):
        global MkdirBuilder
        return self.builder is not MkdirBuilder and self.has_builder()

    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        return self.fs.variant_dir_target_climb(self, self, [])

    def scanner_key(self):
        """A directory does not get scanned."""
        return None

    def get_text_contents(self):
        """We already emit things in text, so just return the binary
        version."""
        return self.get_contents()

    def get_contents(self):
        """Return content signatures and names of all our children
        separated by new-lines. Ensure that the nodes are sorted."""
        contents = []
        # Sort by name so the signature is stable across runs.
        name_cmp = lambda a, b: cmp(a.name, b.name)
        sorted_children = self.children()[:]
        sorted_children.sort(name_cmp)
        for node in sorted_children:
            contents.append('%s %s\n' % (node.get_csig(), node.name))
        return string.join(contents, '')

    def get_csig(self):
        """Compute the content signature for Directory nodes. In
        general, this is not needed and the content signature is not
        stored in the DirNodeInfo. However, if get_contents on a Dir
        node is called which has a child directory, the child
        directory should return the hash of its contents."""
        contents = self.get_contents()
        return SCons.Util.MD5signature(contents)

    def do_duplicate(self, src):
        pass

    changed_since_last_build = SCons.Node.Node.state_has_changed

    def is_up_to_date(self):
        """If any child is not up-to-date, then this directory isn't,
        either."""
        if self.builder is not MkdirBuilder and not self.exists():
            return 0
        up_to_date = SCons.Node.up_to_date
        for kid in self.children():
            if kid.get_state() > up_to_date:
                return 0
        return 1

    def rdir(self):
        # Return the Repository equivalent of this directory if we
        # don't exist locally; otherwise return self.
        if not self.exists():
            norm_name = _my_normcase(self.name)
            for dir in self.dir.get_all_rdirs():
                try: node = dir.entries[norm_name]
                except KeyError: node = dir.dir_on_disk(self.name)
                if node and node.exists() and \
                   (isinstance(dir, Dir) or isinstance(dir, Entry)):
                    return node
        return self

    def sconsign(self):
        """Return the .sconsign file info for this directory,
        creating it first if necessary."""
        if not self._sconsign:
            import SCons.SConsign
            self._sconsign = SCons.SConsign.ForDirectory(self)
        return self._sconsign

    def srcnode(self):
        """Dir has a special need for srcnode()...if we
        have a srcdir attribute set, then that *is* our srcnode."""
        if self.srcdir:
            return self.srcdir
        return Base.srcnode(self)

    def get_timestamp(self):
        """Return the latest timestamp from among our children"""
        stamp = 0
        for kid in self.children():
            if kid.get_timestamp() > stamp:
                stamp = kid.get_timestamp()
        return stamp

    def entry_abspath(self, name):
        return self.abspath + os.sep + name

    def entry_labspath(self, name):
        # Lookup abspaths always use '/' separators.
        return self.labspath + '/' + name

    def entry_path(self, name):
        return self.path + os.sep + name

    def entry_tpath(self, name):
        return self.tpath + os.sep + name

    def entry_exists_on_disk(self, name):
        # The on-disk directory listing is cached (case-normalized) in
        # self.on_disk_entries on first use.
        try:
            d = self.on_disk_entries
        except AttributeError:
            d = {}
            try:
                entries = os.listdir(self.abspath)
            except OSError:
                pass
            else:
                for entry in map(_my_normcase, entries):
                    d[entry] = True
            self.on_disk_entries = d
        if sys.platform == 'win32':
            name = _my_normcase(name)
            result = d.get(name)
            if result is None:
                # Belt-and-suspenders for Windows:  check directly for
                # 8.3 file names that don't show up in os.listdir().
                result = os.path.exists(self.abspath + os.sep + name)
                d[name] = result
            return result
        else:
            return name in d

    memoizer_counters.append(SCons.Memoize.CountValue('srcdir_list'))

    def srcdir_list(self):
        # Memoized list of source directories (VariantDir sources) for
        # this directory, gathered by climbing the tree.
        try:
            return self._memo['srcdir_list']
        except KeyError:
            pass

        result = []

        dirname = '.'
        dir = self
        while dir:
            if dir.srcdir:
                result.append(dir.srcdir.Dir(dirname))
            dirname = dir.name + os.sep + dirname
            dir = dir.up()

        self._memo['srcdir_list'] = result

        return result

    def srcdir_duplicate(self, name):
        for dir in self.srcdir_list():
            if self.is_under(dir):
                # We shouldn't source from something in the build path;
                # variant_dir is probably under src_dir, in which case
                # we are reflecting.
                break
            if dir.entry_exists_on_disk(name):
                srcnode = dir.Entry(name).disambiguate()
                if self.duplicate:
                    # Duplicate the source file into the variant dir.
                    node = self.Entry(name).disambiguate()
                    node.do_duplicate(srcnode)
                    return node
                else:
                    return srcnode
        return None

    def _srcdir_find_file_key(self, filename):
        return filename

    memoizer_counters.append(SCons.Memoize.CountDict('srcdir_find_file', _srcdir_find_file_key))

    def srcdir_find_file(self, filename):
        # Memoized two-phase search: first this directory's own
        # repository chain, then the repository chains of any source
        # (VariantDir) directories.
        try:
            memo_dict = self._memo['srcdir_find_file']
        except KeyError:
            memo_dict = {}
            self._memo['srcdir_find_file'] = memo_dict
        else:
            try:
                return memo_dict[filename]
            except KeyError:
                pass

        def func(node):
            # Only derived or existing File/Entry nodes count as found.
            if (isinstance(node, File) or isinstance(node, Entry)) and \
               (node.is_derived() or node.exists()):
                return node
            return None

        norm_name = _my_normcase(filename)

        for rdir in self.get_all_rdirs():
            try: node = rdir.entries[norm_name]
            except KeyError: node = rdir.file_on_disk(filename)
            else: node = func(node)
            if node:
                result = (node, self)
                memo_dict[filename] = result
                return result

        for srcdir in self.srcdir_list():
            for rdir in srcdir.get_all_rdirs():
                try: node = rdir.entries[norm_name]
                except KeyError: node = rdir.file_on_disk(filename)
                else: node = func(node)
                if node:
                    result = (File(filename, self, self.fs), srcdir)
                    memo_dict[filename] = result
                    return result

        result = (None, None)
        memo_dict[filename] = result
        return result

    def dir_on_disk(self, name):
        if self.entry_exists_on_disk(name):
            try: return self.Dir(name)
            except TypeError: pass
        node = self.srcdir_duplicate(name)
        if isinstance(node, File):
            return None
        return node

    def file_on_disk(self, name):
        if self.entry_exists_on_disk(name) or \
           diskcheck_rcs(self, name) or \
           diskcheck_sccs(self, name):
            try: return self.File(name)
            except TypeError: pass
        node = self.srcdir_duplicate(name)
        if isinstance(node, Dir):
            return None
        return node

    def walk(self, func, arg):
        """
        Walk this directory tree by calling the specified function
        for each directory in the tree.

        This behaves like the os.path.walk() function, but for in-memory
        Node.FS.Dir objects.  The function takes the same arguments as
        the functions passed to os.path.walk():

                func(arg, dirname, fnames)

        Except that "dirname" will actually be the directory *Node*,
        not the string.  The '.' and '..' entries are excluded from
        fnames.  The fnames list may be modified in-place to filter the
        subdirectories visited or otherwise impose a specific order.
        The "arg" argument is always passed to func() and may be used
        in any way (or ignored, passing None is common).
        """
        entries = self.entries
        names = entries.keys()
        names.remove('.')
        names.remove('..')
        func(arg, self, names)
        select_dirs = lambda n, e=entries: isinstance(e[n], Dir)
        for dirname in filter(select_dirs, names):
            entries[dirname].walk(func, arg)

    def glob(self, pathname, ondisk=True, source=False, strings=False):
        """
        Returns a list of Nodes (or strings) matching a specified
        pathname pattern.

        Pathname patterns follow UNIX shell semantics:  * matches
        any-length strings of any characters, ? matches any character,
        and [] can enclose lists or ranges of characters.  Matches do
        not span directory separators.

        The matches take into account Repositories, returning local
        Nodes if a corresponding entry exists in a Repository (either
        an in-memory Node or something on disk).

        By default, the glob() function matches entries that exist
        on-disk, in addition to in-memory Nodes.  Setting the "ondisk"
        argument to False (or some other non-true value) causes the glob()
        function to only match in-memory Nodes.  The default behavior is
        to return both the on-disk and in-memory Nodes.

        The "source" argument, when true, specifies that corresponding
        source Nodes must be returned if you're globbing in a build
        directory (initialized with VariantDir()).  The default behavior
        is to return Nodes local to the VariantDir().

        The "strings" argument, when true, returns the matches as strings,
        not Nodes.  The strings are path names relative to this directory.

        The underlying algorithm is adapted from the glob.glob() function
        in the Python library (but heavily modified), and uses fnmatch()
        under the covers.
        """
        dirname, basename = os.path.split(pathname)
        if not dirname:
            result = self._glob1(basename, ondisk, source, strings)
            result.sort(lambda a, b: cmp(str(a), str(b)))
            return result
        if has_glob_magic(dirname):
            # The directory part itself contains wildcards; expand it
            # recursively (always as Nodes, not strings).
            list = self.glob(dirname, ondisk, source, strings=False)
        else:
            list = [self.Dir(dirname, create=True)]
        result = []
        for dir in list:
            r = dir._glob1(basename, ondisk, source, strings)
            if strings:
                r = map(lambda x, d=str(dir): os.path.join(d, x), r)
            result.extend(r)
        result.sort(lambda a, b: cmp(str(a), str(b)))
        return result

    def _glob1(self, pattern, ondisk=True, source=False, strings=False):
        """
        Globs for and returns a list of entry names matching a single
        pattern in this directory.

        This searches any repositories and source directories for
        corresponding entries and returns a Node (or string) relative
        to the current directory if an entry is found anywhere.

        TODO: handle pattern with no wildcard
        """
        search_dir_list = self.get_all_rdirs()
        for srcdir in self.srcdir_list():
            search_dir_list.extend(srcdir.get_all_rdirs())

        selfEntry = self.Entry
        names = []
        for dir in search_dir_list:
            # We use the .name attribute from the Node because the keys of
            # the dir.entries dictionary are normalized (that is, all upper
            # case) on case-insensitive systems like Windows.
            #node_names = [ v.name for k, v in dir.entries.items() if k not in ('.', '..') ]
            entry_names = filter(lambda n: n not in ('.', '..'), dir.entries.keys())
            node_names = map(lambda n, e=dir.entries: e[n].name, entry_names)
            names.extend(node_names)
            if not strings:
                # Make sure the working directory (self) actually has
                # entries for all Nodes in repositories or variant dirs.
                for name in node_names: selfEntry(name)
            if ondisk:
                try:
                    disk_names = os.listdir(dir.abspath)
                except os.error:
                    continue
                names.extend(disk_names)
                if not strings:
                    # We're going to return corresponding Nodes in
                    # the local directory, so we need to make sure
                    # those Nodes exist.  We only want to create
                    # Nodes for the entries that will match the
                    # specified pattern, though, which means we
                    # need to filter the list here, even though
                    # the overall list will also be filtered later,
                    # after we exit this loop.
                    if pattern[0] != '.':
                        #disk_names = [ d for d in disk_names if d[0] != '.' ]
                        disk_names = filter(lambda x: x[0] != '.', disk_names)
                    disk_names = fnmatch.filter(disk_names, pattern)
                    dirEntry = dir.Entry
                    for name in disk_names:
                        # Add './' before disk filename so that '#' at
                        # beginning of filename isn't interpreted.
2024 name = './' + name 2025 node = dirEntry(name).disambiguate() 2026 n = selfEntry(name) 2027 if n.__class__ != node.__class__: 2028 n.__class__ = node.__class__ 2029 n._morph() 2030 2031 names = set(names) 2032 if pattern[0] != '.': 2033 #names = [ n for n in names if n[0] != '.' ] 2034 names = filter(lambda x: x[0] != '.', names) 2035 names = fnmatch.filter(names, pattern) 2036 2037 if strings: 2038 return names 2039 2040 #return [ self.entries[_my_normcase(n)] for n in names ] 2041 return map(lambda n, e=self.entries: e[_my_normcase(n)], names) 2042 2043class RootDir(Dir): 2044 """A class for the root directory of a file system. 2045 2046 This is the same as a Dir class, except that the path separator 2047 ('/' or '\\') is actually part of the name, so we don't need to 2048 add a separator when creating the path names of entries within 2049 this directory. 2050 """ 2051 def __init__(self, name, fs): 2052 if __debug__: logInstanceCreation(self, 'Node.FS.RootDir') 2053 # We're going to be our own parent directory (".." entry and .dir 2054 # attribute) so we have to set up some values so Base.__init__() 2055 # won't gag won't it calls some of our methods. 2056 self.abspath = '' 2057 self.labspath = '' 2058 self.path = '' 2059 self.tpath = '' 2060 self.path_elements = [] 2061 self.duplicate = 0 2062 self.root = self 2063 Base.__init__(self, name, self, fs) 2064 2065 # Now set our paths to what we really want them to be: the 2066 # initial drive letter (the name) plus the directory separator, 2067 # except for the "lookup abspath," which does not have the 2068 # drive letter. 
        self.abspath = name + os.sep
        self.labspath = ''
        self.path = name + os.sep
        self.tpath = name + os.sep
        self._morph()

        # Cache of every Node under this root, keyed by normalized
        # absolute path; used by _lookup_abs() below.
        self._lookupDict = {}

        # The // and os.sep + os.sep entries are necessary because
        # os.path.normpath() seems to preserve double slashes at the
        # beginning of a path (presumably for UNC path names), but
        # collapses triple slashes to a single slash.
        self._lookupDict[''] = self
        self._lookupDict['/'] = self
        self._lookupDict['//'] = self
        self._lookupDict[os.sep] = self
        self._lookupDict[os.sep + os.sep] = self

    def must_be_same(self, klass):
        # A root is always a Dir, so a plain Dir request is trivially
        # satisfied; anything else defers to the Base check.
        if klass is Dir:
            return
        Base.must_be_same(self, klass)

    def _lookup_abs(self, p, klass, create=1):
        """
        Fast (?) lookup of a *normalized* absolute path.

        This method is intended for use by internal lookups with
        already-normalized path data.  For general-purpose lookups,
        use the FS.Entry(), FS.Dir() or FS.File() methods.

        The caller is responsible for making sure we're passed a
        normalized absolute path; we merely let Python's dictionary look
        up and return the One True Node.FS object for the path.

        If a Node for the specified "p" doesn't already exist, and
        "create" is specified, the Node may be created after recursive
        invocation to find or create the parent directory or directories.
        """
        k = _my_normcase(p)
        try:
            result = self._lookupDict[k]
        except KeyError:
            if not create:
                raise SCons.Errors.UserError
            # There is no Node for this path name, and we're allowed
            # to create it.
            dir_name, file_name = os.path.split(p)
            dir_node = self._lookup_abs(dir_name, Dir)
            result = klass(file_name, dir_node, self.fs)

            # Double-check on disk (as configured) that the Node we
            # created matches whatever is out there in the real world.
            result.diskcheck_match()

            self._lookupDict[k] = result
            dir_node.entries[_my_normcase(file_name)] = result
            dir_node.implicit = None
        else:
            # There is already a Node for this path name.  Allow it to
            # complain if we were looking for an inappropriate type.
            result.must_be_same(klass)
        return result

    def __str__(self):
        return self.abspath

    def entry_abspath(self, name):
        # The separator is already part of self.abspath for a root.
        return self.abspath + name

    def entry_labspath(self, name):
        return '/' + name

    def entry_path(self, name):
        return self.path + name

    def entry_tpath(self, name):
        return self.tpath + name

    def is_under(self, dir):
        if self is dir:
            return 1
        else:
            return 0

    def up(self):
        # A root directory has no parent.
        return None

    def get_dir(self):
        return None

    def src_builder(self):
        return _null

class FileNodeInfo(SCons.Node.NodeInfoBase):
    current_version_id = 1

    field_list = ['csig', 'timestamp', 'size']

    # This should get reset by the FS initialization.
    fs = None

    def str_to_node(self, s):
        """Convert a path string (as stored in a .sconsign file) back
        into the corresponding Node, resolving relative paths against
        the top-level SConstruct directory."""
        top = self.fs.Top
        root = top.root
        if do_splitdrive:
            drive, s = os.path.splitdrive(s)
            if drive:
                root = self.fs.get_root(drive)
        if not os.path.isabs(s):
            s = top.labspath + '/' + s
        return root._lookup_abs(s, Entry)

class FileBuildInfo(SCons.Node.BuildInfoBase):
    current_version_id = 1

    def convert_to_sconsign(self):
        """
        Converts this FileBuildInfo object for writing to a .sconsign file

        This replaces each Node in our various dependency lists with its
        usual string representation: relative to the top-level SConstruct
        directory, or an absolute path if it's outside.
        """
        if os.sep == '/':
            # POSIX: Node paths are already in the canonical '/' form.
            node_to_str = str
        else:
            # Windows: normalize the stored separator to '/' so .sconsign
            # files are portable across platforms.
            def node_to_str(n):
                try:
                    s = n.path
                except AttributeError:
                    s = str(n)
                else:
                    s = string.replace(s, os.sep, '/')
                return s
        for attr in ['bsources', 'bdepends', 'bimplicit']:
            try:
                val = getattr(self, attr)
            except AttributeError:
                pass
            else:
                setattr(self, attr, map(node_to_str, val))
    def convert_from_sconsign(self, dir, name):
        """
        Converts a newly-read FileBuildInfo object for in-SCons use

        For normal up-to-date checking, we don't have any conversion to
        perform--but we're leaving this method here to make that clear.
        """
        pass
    def prepare_dependencies(self):
        """
        Prepares a FileBuildInfo object for explaining what changed

        The bsources, bdepends and bimplicit lists have all been
        stored on disk as paths relative to the top-level SConstruct
        directory.  Convert the strings to actual Nodes (for use by the
        --debug=explain code and --implicit-cache).
        """
        attrs = [
            ('bsources', 'bsourcesigs'),
            ('bdepends', 'bdependsigs'),
            ('bimplicit', 'bimplicitsigs'),
        ]
        for (nattr, sattr) in attrs:
            try:
                strings = getattr(self, nattr)
                nodeinfos = getattr(self, sattr)
            except AttributeError:
                continue
            nodes = []
            for s, ni in izip(strings, nodeinfos):
                if not isinstance(s, SCons.Node.Node):
                    # The NodeInfo knows how to turn its stored string
                    # back into the corresponding Node.
                    s = ni.str_to_node(s)
                nodes.append(s)
            setattr(self, nattr, nodes)
    def format(self, names=0):
        """Return a human-readable, multi-line summary of this build
        info (used by --debug=explain and sconsign display)."""
        result = []
        bkids = self.bsources + self.bdepends + self.bimplicit
        bkidsigs = self.bsourcesigs + self.bdependsigs + self.bimplicitsigs
        for bkid, bkidsig in izip(bkids, bkidsigs):
            result.append(str(bkid) + ': ' +
                          string.join(bkidsig.format(names=names), ' '))
        result.append('%s [%s]' % (self.bactsig, self.bact))
        return string.join(result, '\n')

class File(Base):
    """A class for files in a file system.
    """

    memoizer_counters = []

    NodeInfo = FileNodeInfo
    BuildInfo = FileBuildInfo

    # Chunk size (in KiB) used when MD5-hashing file contents.
    md5_chunksize = 64

    def diskcheck_match(self):
        diskcheck_match(self, self.isdir,
                        "Directory %s found where file expected.")

    def __init__(self, name, directory, fs):
        if __debug__: logInstanceCreation(self, 'Node.FS.File')
        Base.__init__(self, name, directory, fs)
        self._morph()

    def Entry(self, name):
        """Create an entry node named 'name' relative to
        the directory of this file."""
        return self.dir.Entry(name)

    def Dir(self, name, create=True):
        """Create a directory node named 'name' relative to
        the directory of this file."""
        return self.dir.Dir(name, create=create)

    def Dirs(self, pathlist):
        """Create a list of directories relative to the SConscript
        directory of this file."""
        # TODO(1.5)
        # return [self.Dir(p) for p in pathlist]
        return map(lambda p, s=self: s.Dir(p), pathlist)
2292 def File(self, name): 2293 """Create a file node named 'name' relative to 2294 the directory of this file.""" 2295 return self.dir.File(name) 2296 2297 #def generate_build_dict(self): 2298 # """Return an appropriate dictionary of values for building 2299 # this File.""" 2300 # return {'Dir' : self.Dir, 2301 # 'File' : self.File, 2302 # 'RDirs' : self.RDirs} 2303 2304 def _morph(self): 2305 """Turn a file system node into a File object.""" 2306 self.scanner_paths = {} 2307 if not hasattr(self, '_local'): 2308 self._local = 0 2309 2310 # If there was already a Builder set on this entry, then 2311 # we need to make sure we call the target-decider function, 2312 # not the source-decider. Reaching in and doing this by hand 2313 # is a little bogus. We'd prefer to handle this by adding 2314 # an Entry.builder_set() method that disambiguates like the 2315 # other methods, but that starts running into problems with the 2316 # fragile way we initialize Dir Nodes with their Mkdir builders, 2317 # yet still allow them to be overridden by the user. Since it's 2318 # not clear right now how to fix that, stick with what works 2319 # until it becomes clear... 2320 if self.has_builder(): 2321 self.changed_since_last_build = self.decide_target 2322 2323 def scanner_key(self): 2324 return self.get_suffix() 2325 2326 def get_contents(self): 2327 if not self.rexists(): 2328 return '' 2329 fname = self.rfile().abspath 2330 try: 2331 contents = open(fname, "rb").read() 2332 except EnvironmentError as e: 2333 if not e.filename: 2334 e.filename = fname 2335 raise 2336 return contents 2337 2338 try: 2339 import codecs 2340 except ImportError: 2341 get_text_contents = get_contents 2342 else: 2343 # This attempts to figure out what the encoding of the text is 2344 # based upon the BOM bytes, and then decodes the contents so that 2345 # it's a valid python string. 
2346 def get_text_contents(self): 2347 contents = self.get_contents() 2348 # The behavior of various decode() methods and functions 2349 # w.r.t. the initial BOM bytes is different for different 2350 # encodings and/or Python versions. ('utf-8' does not strip 2351 # them, but has a 'utf-8-sig' which does; 'utf-16' seems to 2352 # strip them; etc.) Just side step all the complication by 2353 # explicitly stripping the BOM before we decode(). 2354 if contents.startswith(codecs.BOM_UTF8): 2355 contents = contents[len(codecs.BOM_UTF8):] 2356 # TODO(2.2): Remove when 2.3 becomes floor. 2357 #contents = contents.decode('utf-8') 2358 contents = my_decode(contents, 'utf-8') 2359 elif contents.startswith(codecs.BOM_UTF16_LE): 2360 contents = contents[len(codecs.BOM_UTF16_LE):] 2361 # TODO(2.2): Remove when 2.3 becomes floor. 2362 #contents = contents.decode('utf-16-le') 2363 contents = my_decode(contents, 'utf-16-le') 2364 elif contents.startswith(codecs.BOM_UTF16_BE): 2365 contents = contents[len(codecs.BOM_UTF16_BE):] 2366 # TODO(2.2): Remove when 2.3 becomes floor. 2367 #contents = contents.decode('utf-16-be') 2368 contents = my_decode(contents, 'utf-16-be') 2369 return contents 2370 2371 def get_content_hash(self): 2372 """ 2373 Compute and return the MD5 hash for this file. 
        """
        if not self.rexists():
            # Nonexistent file hashes as the empty string.
            return SCons.Util.MD5signature('')
        fname = self.rfile().abspath
        try:
            cs = SCons.Util.MD5filesignature(fname,
                     chunksize=SCons.Node.FS.File.md5_chunksize*1024)
        except EnvironmentError as e:
            # Fill in the file name for a more useful error message.
            if not e.filename:
                e.filename = fname
            raise
        return cs


    memoizer_counters.append(SCons.Memoize.CountValue('get_size'))

    def get_size(self):
        """Return (and memoize) the size in bytes of this file, or 0 if
        it doesn't exist anywhere (locally or in a repository)."""
        try:
            return self._memo['get_size']
        except KeyError:
            pass

        if self.rexists():
            size = self.rfile().getsize()
        else:
            size = 0

        self._memo['get_size'] = size

        return size

    memoizer_counters.append(SCons.Memoize.CountValue('get_timestamp'))

    def get_timestamp(self):
        """Return (and memoize) the modification time of this file, or 0
        if it doesn't exist anywhere (locally or in a repository)."""
        try:
            return self._memo['get_timestamp']
        except KeyError:
            pass

        if self.rexists():
            timestamp = self.rfile().getmtime()
        else:
            timestamp = 0

        self._memo['get_timestamp'] = timestamp

        return timestamp

    def store_info(self):
        # Merge our build information into the already-stored entry.
        # This accommodates "chained builds" where a file that's a target
        # in one build (SConstruct file) is a source in a different build.
        # See test/chained-build.py for the use case.
        if do_store_info:
            self.dir.sconsign().store_info(self.name, self)

    # Attributes copied verbatim from an old-format .sconsign entry.
    convert_copy_attrs = [
        'bsources',
        'bimplicit',
        'bdepends',
        'bact',
        'bactsig',
        'ninfo',
    ]


    # Attributes whose old-format "signature" lists must be converted
    # to NodeInfo lists.
    convert_sig_attrs = [
        'bsourcesigs',
        'bimplicitsigs',
        'bdependsigs',
    ]

    def convert_old_entry(self, old_entry):
        # Convert a .sconsign entry from before the Big Signature
        # Refactoring, doing what we can to convert its information
        # to the new .sconsign entry format.
        #
        # The old format looked essentially like this:
        #
        #   BuildInfo
        #       .ninfo (NodeInfo)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bsources
        #       .bsourcesigs ("signature" list)
        #       .bdepends
        #       .bdependsigs ("signature" list)
        #       .bimplicit
        #       .bimplicitsigs ("signature" list)
        #       .bact
        #       .bactsig
        #
        # The new format looks like this:
        #
        #   .ninfo (NodeInfo)
        #       .bsig
        #       .csig
        #       .timestamp
        #       .size
        #   .binfo (BuildInfo)
        #       .bsources
        #       .bsourcesigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bdepends
        #       .bdependsigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bimplicit
        #       .bimplicitsigs (NodeInfo list)
        #           .bsig
        #           .csig
        #           .timestamp
        #           .size
        #       .bact
        #       .bactsig
        #
        # The basic idea of the new structure is that a NodeInfo always
        # holds all available information about the state of a given Node
        # at a certain point in time.  The various .b*sigs lists can just
        # be a list of pointers to the .ninfo attributes of the different
        # dependent nodes, without any copying of information until it's
        # time to pickle it for writing out to a .sconsign file.
        #
        # The complicating issue is that the *old* format only stored one
        # "signature" per dependency, based on however the *last* build
        # was configured.  We don't know from just looking at it whether
        # it was a build signature, a content signature, or a timestamp
        # "signature".  Since we no longer use build signatures, the
        # best we can do is look at the length and if it's thirty two,
        # assume that it was (or might have been) a content signature.
        # If it was actually a build signature, then it will cause a
        # rebuild anyway when it doesn't match the new content signature,
        # but that's probably the best we can do.
        import SCons.SConsign
        new_entry = SCons.SConsign.SConsignEntry()
        new_entry.binfo = self.new_binfo()
        binfo = new_entry.binfo
        for attr in self.convert_copy_attrs:
            try:
                value = getattr(old_entry, attr)
            except AttributeError:
                continue
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        for attr in self.convert_sig_attrs:
            try:
                sig_list = getattr(old_entry, attr)
            except AttributeError:
                continue
            value = []
            for sig in sig_list:
                ninfo = self.new_ninfo()
                if len(sig) == 32:
                    # Thirty-two characters: assume an MD5 content sig.
                    ninfo.csig = sig
                else:
                    ninfo.timestamp = sig
                value.append(ninfo)
            setattr(binfo, attr, value)
            delattr(old_entry, attr)
        return new_entry

    memoizer_counters.append(SCons.Memoize.CountValue('get_stored_info'))

    def get_stored_info(self):
        """Return (and memoize) the .sconsign entry for this file,
        synthesizing an empty one if none is stored."""
        try:
            return self._memo['get_stored_info']
        except KeyError:
            pass

        try:
            sconsign_entry = self.dir.sconsign().get_entry(self.name)
        except (KeyError, EnvironmentError):
            import SCons.SConsign
            sconsign_entry = SCons.SConsign.SConsignEntry()
            sconsign_entry.binfo = self.new_binfo()
            sconsign_entry.ninfo = self.new_ninfo()
        else:
            if isinstance(sconsign_entry, FileBuildInfo):
                # This is a .sconsign file from before the Big Signature
                # Refactoring; convert it as best we can.
                sconsign_entry = self.convert_old_entry(sconsign_entry)
            try:
                # Build signatures are no longer used; drop any stale one.
                delattr(sconsign_entry.ninfo, 'bsig')
            except AttributeError:
                pass

        self._memo['get_stored_info'] = sconsign_entry

        return sconsign_entry

    def get_stored_implicit(self):
        """Return the implicit dependency Nodes recorded in the
        .sconsign file, or None if none were stored."""
        binfo = self.get_stored_info().binfo
        binfo.prepare_dependencies()
        try: return binfo.bimplicit
        except AttributeError: return None

    def rel_path(self, other):
        return self.dir.rel_path(other)

    def _get_found_includes_key(self, env, scanner, path):
        # Memoization key for get_found_includes() below.
        return (id(env), id(scanner), path)

    memoizer_counters.append(SCons.Memoize.CountDict('get_found_includes', _get_found_includes_key))

    def get_found_includes(self, env, scanner, path):
        """Return the included implicit dependencies in this file.
        Cache results so we only scan the file once per path
        regardless of how many times this information is requested.
        """
        memo_key = (id(env), id(scanner), path)
        try:
            memo_dict = self._memo['get_found_includes']
        except KeyError:
            memo_dict = {}
            self._memo['get_found_includes'] = memo_dict
        else:
            try:
                return memo_dict[memo_key]
            except KeyError:
                pass

        if scanner:
            # result = [n.disambiguate() for n in scanner(self, env, path)]
            result = scanner(self, env, path)
            result = map(lambda N: N.disambiguate(), result)
        else:
            result = []

        memo_dict[memo_key] = result

        return result

    def _createDir(self):
        # ensure that the directories for this node are
        # created.
        self.dir._create()

    def push_to_cache(self):
        """Try to push the node into a cache
        """
        # This should get called before the Nodes' .built() method is
        # called, which would clear the build signature if the file has
        # a source scanner.
        #
        # We have to clear the local memoized values *before* we push
        # the node to cache so that the memoization of the self.exists()
        # return value doesn't interfere.
        if self.nocache:
            return
        self.clear_memoized_values()
        if self.exists():
            self.get_build_env().get_CacheDir().push(self)

    def retrieve_from_cache(self):
        """Try to retrieve the node's content from a cache

        This method is called from multiple threads in a parallel build,
        so only do thread safe stuff here. Do thread unsafe stuff in
        built().

        Returns true iff the node was successfully retrieved.
        """
        if self.nocache:
            return None
        if not self.is_derived():
            return None
        return self.get_build_env().get_CacheDir().retrieve(self)

    def visited(self):
        """Record up-to-date node information after this Node has been
        visited (whether or not it needed building)."""
        if self.exists():
            self.get_build_env().get_CacheDir().push_if_forced(self)

        ninfo = self.get_ninfo()

        csig = self.get_max_drift_csig()
        if csig:
            ninfo.csig = csig

        ninfo.timestamp = self.get_timestamp()
        ninfo.size = self.get_size()

        if not self.has_builder():
            # This is a source file, but it might have been a target file
            # in another build that included more of the DAG.  Copy
            # any build information that's stored in the .sconsign file
            # into our binfo object so it doesn't get lost.
            old = self.get_stored_info()
            self.get_binfo().__dict__.update(old.binfo.__dict__)

        self.store_info()

    def find_src_builder(self):
        """Find (and attach) a transparent source-code builder (SCCS or
        RCS) for this file, or None if it exists or none applies."""
        if self.rexists():
            return None
        scb = self.dir.src_builder()
        if scb is _null:
            if diskcheck_sccs(self.dir, self.name):
                scb = get_DefaultSCCSBuilder()
            elif diskcheck_rcs(self.dir, self.name):
                scb = get_DefaultRCSBuilder()
            else:
                scb = None
        if scb is not None:
            try:
                b = self.builder
            except AttributeError:
                b = None
            if b is None:
                # Attach it so the normal build machinery uses it.
                self.builder_set(scb)
        return scb

    def has_src_builder(self):
        """Return whether this Node has a source builder or not.

        If this Node doesn't have an explicit source code builder, this
        is where we figure out, on the fly, if there's a transparent
        source code builder for it.

        Note that if we found a source builder, we also set the
        self.builder attribute, so that all of the methods that actually
        *build* this file don't have to do anything different.
        """
        try:
            scb = self.sbuilder
        except AttributeError:
            scb = self.sbuilder = self.find_src_builder()
        return scb is not None

    def alter_targets(self):
        """Return any corresponding targets in a variant directory.
        """
        if self.is_derived():
            return [], None
        return self.fs.variant_dir_target_climb(self, self.dir, [self.name])

    def _rmv_existing(self):
        # Remove the already-existing file so it can be rebuilt.
        self.clear_memoized_values()
        e = Unlink(self, [], None)
        if isinstance(e, SCons.Errors.BuildError):
            raise e

    #
    # Taskmaster interface subsystem
    #

    def make_ready(self):
        self.has_src_builder()
        self.get_binfo()

    def prepare(self):
        """Prepare for this file to be created."""
        SCons.Node.Node.prepare(self)

        if self.get_state() != SCons.Node.up_to_date:
            if self.exists():
                if self.is_derived() and not self.precious:
                    self._rmv_existing()
            else:
                try:
                    self._createDir()
                except SCons.Errors.StopError as drive:
                    desc = "No drive `%s' for target `%s'." % (drive, self)
                    raise SCons.Errors.StopError(desc)

    #
    #
    #

    def remove(self):
        """Remove this file."""
        if self.exists() or self.islink():
            self.fs.unlink(self.path)
            return 1
        return None

    def do_duplicate(self, src):
        """Copy/link 'src' (a source Node) into this variant-dir file."""
        self._createDir()
        Unlink(self, None, None)
        e = Link(self, src, None)
        if isinstance(e, SCons.Errors.BuildError):
            desc = "Cannot duplicate `%s' in `%s': %s." % (src.path, self.dir.path, e.errstr)
            raise SCons.Errors.StopError(desc)
        self.linked = 1
        # The Link() action may or may not have actually
        # created the file, depending on whether the -n
        # option was used or not.  Delete the _exists and
        # _rexists attributes so they can be reevaluated.
        self.clear()

    memoizer_counters.append(SCons.Memoize.CountValue('exists'))

    def exists(self):
        try:
            return self._memo['exists']
        except KeyError:
            pass
        # Duplicate from source path if we are set up to do this.
        if self.duplicate and not self.is_derived() and not self.linked:
            src = self.srcnode()
            if src is not self:
                # At this point, src is meant to be copied in a variant directory.
                src = src.rfile()
                if src.abspath != self.abspath:
                    if src.exists():
                        self.do_duplicate(src)
                        # Can't return 1 here because the duplication might
                        # not actually occur if the -n option is being used.
                    else:
                        # The source file does not exist.  Make sure no old
                        # copy remains in the variant directory.
                        if Base.exists(self) or self.islink():
                            self.fs.unlink(self.path)
                        # Return None explicitly because the Base.exists() call
                        # above will have cached its value if the file existed.
                        self._memo['exists'] = None
                        return None
        result = Base.exists(self)
        self._memo['exists'] = result
        return result

    #
    # SIGNATURE SUBSYSTEM
    #

    def get_max_drift_csig(self):
        """
        Returns the content signature currently stored for this node
        if it's been unmodified longer than the max_drift value, or the
        max_drift value is 0.  Returns None otherwise.
        """
        old = self.get_stored_info()
        mtime = self.get_timestamp()

        max_drift = self.fs.max_drift
        if max_drift > 0:
            if (time.time() - mtime) > max_drift:
                try:
                    n = old.ninfo
                    if n.timestamp and n.csig and n.timestamp == mtime:
                        return n.csig
                except AttributeError:
                    pass
        elif max_drift == 0:
            try:
                return old.ninfo.csig
            except AttributeError:
                pass

        return None

    def get_csig(self):
        """
        Generate a node's content signature, the digested signature
        of its content.
2835 2836 node - the node 2837 cache - alternate node to use for the signature cache 2838 returns - the content signature 2839 """ 2840 ninfo = self.get_ninfo() 2841 try: 2842 return ninfo.csig 2843 except AttributeError: 2844 pass 2845 2846 csig = self.get_max_drift_csig() 2847 if csig is None: 2848 2849 try: 2850 if self.get_size() < SCons.Node.FS.File.md5_chunksize: 2851 contents = self.get_contents() 2852 else: 2853 csig = self.get_content_hash() 2854 except IOError: 2855 # This can happen if there's actually a directory on-disk, 2856 # which can be the case if they've disabled disk checks, 2857 # or if an action with a File target actually happens to 2858 # create a same-named directory by mistake. 2859 csig = '' 2860 else: 2861 if not csig: 2862 csig = SCons.Util.MD5signature(contents) 2863 2864 ninfo.csig = csig 2865 2866 return csig 2867 2868 # 2869 # DECISION SUBSYSTEM 2870 # 2871 2872 def builder_set(self, builder): 2873 SCons.Node.Node.builder_set(self, builder) 2874 self.changed_since_last_build = self.decide_target 2875 2876 def changed_content(self, target, prev_ni): 2877 cur_csig = self.get_csig() 2878 try: 2879 return cur_csig != prev_ni.csig 2880 except AttributeError: 2881 return 1 2882 2883 def changed_state(self, target, prev_ni): 2884 return self.state != SCons.Node.up_to_date 2885 2886 def changed_timestamp_then_content(self, target, prev_ni): 2887 if not self.changed_timestamp_match(target, prev_ni): 2888 try: 2889 self.get_ninfo().csig = prev_ni.csig 2890 except AttributeError: 2891 pass 2892 return False 2893 return self.changed_content(target, prev_ni) 2894 2895 def changed_timestamp_newer(self, target, prev_ni): 2896 try: 2897 return self.get_timestamp() > target.get_timestamp() 2898 except AttributeError: 2899 return 1 2900 2901 def changed_timestamp_match(self, target, prev_ni): 2902 try: 2903 return self.get_timestamp() != prev_ni.timestamp 2904 except AttributeError: 2905 return 1 2906 2907 def decide_source(self, target, prev_ni): 
2908 return target.get_build_env().decide_source(self, target, prev_ni) 2909 2910 def decide_target(self, target, prev_ni): 2911 return target.get_build_env().decide_target(self, target, prev_ni) 2912 2913 # Initialize this Node's decider function to decide_source() because 2914 # every file is a source file until it has a Builder attached... 2915 changed_since_last_build = decide_source 2916 2917 def is_up_to_date(self): 2918 T = 0 2919 if T: Trace('is_up_to_date(%s):' % self) 2920 if not self.exists(): 2921 if T: Trace(' not self.exists():') 2922 # The file doesn't exist locally... 2923 r = self.rfile() 2924 if r != self: 2925 # ...but there is one in a Repository... 2926 if not self.changed(r): 2927 if T: Trace(' changed(%s):' % r) 2928 # ...and it's even up-to-date... 2929 if self._local: 2930 # ...and they'd like a local copy. 2931 e = LocalCopy(self, r, None) 2932 if isinstance(e, SCons.Errors.BuildError): 2933 raise 2934 self.store_info() 2935 if T: Trace(' 1\n') 2936 return 1 2937 self.changed() 2938 if T: Trace(' None\n') 2939 return None 2940 else: 2941 r = self.changed() 2942 if T: Trace(' self.exists(): %s\n' % r) 2943 return not r 2944 2945 memoizer_counters.append(SCons.Memoize.CountValue('rfile')) 2946 2947 def rfile(self): 2948 try: 2949 return self._memo['rfile'] 2950 except KeyError: 2951 pass 2952 result = self 2953 if not self.exists(): 2954 norm_name = _my_normcase(self.name) 2955 for dir in self.dir.get_all_rdirs(): 2956 try: node = dir.entries[norm_name] 2957 except KeyError: node = dir.file_on_disk(self.name) 2958 if node and node.exists() and \ 2959 (isinstance(node, File) or isinstance(node, Entry) \ 2960 or not node.is_derived()): 2961 result = node 2962 # Copy over our local attributes to the repository 2963 # Node so we identify shared object files in the 2964 # repository and don't assume they're static. 
2965 # 2966 # This isn't perfect; the attribute would ideally 2967 # be attached to the object in the repository in 2968 # case it was built statically in the repository 2969 # and we changed it to shared locally, but that's 2970 # rarely the case and would only occur if you 2971 # intentionally used the same suffix for both 2972 # shared and static objects anyway. So this 2973 # should work well in practice. 2974 result.attributes = self.attributes 2975 break 2976 self._memo['rfile'] = result 2977 return result 2978 2979 def rstr(self): 2980 return str(self.rfile()) 2981 2982 def get_cachedir_csig(self): 2983 """ 2984 Fetch a Node's content signature for purposes of computing 2985 another Node's cachesig. 2986 2987 This is a wrapper around the normal get_csig() method that handles 2988 the somewhat obscure case of using CacheDir with the -n option. 2989 Any files that don't exist would normally be "built" by fetching 2990 them from the cache, but the normal get_csig() method will try 2991 to open up the local file, which doesn't exist because the -n 2992 option meant we didn't actually pull the file from cachedir. 2993 But since the file *does* actually exist in the cachedir, we 2994 can use its contents for the csig. 2995 """ 2996 try: 2997 return self.cachedir_csig 2998 except AttributeError: 2999 pass 3000 3001 cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self) 3002 if not self.exists() and cachefile and os.path.exists(cachefile): 3003 self.cachedir_csig = SCons.Util.MD5filesignature(cachefile, \ 3004 SCons.Node.FS.File.md5_chunksize * 1024) 3005 else: 3006 self.cachedir_csig = self.get_csig() 3007 return self.cachedir_csig 3008 3009 def get_cachedir_bsig(self): 3010 try: 3011 return self.cachesig 3012 except AttributeError: 3013 pass 3014 3015 # Add the path to the cache signature, because multiple 3016 # targets built by the same action will all have the same 3017 # build signature, and we have to differentiate them somehow. 
        children = self.children()
        executor = self.get_executor()
        # sigs = [n.get_cachedir_csig() for n in children]
        sigs = map(lambda n: n.get_cachedir_csig(), children)
        # Include the action contents and this node's path so that
        # multiple targets of one action get distinct cache entries.
        sigs.append(SCons.Util.MD5signature(executor.get_contents()))
        sigs.append(self.path)
        result = self.cachesig = SCons.Util.MD5collect(sigs)
        return result


default_fs = None

def get_default_fs():
    """Return the canonical default FS, creating it on first use."""
    global default_fs
    if not default_fs:
        default_fs = FS()
    return default_fs

class FileFinder:
    """
    """
    if SCons.Memoize.use_memoizer:
        __metaclass__ = SCons.Memoize.Memoized_Metaclass

    memoizer_counters = []

    def __init__(self):
        # Memoization dictionary for find_file().
        self._memo = {}

    def filedir_lookup(self, p, fd=None):
        """
        A helper method for find_file() that looks up a directory for
        a file we're trying to find.  This only creates the Dir Node if
        it exists on-disk, since if the directory doesn't exist we know
        we won't find any files in it...  :-)

        It would be more compact to just use this as a nested function
        with a default keyword argument (see the commented-out version
        below), but that doesn't work unless you have nested scopes,
        so we define it here just so this works under Python 1.5.2.
        """
        if fd is None:
            fd = self.default_filedir
        dir, name = os.path.split(fd)
        drive, d = os.path.splitdrive(dir)
        if not name and d[:1] in ('/', os.sep):
            # Reached the filesystem root.
            #return p.fs.get_root(drive).dir_on_disk(name)
            return p.fs.get_root(drive)
        if dir:
            # Recursively look up the parent directory first.
            p = self.filedir_lookup(p, dir)
            if not p:
                return None
        norm_name = _my_normcase(name)
        try:
            node = p.entries[norm_name]
        except KeyError:
            return p.dir_on_disk(name)
        if isinstance(node, Dir):
            return node
        if isinstance(node, Entry):
            node.must_be_same(Dir)
            return node
        return None

    def _find_file_key(self, filename, paths, verbose=None):
        # Memoization key for find_file(); 'verbose' doesn't affect
        # the result, only the logging, so it's excluded.
        return (filename, paths)

    memoizer_counters.append(SCons.Memoize.CountDict('find_file', _find_file_key))

    def find_file(self, filename, paths, verbose=None):
        """
        find_file(str, [Dir()]) -> [nodes]

        filename - a filename to find
        paths - a list of directory path *nodes* to search in.  Can be
                represented as a list, a tuple, or a callable that is
                called with no arguments and returns the list or tuple.

        returns - the node created from the found file.

        Find a node corresponding to either a derived file or a file
        that exists already.

        Only the first file found is returned, and none is returned
        if no file is found.
3103 """ 3104 memo_key = self._find_file_key(filename, paths) 3105 try: 3106 memo_dict = self._memo['find_file'] 3107 except KeyError: 3108 memo_dict = {} 3109 self._memo['find_file'] = memo_dict 3110 else: 3111 try: 3112 return memo_dict[memo_key] 3113 except KeyError: 3114 pass 3115 3116 if verbose and not callable(verbose): 3117 if not SCons.Util.is_String(verbose): 3118 verbose = "find_file" 3119 verbose = ' %s: ' % verbose 3120 verbose = lambda s, v=verbose: sys.stdout.write(v + s) 3121 3122 filedir, filename = os.path.split(filename) 3123 if filedir: 3124 # More compact code that we can't use until we drop 3125 # support for Python 1.5.2: 3126 # 3127 #def filedir_lookup(p, fd=filedir): 3128 # """ 3129 # A helper function that looks up a directory for a file 3130 # we're trying to find. This only creates the Dir Node 3131 # if it exists on-disk, since if the directory doesn't 3132 # exist we know we won't find any files in it... :-) 3133 # """ 3134 # dir, name = os.path.split(fd) 3135 # if dir: 3136 # p = filedir_lookup(p, dir) 3137 # if not p: 3138 # return None 3139 # norm_name = _my_normcase(name) 3140 # try: 3141 # node = p.entries[norm_name] 3142 # except KeyError: 3143 # return p.dir_on_disk(name) 3144 # if isinstance(node, Dir): 3145 # return node 3146 # if isinstance(node, Entry): 3147 # node.must_be_same(Dir) 3148 # return node 3149 # if isinstance(node, Dir) or isinstance(node, Entry): 3150 # return node 3151 # return None 3152 #paths = filter(None, map(filedir_lookup, paths)) 3153 3154 self.default_filedir = filedir 3155 paths = filter(None, map(self.filedir_lookup, paths)) 3156 3157 result = None 3158 for dir in paths: 3159 if verbose: 3160 verbose("looking for '%s' in '%s' ...\n" % (filename, dir)) 3161 node, d = dir.srcdir_find_file(filename) 3162 if node: 3163 if verbose: 3164 verbose("... 
FOUND '%s' in '%s'\n" % (filename, d)) 3165 result = node 3166 break 3167 3168 memo_dict[memo_key] = result 3169 3170 return result 3171 3172find_file = FileFinder().find_file 3173 3174 3175def invalidate_node_memos(targets): 3176 """ 3177 Invalidate the memoized values of all Nodes (files or directories) 3178 that are associated with the given entries. Has been added to 3179 clear the cache of nodes affected by a direct execution of an 3180 action (e.g. Delete/Copy/Chmod). Existing Node caches become 3181 inconsistent if the action is run through Execute(). The argument 3182 `targets` can be a single Node object or filename, or a sequence 3183 of Nodes/filenames. 3184 """ 3185 from traceback import extract_stack 3186 3187 # First check if the cache really needs to be flushed. Only 3188 # actions run in the SConscript with Execute() seem to be 3189 # affected. XXX The way to check if Execute() is in the stacktrace 3190 # is a very dirty hack and should be replaced by a more sensible 3191 # solution. 3192 for f in extract_stack(): 3193 if f[2] == 'Execute' and f[0][-14:] == 'Environment.py': 3194 break 3195 else: 3196 # Dont have to invalidate, so return 3197 return 3198 3199 if not SCons.Util.is_List(targets): 3200 targets = [targets] 3201 3202 for entry in targets: 3203 # If the target is a Node object, clear the cache. If it is a 3204 # filename, look up potentially existing Node object first. 3205 try: 3206 entry.clear_memoized_values() 3207 except AttributeError: 3208 # Not a Node object, try to look up Node by filename. XXX 3209 # This creates Node objects even for those filenames which 3210 # do not correspond to an existing Node object. 3211 node = get_default_fs().Entry(entry) 3212 if node: 3213 node.clear_memoized_values() 3214 3215# Local Variables: 3216# tab-width:4 3217# indent-tabs-mode:nil 3218# End: 3219# vim: set expandtab tabstop=4 shiftwidth=4: 3220