# MIT License
#
# Copyright The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

"""Various SCons utility functions."""

import copy
import hashlib
import os
import pprint
import re
import sys
from collections import UserDict, UserList, UserString, OrderedDict
from collections.abc import MappingView
from contextlib import suppress
from types import MethodType, FunctionType
from typing import Optional, Union

# Note: Util module cannot import other bits of SCons globally without getting
# into import loops. Both the below modules import SCons.Util early on.
# --> SCons.Warnings
# --> SCons.Errors
# Thus the local imports, which are annotated for pylint to show we mean it.


PYPY = hasattr(sys, 'pypy_translation_info')

# this string will be hashed if a Node refers to a file that doesn't exist
# in order to distinguish from a file that exists but is empty.
NOFILE = "SCONS_MAGIC_MISSING_FILE_STRING"

# unused?
def dictify(keys, values, result=None) -> dict:
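    """Build a dict by pairing each of `keys` with the matching item in `values`.

    If a `result` dict is supplied, it is updated in place and returned;
    otherwise a new dict is created.

    >>> dictify(['a', 'b'], [1, 2])
    {'a': 1, 'b': 2}
    """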
    if result is None:
        result = {}
    result.update(dict(zip(keys, values)))
    return result

_ALTSEP = os.altsep
if _ALTSEP is None and sys.platform == 'win32':
    # My ActivePython 2.0.1 doesn't set os.altsep!  What gives?
    _ALTSEP = '/'
if _ALTSEP:
    def rightmost_separator(path, sep):
        return max(path.rfind(sep), path.rfind(_ALTSEP))
else:
    def rightmost_separator(path, sep):
        return path.rfind(sep)

# First two from the Python Cookbook, just for completeness.
# (Yeah, yeah, YAGNI...)
def containsAny(s, pat) -> bool:
    """Check whether string `s` contains ANY of the items in `pat`."""
    for c in pat:
        if c in s:
            return True
    return False

def containsAll(s, pat) -> bool:
    """Check whether string `s` contains ALL of the items in `pat`."""
    for c in pat:
        if c not in s:
            return False
    return True

def containsOnly(s, pat) -> bool:
    """Check whether string `s` contains ONLY items in `pat`."""
    for c in s:
        if c not in pat:
            return False
    return True


# TODO: Verify this method is STILL faster than os.path.splitext
def splitext(path) -> tuple:
    """Split `path` into a (root, ext) pair.

    Same as :mod:`os.path.splitext` but faster.
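
    For example:

    >>> splitext('foo.c')
    ('foo', '.c')
    >>> splitext('foo.123')  # an all-digit extension is not treated as real
    ('foo.123', '')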
    """
    sep = rightmost_separator(path, os.sep)
    dot = path.rfind('.')
    # An ext is only real if it has at least one non-digit char
    if dot > sep and not containsOnly(path[dot:], "0123456789."):
        return path[:dot], path[dot:]

    return path, ""

def updrive(path) -> str:
    """Make the drive letter (if any) upper case.

    This is useful because Windows is inconsistent on the case
    of the drive letter, which can cause inconsistencies when
    calculating command signatures.
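
    On Windows, for example, ``updrive("c:/temp/foo")`` returns
    ``"C:/temp/foo"``; on platforms without drive letters the path is
    returned unchanged.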
    """
    drive, rest = os.path.splitdrive(path)
    if drive:
        path = drive.upper() + rest
    return path

class NodeList(UserList):
    """A list of Nodes with special attribute retrieval.

    This class is almost exactly like a regular list of Nodes
    (actually it can hold any object), with one important difference.
    If you try to get an attribute from this list, it will return that
    attribute from every item in the list.  For example:

    >>> someList = NodeList(['  foo  ', '  bar  '])
    >>> someList.strip()
    ['foo', 'bar']
    """

    def __bool__(self):
        return bool(self.data)

    def __str__(self):
        return ' '.join(map(str, self.data))

    def __iter__(self):
        return iter(self.data)

    def __call__(self, *args, **kwargs):
        result = [x(*args, **kwargs) for x in self.data]
        return self.__class__(result)

    def __getattr__(self, name):
        result = [getattr(x, name) for x in self.data]
        return self.__class__(result)

    def __getitem__(self, index):
        """
        Under Python 2 this came for free, but under Python 3 slicing a
        NodeList returned a plain list, which broke slicing a NodeList and
        then referring to properties and methods on the contained objects.
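
        For example, a slice stays a NodeList:

        >>> nl = NodeList(['a', 'b', 'c', 'd'])
        >>> nl[1:3]
        ['b', 'c']
        >>> type(nl[1:3]) is NodeList
        True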
        """
#        return self.__class__(self.data[index])

        if isinstance(index, slice):
            # Expand the slice object using range()
            # limited by number of items in self.data
            indices = index.indices(len(self.data))
            return self.__class__([self[x] for x in range(*indices)])

        # Return one item of the list
        return self.data[index]


_get_env_var = re.compile(r'^\$([_a-zA-Z]\w*|{[_a-zA-Z]\w*})$')

def get_environment_var(varstr) -> Optional[str]:
    """Return undecorated construction variable string.

    Determine if `varstr` looks like a reference
    to a single environment variable, like `"$FOO"` or `"${FOO}"`.
    If so, return that variable with no decorations, like `"FOO"`.
    If not, return `None`.
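
    For example:

    >>> get_environment_var("$FOO")
    'FOO'
    >>> get_environment_var("${BAR}")
    'BAR'
    >>> get_environment_var("$FOO$BAR") is None
    True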
    """

    mo = _get_env_var.match(to_String(varstr))
    if mo:
        var = mo.group(1)
        if var[0] == '{':
            return var[1:-1]
        return var

    return None


class DisplayEngine:
    """A callable class used to display SCons messages."""

    print_it = True

    def __call__(self, text, append_newline=1):
        if not self.print_it:
            return

        if append_newline:
            text = text + '\n'

        try:
            sys.stdout.write(str(text))
        except IOError:
            # Stdout might be connected to a pipe that has been closed
            # by now. The most likely reason for the pipe being closed
            # is that the user has pressed ctrl-c. If this is the case,
            # then SCons is currently shutting down. We therefore ignore
            # IOErrors here so that SCons can continue and shut down
            # properly so that the .sconsign is correctly written
            # before SCons exits.
            pass

    def set_mode(self, mode):
        self.print_it = mode


# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value)
def render_tree(root, child_func, prune=0, margin=[0], visited=None):
    """Render a tree of nodes into an ASCII tree view.

    Args:
        root: the root node of the tree
        child_func: the function called to get the children of a node
        prune: don't visit the same node twice
        margin: the format of the left margin to use for children of `root`.
          1 results in a pipe, and 0 results in no pipe.
        visited: a dictionary of visited nodes in the current branch if
          `prune` is 0, or in the whole tree if `prune` is 1.
    """

    rname = str(root)

    # Initialize 'visited' dict, if required
    if visited is None:
        visited = {}

    children = child_func(root)
    retval = ""
    for pipe in margin[:-1]:
        if pipe:
            retval = retval + "| "
        else:
            retval = retval + "  "

    if rname in visited:
        return retval + "+-[" + rname + "]\n"

    retval = retval + "+-" + rname + "\n"
    if not prune:
        visited = copy.copy(visited)
    visited[rname] = True

    for i, child in enumerate(children):
        margin.append(i < len(children)-1)
        retval = retval + render_tree(child, child_func, prune, margin, visited)
        margin.pop()

    return retval

def IDX(n) -> bool:
    """Generate an index into strings from the tree legends.

    These are always a choice between two, so bool works fine.
    """
    return bool(n)

# unicode line drawing chars:
BOX_HORIZ = chr(0x2500)  # '─'
BOX_VERT = chr(0x2502)  # '│'
BOX_UP_RIGHT = chr(0x2514)  # '└'
BOX_DOWN_RIGHT = chr(0x250c)  # '┌'
BOX_DOWN_LEFT = chr(0x2510)   # '┐'
BOX_UP_LEFT = chr(0x2518)  # '┘'
BOX_VERT_RIGHT = chr(0x251c)  # '├'
BOX_HORIZ_DOWN = chr(0x252c)  # '┬'


# TODO: W0102: Dangerous default value [] as argument (dangerous-default-value)
def print_tree(
    root,
    child_func,
    prune=0,
    showtags=False,
    margin=[0],
    visited=None,
    lastChild=False,
    singleLineDraw=False,
):
    """Print a tree of nodes.

    This is like :func:`render_tree`, except it prints lines directly instead
    of creating a string representation in memory, so that huge trees can
    be handled.

    Args:
        root: the root node of the tree
        child_func: the function called to get the children of a node
        prune: don't visit the same node twice
        showtags: print status information to the left of each node line
        margin: the format of the left margin to use for children of `root`.
          1 results in a pipe, and 0 results in no pipe.
        visited: a dictionary of visited nodes in the current branch if
          `prune` is 0, or in the whole tree if `prune` is 1.
        singleLineDraw: use line-drawing characters rather than ASCII.
    """

    rname = str(root)

    # Initialize 'visited' dict, if required
    if visited is None:
        visited = {}

    if showtags:

        if showtags == 2:
            legend = (' E         = exists\n' +
                      '  R        = exists in repository only\n' +
                      '   b       = implicit builder\n' +
                      '   B       = explicit builder\n' +
                      '    S      = side effect\n' +
                      '     P     = precious\n' +
                      '      A    = always build\n' +
                      '       C   = current\n' +
                      '        N  = no clean\n' +
                      '         H = no cache\n' +
                      '\n')
            sys.stdout.write(legend)

        tags = [
            '[',
            ' E'[IDX(root.exists())],
            ' R'[IDX(root.rexists() and not root.exists())],
            ' BbB'[
                [0, 1][IDX(root.has_explicit_builder())] +
                [0, 2][IDX(root.has_builder())]
            ],
            ' S'[IDX(root.side_effect)],
            ' P'[IDX(root.precious)],
            ' A'[IDX(root.always_build)],
            ' C'[IDX(root.is_up_to_date())],
            ' N'[IDX(root.noclean)],
            ' H'[IDX(root.nocache)],
            ']'
        ]

    else:
        tags = []

    def MMM(m):
        if singleLineDraw:
            return ["  ", BOX_VERT + " "][m]

        return ["  ", "| "][m]

    margins = list(map(MMM, margin[:-1]))
    children = child_func(root)
    cross = "+-"
    if singleLineDraw:
        cross = BOX_VERT_RIGHT + BOX_HORIZ   # sign used to point to the leaf.
        # check if this is the last leaf of the branch
        if lastChild:
            # if this is the last leaf, then terminate:
            cross = BOX_UP_RIGHT + BOX_HORIZ  # sign for the last leaf

        # if this branch has children then split it
        if children:
            # if it's a leaf:
            if prune and rname in visited and children:
                cross += BOX_HORIZ
            else:
                cross += BOX_HORIZ_DOWN

    if prune and rname in visited and children:
        sys.stdout.write(''.join(tags + margins + [cross,'[', rname, ']']) + '\n')
        return

    sys.stdout.write(''.join(tags + margins + [cross, rname]) + '\n')

    visited[rname] = 1

    # if this item has children:
    if children:
        margin.append(1)  # Initialize margin with 1 for vertical bar.
        idx = IDX(showtags)
        _child = 0  # Initialize this for the first child.
        for C in children[:-1]:
            _child = _child + 1  # number the children
            print_tree(
                C,
                child_func,
                prune,
                idx,
                margin,
                visited,
                (len(children) - _child) <= 0,
                singleLineDraw,
            )
        # Margins get a space (index 0) because we have arrived at the last child.
        margin[-1] = 0
        # The last child is printed with lastChild=True so its branch is terminated.
        print_tree(children[-1], child_func, prune, idx, margin, visited, True, singleLineDraw)
        margin.pop()  # destroy the last margin added


# Functions for deciding if things are like various types, mainly to
# handle UserDict, UserList and UserString like their underlying types.
#
# Yes, all of this manual testing breaks polymorphism, and the real
# Pythonic way to do all of this would be to just try it and handle the
# exception, but handling the exception when it's not the right type is
# often too slow.

# We are using the following trick to speed up these
# functions. Default arguments are used to take a snapshot of
# the global functions and constants used by these functions. This
# transforms global variable accesses into local variable accesses
# (i.e. LOAD_FAST instead of LOAD_GLOBAL).
# Since checkers dislike this, it's now annotated for pylint (mostly for
# other readers of this code) to show we're doing this intentionally.
# TODO: PY3 check these are still valid choices for all of these funcs.

DictTypes = (dict, UserDict)
ListTypes = (list, UserList)

# Handle getting dictionary views.
SequenceTypes = (list, tuple, UserList, MappingView)

# Note that profiling data shows a speed-up when comparing
# explicitly with str instead of simply comparing
# with basestring. (at least on Python 2.5.1)
# TODO: PY3 check this benchmarking is still correct.
StringTypes = (str, UserString)

# Empirically, it is faster to check explicitly for str than for basestring.
BaseStringTypes = str

def is_Dict(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, isinstance=isinstance, DictTypes=DictTypes
) -> bool:
    return isinstance(obj, DictTypes)


def is_List(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, isinstance=isinstance, ListTypes=ListTypes
) -> bool:
    return isinstance(obj, ListTypes)


def is_Sequence(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, isinstance=isinstance, SequenceTypes=SequenceTypes
) -> bool:
    return isinstance(obj, SequenceTypes)


def is_Tuple(  # pylint: disable=redefined-builtin
    obj, isinstance=isinstance, tuple=tuple
) -> bool:
    return isinstance(obj, tuple)


def is_String(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, isinstance=isinstance, StringTypes=StringTypes
) -> bool:
    return isinstance(obj, StringTypes)


def is_Scalar(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, isinstance=isinstance, StringTypes=StringTypes, SequenceTypes=SequenceTypes
) -> bool:

    # Profiling shows that there is an impressive speed-up of 2x
    # when explicitly checking for strings instead of just not
    # sequence when the argument (i.e. obj) is already a string.
    # But, if obj is not a string then it is twice as fast to
    # check only for 'not sequence'. The following code therefore
    # assumes that the obj argument is a string most of the time.
    return isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes)


def do_flatten(
    sequence,
    result,
    isinstance=isinstance,
    StringTypes=StringTypes,
    SequenceTypes=SequenceTypes,
):  # pylint: disable=redefined-outer-name,redefined-builtin
    for item in sequence:
        if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
            result.append(item)
        else:
            do_flatten(item, result)


def flatten(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj,
    isinstance=isinstance,
    StringTypes=StringTypes,
    SequenceTypes=SequenceTypes,
    do_flatten=do_flatten,
) -> list:
    """Flatten a sequence to a non-nested list.

    Converts either a single scalar or a nested sequence to a non-nested list.
    Note that :func:`flatten` considers strings
    to be scalars instead of sequences like pure Python would.
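
    For example:

    >>> flatten(['a', ['b', ['c', 'd']], 'e'])
    ['a', 'b', 'c', 'd', 'e']
    >>> flatten('ab')
    ['ab']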
    """
    if isinstance(obj, StringTypes) or not isinstance(obj, SequenceTypes):
        return [obj]
    result = []
    for item in obj:
        if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
            result.append(item)
        else:
            do_flatten(item, result)
    return result


def flatten_sequence(  # pylint: disable=redefined-outer-name,redefined-builtin
    sequence,
    isinstance=isinstance,
    StringTypes=StringTypes,
    SequenceTypes=SequenceTypes,
    do_flatten=do_flatten,
) -> list:
    """Flatten a sequence to a non-nested list.

    Same as :func:`flatten`, but it does not handle the single scalar case.
    This is slightly more efficient when one knows that the sequence
    to flatten can not be a scalar.
    """
    result = []
    for item in sequence:
        if isinstance(item, StringTypes) or not isinstance(item, SequenceTypes):
            result.append(item)
        else:
            do_flatten(item, result)
    return result

# Generic convert-to-string functions.  The wrapper
# to_String_for_signature() will use a for_signature() method if the
# specified object has one.

def to_String(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj,
    isinstance=isinstance,
    str=str,
    UserString=UserString,
    BaseStringTypes=BaseStringTypes,
) -> str:
    """Return a string version of obj."""

    if isinstance(obj, BaseStringTypes):
        # Early out when already a string!
        return obj

    if isinstance(obj, UserString):
        # obj.data can only be a regular string. Please see the UserString initializer.
        return obj.data

    return str(obj)

def to_String_for_subst(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj,
    isinstance=isinstance,
    str=str,
    BaseStringTypes=BaseStringTypes,
    SequenceTypes=SequenceTypes,
    UserString=UserString,
) -> str:
    """Return a string version of obj for subst usage."""

    # Note that the test cases are sorted by order of probability.
    if isinstance(obj, BaseStringTypes):
        return obj

    if isinstance(obj, SequenceTypes):
        return ' '.join([to_String_for_subst(e) for e in obj])

    if isinstance(obj, UserString):
        # obj.data can only be a regular string. Please see the UserString initializer.
        return obj.data

    return str(obj)

def to_String_for_signature(  # pylint: disable=redefined-outer-name,redefined-builtin
    obj, to_String_for_subst=to_String_for_subst, AttributeError=AttributeError
) -> str:
    """Return a string version of obj for signature usage.

    Like :func:`to_String_for_subst` but has special handling for
    scons objects that have a :meth:`for_signature` method, and for dicts.
    """

    try:
        f = obj.for_signature
    except AttributeError:
        if isinstance(obj, dict):
            # pprint will output the dictionary in key-sorted order. With
            # py3.5 the order was randomized; in general, depending on
            # dictionary order, which was undefined until py3.6 (where it
            # became insertion order), was not wise.
            # TODO: Change code when floor is raised to PY36
            return pprint.pformat(obj, width=1000000)
        return to_String_for_subst(obj)
    else:
        return f()


# The SCons "semi-deep" copy.
#
# This makes separate copies of lists (including UserList objects)
# dictionaries (including UserDict objects) and tuples, but just copies
# references to anything else it finds.
#
# A special case is any object that has a __semi_deepcopy__() method,
# which we invoke to create the copy. Currently only used by
# BuilderDict to actually prevent the copy operation (as invalid on that object).
#
# The dispatch table approach used here is a direct rip-off from the
# normal Python copy module.

def semi_deepcopy_dict(obj, exclude=None) -> dict:
    if exclude is None:
        exclude = []
    return {k: semi_deepcopy(v) for k, v in obj.items() if k not in exclude}

def _semi_deepcopy_list(obj) -> list:
    return [semi_deepcopy(item) for item in obj]

def _semi_deepcopy_tuple(obj) -> tuple:
    return tuple(map(semi_deepcopy, obj))

_semi_deepcopy_dispatch = {
    dict: semi_deepcopy_dict,
    list: _semi_deepcopy_list,
    tuple: _semi_deepcopy_tuple,
}

def semi_deepcopy(obj):
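    """Make a "semi-deep" copy of `obj`.

    Lists, tuples and dicts (and their UserList/UserDict counterparts) are
    copied recursively; objects providing a ``__semi_deepcopy__`` method are
    copied via that hook; anything else is returned as-is, so leaf objects
    are shared rather than copied.

    >>> nested = {"flags": ["-O2"], "count": 1}
    >>> dup = semi_deepcopy(nested)
    >>> dup == nested, dup["flags"] is nested["flags"]
    (True, False)
    """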
    copier = _semi_deepcopy_dispatch.get(type(obj))
    if copier:
        return copier(obj)

    if hasattr(obj, '__semi_deepcopy__') and callable(obj.__semi_deepcopy__):
        return obj.__semi_deepcopy__()

    if isinstance(obj, UserDict):
        return obj.__class__(semi_deepcopy_dict(obj))

    if isinstance(obj, UserList):
        return obj.__class__(_semi_deepcopy_list(obj))

    return obj


class Proxy:
    """A simple generic Proxy class, forwarding all calls to subject.

    This means you can take an object, let's call it `obj_a`,
    and wrap it in this Proxy class, with a statement like this::

        proxy_obj = Proxy(obj_a)

    Then, if in the future, you do something like this::

        x = proxy_obj.var1

    since the :class:`Proxy` class does not have a :attr:`var1` attribute
    (but presumably `obj_a` does), the request actually is equivalent to saying::

        x = obj_a.var1

    Inherit from this class to create a Proxy.

    With Python 3.5+ this does *not* work transparently
    for :class:`Proxy` subclasses that use special .__*__() method names,
    because those names are now bound to the class, not the individual
    instances.  You now need to know in advance which special method names you
    want to pass on to the underlying Proxy object, and specifically delegate
    their calls like this::

        class Foo(Proxy):
            __str__ = Delegate('__str__')
    """

    def __init__(self, subject):
        """Wrap an object as a Proxy object"""
        self._subject = subject

    def __getattr__(self, name):
        """Retrieve an attribute from the wrapped object.

        Raises:
           AttributeError: if attribute `name` doesn't exist.
        """
        return getattr(self._subject, name)

    def get(self):
        """Retrieve the entire wrapped object"""
        return self._subject

    def __eq__(self, other):
        if issubclass(other.__class__, self._subject.__class__):
            return self._subject == other
        return self.__dict__ == other.__dict__


class Delegate:
    """A Python Descriptor class that delegates attribute fetches
    to an underlying wrapped subject of a Proxy.  Typical use::

        class Foo(Proxy):
            __str__ = Delegate('__str__')
    """
    def __init__(self, attribute):
        self.attribute = attribute

    def __get__(self, obj, cls):
        if isinstance(obj, cls):
            return getattr(obj._subject, self.attribute)

        return self


class MethodWrapper:
    """A generic Wrapper class that associates a method with an object.

    As part of creating this MethodWrapper object an attribute with the
    specified name (by default, the name of the supplied method) is added
    to the underlying object.  When that new "method" is called, our
    :meth:`__call__` method adds the object as the first argument, simulating
    the Python behavior of supplying "self" on method calls.

    We hang on to the name by which the method was added to the underlying
    base class so that we can provide a method to "clone" ourselves onto
    a new underlying object being copied (without which we wouldn't need
    to save that info).
    """
    def __init__(self, obj, method, name=None):
        if name is None:
            name = method.__name__
        self.object = obj
        self.method = method
        self.name = name
        setattr(self.object, name, self)

    def __call__(self, *args, **kwargs):
        nargs = (self.object,) + args
        return self.method(*nargs, **kwargs)

    def clone(self, new_object):
        """
        Returns an object that re-binds the underlying "method" to
        the specified new object.
        """
        return self.__class__(new_object, self.method, self.name)


# attempt to load the windows registry module:
can_read_reg = False
try:
    import winreg

    can_read_reg = True
    hkey_mod = winreg

except ImportError:
    class _NoError(Exception):
        pass
    RegError = _NoError

if can_read_reg:
    HKEY_CLASSES_ROOT = hkey_mod.HKEY_CLASSES_ROOT
    HKEY_LOCAL_MACHINE = hkey_mod.HKEY_LOCAL_MACHINE
    HKEY_CURRENT_USER = hkey_mod.HKEY_CURRENT_USER
    HKEY_USERS = hkey_mod.HKEY_USERS

    RegOpenKeyEx = winreg.OpenKeyEx
    RegEnumKey = winreg.EnumKey
    RegEnumValue = winreg.EnumValue
    RegQueryValueEx = winreg.QueryValueEx
    RegError = winreg.error

    def RegGetValue(root, key):
        r"""Returns a registry value without having to open the key first.

        Only available on Windows platforms with a version of Python that
        can read the registry.

        Returns the same thing as :func:`RegQueryValueEx`, except you just
        specify the entire path to the value, and don't have to bother
        opening the key first.  So, instead of::

          k = SCons.Util.RegOpenKeyEx(SCons.Util.HKEY_LOCAL_MACHINE,
                r'SOFTWARE\Microsoft\Windows\CurrentVersion')
          out = SCons.Util.RegQueryValueEx(k, 'ProgramFilesDir')

        You can write::

          out = SCons.Util.RegGetValue(SCons.Util.HKEY_LOCAL_MACHINE,
                r'SOFTWARE\Microsoft\Windows\CurrentVersion\ProgramFilesDir')
        """
        # I would use os.path.split here, but it's not a filesystem
        # path...
        p = key.rfind('\\') + 1
        keyp = key[: p - 1]  # -1 to omit trailing slash
        val = key[p:]
        k = RegOpenKeyEx(root, keyp)
        return RegQueryValueEx(k, val)


else:
    HKEY_CLASSES_ROOT = None
    HKEY_LOCAL_MACHINE = None
    HKEY_CURRENT_USER = None
    HKEY_USERS = None

    def RegGetValue(root, key):
        raise OSError

    def RegOpenKeyEx(root, key):
        raise OSError


if sys.platform == 'win32':

    def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]:
        if path is None:
            try:
                path = os.environ['PATH']
            except KeyError:
                return None
        if is_String(path):
            path = path.split(os.pathsep)
        if pathext is None:
            try:
                pathext = os.environ['PATHEXT']
            except KeyError:
                pathext = '.COM;.EXE;.BAT;.CMD'
        if is_String(pathext):
            pathext = pathext.split(os.pathsep)
        for ext in pathext:
            if ext.lower() == file[-len(ext):].lower():
                pathext = ['']
                break
        if reject is None:
            reject = []
        if not is_List(reject) and not is_Tuple(reject):
            reject = [reject]
        for p in path:
            f = os.path.join(p, file)
            for ext in pathext:
                fext = f + ext
                if os.path.isfile(fext):
                    try:
                        reject.index(fext)
                    except ValueError:
                        return os.path.normpath(fext)
                    continue
        return None

elif os.name == 'os2':

    def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]:
        if path is None:
            try:
                path = os.environ['PATH']
            except KeyError:
                return None
        if is_String(path):
            path = path.split(os.pathsep)
        if pathext is None:
            pathext = ['.exe', '.cmd']
        for ext in pathext:
            if ext.lower() == file[-len(ext):].lower():
                pathext = ['']
                break
        if reject is None:
            reject = []
        if not is_List(reject) and not is_Tuple(reject):
            reject = [reject]
        for p in path:
            f = os.path.join(p, file)
            for ext in pathext:
                fext = f + ext
                if os.path.isfile(fext):
                    try:
                        reject.index(fext)
                    except ValueError:
                        return os.path.normpath(fext)
                    continue
        return None

else:

    def WhereIs(file, path=None, pathext=None, reject=None) -> Optional[str]:
        import stat  # pylint: disable=import-outside-toplevel

        if path is None:
            try:
                path = os.environ['PATH']
            except KeyError:
                return None
        if is_String(path):
            path = path.split(os.pathsep)
        if reject is None:
            reject = []
        if not is_List(reject) and not is_Tuple(reject):
            reject = [reject]
        for p in path:
            f = os.path.join(p, file)
            if os.path.isfile(f):
                try:
                    st = os.stat(f)
                except OSError:
                    # os.stat() raises OSError, not IOError, if the file
                    # doesn't exist. We catch OSError here and let IOError
                    # propagate so as to not mask possibly serious disk or
                    # network issues.
                    continue
                if stat.S_IMODE(st[stat.ST_MODE]) & 0o111:
                    try:
                        reject.index(f)
                    except ValueError:
                        return os.path.normpath(f)
                    continue
        return None

WhereIs.__doc__ = """\
Return the path to an executable that matches `file`.

Searches the given `path` for `file`, respecting any filename
extensions `pathext` (on the Windows platform only), and
returns the full path to the matching command.  If no
command is found, return ``None``.

If `path` is not specified, :attr:`os.environ[PATH]` is used.
If `pathext` is not specified, :attr:`os.environ[PATHEXT]`
is used. Will not select any path name or names in the optional
`reject` list.
"""

def PrependPath(
    oldpath, newpath, sep=os.pathsep, delete_existing=True, canonicalize=None
) -> Union[list, str]:
    """Prepends `newpath` path elements to `oldpath`.

    Will only add any particular path once (leaving the first one it
    encounters and ignoring the rest, to preserve path order), and will
    :mod:`os.path.normpath` and :mod:`os.path.normcase` all paths to help
    assure this.  This can also handle the case where `oldpath`
    is a list instead of a string, in which case a list will be returned
    instead of a string. For example:

    >>> p = PrependPath("/foo/bar:/foo", "/biz/boom:/foo")
    >>> print(p)
    /biz/boom:/foo:/foo/bar

    If `delete_existing` is ``False``, then adding a path that exists will
    not move it to the beginning; it will stay where it is in the list.

    >>> p = PrependPath("/foo/bar:/foo", "/biz/boom:/foo", delete_existing=False)
    >>> print(p)
    /biz/boom:/foo/bar:/foo

    If `canonicalize` is not ``None``, it is applied to each element of
    `newpath` before use.
    """

    orig = oldpath
    is_list = True
    paths = orig
    if not is_List(orig) and not is_Tuple(orig):
        paths = paths.split(sep)
        is_list = False

    if is_String(newpath):
        newpaths = newpath.split(sep)
    elif not is_List(newpath) and not is_Tuple(newpath):
        newpaths = [ newpath ]  # might be a Dir
    else:
        newpaths = newpath

    if canonicalize:
        newpaths=list(map(canonicalize, newpaths))

    if not delete_existing:
        # First uniquify the old paths, making sure to
        # preserve the first instance (in Unix/Linux,
        # the first one wins), and remembering them in normpaths.
        # Then insert the new paths at the head of the list
        # if they're not already in the normpaths list.
        result = []
        normpaths = []
        for path in paths:
            if not path:
                continue
            normpath = os.path.normpath(os.path.normcase(path))
            if normpath not in normpaths:
                result.append(path)
                normpaths.append(normpath)
        newpaths.reverse()      # since we're inserting at the head
        for path in newpaths:
            if not path:
                continue
            normpath = os.path.normpath(os.path.normcase(path))
            if normpath not in normpaths:
                result.insert(0, path)
                normpaths.append(normpath)
        paths = result

    else:
        newpaths = newpaths + paths # prepend new paths

        normpaths = []
        paths = []
        # now we add them only if they are unique
        for path in newpaths:
            normpath = os.path.normpath(os.path.normcase(path))
            if path and normpath not in normpaths:
                paths.append(path)
                normpaths.append(normpath)

    if is_list:
        return paths

    return sep.join(paths)

def AppendPath(
    oldpath, newpath, sep=os.pathsep, delete_existing=True, canonicalize=None
) -> Union[list, str]:
    """Appends `newpath` path elements to `oldpath`.

    Will only add any particular path once (leaving the last one it
    encounters and ignoring the rest, to preserve path order), and will
    :mod:`os.path.normpath` and :mod:`os.path.normcase` all paths to help
    assure this.  This can also handle the case where `oldpath`
    is a list instead of a string, in which case a list will be returned
    instead of a string. For example:

    >>> p = AppendPath("/foo/bar:/foo", "/biz/boom:/foo")
    >>> print(p)
    /foo/bar:/biz/boom:/foo

    If `delete_existing` is ``False``, then adding a path that exists
    will not move it to the end; it will stay where it is in the list.

    >>> p = AppendPath("/foo/bar:/foo", "/biz/boom:/foo", delete_existing=False)
    >>> print(p)
    /foo/bar:/foo:/biz/boom

    If `canonicalize` is not ``None``, it is applied to each element of
    `newpath` before use.
    """

    orig = oldpath
    is_list = True
    paths = orig
    if not is_List(orig) and not is_Tuple(orig):
        paths = paths.split(sep)
        is_list = False

    if is_String(newpath):
        newpaths = newpath.split(sep)
    elif not is_List(newpath) and not is_Tuple(newpath):
        newpaths = [newpath]  # might be a Dir
    else:
        newpaths = newpath

    if canonicalize:
        newpaths=list(map(canonicalize, newpaths))

    if not delete_existing:
        # add old paths to result, then
        # add new paths if not already present
        # (I thought about using a dict for normpaths for speed,
        # but it's not clear hashing the strings would be faster
        # than linear searching these typically short lists.)
        result = []
        normpaths = []
        for path in paths:
            if not path:
                continue
            result.append(path)
            normpaths.append(os.path.normpath(os.path.normcase(path)))
        for path in newpaths:
            if not path:
                continue
            normpath = os.path.normpath(os.path.normcase(path))
            if normpath not in normpaths:
                result.append(path)
                normpaths.append(normpath)
        paths = result
    else:
        # start w/ new paths, add old ones if not present,
        # then reverse.
        newpaths = paths + newpaths # append new paths
        newpaths.reverse()

        normpaths = []
        paths = []
        # now we add them only if they are unique
        for path in newpaths:
            normpath = os.path.normpath(os.path.normcase(path))
            if path and normpath not in normpaths:
                paths.append(path)
                normpaths.append(normpath)
        paths.reverse()

    if is_list:
        return paths

    return sep.join(paths)

def AddPathIfNotExists(env_dict, key, path, sep=os.pathsep):
    """Add a path element to a construction variable.

    `key` is looked up in `env_dict`, and `path` is added to it if it
    is not already present. `env_dict[key]` is assumed to be in the
    format of a PATH variable: a list of paths separated by `sep` tokens.
    Example:

    >>> env = {'PATH': '/bin:/usr/bin:/usr/local/bin'}
    >>> AddPathIfNotExists(env, 'PATH', '/opt/bin')
    >>> print(env['PATH'])
    /opt/bin:/bin:/usr/bin:/usr/local/bin
    """

    try:
        is_list = True
        paths = env_dict[key]
        if not is_List(env_dict[key]):
            paths = paths.split(sep)
            is_list = False
        if os.path.normcase(path) not in list(map(os.path.normcase, paths)):
            paths = [ path ] + paths
        if is_list:
            env_dict[key] = paths
        else:
            env_dict[key] = sep.join(paths)
    except KeyError:
        env_dict[key] = path

if sys.platform == 'cygwin':
    import subprocess  # pylint: disable=import-outside-toplevel

    def get_native_path(path) -> str:
        cp = subprocess.run(('cygpath', '-w', path), check=False, stdout=subprocess.PIPE)
        return cp.stdout.decode().replace('\n', '')
else:
    def get_native_path(path) -> str:
        return path

get_native_path.__doc__ = """\
Transform an absolute path into a native path for the system.

In Cygwin, this converts from a Cygwin path to a Windows path,
without regard to whether `path` refers to an existing file
system object.  For other platforms, `path` is unchanged.
"""


display = DisplayEngine()

def Split(arg) -> list:
    """Returns a list of file names or other objects.

    If `arg` is a string, it will be split on strings of white-space
    characters within the string.  If `arg` is already a list, the list
    will be returned untouched. If `arg` is any other type of object,
    it will be returned as a list containing just the object.

    >>> print(Split(" this  is  a  string  "))
    ['this', 'is', 'a', 'string']
    >>> print(Split(["stringlist", " preserving ", " spaces "]))
    ['stringlist', ' preserving ', ' spaces ']
    """
    if is_List(arg) or is_Tuple(arg):
        return arg

    if is_String(arg):
        return arg.split()

    return [arg]


class CLVar(UserList):
    """A container for command-line construction variables.

    Forces the use of a list of strings intended as command-line
    arguments.  Like :class:`collections.UserList`, but the argument
    passed to the initializer will be processed by the :func:`Split`
    function, which includes special handling for string types: they
    will be split into a list of words, not coerced directly to a list.
    The same happens if a string is added to a :class:`CLVar`,
    which allows doing the right thing with both
    :func:`Append`/:func:`Prepend` methods,
    as well as with pure Python addition, regardless of whether adding
    a list or a string to a construction variable.

    Side effect: spaces will be stripped from individual string
    arguments. If you need spaces preserved, pass strings containing
    spaces inside a list argument.

    >>> u = UserList("--some --opts and args")
    >>> print(len(u), repr(u))
    22 ['-', '-', 's', 'o', 'm', 'e', ' ', '-', '-', 'o', 'p', 't', 's', ' ', 'a', 'n', 'd', ' ', 'a', 'r', 'g', 's']
    >>> c = CLVar("--some --opts and args")
    >>> print(len(c), repr(c))
    4 ['--some', '--opts', 'and', 'args']
    >>> c += "    strips spaces    "
    >>> print(len(c), repr(c))
    6 ['--some', '--opts', 'and', 'args', 'strips', 'spaces']
    """

    def __init__(self, initlist=None):
        super().__init__(Split(initlist))

    def __add__(self, other):
        return super().__add__(CLVar(other))

    def __radd__(self, other):
        return super().__radd__(CLVar(other))

    def __iadd__(self, other):
        return super().__iadd__(CLVar(other))

    def __str__(self):
        return ' '.join(self.data)


class Selector(OrderedDict):
    """A callable ordered dictionary that maps file suffixes to
    dictionary values.  We preserve the order in which items are added
    so that :func:`get_suffix` calls always return the first suffix added.
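
    A sketch of the lookup (``env``, ``source`` and the mapped actions
    below are illustrative placeholders, not real SCons objects)::

        sel = Selector({'.c': c_action, '.cpp': cxx_action, None: fallback})
        chosen = sel(env, [node])  # keyed by node.get_suffix(), else the None key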
    """
    def __call__(self, env, source, ext=None):
        if ext is None:
            try:
                ext = source[0].get_suffix()
            except IndexError:
                ext = ""
        try:
            return self[ext]
        except KeyError:
            # Try to perform Environment substitution on the keys of
            # the dictionary before giving up.
            s_dict = {}
            for (k,v) in self.items():
                if k is not None:
                    s_k = env.subst(k)
                    if s_k in s_dict:
                        # We only raise an error when variables point
                        # to the same suffix.  If one suffix is literal
                        # and a variable suffix contains this literal,
                        # the literal wins and we don't raise an error.
                        raise KeyError(s_dict[s_k][0], k, s_k)
                    s_dict[s_k] = (k,v)
            try:
                return s_dict[ext][1]
            except KeyError:
                try:
                    return self[None]
                except KeyError:
                    return None


if sys.platform == 'cygwin':
    # On Cygwin, os.path.normcase() lies, so just report back the
    # fact that the underlying Windows OS is case-insensitive.
    def case_sensitive_suffixes(s1, s2) -> bool:  # pylint: disable=unused-argument
        return False

else:
    def case_sensitive_suffixes(s1, s2) -> bool:
        return os.path.normcase(s1) != os.path.normcase(s2)


def adjustixes(fname, pre, suf, ensure_suffix=False) -> str:
    """Adjust filename prefixes and suffixes as needed.

    Add the `pre` prefix to `fname` if specified.
    Add the `suf` suffix if specified and not already present, when either
    `ensure_suffix` is ``True`` or `fname` has no suffix at all.
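
    A few illustrative cases:

    >>> adjustixes("file", "lib", ".a")
    'libfile.a'
    >>> adjustixes("file.c", "", ".exe")
    'file.c'
    >>> adjustixes("file.c", "", ".exe", ensure_suffix=True)
    'file.c.exe'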
    """

    if pre:
        path, fn = os.path.split(os.path.normpath(fname))

        # Handle the odd case where the filename = the prefix.
        # In that case, we still want to add the prefix to the file
        if not fn.startswith(pre) or fn == pre:
            fname = os.path.join(path, pre + fn)
    # Only append a suffix if the suffix we're going to add isn't already
    # there, and if either we've been asked to ensure the specific suffix
    # is present or there's no suffix on it at all.
    # Also handle the odd case where the filename = the suffix.
    # in that case we still want to append the suffix
    if suf and not fname.endswith(suf) and \
            (ensure_suffix or not splitext(fname)[1]):
        fname = fname + suf
    return fname



# From Tim Peters,
# https://code.activestate.com/recipes/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# (Also in the printed Python Cookbook.)
# Updated. This algorithm is used by some scanners and tools.

def unique(seq):
    """Return a list of the elements in seq without duplicates, ignoring order.

    >>> mylist = unique([1, 2, 3, 1, 2, 3])
    >>> print(sorted(mylist))
    [1, 2, 3]
    >>> mylist = unique("abcabc")
    >>> print(sorted(mylist))
    ['a', 'b', 'c']
    >>> mylist = unique(([1, 2], [2, 3], [1, 2]))
    >>> print(sorted(mylist))
    [[1, 2], [2, 3]]

    For best speed, all sequence elements should be hashable.  Then
    unique() will usually work in linear time.

    If not possible, the sequence elements should enjoy a total
    ordering, and if list(s).sort() doesn't raise TypeError it's
    assumed that they do enjoy a total ordering.  Then unique() will
    usually work in O(N*log2(N)) time.

    If that's not possible either, the sequence elements must support
    equality-testing.  Then unique() will usually work in quadratic time.
    """

    if not seq:
        return []

    # Try using a dict first, as that's the fastest and will usually
    # work.  If it doesn't work, it will usually fail quickly, so it
    # usually doesn't cost much to *try* it.  It requires that all the
    # sequence elements be hashable, and support equality comparison.
    # TODO: should be even faster: return(list(set(seq)))
    with suppress(TypeError):
        return list(dict.fromkeys(seq))

    # We couldn't hash all the elements (got a TypeError).
    # Next fastest is to sort, which brings the equal elements together;
    # then duplicates are easy to weed out in a single pass.
    # NOTE:  Python's list.sort() was designed to be efficient in the
    # presence of many duplicate elements.  This isn't true of all
    # sort functions in all languages or libraries, so this approach
    # is more effective in Python than it may be elsewhere.
    n = len(seq)
    try:
        t = sorted(seq)
    except TypeError:
        pass    # move on to the next method
    else:
        last = t[0]
        lasti = i = 1
        while i < n:
            if t[i] != last:
                t[lasti] = last = t[i]
                lasti = lasti + 1
            i = i + 1
        return t[:lasti]

    # Brute force is all that's left.
    u = []
    for x in seq:
        if x not in u:
            u.append(x)
    return u


# From Alex Martelli,
# https://code.activestate.com/recipes/52560
# ASPN: Python Cookbook: Remove duplicates from a sequence
# First comment, dated 2001/10/13.
# (Also in the printed Python Cookbook.)
# This is not currently used, in favor of the next function...

def uniquer(seq, idfun=None):
    def default_idfun(x):
        return x
    if not idfun:
        idfun = default_idfun
    seen = {}
    result = []
    result_append = result.append  # perf: avoid repeated method lookups
    for item in seq:
        marker = idfun(item)
        if marker in seen:
            continue
        seen[marker] = 1
        result_append(item)
    return result

# A more efficient implementation of Alex's uniquer(), this avoids the
# idfun() argument and function-call overhead by assuming that all
# items in the sequence are hashable.  Order-preserving.

def uniquer_hashables(seq):
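    """Return the elements of `seq` with duplicates removed, preserving
    first-seen order.  All items must be hashable.

    >>> uniquer_hashables([3, 1, 3, 2, 1])
    [3, 1, 2]
    """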
    seen = {}
    result = []
    result_append = result.append  # perf: avoid repeated method lookups
    for item in seq:
        if item not in seen:
            seen[item] = 1
            result_append(item)
    return result


# Recipe 19.11 "Reading Lines with Continuation Characters",
# by Alex Martelli, straight from the Python CookBook (2nd edition).
def logical_lines(physical_lines, joiner=''.join):
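    """Yield "logical" lines from an iterable of physical lines.

    A physical line ending in a backslash is joined with the following
    line(s); the trailing backslash itself is dropped.
    """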
    logical_line = []
    for line in physical_lines:
        stripped = line.rstrip()
        if stripped.endswith('\\'):
            # a line which continues w/the next physical line
            logical_line.append(stripped[:-1])
        else:
            # a line which does not continue, end of logical line
            logical_line.append(line)
            yield joiner(logical_line)
            logical_line = []
    if logical_line:
        # end of sequence implies end of last logical line
        yield joiner(logical_line)


class LogicalLines:
    """Wrapper class for the :func:`logical_lines` function.

    Allows us to read all "logical" lines at once from a given file object.
    """

    def __init__(self, fileobj):
        self.fileobj = fileobj

    def readlines(self):
        return list(logical_lines(self.fileobj))


class UniqueList(UserList):
    """A list which maintains uniqueness.

    Uniquing is lazy: rather than being ensured on every list change, it is
    fixed up on access by those methods which need to act on a unique list
    to be correct. That means things like membership tests ("in") don't have
    to pay the uniquing cost.
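
    For example, duplicates disappear once the list is displayed:

    >>> ul = UniqueList([3, 1, 3, 2, 1])
    >>> ul.append(2)
    >>> print(ul)
    [3, 1, 2]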
    """
    def __init__(self, initlist=None):
        super().__init__(initlist)
        self.unique = True

    def __make_unique(self):
        if not self.unique:
            self.data = uniquer_hashables(self.data)
            self.unique = True

    def __repr__(self):
        self.__make_unique()
        return super().__repr__()

    def __lt__(self, other):
        self.__make_unique()
        return super().__lt__(other)

    def __le__(self, other):
        self.__make_unique()
        return super().__le__(other)

    def __eq__(self, other):
        self.__make_unique()
        return super().__eq__(other)

    def __ne__(self, other):
        self.__make_unique()
        return super().__ne__(other)

    def __gt__(self, other):
        self.__make_unique()
        return super().__gt__(other)

    def __ge__(self, other):
        self.__make_unique()
        return super().__ge__(other)

    # __contains__ doesn't need to worry about uniquing, inherit

    def __len__(self):
        self.__make_unique()
        return super().__len__()

    def __getitem__(self, i):
        self.__make_unique()
        return super().__getitem__(i)

    def __setitem__(self, i, item):
        super().__setitem__(i, item)
        self.unique = False

    # __delitem__ doesn't need to worry about uniquing, inherit

    def __add__(self, other):
        result = super().__add__(other)
        result.unique = False
        return result

    def __radd__(self, other):
        result = super().__radd__(other)
        result.unique = False
        return result

    def __iadd__(self, other):
        result = super().__iadd__(other)
        result.unique = False
        return result

    def __mul__(self, other):
        result = super().__mul__(other)
        result.unique = False
        return result

    def __rmul__(self, other):
        result = super().__rmul__(other)
        result.unique = False
        return result

    def __imul__(self, other):
        result = super().__imul__(other)
        result.unique = False
        return result

    def append(self, item):
        super().append(item)
        self.unique = False

    def insert(self, i, item):
        super().insert(i, item)
        self.unique = False

    def count(self, item):
        self.__make_unique()
        return super().count(item)

    def index(self, item, *args):
        self.__make_unique()
        return super().index(item, *args)

    def reverse(self):
        self.__make_unique()
        super().reverse()

    # TODO: Py3.8: def sort(self, /, *args, **kwds):
    def sort(self, *args, **kwds):
        self.__make_unique()
        return super().sort(*args, **kwds)

    def extend(self, other):
        super().extend(other)
        self.unique = False


class Unbuffered:
    """A proxy that wraps a file object, flushing after every write.

    Delegates everything else to the wrapped object.
    """
    def __init__(self, file):
        self.file = file

    def write(self, arg):
        # Stdout might be connected to a pipe that has been closed
        # by now. The most likely reason for the pipe being closed
        # is that the user has pressed ctrl-c. If this is the case,
        # then SCons is currently shutting down. We therefore ignore
        # IOErrors here so that SCons can continue and shut down
1580        # properly so that the .sconsign is correctly written
1581        # before SCons exits.
1582        with suppress(IOError):
1583            self.file.write(arg)
1584            self.file.flush()
1585
1586    def writelines(self, arg):
1587        with suppress(IOError):
1588            self.file.writelines(arg)
1589            self.file.flush()
1590
1591    def __getattr__(self, attr):
1592        return getattr(self.file, attr)
1593
1594def make_path_relative(path) -> str:
1595    """Converts an absolute path name to a relative pathname."""
1596
1597    if os.path.isabs(path):
1598        drive_s, path = os.path.splitdrive(path)
1599
1600        if not drive_s:
1601            path=re.compile(r"/*(.*)").findall(path)[0]
1602        else:
1603            path=path[1:]
1604
1605    assert not os.path.isabs(path), path
1606    return path
1607
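# Illustrative behavior of make_path_relative (POSIX-style paths assumed):
#   make_path_relative('/usr/lib/scons') -> 'usr/lib/scons'
#   make_path_relative('build/obj')      -> 'build/obj'  (already relative)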

# The original idea for AddMethod() came from the
# following post to the ActiveState Python Cookbook:
#
# ASPN: Python Cookbook : Install bound methods in an instance
# https://code.activestate.com/recipes/223613
#
# Changed as follows:
# * Switched the installmethod() "object" and "function" arguments,
#   so the order reflects that the left-hand side is the thing being
#   "assigned to" and the right-hand side is the value being assigned.
# * The instance/class detection is changed a bit, as it's all
#   new-style classes now with Py3.
# * The by-hand construction of the function object from renamefunction()
#   is not needed, the remaining bit is now used inline in AddMethod.

def AddMethod(obj, function, name=None):
    """Adds a method to an object.

    Adds `function` to `obj` if `obj` is a class object.
    Adds `function` as a bound method if `obj` is an instance object.
    If `obj` looks like an environment instance, use `MethodWrapper`
    to add it.  If `name` is supplied it is used as the name of `function`.

    Although this works for any class object, the intended public use
    is on Environment, to add a method to all construction environments;
    prefer env.AddMethod to add a method to an individual environment.

    >>> class A:
    ...    ...

    >>> a = A()

    >>> def f(self, x, y):
    ...    self.z = x + y

    >>> AddMethod(A, f, "add")
    >>> a.add(2, 4)
    >>> print(a.z)
    6
    >>> a.data = ['a', 'b', 'c', 'd', 'e', 'f']
    >>> AddMethod(a, lambda self, i: self.data[i], "listIndex")
    >>> print(a.listIndex(3))
    d

    """
    if name is None:
        name = function.__name__
    else:
        # "rename"
        function = FunctionType(
            function.__code__, function.__globals__, name, function.__defaults__
        )

    if hasattr(obj, '__class__') and obj.__class__ is not type:
        # obj is an instance, so it gets a bound method.
        if hasattr(obj, "added_methods"):
            method = MethodWrapper(obj, function, name)
            obj.added_methods.append(method)
        else:
            method = MethodType(function, obj)
    else:
        # obj is a class
        method = function

    setattr(obj, name, method)


# Default hash function and format. SCons-internal.
ALLOWED_HASH_FORMATS = ['md5', 'sha1', 'sha256']
_HASH_FUNCTION = None
_HASH_FORMAT = None


def get_hash_format():
    """Retrieves the hash format or ``None`` if not overridden.

    A return value of ``None`` does not guarantee that MD5 is being used;
    instead, it means that the default precedence order documented in
    :func:`SCons.Util.set_hash_format` is respected.
    """
    return _HASH_FORMAT


def set_hash_format(hash_format):
    """Sets the default hash format used by SCons.

    If `hash_format` is ``None`` or an empty string, the default is
    determined by this function.

    Currently the default behavior is to use the first available format of
    the following options: MD5, SHA1, SHA256.
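
    Example (illustrative)::

        set_hash_format('sha256')   # subsequent signatures use hashlib.sha256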
    """
    global _HASH_FORMAT, _HASH_FUNCTION

    _HASH_FORMAT = hash_format
    if hash_format:
        hash_format_lower = hash_format.lower()
        if hash_format_lower not in ALLOWED_HASH_FORMATS:
            from SCons.Errors import UserError  # pylint: disable=import-outside-toplevel

            raise UserError('Hash format "%s" is not supported by SCons. Only '
                            'the following hash formats are supported: %s' %
                            (hash_format_lower,
                             ', '.join(ALLOWED_HASH_FORMATS)))

        _HASH_FUNCTION = getattr(hashlib, hash_format_lower, None)
        if _HASH_FUNCTION is None:
            from SCons.Errors import UserError  # pylint: disable=import-outside-toplevel

            raise UserError(
                'Hash format "%s" is not available in your Python interpreter.'
                % hash_format_lower
            )
    else:
        # Set the default hash format based on what is available, defaulting
        # to md5 for backwards compatibility.
        for choice in ALLOWED_HASH_FORMATS:
            _HASH_FUNCTION = getattr(hashlib, choice, None)
            if _HASH_FUNCTION is not None:
                break
        else:
            # This is not expected to happen in practice.
            from SCons.Errors import UserError  # pylint: disable=import-outside-toplevel

            raise UserError(
                'Your Python interpreter does not have MD5, SHA1, or SHA256. '
                'SCons requires at least one.')

# Ensure that this is initialized in case either:
#    1. This code is running in a unit test.
#    2. This code is running in a consumer that does hash operations while
#       SConscript files are being loaded.
set_hash_format(None)


def _get_hash_object(hash_format):
    """Allocates a hash object using the requested hash format.

    Args:
        hash_format: Hash format to use.

    Returns:
        hashlib object.
    """
    if hash_format is None:
        if _HASH_FUNCTION is None:
            from SCons.Errors import UserError  # pylint: disable=import-outside-toplevel

            raise UserError('There is no default hash function. Did you call '
                            'a hashing function before SCons was initialized?')
        return _HASH_FUNCTION()

    if not hasattr(hashlib, hash_format):
        from SCons.Errors import UserError  # pylint: disable=import-outside-toplevel

        raise UserError(
            'Hash format "%s" is not available in your Python interpreter.' %
            hash_format)

    return getattr(hashlib, hash_format)()


def hash_signature(s, hash_format=None):
    """
    Generate the hash signature of a string.

    Args:
        s: either string or bytes. Normally should be bytes.
        hash_format: Specify to override default hash format

    Returns:
        String of hex digits representing the signature
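
    Example (illustrative)::

        sig = hash_signature(b'contents')            # default hash format
        sig = hash_signature(b'contents', 'sha256')  # explicit hash format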
    """
    m = _get_hash_object(hash_format)
    try:
        m.update(to_bytes(s))
    except TypeError:
        m.update(to_bytes(str(s)))

    return m.hexdigest()


def hash_file_signature(fname, chunksize=65536, hash_format=None):
    """
    Generate the hash signature of a file.

    Args:
        fname: file to hash
        chunksize: chunk size to read
        hash_format: Specify to override default hash format

    Returns:
        String of hex digits representing the signature
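
    Example (illustrative; 'SConstruct' is just a stand-in filename)::

        sig = hash_file_signature('SConstruct', hash_format='sha256')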
    """

    m = _get_hash_object(hash_format)
    with open(fname, "rb") as f:
        while True:
            blck = f.read(chunksize)
            if not blck:
                break
            m.update(to_bytes(blck))
    return m.hexdigest()


def hash_collect(signatures, hash_format=None):
    """
    Collects a list of signatures into an aggregate signature.

    Args:
        signatures: a list of signatures
        hash_format: Specify to override default hash format

    Returns:
        the aggregate signature
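
    Example (illustrative)::

        hash_collect(['sig'])           # a single signature is returned as-is
        hash_collect(['sig1', 'sig2'])  # multiple signatures are re-hashed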
    """

    if len(signatures) == 1:
        return signatures[0]

    return hash_signature(', '.join(signatures), hash_format)


_MD5_WARNING_SHOWN = False

def _show_md5_warning(function_name):
    """Shows a deprecation warning for various MD5 functions."""

    global _MD5_WARNING_SHOWN

    if not _MD5_WARNING_SHOWN:
        import SCons.Warnings  # pylint: disable=import-outside-toplevel

        SCons.Warnings.warn(SCons.Warnings.DeprecatedWarning,
                            "Function %s is deprecated" % function_name)
        _MD5_WARNING_SHOWN = True


def MD5signature(s):
    """Deprecated. Use :func:`hash_signature` instead."""

    _show_md5_warning("MD5signature")
    return hash_signature(s)


def MD5filesignature(fname, chunksize=65536):
    """Deprecated. Use :func:`hash_file_signature` instead."""

    _show_md5_warning("MD5filesignature")
    return hash_file_signature(fname, chunksize)


def MD5collect(signatures):
    """Deprecated. Use :func:`hash_collect` instead."""

    _show_md5_warning("MD5collect")
    return hash_collect(signatures)


def silent_intern(x):
    """
    Perform :func:`sys.intern` on the passed argument and return the result.
    If the input is ineligible for interning the original argument is
    returned and no exception is thrown.
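
    Example (illustrative)::

        silent_intern('target-name')   # str: returns the interned string
        silent_intern(1234)            # not a str: returned unchanged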
    """
    try:
        return sys.intern(x)
    except TypeError:
        return x


# From Dinu C. Gherman,
# Python Cookbook, second edition, recipe 6.17, p. 277.
# Also: https://code.activestate.com/recipes/68205
# ASPN: Python Cookbook: Null Object Design Pattern

class Null:
    """ Null objects always and reliably "do nothing." """
    def __new__(cls, *args, **kwargs):
        if '_instance' not in vars(cls):
            # object.__new__ takes no extra arguments; passing *args/**kwargs
            # through would raise TypeError, so they are accepted and ignored.
            cls._instance = super().__new__(cls)
        return cls._instance
    def __init__(self, *args, **kwargs):
        pass
    def __call__(self, *args, **kwargs):
        return self
    def __repr__(self):
        return "Null(0x%08X)" % id(self)
    def __bool__(self):
        return False
    def __getattr__(self, name):
        return self
    def __setattr__(self, name, value):
        return self
    def __delattr__(self, name):
        return self


class NullSeq(Null):
    """ A Null object that can also be iterated over. """
    def __len__(self):
        return 0
    def __iter__(self):
        return iter(())
    def __getitem__(self, i):
        return self
    def __delitem__(self, i):
        return self
    def __setitem__(self, i, v):
        return self

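# Illustrative behavior of the Null singleton:
#   n = Null()
#   n.anything.at.all()   # attribute access and calls just return the Null
#   bool(n)               # -> False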

def to_bytes(s) -> bytes:
    if s is None:
        return b'None'
    if isinstance(s, (bytes, bytearray)):
        # If already bytes, return as-is.
        return s
    return bytes(s, 'utf-8')


def to_str(s) -> str:
    if s is None:
        return 'None'
    if is_String(s):
        return s
    return str(s, 'utf-8')


def cmp(a, b) -> int:
    """A cmp function because one is no longer available in Python 3."""
    return (a > b) - (a < b)


def get_env_bool(env, name, default=False) -> bool:
    """Convert a construction variable to bool.

    If the value of `name` in `env` is 'true', 'yes', 'y', 'on' (case
    insensitive) or anything convertible to int that yields non-zero then
    return ``True``; if 'false', 'no', 'n', 'off' (case insensitive)
    or a number that converts to integer zero return ``False``.
    Otherwise, return `default`.

    Args:
        env: construction environment, or any dict-like object
        name: name of the variable
        default: value to return if `name` not in `env` or cannot
          be converted (default: False)

    Returns:
        the "truthiness" of `name`
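
    Example (illustrative)::

        >>> get_env_bool({'SET': 'yes', 'CLEAR': '0'}, 'SET')
        True
        >>> get_env_bool({'SET': 'yes', 'CLEAR': '0'}, 'CLEAR')
        False
        >>> get_env_bool({}, 'MISSING', default=True)
        True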
    """
    try:
        var = env[name]
    except KeyError:
        return default
    try:
        return bool(int(var))
    except ValueError:
        if str(var).lower() in ('true', 'yes', 'y', 'on'):
            return True

        if str(var).lower() in ('false', 'no', 'n', 'off'):
            return False

        return default


def get_os_env_bool(name, default=False) -> bool:
    """Convert an environment variable to bool.

    Conversion is the same as for :func:`get_env_bool`.
    """
    return get_env_bool(os.environ, name, default)


def print_time():
    """Hack to return the ``print_time`` value from Main, since Main
    can't be imported at module load time."""
    # pylint: disable=redefined-outer-name,import-outside-toplevel
    from SCons.Script.Main import print_time
    return print_time

# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: