# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

# This file contains miscellaneous utility functions that don't belong anywhere
# in particular.

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import collections
import collections.abc
import copy
import ctypes
import difflib
import errno
import functools
import hashlib
import io
import itertools
import os
import pprint
import re
import stat
import sys
import time
from collections import OrderedDict
from io import BytesIO, StringIO

import six

MOZBUILD_METRICS_PATH = os.path.abspath(
    os.path.join(__file__, "..", "..", "metrics.yaml")
)

if sys.platform == "win32":
    _kernel32 = ctypes.windll.kernel32
    _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
    system_encoding = "mbcs"
else:
    system_encoding = "utf-8"


def exec_(object, globals=None, locals=None):
    """Wrapper around the exec statement to avoid bogus errors like:

    SyntaxError: unqualified exec is not allowed in function ...
    it is a nested function.

    or

    SyntaxError: unqualified exec is not allowed in function ...
    it contains a nested function with free variable

    which happen with older versions of python 2.7.
    """
    exec(object, globals, locals)


def _open(path, mode):
    if "b" in mode:
        return io.open(path, mode)
    return io.open(path, mode, encoding="utf-8", newline="\n")


def hash_file(path, hasher=None):
    """Hashes a file specified by the path given and returns the hex digest."""

    # If the default hashing function changes, this may invalidate
    # lots of cached data.  Don't change it lightly.
    h = hasher or hashlib.sha1()

    with open(path, "rb") as fh:
        while True:
            data = fh.read(8192)

            if not len(data):
                break

            h.update(data)

    return h.hexdigest()

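# Illustrative usage of hash_file (a minimal sketch kept as comments; the path
# is hypothetical):
#
#     digest = hash_file("/tmp/example.txt")     # SHA-1 hex digest by default
#     digest = hash_file("/tmp/example.txt", hasher=hashlib.sha256())
#
# Passing an explicit hasher changes the digest algorithm for that call only.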

class EmptyValue(six.text_type):
    """A dummy type that behaves like an empty string and sequence.

    This type exists in order to support
    :py:class:`mozbuild.frontend.reader.EmptyConfig`. It should likely not be
    used elsewhere.
    """

    def __init__(self):
        super(EmptyValue, self).__init__()


class ReadOnlyNamespace(object):
    """A class for objects with immutable attributes set at initialization."""

    def __init__(self, **kwargs):
        for k, v in six.iteritems(kwargs):
            super(ReadOnlyNamespace, self).__setattr__(k, v)

    def __delattr__(self, key):
        raise Exception("Object does not support deletion.")

    def __setattr__(self, key, value):
        raise Exception("Object does not support assignment.")

    def __ne__(self, other):
        return not (self == other)

    def __eq__(self, other):
        return self is other or (
            hasattr(other, "__dict__") and self.__dict__ == other.__dict__
        )

    def __repr__(self):
        return "<%s %r>" % (self.__class__.__name__, self.__dict__)


class ReadOnlyDict(dict):
    """A read-only dictionary."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)

    def __delitem__(self, key):
        raise Exception("Object does not support deletion.")

    def __setitem__(self, key, value):
        raise Exception("Object does not support assignment.")

    def update(self, *args, **kwargs):
        raise Exception("Object does not support update.")

    def __copy__(self, *args, **kwargs):
        return ReadOnlyDict(**dict.copy(self, *args, **kwargs))

    def __deepcopy__(self, memo):
        result = {}
        for k, v in self.items():
            result[k] = copy.deepcopy(v, memo)

        return ReadOnlyDict(**result)

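# Illustrative usage of ReadOnlyDict (a minimal sketch kept as comments):
#
#     d = ReadOnlyDict({"a": 1})
#     d["a"]               # -> 1; reads behave like a normal dict
#     copy.deepcopy(d)     # -> a new ReadOnlyDict with deep-copied values
#     d["b"] = 2           # raises Exception("Object does not support assignment.")
#     del d["a"]           # raises Exception("Object does not support deletion.")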

class undefined_default(object):
    """Represents an undefined argument value that isn't None."""


undefined = undefined_default()


class ReadOnlyDefaultDict(ReadOnlyDict):
    """A read-only dictionary that supports default values on retrieval."""

    def __init__(self, default_factory, *args, **kwargs):
        ReadOnlyDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self._default_factory()
        dict.__setitem__(self, key, value)
        return value


def ensureParentDir(path):
    """Ensures the directory parent to the given file exists."""
    d = os.path.dirname(path)
    if d and not os.path.exists(path):
        try:
            os.makedirs(d)
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise


def mkdir(path, not_indexed=False):
    """Ensure a directory exists.

    If ``not_indexed`` is True, an attribute is set that disables content
    indexing on the directory.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    if not_indexed:
        if sys.platform == "win32":
            if isinstance(path, six.string_types):
                fn = _kernel32.SetFileAttributesW
            else:
                fn = _kernel32.SetFileAttributesA

            fn(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
        elif sys.platform == "darwin":
            with open(os.path.join(path, ".metadata_never_index"), "a"):
                pass


def simple_diff(filename, old_lines, new_lines):
    """Returns the diff between old_lines and new_lines, in unified diff form,
    as a generator of lines.

    old_lines and new_lines are lists of non-newline terminated lines to
    compare.
    old_lines can be None, indicating a file creation.
    new_lines can be None, indicating a file deletion.
    """

    old_name = "/dev/null" if old_lines is None else filename
    new_name = "/dev/null" if new_lines is None else filename

    return difflib.unified_diff(
        old_lines or [], new_lines or [], old_name, new_name, n=4, lineterm=""
    )

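# Illustrative usage of simple_diff (a minimal sketch kept as comments):
#
#     list(simple_diff("foo.txt", ["a", "b"], ["a", "c"]))
#     # -> unified diff lines such as "--- foo.txt", "+++ foo.txt", "-b", "+c"
#
#     simple_diff("foo.txt", None, ["a"])   # file creation: old side is /dev/null
#     simple_diff("foo.txt", ["a"], None)   # file deletion: new side is /dev/null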

class FileAvoidWrite(BytesIO):
    """File-like object that buffers output and only writes if content changed.

    We create an instance from an existing filename. New content is written to
    it. When we close the file object, if the content in the in-memory buffer
    differs from what is on disk, then we write out the new content. Otherwise,
    the original file is untouched.

    Instances can optionally capture diffs of file changes. This feature is not
    enabled by default because it (a) doesn't make sense for binary files and
    (b) could add unwanted overhead to calls.

    Additionally, there is a dry run mode in which the file is not actually
    written out. The return value still reports whether the file existed and
    would have been updated, and a diff is still captured if requested.
    """

    def __init__(self, filename, capture_diff=False, dry_run=False, readmode="rU"):
        BytesIO.__init__(self)
        self.name = filename
        assert type(capture_diff) == bool
        assert type(dry_run) == bool
        assert "r" in readmode
        self._capture_diff = capture_diff
        self._write_to_file = not dry_run
        self.diff = None
        self.mode = readmode
        self._binary_mode = "b" in readmode

    def write(self, buf):
        BytesIO.write(self, six.ensure_binary(buf))

    def avoid_writing_to_file(self):
        self._write_to_file = False

    def close(self):
        """Stop accepting writes, compare file contents, and rewrite if needed.

        Returns a tuple of bools indicating what action was performed:

            (file existed, file updated)

        If ``capture_diff`` was specified at construction time and the
        underlying file was changed, ``.diff`` will be populated with the diff
        of the result.
        """
        # Use binary data if the caller explicitly asked for it.
        ensure = six.ensure_binary if self._binary_mode else six.ensure_text
        buf = ensure(self.getvalue())

        BytesIO.close(self)
        existed = False
        old_content = None

        try:
            existing = _open(self.name, self.mode)
            existed = True
        except IOError:
            pass
        else:
            try:
                old_content = existing.read()
                if old_content == buf:
                    return True, False
            except IOError:
                pass
            finally:
                existing.close()

        if self._write_to_file:
            ensureParentDir(self.name)
            # Maintain 'b' if specified.  'U' only applies to modes starting with
            # 'r', so it is dropped.
            writemode = "w"
            if self._binary_mode:
                writemode += "b"
                buf = six.ensure_binary(buf)
            else:
                buf = six.ensure_text(buf)
            with _open(self.name, writemode) as file:
                file.write(buf)

        self._generate_diff(buf, old_content)

        return existed, True

    def _generate_diff(self, new_content, old_content):
        """Generate a diff for the changed contents if `capture_diff` is True.

        If the changed contents could not be decoded as utf-8 then generate a
        placeholder message instead of a diff.

        Args:
            new_content: Str or bytes holding the new file contents.
            old_content: Str or bytes holding the original file contents. Should be
                None if no old content is being overwritten.
        """
        if not self._capture_diff:
            return

        try:
            if old_content is None:
                old_lines = None
            else:
                if self._binary_mode:
                    # difflib doesn't work with bytes.
                    old_content = old_content.decode("utf-8")

                old_lines = old_content.splitlines()

            if self._binary_mode:
                # difflib doesn't work with bytes.
                new_content = new_content.decode("utf-8")

            new_lines = new_content.splitlines()

            self.diff = simple_diff(self.name, old_lines, new_lines)
        # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii
        # content or opened and written in different modes may involve
        # implicit conversion and this will make Python unhappy. Since
        # diffing isn't a critical feature, we just ignore the failure.
        # This can go away once FileAvoidWrite uses io.BytesIO and
        # io.StringIO. But that will require a lot of work.
        except (UnicodeDecodeError, UnicodeEncodeError):
            self.diff = ["Binary or non-ascii file changed: %s" % self.name]

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if not self.closed:
            self.close()

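# Illustrative usage of FileAvoidWrite (a minimal sketch kept as comments; the
# filename is hypothetical):
#
#     with FileAvoidWrite("objdir/generated.h", capture_diff=True) as fh:
#         fh.write("#define FOO 1\n")
#     # On close, the file is rewritten only if the buffered content differs
#     # from what is on disk; if it changed, fh.diff holds the unified diff.
#
#     fh = FileAvoidWrite("objdir/generated.h", dry_run=True)
#     fh.write("#define FOO 2\n")
#     existed, updated = fh.close()   # reports the outcome without writing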

def resolve_target_to_make(topobjdir, target):
    r"""
    Resolve `target` (a target, directory, or file) to a make target.

    `topobjdir` is the object directory; all make targets will be
    rooted at or below the top-level Makefile in this directory.

    Returns a pair `(reldir, target)` where `reldir` is a directory
    relative to `topobjdir` containing a Makefile and `target` is a
    make target (possibly `None`).

    A directory resolves to the nearest directory at or above
    containing a Makefile, and target `None`.

    A regular (non-Makefile) file resolves to the nearest directory at
    or above the file containing a Makefile, and an appropriate
    target.

    A Makefile resolves to the nearest parent strictly above the
    Makefile containing a different Makefile, and an appropriate
    target.
    """

    target = target.replace(os.sep, "/").lstrip("/")
    abs_target = os.path.join(topobjdir, target)

    # For directories, run |make -C dir|. If the directory does not
    # contain a Makefile, check parents until we find one. At worst,
    # this will terminate at the root.
    if os.path.isdir(abs_target):
        current = abs_target

        while True:
            make_path = os.path.join(current, "Makefile")
            if os.path.exists(make_path):
                return (current[len(topobjdir) + 1 :], None)

            current = os.path.dirname(current)

    # If it's not in a directory, this is probably a top-level make
    # target. Treat it as such.
    if "/" not in target:
        return (None, target)

    # We have a relative path within the tree. We look for a Makefile
    # as far into the path as possible. Then, we compute the make
    # target as relative to that directory.
    reldir = os.path.dirname(target)
    target = os.path.basename(target)

    while True:
        make_path = os.path.join(topobjdir, reldir, "Makefile")

        # We append to target every iteration, so the check below
        # happens exactly once.
        if target != "Makefile" and os.path.exists(make_path):
            return (reldir, target)

        target = os.path.join(os.path.basename(reldir), target)
        reldir = os.path.dirname(reldir)


class List(list):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that the
    appended thing is a list. This avoids bad surprises coming from appending
    a string to a list, which would just add each letter of the string.
    """

    def __init__(self, iterable=None, **kwargs):
        if iterable is None:
            iterable = []
        if not isinstance(iterable, list):
            raise ValueError("List can only be created from other list instances.")

        self._kwargs = kwargs
        return super(List, self).__init__(iterable)

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError("List can only be extended with other list instances.")

        return super(List, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError(
                    "List can only be sliced with other list " "instances."
                )
            if key.step:
                raise ValueError("List cannot be sliced with a nonzero step " "value")
            # Python 2 and Python 3 do this differently for some reason.
            if six.PY2:
                return super(List, self).__setslice__(key.start, key.stop, val)
            else:
                return super(List, self).__setitem__(key, val)
        return super(List, self).__setitem__(key, val)

    def __setslice__(self, i, j, sequence):
        return self.__setitem__(slice(i, j), sequence)

    def __add__(self, other):
        # Allowing None and EmptyValue is a special case because it makes
        # undefined variable references in moz.build behave better.
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError("Only lists can be appended to lists.")

        new_list = self.__class__(self, **self._kwargs)
        new_list.extend(other)
        return new_list

    def __iadd__(self, other):
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError("Only lists can be appended to lists.")

        return super(List, self).__iadd__(other)

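# Illustrative usage of List (a minimal sketch kept as comments):
#
#     l = List(["a", "b"])
#     l += ["c"]       # extending with another list works
#     l + None         # None and EmptyValue are treated as an empty list
#     l += "d"         # raises ValueError instead of appending "d" letter by letter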

class UnsortedError(Exception):
    def __init__(self, srtd, original):
        assert len(srtd) == len(original)

        self.sorted = srtd
        self.original = original

        for i, orig in enumerate(original):
            s = srtd[i]

            if orig != s:
                self.i = i
                break

    def __str__(self):
        s = StringIO()

        s.write("An attempt was made to add an unsorted sequence to a list. ")
        s.write("The incoming list is unsorted starting at element %d. " % self.i)
        s.write(
            'We expected "%s" but got "%s"'
            % (self.sorted[self.i], self.original[self.i])
        )

        return s.getvalue()


class StrictOrderingOnAppendList(List):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that incoming
    elements be ordered. This enforces cleaner style in moz.build files.
    """

    @staticmethod
    def ensure_sorted(l):
        if isinstance(l, StrictOrderingOnAppendList):
            return

        def _first_element(e):
            # If the list entry is a tuple, we sort based on the first element
            # in the tuple.
            return e[0] if isinstance(e, tuple) else e

        srtd = sorted(l, key=lambda x: _first_element(x).lower())

        if srtd != l:
            raise UnsortedError(srtd, l)

    def __init__(self, iterable=None, **kwargs):
        if iterable is None:
            iterable = []

        StrictOrderingOnAppendList.ensure_sorted(iterable)

        super(StrictOrderingOnAppendList, self).__init__(iterable, **kwargs)

    def extend(self, l):
        StrictOrderingOnAppendList.ensure_sorted(l)

        return super(StrictOrderingOnAppendList, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            StrictOrderingOnAppendList.ensure_sorted(val)
        return super(StrictOrderingOnAppendList, self).__setitem__(key, val)

    def __add__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)

        return super(StrictOrderingOnAppendList, self).__add__(other)

    def __iadd__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)

        return super(StrictOrderingOnAppendList, self).__iadd__(other)

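# Illustrative usage of StrictOrderingOnAppendList (a minimal sketch kept as
# comments):
#
#     l = StrictOrderingOnAppendList(["bar.h", "foo.h"])   # already sorted: accepted
#     l += ["baz.h", "aaa.h"]                              # raises UnsortedError
#
# Sorting is case-insensitive and, for tuple elements, keys off the first
# member of each tuple.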

class ImmutableStrictOrderingOnAppendList(StrictOrderingOnAppendList):
    """Like StrictOrderingOnAppendList, but not allowing mutations of the value."""

    def append(self, elt):
        raise Exception("cannot use append on this type")

    def extend(self, iterable):
        raise Exception("cannot use extend on this type")

    def __setslice__(self, i, j, iterable):
        raise Exception("cannot assign to slices on this type")

    def __setitem__(self, i, elt):
        raise Exception("cannot assign to indexes on this type")

    def __iadd__(self, other):
        raise Exception("cannot use += on this type")


class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendList):
    """An ordered list that accepts a callable to be applied to each item.

    A callable (action) passed to the constructor is run on each item of input.
    The result of running the callable on each item will be stored in place of
    the original input, but the original item must be used to enforce sortedness.
    """

    def __init__(self, iterable=(), action=None):
        if not callable(action):
            raise ValueError(
                "A callable action is required to construct "
                "a StrictOrderingOnAppendListWithAction"
            )

        self._action = action
        if not isinstance(iterable, (tuple, list)):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be initialized "
                "with another list"
            )
        iterable = [self._action(i) for i in iterable]
        super(StrictOrderingOnAppendListWithAction, self).__init__(
            iterable, action=action
        )

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be extended "
                "with another list"
            )
        l = [self._action(i) for i in l]
        return super(StrictOrderingOnAppendListWithAction, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError(
                    "StrictOrderingOnAppendListWithAction can only be sliced "
                    "with another list"
                )
            val = [self._action(item) for item in val]
        return super(StrictOrderingOnAppendListWithAction, self).__setitem__(key, val)

    def __add__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be added with "
                "another list"
            )
        return super(StrictOrderingOnAppendListWithAction, self).__add__(other)

    def __iadd__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                "StrictOrderingOnAppendListWithAction can only be added with "
                "another list"
            )
        other = [self._action(i) for i in other]
        return super(StrictOrderingOnAppendListWithAction, self).__iadd__(other)


class MozbuildDeletionError(Exception):
    pass


def FlagsFactory(flags):
    """Returns a class which holds optional flags for an item in a list.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    The resulting class is used by the various <TypeName>WithFlagsFactory
    functions below.
    """
    assert isinstance(flags, dict)
    assert all(isinstance(v, type) for v in flags.values())

    class Flags(object):
        __slots__ = flags.keys()
        _flags = flags

        def update(self, **kwargs):
            for k, v in six.iteritems(kwargs):
                setattr(self, k, v)

        def __getattr__(self, name):
            if name not in self.__slots__:
                raise AttributeError(
                    "'%s' object has no attribute '%s'"
                    % (self.__class__.__name__, name)
                )
            try:
                return object.__getattr__(self, name)
            except AttributeError:
                value = self._flags[name]()
                self.__setattr__(name, value)
                return value

        def __setattr__(self, name, value):
            if name not in self.__slots__:
                raise AttributeError(
                    "'%s' object has no attribute '%s'"
                    % (self.__class__.__name__, name)
                )
            if not isinstance(value, self._flags[name]):
                raise TypeError(
                    "'%s' attribute of class '%s' must be '%s'"
                    % (name, self.__class__.__name__, self._flags[name].__name__)
                )
            return object.__setattr__(self, name, value)

        def __delattr__(self, name):
            raise MozbuildDeletionError("Unable to delete attributes for this object")

    return Flags


class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList):
    """A list with flags specialized for moz.build environments.

    Each subclass has a set of typed flags; this class lets us use `isinstance`
    for natural testing.
    """


def StrictOrderingOnAppendListWithFlagsFactory(flags):
    """Returns a StrictOrderingOnAppendList-like object, with optional
    flags on each item.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    Example:
        FooList = StrictOrderingOnAppendListWithFlagsFactory({
            'foo': bool, 'bar': unicode
        })
        foo = FooList(['a', 'b', 'c'])
        foo['a'].foo = True
        foo['b'].bar = 'bar'
    """

    class StrictOrderingOnAppendListWithFlagsSpecialization(
        StrictOrderingOnAppendListWithFlags
    ):
        def __init__(self, iterable=None):
            if iterable is None:
                iterable = []
            StrictOrderingOnAppendListWithFlags.__init__(self, iterable)
            self._flags_type = FlagsFactory(flags)
            self._flags = dict()

        def __getitem__(self, name):
            if name not in self._flags:
                if name not in self:
                    raise KeyError("'%s'" % name)
                self._flags[name] = self._flags_type()
            return self._flags[name]

        def __setitem__(self, name, value):
            if not isinstance(name, slice):
                raise TypeError(
                    "'%s' object does not support item assignment"
                    % self.__class__.__name__
                )
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__setitem__(name, value)
            # We may have removed items.
            for k in set(self._flags.keys()) - set(self):
                del self._flags[k]
            if isinstance(value, StrictOrderingOnAppendListWithFlags):
                self._update_flags(value)
            return result

        def _update_flags(self, other):
            if self._flags_type._flags != other._flags_type._flags:
                raise ValueError(
                    "Expected a list of strings with flags like %s, not like %s"
                    % (self._flags_type._flags, other._flags_type._flags)
                )
            intersection = set(self._flags.keys()) & set(other._flags.keys())
            if intersection:
                raise ValueError(
                    "Cannot update flags: both lists of strings with flags configure %s"
                    % intersection
                )
            self._flags.update(other._flags)

        def extend(self, l):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).extend(l)
            if isinstance(l, StrictOrderingOnAppendListWithFlags):
                self._update_flags(l)
            return result

        def __add__(self, other):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__add__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                # Result has flags from other but not from self, since
                # internally we duplicate self and then extend with other, and
                # only extend knows about flags.  Since we don't allow updating
                # when the sets of flag keys intersect, which instance we pass
                # to _update_flags here matters.  This needs to be correct but
                # is an implementation detail.
                result._update_flags(self)
            return result

        def __iadd__(self, other):
            result = super(
                StrictOrderingOnAppendListWithFlagsSpecialization, self
            ).__iadd__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                self._update_flags(other)
            return result

    return StrictOrderingOnAppendListWithFlagsSpecialization

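# Illustrative sketch of the typed flags enforced by the factory above (kept as
# comments; FooList mirrors the docstring example):
#
#     FooList = StrictOrderingOnAppendListWithFlagsFactory({"foo": bool})
#     foo = FooList(["a", "b", "c"])
#     foo["a"].foo = True    # ok: matches the declared bool type
#     foo["a"].foo = "yes"   # raises TypeError: flag must be a bool
#     foo["a"].baz = True    # raises AttributeError: no such flag
#     foo["d"]               # raises KeyError: "d" is not in the list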

class HierarchicalStringList(object):
    """A hierarchy of lists of strings.

    Each instance of this object contains a list of strings, which can be set or
    appended to. A sub-level of the hierarchy is also an instance of this class
    and can be added by assigning or appending to an attribute instead.

    For example, the moz.build variable EXPORTS is an instance of this class. We
    can do:

    EXPORTS += ['foo.h']
    EXPORTS.mozilla.dom += ['bar.h']

    In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
    EXPORTS.mozilla.dom), and the first and last each have one element in their
    list.
    """

    __slots__ = ("_strings", "_children")

    def __init__(self):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        self._strings = StrictOrderingOnAppendList()
        self._children = {}

    class StringListAdaptor(collections.abc.Sequence):
        def __init__(self, hsl):
            self._hsl = hsl

        def __getitem__(self, index):
            return self._hsl._strings[index]

        def __len__(self):
            return len(self._hsl._strings)

    def walk(self):
        """Walk over all HierarchicalStringLists in the hierarchy.

        This is a generator of (path, sequence).

        The path is '' for the root level and '/'-delimited strings for
        any descendants.  The sequence is a read-only sequence of the
        strings contained at that level.
        """

        if self._strings:
            path_to_here = ""
            yield path_to_here, self.StringListAdaptor(self)

        for k, l in sorted(self._children.items()):
            for p, v in l.walk():
                path_to_there = "%s/%s" % (k, p)
                yield path_to_there.strip("/"), v

    def __setattr__(self, name, value):
        if name in self.__slots__:
            return object.__setattr__(self, name, value)

        # __setattr__ can be called with a list when a simple assignment is
        # used:
        #
        # EXPORTS.foo = ['file.h']
        #
        # In this case, we need to overwrite foo's current list of strings.
        #
        # However, __setattr__ is also called with a HierarchicalStringList
        # to try to actually set the attribute. We want to ignore this case,
        # since we don't actually create an attribute called 'foo', but just add
        # it to our list of children (using _get_exportvariable()).
        self._set_exportvariable(name, value)

    def __getattr__(self, name):
        if name.startswith("__"):
            return object.__getattr__(self, name)
        return self._get_exportvariable(name)

    def __delattr__(self, name):
        raise MozbuildDeletionError("Unable to delete attributes for this object")

    def __iadd__(self, other):
        if isinstance(other, HierarchicalStringList):
            self._strings += other._strings
            for c in other._children:
                self[c] += other[c]
        else:
            self._check_list(other)
            self._strings += other
        return self

    def __getitem__(self, name):
        return self._get_exportvariable(name)

    def __setitem__(self, name, value):
        self._set_exportvariable(name, value)

    def _get_exportvariable(self, name):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        child = self._children.get(name)
        if not child:
            child = self._children[name] = HierarchicalStringList()
        return child

    def _set_exportvariable(self, name, value):
        if name in self._children:
            if value is self._get_exportvariable(name):
                return
            raise KeyError("global_ns", "reassign", "<some variable>.%s" % name)

        exports = self._get_exportvariable(name)
        exports._check_list(value)
        exports._strings += value

    def _check_list(self, value):
        if not isinstance(value, list):
            raise ValueError("Expected a list of strings, not %s" % type(value))
        for v in value:
            if not isinstance(v, six.string_types):
                raise ValueError(
                    "Expected a list of strings, not an element of %s" % type(v)
                )

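# Illustrative usage of HierarchicalStringList (a minimal sketch kept as
# comments):
#
#     exports = HierarchicalStringList()
#     exports += ["foo.h"]
#     exports.mozilla.dom += ["bar.h"]
#     [(path, list(strings)) for path, strings in exports.walk()]
#     # -> [("", ["foo.h"]), ("mozilla/dom", ["bar.h"])]
#
# Assigning over an existing child (e.g. exports.mozilla = [...]) raises a
# KeyError to prevent accidental overwrites.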

class LockFile(object):
    """LockFile is used by the lock_file method to hold the lock.

    This object should not be used directly, but only through
    the lock_file method below.
    """

    def __init__(self, lockfile):
        self.lockfile = lockfile

    def __del__(self):
        while True:
            try:
                os.remove(self.lockfile)
                break
            except OSError as e:
                if e.errno == errno.EACCES:
                    # Another process probably has the file open, we'll retry.
                    # Just a short sleep since we want to drop the lock ASAP
                    # (but we need to let some other process close the file
                    # first).
                    time.sleep(0.1)
                else:
                    # Re-raise unknown errors
                    raise


def lock_file(lockfile, max_wait=600):
    """Create and hold a lockfile of the given name, with the given timeout.

    To release the lock, delete the returned object.
    """

    # FUTURE This function and object could be written as a context manager.

    while True:
        try:
            fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
            # We created the lockfile, so we're the owner
            break
        except OSError as e:
            if e.errno == errno.EEXIST or (
                sys.platform == "win32" and e.errno == errno.EACCES
            ):
                pass
            else:
                # Should not occur
                raise

        try:
            # The lock file exists, try to stat it to get its age
            # and read its contents to report the owner PID
            f = open(lockfile, "r")
            s = os.stat(lockfile)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT or e.errno == errno.EACCES:
                # We didn't create the lockfile, so it did exist, but it's
                # gone now. Just try again
                continue

            raise Exception(
                "{0} exists but stat() failed: {1}".format(lockfile, e.strerror)
            )

        # We didn't create the lockfile and it's still there, check
        # its age
        now = int(time.time())
        if now - s[stat.ST_MTIME] > max_wait:
            pid = f.readline().rstrip()
            raise Exception(
                "{0} has been locked for more than "
                "{1} seconds (PID {2})".format(lockfile, max_wait, pid)
            )

        # It's not been locked too long, wait a while and retry
        f.close()
        time.sleep(1)

    # If we get here, we have the lockfile. Convert the os.open file
    # descriptor into a Python file object and record our PID in it.
    f = os.fdopen(fd, "w")
    f.write("{0}\n".format(os.getpid()))
    f.close()

    return LockFile(lockfile)

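# Illustrative usage of lock_file (a minimal sketch kept as comments; the path
# is hypothetical):
#
#     lock = lock_file("build.lock")
#     try:
#         ...  # do work while holding the lock
#     finally:
#         del lock   # deleting the object removes the lockfile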

class OrderedDefaultDict(OrderedDict):
    """A combination of OrderedDict and defaultdict."""

    def __init__(self, default_factory, *args, **kwargs):
        OrderedDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self[key] = self._default_factory()
        return value


class KeyedDefaultDict(dict):
    """Like a defaultdict, but the default_factory function takes the key as
    argument"""

    def __init__(self, default_factory, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self._default_factory(key)
        dict.__setitem__(self, key, value)
        return value


class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict):
    """Like KeyedDefaultDict, but read-only."""


class memoize(dict):
    """A decorator to memoize the results of function calls depending
    on its arguments.
    Both functions and instance methods are handled, although in the
    instance method case, the results are cached in the instance itself.
    """

    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)

    def __call__(self, *args):
        if args not in self:
            self[args] = self.func(*args)
        return self[args]

    def method_call(self, instance, *args):
        name = "_%s" % self.func.__name__
        if not hasattr(instance, name):
            setattr(instance, name, {})
        cache = getattr(instance, name)
        if args not in cache:
            cache[args] = self.func(instance, *args)
        return cache[args]

    def __get__(self, instance, cls):
        return functools.update_wrapper(
            functools.partial(self.method_call, instance), self.func
        )

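# Illustrative usage of the memoize decorator (a minimal sketch kept as
# comments; expensive() is a hypothetical function):
#
#     @memoize
#     def expensive(x, y):
#         return x + y      # computed once per distinct (x, y)
#
#     expensive(1, 2)       # computes and caches the result
#     expensive(1, 2)       # returns the cached value
#
# When applied to an instance method, results are cached on the instance in a
# "_<method name>" dict attribute instead.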

class memoized_property(object):
    """A specialized version of the memoize decorator that works for
    class instance properties.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls):
        name = "_%s" % self.func.__name__
        if not hasattr(instance, name):
            setattr(instance, name, self.func(instance))
        return getattr(instance, name)


def TypedNamedTuple(name, fields):
    """Factory for named tuple types with strong typing.

    Arguments are an iterable of 2-tuples. The first member is the
    field name. The second member is a type the field will be validated
    to be.

    Construction of instances varies from ``collections.namedtuple``.

    First, if a single tuple argument is given to the constructor, this is
    treated as the equivalent of passing each tuple value as a separate
    argument into __init__. e.g.::

        t = (1, 2)
        TypedTuple(t) == TypedTuple(1, 2)

    This behavior is meant for moz.build files, so vanilla tuples are
    automatically cast to typed tuple instances.

    Second, fields in the tuple are validated to be instances of the specified
    type. This is done via an ``isinstance()`` check. To allow multiple types,
    pass a tuple as the allowed types field.
    """
    cls = collections.namedtuple(name, (name for name, typ in fields))

    class TypedTuple(cls):
        __slots__ = ()

        def __new__(klass, *args, **kwargs):
            if len(args) == 1 and not kwargs and isinstance(args[0], tuple):
                args = args[0]

            return super(TypedTuple, klass).__new__(klass, *args, **kwargs)

        def __init__(self, *args, **kwargs):
            for i, (fname, ftype) in enumerate(self._fields):
                value = self[i]

                if not isinstance(value, ftype):
                    raise TypeError(
                        "field in tuple not of proper type: %s; "
                        "got %s, expected %s" % (fname, type(value), ftype)
                    )

    TypedTuple._fields = fields

    return TypedTuple

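# Illustrative usage of TypedNamedTuple (a minimal sketch kept as comments;
# "Point" and its fields are hypothetical):
#
#     Point = TypedNamedTuple("Point", [("x", int), ("y", int)])
#     Point(1, 2)       # ok
#     Point((1, 2))     # a single tuple argument is unpacked: same as above
#     Point(1, "2")     # raises TypeError: field "y" is not an int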

@memoize
def TypedList(type, base_class=List):
    """A list with type coercion.

    The given ``type`` is what list elements are being coerced to. It may do
    strict validation, throwing ValueError exceptions.

    A ``base_class`` type can be given for more specific uses than a List. For
    example, a Typed StrictOrderingOnAppendList can be created with:

       TypedList(unicode, StrictOrderingOnAppendList)
    """

    class _TypedList(base_class):
        @staticmethod
        def normalize(e):
            if not isinstance(e, type):
                e = type(e)
            return e

        def _ensure_type(self, l):
            if isinstance(l, self.__class__):
                return l

            return [self.normalize(e) for e in l]

        def __init__(self, iterable=None, **kwargs):
            if iterable is None:
                iterable = []
            iterable = self._ensure_type(iterable)

            super(_TypedList, self).__init__(iterable, **kwargs)

        def extend(self, l):
            l = self._ensure_type(l)

            return super(_TypedList, self).extend(l)

        def __setitem__(self, key, val):
            val = self._ensure_type(val)

            return super(_TypedList, self).__setitem__(key, val)

        def __add__(self, other):
            other = self._ensure_type(other)

            return super(_TypedList, self).__add__(other)

        def __iadd__(self, other):
            other = self._ensure_type(other)

            return super(_TypedList, self).__iadd__(other)

        def append(self, other):
            self += [other]

    return _TypedList

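# Illustrative usage of TypedList (a minimal sketch kept as comments):
#
#     StrList = TypedList(six.text_type)
#     l = StrList(["a"])
#     l.append(1)                 # coerced via six.text_type(1) -> "1"
#
#     SortedStrList = TypedList(six.text_type, StrictOrderingOnAppendList)
#     SortedStrList(["b", "a"])   # raises UnsortedError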

def group_unified_files(files, unified_prefix, unified_suffix, files_per_unified_file):
    """Return an iterator of (unified_filename, source_filenames) tuples.

    We compile most C and C++ files in "unified mode"; instead of compiling
    ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file
    that looks approximately like::

       #include "a.cpp"
       #include "b.cpp"
       #include "c.cpp"

    This function handles the details of generating names for the unified
    files, and determining which original source files go in which unified
    file."""

    # Our last returned list of source filenames may be short, and we
    # don't want the fill value inserted by zip_longest to be an
    # issue.  So we do a little dance to filter it out ourselves.
    dummy_fill_value = ("dummy",)

    def filter_out_dummy(iterable):
        return six.moves.filter(lambda x: x != dummy_fill_value, iterable)

    # From the itertools documentation, slightly modified:
    def grouper(n, iterable):
        "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
        args = [iter(iterable)] * n
        return six.moves.zip_longest(fillvalue=dummy_fill_value, *args)

    for i, unified_group in enumerate(grouper(files_per_unified_file, files)):
        just_the_filenames = list(filter_out_dummy(unified_group))
        yield "%s%d.%s" % (unified_prefix, i, unified_suffix), just_the_filenames

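# Illustrative usage of group_unified_files (a minimal sketch kept as
# comments):
#
#     files = ["a.cpp", "b.cpp", "c.cpp"]
#     list(group_unified_files(files, "Unified", "cpp", 2))
#     # -> [("Unified0.cpp", ["a.cpp", "b.cpp"]), ("Unified1.cpp", ["c.cpp"])]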

def pair(iterable):
    """Given an iterable, returns an iterable pairing its items.

    For example,
        list(pair([1,2,3,4,5,6]))
    returns
        [(1,2), (3,4), (5,6)]
    """
    i = iter(iterable)
    return six.moves.zip_longest(i, i)


def pairwise(iterable):
    """Given an iterable, returns an iterable of overlapped pairs of
    its items. Based on the Python itertools documentation.

    For example,
        list(pairwise([1,2,3,4,5,6]))
    returns
        [(1,2), (2,3), (3,4), (4,5), (5,6)]
    """
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)


VARIABLES_RE = re.compile(r"\$\((\w+)\)")


def expand_variables(s, variables):
    """Given a string with $(var) variable references, replace those references
    with the corresponding entries from the given `variables` dict.

    If a variable value is not a string, it is iterated and its items are
    joined with a single space."""
    result = ""
    for s, name in pair(VARIABLES_RE.split(s)):
        result += s
        value = variables.get(name)
        if not value:
            continue
        if not isinstance(value, six.string_types):
            value = " ".join(value)
        result += value
    return result

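# Illustrative usage of expand_variables (a minimal sketch kept as comments):
#
#     expand_variables("$(CC) -o $(OUT)", {"CC": "clang", "OUT": "a.out"})
#     # -> "clang -o a.out"
#     expand_variables("$(FLAGS)", {"FLAGS": ["-g", "-O2"]})
#     # -> "-g -O2"   (non-string values are joined with spaces)
#     expand_variables("$(MISSING)", {})
#     # -> ""         (unknown variables expand to nothing)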

class DefinesAction(argparse.Action):
    """An ArgumentParser action to handle -Dvar[=value] type of arguments."""

    def __call__(self, parser, namespace, values, option_string):
        defines = getattr(namespace, self.dest)
        if defines is None:
            defines = {}
        values = values.split("=", 1)
        if len(values) == 1:
            name, value = values[0], 1
        else:
            name, value = values
            if value.isdigit():
                value = int(value)
        defines[name] = value
        setattr(namespace, self.dest, defines)

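# Illustrative usage of DefinesAction (a minimal sketch kept as comments; the
# parser setup is hypothetical):
#
#     parser = argparse.ArgumentParser()
#     parser.add_argument("-D", action=DefinesAction, dest="defines")
#     parser.parse_args(["-DFOO", "-DBAR=2", "-DBAZ=qux"]).defines
#     # -> {"FOO": 1, "BAR": 2, "BAZ": "qux"}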

class EnumStringComparisonError(Exception):
    pass


class EnumString(six.text_type):
    """A string type that can only take a limited set of values, similar to
    an Enum, and can only be compared against that set of values.

    The class is meant to be subclassed, where the subclass defines
    POSSIBLE_VALUES. The `subclass` method is a helper to create such
    subclasses.
    """

    POSSIBLE_VALUES = ()

    def __init__(self, value):
        if value not in self.POSSIBLE_VALUES:
            raise ValueError(
                "'%s' is not a valid value for %s" % (value, self.__class__.__name__)
            )

    def __eq__(self, other):
        if other not in self.POSSIBLE_VALUES:
            raise EnumStringComparisonError(
                "Can only compare with %s"
                % ", ".join("'%s'" % v for v in self.POSSIBLE_VALUES)
            )
        return super(EnumString, self).__eq__(other)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return super(EnumString, self).__hash__()

    @staticmethod
    def subclass(*possible_values):
        class EnumStringSubclass(EnumString):
            POSSIBLE_VALUES = possible_values

        return EnumStringSubclass

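# Illustrative usage of EnumString.subclass (a minimal sketch kept as comments;
# the values are hypothetical):
#
#     CompilerType = EnumString.subclass("msvc", "gcc", "clang")
#     c = CompilerType("gcc")
#     c == "gcc"             # -> True
#     c == "icc"             # raises EnumStringComparisonError
#     CompilerType("tcc")    # raises ValueError: not a valid value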

def _escape_char(c):
    # str.encode('unicode_escape') doesn't escape quotes, presumably because
    # quoting could be done with either ' or ".
    if c == "'":
        return "\\'"
    return six.text_type(c.encode("unicode_escape"))


if six.PY2:  # Delete when we get rid of Python 2.
    # Mapping table between raw characters below \x80 and their escaped
    # counterpart, when they differ
    _INDENTED_REPR_TABLE = {
        c: e
        for c, e in map(lambda x: (x, _escape_char(x)), map(unichr, range(128)))
        if c != e
    }
    # Regexp matching all characters to escape.
    _INDENTED_REPR_RE = re.compile(
        "([" + "".join(_INDENTED_REPR_TABLE.values()) + "]+)"
    )


def write_indented_repr(f, o, indent=4):
    """Write an indented representation (similar to repr()) of the object to the
    given file `f`.

    One notable difference with repr is that the returned representation
    assumes `from __future__ import unicode_literals`.
    """
    if six.PY3:
        pprint.pprint(o, stream=f, indent=indent)
        return
    # Delete everything below when we get rid of Python 2.
    one_indent = " " * indent

    def recurse_indented_repr(o, level):
        if isinstance(o, dict):
            yield "{\n"
            for k, v in sorted(o.items()):
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(k, level + 1):
                    yield d
                yield ": "
                for d in recurse_indented_repr(v, level + 1):
                    yield d
                yield ",\n"
            yield one_indent * level
            yield "}"
        elif isinstance(o, bytes):
            yield "b"
            yield repr(o)
        elif isinstance(o, six.text_type):
            yield "'"
            # We want a readable string (non escaped unicode), but some
            # special characters need escaping (e.g. \n, \t, etc.)
            for i, s in enumerate(_INDENTED_REPR_RE.split(o)):
                if i % 2:
                    for c in s:
                        yield _INDENTED_REPR_TABLE[c]
                else:
                    yield s
            yield "'"
        elif hasattr(o, "__iter__"):
            yield "[\n"
            for i in o:
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(i, level + 1):
                    yield d
                yield ",\n"
            yield one_indent * level
            yield "]"
        else:
            yield repr(o)

    result = "".join(recurse_indented_repr(o, 0)) + "\n"
    f.write(result)


def ensure_bytes(value, encoding="utf-8"):
    if isinstance(value, six.text_type):
        return value.encode(encoding)
    return value


def ensure_unicode(value, encoding="utf-8"):
    if isinstance(value, six.binary_type):
        return value.decode(encoding)
    return value


def process_time():
    if six.PY2:
        return time.clock()
    else:
        return time.process_time()


def hexdump(buf):
    """
    Returns a list of hexdump-like lines corresponding to the given input buffer.
    """
    assert six.PY3
    off_format = "%0{}x ".format(len(str(len(buf))))
    lines = []
    for off in range(0, len(buf), 16):
        line = off_format % off
        chunk = buf[off : min(off + 16, len(buf))]
        for n, byte in enumerate(chunk):
            line += " %02x" % byte
            if n == 7:
                line += " "
        for n in range(len(chunk), 16):
            line += "   "
            if n == 7:
                line += " "
        line += "  |"
        for byte in chunk:
            if byte < 127 and byte >= 32:
                line += chr(byte)
            else:
                line += "."
        for n in range(len(chunk), 16):
            line += " "
        line += "|\n"
        lines.append(line)
    return lines
