# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

# This file contains miscellaneous utility functions that don't belong anywhere
# in particular.

from __future__ import absolute_import, print_function, unicode_literals

import argparse
import collections
import ctypes
import difflib
import errno
import functools
import hashlib
import io
import itertools
import os
import pprint
import re
import stat
import sys
import time
from collections import (
    OrderedDict,
)
from io import (BytesIO, StringIO)

if sys.version.startswith('2'):
    from collections import Sequence
else:
    from collections.abc import Sequence

import six

if sys.platform == 'win32':
    _kernel32 = ctypes.windll.kernel32
    _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
    system_encoding = 'mbcs'
else:
    system_encoding = 'utf-8'


def exec_(object, globals=None, locals=None):
    """Wrapper around the exec statement to avoid bogus errors like:

    SyntaxError: unqualified exec is not allowed in function ...
    it is a nested function.

    or

    SyntaxError: unqualified exec is not allowed in function ...
    it contains a nested function with free variable

    which happen with older versions of python 2.7.
    """
    exec(object, globals, locals)


def _open(path, mode):
    if 'b' in mode:
        return io.open(path, mode)
    return io.open(path, mode, encoding='utf-8', newline='\n')


def hash_file(path, hasher=None):
    """Hashes a file specified by the path given and returns the hex digest."""

    # If the default hashing function changes, this may invalidate
    # lots of cached data.  Don't change it lightly.
    h = hasher or hashlib.sha1()

    with open(path, 'rb') as fh:
        while True:
            data = fh.read(8192)

            if not len(data):
                break

            h.update(data)

    return h.hexdigest()
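
# Example usage of hash_file() (illustrative; the path is hypothetical):
#
#     digest = hash_file('/path/to/some/file')
#     # `digest` is the hex SHA-1 digest of the file contents by default;
#     # pass e.g. hasher=hashlib.sha256() to use a different algorithm.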


class EmptyValue(six.text_type):
    """A dummy type that behaves like an empty string and sequence.

    This type exists in order to support
    :py:class:`mozbuild.frontend.reader.EmptyConfig`. It should likely not be
    used elsewhere.
    """

    def __init__(self):
        super(EmptyValue, self).__init__()


class ReadOnlyNamespace(object):
    """A class for objects with immutable attributes set at initialization."""

    def __init__(self, **kwargs):
        for k, v in six.iteritems(kwargs):
            super(ReadOnlyNamespace, self).__setattr__(k, v)

    def __delattr__(self, key):
        raise Exception('Object does not support deletion.')

    def __setattr__(self, key, value):
        raise Exception('Object does not support assignment.')

    def __ne__(self, other):
        return not (self == other)

    def __eq__(self, other):
        return self is other or (
            hasattr(other, '__dict__') and self.__dict__ == other.__dict__)

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.__dict__)


class ReadOnlyDict(dict):
    """A read-only dictionary."""

    def __init__(self, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)

    def __delitem__(self, key):
        raise Exception('Object does not support deletion.')

    def __setitem__(self, key, value):
        raise Exception('Object does not support assignment.')

    def update(self, *args, **kwargs):
        raise Exception('Object does not support update.')
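
# Example usage of ReadOnlyDict (illustrative):
#
#     d = ReadOnlyDict({'CC': 'clang'})
#     d['CC']            # -> 'clang'
#     d['CC'] = 'gcc'    # raises Exception; assignment is not supported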


class undefined_default(object):
    """Represents an undefined argument value that isn't None."""


undefined = undefined_default()


class ReadOnlyDefaultDict(ReadOnlyDict):
    """A read-only dictionary that supports default values on retrieval."""

    def __init__(self, default_factory, *args, **kwargs):
        ReadOnlyDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self._default_factory()
        dict.__setitem__(self, key, value)
        return value


def ensureParentDir(path):
    """Ensures the directory parent to the given file exists."""
    d = os.path.dirname(path)
    if d and not os.path.exists(d):
        try:
            os.makedirs(d)
        except OSError as error:
            if error.errno != errno.EEXIST:
                raise


def mkdir(path, not_indexed=False):
    """Ensure a directory exists.

    If ``not_indexed`` is True, an attribute is set that disables content
    indexing on the directory.
    """
    try:
        os.makedirs(path)
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise

    if not_indexed:
        if sys.platform == 'win32':
            if isinstance(path, six.string_types):
                fn = _kernel32.SetFileAttributesW
            else:
                fn = _kernel32.SetFileAttributesA

            fn(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
        elif sys.platform == 'darwin':
            with open(os.path.join(path, '.metadata_never_index'), 'a'):
                pass


def simple_diff(filename, old_lines, new_lines):
    """Returns the diff between old_lines and new_lines, in unified diff form,
    as a list of lines.

    old_lines and new_lines are lists of non-newline terminated lines to
    compare.
    old_lines can be None, indicating a file creation.
    new_lines can be None, indicating a file deletion.
    """

    old_name = '/dev/null' if old_lines is None else filename
    new_name = '/dev/null' if new_lines is None else filename

    return difflib.unified_diff(old_lines or [], new_lines or [],
                                old_name, new_name, n=4, lineterm='')


class FileAvoidWrite(BytesIO):
    """File-like object that buffers output and only writes if content changed.

    We create an instance from an existing filename. New content is written to
    it. When we close the file object, if the content in the in-memory buffer
    differs from what is on disk, then we write out the new content. Otherwise,
    the original file is untouched.

    Instances can optionally capture diffs of file changes. This feature is not
    enabled by default because it (a) doesn't make sense for binary files and
    (b) could add unwanted overhead to calls.

    Additionally, there is a dry-run mode in which the file is not actually
    written out; reporting of whether the file existed and would have been
    updated still occurs, as does diff capture if requested.
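
    An example (illustrative; the filename and content are hypothetical)::

        fh = FileAvoidWrite('generated/header.h', capture_diff=True)
        fh.write('#define VERSION 42')
        existed, updated = fh.close()
        # fh.diff now holds the unified diff if the content changed.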
    """

    def __init__(self, filename, capture_diff=False, dry_run=False, readmode='rU'):
        BytesIO.__init__(self)
        self.name = filename
        assert type(capture_diff) == bool
        assert type(dry_run) == bool
        assert 'r' in readmode
        self._capture_diff = capture_diff
        self._write_to_file = not dry_run
        self.diff = None
        self.mode = readmode
        self._binary_mode = 'b' in readmode

    def write(self, buf):
        BytesIO.write(self, six.ensure_binary(buf))

    def avoid_writing_to_file(self):
        self._write_to_file = False

    def close(self):
        """Stop accepting writes, compare file contents, and rewrite if needed.

        Returns a tuple of bools indicating what action was performed:

            (file existed, file updated)

        If ``capture_diff`` was specified at construction time and the
        underlying file was changed, ``.diff`` will be populated with the diff
        of the result.
        """
        # Use binary data if the caller explicitly asked for it.
        ensure = six.ensure_binary if self._binary_mode else six.ensure_text
        buf = ensure(self.getvalue())

        BytesIO.close(self)
        existed = False
        old_content = None

        try:
            existing = _open(self.name, self.mode)
            existed = True
        except IOError:
            pass
        else:
            try:
                old_content = existing.read()
                if old_content == buf:
                    return True, False
            except IOError:
                pass
            finally:
                existing.close()

        if self._write_to_file:
            ensureParentDir(self.name)
            # Maintain 'b' if specified.  'U' only applies to modes starting with
            # 'r', so it is dropped.
            writemode = 'w'
            if self._binary_mode:
                writemode += 'b'
                buf = six.ensure_binary(buf)
            else:
                buf = six.ensure_text(buf)
            with _open(self.name, writemode) as file:
                file.write(buf)

        self._generate_diff(buf, old_content)

        return existed, True

    def _generate_diff(self, new_content, old_content):
        """Generate a diff for the changed contents if `capture_diff` is True.

        If the changed contents could not be decoded as utf-8 then generate a
        placeholder message instead of a diff.

        Args:
            new_content: Str or bytes holding the new file contents.
            old_content: Str or bytes holding the original file contents. Should be
                None if no old content is being overwritten.
        """
        if not self._capture_diff:
            return

        try:
            if old_content is None:
                old_lines = None
            else:
                if self._binary_mode:
                    # difflib doesn't work with bytes.
                    old_content = old_content.decode('utf-8')

                old_lines = old_content.splitlines()

            if self._binary_mode:
                # difflib doesn't work with bytes.
                new_content = new_content.decode('utf-8')

            new_lines = new_content.splitlines()

            self.diff = simple_diff(self.name, old_lines, new_lines)
        # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii
        # content or opened and written in different modes may involve
        # implicit conversion and this will make Python unhappy. Since
        # diffing isn't a critical feature, we just ignore the failure.
        # This can go away once FileAvoidWrite uses io.BytesIO and
        # io.StringIO. But that will require a lot of work.
        except (UnicodeDecodeError, UnicodeEncodeError):
            self.diff = ['Binary or non-ascii file changed: %s' %
                         self.name]

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        if not self.closed:
            self.close()


def resolve_target_to_make(topobjdir, target):
    r'''
    Resolve `target` (a target, directory, or file) to a make target.

    `topobjdir` is the object directory; all make targets will be
    rooted at or below the top-level Makefile in this directory.

    Returns a pair `(reldir, target)` where `reldir` is a directory
    relative to `topobjdir` containing a Makefile and `target` is a
    make target (possibly `None`).

    A directory resolves to the nearest directory at or above
    containing a Makefile, and target `None`.

    A regular (non-Makefile) file resolves to the nearest directory at
    or above the file containing a Makefile, and an appropriate
    target.

    A Makefile resolves to the nearest parent strictly above the
    Makefile containing a different Makefile, and an appropriate
    target.
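
    For example (paths are illustrative): a `target` of `dom/bindings` whose
    objdir directory contains a Makefile resolves to `('dom/bindings', None)`,
    while `dom/bindings/somefile.cpp` resolves to
    `('dom/bindings', 'somefile.cpp')` given that same Makefile.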
    '''

    target = target.replace(os.sep, '/').lstrip('/')
    abs_target = os.path.join(topobjdir, target)

    # For directories, run |make -C dir|. If the directory does not
    # contain a Makefile, check parents until we find one. At worst,
    # this will terminate at the root.
    if os.path.isdir(abs_target):
        current = abs_target

        while True:
            make_path = os.path.join(current, 'Makefile')
            if os.path.exists(make_path):
                return (current[len(topobjdir) + 1:], None)

            current = os.path.dirname(current)

    # If it's not in a directory, this is probably a top-level make
    # target. Treat it as such.
    if '/' not in target:
        return (None, target)

    # We have a relative path within the tree. We look for a Makefile
    # as far into the path as possible. Then, we compute the make
    # target as relative to that directory.
    reldir = os.path.dirname(target)
    target = os.path.basename(target)

    while True:
        make_path = os.path.join(topobjdir, reldir, 'Makefile')

        # We append to target every iteration, so the check below
        # happens exactly once.
        if target != 'Makefile' and os.path.exists(make_path):
            return (reldir, target)

        target = os.path.join(os.path.basename(reldir), target)
        reldir = os.path.dirname(reldir)


class List(list):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that the
    appended thing is a list. This avoids bad surprises coming from appending
    a string to a list, which would just add each letter of the string.
    """

    def __init__(self, iterable=None, **kwargs):
        if iterable is None:
            iterable = []
        if not isinstance(iterable, list):
            raise ValueError('List can only be created from other list instances.')

        self._kwargs = kwargs
        return super(List, self).__init__(iterable)

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError('List can only be extended with other list instances.')

        return super(List, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError('List can only be sliced with other list '
                                 'instances.')
            if key.step:
                raise ValueError('List cannot be sliced with a nonzero step '
                                 'value')
            # Python 2 and Python 3 do this differently for some reason.
            if six.PY2:
                return super(List, self).__setslice__(key.start, key.stop,
                                                      val)
            else:
                return super(List, self).__setitem__(key, val)
        return super(List, self).__setitem__(key, val)

    def __setslice__(self, i, j, sequence):
        return self.__setitem__(slice(i, j), sequence)

    def __add__(self, other):
        # Allow None and EmptyValue as special cases because they make
        # undefined variable references in moz.build behave better.
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError('Only lists can be appended to lists.')

        new_list = self.__class__(self, **self._kwargs)
        new_list.extend(other)
        return new_list

    def __iadd__(self, other):
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError('Only lists can be appended to lists.')

        return super(List, self).__iadd__(other)


class UnsortedError(Exception):
    def __init__(self, srtd, original):
        assert len(srtd) == len(original)

        self.sorted = srtd
        self.original = original

        for i, orig in enumerate(original):
            s = srtd[i]

            if orig != s:
                self.i = i
                break

    def __str__(self):
        s = StringIO()

        s.write('An attempt was made to add an unsorted sequence to a list. ')
        s.write('The incoming list is unsorted starting at element %d. ' %
                self.i)
        s.write('We expected "%s" but got "%s"' % (
            self.sorted[self.i], self.original[self.i]))

        return s.getvalue()


class StrictOrderingOnAppendList(List):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that incoming
    elements be ordered. This enforces cleaner style in moz.build files.
    """

    @staticmethod
    def ensure_sorted(l):
        if isinstance(l, StrictOrderingOnAppendList):
            return

        def _first_element(e):
            # If the list entry is a tuple, we sort based on the first element
            # in the tuple.
            return e[0] if isinstance(e, tuple) else e
        srtd = sorted(l, key=lambda x: _first_element(x).lower())

        if srtd != l:
            raise UnsortedError(srtd, l)

    def __init__(self, iterable=None, **kwargs):
        if iterable is None:
            iterable = []

        StrictOrderingOnAppendList.ensure_sorted(iterable)

        super(StrictOrderingOnAppendList, self).__init__(iterable, **kwargs)

    def extend(self, l):
        StrictOrderingOnAppendList.ensure_sorted(l)

        return super(StrictOrderingOnAppendList, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            StrictOrderingOnAppendList.ensure_sorted(val)
        return super(StrictOrderingOnAppendList, self).__setitem__(key, val)

    def __add__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)

        return super(StrictOrderingOnAppendList, self).__add__(other)

    def __iadd__(self, other):
        StrictOrderingOnAppendList.ensure_sorted(other)

        return super(StrictOrderingOnAppendList, self).__iadd__(other)
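
# Example usage of StrictOrderingOnAppendList (illustrative):
#
#     l = StrictOrderingOnAppendList(['bar.cpp', 'foo.cpp'])
#     l += ['baz.cpp', 'qux.cpp']     # fine: the incoming list is sorted
#     l += ['zoo.cpp', 'abc.cpp']     # raises UnsortedError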


class ImmutableStrictOrderingOnAppendList(StrictOrderingOnAppendList):
    """Like StrictOrderingOnAppendList, but not allowing mutations of the value.
    """

    def append(self, elt):
        raise Exception("cannot use append on this type")

    def extend(self, iterable):
        raise Exception("cannot use extend on this type")

    def __setslice__(self, i, j, iterable):
        raise Exception("cannot assign to slices on this type")

    def __setitem__(self, i, elt):
        raise Exception("cannot assign to indexes on this type")

    def __iadd__(self, other):
        raise Exception("cannot use += on this type")


class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendList):
    """An ordered list that accepts a callable to be applied to each item.

    A callable (action) passed to the constructor is run on each item of input.
    The result of running the callable on each item will be stored in place of
    the original input, but the original item must be used to enforce sortedness.
    """

    def __init__(self, iterable=(), action=None):
        if not callable(action):
            raise ValueError('A callable action is required to construct '
                             'a StrictOrderingOnAppendListWithAction')

        self._action = action
        if not isinstance(iterable, (tuple, list)):
            raise ValueError(
                'StrictOrderingOnAppendListWithAction can only be initialized '
                'with another list')
        iterable = [self._action(i) for i in iterable]
        super(StrictOrderingOnAppendListWithAction, self).__init__(
            iterable, action=action)

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError(
                'StrictOrderingOnAppendListWithAction can only be extended '
                'with another list')
        l = [self._action(i) for i in l]
        return super(StrictOrderingOnAppendListWithAction, self).extend(l)

    def __setitem__(self, key, val):
        if isinstance(key, slice):
            if not isinstance(val, list):
                raise ValueError(
                    'StrictOrderingOnAppendListWithAction can only be sliced '
                    'with another list')
            val = [self._action(item) for item in val]
        return super(StrictOrderingOnAppendListWithAction, self).__setitem__(
            key, val)

    def __add__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                'StrictOrderingOnAppendListWithAction can only be added with '
                'another list')
        return super(StrictOrderingOnAppendListWithAction, self).__add__(other)

    def __iadd__(self, other):
        if not isinstance(other, list):
            raise ValueError(
                'StrictOrderingOnAppendListWithAction can only be added with '
                'another list')
        other = [self._action(i) for i in other]
        return super(StrictOrderingOnAppendListWithAction, self).__iadd__(other)
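
# Example usage of StrictOrderingOnAppendListWithAction (illustrative): the
# action is applied to every incoming item before it is stored.
#
#     upper = StrictOrderingOnAppendListWithAction(
#         ['a.css', 'b.css'], action=lambda p: p.upper())
#     # upper == ['A.CSS', 'B.CSS']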


class MozbuildDeletionError(Exception):
    pass


def FlagsFactory(flags):
    """Returns a class which holds optional flags for an item in a list.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    The resulting class is used by the various <TypeName>WithFlagsFactory
    functions below.
    """
    assert isinstance(flags, dict)
    assert all(isinstance(v, type) for v in flags.values())

    class Flags(object):
        __slots__ = flags.keys()
        _flags = flags

        def update(self, **kwargs):
            for k, v in six.iteritems(kwargs):
                setattr(self, k, v)

        def __getattr__(self, name):
            if name not in self.__slots__:
                raise AttributeError("'%s' object has no attribute '%s'" %
                                     (self.__class__.__name__, name))
            try:
                return object.__getattr__(self, name)
            except AttributeError:
                value = self._flags[name]()
                self.__setattr__(name, value)
                return value

        def __setattr__(self, name, value):
            if name not in self.__slots__:
                raise AttributeError("'%s' object has no attribute '%s'" %
                                     (self.__class__.__name__, name))
            if not isinstance(value, self._flags[name]):
                raise TypeError("'%s' attribute of class '%s' must be '%s'" %
                                (name, self.__class__.__name__,
                                 self._flags[name].__name__))
            return object.__setattr__(self, name, value)

        def __delattr__(self, name):
            raise MozbuildDeletionError('Unable to delete attributes for this object')

    return Flags


class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList):
    """A list with flags specialized for moz.build environments.

    Each subclass has a set of typed flags; this class lets us use `isinstance`
    for natural testing.
    """


def StrictOrderingOnAppendListWithFlagsFactory(flags):
    """Returns a StrictOrderingOnAppendList-like object, with optional
    flags on each item.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    Example:
        FooList = StrictOrderingOnAppendListWithFlagsFactory({
            'foo': bool, 'bar': unicode
        })
        foo = FooList(['a', 'b', 'c'])
        foo['a'].foo = True
        foo['b'].bar = 'bar'
    """
    class StrictOrderingOnAppendListWithFlagsSpecialization(StrictOrderingOnAppendListWithFlags):
        def __init__(self, iterable=None):
            if iterable is None:
                iterable = []
            StrictOrderingOnAppendListWithFlags.__init__(self, iterable)
            self._flags_type = FlagsFactory(flags)
            self._flags = dict()

        def __getitem__(self, name):
            if name not in self._flags:
                if name not in self:
                    raise KeyError("'%s'" % name)
                self._flags[name] = self._flags_type()
            return self._flags[name]

        def __setitem__(self, name, value):
            if not isinstance(name, slice):
                raise TypeError("'%s' object does not support item assignment" %
                                self.__class__.__name__)
            result = super(StrictOrderingOnAppendListWithFlagsSpecialization,
                           self).__setitem__(name, value)
            # We may have removed items.
            for k in set(self._flags.keys()) - set(self):
                del self._flags[k]
            if isinstance(value, StrictOrderingOnAppendListWithFlags):
                self._update_flags(value)
            return result

        def _update_flags(self, other):
            if self._flags_type._flags != other._flags_type._flags:
                raise ValueError('Expected a list of strings with flags like %s, not like %s' %
                                 (self._flags_type._flags, other._flags_type._flags))
            intersection = set(self._flags.keys()) & set(other._flags.keys())
            if intersection:
                raise ValueError(
                    'Cannot update flags: both lists of strings with flags configure %s' %
                    intersection
                    )
            self._flags.update(other._flags)

        def extend(self, l):
            result = super(StrictOrderingOnAppendListWithFlagsSpecialization,
                           self).extend(l)
            if isinstance(l, StrictOrderingOnAppendListWithFlags):
                self._update_flags(l)
            return result

        def __add__(self, other):
            result = super(StrictOrderingOnAppendListWithFlagsSpecialization,
                           self).__add__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                # Result has flags from other but not from self, since
                # internally we duplicate self and then extend with other, and
                # only extend knows about flags.  Since we don't allow updating
                # when the set of flag keys intersect, which instance we pass
                # to _update_flags here matters.  This needs to be correct but
                # is an implementation detail.
                result._update_flags(self)
            return result

        def __iadd__(self, other):
            result = super(StrictOrderingOnAppendListWithFlagsSpecialization,
                           self).__iadd__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                self._update_flags(other)
            return result

    return StrictOrderingOnAppendListWithFlagsSpecialization


class HierarchicalStringList(object):
    """A hierarchy of lists of strings.

    Each instance of this object contains a list of strings, which can be set or
    appended to. A sub-level of the hierarchy is also an instance of this
    class, and can be added by appending to an attribute instead.

    For example, the moz.build variable EXPORTS is an instance of this class. We
    can do:

    EXPORTS += ['foo.h']
    EXPORTS.mozilla.dom += ['bar.h']

    In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
    EXPORTS.mozilla.dom), and the first and last each have one element in their
    list.
    """
    __slots__ = ('_strings', '_children')

    def __init__(self):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        self._strings = StrictOrderingOnAppendList()
        self._children = {}

    class StringListAdaptor(Sequence):
        def __init__(self, hsl):
            self._hsl = hsl

        def __getitem__(self, index):
            return self._hsl._strings[index]

        def __len__(self):
            return len(self._hsl._strings)

    def walk(self):
        """Walk over all HierarchicalStringLists in the hierarchy.

        This is a generator of (path, sequence).

        The path is '' for the root level and '/'-delimited strings for
        any descendants.  The sequence is a read-only sequence of the
        strings contained at that level.
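
        For example (names are illustrative), if the root level contains
        'foo.h' and the 'mozilla' child contains 'bar.h', this yields
        ('', <adaptor over ['foo.h']>) followed by ('mozilla', <adaptor over
        ['bar.h']>).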
        """

        if self._strings:
            path_to_here = ''
            yield path_to_here, self.StringListAdaptor(self)

        for k, l in sorted(self._children.items()):
            for p, v in l.walk():
                path_to_there = '%s/%s' % (k, p)
                yield path_to_there.strip('/'), v

    def __setattr__(self, name, value):
        if name in self.__slots__:
            return object.__setattr__(self, name, value)

        # __setattr__ can be called with a list when a simple assignment is
        # used:
        #
        # EXPORTS.foo = ['file.h']
        #
        # In this case, we need to overwrite foo's current list of strings.
        #
        # However, __setattr__ is also called with a HierarchicalStringList
        # to try to actually set the attribute. We want to ignore this case,
        # since we don't actually create an attribute called 'foo', but just add
        # it to our list of children (using _get_exportvariable()).
        self._set_exportvariable(name, value)

    def __getattr__(self, name):
        if name.startswith('__'):
            return object.__getattr__(self, name)
        return self._get_exportvariable(name)

    def __delattr__(self, name):
        raise MozbuildDeletionError('Unable to delete attributes for this object')

    def __iadd__(self, other):
        if isinstance(other, HierarchicalStringList):
            self._strings += other._strings
            for c in other._children:
                self[c] += other[c]
        else:
            self._check_list(other)
            self._strings += other
        return self

    def __getitem__(self, name):
        return self._get_exportvariable(name)

    def __setitem__(self, name, value):
        self._set_exportvariable(name, value)

    def _get_exportvariable(self, name):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        child = self._children.get(name)
        if not child:
            child = self._children[name] = HierarchicalStringList()
        return child

    def _set_exportvariable(self, name, value):
        if name in self._children:
            if value is self._get_exportvariable(name):
                return
            raise KeyError('global_ns', 'reassign',
                           '<some variable>.%s' % name)

        exports = self._get_exportvariable(name)
        exports._check_list(value)
        exports._strings += value

    def _check_list(self, value):
        if not isinstance(value, list):
            raise ValueError('Expected a list of strings, not %s' % type(value))
        for v in value:
            if not isinstance(v, six.string_types):
                raise ValueError(
                    'Expected a list of strings, not an element of %s' % type(v))


class LockFile(object):
    """LockFile is used by the lock_file method to hold the lock.

    This object should not be used directly, but only through
    the lock_file method below.
    """

    def __init__(self, lockfile):
        self.lockfile = lockfile

    def __del__(self):
        while True:
            try:
                os.remove(self.lockfile)
                break
            except OSError as e:
                if e.errno == errno.EACCES:
                    # Another process probably has the file open, we'll retry.
                    # Just a short sleep since we want to drop the lock ASAP
                    # (but we need to let some other process close the file
                    # first).
                    time.sleep(0.1)
                else:
                    # Re-raise unknown errors
                    raise


def lock_file(lockfile, max_wait=600):
    """Create and hold a lockfile of the given name, with the given timeout.

    To release the lock, delete the returned object.
    """

    # FUTURE This function and object could be written as a context manager.

    while True:
        try:
            fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
            # We created the lockfile, so we're the owner
            break
        except OSError as e:
            if (e.errno == errno.EEXIST or
                (sys.platform == "win32" and e.errno == errno.EACCES)):
                pass
            else:
                # Should not occur
                raise

        try:
            # The lock file exists, try to stat it to get its age
            # and read its contents to report the owner PID
            f = open(lockfile, 'r')
            s = os.stat(lockfile)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT or e.errno == errno.EACCES:
                # We didn't create the lockfile, so it did exist, but it's
                # gone now. Just try again
                continue

            raise Exception('{0} exists but stat() failed: {1}'.format(
                lockfile, e.strerror))

        # We didn't create the lockfile and it's still there, check
        # its age
        now = int(time.time())
        if now - s[stat.ST_MTIME] > max_wait:
            pid = f.readline().rstrip()
            raise Exception('{0} has been locked for more than '
                            '{1} seconds (PID {2})'.format(lockfile, max_wait, pid))

        # It's not been locked too long, wait a while and retry
        f.close()
        time.sleep(1)

    # If we get here, we have the lockfile. Convert the os.open file
    # descriptor into a Python file object and record our PID in it.
    f = os.fdopen(fd, 'w')
    f.write('{0}\n'.format(os.getpid()))
    f.close()

    return LockFile(lockfile)
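
# Example usage of lock_file (illustrative; the path is hypothetical):
#
#     lock = lock_file('/tmp/my-tool.lock')
#     try:
#         pass  # do work while holding the lock
#     finally:
#         del lock  # deleting the object removes the lockfile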


class OrderedDefaultDict(OrderedDict):
    '''A combination of OrderedDict and defaultdict.'''

    def __init__(self, default_factory, *args, **kwargs):
        OrderedDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self[key] = self._default_factory()
        return value


class KeyedDefaultDict(dict):
    '''Like a defaultdict, but the default_factory function takes the key as
    argument'''

    def __init__(self, default_factory, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        value = self._default_factory(key)
        dict.__setitem__(self, key, value)
        return value


class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict):
    '''Like KeyedDefaultDict, but read-only.'''


class memoize(dict):
    '''A decorator to memoize the results of function calls depending
    on its arguments.
    Both functions and instance methods are handled, although in the
    instance method case, the results are cached in the instance itself.
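
    For example (illustrative; `compute` is a placeholder)::

        @memoize
        def expensive(arg):
            return compute(arg)   # only evaluated once per distinct arg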
    '''

    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)

    def __call__(self, *args):
        if args not in self:
            self[args] = self.func(*args)
        return self[args]

    def method_call(self, instance, *args):
        name = '_%s' % self.func.__name__
        if not hasattr(instance, name):
            setattr(instance, name, {})
        cache = getattr(instance, name)
        if args not in cache:
            cache[args] = self.func(instance, *args)
        return cache[args]

    def __get__(self, instance, cls):
        return functools.update_wrapper(
            functools.partial(self.method_call, instance), self.func)


class memoized_property(object):
    '''A specialized version of the memoize decorator that works for
    class instance properties.
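
    For example (illustrative; `parse_defines` is a placeholder)::

        class Config(object):
            @memoized_property
            def defines(self):
                return parse_defines()  # computed once, then cached on self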
    '''

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls):
        name = '_%s' % self.func.__name__
        if not hasattr(instance, name):
            setattr(instance, name, self.func(instance))
        return getattr(instance, name)


def TypedNamedTuple(name, fields):
    """Factory for named tuple types with strong typing.

    Arguments are an iterable of 2-tuples. The first member is the
    field name. The second member is a type the field will be validated
    to be.

    Construction of instances varies from ``collections.namedtuple``.

    First, if a single tuple argument is given to the constructor, this is
    treated as the equivalent of passing each tuple value as a separate
    argument into __init__. e.g.::

        t = (1, 2)
        TypedTuple(t) == TypedTuple(1, 2)

    This behavior is meant for moz.build files, so vanilla tuples are
    automatically cast to typed tuple instances.

    Second, fields in the tuple are validated to be instances of the specified
    type. This is done via an ``isinstance()`` check. To allow multiple types,
    pass a tuple as the allowed types field.
    """
    cls = collections.namedtuple(name, (name for name, typ in fields))

    class TypedTuple(cls):
        __slots__ = ()

        def __new__(klass, *args, **kwargs):
            if len(args) == 1 and not kwargs and isinstance(args[0], tuple):
                args = args[0]

            return super(TypedTuple, klass).__new__(klass, *args, **kwargs)

        def __init__(self, *args, **kwargs):
            for i, (fname, ftype) in enumerate(self._fields):
                value = self[i]

                if not isinstance(value, ftype):
                    raise TypeError('field in tuple not of proper type: %s; '
                                    'got %s, expected %s' % (fname,
                                                             type(value), ftype))

    TypedTuple._fields = fields

    return TypedTuple


@memoize
def TypedList(type, base_class=List):
    '''A list with type coercion.

    The given ``type`` is what list elements are being coerced to. It may do
    strict validation, throwing ValueError exceptions.

    A ``base_class`` type can be given for more specific uses than a List. For
    example, a Typed StrictOrderingOnAppendList can be created with:

       TypedList(unicode, StrictOrderingOnAppendList)
    '''
    class _TypedList(base_class):
        @staticmethod
        def normalize(e):
            if not isinstance(e, type):
                e = type(e)
            return e

        def _ensure_type(self, l):
            if isinstance(l, self.__class__):
                return l

            return [self.normalize(e) for e in l]

        def __init__(self, iterable=None, **kwargs):
            if iterable is None:
                iterable = []
            iterable = self._ensure_type(iterable)

            super(_TypedList, self).__init__(iterable, **kwargs)

        def extend(self, l):
            l = self._ensure_type(l)

            return super(_TypedList, self).extend(l)

        def __setitem__(self, key, val):
            val = self._ensure_type(val)

            return super(_TypedList, self).__setitem__(key, val)

        def __add__(self, other):
            other = self._ensure_type(other)

            return super(_TypedList, self).__add__(other)

        def __iadd__(self, other):
            other = self._ensure_type(other)

            return super(_TypedList, self).__iadd__(other)

        def append(self, other):
            self += [other]

    return _TypedList


def group_unified_files(files, unified_prefix, unified_suffix,
                        files_per_unified_file):
    """Return an iterator of (unified_filename, source_filenames) tuples.

    We compile most C and C++ files in "unified mode"; instead of compiling
    ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file
    that looks approximately like::

       #include "a.cpp"
       #include "b.cpp"
       #include "c.cpp"

    This function handles the details of generating names for the unified
    files, and determining which original source files go in which unified
    file."""

    # Make sure the input list is sorted. If it's not, bad things could happen!
    files = sorted(files)

    # Our last returned list of source filenames may be short, and we
    # don't want the fill value inserted by zip_longest to be an
    # issue.  So we do a little dance to filter it out ourselves.
    dummy_fill_value = ("dummy",)

    def filter_out_dummy(iterable):
        return six.moves.filter(lambda x: x != dummy_fill_value,
                                iterable)

    # From the itertools documentation, slightly modified:
    def grouper(n, iterable):
        "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
        args = [iter(iterable)] * n
        return six.moves.zip_longest(fillvalue=dummy_fill_value, *args)

    for i, unified_group in enumerate(grouper(files_per_unified_file,
                                              files)):
        just_the_filenames = list(filter_out_dummy(unified_group))
        yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
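
# Example usage of group_unified_files (illustrative):
#
#     list(group_unified_files(['a.cpp', 'b.cpp', 'c.cpp'], 'Unified', 'cpp', 2))
#     # -> [('Unified0.cpp', ['a.cpp', 'b.cpp']), ('Unified1.cpp', ['c.cpp'])]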


def pair(iterable):
    '''Given an iterable, returns an iterable pairing its items.

    For example,
        list(pair([1,2,3,4,5,6]))
    returns
        [(1,2), (3,4), (5,6)]
    '''
    i = iter(iterable)
    return six.moves.zip_longest(i, i)


def pairwise(iterable):
    '''Given an iterable, returns an iterable of overlapped pairs of
    its items. Based on the Python itertools documentation.

    For example,
        list(pairwise([1,2,3,4,5,6]))
    returns
        [(1,2), (2,3), (3,4), (4,5), (5,6)]
    '''
    a, b = itertools.tee(iterable)
    next(b, None)
    return zip(a, b)


VARIABLES_RE = re.compile(r'\$\((\w+)\)')


def expand_variables(s, variables):
    '''Given a string with $(var) variable references, replace those references
    with the corresponding entries from the given `variables` dict.

    If a variable value is not a string, it is iterated and its items are
    joined with whitespace.'''
    result = ''
    for s, name in pair(VARIABLES_RE.split(s)):
        result += s
        value = variables.get(name)
        if not value:
            continue
        if not isinstance(value, six.string_types):
            value = ' '.join(value)
        result += value
    return result
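
# Example usage of expand_variables (illustrative):
#
#     expand_variables('$(CC) -o $(OUT)', {'CC': 'clang', 'OUT': ['a.o', 'b.o']})
#     # -> 'clang -o a.o b.o'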


class DefinesAction(argparse.Action):
    '''An ArgumentParser action to handle -Dvar[=value] type of arguments.'''

    def __call__(self, parser, namespace, values, option_string):
        defines = getattr(namespace, self.dest)
        if defines is None:
            defines = {}
        values = values.split('=', 1)
        if len(values) == 1:
            name, value = values[0], 1
        else:
            name, value = values
            if value.isdigit():
                value = int(value)
        defines[name] = value
        setattr(namespace, self.dest, defines)
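
# Example usage of DefinesAction (illustrative):
#
#     parser = argparse.ArgumentParser()
#     parser.add_argument('-D', action=DefinesAction, dest='defines')
#     parser.parse_args(['-DFOO', '-DBAR=2']).defines
#     # -> {'FOO': 1, 'BAR': 2}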


class EnumStringComparisonError(Exception):
    pass


class EnumString(six.text_type):
    '''A string type that can only have a limited set of values, similar to
    an Enum, and can only be compared against that set of values.

    The class is meant to be subclassed, where the subclass defines
    POSSIBLE_VALUES. The `subclass` method is a helper to create such
    subclasses.
    '''
    POSSIBLE_VALUES = ()

    def __init__(self, value):
        if value not in self.POSSIBLE_VALUES:
            raise ValueError("'%s' is not a valid value for %s"
                             % (value, self.__class__.__name__))

    def __eq__(self, other):
        if other not in self.POSSIBLE_VALUES:
            raise EnumStringComparisonError(
                'Can only compare with %s'
                % ', '.join("'%s'" % v for v in self.POSSIBLE_VALUES))
        return super(EnumString, self).__eq__(other)

    def __ne__(self, other):
        return not (self == other)

    def __hash__(self):
        return super(EnumString, self).__hash__()

    @staticmethod
    def subclass(*possible_values):
        class EnumStringSubclass(EnumString):
            POSSIBLE_VALUES = possible_values
        return EnumStringSubclass
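
# Example usage of EnumString.subclass (illustrative):
#
#     CompilerType = EnumString.subclass('gcc', 'clang', 'msvc')
#     compiler = CompilerType('clang')
#     compiler == 'clang'    # -> True
#     compiler == 'icc'      # raises EnumStringComparisonError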


def _escape_char(c):
    # str.encode('unicode_escape') doesn't escape quotes, presumably because
    # quoting could be done with either ' or ".
    if c == "'":
        return "\\'"
    return six.text_type(c.encode('unicode_escape'))


if six.PY2:  # Delete when we get rid of Python 2.
    # Mapping table between raw characters below \x80 and their escaped
    # counterpart, when they differ
    _INDENTED_REPR_TABLE = {
        c: e
        for c, e in map(lambda x: (x, _escape_char(x)),
                        map(unichr, range(128)))
        if c != e
    }
    # Regexp matching all characters to escape.
    _INDENTED_REPR_RE = re.compile(
        '([' + ''.join(_INDENTED_REPR_TABLE.values()) + ']+)')


def write_indented_repr(f, o, indent=4):
    '''Write an indented representation (similar to repr()) of the object to the
    given file `f`.

    One notable difference with repr is that the returned representation
    assumes `from __future__ import unicode_literals`.
    '''
    if six.PY3:
        pprint.pprint(o, stream=f, indent=indent)
        return
    # Delete everything below when we get rid of Python 2.
    one_indent = ' ' * indent

    def recurse_indented_repr(o, level):
        if isinstance(o, dict):
            yield '{\n'
            for k, v in sorted(o.items()):
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(k, level + 1):
                    yield d
                yield ': '
                for d in recurse_indented_repr(v, level + 1):
                    yield d
                yield ',\n'
            yield one_indent * level
            yield '}'
        elif isinstance(o, bytes):
            yield 'b'
            yield repr(o)
        elif isinstance(o, six.text_type):
            yield "'"
            # We want a readable string (non escaped unicode), but some
            # special characters need escaping (e.g. \n, \t, etc.)
            for i, s in enumerate(_INDENTED_REPR_RE.split(o)):
                if i % 2:
                    for c in s:
                        yield _INDENTED_REPR_TABLE[c]
                else:
                    yield s
            yield "'"
        elif hasattr(o, '__iter__'):
            yield '[\n'
            for i in o:
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(i, level + 1):
                    yield d
                yield ',\n'
            yield one_indent * level
            yield ']'
        else:
            yield repr(o)
    result = ''.join(recurse_indented_repr(o, 0)) + '\n'
    f.write(result)


def patch_main():
    '''This is a hack to work around the fact that Windows multiprocessing needs
    to import the original main module, and assumes that it corresponds to a file
    ending in .py.

    We do this by a sort of two-level function interposing. The first
    level interposes forking.get_command_line() with our version defined
    in my_get_command_line(). Our version of get_command_line will
    replace the command string with the contents of the fork_interpose()
    function to be used in the subprocess.

    The subprocess then gets an interposed imp.find_module(), which we
    hack up to find the main module name multiprocessing will assume, since we
    know what this will be based on the main module in the parent. If we're not
    looking for our main module, then the original find_module will suffice.

    See also: http://bugs.python.org/issue19946
    And: https://bugzilla.mozilla.org/show_bug.cgi?id=914563
    '''
    # XXX In Python 3.4 the multiprocessing module was re-written and the below
    # code is no longer valid. The Python issue19946 also claims to be fixed in
    # this version. It's not clear whether this hack is still needed in 3.4+ or
    # not, but at least some basic mach commands appear to work without it. So
    # skip it in 3.4+ until we determine it's still needed.
    if sys.platform == 'win32' and sys.version_info < (3, 4):
        import inspect
        import os
        from multiprocessing import forking
        global orig_command_line

        # Figure out what multiprocessing will assume our main module
        # is called (see python/Lib/multiprocessing/forking.py).
        main_path = getattr(sys.modules['__main__'], '__file__', None)
        if main_path is None:
            # If someone deleted or modified __main__, there's nothing left for
            # us to do.
            return
        main_file_name = os.path.basename(main_path)
        main_module_name, ext = os.path.splitext(main_file_name)
        if ext == '.py':
            # If main is a .py file, everything ought to work as expected.
            return

        def fork_interpose():
            import imp
            import os
            import sys
            orig_find_module = imp.find_module

            def my_find_module(name, dirs):
                if name == main_module_name:
                    path = os.path.join(dirs[0], main_file_name)
                    f = open(path)
                    return (f, path, ('', 'r', imp.PY_SOURCE))
                return orig_find_module(name, dirs)

            # Don't allow writing bytecode file for the main module.
            orig_load_module = imp.load_module

            def my_load_module(name, file, path, description):
                # multiprocessing.forking invokes imp.load_module manually and
                # hard-codes the name __parents_main__ as the module name.
                if name == '__parents_main__':
                    old_bytecode = sys.dont_write_bytecode
                    sys.dont_write_bytecode = True
                    try:
                        return orig_load_module(name, file, path, description)
                    finally:
                        sys.dont_write_bytecode = old_bytecode

                return orig_load_module(name, file, path, description)

            imp.find_module = my_find_module
            imp.load_module = my_load_module
            from multiprocessing.forking import main
            main()

        def my_get_command_line():
            fork_code, lineno = inspect.getsourcelines(fork_interpose)
            # Remove the first line (for 'def fork_interpose():') and the three
            # levels of indentation (12 spaces), add our relevant globals.
            fork_string = ("main_file_name = '%s'\n" % main_file_name +
                           "main_module_name = '%s'\n" % main_module_name +
                           ''.join(x[12:] for x in fork_code[1:]))
            cmdline = orig_command_line()
            cmdline[2] = fork_string
            return cmdline
        orig_command_line = forking.get_command_line
        forking.get_command_line = my_get_command_line


def ensure_bytes(value, encoding='utf-8'):
    if isinstance(value, six.text_type):
        return value.encode(encoding)
    return value


def ensure_unicode(value, encoding='utf-8'):
    if isinstance(value, six.binary_type):
        return value.decode(encoding)
    return value


def ensure_subprocess_env(env, encoding='utf-8'):
    """Ensure the environment is in the correct format for the `subprocess`
    module.

    This will convert all keys and values to bytes on Python 2, and text on
    Python 3.

    Args:
        env (dict): Environment to ensure.
        encoding (str): Encoding to use when converting to/from bytes/text
                        (default: utf-8).
    """
    ensure = ensure_bytes if sys.version_info[0] < 3 else ensure_unicode
    return {ensure(k, encoding): ensure(v, encoding) for k, v in six.iteritems(env)}
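
# Example usage of ensure_subprocess_env (illustrative):
#
#     import os
#     import subprocess
#     env = ensure_subprocess_env(dict(os.environ, MOZ_OBJDIR='obj-debug'))
#     subprocess.check_call(['make'], env=env)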


def process_time():
    if six.PY2:
        return time.clock()
    else:
        return time.process_time()
