1# This Source Code Form is subject to the terms of the Mozilla Public
2# License, v. 2.0. If a copy of the MPL was not distributed with this file,
3# You can obtain one at http://mozilla.org/MPL/2.0/.
4
5# This file contains miscellaneous utility functions that don't belong anywhere
6# in particular.
7
8from __future__ import absolute_import, unicode_literals, print_function
9
10import argparse
11import collections
12import ctypes
13import difflib
14import errno
15import functools
16import hashlib
17import itertools
18import os
19import re
20import stat
21import sys
22import time
23import types
24
25from collections import (
26    defaultdict,
27    Iterable,
28    OrderedDict,
29)
30from io import (
31    StringIO,
32    BytesIO,
33)
34
35
# Compatibility alias: the broadest "string" type differs across the two
# interpreters this module targets -- str on Python 3, basestring (which
# covers both str and unicode) on Python 2.
if sys.version_info[0] == 3:
    str_type = str
else:
    str_type = basestring

if sys.platform == 'win32':
    # Handles used by mkdir() below to mark directories as excluded from
    # the Windows content-indexing service.
    _kernel32 = ctypes.windll.kernel32
    _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED = 0x2000
44
45
def exec_(object, globals=None, locals=None):
    """Execute *object* in the given namespaces via the ``exec`` function.

    Using the function form instead of a bare ``exec`` statement sidesteps
    spurious errors raised by older Python 2.7 releases, such as:

    SyntaxError: unqualified exec is not allowed in function ...
    it is a nested function.

    and

    SyntaxError: unqualified exec is not allowed in function ...
    it contains a nested function with free variable
    """
    exec(object, globals, locals)
60
61
def hash_file(path, hasher=None):
    """Hash the contents of the file at ``path`` and return the hex digest.

    ``hasher`` may be a pre-constructed hashlib object; when omitted a
    fresh SHA-1 hasher is created.
    """
    # If the default hashing function changes, this may invalidate
    # lots of cached data.  Don't change it lightly.
    h = hasher or hashlib.sha1()

    with open(path, 'rb') as fh:
        # Feed the hasher in fixed-size chunks so arbitrarily large files
        # are processed without being read wholly into memory.
        for chunk in iter(lambda: fh.read(8192), b''):
            h.update(chunk)

    return h.hexdigest()
79
80
class EmptyValue(unicode):
    """A dummy type that behaves like an empty string and sequence.

    This type exists in order to support
    :py:class:`mozbuild.frontend.reader.EmptyConfig`. It should likely not be
    used elsewhere.
    """
    # NOTE(review): subclasses the Python 2-only ``unicode`` builtin, so
    # this class cannot be defined under Python 3 as written.
    def __init__(self):
        super(EmptyValue, self).__init__()
90
91
class ReadOnlyNamespace(object):
    """A class for objects with immutable attributes set at initialization.

    Attributes are supplied as keyword arguments to the constructor and can
    neither be reassigned nor deleted afterwards.
    """
    def __init__(self, **kwargs):
        # items() instead of the Python 2-only iteritems(), for consistency
        # with this module's Python 3 support (see the str_type shim above).
        for k, v in kwargs.items():
            # Bypass our own __setattr__, which forbids assignment.
            super(ReadOnlyNamespace, self).__setattr__(k, v)

    def __delattr__(self, key):
        raise Exception('Object does not support deletion.')

    def __setattr__(self, key, value):
        raise Exception('Object does not support assignment.')

    def __ne__(self, other):
        return not (self == other)

    def __eq__(self, other):
        # Equal when identical, or when the other object carries the exact
        # same attribute dictionary.
        return self is other or (
            hasattr(other, '__dict__') and self.__dict__ == other.__dict__)

    def __repr__(self):
        return '<%s %r>' % (self.__class__.__name__, self.__dict__)
113
114
class ReadOnlyDict(dict):
    """A dictionary that refuses all mutation after construction."""

    def __init__(self, *args, **kwargs):
        # Populate through the base class; every mutating entry point
        # below raises afterwards.
        super(ReadOnlyDict, self).__init__(*args, **kwargs)

    def __setitem__(self, key, value):
        raise Exception('Object does not support assignment.')

    def __delitem__(self, key):
        raise Exception('Object does not support deletion.')

    def update(self, *args, **kwargs):
        raise Exception('Object does not support update.')
128
129
class undefined_default(object):
    """Represents an undefined argument value that isn't None."""


# Shared sentinel instance; compare with ``is`` to detect "argument not
# supplied" while still allowing None as a legitimate value.
undefined = undefined_default()
135
136
class ReadOnlyDefaultDict(ReadOnlyDict):
    """A read-only dictionary that lazily materializes default values.

    Missing-key lookups invoke ``default_factory`` and cache the produced
    value, mirroring ``collections.defaultdict`` while keeping the
    read-only contract for explicit mutation.
    """
    def __init__(self, default_factory, *args, **kwargs):
        ReadOnlyDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # Store via dict.__setitem__ to bypass ReadOnlyDict's guard.
        default = self._default_factory()
        dict.__setitem__(self, key, default)
        return default
147
148
def ensureParentDir(path):
    """Ensure the directory containing ``path`` exists.

    ``path`` itself is not created; only its parent directory.
    """
    d = os.path.dirname(path)
    # Test the directory itself rather than ``path`` (the original checked
    # the file, which forced a redundant makedirs/EEXIST round trip whenever
    # the file did not yet exist but its directory did).
    if d and not os.path.exists(d):
        try:
            os.makedirs(d)
        except OSError as error:
            # Tolerate a concurrent creation of the same directory.
            if error.errno != errno.EEXIST:
                raise
158
159
def mkdir(path, not_indexed=False):
    """Ensure a directory exists.

    If ``not_indexed`` is True, an attribute is set that disables content
    indexing on the directory (Windows), or a marker file that search
    indexing skips is dropped into it (macOS).
    """
    try:
        os.makedirs(path)
    except OSError as e:
        # An already-existing directory is fine; anything else is not.
        if e.errno != errno.EEXIST:
            raise

    if not not_indexed:
        return

    if sys.platform == 'win32':
        # Pick the wide- or narrow-character API to match the path type.
        setter = (_kernel32.SetFileAttributesW
                  if isinstance(path, str_type)
                  else _kernel32.SetFileAttributesA)
        setter(path, _FILE_ATTRIBUTE_NOT_CONTENT_INDEXED)
    elif sys.platform == 'darwin':
        # Spotlight skips directories containing this marker file.
        with open(os.path.join(path, '.metadata_never_index'), 'a'):
            pass
183
184
def simple_diff(filename, old_lines, new_lines):
    """Return the unified diff between two line lists, as generated lines.

    ``old_lines`` and ``new_lines`` are lists of non-newline terminated
    lines to compare. ``None`` for ``old_lines`` marks a file creation and
    ``None`` for ``new_lines`` a deletion; the corresponding side of the
    diff is then labelled ``/dev/null``.
    """
    if old_lines is None:
        old_name = '/dev/null'
    else:
        old_name = filename

    if new_lines is None:
        new_name = '/dev/null'
    else:
        new_name = filename

    return difflib.unified_diff(
        old_lines or [], new_lines or [],
        old_name, new_name, n=4, lineterm='')
200
201
class FileAvoidWrite(BytesIO):
    """File-like object that buffers output and only writes if content changed.

    We create an instance from an existing filename. New content is written to
    it. When we close the file object, if the content in the in-memory buffer
    differs from what is on disk, then we write out the new content. Otherwise,
    the original file is untouched.

    Instances can optionally capture diffs of file changes. This feature is not
    enabled by default because it a) doesn't make sense for binary files b)
    could add unwanted overhead to calls.

    Additionally, there is dry run mode where the file is not actually written
    out, but reports whether the file was existing and would have been updated
    still occur, as well as diff capture if requested.
    """
    def __init__(self, filename, capture_diff=False, dry_run=False, mode='rU'):
        BytesIO.__init__(self)
        self.name = filename
        self._capture_diff = capture_diff
        self._dry_run = dry_run
        self.diff = None
        # Mode used to read back the existing file in close().
        self.mode = mode

    def write(self, buf):
        # Accept both text and bytes: text is encoded to UTF-8 so the
        # underlying BytesIO only ever sees bytes. Testing "not bytes"
        # (instead of the Python 2-only ``unicode`` name) is equivalent on
        # Python 2 -- where ``str`` is a bytes subclass -- and also works
        # on Python 3.
        if not isinstance(buf, bytes):
            buf = buf.encode('utf-8')
        BytesIO.write(self, buf)

    def close(self):
        """Stop accepting writes, compare file contents, and rewrite if needed.

        Returns a tuple of bools indicating what action was performed:

            (file existed, file updated)

        If ``capture_diff`` was specified at construction time and the
        underlying file was changed, ``.diff`` will be populated with the diff
        of the result.
        """
        buf = self.getvalue()
        BytesIO.close(self)
        existed = False
        old_content = None

        try:
            existing = open(self.name, self.mode)
            existed = True
        except IOError:
            pass
        else:
            try:
                old_content = existing.read()
                # Identical content: report "existed, not updated" and do
                # not touch the file at all.
                if old_content == buf:
                    return True, False
            except IOError:
                pass
            finally:
                existing.close()

        if not self._dry_run:
            ensureParentDir(self.name)
            # Maintain 'b' if specified.  'U' only applies to modes starting with
            # 'r', so it is dropped.
            writemode = 'w'
            if 'b' in self.mode:
                writemode += 'b'
            with open(self.name, writemode) as file:
                file.write(buf)

        if self._capture_diff:
            try:
                old_lines = old_content.splitlines() if existed else None
                new_lines = buf.splitlines()

                self.diff = simple_diff(self.name, old_lines, new_lines)
            # FileAvoidWrite isn't unicode/bytes safe. So, files with non-ascii
            # content or opened and written in different modes may involve
            # implicit conversion and this will make Python unhappy. Since
            # diffing isn't a critical feature, we just ignore the failure.
            # This can go away once FileAvoidWrite uses io.BytesIO and
            # io.StringIO. But that will require a lot of work.
            except (UnicodeDecodeError, UnicodeEncodeError):
                self.diff = ['Binary or non-ascii file changed: %s' %
                             self.name]

        return existed, True

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        # Flush/compare on normal context exit; skip if already closed.
        if not self.closed:
            self.close()
295
296
def resolve_target_to_make(topobjdir, target):
    '''
    Resolve `target` (a target, directory, or file) to a make target.

    `topobjdir` is the object directory; all make targets will be
    rooted at or below the top-level Makefile in this directory.

    Returns a pair `(reldir, target)` where `reldir` is a directory
    relative to `topobjdir` containing a Makefile and `target` is a
    make target (possibly `None`).

    A directory resolves to the nearest directory at or above
    containing a Makefile, and target `None`.

    A regular (non-Makefile) file resolves to the nearest directory at
    or above the file containing a Makefile, and an appropriate
    target.

    A Makefile resolves to the nearest parent strictly above the
    Makefile containing a different Makefile, and an appropriate
    target.
    '''
    # Normalize to forward slashes and strip any leading separator.
    target = target.replace(os.sep, '/').lstrip('/')
    abs_target = os.path.join(topobjdir, target)

    # Directories: run |make -C dir|. If the directory itself has no
    # Makefile, climb toward the root until one is found.
    if os.path.isdir(abs_target):
        current = abs_target
        while not os.path.exists(os.path.join(current, 'Makefile')):
            current = os.path.dirname(current)
        return (current[len(topobjdir) + 1:], None)

    # No path separator: treat as a top-level make target.
    if '/' not in target:
        return (None, target)

    # A relative path within the tree: find the deepest directory along
    # the path that has a Makefile, expressing the make target relative
    # to that directory.
    reldir, target = os.path.split(target)

    while True:
        make_path = os.path.join(topobjdir, reldir, 'Makefile')

        # ``target`` grows by one path component per iteration, so the
        # 'Makefile' comparison below fires exactly once.
        if target != 'Makefile' and os.path.exists(make_path):
            return (reldir, target)

        target = os.path.join(os.path.basename(reldir), target)
        reldir = os.path.dirname(reldir)
357
358
class ListMixin(object):
    """Mixin that only allows lists to be combined with lists.

    This protects against the common mistake of extending a list with a
    string, which would otherwise splice the string's individual
    characters into the list.
    """
    def __init__(self, iterable=None, **kwargs):
        if iterable is None:
            iterable = []
        if not isinstance(iterable, list):
            raise ValueError('List can only be created from other list instances.')

        self._kwargs = kwargs
        return super(ListMixin, self).__init__(iterable, **kwargs)

    def extend(self, l):
        if not isinstance(l, list):
            raise ValueError('List can only be extended with other list instances.')

        return super(ListMixin, self).extend(l)

    def __setslice__(self, i, j, sequence):
        # Python 2 slice-assignment hook; Python 3 routes slice assignment
        # through __setitem__ instead, so this only guards py2 callers.
        if not isinstance(sequence, list):
            raise ValueError('List can only be sliced with other list instances.')

        return super(ListMixin, self).__setslice__(i, j, sequence)

    def __add__(self, other):
        # Allow None and EmptyValue as a special case because it makes
        # undefined variable references in moz.build behave better.
        # type(None) replaces the Python 2-only types.NoneType, matching
        # this module's Python 3 support.
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError('Only lists can be appended to lists.')

        new_list = self.__class__(self, **self._kwargs)
        new_list.extend(other)
        return new_list

    def __iadd__(self, other):
        other = [] if isinstance(other, (type(None), EmptyValue)) else other
        if not isinstance(other, list):
            raise ValueError('Only lists can be appended to lists.')

        return super(ListMixin, self).__iadd__(other)
398
399
class List(ListMixin, list):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that the
    appended thing is a list. This avoids bad surprises coming from appending
    a string to a list, which would just add each letter of the string.
    """
407
408
class UnsortedError(Exception):
    """Raised when a sequence that must be sorted arrives out of order.

    ``sorted`` holds the expected (sorted) sequence, ``original`` the
    sequence as provided, and ``i`` the index of the first mismatch.
    """
    def __init__(self, srtd, original):
        assert len(srtd) == len(original)

        self.sorted = srtd
        self.original = original

        # Record where the two sequences first diverge.
        for i, (expected, actual) in enumerate(zip(srtd, original)):
            if expected != actual:
                self.i = i
                break

    def __str__(self):
        msg = ('An attempt was made to add an unsorted sequence to a list. '
               'The incoming list is unsorted starting at element %d. '
               'We expected "%s" but got "%s"')
        return msg % (self.i, self.sorted[self.i], self.original[self.i])
433
434
class StrictOrderingOnAppendListMixin(object):
    """Mixin that validates incoming sequences are alphabetically sorted.

    Comparison is case-insensitive; tuple entries are compared by their
    first element. An UnsortedError is raised on the first violation.
    """

    @staticmethod
    def ensure_sorted(l):
        # Anything that already is a StrictOrderingOnAppendList was
        # validated when it was built, so skip the re-check.
        if isinstance(l, StrictOrderingOnAppendList):
            return

        def sort_key(entry):
            # Tuples are ordered by their first element.
            head = entry[0] if isinstance(entry, tuple) else entry
            return head.lower()

        srtd = sorted(l, key=sort_key)
        if srtd != l:
            raise UnsortedError(srtd, l)

    def __init__(self, iterable=None, **kwargs):
        iterable = [] if iterable is None else iterable
        StrictOrderingOnAppendListMixin.ensure_sorted(iterable)
        super(StrictOrderingOnAppendListMixin, self).__init__(
            iterable, **kwargs)

    def extend(self, l):
        StrictOrderingOnAppendListMixin.ensure_sorted(l)
        return super(StrictOrderingOnAppendListMixin, self).extend(l)

    def __setslice__(self, i, j, sequence):
        StrictOrderingOnAppendListMixin.ensure_sorted(sequence)
        return super(StrictOrderingOnAppendListMixin, self).__setslice__(
            i, j, sequence)

    def __add__(self, other):
        StrictOrderingOnAppendListMixin.ensure_sorted(other)
        return super(StrictOrderingOnAppendListMixin, self).__add__(other)

    def __iadd__(self, other):
        StrictOrderingOnAppendListMixin.ensure_sorted(other)
        return super(StrictOrderingOnAppendListMixin, self).__iadd__(other)
478
479
class StrictOrderingOnAppendList(ListMixin, StrictOrderingOnAppendListMixin,
        list):
    """A list specialized for moz.build environments.

    We overload the assignment and append operations to require that incoming
    elements be ordered. This enforces cleaner style in moz.build files.

    The MRO runs ListMixin's list-type checks before the ordering checks
    from StrictOrderingOnAppendListMixin.
    """
487
class ImmutableStrictOrderingOnAppendList(StrictOrderingOnAppendList):
    """A StrictOrderingOnAppendList that additionally forbids every
    mutation once constructed.
    """
    def __setitem__(self, i, elt):
        raise Exception("cannot assign to indexes on this type")

    def __setslice__(self, i, j, iterable):
        raise Exception("cannot assign to slices on this type")

    def __iadd__(self, other):
        raise Exception("cannot use += on this type")

    def append(self, elt):
        raise Exception("cannot use append on this type")

    def extend(self, iterable):
        raise Exception("cannot use extend on this type")
505
506
class ListWithActionMixin(object):
    """Mixin to create lists with pre-processing. See ListWithAction.

    ``action`` is a required callable that is applied to every incoming
    item; its return value is stored in place of the item.
    """
    def __init__(self, iterable=None, action=None):
        if iterable is None:
            iterable = []
        if not callable(action):
            # (Typo "callabe" fixed in this message.)
            raise ValueError('A callable action is required to construct '
                             'a ListWithAction')

        self._action = action
        iterable = [self._action(i) for i in iterable]
        super(ListWithActionMixin, self).__init__(iterable)

    def extend(self, l):
        l = [self._action(i) for i in l]
        return super(ListWithActionMixin, self).extend(l)

    def __setslice__(self, i, j, sequence):
        # Python 2 slice-assignment hook.
        sequence = [self._action(item) for item in sequence]
        return super(ListWithActionMixin, self).__setslice__(i, j, sequence)

    def __iadd__(self, other):
        other = [self._action(i) for i in other]
        return super(ListWithActionMixin, self).__iadd__(other)
531
class StrictOrderingOnAppendListWithAction(StrictOrderingOnAppendListMixin,
    ListMixin, ListWithActionMixin, list):
    """An ordered list that accepts a callable to be applied to each item.

    A callable (action) passed to the constructor is run on each item of input.
    The result of running the callable on each item will be stored in place of
    the original input, but the original item must be used to enforce sortedness.
    Note that the order of superclasses is therefore significant: the
    ordering check runs before the action transforms the items.
    """
541
class ListWithAction(ListMixin, ListWithActionMixin, list):
    """A list that accepts a callable to be applied to each item.

    A callable (action) may optionally be passed to the constructor to run on
    each item of input. The result of calling the callable on each item will be
    stored in place of the original input.
    """
549
class MozbuildDeletionError(Exception):
    """Raised when deletion is attempted on moz.build data that forbids it."""
    pass
552
553
def FlagsFactory(flags):
    """Returns a class which holds optional flags for an item in a list.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    The resulting class is used by the various <TypeName>WithFlagsFactory
    functions below.
    """
    assert isinstance(flags, dict)
    assert all(isinstance(v, type) for v in flags.values())

    class Flags(object):
        # Materialize the slot names as a list so the class works whether
        # dict.keys() returns a list (py2) or a view (py3).
        __slots__ = list(flags.keys())
        _flags = flags

        def update(self, **kwargs):
            """Assign several flags at once, with the usual type checks."""
            # items() instead of the Python 2-only iteritems(), matching
            # this module's Python 3 support.
            for k, v in kwargs.items():
                setattr(self, k, v)

        def __getattr__(self, name):
            # Only called when normal lookup fails, i.e. the slot was
            # never assigned; fall back to a default-constructed value.
            if name not in self.__slots__:
                raise AttributeError("'%s' object has no attribute '%s'" %
                                     (self.__class__.__name__, name))
            try:
                # object has no __getattr__, so this raises AttributeError
                # and we always fall into the default-construction path.
                return object.__getattr__(self, name)
            except AttributeError:
                value = self._flags[name]()
                self.__setattr__(name, value)
                return value

        def __setattr__(self, name, value):
            if name not in self.__slots__:
                raise AttributeError("'%s' object has no attribute '%s'" %
                                     (self.__class__.__name__, name))
            # Enforce the declared type for this flag.
            if not isinstance(value, self._flags[name]):
                raise TypeError("'%s' attribute of class '%s' must be '%s'" %
                                (name, self.__class__.__name__,
                                 self._flags[name].__name__))
            return object.__setattr__(self, name, value)

        def __delattr__(self, name):
            raise MozbuildDeletionError('Unable to delete attributes for this object')

    return Flags
599
600
class StrictOrderingOnAppendListWithFlags(StrictOrderingOnAppendList):
    """A list with flags specialized for moz.build environments.

    Each subclass has a set of typed flags; this class lets us use `isinstance`
    for natural testing.
    """
607
608
def StrictOrderingOnAppendListWithFlagsFactory(flags):
    """Returns a StrictOrderingOnAppendList-like object, with optional
    flags on each item.

    The flags are defined in the dict given as argument, where keys are
    the flag names, and values the type used for the value of that flag.

    Example:
        FooList = StrictOrderingOnAppendListWithFlagsFactory({
            'foo': bool, 'bar': unicode
        })
        foo = FooList(['a', 'b', 'c'])
        foo['a'].foo = True
        foo['b'].bar = 'bar'
    """
    class StrictOrderingOnAppendListWithFlagsSpecialization(StrictOrderingOnAppendListWithFlags):
        def __init__(self, iterable=None):
            if iterable is None:
                iterable = []
            StrictOrderingOnAppendListWithFlags.__init__(self, iterable)
            # Flags objects are created lazily, keyed by list item, in
            # __getitem__ below.
            self._flags_type = FlagsFactory(flags)
            self._flags = dict()

        def __getitem__(self, name):
            """Return (creating on first access) the flags for item ``name``."""
            if name not in self._flags:
                if name not in self:
                    raise KeyError("'%s'" % name)
                self._flags[name] = self._flags_type()
            return self._flags[name]

        def __setitem__(self, name, value):
            raise TypeError("'%s' object does not support item assignment" %
                            self.__class__.__name__)

        def _update_flags(self, other):
            # Merge other's per-item flags into ours. Both lists must share
            # the same flags specification and must not both configure
            # flags for the same item.
            if self._flags_type._flags != other._flags_type._flags:
                raise ValueError('Expected a list of strings with flags like %s, not like %s' %
                                 (self._flags_type._flags, other._flags_type._flags))
            intersection = set(self._flags.keys()) & set(other._flags.keys())
            if intersection:
                raise ValueError('Cannot update flags: both lists of strings with flags configure %s' %
                                 intersection)
            self._flags.update(other._flags)

        def extend(self, l):
            # NOTE: super() is anchored past StrictOrderingOnAppendList in
            # the MRO (same in the methods below).
            result = super(StrictOrderingOnAppendList, self).extend(l)
            if isinstance(l, StrictOrderingOnAppendListWithFlags):
                self._update_flags(l)
            return result

        def __setslice__(self, i, j, sequence):
            result = super(StrictOrderingOnAppendList, self).__setslice__(i, j, sequence)
            # We may have removed items; drop their stale flags.
            for name in set(self._flags.keys()) - set(self):
                del self._flags[name]
            if isinstance(sequence, StrictOrderingOnAppendListWithFlags):
                self._update_flags(sequence)
            return result

        def __add__(self, other):
            result = super(StrictOrderingOnAppendList, self).__add__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                # Result has flags from other but not from self, since
                # internally we duplicate self and then extend with other, and
                # only extend knows about flags.  Since we don't allow updating
                # when the set of flag keys intersect, which instance we pass
                # to _update_flags here matters.  This needs to be correct but
                # is an implementation detail.
                result._update_flags(self)
            return result

        def __iadd__(self, other):
            result = super(StrictOrderingOnAppendList, self).__iadd__(other)
            if isinstance(other, StrictOrderingOnAppendListWithFlags):
                self._update_flags(other)
            return result

    return StrictOrderingOnAppendListWithFlagsSpecialization
687
688
class HierarchicalStringList(object):
    """A hierarchy of lists of strings.

    Each instance of this object contains a list of strings, which can be set or
    appended to. A sub-level of the hierarchy is also an instance of this class,
    can be added by appending to an attribute instead.

    For example, the moz.build variable EXPORTS is an instance of this class. We
    can do:

    EXPORTS += ['foo.h']
    EXPORTS.mozilla.dom += ['bar.h']

    In this case, we have 3 instances (EXPORTS, EXPORTS.mozilla, and
    EXPORTS.mozilla.dom), and the first and last each have one element in their
    list.
    """
    __slots__ = ('_strings', '_children')

    def __init__(self):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        self._strings = StrictOrderingOnAppendList()
        self._children = {}

    class StringListAdaptor(collections.Sequence):
        # Read-only sequence view over one level's strings.
        # NOTE(review): collections.Sequence is the deprecated alias of
        # collections.abc.Sequence (removed in Python 3.10).
        def __init__(self, hsl):
            self._hsl = hsl

        def __getitem__(self, index):
            return self._hsl._strings[index]

        def __len__(self):
            return len(self._hsl._strings)

    def walk(self):
        """Walk over all HierarchicalStringLists in the hierarchy.

        This is a generator of (path, sequence).

        The path is '' for the root level and '/'-delimited strings for
        any descendants.  The sequence is a read-only sequence of the
        strings contained at that level.
        """

        if self._strings:
            path_to_here = ''
            yield path_to_here, self.StringListAdaptor(self)

        # Children are visited in sorted order for deterministic output.
        for k, l in sorted(self._children.items()):
            for p, v in l.walk():
                path_to_there = '%s/%s' % (k, p)
                yield path_to_there.strip('/'), v

    def __setattr__(self, name, value):
        if name in self.__slots__:
            return object.__setattr__(self, name, value)

        # __setattr__ can be called with a list when a simple assignment is
        # used:
        #
        # EXPORTS.foo = ['file.h']
        #
        # In this case, we need to overwrite foo's current list of strings.
        #
        # However, __setattr__ is also called with a HierarchicalStringList
        # to try to actually set the attribute. We want to ignore this case,
        # since we don't actually create an attribute called 'foo', but just add
        # it to our list of children (using _get_exportvariable()).
        self._set_exportvariable(name, value)

    def __getattr__(self, name):
        # Dunder names fall through to the default machinery; everything
        # else names (and lazily creates) a child list.
        # NOTE(review): object has no __getattr__, so this line itself
        # raises AttributeError -- the desired outcome for dunder lookups.
        if name.startswith('__'):
            return object.__getattr__(self, name)
        return self._get_exportvariable(name)

    def __delattr__(self, name):
        raise MozbuildDeletionError('Unable to delete attributes for this object')

    def __iadd__(self, other):
        # Merging another hierarchy appends its strings at every level and
        # recurses into its children; a plain list extends this level only.
        if isinstance(other, HierarchicalStringList):
            self._strings += other._strings
            for c in other._children:
                self[c] += other[c]
        else:
            self._check_list(other)
            self._strings += other
        return self

    def __getitem__(self, name):
        return self._get_exportvariable(name)

    def __setitem__(self, name, value):
        self._set_exportvariable(name, value)

    def _get_exportvariable(self, name):
        # Please change ContextDerivedTypedHierarchicalStringList in context.py
        # if you make changes here.
        child = self._children.get(name)
        if not child:
            child = self._children[name] = HierarchicalStringList()
        return child

    def _set_exportvariable(self, name, value):
        # Reassignment of an existing child is an error, except for the
        # no-op self-assignment performed by __setattr__ (see above).
        if name in self._children:
            if value is self._get_exportvariable(name):
                return
            raise KeyError('global_ns', 'reassign',
                           '<some variable>.%s' % name)

        exports = self._get_exportvariable(name)
        exports._check_list(value)
        exports._strings += value

    def _check_list(self, value):
        """Validate that ``value`` is a list whose elements are all strings."""
        if not isinstance(value, list):
            raise ValueError('Expected a list of strings, not %s' % type(value))
        for v in value:
            if not isinstance(v, str_type):
                raise ValueError(
                    'Expected a list of strings, not an element of %s' % type(v))
810
811
class LockFile(object):
    """LockFile is used by the lock_file method to hold the lock.

    This object should not be used directly, but only through
    the lock_file method below.
    """

    def __init__(self, lockfile):
        self.lockfile = lockfile

    def __del__(self):
        # Remove the lock file when the holder is garbage collected,
        # retrying while another process still has the file open.
        while True:
            try:
                os.remove(self.lockfile)
                break
            except OSError as e:
                if e.errno == errno.EACCES:
                    # Another process probably has the file open, we'll retry.
                    # Just a short sleep since we want to drop the lock ASAP
                    # (but we need to let some other process close the file
                    # first).
                    time.sleep(0.1)
                else:
                    # Re-raise unknown errors. This else must hang off the
                    # if: attached to the try/except (as it was before) it
                    # is skipped by the break, and e.g. a missing lockfile
                    # (ENOENT) would retry forever instead of raising.
                    raise
837
838
def lock_file(lockfile, max_wait = 600):
    """Create and hold a lockfile of the given name, with the given timeout.

    To release the lock, delete the returned object.

    Returns a LockFile whose finalizer removes ``lockfile``. Raises if an
    existing lock is older than ``max_wait`` seconds.
    """

    # FUTURE This function and object could be written as a context manager.

    while True:
        try:
            # O_EXCL|O_CREAT: creation succeeds only if the file does not
            # already exist, making us the exclusive lock owner.
            fd = os.open(lockfile, os.O_EXCL | os.O_RDWR | os.O_CREAT)
            # We created the lockfile, so we're the owner
            break
        except OSError as e:
            if (e.errno == errno.EEXIST or
                (sys.platform == "win32" and e.errno == errno.EACCES)):
                pass
            else:
                # Should not occur
                raise

        try:
            # The lock file exists, try to stat it to get its age
            # and read its contents to report the owner PID
            f = open(lockfile, 'r')
            s = os.stat(lockfile)
        except EnvironmentError as e:
            if e.errno == errno.ENOENT or e.errno == errno.EACCES:
                # We didn't create the lockfile, so it did exist, but it's
                # gone now. Just try again
                continue

            raise Exception('{0} exists but stat() failed: {1}'.format(
                lockfile, e.strerror))

        # We didn't create the lockfile and it's still there, check
        # its age
        now = int(time.time())
        if now - s[stat.ST_MTIME] > max_wait:
            pid = f.readline().rstrip()
            raise Exception('{0} has been locked for more than '
                '{1} seconds (PID {2})'.format(lockfile, max_wait, pid))

        # It's not been locked too long, wait a while and retry
        f.close()
        time.sleep(1)

    # if we get here. we have the lockfile. Convert the os.open file
    # descriptor into a Python file object and record our PID in it
    f = os.fdopen(fd, 'w')
    f.write('{0}\n'.format(os.getpid()))
    f.close()

    return LockFile(lockfile)
893
894
class OrderedDefaultDict(OrderedDict):
    '''An OrderedDict that, like defaultdict, lazily fills in missing keys
    with a value produced by the factory given at construction time, while
    preserving insertion order.'''

    def __init__(self, default_factory, *args, **kwargs):
        OrderedDict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        # First access to a missing key stores and returns a fresh default.
        fresh = self._default_factory()
        self[key] = fresh
        return fresh
904
905
class KeyedDefaultDict(dict):
    '''A defaultdict variant whose factory receives the missing key as its
    argument when computing the default value.'''

    def __init__(self, default_factory, *args, **kwargs):
        dict.__init__(self, *args, **kwargs)
        self._default_factory = default_factory

    def __missing__(self, key):
        computed = self._default_factory(key)
        # Store via dict.__setitem__ so subclasses overriding __setitem__
        # (e.g. read-only variants) can't block the lazy fill-in.
        dict.__setitem__(self, key, computed)
        return computed
917
918
class ReadOnlyKeyedDefaultDict(KeyedDefaultDict, ReadOnlyDict):
    '''Like KeyedDefaultDict, but read-only.

    NOTE(review): lazy population of missing keys should still work here
    because KeyedDefaultDict.__missing__ writes through dict.__setitem__
    directly, presumably bypassing ReadOnlyDict's write protection --
    confirm against the ReadOnlyDict implementation earlier in this file.
    '''
921
922
class memoize(dict):
    '''Decorator caching a callable's results, keyed on the positional
    arguments it is invoked with.

    Works for plain functions as well as instance methods; for methods,
    the cache lives on the instance itself, under a '_<funcname>'
    attribute.
    '''

    def __init__(self, func):
        self.func = func
        functools.update_wrapper(self, func)

    def __call__(self, *args):
        # For plain functions, this dict instance is itself the cache.
        try:
            return self[args]
        except KeyError:
            result = self[args] = self.func(*args)
            return result

    def method_call(self, instance, *args):
        attr = '_%s' % self.func.__name__
        if not hasattr(instance, attr):
            setattr(instance, attr, {})
        cache = getattr(instance, attr)
        if args in cache:
            return cache[args]
        value = self.func(instance, *args)
        cache[args] = value
        return value

    def __get__(self, instance, cls):
        bound = functools.partial(self.method_call, instance)
        return functools.update_wrapper(bound, self.func)
950
951
class memoized_property(object):
    '''Property-like descriptor computing its value at most once per
    instance; the result is cached on the instance under a '_<funcname>'
    attribute and returned from there on subsequent reads.
    '''

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls):
        attr = '_%s' % self.func.__name__
        try:
            return getattr(instance, attr)
        except AttributeError:
            value = self.func(instance)
            setattr(instance, attr, value)
            return value
964
965
def TypedNamedTuple(name, fields):
    """Factory for named tuple types with strong typing.

    Arguments are an iterable of 2-tuples. The first member is the
    field name. The second member is a type the field will be validated
    to be.

    Construction of instances varies from ``collections.namedtuple``.

    First, if a single tuple argument is given to the constructor, this is
    treated as the equivalent of passing each tuple value as a separate
    argument into __init__. e.g.::

        t = (1, 2)
        TypedTuple(t) == TypedTuple(1, 2)

    This behavior is meant for moz.build files, so vanilla tuples are
    automatically cast to typed tuple instances.

    Second, fields in the tuple are validated to be instances of the specified
    type. This is done via an ``isinstance()`` check. To allow multiple types,
    pass a tuple as the allowed types field.
    """
    # Plain namedtuple carrying just the field names; validation is layered
    # on top by the subclass below. (The generator expression has its own
    # scope, so its `name` does not clobber the `name` argument.)
    cls = collections.namedtuple(name, (name for name, typ in fields))

    class TypedTuple(cls):
        __slots__ = ()

        def __new__(klass, *args, **kwargs):
            # A single plain-tuple argument supplies all field values
            # (the moz.build convenience described in the docstring).
            if len(args) == 1 and not kwargs and isinstance(args[0], tuple):
                args = args[0]

            return super(TypedTuple, klass).__new__(klass, *args, **kwargs)

        def __init__(self, *args, **kwargs):
            # _fields is overridden below to hold (name, type) pairs, so
            # iterating it yields both the field name (for the error
            # message) and the type to validate against.
            for i, (fname, ftype) in enumerate(self._fields):
                value = self[i]

                if not isinstance(value, ftype):
                    raise TypeError('field in tuple not of proper type: %s; '
                                    'got %s, expected %s' % (fname,
                                    type(value), ftype))

            super(TypedTuple, self).__init__(*args, **kwargs)

    # Shadow namedtuple's _fields (normally just the names) with the
    # (name, type) pairs; __init__ above relies on this. NOTE(review):
    # this also changes what namedtuple helpers like _replace/_make see.
    TypedTuple._fields = fields

    return TypedTuple
1014
1015
class TypedListMixin(object):
    '''Mixin for a list with type coercion. See TypedList.'''

    def _ensure_type(self, l):
        # Instances of the concrete typed class are already coerced.
        if isinstance(l, self.__class__):
            return l
        return [self.normalize(item) for item in l]

    def __init__(self, iterable=None, **kwargs):
        items = self._ensure_type(iterable if iterable is not None else [])
        super(TypedListMixin, self).__init__(items, **kwargs)

    def extend(self, l):
        return super(TypedListMixin, self).extend(self._ensure_type(l))

    def __setslice__(self, i, j, sequence):
        # Python 2 slice-assignment hook; coerce before delegating.
        return super(TypedListMixin, self).__setslice__(
            i, j, self._ensure_type(sequence))

    def __add__(self, other):
        return super(TypedListMixin, self).__add__(self._ensure_type(other))

    def __iadd__(self, other):
        return super(TypedListMixin, self).__iadd__(self._ensure_type(other))

    def append(self, other):
        # Route through __iadd__ so the single element is coerced too.
        self += [other]
1055
1056
@memoize
def TypedList(type, base_class=List):
    '''Build a list class coercing its elements to the given ``type``.

    ``type`` may perform strict validation and throw ValueError
    exceptions.

    A ``base_class`` other than List can be supplied for more specific
    uses; for example, a Typed StrictOrderingOnAppendList can be created
    with:

       TypedList(unicode, StrictOrderingOnAppendList)
    '''
    class _TypedList(TypedListMixin, base_class):
        @staticmethod
        def normalize(e):
            # Coerce anything that isn't already an instance of `type`.
            return e if isinstance(e, type) else type(e)

    return _TypedList
1077
def group_unified_files(files, unified_prefix, unified_suffix,
                        files_per_unified_file):
    """Return an iterator of (unified_filename, source_filenames) tuples.

    We compile most C and C++ files in "unified mode"; instead of compiling
    ``a.cpp``, ``b.cpp``, and ``c.cpp`` separately, we compile a single file
    that looks approximately like::

       #include "a.cpp"
       #include "b.cpp"
       #include "c.cpp"

    This function handles the details of generating names for the unified
    files, and determining which original source files go in which unified
    file."""

    # Make sure the input list is sorted. If it's not, bad things could happen!
    files = sorted(files)

    # itertools.izip_longest was renamed to zip_longest in Python 3;
    # resolve whichever exists, consistent with this file's str_type shim.
    zip_longest = getattr(itertools, 'izip_longest', None) or \
        itertools.zip_longest

    # Our last returned list of source filenames may be short, and we
    # don't want the fill value inserted by zip_longest to be an issue,
    # so we filter it out below. A tuple is used as the fill value since
    # a filename (a string) can never compare equal to it.
    dummy_fill_value = ("dummy",)

    # From the itertools documentation, slightly modified:
    def grouper(n, iterable):
        "grouper(3, 'ABCDEFG', 'x') --> ABC DEF Gxx"
        args = [iter(iterable)] * n
        return zip_longest(fillvalue=dummy_fill_value, *args)

    for i, unified_group in enumerate(grouper(files_per_unified_file,
                                              files)):
        just_the_filenames = [f for f in unified_group
                              if f != dummy_fill_value]
        yield '%s%d.%s' % (unified_prefix, i, unified_suffix), just_the_filenames
1115
1116
def pair(iterable):
    '''Given an iterable, returns an iterable pairing its items.

    For example,
        list(pair([1,2,3,4,5,6]))
    returns
        [(1,2), (3,4), (5,6)]

    An odd trailing item is paired with None.
    '''
    # itertools.izip_longest was renamed to zip_longest in Python 3;
    # resolve whichever exists, consistent with this file's str_type shim.
    zip_longest = getattr(itertools, 'izip_longest', None) or \
        itertools.zip_longest
    # Zipping one iterator with itself consumes two items per step.
    i = iter(iterable)
    return zip_longest(i, i)
1127
1128
# Matches make-style $(VAR) variable references; group 1 captures the
# variable name. A raw string avoids the invalid escape sequences
# ('\$', '\(') that trigger warnings on modern Python.
VARIABLES_RE = re.compile(r'\$\((\w+)\)')
1130
1131
def expand_variables(s, variables):
    '''Given a string with $(var) variable references, replace those references
    with the corresponding entries from the given `variables` dict.

    If a variable value is not a string, it is iterated and its items are
    joined with a whitespace.'''
    result = ''
    # VARIABLES_RE.split alternates literal text and variable names, which
    # pair() walks two at a time; the final name may be None.
    # (The loop variable is named `literal` rather than re-using `s`, which
    # the original shadowed.)
    for literal, name in pair(VARIABLES_RE.split(s)):
        result += literal
        value = variables.get(name)
        if not value:
            continue
        # Use the module's str_type shim instead of the Python 2-only
        # types.StringTypes, keeping this function 2/3 compatible.
        if not isinstance(value, str_type):
            value = ' '.join(value)
        result += value
    return result
1148
1149
class DefinesAction(argparse.Action):
    '''argparse Action handling -Dvar[=value] style define arguments.'''

    def __call__(self, parser, namespace, values, option_string):
        existing = getattr(namespace, self.dest)
        defines = {} if existing is None else existing
        name, sep, value = values.partition('=')
        if not sep:
            # Bare -Dvar gets the value 1.
            value = 1
        elif value.isdigit():
            # All-digit values are stored as integers.
            value = int(value)
        defines[name] = value
        setattr(namespace, self.dest, defines)
1165
1166
class EnumStringComparisonError(Exception):
    '''Raised when an EnumString is compared against a value outside its
    set of POSSIBLE_VALUES (see EnumString below).'''
    pass
1169
1170
class EnumString(unicode):
    '''A string type that only can have a limited set of values, similarly to
    an Enum, and can only be compared against that set of values.

    The class is meant to be subclassed, where the subclass defines
    POSSIBLE_VALUES. The `subclass` method is a helper to create such
    subclasses.
    '''
    POSSIBLE_VALUES = ()
    def __init__(self, value):
        # The string content itself is set by unicode.__new__; this only
        # validates membership in the allowed set.
        if value not in self.POSSIBLE_VALUES:
            raise ValueError("'%s' is not a valid value for %s"
                             % (value, self.__class__.__name__))

    def __eq__(self, other):
        # Comparison against a value outside POSSIBLE_VALUES raises rather
        # than returning False.
        if other not in self.POSSIBLE_VALUES:
            raise EnumStringComparisonError(
                'Can only compare with %s'
                % ', '.join("'%s'" % v for v in self.POSSIBLE_VALUES))
        return super(EnumString, self).__eq__(other)

    def __ne__(self, other):
        # Delegate to __eq__ so != raises for invalid values too.
        return not (self == other)

    @staticmethod
    def subclass(*possible_values):
        # Convenience factory: EnumString.subclass('a', 'b') returns a
        # subclass whose POSSIBLE_VALUES are ('a', 'b').
        class EnumStringSubclass(EnumString):
            POSSIBLE_VALUES = possible_values
        return EnumStringSubclass
1200
1201
1202def _escape_char(c):
1203    # str.encode('unicode_espace') doesn't escape quotes, presumably because
1204    # quoting could be done with either ' or ".
1205    if c == "'":
1206        return "\\'"
1207    return unicode(c.encode('unicode_escape'))
1208
# Mapping table between raw characters below \x80 and their escaped
# counterpart, when they differ
_INDENTED_REPR_TABLE = {
    c: e
    for c, e in map(lambda x: (x, _escape_char(x)),
                    map(unichr, range(128)))
    if c != e
}
# Regexp matching all characters to escape. Joining the escaped *values*
# (rather than the raw key characters) works because each escape sequence
# (e.g. '\\n', '\\x00', "\\'") is itself a valid escape inside a regex
# character class, denoting the same raw character.
_INDENTED_REPR_RE = re.compile(
    '([' + ''.join(_INDENTED_REPR_TABLE.values()) + ']+)')
1220
1221
def indented_repr(o, indent=4):
    '''Similar to repr(), but returns an indented representation of the object

    One notable difference with repr is that the returned representation
    assumes `from __future__ import unicode_literals`.
    '''
    one_indent = ' ' * indent
    # Recursive generator yielding the representation in fragments;
    # `level` is the current nesting depth for indentation.
    def recurse_indented_repr(o, level):
        if isinstance(o, dict):
            yield '{\n'
            # Sorted keys make the output deterministic.
            for k, v in sorted(o.items()):
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(k, level + 1):
                    yield d
                yield ': '
                for d in recurse_indented_repr(v, level + 1):
                    yield d
                yield ',\n'
            yield one_indent * level
            yield '}'
        elif isinstance(o, bytes):
            # Explicit b prefix: under unicode_literals, repr() output for
            # bytes would otherwise read as a unicode literal.
            yield 'b'
            yield repr(o)
        elif isinstance(o, unicode):
            yield "'"
            # We want a readable string (non escaped unicode), but some
            # special characters need escaping (e.g. \n, \t, etc.)
            # re.split with a capturing group alternates plain text (even
            # indices) and runs of characters needing escaping (odd
            # indices).
            for i, s in enumerate(_INDENTED_REPR_RE.split(o)):
                if i % 2:
                    for c in s:
                        yield _INDENTED_REPR_TABLE[c]
                else:
                    yield s
            yield "'"
        elif hasattr(o, '__iter__'):
            # Any other iterable is rendered with list syntax.
            yield '[\n'
            for i in o:
                yield one_indent * (level + 1)
                for d in recurse_indented_repr(i, level + 1):
                    yield d
                yield ',\n'
            yield one_indent * level
            yield ']'
        else:
            yield repr(o)
    return ''.join(recurse_indented_repr(o, 0))
1268
1269
def encode(obj, encoding='utf-8'):
    '''Recursively encode unicode strings with the given encoding.'''
    # dict, bytes and unicode are special-cased before the generic
    # Iterable fallback; bytes pass through untouched.
    if isinstance(obj, dict):
        return {encode(key, encoding): encode(val, encoding)
                for key, val in obj.iteritems()}
    if isinstance(obj, bytes):
        return obj
    if isinstance(obj, unicode):
        return obj.encode(encoding)
    if isinstance(obj, Iterable):
        return [encode(item, encoding) for item in obj]
    return obj
1284
1285
def patch_main():
    '''This is a hack to work around the fact that Windows multiprocessing needs
    to import the original main module, and assumes that it corresponds to a file
    ending in .py.

    We do this by a sort of two-level function interposing. The first
    level interposes forking.get_command_line() with our version defined
    in my_get_command_line(). Our version of get_command_line will
    replace the command string with the contents of the fork_interpose()
    function to be used in the subprocess.

    The subprocess then gets an interposed imp.find_module(), which we
    hack up to find the main module name multiprocessing will assume, since we
    know what this will be based on the main module in the parent. If we're not
    looking for our main module, then the original find_module will suffice.

    See also: http://bugs.python.org/issue19946
    And: https://bugzilla.mozilla.org/show_bug.cgi?id=914563
    '''
    if sys.platform == 'win32':
        import inspect
        import os
        from multiprocessing import forking
        global orig_command_line

        # Figure out what multiprocessing will assume our main module
        # is called (see python/Lib/multiprocessing/forking.py).
        main_path = getattr(sys.modules['__main__'], '__file__', None)
        if main_path is None:
            # If someone deleted or modified __main__, there's nothing left for
            # us to do.
            return
        main_file_name = os.path.basename(main_path)
        main_module_name, ext = os.path.splitext(main_file_name)
        if ext == '.py':
            # If main is a .py file, everything ought to work as expected.
            return

        # NOTE: fork_interpose is never called in this process. Its source
        # is extracted with inspect.getsourcelines() below and executed in
        # the child via the rewritten command line.
        def fork_interpose():
            import imp
            import os
            import sys
            orig_find_module = imp.find_module
            # Redirect lookups of the assumed main module name to the real
            # (non-.py) main file; everything else uses the original finder.
            def my_find_module(name, dirs):
                if name == main_module_name:
                    path = os.path.join(dirs[0], main_file_name)
                    f = open(path)
                    return (f, path, ('', 'r', imp.PY_SOURCE))
                return orig_find_module(name, dirs)

            # Don't allow writing bytecode file for the main module.
            orig_load_module = imp.load_module
            def my_load_module(name, file, path, description):
                # multiprocess.forking invokes imp.load_module manually and
                # hard-codes the name __parents_main__ as the module name.
                if name == '__parents_main__':
                    old_bytecode = sys.dont_write_bytecode
                    sys.dont_write_bytecode = True
                    try:
                        return orig_load_module(name, file, path, description)
                    finally:
                        sys.dont_write_bytecode = old_bytecode

                return orig_load_module(name, file, path, description)

            imp.find_module = my_find_module
            imp.load_module = my_load_module
            from multiprocessing.forking import main; main()

        def my_get_command_line():
            fork_code, lineno = inspect.getsourcelines(fork_interpose)
            # Remove the first line (for 'def fork_interpose():') and the three
            # levels of indentation (12 spaces), add our relevant globals.
            fork_string = ("main_file_name = '%s'\n" % main_file_name +
                           "main_module_name = '%s'\n" % main_module_name +
                           ''.join(x[12:] for x in fork_code[1:]))
            cmdline = orig_command_line()
            # NOTE(review): index 2 is presumably the code argument of a
            # ['python', '-c', code, ...] command line produced by
            # forking.get_command_line -- confirm against the stdlib.
            cmdline[2] = fork_string
            return cmdline
        orig_command_line = forking.get_command_line
        forking.get_command_line = my_get_command_line
1367