1# coding: utf-8
2"""
3Package resource API
4--------------------
5
6A resource is a logical file contained within a package, or a logical
7subdirectory thereof.  The package resource API expects resource names
8to have their path parts separated with ``/``, *not* whatever the local
9path separator is.  Do not use os.path operations to manipulate resource
10names being passed into the API.
11
12The package resource API is designed to work with normal filesystem packages,
13.egg files, and unpacked .egg files.  It can also work in a limited way with
14.zip files and with custom PEP 302 loaders that support the ``get_data()``
15method.
16"""
17
18from __future__ import absolute_import
19
20import sys
21import os
22import io
23import time
24import re
25import types
26import zipfile
27import zipimport
28import warnings
29import stat
30import functools
31import pkgutil
32import operator
33import platform
34import collections
35import plistlib
36import email.parser
37import tempfile
38import textwrap
39import itertools
40from pkgutil import get_importer
41
42try:
43    import _imp
44except ImportError:
45    # Python 3.2 compatibility
46    import imp as _imp
47
48from pip9._vendor import six
49from pip9._vendor.six.moves import urllib, map, filter
50
51# capture these to bypass sandboxing
52from os import utime
53try:
54    from os import mkdir, rename, unlink
55    WRITE_SUPPORT = True
56except ImportError:
57    # no write support, probably under GAE
58    WRITE_SUPPORT = False
59
60from os import open as os_open
61from os.path import isdir, split
62
63try:
64    import importlib.machinery as importlib_machinery
65    # access attribute to force import under delayed import mechanisms.
66    importlib_machinery.__name__
67except ImportError:
68    importlib_machinery = None
69
70from pip9._vendor import appdirs
71from pip9._vendor import packaging
72__import__('pip9._vendor.packaging.version')
73__import__('pip9._vendor.packaging.specifiers')
74__import__('pip9._vendor.packaging.requirements')
75__import__('pip9._vendor.packaging.markers')
76
# Refuse to run on Python 3.0-3.2; this module relies on features added in
# Python 3.3 (Python 2.x is still supported via `six`).
if (3, 0) < sys.version_info < (3, 3):
    raise RuntimeError("Python 3.3 or later is required")

# declare some globals that will be defined later to
# satisfy the linters.
require = None
working_set = None
84
85
class PEP440Warning(RuntimeWarning):
    """
    Warning category raised when a version or specifier string does not
    comply with PEP 440.
    """
91
92
class _SetuptoolsVersionMixin(object):
    """
    Mixin that preserves the legacy (pre-setuptools 8.0) behavior of parsed
    versions: instances may be compared against and converted to the old
    parsed-version tuples (via ``tuple()`` / iteration / indexing).
    """

    def __hash__(self):
        # defer to the concrete packaging version class's hash
        return super(_SetuptoolsVersionMixin, self).__hash__()

    def __lt__(self, other):
        # Tuples are compared using the legacy parsed form (see __iter__);
        # anything else defers to the real version class.
        if isinstance(other, tuple):
            return tuple(self) < other
        else:
            return super(_SetuptoolsVersionMixin, self).__lt__(other)

    def __le__(self, other):
        if isinstance(other, tuple):
            return tuple(self) <= other
        else:
            return super(_SetuptoolsVersionMixin, self).__le__(other)

    def __eq__(self, other):
        if isinstance(other, tuple):
            return tuple(self) == other
        else:
            return super(_SetuptoolsVersionMixin, self).__eq__(other)

    def __ge__(self, other):
        if isinstance(other, tuple):
            return tuple(self) >= other
        else:
            return super(_SetuptoolsVersionMixin, self).__ge__(other)

    def __gt__(self, other):
        if isinstance(other, tuple):
            return tuple(self) > other
        else:
            return super(_SetuptoolsVersionMixin, self).__gt__(other)

    def __ne__(self, other):
        if isinstance(other, tuple):
            return tuple(self) != other
        else:
            return super(_SetuptoolsVersionMixin, self).__ne__(other)

    def __getitem__(self, key):
        # index into the legacy parsed-version tuple
        return tuple(self)[key]

    def __iter__(self):
        """Yield the legacy parsed-version parts (warns: legacy behavior)."""
        component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
        # map pre-release/dev tags onto the legacy sort markers
        replace = {
            'pre': 'c',
            'preview': 'c',
            '-': 'final-',
            'rc': 'c',
            'dev': '@',
        }.get

        def _parse_version_parts(s):
            # split into numeric / alphabetic / separator components
            for part in component_re.split(s):
                part = replace(part, part)
                if not part or part == '.':
                    continue
                if part[:1] in '0123456789':
                    # pad for numeric comparison
                    yield part.zfill(8)
                else:
                    yield '*' + part

            # ensure that alpha/beta/candidate are before final
            yield '*final'

        def old_parse_version(s):
            parts = []
            for part in _parse_version_parts(s.lower()):
                if part.startswith('*'):
                    # remove '-' before a prerelease tag
                    if part < '*final':
                        while parts and parts[-1] == '*final-':
                            parts.pop()
                    # remove trailing zeros from each series of numeric parts
                    while parts and parts[-1] == '00000000':
                        parts.pop()
                parts.append(part)
            return tuple(parts)

        # Warn for use of this function
        warnings.warn(
            "You have iterated over the result of "
            "pkg_resources.parse_version. This is a legacy behavior which is "
            "inconsistent with the new version class introduced in setuptools "
            "8.0. In most cases, conversion to a tuple is unnecessary. For "
            "comparison of versions, sort the Version instances directly. If "
            "you have another use case requiring the tuple, please file a "
            "bug with the setuptools project describing that need.",
            RuntimeWarning,
            stacklevel=1,
        )

        for part in old_parse_version(str(self)):
            yield part
189
190
class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version):
    """A PEP 440 version with legacy tuple-comparison support mixed in."""
193
194
class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin,
                              packaging.version.LegacyVersion):
    """A non-PEP 440 (legacy) version with tuple-comparison support."""
198
199
def parse_version(v):
    """Parse a version string into a comparable version object.

    Returns a ``SetuptoolsVersion`` when `v` is a valid PEP 440 version,
    otherwise falls back to ``SetuptoolsLegacyVersion``.
    """
    try:
        return SetuptoolsVersion(v)
    except packaging.version.InvalidVersion:
        return SetuptoolsLegacyVersion(v)
205
206
207_state_vars = {}
208
209
def _declare_state(vartype, **kw):
    """Create module globals from `kw` and record each one's state type."""
    globals().update(kw)
    for name in kw:
        _state_vars[name] = vartype
213
214
def __getstate__():
    """Snapshot all registered module-level state into a plain dict."""
    g = globals()
    return {
        name: g['_sget_' + kind](g[name])
        for name, kind in _state_vars.items()
    }
221
222
def __setstate__(state):
    """Restore module-level state from a snapshot made by __getstate__."""
    g = globals()
    for name, snapshot in state.items():
        setter = g['_sset_' + _state_vars[name]]
        setter(name, g[name], snapshot)
    return state
228
229
def _sget_dict(val):
    """Snapshot a dict-typed state variable (shallow copy)."""
    return val.copy()
232
233
def _sset_dict(key, ob, state):
    """Restore a dict-typed state variable in place from `state`."""
    ob.clear()
    ob.update(state)
237
238
def _sget_object(val):
    """Snapshot an object-typed state variable via its __getstate__."""
    return val.__getstate__()
241
242
def _sset_object(key, ob, state):
    """Restore an object-typed state variable via its __setstate__."""
    ob.__setstate__(state)
245
246
247_sget_none = _sset_none = lambda *args: None
248
249
def get_supported_platform():
    """Return this platform's maximum compatible version.

    distutils.util.get_platform() normally reports the minimum version
    of Mac OS X that would be required to *use* extensions produced by
    distutils.  But what we want when checking compatibility is to know the
    version of Mac OS X that we are *running*.  To allow usage of packages that
    explicitly require a newer version of Mac OS X, we must also know the
    current version of the OS.

    If this condition occurs for any other platform with a version in its
    platform strings, this function should be extended accordingly.
    """
    plat = get_build_platform()
    if sys.platform != "darwin":
        return plat
    m = macosVersionString.match(plat)
    if m is None:
        return plat
    try:
        # substitute the *running* OS version for the build-time minimum
        return 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
    except ValueError:
        # not Mac OS X
        return plat
272
273
# Explicit public API of this module; every name listed here is defined
# later in the file.
__all__ = [
    # Basic resource access and distribution/entry point discovery
    'require', 'run_script', 'get_provider', 'get_distribution',
    'load_entry_point', 'get_entry_map', 'get_entry_info',
    'iter_entry_points',
    'resource_string', 'resource_stream', 'resource_filename',
    'resource_listdir', 'resource_exists', 'resource_isdir',

    # Environmental control
    'declare_namespace', 'working_set', 'add_activation_listener',
    'find_distributions', 'set_extraction_path', 'cleanup_resources',
    'get_default_cache',

    # Primary implementation classes
    'Environment', 'WorkingSet', 'ResourceManager',
    'Distribution', 'Requirement', 'EntryPoint',

    # Exceptions
    'ResolutionError', 'VersionConflict', 'DistributionNotFound',
    'UnknownExtra', 'ExtractionError',

    # Warnings
    'PEP440Warning',

    # Parsing functions and string utilities
    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
    'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker',

    # filesystem utilities
    'ensure_directory', 'normalize_path',

    # Distribution "precedence" constants
    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',

    # "Provider" interfaces, implementations, and registration/lookup APIs
    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
    'register_finder', 'register_namespace_handler', 'register_loader_type',
    'fixup_namespace_packages', 'get_importer',

    # Deprecated/backward compatibility only
    'run_main', 'AvailableDistributions',
]
319
320
class ResolutionError(Exception):
    """Abstract base for dependency resolution errors"""

    def __repr__(self):
        # e.g. "DistributionNotFound('foo', None)"
        return "%s%r" % (self.__class__.__name__, self.args)
326
327
class VersionConflict(ResolutionError):
    """
    An already-installed version conflicts with the requested version.

    Should be initialized with the installed Distribution and the requested
    Requirement.
    """

    _template = "{self.dist} is installed but {self.req} is required"

    @property
    def dist(self):
        # the installed Distribution
        return self.args[0]

    @property
    def req(self):
        # the conflicting Requirement
        return self.args[1]

    def report(self):
        """Render a human-readable description of the conflict."""
        return self._template.format(self=self)

    def with_context(self, required_by):
        """
        If required_by is non-empty, return a version of self that is a
        ContextualVersionConflict.
        """
        if required_by:
            return ContextualVersionConflict(*(self.args + (required_by,)))
        return self
358
359
class ContextualVersionConflict(VersionConflict):
    """
    A VersionConflict that accepts a third parameter, the set of the
    requirements that required the installed Distribution.
    """

    _template = VersionConflict._template + ' by {self.required_by}'

    @property
    def required_by(self):
        # the requirements that pulled in the conflicting distribution
        return self.args[2]
371
372
class DistributionNotFound(ResolutionError):
    """A requested distribution was not found"""

    _template = ("The '{self.req}' distribution was not found "
                 "and is required by {self.requirers_str}")

    @property
    def req(self):
        # the unsatisfied Requirement
        return self.args[0]

    @property
    def requirers(self):
        # collection of requirer names, or None/empty when unknown
        return self.args[1]

    @property
    def requirers_str(self):
        if self.requirers:
            return ', '.join(self.requirers)
        return 'the application'

    def report(self):
        """Render a human-readable description of the failure."""
        return self._template.format(self=self)

    def __str__(self):
        return self.report()
398
399
class UnknownExtra(ResolutionError):
    """Distribution doesn't have an "extra feature" of the given name."""
402
403
# loader type -> provider factory; populated via register_loader_type()
_provider_factories = {}

# "major.minor" of the running interpreter.  Computed from version_info
# rather than sys.version[:3], which breaks on Python 3.10+ where the
# minor version has two digits ("3.1" instead of "3.10").
PY_MAJOR = '{}.{}'.format(*sys.version_info)

# Distribution "precedence" constants (higher wins)
EGG_DIST = 3
BINARY_DIST = 2
SOURCE_DIST = 1
CHECKOUT_DIST = 0
DEVELOP_DIST = -1
412
413
def register_loader_type(loader_type, provider_factory):
    """Register `provider_factory` to make providers for `loader_type`

    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
    and `provider_factory` is a function that, passed a *module* object,
    returns an ``IResourceProvider`` for that module.
    """
    # consulted by get_provider() through _find_adapter()
    _provider_factories[loader_type] = provider_factory
422
423
def get_provider(moduleOrReq):
    """Return an IResourceProvider for the named module or requirement"""
    if isinstance(moduleOrReq, Requirement):
        # resolve the requirement to an active (or newly required) dist
        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
    if moduleOrReq not in sys.modules:
        __import__(moduleOrReq)
    module = sys.modules[moduleOrReq]
    loader = getattr(module, '__loader__', None)
    return _find_adapter(_provider_factories, loader)(module)
435
436
def _macosx_vers(_cache=[]):
    """Return the macOS version as a list of string components.

    The mutable default argument is deliberate: it caches the result for
    the life of the process.  Falls back to reading SystemVersion.plist
    (e.g. under MacPorts) when platform.mac_ver() is empty.
    """
    if not _cache:
        version = platform.mac_ver()[0]
        # fallback for MacPorts
        if version == '':
            plist = '/System/Library/CoreServices/SystemVersion.plist'
            if os.path.exists(plist):
                if hasattr(plistlib, 'load'):
                    # plistlib.readPlist was removed in Python 3.9;
                    # prefer the modern API when available.
                    with open(plist, 'rb') as fh:
                        plist_content = plistlib.load(fh)
                elif hasattr(plistlib, 'readPlist'):
                    plist_content = plistlib.readPlist(plist)
                else:
                    plist_content = {}
                if 'ProductVersion' in plist_content:
                    version = plist_content['ProductVersion']

        _cache.append(version.split('.'))
    return _cache[0]
451
452
def _macosx_arch(machine):
    """Normalize legacy PowerPC machine names to 'ppc'; pass others through."""
    if machine in ('PowerPC', 'Power_Macintosh'):
        return 'ppc'
    return machine
455
456
def get_build_platform():
    """Return this platform's string for platform-specific distributions

    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
    needs some hacks for Linux and Mac OS X.
    """
    try:
        # Python 2.7 or >=3.2
        from sysconfig import get_platform
    except ImportError:
        from distutils.util import get_platform

    plat = get_platform()
    if sys.platform != "darwin" or plat.startswith('macosx-'):
        return plat
    try:
        version = _macosx_vers()
        machine = os.uname()[4].replace(" ", "_")
        return "macosx-%d.%d-%s" % (
            int(version[0]), int(version[1]), _macosx_arch(machine))
    except ValueError:
        # a non-Mac darwin system: fall back to the default platform string
        return plat
481
482
# Patterns for platform strings of macOS distributions: the modern
# "macosx-<major>.<minor>-<arch>" form and the legacy (pre-setuptools 0.6)
# "darwin-<x>.<y>.<z>-<arch>" form.
macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
# XXX backward compat
get_platform = get_build_platform
487
488
def compatible_platforms(provided, required):
    """Can code for the `provided` platform run on the `required` platform?

    Returns true if either platform is ``None``, or the platforms are equal.

    XXX Needs compatibility checks for Linux and other unixy OSes.
    """
    # trivial cases: unspecified platforms are compatible with everything
    if provided is None or required is None or provided == required:
        return True

    # Mac OS X special cases
    req_mac = macosVersionString.match(required)
    if not req_mac:
        # XXX Linux and other platforms' special cases should go here
        return False

    prov_mac = macosVersionString.match(provided)
    if not prov_mac:
        # Backwards compatibility for packages built before setuptools 0.6,
        # which used the legacy "darwin-..." designation.
        prov_darwin = darwinVersionString.match(provided)
        if prov_darwin:
            dversion = int(prov_darwin.group(1))
            macosversion = "%s.%s" % (req_mac.group(1), req_mac.group(2))
            if dversion == 7 and macosversion >= "10.3" or \
                    dversion == 8 and macosversion >= "10.4":
                return True
        # egg isn't macosx or legacy darwin
        return False

    # same major version and machine type required
    if prov_mac.group(1) != req_mac.group(1) or \
            prov_mac.group(3) != req_mac.group(3):
        return False

    # the required OS minor version must be >= the provided one
    if int(prov_mac.group(2)) > int(req_mac.group(2)):
        return False

    return True
533
534
def run_script(dist_spec, script_name):
    """Locate distribution `dist_spec` and run its `script_name` script"""
    # Clear the caller's global namespace (keeping only __name__) so the
    # script executes with a fresh namespace in the caller's frame.
    ns = sys._getframe(1).f_globals
    name = ns['__name__']
    ns.clear()
    ns['__name__'] = name
    require(dist_spec)[0].run_script(script_name, ns)


# backward compatibility
run_main = run_script
546
547
def get_distribution(dist):
    """Return a current distribution object for a Requirement or string

    Accepts a project-name/requirement string, a ``Requirement``, or a
    ``Distribution``; anything else raises ``TypeError``.
    """
    if isinstance(dist, six.string_types):
        dist = Requirement.parse(dist)
    if isinstance(dist, Requirement):
        dist = get_provider(dist)
    if not isinstance(dist, Distribution):
        raise TypeError("Expected string, Requirement, or Distribution", dist)
    return dist
557
558
def load_entry_point(dist, group, name):
    """Return `name` entry point of `group` for `dist` or raise ImportError"""
    distribution = get_distribution(dist)
    return distribution.load_entry_point(group, name)
562
563
def get_entry_map(dist, group=None):
    """Return the entry point map for `group`, or the full entry map"""
    distribution = get_distribution(dist)
    return distribution.get_entry_map(group)
567
568
def get_entry_info(dist, group, name):
    """Return the EntryPoint object for `group`+`name`, or ``None``"""
    distribution = get_distribution(dist)
    return distribution.get_entry_info(group, name)
572
573
class IMetadataProvider:
    """Interface specification for metadata providers.

    NOTE: the methods below intentionally omit ``self``; this class
    documents the provider protocol rather than supplying an
    implementation.
    """

    def has_metadata(name):
        """Does the package's distribution contain the named metadata?"""

    def get_metadata(name):
        """The named metadata resource as a string"""

    def get_metadata_lines(name):
        """Yield named metadata resource as list of non-blank non-comment lines

        Leading and trailing whitespace is stripped from each line, and lines
        with ``#`` as the first non-blank character are omitted."""

    def metadata_isdir(name):
        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""

    def metadata_listdir(name):
        """List of metadata names in the directory (like ``os.listdir()``)"""

    def run_script(script_name, namespace):
        """Execute the named script in the supplied namespace dictionary"""
595
596
class IResourceProvider(IMetadataProvider):
    """An object that provides access to package resources

    NOTE: as with ``IMetadataProvider``, methods omit ``self`` because this
    class is an interface specification, not an implementation.
    """

    def get_resource_filename(manager, resource_name):
        """Return a true filesystem path for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_stream(manager, resource_name):
        """Return a readable file-like object for `resource_name`

        `manager` must be an ``IResourceManager``"""

    def get_resource_string(manager, resource_name):
        """Return a string containing the contents of `resource_name`

        `manager` must be an ``IResourceManager``"""

    def has_resource(resource_name):
        """Does the package contain the named resource?"""

    def resource_isdir(resource_name):
        """Is the named resource a directory?  (like ``os.path.isdir()``)"""

    def resource_listdir(resource_name):
        """List of resource names in the directory (like ``os.listdir()``)"""
623
624
class WorkingSet(object):
    """A collection of active distributions on sys.path (or a similar list)

    Tracks the active distribution for each project key (``by_key``), which
    path entries provided which projects (``entry_keys``), and callbacks to
    run when new distributions are added (``callbacks``).
    """
627
628    def __init__(self, entries=None):
629        """Create working set from list of path entries (default=sys.path)"""
630        self.entries = []
631        self.entry_keys = {}
632        self.by_key = {}
633        self.callbacks = []
634
635        if entries is None:
636            entries = sys.path
637
638        for entry in entries:
639            self.add_entry(entry)
640
    @classmethod
    def _build_master(cls):
        """
        Prepare the master working set.

        Builds a working set from sys.path; if ``__main__`` declares
        ``__requires__``, ensures those requirements are met, rebuilding
        the set from scratch on conflict.
        """
        ws = cls()
        try:
            from __main__ import __requires__
        except ImportError:
            # The main program does not list any requirements
            return ws

        # ensure the requirements are met
        try:
            ws.require(__requires__)
        except VersionConflict:
            # the default working set conflicts; rebuild from requirements
            return cls._build_from_requirements(__requires__)

        return ws
660
661    @classmethod
662    def _build_from_requirements(cls, req_spec):
663        """
664        Build a working set from a requirement spec. Rewrites sys.path.
665        """
666        # try it without defaults already on sys.path
667        # by starting with an empty path
668        ws = cls([])
669        reqs = parse_requirements(req_spec)
670        dists = ws.resolve(reqs, Environment())
671        for dist in dists:
672            ws.add(dist)
673
674        # add any missing entries from sys.path
675        for entry in sys.path:
676            if entry not in ws.entries:
677                ws.add_entry(entry)
678
679        # then copy back to sys.path
680        sys.path[:] = ws.entries
681        return ws
682
683    def add_entry(self, entry):
684        """Add a path item to ``.entries``, finding any distributions on it
685
686        ``find_distributions(entry, True)`` is used to find distributions
687        corresponding to the path entry, and they are added.  `entry` is
688        always appended to ``.entries``, even if it is already present.
689        (This is because ``sys.path`` can contain the same value more than
690        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
691        equal ``sys.path``.)
692        """
693        self.entry_keys.setdefault(entry, [])
694        self.entries.append(entry)
695        for dist in find_distributions(entry, True):
696            self.add(dist, entry, False)
697
698    def __contains__(self, dist):
699        """True if `dist` is the active distribution for its project"""
700        return self.by_key.get(dist.key) == dist
701
702    def find(self, req):
703        """Find a distribution matching requirement `req`
704
705        If there is an active distribution for the requested project, this
706        returns it as long as it meets the version requirement specified by
707        `req`.  But, if there is an active distribution for the project and it
708        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
709        If there is no active distribution for the requested project, ``None``
710        is returned.
711        """
712        dist = self.by_key.get(req.key)
713        if dist is not None and dist not in req:
714            # XXX add more info
715            raise VersionConflict(dist, req)
716        return dist
717
718    def iter_entry_points(self, group, name=None):
719        """Yield entry point objects from `group` matching `name`
720
721        If `name` is None, yields all entry points in `group` from all
722        distributions in the working set, otherwise only ones matching
723        both `group` and `name` are yielded (in distribution order).
724        """
725        for dist in self:
726            entries = dist.get_entry_map(group)
727            if name is None:
728                for ep in entries.values():
729                    yield ep
730            elif name in entries:
731                yield entries[name]
732
    def run_script(self, requires, script_name):
        """Locate distribution for `requires` and run `script_name` script"""
        # Clear the caller's global namespace (keeping only __name__) so the
        # script executes with a fresh namespace in the caller's frame.
        ns = sys._getframe(1).f_globals
        name = ns['__name__']
        ns.clear()
        ns['__name__'] = name
        self.require(requires)[0].run_script(script_name, ns)
740
741    def __iter__(self):
742        """Yield distributions for non-duplicate projects in the working set
743
744        The yield order is the order in which the items' path entries were
745        added to the working set.
746        """
747        seen = {}
748        for item in self.entries:
749            if item not in self.entry_keys:
750                # workaround a cache issue
751                continue
752
753            for key in self.entry_keys[item]:
754                if key not in seen:
755                    seen[key] = 1
756                    yield self.by_key[key]
757
    def add(self, dist, entry=None, insert=True, replace=False):
        """Add `dist` to working set, associated with `entry`

        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
        On exit from this routine, `entry` is added to the end of the working
        set's ``.entries`` (if it wasn't already present).

        `dist` is only added to the working set if it's for a project that
        doesn't already have a distribution in the set, unless `replace=True`.
        If it's added, any callbacks registered with the ``subscribe()`` method
        will be called.
        """
        if insert:
            # splice the entry into self.entries via the distribution itself
            dist.insert_on(self.entries, entry, replace=replace)

        if entry is None:
            entry = dist.location
        # record the dist's key under both the given entry and its location
        keys = self.entry_keys.setdefault(entry, [])
        keys2 = self.entry_keys.setdefault(dist.location, [])
        if not replace and dist.key in self.by_key:
            # ignore hidden distros
            return

        self.by_key[dist.key] = dist
        if dist.key not in keys:
            keys.append(dist.key)
        if dist.key not in keys2:
            keys2.append(dist.key)
        # notify subscribers of the newly activated distribution
        self._added_new(dist)
787
    def resolve(self, requirements, env=None, installer=None,
                replace_conflicting=False, extras=None):
        """List all distributions needed to (recursively) meet `requirements`

        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
        if supplied, should be an ``Environment`` instance.  If
        not supplied, it defaults to all distributions available within any
        entry or distribution in the working set.  `installer`, if supplied,
        will be invoked with each requirement that cannot be met by an
        already-installed distribution; it should return a ``Distribution`` or
        ``None``.

        Unless `replace_conflicting=True`, raises a VersionConflict exception if
        any requirements are found on the path that have the correct name but
        the wrong version.  Otherwise, if an `installer` is supplied it will be
        invoked to obtain the correct version of the requirement and activate
        it.

        `extras` is a list of the extras to be used with these requirements.
        This is important because extra requirements may look like `my_req;
        extra = "my_extra"`, which would otherwise be interpreted as a purely
        optional requirement.  Instead, we want to be able to assert that these
        requirements are truly required.
        """

        # set up the stack
        requirements = list(requirements)[::-1]
        # set of processed requirements
        processed = {}
        # key -> dist
        best = {}
        to_activate = []

        req_extras = _ReqExtras()

        # Mapping of requirement to set of distributions that required it;
        # useful for reporting info about conflicts.
        required_by = collections.defaultdict(set)

        while requirements:
            # process dependencies breadth-first
            req = requirements.pop(0)
            if req in processed:
                # Ignore cyclic or redundant dependencies
                continue

            # skip requirements whose environment markers do not apply
            if not req_extras.markers_pass(req, extras):
                continue

            dist = best.get(req.key)
            if dist is None:
                # Find the best distribution and add it to the map
                dist = self.by_key.get(req.key)
                if dist is None or (dist not in req and replace_conflicting):
                    ws = self
                    if env is None:
                        if dist is None:
                            env = Environment(self.entries)
                        else:
                            # Use an empty environment and workingset to avoid
                            # any further conflicts with the conflicting
                            # distribution
                            env = Environment([])
                            ws = WorkingSet([])
                    dist = best[req.key] = env.best_match(req, ws, installer)
                    if dist is None:
                        requirers = required_by.get(req, None)
                        raise DistributionNotFound(req, requirers)
                to_activate.append(dist)
            if dist not in req:
                # Oops, the "best" so far conflicts with a dependency
                dependent_req = required_by[req]
                raise VersionConflict(dist, req).with_context(dependent_req)

            # push the new requirements onto the stack
            new_requirements = dist.requires(req.extras)[::-1]
            requirements.extend(new_requirements)

            # Register the new requirements needed by req
            for new_requirement in new_requirements:
                required_by[new_requirement].add(req.project_name)
                req_extras[new_requirement] = req.extras

            processed[req] = True

        # return list of distros to activate
        return to_activate
875
876    def find_plugins(self, plugin_env, full_env=None, installer=None,
877            fallback=True):
878        """Find all activatable distributions in `plugin_env`
879
880        Example usage::
881
882            distributions, errors = working_set.find_plugins(
883                Environment(plugin_dirlist)
884            )
885            # add plugins+libs to sys.path
886            map(working_set.add, distributions)
887            # display errors
888            print('Could not load', errors)
889
890        The `plugin_env` should be an ``Environment`` instance that contains
891        only distributions that are in the project's "plugin directory" or
892        directories. The `full_env`, if supplied, should be an ``Environment``
893        contains all currently-available distributions.  If `full_env` is not
894        supplied, one is created automatically from the ``WorkingSet`` this
895        method is called on, which will typically mean that every directory on
896        ``sys.path`` will be scanned for distributions.
897
898        `installer` is a standard installer callback as used by the
899        ``resolve()`` method. The `fallback` flag indicates whether we should
900        attempt to resolve older versions of a plugin if the newest version
901        cannot be resolved.
902
903        This method returns a 2-tuple: (`distributions`, `error_info`), where
904        `distributions` is a list of the distributions found in `plugin_env`
905        that were loadable, along with any other distributions that are needed
906        to resolve their dependencies.  `error_info` is a dictionary mapping
907        unloadable plugin distributions to an exception instance describing the
908        error that occurred. Usually this will be a ``DistributionNotFound`` or
909        ``VersionConflict`` instance.
910        """
911
912        plugin_projects = list(plugin_env)
913        # scan project names in alphabetic order
914        plugin_projects.sort()
915
916        error_info = {}
917        distributions = {}
918
919        if full_env is None:
920            env = Environment(self.entries)
921            env += plugin_env
922        else:
923            env = full_env + plugin_env
924
925        shadow_set = self.__class__([])
926        # put all our entries in shadow_set
927        list(map(shadow_set.add, self))
928
929        for project_name in plugin_projects:
930
931            for dist in plugin_env[project_name]:
932
933                req = [dist.as_requirement()]
934
935                try:
936                    resolvees = shadow_set.resolve(req, env, installer)
937
938                except ResolutionError as v:
939                    # save error info
940                    error_info[dist] = v
941                    if fallback:
942                        # try the next older version of project
943                        continue
944                    else:
945                        # give up on this project, keep going
946                        break
947
948                else:
949                    list(map(shadow_set.add, resolvees))
950                    distributions.update(dict.fromkeys(resolvees))
951
952                    # success, no need to try any more versions of this project
953                    break
954
955        distributions = list(distributions)
956        distributions.sort()
957
958        return distributions, error_info
959
960    def require(self, *requirements):
961        """Ensure that distributions matching `requirements` are activated
962
963        `requirements` must be a string or a (possibly-nested) sequence
964        thereof, specifying the distributions and versions required.  The
965        return value is a sequence of the distributions that needed to be
966        activated to fulfill the requirements; all relevant distributions are
967        included, even if they were already activated in this working set.
968        """
969        needed = self.resolve(parse_requirements(requirements))
970
971        for dist in needed:
972            self.add(dist)
973
974        return needed
975
976    def subscribe(self, callback, existing=True):
977        """Invoke `callback` for all distributions
978
979        If `existing=True` (default),
980        call on all existing ones, as well.
981        """
982        if callback in self.callbacks:
983            return
984        self.callbacks.append(callback)
985        if not existing:
986            return
987        for dist in self:
988            callback(dist)
989
990    def _added_new(self, dist):
991        for callback in self.callbacks:
992            callback(dist)
993
994    def __getstate__(self):
995        return (
996            self.entries[:], self.entry_keys.copy(), self.by_key.copy(),
997            self.callbacks[:]
998        )
999
1000    def __setstate__(self, e_k_b_c):
1001        entries, keys, by_key, callbacks = e_k_b_c
1002        self.entries = entries[:]
1003        self.entry_keys = keys.copy()
1004        self.by_key = by_key.copy()
1005        self.callbacks = callbacks[:]
1006
1007
1008class _ReqExtras(dict):
1009    """
1010    Map each requirement to the extras that demanded it.
1011    """
1012
1013    def markers_pass(self, req, extras=None):
1014        """
1015        Evaluate markers for req against each extra that
1016        demanded it.
1017
1018        Return False if the req has a marker and fails
1019        evaluation. Otherwise, return True.
1020        """
1021        extra_evals = (
1022            req.marker.evaluate({'extra': extra})
1023            for extra in self.get(req, ()) + (extras or (None,))
1024        )
1025        return not req.marker or any(extra_evals)
1026
1027
class Environment(object):
    """Searchable snapshot of distributions on a search path"""

    def __init__(self, search_path=None, platform=get_supported_platform(),
            python=PY_MAJOR):
        """Snapshot distributions available on a search path

        Any distributions found on `search_path` are added to the
        environment.  `search_path` should be a sequence of ``sys.path``
        items; if not supplied, ``sys.path`` is used.

        `platform` is an optional string naming the platform that
        platform-specific distributions must be compatible with (defaults
        to the current platform); `python` is an optional string naming
        the desired Python version, e.g. ``'3.3'`` (defaults to the
        current version).

        Explicitly pass ``None`` for `platform` and/or `python` to map
        *all* distributions regardless of platform or Python version.
        """
        # Maps lowercased project key -> newest-first list of dists.
        self._distmap = {}
        self.platform = platform
        self.python = python
        self.scan(search_path)

    def can_add(self, dist):
        """Is distribution `dist` acceptable for this environment?

        The distribution must match the platform and python version
        requirements specified when this environment was created, or
        False is returned.
        """
        python_ok = (
            self.python is None
            or dist.py_version is None
            or dist.py_version == self.python
        )
        return python_ok and compatible_platforms(dist.platform, self.platform)

    def remove(self, dist):
        """Remove `dist` from the environment"""
        self._distmap[dist.key].remove(dist)

    def scan(self, search_path=None):
        """Scan `search_path` for distributions usable in this environment

        Any distributions found are added to the environment.
        `search_path` should be a sequence of ``sys.path`` items; if not
        supplied, ``sys.path`` is used.  Only distributions conforming to
        the platform/python version defined at initialization are added.
        """
        entries = sys.path if search_path is None else search_path
        for entry in entries:
            for dist in find_distributions(entry):
                self.add(dist)

    def __getitem__(self, project_name):
        """Return a newest-to-oldest list of distributions for `project_name`

        Uses case-insensitive `project_name` comparison, assuming all the
        project's distributions use their project's name converted to all
        lowercase as their key.

        """
        return self._distmap.get(project_name.lower(), [])

    def add(self, dist):
        """Add `dist` if we ``can_add()`` it and it has not already been added
        """
        if not (self.can_add(dist) and dist.has_version()):
            return
        dists = self._distmap.setdefault(dist.key, [])
        if dist not in dists:
            dists.append(dist)
            # Keep each candidate list sorted newest-first.
            dists.sort(key=operator.attrgetter('hashcmp'), reverse=True)

    def best_match(self, req, working_set, installer=None):
        """Find distribution best matching `req` and usable on `working_set`

        This calls the ``find(req)`` method of the `working_set` to see if
        a suitable distribution is already active (which may raise
        ``VersionConflict`` if an unsuitable version of the project is
        already active there).  If no suitable distribution is active, the
        newest matching distribution in this environment is returned.
        Failing that, and if `installer` is supplied, the result of the
        environment's ``obtain(req, installer)`` method is returned.
        """
        active = working_set.find(req)
        if active is not None:
            return active

        # Candidates are kept newest-first, so the first match wins.
        for candidate in self[req.key]:
            if candidate in req:
                return candidate

        # Nothing suitable available locally; try to download/install.
        return self.obtain(req, installer)

    def obtain(self, requirement, installer=None):
        """Obtain a distribution matching `requirement` (e.g. via download)

        Obtain a distro that matches requirement (e.g. via download).  In
        the base ``Environment`` class, this routine just returns
        ``installer(requirement)``, unless `installer` is None, in which
        case None is returned instead.  This method is a hook that allows
        subclasses to attempt other ways of obtaining a distribution
        before falling back to the `installer` argument."""
        return installer(requirement) if installer is not None else None

    def __iter__(self):
        """Yield the unique project names of the available distributions"""
        for key in self._distmap:
            # Skip keys whose candidate lists have been emptied.
            if self[key]:
                yield key

    def __iadd__(self, other):
        """In-place addition of a distribution or environment"""
        if isinstance(other, Distribution):
            self.add(other)
        elif isinstance(other, Environment):
            for project in other:
                for candidate in other[project]:
                    self.add(candidate)
        else:
            raise TypeError("Can't add %r to environment" % (other,))
        return self

    def __add__(self, other):
        """Add an environment or distribution to an environment"""
        # The union is unrestricted: no platform/python filtering.
        combined = self.__class__([], platform=None, python=None)
        combined += self
        combined += other
        return combined
1162
1163
# XXX backward compatibility
# Historical alias: older code imported ``AvailableDistributions``; it is
# the same class as ``Environment``.
AvailableDistributions = Environment
1166
1167
class ExtractionError(RuntimeError):
    """An error occurred extracting a resource

    Instances of this exception carry the following attributes:

    manager
        The resource manager that raised this exception

    cache_path
        The base directory for resource extraction

    original_error
        The exception instance that caused extraction to fail
    """
1182
1183
class ResourceManager:
    """Manage resource extraction and packages"""

    # Base directory for extraction; ``None`` means "use the result of
    # get_default_cache()".
    extraction_path = None

    def __init__(self):
        # Maps each generated extraction target path -> 1; consulted by
        # set_extraction_path() to refuse changes after extraction began.
        self.cached_files = {}

    def resource_exists(self, package_or_requirement, resource_name):
        """Does the named resource exist?"""
        return get_provider(package_or_requirement).has_resource(resource_name)

    def resource_isdir(self, package_or_requirement, resource_name):
        """Is the named resource an existing directory?"""
        return get_provider(package_or_requirement).resource_isdir(
            resource_name
        )

    def resource_filename(self, package_or_requirement, resource_name):
        """Return a true filesystem path for specified resource"""
        return get_provider(package_or_requirement).get_resource_filename(
            self, resource_name
        )

    def resource_stream(self, package_or_requirement, resource_name):
        """Return a readable file-like object for specified resource"""
        return get_provider(package_or_requirement).get_resource_stream(
            self, resource_name
        )

    def resource_string(self, package_or_requirement, resource_name):
        """Return specified resource as a string"""
        return get_provider(package_or_requirement).get_resource_string(
            self, resource_name
        )

    def resource_listdir(self, package_or_requirement, resource_name):
        """List the contents of the named resource directory"""
        return get_provider(package_or_requirement).resource_listdir(
            resource_name
        )

    def extraction_error(self):
        """Give an error message for problems extracting file(s)

        Wraps the currently-handled exception in an ``ExtractionError``
        (carrying ``manager``, ``cache_path`` and ``original_error``
        attributes) and raises it.
        """

        old_exc = sys.exc_info()[1]
        cache_path = self.extraction_path or get_default_cache()

        tmpl = textwrap.dedent("""
            Can't extract file(s) to egg cache

            The following error occurred while trying to extract file(s) to the Python egg
            cache:

              {old_exc}

            The Python egg cache directory is currently set to:

              {cache_path}

            Perhaps your account does not have write access to this directory?  You can
            change the cache directory by setting the PYTHON_EGG_CACHE environment
            variable to point to an accessible directory.
            """).lstrip()
        # NOTE: .format(**locals()) depends on the local names ``old_exc``
        # and ``cache_path`` bound above.
        err = ExtractionError(tmpl.format(**locals()))
        err.manager = self
        err.cache_path = cache_path
        err.original_error = old_exc
        raise err

    def get_cache_path(self, archive_name, names=()):
        """Return absolute location in cache for `archive_name` and `names`

        The parent directory of the resulting path will be created if it
        does not already exist.  `archive_name` should be the base filename
        of the enclosing egg (which may not be the name of the enclosing
        zipfile!), including its ".egg" extension.  `names`, if provided,
        should be a sequence of path name parts "under" the egg's
        extraction location.

        This method should only be called by resource providers that need
        to obtain an extraction location, and only for names they intend to
        extract, as it tracks the generated names for possible cleanup
        later.
        """
        extract_path = self.extraction_path or get_default_cache()
        target_path = os.path.join(extract_path, archive_name + '-tmp', *names)
        try:
            _bypass_ensure_directory(target_path)
        except Exception:
            # Was a bare ``except:``, which also intercepted SystemExit and
            # KeyboardInterrupt; narrowed so only real errors are converted
            # into an ExtractionError (extraction_error re-raises).
            self.extraction_error()

        self._warn_unsafe_extraction_path(extract_path)

        self.cached_files[target_path] = 1
        return target_path

    @staticmethod
    def _warn_unsafe_extraction_path(path):
        """
        If the default extraction path is overridden and set to an insecure
        location, such as /tmp, it opens up an opportunity for an attacker
        to replace an extracted file with an unauthorized payload. Warn the
        user if a known insecure location is used.

        See Distribute #375 for more details.
        """
        if os.name == 'nt' and not path.startswith(os.environ['windir']):
            # On Windows, permissions are generally restrictive by default
            #  and temp directories are not writable by other users, so
            #  bypass the warning.
            # NOTE(review): assumes %windir% is always set on Windows;
            # a missing variable would raise KeyError here -- confirm.
            return
        mode = os.stat(path).st_mode
        if mode & stat.S_IWOTH or mode & stat.S_IWGRP:
            msg = ("%s is writable by group/others and vulnerable to attack "
                "when "
                "used with get_resource_filename. Consider a more secure "
                "location (set with .set_extraction_path or the "
                "PYTHON_EGG_CACHE environment variable)." % path)
            warnings.warn(msg, UserWarning)

    def postprocess(self, tempname, filename):
        """Perform any platform-specific postprocessing of `tempname`

        This is where Mac header rewrites should be done; other platforms
        don't have anything special they should do.

        Resource providers should call this method ONLY after successfully
        extracting a compressed resource.  They must NOT call it on
        resources that are already in the filesystem.

        `tempname` is the current (temporary) name of the file, and
        `filename` is the name it will be renamed to by the caller after
        this routine returns.
        """

        if os.name == 'posix':
            # Make the resource executable
            mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777
            os.chmod(tempname, mode)

    def set_extraction_path(self, path):
        """Set the base path where resources will be extracted to, if needed.

        If you do not call this routine before any extractions take place,
        the path defaults to the return value of ``get_default_cache()``.
        (Which is based on the ``PYTHON_EGG_CACHE`` environment variable,
        with various platform-specific fallbacks.  See that routine's
        documentation for more details.)

        Resources are extracted to subdirectories of this path based upon
        information given by the ``IResourceProvider``.  You may set this
        to a temporary directory, but then you must call
        ``cleanup_resources()`` to delete the extracted files when done.
        There is no guarantee that ``cleanup_resources()`` will be able to
        remove all extracted files.

        (Note: you may not change the extraction path for a given resource
        manager once resources have been extracted, unless you first call
        ``cleanup_resources()``.)

        Raises ``ValueError`` if files have already been extracted.
        """
        if self.cached_files:
            raise ValueError(
                "Can't change extraction path, files already extracted"
            )

        self.extraction_path = path

    def cleanup_resources(self, force=False):
        """
        Delete all extracted resource files and directories, returning a
        list of the file and directory names that could not be successfully
        removed.  This function does not have any concurrency protection,
        so it should generally only be called when the extraction path is a
        temporary directory exclusive to a single process.  This method is
        not automatically called; you must call it explicitly or register
        it as an ``atexit`` function if you wish to ensure cleanup of a
        temporary directory used for extractions.
        """
        # XXX
        # Intentionally unimplemented upstream; currently returns None
        # despite the docstring's promise of a list.
1360
1361
def get_default_cache():
    """
    Return the ``PYTHON_EGG_CACHE`` environment variable
    or a platform-relevant user cache dir for an app
    named "Python-Eggs".
    """
    egg_cache = os.environ.get('PYTHON_EGG_CACHE')
    if egg_cache:
        return egg_cache
    return appdirs.user_cache_dir(appname='Python-Eggs')
1372
1373
def safe_name(name):
    """Convert an arbitrary string to a standard distribution name

    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
    """
    collapse_illegal = re.compile('[^A-Za-z0-9.]+')
    return collapse_illegal.sub('-', name)
1380
1381
def safe_version(version):
    """
    Convert an arbitrary string to a standard version string
    """
    try:
        # Prefer the fully normalized PEP 440 form when parseable.
        return str(packaging.version.Version(version))
    except packaging.version.InvalidVersion:
        # Legacy fallback: spaces become dots, then any remaining runs of
        # illegal characters collapse to a single dash.
        return re.sub('[^A-Za-z0-9.]+', '-', version.replace(' ', '.'))
1392
1393
def safe_extra(extra):
    """Convert an arbitrary string to a standard 'extra' name

    Any runs of non-alphanumeric characters are replaced with a single '_',
    and the result is always lowercased.
    """
    sanitized = re.sub('[^A-Za-z0-9.-]+', '_', extra)
    return sanitized.lower()
1401
1402
def to_filename(name):
    """Convert a project or version name to its filename-escaped form

    Any '-' characters are currently replaced with '_'.
    """
    escaped = name.replace('-', '_')
    return escaped
1409
1410
def invalid_marker(text):
    """
    Validate text as a PEP 508 environment marker; return an exception
    if invalid or False otherwise.
    """
    try:
        evaluate_marker(text)
    except SyntaxError as exc:
        # Strip location info: it refers to a synthetic buffer, not a
        # real source file.
        exc.filename = None
        exc.lineno = None
        return exc
    else:
        return False
1423
1424
def evaluate_marker(text, extra=None):
    """
    Evaluate a PEP 508 environment marker.
    Return a boolean indicating the marker result in this environment.
    Raise SyntaxError if marker is invalid.

    This implementation uses the 'pyparsing' module.
    """
    # NOTE: the `extra` parameter is accepted for interface compatibility
    # but is not consulted here.
    try:
        return packaging.markers.Marker(text).evaluate()
    except packaging.markers.InvalidMarker as err:
        raise SyntaxError(err)
1438
1439
class NullProvider:
    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""

    # Populated by EggProvider subclasses; None here means "no egg".
    egg_name = None
    egg_info = None
    loader = None

    def __init__(self, module):
        self.loader = getattr(module, '__loader__', None)
        self.module_path = os.path.dirname(getattr(module, '__file__', ''))

    def get_resource_filename(self, manager, resource_name):
        """Return a filesystem path for `resource_name`."""
        return self._fn(self.module_path, resource_name)

    def get_resource_stream(self, manager, resource_name):
        """Return a readable binary stream over the resource's contents."""
        return io.BytesIO(self.get_resource_string(manager, resource_name))

    def get_resource_string(self, manager, resource_name):
        """Return the (byte) contents of the named resource."""
        return self._get(self._fn(self.module_path, resource_name))

    def has_resource(self, resource_name):
        """Return True if the named resource exists."""
        return self._has(self._fn(self.module_path, resource_name))

    def has_metadata(self, name):
        """Return truthy if the named metadata entry exists."""
        return self.egg_info and self._has(self._fn(self.egg_info, name))

    def get_metadata(self, name):
        """Return the named metadata entry as text ('' if no egg_info)."""
        if not self.egg_info:
            return ""
        value = self._get(self._fn(self.egg_info, name))
        # _get returns bytes; decode on Python 3 only.
        return value.decode('utf-8') if six.PY3 else value

    def get_metadata_lines(self, name):
        """Return an iterator over the lines of the named metadata entry."""
        return yield_lines(self.get_metadata(name))

    def resource_isdir(self, resource_name):
        """Return True if the named resource is a directory."""
        return self._isdir(self._fn(self.module_path, resource_name))

    def metadata_isdir(self, name):
        """Return truthy if the named metadata entry is a directory."""
        return self.egg_info and self._isdir(self._fn(self.egg_info, name))

    def resource_listdir(self, resource_name):
        """List the contents of the named resource directory."""
        return self._listdir(self._fn(self.module_path, resource_name))

    def metadata_listdir(self, name):
        """List the contents of the named metadata directory ([] if none)."""
        if self.egg_info:
            return self._listdir(self._fn(self.egg_info, name))
        return []

    def run_script(self, script_name, namespace):
        """Execute the named script (from egg metadata) in `namespace`.

        Raises ``ResolutionError`` if no such script exists.
        """
        script = 'scripts/' + script_name
        if not self.has_metadata(script):
            raise ResolutionError("No script named %r" % script_name)
        # Normalize all line endings to '\n'.
        script_text = self.get_metadata(script).replace('\r\n', '\n')
        script_text = script_text.replace('\r', '\n')
        script_filename = self._fn(self.egg_info, script)
        namespace['__file__'] = script_filename
        if os.path.exists(script_filename):
            # Fixed: close the file promptly via a context manager instead
            # of leaking the handle until garbage collection.
            with open(script_filename) as source_file:
                source = source_file.read()
            code = compile(source, script_filename, 'exec')
            exec(code, namespace, namespace)
        else:
            from linecache import cache
            # Seed linecache so tracebacks can display the script source
            # even though it has no real file on disk.
            cache[script_filename] = (
                len(script_text), 0, script_text.split('\n'), script_filename
            )
            script_code = compile(script_text, script_filename, 'exec')
            exec(script_code, namespace, namespace)

    def _has(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _isdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _listdir(self, path):
        raise NotImplementedError(
            "Can't perform this operation for unregistered loader type"
        )

    def _fn(self, base, resource_name):
        # Resource names use '/' separators; translate to OS path parts.
        if resource_name:
            return os.path.join(base, *resource_name.split('/'))
        return base

    def _get(self, path):
        if hasattr(self.loader, 'get_data'):
            return self.loader.get_data(path)
        raise NotImplementedError(
            "Can't perform this operation for loaders without 'get_data()'"
        )
1535
1536
# Fallback registration: any loader type without a more specific provider
# is handled by NullProvider.
register_loader_type(object, NullProvider)
1538
1539
class EggProvider(NullProvider):
    """Provider based on a virtual filesystem"""

    def __init__(self, module):
        NullProvider.__init__(self, module)
        self._setup_prefix()

    def _setup_prefix(self):
        # Metadata may be nested inside a "basket" of multiple eggs, so
        # walk upward from module_path (rather than relying on .archive)
        # until an unpacked egg directory is found or the root is reached.
        path = self.module_path
        parent = None
        while path != parent:
            if _is_unpacked_egg(path):
                self.egg_name = os.path.basename(path)
                self.egg_info = os.path.join(path, 'EGG-INFO')
                self.egg_root = path
                return
            parent = path
            path = os.path.split(path)[0]
1560
1561
class DefaultProvider(EggProvider):
    """Provides access to package resources in the filesystem"""

    def _has(self, path):
        return os.path.exists(path)

    def _isdir(self, path):
        return os.path.isdir(path)

    def _listdir(self, path):
        return os.listdir(path)

    def get_resource_stream(self, manager, resource_name):
        # Resources already live on disk, so stream them directly rather
        # than buffering through BytesIO.
        return open(self._fn(self.module_path, resource_name), 'rb')

    def _get(self, path):
        with open(path, 'rb') as stream:
            return stream.read()

    @classmethod
    def _register(cls):
        # SourceFileLoader is absent on older Pythons; type(None) is a
        # harmless stand-in that matches no real loader.
        loader_cls = getattr(
            importlib_machinery, 'SourceFileLoader', type(None))
        register_loader_type(loader_cls, cls)
1586
1587
# Register DefaultProvider for filesystem-based source loaders.
DefaultProvider._register()
1589
1590
class EmptyProvider(NullProvider):
    """Provider that returns nothing for all requests"""

    # No backing module, so there is no filesystem location.
    module_path = None

    def __init__(self):
        # Deliberately skip NullProvider.__init__: there is no module.
        pass

    def _has(self, path):
        return False

    def _isdir(self, path):
        return False

    def _listdir(self, path):
        return []

    def _get(self, path):
        return ''
1601
1602
# Shared stateless singleton used when no real provider is available.
empty_provider = EmptyProvider()
1604
1605
class ZipManifests(dict):
    """
    zip manifest builder
    """

    @classmethod
    def build(cls, path):
        """
        Build a dictionary similar to the zipimport directory
        caches, except instead of tuples, store ZipInfo objects.

        Use a platform-specific path separator (os.sep) for the path keys
        for compatibility with pypy on Windows.
        """
        with ContextualZipFile(path) as zfile:
            manifest = {}
            for name in zfile.namelist():
                manifest[name.replace('/', os.sep)] = zfile.getinfo(name)
            return manifest

    # `load` is a plain alias; MemoizedZipManifests overrides it.
    load = build
1631
1632
class MemoizedZipManifests(ZipManifests):
    """
    Memoized zipfile manifests.
    """

    # Cached value: the manifest dict plus the archive mtime it was built
    # from, so staleness can be detected.
    manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime')

    def load(self, path):
        """
        Load a manifest at path or return a suitable manifest already loaded.
        """
        path = os.path.normpath(path)
        mtime = os.stat(path).st_mtime

        cached = self.get(path)
        if cached is None or cached.mtime != mtime:
            # (Re)build when missing or when the archive has changed.
            cached = self.manifest_mod(self.build(path), mtime)
            self[path] = cached

        return cached.manifest
1651
1652
class ContextualZipFile(zipfile.ZipFile):
    """
    Supplement ZipFile class to support context manager for Python 2.6
    """

    def __new__(cls, *args, **kwargs):
        """
        Construct a ZipFile or ContextualZipFile as appropriate
        """
        # Modern ZipFile is already a context manager; return it directly
        # and only fall back to this wrapper on Python 2.6.
        if hasattr(zipfile.ZipFile, '__exit__'):
            return zipfile.ZipFile(*args, **kwargs)
        return super(ContextualZipFile, cls).__new__(cls)

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()
1671
1672
1673class ZipProvider(EggProvider):
1674    """Resource support for zips and eggs"""
1675
1676    eagers = None
1677    _zip_manifests = MemoizedZipManifests()
1678
    def __init__(self, module):
        EggProvider.__init__(self, module)
        # Pseudo-filesystem prefix: the archive path plus a separator,
        # used to translate virtual paths into zipfile subpaths.
        self.zip_pre = self.loader.archive + os.sep
1682
1683    def _zipinfo_name(self, fspath):
1684        # Convert a virtual filename (full path to file) into a zipfile subpath
1685        # usable with the zipimport directory cache for our target archive
1686        if fspath.startswith(self.zip_pre):
1687            return fspath[len(self.zip_pre):]
1688        raise AssertionError(
1689            "%s is not a subpath of %s" % (fspath, self.zip_pre)
1690        )
1691
1692    def _parts(self, zip_path):
1693        # Convert a zipfile subpath into an egg-relative path part list.
1694        # pseudo-fs path
1695        fspath = self.zip_pre + zip_path
1696        if fspath.startswith(self.egg_root + os.sep):
1697            return fspath[len(self.egg_root) + 1:].split(os.sep)
1698        raise AssertionError(
1699            "%s is not a subpath of %s" % (fspath, self.egg_root)
1700        )
1701
    @property
    def zipinfo(self):
        # Manifest of ZipInfo entries for the archive, memoized by mtime
        # via the shared MemoizedZipManifests instance.
        return self._zip_manifests.load(self.loader.archive)
1705
1706    def get_resource_filename(self, manager, resource_name):
1707        if not self.egg_name:
1708            raise NotImplementedError(
1709                "resource_filename() only supported for .egg, not .zip"
1710            )
1711        # no need to lock for extraction, since we use temp names
1712        zip_path = self._resource_to_zip(resource_name)
1713        eagers = self._get_eager_resources()
1714        if '/'.join(self._parts(zip_path)) in eagers:
1715            for name in eagers:
1716                self._extract_resource(manager, self._eager_to_zip(name))
1717        return self._extract_resource(manager, zip_path)
1718
1719    @staticmethod
1720    def _get_date_and_size(zip_stat):
1721        size = zip_stat.file_size
1722        # ymdhms+wday, yday, dst
1723        date_time = zip_stat.date_time + (0, 0, -1)
1724        # 1980 offset already done
1725        timestamp = time.mktime(date_time)
1726        return timestamp, size
1727
1728    def _extract_resource(self, manager, zip_path):
1729
1730        if zip_path in self._index():
1731            for name in self._index()[zip_path]:
1732                last = self._extract_resource(
1733                    manager, os.path.join(zip_path, name)
1734                )
1735            # return the extracted directory name
1736            return os.path.dirname(last)
1737
1738        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1739
1740        if not WRITE_SUPPORT:
1741            raise IOError('"os.rename" and "os.unlink" are not supported '
1742                          'on this platform')
1743        try:
1744
1745            real_path = manager.get_cache_path(
1746                self.egg_name, self._parts(zip_path)
1747            )
1748
1749            if self._is_current(real_path, zip_path):
1750                return real_path
1751
1752            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
1753            os.write(outf, self.loader.get_data(zip_path))
1754            os.close(outf)
1755            utime(tmpnam, (timestamp, timestamp))
1756            manager.postprocess(tmpnam, real_path)
1757
1758            try:
1759                rename(tmpnam, real_path)
1760
1761            except os.error:
1762                if os.path.isfile(real_path):
1763                    if self._is_current(real_path, zip_path):
1764                        # the file became current since it was checked above,
1765                        #  so proceed.
1766                        return real_path
1767                    # Windows, del old file and retry
1768                    elif os.name == 'nt':
1769                        unlink(real_path)
1770                        rename(tmpnam, real_path)
1771                        return real_path
1772                raise
1773
1774        except os.error:
1775            # report a user-friendly error
1776            manager.extraction_error()
1777
1778        return real_path
1779
1780    def _is_current(self, file_path, zip_path):
1781        """
1782        Return True if the file_path is current for this zip_path
1783        """
1784        timestamp, size = self._get_date_and_size(self.zipinfo[zip_path])
1785        if not os.path.isfile(file_path):
1786            return False
1787        stat = os.stat(file_path)
1788        if stat.st_size != size or stat.st_mtime != timestamp:
1789            return False
1790        # check that the contents match
1791        zip_contents = self.loader.get_data(zip_path)
1792        with open(file_path, 'rb') as f:
1793            file_contents = f.read()
1794        return zip_contents == file_contents
1795
1796    def _get_eager_resources(self):
1797        if self.eagers is None:
1798            eagers = []
1799            for name in ('native_libs.txt', 'eager_resources.txt'):
1800                if self.has_metadata(name):
1801                    eagers.extend(self.get_metadata_lines(name))
1802            self.eagers = eagers
1803        return self.eagers
1804
1805    def _index(self):
1806        try:
1807            return self._dirindex
1808        except AttributeError:
1809            ind = {}
1810            for path in self.zipinfo:
1811                parts = path.split(os.sep)
1812                while parts:
1813                    parent = os.sep.join(parts[:-1])
1814                    if parent in ind:
1815                        ind[parent].append(parts[-1])
1816                        break
1817                    else:
1818                        ind[parent] = [parts.pop()]
1819            self._dirindex = ind
1820            return ind
1821
1822    def _has(self, fspath):
1823        zip_path = self._zipinfo_name(fspath)
1824        return zip_path in self.zipinfo or zip_path in self._index()
1825
1826    def _isdir(self, fspath):
1827        return self._zipinfo_name(fspath) in self._index()
1828
1829    def _listdir(self, fspath):
1830        return list(self._index().get(self._zipinfo_name(fspath), ()))
1831
1832    def _eager_to_zip(self, resource_name):
1833        return self._zipinfo_name(self._fn(self.egg_root, resource_name))
1834
1835    def _resource_to_zip(self, resource_name):
1836        return self._zipinfo_name(self._fn(self.module_path, resource_name))
1837
1838
# Modules imported from zip archives get their resources via ZipProvider.
register_loader_type(zipimport.zipimporter, ZipProvider)
1840
1841
class FileMetadata(EmptyProvider):
    """Serve metadata from a standalone PKG-INFO file.

    Usage::

        metadata = FileMetadata("/path/to/PKG-INFO")

    Only the ``PKG-INFO`` name is recognized: it reports as existing,
    and its content is whatever the file at the given location holds.
    Every other data or metadata request is rejected.
    """

    def __init__(self, path):
        # Filesystem location of the PKG-INFO file.
        self.path = path

    def has_metadata(self, name):
        # Only PKG-INFO is served, and only when the backing file exists.
        return name == 'PKG-INFO' and os.path.isfile(self.path)

    def get_metadata(self, name):
        if name != 'PKG-INFO':
            raise KeyError("No metadata except PKG-INFO is available")

        # Decode permissively; undecodable bytes become U+FFFD, which
        # _warn_on_replacement reports to the user.
        with io.open(self.path, encoding='utf-8', errors="replace") as fp:
            content = fp.read()
        self._warn_on_replacement(content)
        return content

    def get_metadata_lines(self, name):
        return yield_lines(self.get_metadata(name))

    def _warn_on_replacement(self, metadata):
        # Python 2.6 and 3.2 compat for: replacement_char = '�'
        replacement_char = b'\xef\xbf\xbd'.decode('utf-8')
        if replacement_char not in metadata:
            return
        tmpl = "{self.path} could not be properly decoded in UTF-8"
        warnings.warn(tmpl.format(self=self))
1879
1880
class PathMetadata(DefaultProvider):
    """Metadata provider for egg directories

    Usage::

        # Development eggs:

        egg_info = "/path/to/PackageName.egg-info"
        base_dir = os.path.dirname(egg_info)
        metadata = PathMetadata(base_dir, egg_info)
        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
        dist = Distribution(base_dir, project_name=dist_name,
                            metadata=metadata)

        # Unpacked egg directories:

        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
        metadata = PathMetadata(egg_path, os.path.join(egg_path, 'EGG-INFO'))
        dist = Distribution.from_filename(egg_path, metadata=metadata)
    """

    def __init__(self, path, egg_info):
        # `egg_info` is the metadata directory (.egg-info / EGG-INFO);
        # `path` is the directory holding the importable code.
        self.egg_info = egg_info
        self.module_path = path
1904
1905
class EggMetadata(ZipProvider):
    """Metadata provider for .egg files"""

    def __init__(self, importer):
        """Create a metadata provider from a zipimporter"""
        # Deliberately bypasses ZipProvider.__init__: there is no module
        # object here, only the importer itself.
        archive = importer.archive
        self.zip_pre = archive + os.sep
        self.loader = importer
        prefix = importer.prefix
        self.module_path = os.path.join(archive, prefix) if prefix else archive
        self._setup_prefix()
1919
1920
# Registry mapping importer types to distribution-finder callables;
# _declare_state lets pkg_resources rebuild it across reloads.
_declare_state('dict', _distribution_finders={})
1922
1923
def register_finder(importer_type, distribution_finder):
    """Register `distribution_finder` to find distributions in sys.path items

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `distribution_finder` is a callable that, passed a path
    item and the importer instance, yields ``Distribution`` instances found on
    that path item.  See ``pkg_resources.find_on_path`` for an example."""
    # Later registrations for the same importer type replace earlier ones.
    _distribution_finders[importer_type] = distribution_finder
1932
1933
def find_distributions(path_item, only=False):
    """Yield distributions accessible via `path_item`"""
    # Look up the finder registered for this path item's importer type
    # (falling back through its MRO) and delegate to it.
    importer = get_importer(path_item)
    finder = _find_adapter(_distribution_finders, importer)
    return finder(importer, path_item, only)
1939
1940
def find_eggs_in_zip(importer, path_item, only=False):
    """
    Find eggs in zip files; possibly multiple nested eggs.
    """
    if importer.archive.endswith('.whl'):
        # wheels are not supported with this finder
        # they don't have PKG-INFO metadata, and won't ever contain eggs
        return
    metadata = EggMetadata(importer)
    if metadata.has_metadata('PKG-INFO'):
        # the archive itself is a distribution
        yield Distribution.from_filename(path_item, metadata=metadata)
    if only:
        # don't yield nested distros
        return
    for subitem in metadata.resource_listdir('/'):
        subpath = os.path.join(path_item, subitem)
        if _is_unpacked_egg(subitem):
            # recurse into an egg nested inside this archive
            nested = find_eggs_in_zip(zipimport.zipimporter(subpath), subpath)
            for dist in nested:
                yield dist
        elif subitem.lower().endswith('.dist-info'):
            submeta = EggMetadata(zipimport.zipimporter(subpath))
            submeta.egg_info = subpath
            yield Distribution.from_location(path_item, subitem, submeta)
1965
1966
1967
# Zip archives on sys.path are scanned for (possibly nested) eggs.
register_finder(zipimport.zipimporter, find_eggs_in_zip)
1969
1970
def find_nothing(importer, path_item, only=False):
    """Fallback finder: yields no distributions for unrecognized importers."""
    return ()
1973
1974
# Default for unknown importer types: no distributions found.
register_finder(object, find_nothing)
1976
1977
def _by_version_descending(names):
    """
    Given a list of filenames, return them in descending order
    by version number.

    >>> names = 'bar', 'foo', 'Python-2.7.10.egg', 'Python-2.7.2.egg'
    >>> _by_version_descending(names)
    ['Python-2.7.10.egg', 'Python-2.7.2.egg', 'foo', 'bar']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.egg', 'Setuptools-1.2.3b1.egg']
    >>> names = 'Setuptools-1.2.3b1.egg', 'Setuptools-1.2.3.post1.egg'
    >>> _by_version_descending(names)
    ['Setuptools-1.2.3.post1.egg', 'Setuptools-1.2.3b1.egg']
    """
    def _by_version(name):
        """Split a filename into version-comparable components."""
        stem, ext = os.path.splitext(name)
        components = itertools.chain(stem.split('-'), [ext])
        return [packaging.version.parse(part) for part in components]

    return sorted(names, key=_by_version, reverse=True)
2002
2003
def find_on_path(importer, path_item, only=False):
    """Yield distributions accessible on a sys.path directory"""
    path_item = _normalize_cached(path_item)

    if os.path.isdir(path_item) and os.access(path_item, os.R_OK):
        if _is_unpacked_egg(path_item):
            # The path item itself is an unpacked egg; its metadata
            # lives in the EGG-INFO subdirectory.
            yield Distribution.from_filename(
                path_item, metadata=PathMetadata(
                    path_item, os.path.join(path_item, 'EGG-INFO')
                )
            )
        else:
            # scan for .egg and .egg-info in directory
            path_item_entries = _by_version_descending(os.listdir(path_item))
            for entry in path_item_entries:
                lower = entry.lower()
                if lower.endswith('.egg-info') or lower.endswith('.dist-info'):
                    fullpath = os.path.join(path_item, entry)
                    if os.path.isdir(fullpath):
                        # egg-info directory, allow getting metadata
                        if len(os.listdir(fullpath)) == 0:
                            # Empty egg directory, skip.
                            continue
                        metadata = PathMetadata(path_item, fullpath)
                    else:
                        # single-file metadata (e.g. a PKG-INFO file)
                        metadata = FileMetadata(fullpath)
                    yield Distribution.from_location(
                        path_item, entry, metadata, precedence=DEVELOP_DIST
                    )
                elif not only and _is_unpacked_egg(entry):
                    # nested unpacked egg; delegate to the finder registry
                    dists = find_distributions(os.path.join(path_item, entry))
                    for dist in dists:
                        yield dist
                elif not only and lower.endswith('.egg-link'):
                    # .egg-link files hold the path of a development egg;
                    # only the first non-blank line is followed.
                    with open(os.path.join(path_item, entry)) as entry_file:
                        entry_lines = entry_file.readlines()
                    for line in entry_lines:
                        if not line.strip():
                            continue
                        path = os.path.join(path_item, line.rstrip())
                        dists = find_distributions(path)
                        for item in dists:
                            yield item
                        break
2048
2049
# Plain filesystem directories on sys.path use find_on_path.
register_finder(pkgutil.ImpImporter, find_on_path)

# importlib's FileFinder (Python 3.3+) scans directories the same way.
if hasattr(importlib_machinery, 'FileFinder'):
    register_finder(importlib_machinery.FileFinder, find_on_path)

# Reload-safe registries for namespace-package handling.
_declare_state('dict', _namespace_handlers={})
_declare_state('dict', _namespace_packages={})
2057
2058
def register_namespace_handler(importer_type, namespace_handler):
    """Register `namespace_handler` to declare namespace packages

    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
    handler), and `namespace_handler` is a callable like this::

        def namespace_handler(importer, path_entry, moduleName, module):
            # return a path_entry to use for child packages

    Namespace handlers are only called if the importer object has already
    agreed that it can handle the relevant path item, and they should only
    return a subpath if the module __path__ does not already contain an
    equivalent subpath.  For an example namespace handler, see
    ``pkg_resources.file_ns_handler``.
    """
    # Later registrations for the same importer type replace earlier ones.
    _namespace_handlers[importer_type] = namespace_handler
2075
2076
def _handle_ns(packageName, path_item):
    """Ensure that named package includes a subpath of path_item (if needed)"""

    importer = get_importer(path_item)
    if importer is None:
        # no importer can handle this path item
        return None
    loader = importer.find_module(packageName)
    if loader is None:
        # this path item does not contain the package
        return None
    module = sys.modules.get(packageName)
    if module is None:
        # create a fresh, empty namespace module and attach it to its parent
        module = sys.modules[packageName] = types.ModuleType(packageName)
        module.__path__ = []
        _set_parent_ns(packageName)
    elif not hasattr(module, '__path__'):
        raise TypeError("Not a package:", packageName)
    handler = _find_adapter(_namespace_handlers, importer)
    subpath = handler(importer, path_item, packageName, module)
    if subpath is not None:
        # append the new subpath, execute the package's __init__, then
        # re-sort __path__ to match sys.path ordering
        path = module.__path__
        path.append(subpath)
        loader.load_module(packageName)
        _rebuild_mod_path(path, packageName, module)
    return subpath
2101
2102
def _rebuild_mod_path(orig_path, package_name, module):
    """
    Rebuild module.__path__ ensuring that all entries are ordered
    corresponding to their sys.path order
    """
    if not isinstance(orig_path, list):
        # Is this behavior useful when module.__path__ is not a list?
        return

    sys_path = [_normalize_cached(entry) for entry in sys.path]

    def position_in_sys_path(path):
        """
        Return the ordinal of the path based on its position in sys.path
        """
        path_parts = path.split(os.sep)
        # strip one trailing component per package level to get the
        # sys.path root this entry came from
        depth = package_name.count('.') + 1
        root = os.sep.join(path_parts[:-depth])
        # Workaround for #520 and #513: entries whose root is not on
        # sys.path sort last instead of raising.
        try:
            return sys_path.index(_normalize_cached(root))
        except ValueError:
            return float('inf')

    orig_path.sort(key=position_in_sys_path)
    module.__path__[:] = [_normalize_cached(entry) for entry in orig_path]
2134
2135
def declare_namespace(packageName):
    """Declare that package 'packageName' is a namespace package"""

    # Hold the import lock while mutating the global namespace registries.
    _imp.acquire_lock()
    try:
        if packageName in _namespace_packages:
            # already declared; nothing to do
            return

        path, parent = sys.path, None
        if '.' in packageName:
            # recursively declare every ancestor as a namespace package,
            # and search the parent's __path__ rather than sys.path
            parent = '.'.join(packageName.split('.')[:-1])
            declare_namespace(parent)
            if parent not in _namespace_packages:
                __import__(parent)
            try:
                path = sys.modules[parent].__path__
            except AttributeError:
                raise TypeError("Not a package:", parent)

        # Track what packages are namespaces, so when new path items are added,
        # they can be updated
        _namespace_packages.setdefault(parent, []).append(packageName)
        _namespace_packages.setdefault(packageName, [])

        for path_item in path:
            # Ensure all the parent's path items are reflected in the child,
            # if they apply
            _handle_ns(packageName, path_item)

    finally:
        _imp.release_lock()
2167
2168
def fixup_namespace_packages(path_item, parent=None):
    """Ensure that previously-declared namespace packages include path_item"""
    _imp.acquire_lock()
    try:
        for pkg in _namespace_packages.get(parent, ()):
            # graft path_item onto each declared namespace package, then
            # recurse so child namespaces pick up the new subpath too
            subpath = _handle_ns(pkg, path_item)
            if subpath:
                fixup_namespace_packages(subpath, pkg)
    finally:
        _imp.release_lock()
2179
2180
def file_ns_handler(importer, path_item, packageName, module):
    """Compute an ns-package subpath for a filesystem or zipfile importer"""

    subpath = os.path.join(path_item, packageName.split('.')[-1])
    normalized = _normalize_cached(subpath)
    already_present = any(
        _normalize_cached(item) == normalized for item in module.__path__
    )
    # Only return the path if it's not already there
    if not already_present:
        return subpath
2192
2193
# Filesystem directories and zip archives share the same handler.
register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
register_namespace_handler(zipimport.zipimporter, file_ns_handler)

# importlib's FileFinder (Python 3.3+) is handled identically.
if hasattr(importlib_machinery, 'FileFinder'):
    register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler)
2199
2200
def null_ns_handler(importer, path_item, packageName, module):
    """Fallback namespace handler: contributes no subpath."""
    return None
2203
2204
# Default for unknown importer types: no namespace subpath.
register_namespace_handler(object, null_ns_handler)
2206
2207
def normalize_path(filename):
    """Normalize a file/dir name for comparison purposes"""
    # realpath resolves symlinks; normcase folds case on case-insensitive
    # filesystems (e.g. Windows).
    resolved = os.path.realpath(filename)
    return os.path.normcase(resolved)
2211
2212
def _normalize_cached(filename, _cache={}):
    # NB: the mutable default is deliberate -- it is the process-wide
    # memo of normalize_path() results.
    if filename not in _cache:
        _cache[filename] = normalize_path(filename)
    return _cache[filename]
2219
2220
2221def _is_unpacked_egg(path):
2222    """
2223    Determine if given path appears to be an unpacked egg.
2224    """
2225    return (
2226        path.lower().endswith('.egg')
2227    )
2228
2229
2230def _set_parent_ns(packageName):
2231    parts = packageName.split('.')
2232    name = parts.pop()
2233    if parts:
2234        parent = '.'.join(parts)
2235        setattr(sys.modules[parent], name, sys.modules[packageName])
2236
2237
def yield_lines(strs):
    """Yield non-empty/non-comment lines of a string or sequence"""
    if isinstance(strs, six.string_types):
        for raw in strs.splitlines():
            line = raw.strip()
            # skip blank lines/comments
            if line and not line.startswith('#'):
                yield line
    else:
        # a sequence (possibly nested): flatten recursively
        for item in strs:
            for line in yield_lines(item):
                yield line
2250
2251
# Matches a dotted module/group name, e.g. "pkg.mod".
MODULE = re.compile(r"\w+(\.\w+)*$").match
# Parses egg filenames of the form name[-ver[-pyX.Y[-plat]]];
# everything after the project name is optional.
EGG_NAME = re.compile(
    r"""
    (?P<name>[^-]+) (
        -(?P<ver>[^-]+) (
            -py(?P<pyver>[^-]+) (
                -(?P<plat>.+)
            )?
        )?
    )?
    """,
    re.VERBOSE | re.IGNORECASE,
).match
2265
2266
class EntryPoint(object):
    """Object representing an advertised importable object"""

    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
        if not MODULE(module_name):
            raise ValueError("Invalid module name", module_name)
        self.name = name
        self.module_name = module_name
        self.attrs = tuple(attrs)
        # Validate/normalize extras by round-tripping them through a
        # dummy requirement string.
        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
        self.dist = dist

    def __str__(self):
        # Render in the canonical "name = module:attrs [extras]" syntax.
        s = "%s = %s" % (self.name, self.module_name)
        if self.attrs:
            s += ':' + '.'.join(self.attrs)
        if self.extras:
            s += ' [%s]' % ','.join(self.extras)
        return s

    def __repr__(self):
        return "EntryPoint.parse(%r)" % str(self)

    def load(self, require=True, *args, **kwargs):
        """
        Require packages for this EntryPoint, then resolve it.
        """
        if not require or args or kwargs:
            warnings.warn(
                "Parameters to load are deprecated.  Call .resolve and "
                ".require separately.",
                DeprecationWarning,
                stacklevel=2,
            )
        if require:
            self.require(*args, **kwargs)
        return self.resolve()

    def resolve(self):
        """
        Resolve the entry point from its module and attrs.
        """
        module = __import__(self.module_name, fromlist=['__name__'], level=0)
        try:
            # walk the dotted attribute path down from the module
            return functools.reduce(getattr, self.attrs, module)
        except AttributeError as exc:
            raise ImportError(str(exc))

    def require(self, env=None, installer=None):
        # Resolve this entry point's requirements and add them to the
        # global working set so resolve() can import them.
        if self.extras and not self.dist:
            raise UnknownExtra("Can't require() without a distribution", self)

        # Get the requirements for this entry point with all its extras and
        # then resolve them. We have to pass `extras` along when resolving so
        # that the working set knows what extras we want. Otherwise, for
        # dist-info distributions, the working set will assume that the
        # requirements for that extra are purely optional and skip over them.
        reqs = self.dist.requires(self.extras)
        items = working_set.resolve(reqs, env, installer, extras=self.extras)
        list(map(working_set.add, items))

    # Grammar for a single entry-point line:
    #   name = module[:attrs] [extras]
    pattern = re.compile(
        r'\s*'
        r'(?P<name>.+?)\s*'
        r'=\s*'
        r'(?P<module>[\w.]+)\s*'
        r'(:\s*(?P<attr>[\w.]+))?\s*'
        r'(?P<extras>\[.*\])?\s*$'
    )

    @classmethod
    def parse(cls, src, dist=None):
        """Parse a single entry point from string `src`

        Entry point syntax follows the form::

            name = some.module:some.attr [extra1, extra2]

        The entry name and module name are required, but the ``:attrs`` and
        ``[extras]`` parts are optional
        """
        m = cls.pattern.match(src)
        if not m:
            msg = "EntryPoint must be in 'name=module:attrs [extras]' format"
            raise ValueError(msg, src)
        res = m.groupdict()
        extras = cls._parse_extras(res['extras'])
        attrs = res['attr'].split('.') if res['attr'] else ()
        return cls(res['name'], res['module'], attrs, extras, dist)

    @classmethod
    def _parse_extras(cls, extras_spec):
        # Parse "[a,b]" into a tuple of extras by way of a dummy
        # requirement; version specifiers are not allowed here.
        if not extras_spec:
            return ()
        req = Requirement.parse('x' + extras_spec)
        if req.specs:
            raise ValueError()
        return req.extras

    @classmethod
    def parse_group(cls, group, lines, dist=None):
        """Parse an entry point group"""
        if not MODULE(group):
            raise ValueError("Invalid group name", group)
        this = {}
        for line in yield_lines(lines):
            ep = cls.parse(line, dist)
            if ep.name in this:
                raise ValueError("Duplicate entry point", group, ep.name)
            this[ep.name] = ep
        return this

    @classmethod
    def parse_map(cls, data, dist=None):
        """Parse a map of entry point groups"""
        if isinstance(data, dict):
            data = data.items()
        else:
            data = split_sections(data)
        maps = {}
        for group, lines in data:
            if group is None:
                # lines before any [section] header are only legal if blank
                if not lines:
                    continue
                raise ValueError("Entry points must be listed in groups")
            group = group.strip()
            if group in maps:
                raise ValueError("Duplicate group name", group)
            maps[group] = cls.parse_group(group, lines, dist)
        return maps
2397
2398
2399def _remove_md5_fragment(location):
2400    if not location:
2401        return ''
2402    parsed = urllib.parse.urlparse(location)
2403    if parsed[-1].startswith('md5='):
2404        return urllib.parse.urlunparse(parsed[:-1] + ('',))
2405    return location
2406
2407
def _version_from_file(lines):
    """
    Given an iterable of lines from a Metadata file, return
    the value of the Version field, if present, or None otherwise.
    """
    # PEP 8 (E731): use a def rather than assigning a lambda to a name.
    def is_version_line(line):
        # case-insensitive match of the "Version:" header
        return line.lower().startswith('version:')

    version_lines = filter(is_version_line, lines)
    # take the first matching line, or '' if none
    line = next(iter(version_lines), '')
    _, _, value = line.partition(':')
    return safe_version(value.strip()) or None
2418
2419
2420class Distribution(object):
2421    """Wrap an actual or potential sys.path entry w/metadata"""
2422    PKG_INFO = 'PKG-INFO'
2423
2424    def __init__(self, location=None, metadata=None, project_name=None,
2425            version=None, py_version=PY_MAJOR, platform=None,
2426            precedence=EGG_DIST):
2427        self.project_name = safe_name(project_name or 'Unknown')
2428        if version is not None:
2429            self._version = safe_version(version)
2430        self.py_version = py_version
2431        self.platform = platform
2432        self.location = location
2433        self.precedence = precedence
2434        self._provider = metadata or empty_provider
2435
2436    @classmethod
2437    def from_location(cls, location, basename, metadata=None, **kw):
2438        project_name, version, py_version, platform = [None] * 4
2439        basename, ext = os.path.splitext(basename)
2440        if ext.lower() in _distributionImpl:
2441            cls = _distributionImpl[ext.lower()]
2442
2443            match = EGG_NAME(basename)
2444            if match:
2445                project_name, version, py_version, platform = match.group(
2446                    'name', 'ver', 'pyver', 'plat'
2447                )
2448        return cls(
2449            location, metadata, project_name=project_name, version=version,
2450            py_version=py_version, platform=platform, **kw
2451        )._reload_version()
2452
    def _reload_version(self):
        # Hook for subclasses to refresh their version after construction;
        # the base implementation is a no-op returning self.
        return self
2455
    @property
    def hashcmp(self):
        # Tuple used by __hash__ and all ordering comparisons: version
        # first, then precedence, key, location (minus any md5 fragment),
        # python version and platform.
        return (
            self.parsed_version,
            self.precedence,
            self.key,
            _remove_md5_fragment(self.location),
            self.py_version or '',
            self.platform or '',
        )
2466
2467    def __hash__(self):
2468        return hash(self.hashcmp)
2469
2470    def __lt__(self, other):
2471        return self.hashcmp < other.hashcmp
2472
2473    def __le__(self, other):
2474        return self.hashcmp <= other.hashcmp
2475
2476    def __gt__(self, other):
2477        return self.hashcmp > other.hashcmp
2478
2479    def __ge__(self, other):
2480        return self.hashcmp >= other.hashcmp
2481
2482    def __eq__(self, other):
2483        if not isinstance(other, self.__class__):
2484            # It's not a Distribution, so they are not equal
2485            return False
2486        return self.hashcmp == other.hashcmp
2487
    def __ne__(self, other):
        # Delegate to == (py2 has no automatic __ne__ derivation).
        return not self == other
2490
2491    # These properties have to be lazy so that we don't have to load any
2492    # metadata until/unless it's actually needed.  (i.e., some distributions
2493    # may not know their name or version without loading PKG-INFO)
2494
2495    @property
2496    def key(self):
2497        try:
2498            return self._key
2499        except AttributeError:
2500            self._key = key = self.project_name.lower()
2501            return key
2502
2503    @property
2504    def parsed_version(self):
2505        if not hasattr(self, "_parsed_version"):
2506            self._parsed_version = parse_version(self.version)
2507
2508        return self._parsed_version
2509
    def _warn_legacy_version(self):
        # Emit a PEP440Warning when this distribution's version could
        # only be parsed as a legacy (non PEP 440) version.
        LV = packaging.version.LegacyVersion
        is_legacy = isinstance(self._parsed_version, LV)
        if not is_legacy:
            return

        # While an empty version is technically a legacy version and
        # is not a valid PEP 440 version, it's also unlikely to
        # actually come from someone and instead it is more likely that
        # it comes from setuptools attempting to parse a filename and
        # including it in the list. So for that we'll gate this warning
        # on if the version is anything at all or not.
        if not self.version:
            return

        tmpl = textwrap.dedent("""
            '{project_name} ({version})' is being parsed as a legacy,
            non PEP 440,
            version. You may find odd behavior and sort order.
            In particular it will be sorted as less than 0.0. It
            is recommended to migrate to PEP 440 compatible
            versions.
            """).strip().replace('\n', ' ')

        warnings.warn(tmpl.format(**vars(self)), PEP440Warning)
2535
2536    @property
2537    def version(self):
2538        try:
2539            return self._version
2540        except AttributeError:
2541            version = _version_from_file(self._get_metadata(self.PKG_INFO))
2542            if version is None:
2543                tmpl = "Missing 'Version:' header and/or %s file"
2544                raise ValueError(tmpl % self.PKG_INFO, self)
2545            return version
2546
    @property
    def _dep_map(self):
        # Lazily-built mapping of extra name (None = unconditional) to a
        # list of parsed Requirements, read from requires.txt/depends.txt.
        try:
            return self.__dep_map
        except AttributeError:
            dm = self.__dep_map = {None: []}
            for name in 'requires.txt', 'depends.txt':
                for extra, reqs in split_sections(self._get_metadata(name)):
                    if extra:
                        if ':' in extra:
                            # "extra:marker" section header -- drop the
                            # reqs when the marker doesn't apply
                            extra, marker = extra.split(':', 1)
                            if invalid_marker(marker):
                                # XXX warn
                                reqs = []
                            elif not evaluate_marker(marker):
                                reqs = []
                        extra = safe_extra(extra) or None
                    dm.setdefault(extra, []).extend(parse_requirements(reqs))
            return dm
2566
2567    def requires(self, extras=()):
2568        """List of Requirements needed for this distro if `extras` are used"""
2569        dm = self._dep_map
2570        deps = []
2571        deps.extend(dm.get(None, ()))
2572        for ext in extras:
2573            try:
2574                deps.extend(dm[safe_extra(ext)])
2575            except KeyError:
2576                raise UnknownExtra(
2577                    "%s has no such extra feature %r" % (self, ext)
2578                )
2579        return deps
2580
2581    def _get_metadata(self, name):
2582        if self.has_metadata(name):
2583            for line in self.get_metadata_lines(name):
2584                yield line
2585
2586    def activate(self, path=None, replace=False):
2587        """Ensure distribution is importable on `path` (default=sys.path)"""
2588        if path is None:
2589            path = sys.path
2590        self.insert_on(path, replace=replace)
2591        if path is sys.path:
2592            fixup_namespace_packages(self.location)
2593            for pkg in self._get_metadata('namespace_packages.txt'):
2594                if pkg in sys.modules:
2595                    declare_namespace(pkg)
2596
2597    def egg_name(self):
2598        """Return what this distribution's standard .egg filename should be"""
2599        filename = "%s-%s-py%s" % (
2600            to_filename(self.project_name), to_filename(self.version),
2601            self.py_version or PY_MAJOR
2602        )
2603
2604        if self.platform:
2605            filename += '-' + self.platform
2606        return filename
2607
2608    def __repr__(self):
2609        if self.location:
2610            return "%s (%s)" % (self, self.location)
2611        else:
2612            return str(self)
2613
2614    def __str__(self):
2615        try:
2616            version = getattr(self, 'version', None)
2617        except ValueError:
2618            version = None
2619        version = version or "[unknown version]"
2620        return "%s %s" % (self.project_name, version)
2621
2622    def __getattr__(self, attr):
2623        """Delegate all unrecognized public attributes to .metadata provider"""
2624        if attr.startswith('_'):
2625            raise AttributeError(attr)
2626        return getattr(self._provider, attr)
2627
2628    @classmethod
2629    def from_filename(cls, filename, metadata=None, **kw):
2630        return cls.from_location(
2631            _normalize_cached(filename), os.path.basename(filename), metadata,
2632            **kw
2633        )
2634
2635    def as_requirement(self):
2636        """Return a ``Requirement`` that matches this distribution exactly"""
2637        if isinstance(self.parsed_version, packaging.version.Version):
2638            spec = "%s==%s" % (self.project_name, self.parsed_version)
2639        else:
2640            spec = "%s===%s" % (self.project_name, self.parsed_version)
2641
2642        return Requirement.parse(spec)
2643
2644    def load_entry_point(self, group, name):
2645        """Return the `name` entry point of `group` or raise ImportError"""
2646        ep = self.get_entry_info(group, name)
2647        if ep is None:
2648            raise ImportError("Entry point %r not found" % ((group, name),))
2649        return ep.load()
2650
2651    def get_entry_map(self, group=None):
2652        """Return the entry point map for `group`, or the full entry map"""
2653        try:
2654            ep_map = self._ep_map
2655        except AttributeError:
2656            ep_map = self._ep_map = EntryPoint.parse_map(
2657                self._get_metadata('entry_points.txt'), self
2658            )
2659        if group is not None:
2660            return ep_map.get(group, {})
2661        return ep_map
2662
2663    def get_entry_info(self, group, name):
2664        """Return the EntryPoint object for `group`+`name`, or ``None``"""
2665        return self.get_entry_map(group).get(name)
2666
    def insert_on(self, path, loc=None, replace=False):
        """Ensure self.location is on path

        If replace=False (default):
            - If location is already in path anywhere, do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent.
              - Else: add to the end of path.
        If replace=True:
            - If location is already on path anywhere (not eggs)
              or higher priority than its parent (eggs)
              do nothing.
            - Else:
              - If it's an egg and its parent directory is on path,
                insert just ahead of the parent,
                removing any lower-priority entries.
              - Else: add it to the front of path.
        """

        loc = loc or self.location
        if not loc:
            # nothing to insert (e.g. metadata-only distribution)
            return

        nloc = _normalize_cached(loc)
        bdir = os.path.dirname(nloc)
        # normalized shadow copy of `path` used for comparisons; falsy
        # entries (e.g. '') are passed through untouched
        npath = [(p and _normalize_cached(p) or p) for p in path]

        for p, item in enumerate(npath):
            if item == nloc:
                if replace:
                    break
                else:
                    # don't modify path (even removing duplicates) if found and not replace
                    return
            elif item == bdir and self.precedence == EGG_DIST:
                # if it's an .egg, give it precedence over its directory
                # UNLESS it's already been added to sys.path and replace=False
                if (not replace) and nloc in npath[p:]:
                    return
                if path is sys.path:
                    self.check_version_conflict()
                # insert ahead of the parent directory, mirroring the change
                # in the normalized shadow list
                path.insert(p, loc)
                npath.insert(p, nloc)
                break
        else:
            # loop ran to completion: loc not on path and no egg-parent spot
            if path is sys.path:
                self.check_version_conflict()
            if replace:
                path.insert(0, loc)
            else:
                path.append(loc)
            return

        # p is the spot where we found or inserted loc; now remove duplicates
        while True:
            try:
                np = npath.index(nloc, p + 1)
            except ValueError:
                break
            else:
                # drop the lower-priority duplicate from both lists and
                # resume scanning from the removal point
                del npath[np], path[np]
                p = np

        return
2733
2734    def check_version_conflict(self):
2735        if self.key == 'setuptools':
2736            # ignore the inevitable setuptools self-conflicts  :(
2737            return
2738
2739        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
2740        loc = normalize_path(self.location)
2741        for modname in self._get_metadata('top_level.txt'):
2742            if (modname not in sys.modules or modname in nsp
2743                    or modname in _namespace_packages):
2744                continue
2745            if modname in ('pkg_resources', 'setuptools', 'site'):
2746                continue
2747            fn = getattr(sys.modules[modname], '__file__', None)
2748            if fn and (normalize_path(fn).startswith(loc) or
2749                       fn.startswith(self.location)):
2750                continue
2751            issue_warning(
2752                "Module %s was already imported from %s, but %s is being added"
2753                " to sys.path" % (modname, fn, self.location),
2754            )
2755
2756    def has_version(self):
2757        try:
2758            self.version
2759        except ValueError:
2760            issue_warning("Unbuilt egg for " + repr(self))
2761            return False
2762        return True
2763
2764    def clone(self, **kw):
2765        """Copy this distribution, substituting in any changed keyword args"""
2766        names = 'project_name version py_version platform location precedence'
2767        for attr in names.split():
2768            kw.setdefault(attr, getattr(self, attr, None))
2769        kw.setdefault('metadata', self._provider)
2770        return self.__class__(**kw)
2771
2772    @property
2773    def extras(self):
2774        return [dep for dep in self._dep_map if dep]
2775
2776
class EggInfoDistribution(Distribution):
    def _reload_version(self):
        """Prefer the version recorded in the metadata file over the one
        derived from the filename.

        Packages installed by distutils (e.g. numpy or scipy) use an old
        safe_version, so their version numbers can get mangled when
        converted to filenames (e.g., 1.11.0.dev0+2329eae to
        1.11.0.dev0_2329eae) and would not be parsed properly downstream
        by Distribution and safe_version. Reading the version straight
        from the metadata file sidesteps that.
        """
        version = _version_from_file(self._get_metadata(self.PKG_INFO))
        if version:
            self._version = version
        return self
2794
2795
class DistInfoDistribution(Distribution):
    """Wrap an actual or potential sys.path entry w/metadata, .dist-info style"""
    PKG_INFO = 'METADATA'
    EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])")

    @property
    def _parsed_pkg_info(self):
        """Parse and cache metadata"""
        if not hasattr(self, '_pkg_info'):
            raw = self.get_metadata(self.PKG_INFO)
            self._pkg_info = email.parser.Parser().parsestr(raw)
        return self._pkg_info

    @property
    def _dep_map(self):
        # Lazily computed on first access, then cached (name-mangled so it
        # does not collide with the base class cache).
        try:
            return self.__dep_map
        except AttributeError:
            self.__dep_map = self._compute_dependencies()
        return self.__dep_map

    def _compute_dependencies(self):
        """Recompute this distribution's dependencies."""
        reqs = []
        # Including any condition expressions
        for req in self._parsed_pkg_info.get_all('Requires-Dist') or []:
            reqs.extend(parse_requirements(req))

        def reqs_for_extra(extra):
            # Requirements whose marker is absent or evaluates true for
            # the given extra.
            return [
                req for req in reqs
                if not req.marker or req.marker.evaluate({'extra': extra})
            ]

        dm = self.__dep_map = {None: []}
        common = frozenset(reqs_for_extra(None))
        dm[None].extend(common)

        for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []:
            s_extra = safe_extra(extra.strip())
            dm[s_extra] = list(frozenset(reqs_for_extra(extra)) - common)

        return dm
2841
2842
# Map a metadata suffix to the Distribution subclass that understands it.
_distributionImpl = {
    '.egg': Distribution,
    '.egg-info': EggInfoDistribution,
    '.dist-info': DistInfoDistribution,
}
2848
2849
def issue_warning(*args, **kw):
    """Issue a warning attributed to the first caller outside this module."""
    level = 1
    g = globals()
    try:
        # walk up past every stack frame whose globals belong to this
        # module, so the warning points at external calling code
        while sys._getframe(level).f_globals is g:
            level += 1
    except ValueError:
        # ran off the top of the stack; use whatever level we reached
        pass
    warnings.warn(*args, stacklevel=level + 1, **kw)
2861
2862
class RequirementParseError(ValueError):
    """Raised when a requirement string cannot be parsed."""

    def __str__(self):
        return ' '.join(self.args)
2866
2867
def parse_requirements(strs):
    """Yield ``Requirement`` objects for each specification in `strs`

    `strs` must be a string, or a (possibly-nested) iterable thereof.
    """
    # create a steppable iterator, so we can handle \-continuations
    lines = iter(yield_lines(strs))

    for line in lines:
        # Drop comments -- a hash without a space may be in a URL.
        if ' #' in line:
            line = line[:line.find(' #')]
        # If there is a line continuation, drop it, and append the next line.
        if line.endswith('\\'):
            line = line[:-2].strip()
            try:
                line += next(lines)
            except StopIteration:
                # A trailing continuation with no following line: under
                # PEP 479 (Python 3.7+) letting StopIteration escape a
                # generator raises RuntimeError, so end explicitly.
                return
        yield Requirement(line)
2885
2886
class Requirement(packaging.requirements.Requirement):
    def __init__(self, requirement_string):
        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
        try:
            super(Requirement, self).__init__(requirement_string)
        except packaging.requirements.InvalidRequirement as e:
            raise RequirementParseError(str(e))
        self.unsafe_name = self.name
        project_name = safe_name(self.name)
        self.project_name = project_name
        self.key = project_name.lower()
        # legacy (operator, version) pairs derived from the specifier set
        self.specs = [(s.operator, s.version) for s in self.specifier]
        self.extras = tuple(map(safe_extra, self.extras))
        # tuple used for equality and hashing
        self.hashCmp = (
            self.key,
            self.specifier,
            frozenset(self.extras),
            str(self.marker) if self.marker else None,
        )
        self.__hash = hash(self.hashCmp)

    def __eq__(self, other):
        if not isinstance(other, Requirement):
            return False
        return self.hashCmp == other.hashCmp

    def __ne__(self, other):
        return not self == other

    def __contains__(self, item):
        # A Distribution matches only when the project keys agree; then
        # its version is tested against our specifier.
        if isinstance(item, Distribution):
            if item.key != self.key:
                return False
            item = item.version

        # Allow prereleases always in order to match the previous behavior of
        # this method. In the future this should be smarter and follow PEP 440
        # more accurately.
        return self.specifier.contains(item, prereleases=True)

    def __hash__(self):
        return self.__hash

    def __repr__(self):
        return "Requirement.parse(%r)" % str(self)

    @staticmethod
    def parse(s):
        """Parse exactly one requirement from string `s`."""
        req, = parse_requirements(s)
        return req
2938
2939
2940def _get_mro(cls):
2941    """Get an mro for a type or classic class"""
2942    if not isinstance(cls, type):
2943
2944        class cls(cls, object):
2945            pass
2946
2947        return cls.__mro__[1:]
2948    return cls.__mro__
2949
2950
def _find_adapter(registry, ob):
    """Return an adapter factory for `ob` from `registry`"""
    # walk the mro from most to least specific; first hit wins
    for candidate in _get_mro(getattr(ob, '__class__', type(ob))):
        if candidate in registry:
            return registry[candidate]
2956
2957
def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    parent = os.path.dirname(path)
    if os.path.isdir(parent):
        return
    os.makedirs(parent)
2963
2964
def _bypass_ensure_directory(path):
    """Sandbox-bypassing version of ensure_directory()"""
    if not WRITE_SUPPORT:
        raise IOError('"os.mkdir" not supported on this platform.')
    dirname, filename = split(path)
    if not (dirname and filename):
        return
    if not isdir(dirname):
        # create ancestors first, then this directory
        _bypass_ensure_directory(dirname)
        mkdir(dirname, 0o755)
2973
2974
def split_sections(s):
    """Split a string or iterable thereof into (section, content) pairs

    Each ``section`` is a stripped version of the section header ("[section]")
    and each ``content`` is a list of stripped lines excluding blank lines and
    comment-only lines.  If there are any such lines before the first section
    header, they're returned in a first ``section`` of ``None``.
    """
    section = None
    content = []
    for line in yield_lines(s):
        if not line.startswith("["):
            content.append(line)
            continue
        if not line.endswith("]"):
            raise ValueError("Invalid section heading", line)
        # flush the previous section before starting a new one
        if section or content:
            yield section, content
        section = line[1:-1].strip()
        content = []

    # wrap up last segment
    yield section, content
2999
3000
3001def _mkstemp(*args, **kw):
3002    old_open = os.open
3003    try:
3004        # temporarily bypass sandboxing
3005        os.open = os_open
3006        return tempfile.mkstemp(*args, **kw)
3007    finally:
3008        # and then put it back
3009        os.open = old_open
3010
3011
# Silence the PEP440Warning by default, so that end users don't get hit by it
# randomly just because they use pkg_resources. We want to append the rule
# because we want earlier uses of filterwarnings to take precedence over this
# one (appended filters are consulted last).
warnings.filterwarnings("ignore", category=PEP440Warning, append=True)
3017
3018
3019# from jaraco.functools 1.3
3020def _call_aside(f, *args, **kwargs):
3021    f(*args, **kwargs)
3022    return f
3023
3024
@_call_aside
def _initialize(g=globals()):
    "Set up global resource manager (deliberately not state-saved)"
    manager = ResourceManager()
    g['_manager'] = manager
    # re-export every public ResourceManager method at module level
    for name in dir(manager):
        if not name.startswith('_'):
            g[name] = getattr(manager, name)
3035
3036
@_call_aside
def _initialize_master_working_set():
    """
    Prepare the master working set and make the ``require()``
    API available.

    This function has explicit effects on the global state
    of pkg_resources. It is intended to be invoked once at
    the initialization of this module.

    Invocation by other packages is unsupported and done
    at their own risk.
    """
    working_set = WorkingSet._build_master()
    _declare_state('object', working_set=working_set)

    # module-level conveniences bound to the master working set; published
    # as globals by the ``globals().update(locals())`` below
    require = working_set.require
    iter_entry_points = working_set.iter_entry_points
    add_activation_listener = working_set.subscribe
    run_script = working_set.run_script
    # backward compatibility
    run_main = run_script
    # Activate all distributions already on sys.path with replace=False and
    # ensure that all distributions added to the working set in the future
    # (e.g. by calling ``require()``) will get activated as well,
    # with higher priority (replace=True).
    tuple(
        dist.activate(replace=False)
        for dist in working_set
    )
    add_activation_listener(lambda dist: dist.activate(replace=True), existing=False)
    working_set.entries = []
    # match order
    list(map(working_set.add_entry, sys.path))
    # export every local defined above (require, run_script, working_set,
    # ...) as a module-level name
    globals().update(locals())
3072